[Pkg-samba-maint] r4073 - in trunk/samba/debian: . patches

bubulle at alioth.debian.org bubulle at alioth.debian.org
Wed May 9 10:06:50 UTC 2012


tags 654499 pending
thanks

Author: bubulle
Date: 2012-05-09 10:06:50 +0000 (Wed, 09 May 2012)
New Revision: 4073

Added:
   trunk/samba/debian/patches/waf-as-source.patch
Modified:
   trunk/samba/debian/changelog
   trunk/samba/debian/patches/series
Log:
Add upstream commit that adds waf source to the buildtools/
directory. As upstream will, one day or another, merge this, I
prefer this over removing the waf binary and repacking the
upstream tarball.
Closes: #654499

Modified: trunk/samba/debian/changelog
===================================================================
--- trunk/samba/debian/changelog	2012-05-09 10:06:22 UTC (rev 4072)
+++ trunk/samba/debian/changelog	2012-05-09 10:06:50 UTC (rev 4073)
@@ -24,6 +24,11 @@
     Closes: #249873
   * Disable useless smbtorture4 build. Thanks to Ivo De Decker for the patch.
     Closes: #670561
+  * Add upstream commit that adds waf source to the buildtools/
+    directory. As upstream will, one day or another, merge this, I
+    prefer this over removing the waf binary and repacking the
+    upstream tarball.
+    Closes: #654499
 
 -- Christian Perrier <bubulle@debian.org>  Mon, 07 May 2012 22:16:32 +0200
 

Modified: trunk/samba/debian/patches/series
===================================================================
--- trunk/samba/debian/patches/series	2012-05-09 10:06:22 UTC (rev 4072)
+++ trunk/samba/debian/patches/series	2012-05-09 10:06:50 UTC (rev 4073)
@@ -18,3 +18,4 @@
 bug_387266_upstream_4104_mention-kerberos-in-smbspool-manpage.patch
 bug_604768_upstream_7826_drop-using-samba-link.patch
 bug_604768_upstream_7826_fix-WHATSNEW-link.patch
+waf-as-source.patch

Added: trunk/samba/debian/patches/waf-as-source.patch
===================================================================
--- trunk/samba/debian/patches/waf-as-source.patch	                        (rev 0)
+++ trunk/samba/debian/patches/waf-as-source.patch	2012-05-09 10:06:50 UTC (rev 4073)
@@ -0,0 +1,18538 @@
+commit 4f4bce5301ffd8c12aed1b108affa1a75feefb67
+Author: Jelmer Vernooij <jelmer@samba.org>
+Date:   Wed Jan 4 00:31:27 2012 +0100
+
+    Include waf as an extracted source directory, rather than as a one-in-a-file script.
+
+diff --git a/buildtools/README b/buildtools/README
+new file mode 100644
+index 0000000..eab0382
+--- /dev/null
++++ b/buildtools/README
+@@ -0,0 +1,12 @@
++See http://code.google.com/p/waf/ for more information on waf
++
++You can get a svn copy of the upstream source with:
++
++  svn checkout http://waf.googlecode.com/svn/trunk/ waf-read-only
++
++Samba currently uses waf 1.5, which can be found at:
++
++  http://waf.googlecode.com/svn/branches/waf-1.5
++
++To update the current copy of waf, use the update-waf.sh script in this
++directory.
+diff --git a/buildtools/bin/README b/buildtools/bin/README
+deleted file mode 100644
+index 9ef8a1f..0000000
+--- a/buildtools/bin/README
++++ /dev/null
+@@ -1,16 +0,0 @@
+-This copy of waf-svn is taken from the git mirror of waf
+-at:
+-
+-  git://git.samba.org/tridge/waf-svn.git
+-
+-using the waf-samba branch
+-
+-It was built using the command:
+-
+-  ./waf-light --zip-type=gz --make-waf
+-
+-See http://code.google.com/p/waf/ for more information on waf
+-
+-You can get a svn copy of the upstream source with:
+-
+-  svn checkout http://waf.googlecode.com/svn/trunk/ waf-read-only
+diff --git a/buildtools/bin/waf-svn b/buildtools/bin/waf-svn
+deleted file mode 100755
+index 6d54d5f..0000000
+Binary files a/buildtools/bin/waf-svn and /dev/null differ
+diff --git a/buildtools/update-waf.sh b/buildtools/update-waf.sh
+new file mode 100755
+index 0000000..bb3a4bf
+--- /dev/null
++++ b/buildtools/update-waf.sh
+@@ -0,0 +1,13 @@
++#!/bin/sh
++# Update our copy of waf
++
++TARGETDIR="`dirname $0`"
++WORKDIR="`mktemp -d`"
++
++mkdir -p "$WORKDIR"
++
++svn checkout http://waf.googlecode.com/svn/branches/waf-1.5/wafadmin "$WORKDIR/wafadmin"
++
++rsync -C -avz --delete "$WORKDIR/wafadmin/" "$TARGETDIR/wafadmin/"
++
++rm -rf "$WORKDIR"
+diff --git a/buildtools/wafadmin/3rdparty/ParallelDebug.py b/buildtools/wafadmin/3rdparty/ParallelDebug.py
+new file mode 100644
+index 0000000..9d0493e
+--- /dev/null
++++ b/buildtools/wafadmin/3rdparty/ParallelDebug.py
+@@ -0,0 +1,299 @@
++#! /usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2007-2010 (ita)
++
++"""
++debugging helpers for parallel compilation, outputs
++a svg file in the build directory
++"""
++
++import os, time, sys, threading
++try: from Queue import Queue
++except: from queue import Queue
++import Runner, Options, Utils, Task, Logs
++from Constants import *
++
++#import random
++#random.seed(100)
++
++def set_options(opt):
++	opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv),
++		help='title for the svg diagram', dest='dtitle')
++	opt.add_option('--dwidth', action='store', type='int', help='diagram width', default=1000, dest='dwidth')
++	opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime')
++	opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband')
++	opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')
++
++# red   #ff4d4d
++# green #4da74d
++# lila  #a751ff
++
++color2code = {
++	'GREEN'  : '#4da74d',
++	'YELLOW' : '#fefe44',
++	'PINK'   : '#a751ff',
++	'RED'    : '#cc1d1d',
++	'BLUE'   : '#6687bb',
++	'CYAN'   : '#34e2e2',
++
++}
++
++mp = {}
++info = [] # list of (text,color)
++
++def map_to_color(name):
++	if name in mp:
++		return mp[name]
++	try:
++		cls = Task.TaskBase.classes[name]
++	except KeyError:
++		return color2code['RED']
++	if cls.color in mp:
++		return mp[cls.color]
++	if cls.color in color2code:
++		return color2code[cls.color]
++	return color2code['RED']
++
++def loop(self):
++	while 1:
++		tsk=Runner.TaskConsumer.ready.get()
++		tsk.master.set_running(1, id(threading.currentThread()), tsk)
++		Runner.process_task(tsk)
++		tsk.master.set_running(-1, id(threading.currentThread()), tsk)
++Runner.TaskConsumer.loop = loop
++
++
++old_start = Runner.Parallel.start
++def do_start(self):
++	print Options.options
++	try:
++		Options.options.dband
++	except AttributeError:
++		raise ValueError('use def options(opt): opt.load("parallel_debug")!')
++
++	self.taskinfo = Queue()
++	old_start(self)
++	process_colors(self)
++Runner.Parallel.start = do_start
++
++def set_running(self, by, i, tsk):
++	self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by)  )
++Runner.Parallel.set_running = set_running
++
++def name2class(name):
++	return name.replace(' ', '_').replace('.', '_')
++
++def process_colors(producer):
++	# first, cast the parameters
++	tmp = []
++	try:
++		while True:
++			tup = producer.taskinfo.get(False)
++			tmp.append(list(tup))
++	except:
++		pass
++
++	try:
++		ini = float(tmp[0][2])
++	except:
++		return
++
++	if not info:
++		seen = []
++		for x in tmp:
++			name = x[3]
++			if not name in seen:
++				seen.append(name)
++			else:
++				continue
++
++			info.append((name, map_to_color(name)))
++		info.sort(key=lambda x: x[0])
++
++	thread_count = 0
++	acc = []
++	for x in tmp:
++		thread_count += x[6]
++		acc.append("%d %d %f %r %d %d %d" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count))
++	f = open('pdebug.dat', 'w')
++	#Utils.write('\n'.join(acc))
++	f.write('\n'.join(acc))
++
++	tmp = [lst[:2] + [float(lst[2]) - ini] + lst[3:] for lst in tmp]
++
++	st = {}
++	for l in tmp:
++		if not l[0] in st:
++			st[l[0]] = len(st.keys())
++	tmp = [  [st[lst[0]]] + lst[1:] for lst in tmp ]
++	THREAD_AMOUNT = len(st.keys())
++
++	st = {}
++	for l in tmp:
++		if not l[1] in st:
++			st[l[1]] = len(st.keys())
++	tmp = [  [lst[0]] + [st[lst[1]]] + lst[2:] for lst in tmp ]
++
++
++	BAND = Options.options.dband
++
++	seen = {}
++	acc = []
++	for x in range(len(tmp)):
++		line = tmp[x]
++		id = line[1]
++
++		if id in seen:
++			continue
++		seen[id] = True
++
++		begin = line[2]
++		thread_id = line[0]
++		for y in range(x + 1, len(tmp)):
++			line = tmp[y]
++			if line[1] == id:
++				end = line[2]
++				#print id, thread_id, begin, end
++				#acc.append(  ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) )
++				acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3]) )
++				break
++
++	if Options.options.dmaxtime < 0.1:
++		gwidth = 1
++		for x in tmp:
++			m = BAND * x[2]
++			if m > gwidth:
++				gwidth = m
++	else:
++		gwidth = BAND * Options.options.dmaxtime
++
++	ratio = float(Options.options.dwidth) / gwidth
++	gwidth = Options.options.dwidth
++
++	gheight = BAND * (THREAD_AMOUNT + len(info) + 1.5)
++
++	out = []
++
++	out.append("""<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>
++<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.0//EN\"
++\"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd\">
++<svg xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" version=\"1.0\"
++   x=\"%r\" y=\"%r\" width=\"%r\" height=\"%r\"
++   id=\"svg602\" xml:space=\"preserve\">
++
++<style type='text/css' media='screen'>
++    g.over rect  { stroke:#FF0000; fill-opacity:0.4 }
++</style>
++
++<script type='text/javascript'><![CDATA[
++    var svg  = document.getElementsByTagName('svg')[0];
++    var svgNS = svg.getAttribute('xmlns');
++    svg.addEventListener('mouseover',function(e){
++      var g = e.target.parentNode;
++      var x = document.getElementById('r_'+g.id);
++      if (x) {
++         g.setAttribute('class', g.getAttribute('class')+' over');
++         x.setAttribute('class', x.getAttribute('class')+' over');
++         showInfo(e, g.id);
++      }
++    },false);
++    svg.addEventListener('mouseout',function(e){
++      var g = e.target.parentNode;
++      var x = document.getElementById('r_'+g.id);
++      if (x) {
++         g.setAttribute('class',g.getAttribute('class').replace(' over',''));
++         x.setAttribute('class',x.getAttribute('class').replace(' over',''));
++         hideInfo(e);
++      }
++    },false);
++
++function showInfo(evt, txt) {
++    tooltip = document.getElementById('tooltip');
++
++    var t = document.getElementById('tooltiptext');
++    t.firstChild.data = txt;
++
++    var x = evt.clientX+10;
++    if (x > 200) { x -= t.getComputedTextLength() + 16; }
++    var y = evt.clientY+30;
++    tooltip.setAttribute("transform", "translate(" + x + "," + y + ")");
++    tooltip.setAttributeNS(null,"visibility","visible");
++
++    var r = document.getElementById('tooltiprect');
++    r.setAttribute('width', t.getComputedTextLength()+6)
++}
++
++
++function hideInfo(evt) {
++    tooltip = document.getElementById('tooltip');
++    tooltip.setAttributeNS(null,"visibility","hidden");
++}
++
++]]></script>
++
++<!-- inkscape requires a big rectangle or it will not export the pictures properly -->
++<rect
++   x='%r' y='%r'
++   width='%r' height='%r' z-index='10'
++   style=\"font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;\"
++   />\n
++
++""" % (0, 0, gwidth + 4, gheight + 4,   0, 0, gwidth + 4, gheight + 4))
++
++	# main title
++	if Options.options.dtitle:
++		out.append("""<text x="%d" y="%d" style="font-size:15px; text-anchor:middle; font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans">%s</text>
++""" % (gwidth/2, gheight - 5, Options.options.dtitle))
++
++	# the rectangles
++	groups = {}
++	for (x, y, w, h, clsname) in acc:
++		try:
++			groups[clsname].append((x, y, w, h))
++		except:
++			groups[clsname] = [(x, y, w, h)]
++
++	for cls in groups:
++
++		out.append("<g id='%s'>\n" % name2class(cls))
++
++		for (x, y, w, h) in groups[cls]:
++			out.append("""   <rect
++   x='%r' y='%r'
++   width='%r' height='%r' z-index='11'
++   style=\"font-size:10;fill:%s;fill-rule:evenodd;stroke:#000000;stroke-width:0.2px;\"
++   />\n""" % (2 + x*ratio, 2 + y, w*ratio, h, map_to_color(cls)))
++
++		out.append("</g>\n")
++
++	# output the caption
++	cnt = THREAD_AMOUNT
++
++	for (text, color) in info:
++		# caption box
++		b = BAND/2
++		out.append("""<g id='r_%s'><rect
++		x='%r' y='%r'
++		width='%r' height='%r'
++		style=\"font-size:10;fill:%s;fill-rule:evenodd;stroke:#000000;stroke-width:0.2px;\"
++  />\n""" %                       (name2class(text), 2 + BAND,     5 + (cnt + 0.5) * BAND, b, b, color))
++
++		# caption text
++		out.append("""<text
++   style="font-size:12px;font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans"
++   x="%r" y="%d">%s</text></g>\n""" % (2 + 2 * BAND, 5 + (cnt + 0.5) * BAND + 10, text))
++		cnt += 1
++
++	out.append("""
++<g transform="translate(0,0)" visibility="hidden" id="tooltip">
++  <rect id="tooltiprect" y="-15" x="-3" width="1" height="20" style="stroke:black;fill:#edefc2;stroke-width:1"/>
++  <text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;"> </text>
++</g>""")
++
++	out.append("\n</svg>")
++
++	#node = producer.bld.path.make_node('pdebug.svg')
++	f = open('pdebug.svg', 'w')
++	f.write("".join(out))
++
++
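
A note on the technique: ParallelDebug.py works by monkey-patching waf's
scheduler (Runner.Parallel.start, set_running, TaskConsumer.loop) so that a
timestamped record is appended around every task execution, and the records
are then rendered as an SVG timeline. A minimal standalone sketch of that
wrapping pattern, illustrative only and not part of the patch:

  import functools, time

  timeline = []   # (name, start, duration) records, like taskinfo above

  def instrument(fn):
      # Wrap a callable so every call appends a timing record; assigning the
      # wrapper back over the original (as done above with
      # Runner.Parallel.start = do_start) is the whole trick.
      @functools.wraps(fn)
      def wrapper(*args, **kwargs):
          t0 = time.time()
          try:
              return fn(*args, **kwargs)
          finally:
              timeline.append((fn.__name__, t0, time.time() - t0))
      return wrapper
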
+diff --git a/buildtools/wafadmin/3rdparty/batched_cc.py b/buildtools/wafadmin/3rdparty/batched_cc.py
+new file mode 100644
+index 0000000..8e31074
+--- /dev/null
++++ b/buildtools/wafadmin/3rdparty/batched_cc.py
+@@ -0,0 +1,183 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006 (ita)
++
++"""
++Batched builds - compile faster
++instead of compiling object files one by one, c/c++ compilers are often able to compile several at once:
++cc -c ../file1.c ../file2.c ../file3.c
++
++Files are output in the directory where the compiler is called, and dependencies are more difficult
++to track (do not run the command on all source files if only one file changes)
++
++As such, we pretend the files are compiled one by one, but no command is actually run:
++replace each cc/cpp Task by a TaskSlave
++A new task called TaskMaster collects the signatures from each slave and finds out the command-line
++to run.
++
++To set this up, the method ccroot::create_task is replaced by a new version. To enable batched
++builds, it is only necessary to import this module in the configuration (no other change required)
++"""
++
++MAX_BATCH = 50
++MAXPARALLEL = False
++
++EXT_C = ['.c', '.cc', '.cpp', '.cxx']
++
++import os, threading
++import TaskGen, Task, ccroot, Build, Logs
++from TaskGen import extension, feature, before
++from Constants import *
++
++cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} -c ${SRCLST}'
++cc_fun = Task.compile_fun_noshell('batched_cc', cc_str)[0]
++
++cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} -c ${SRCLST}'
++cxx_fun = Task.compile_fun_noshell('batched_cxx', cxx_str)[0]
++
++count = 70000
++class batch_task(Task.Task):
++	color = 'RED'
++
++	after = 'cc cxx'
++	before = 'cc_link cxx_link static_link'
++
++	def __str__(self):
++		return '(batch compilation for %d slaves)\n' % len(self.slaves)
++
++	def __init__(self, *k, **kw):
++		Task.Task.__init__(self, *k, **kw)
++		self.slaves = []
++		self.inputs = []
++		self.hasrun = 0
++
++		global count
++		count += 1
++		self.idx = count
++
++	def add_slave(self, slave):
++		self.slaves.append(slave)
++		self.set_run_after(slave)
++
++	def runnable_status(self):
++		for t in self.run_after:
++			if not t.hasrun:
++				return ASK_LATER
++
++		for t in self.slaves:
++			#if t.executed:
++			if t.hasrun != SKIPPED:
++				return RUN_ME
++
++		return SKIP_ME
++
++	def run(self):
++		outputs = []
++		self.outputs = []
++
++		srclst = []
++		slaves = []
++		for t in self.slaves:
++			if t.hasrun != SKIPPED:
++				slaves.append(t)
++				srclst.append(t.inputs[0].abspath(self.env))
++
++		self.env.SRCLST = srclst
++		self.cwd = slaves[0].inputs[0].parent.abspath(self.env)
++
++		env = self.env
++		app = env.append_unique
++		cpppath_st = env['CPPPATH_ST']
++		env._CCINCFLAGS = env.CXXINCFLAGS = []
++
++		# local flags come first
++		# set the user-defined includes paths
++		for i in env['INC_PATHS']:
++			app('_CCINCFLAGS', cpppath_st % i.abspath())
++			app('_CXXINCFLAGS', cpppath_st % i.abspath())
++			app('_CCINCFLAGS', cpppath_st % i.abspath(env))
++			app('_CXXINCFLAGS', cpppath_st % i.abspath(env))
++
++		# set the library include paths
++		for i in env['CPPPATH']:
++			app('_CCINCFLAGS', cpppath_st % i)
++			app('_CXXINCFLAGS', cpppath_st % i)
++
++		if self.slaves[0].__class__.__name__ == 'cc':
++			ret = cc_fun(self)
++		else:
++			ret = cxx_fun(self)
++
++		if ret:
++			return ret
++
++		for t in slaves:
++			t.old_post_run()
++
++from TaskGen import extension, feature, after
++
++import cc, cxx
++def wrap(fun):
++	def foo(self, node):
++		# we cannot control the extension, this sucks
++		self.obj_ext = '.o'
++
++		task = fun(self, node)
++		if not getattr(self, 'masters', None):
++			self.masters = {}
++			self.allmasters = []
++
++		if not node.parent.id in self.masters:
++			m = self.masters[node.parent.id] = self.master = self.create_task('batch')
++			self.allmasters.append(m)
++		else:
++			m = self.masters[node.parent.id]
++			if len(m.slaves) > MAX_BATCH:
++				m = self.masters[node.parent.id] = self.master = self.create_task('batch')
++				self.allmasters.append(m)
++
++		m.add_slave(task)
++		return task
++	return foo
++
++c_hook = wrap(cc.c_hook)
++extension(cc.EXT_CC)(c_hook)
++
++cxx_hook = wrap(cxx.cxx_hook)
++extension(cxx.EXT_CXX)(cxx_hook)
++
++
++@feature('cprogram', 'cshlib', 'cstaticlib')
++@after('apply_link')
++def link_after_masters(self):
++	if getattr(self, 'allmasters', None):
++		for m in self.allmasters:
++			self.link_task.set_run_after(m)
++
++for c in ['cc', 'cxx']:
++	t = Task.TaskBase.classes[c]
++	def run(self):
++		pass
++
++	def post_run(self):
++		#self.executed=1
++		pass
++
++	def can_retrieve_cache(self):
++		if self.old_can_retrieve_cache():
++			for m in self.generator.allmasters:
++				try:
++					m.slaves.remove(self)
++				except ValueError:
++					pass	#this task wasn't included in that master
++			return 1
++		else:
++			return None
++
++	setattr(t, 'oldrun', t.__dict__['run'])
++	setattr(t, 'run', run)
++	setattr(t, 'old_post_run', t.post_run)
++	setattr(t, 'post_run', post_run)
++	setattr(t, 'old_can_retrieve_cache', t.can_retrieve_cache)
++	setattr(t, 'can_retrieve_cache', can_retrieve_cache)
++
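
Stripped of the waf task machinery, the batching idea is simply to slice the
source list and run one compiler command per slice. A standalone sketch,
assuming a POSIX-style compiler on PATH; names are illustrative and not part
of the patch:

  import subprocess

  MAX_BATCH = 50  # same cap as above

  def compile_batched(sources, cc='cc', flags=()):
      # One 'cc -c' invocation per MAX_BATCH files instead of one per file;
      # as noted above, the objects land in the compiler's working directory.
      for i in range(0, len(sources), MAX_BATCH):
          batch = list(sources[i:i + MAX_BATCH])
          subprocess.check_call([cc] + list(flags) + ['-c'] + batch)
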
+diff --git a/buildtools/wafadmin/3rdparty/boost.py b/buildtools/wafadmin/3rdparty/boost.py
+new file mode 100644
+index 0000000..e690a4e
+--- /dev/null
++++ b/buildtools/wafadmin/3rdparty/boost.py
+@@ -0,0 +1,343 @@
++#!/usr/bin/env python
++# encoding: utf-8
++#
++# partially based on boost.py written by Gernot Vormayr
++# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
++# modified by Bjoern Michaelsen, 2008
++# modified by Luca Fossati, 2008
++# rewritten for waf 1.5.1, Thomas Nagy, 2008
++#
++#def set_options(opt):
++#	opt.tool_options('boost')
++#	# ...
++#
++#def configure(conf):
++#	# ... (e.g. conf.check_tool('g++'))
++#	conf.check_tool('boost')
++#   conf.check_boost(lib='signals filesystem', static='onlystatic', score_version=(-1000, 1000), tag_minscore=1000)
++#
++#def build(bld):
++#   bld(source='main.c', target='bar', uselib="BOOST BOOST_SYSTEM")
++#
++#ISSUES:
++# * find_includes should be called only once!
++# * support mandatory
++
++######## boost update ###########
++## ITA: * the method get_boost_version_number does work
++##      * the rest of the code has not really been tried
++#       * make certain a demo is provided (in demos/adv for example)
++
++# TODO: bad and underdocumented code -> boost.py will be removed in waf 1.6 to be rewritten later
++
++import os.path, glob, types, re, sys
++import Configure, config_c, Options, Utils, Logs
++from Logs import warn, debug
++from Configure import conf
++
++boost_code = '''
++#include <iostream>
++#include <boost/version.hpp>
++int main() { std::cout << BOOST_VERSION << std::endl; }
++'''
++
++boost_libpath = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
++boost_cpppath = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
++
++STATIC_NOSTATIC = 'nostatic'
++STATIC_BOTH = 'both'
++STATIC_ONLYSTATIC = 'onlystatic'
++
++is_versiontag = re.compile('^\d+_\d+_?\d*$')
++is_threadingtag = re.compile('^mt$')
++is_abitag = re.compile('^[sgydpn]+$')
++is_toolsettag = re.compile('^(acc|borland|como|cw|dmc|darwin|gcc|hp_cxx|intel|kylix|vc|mgw|qcc|sun|vacpp)\d*$')
++is_pythontag=re.compile('^py[0-9]{2}$')
++
++def set_options(opt):
++	opt.add_option('--boost-includes', type='string', default='', dest='boostincludes', help='path to the boost directory where the includes are e.g. /usr/local/include/boost-1_35')
++	opt.add_option('--boost-libs', type='string', default='', dest='boostlibs', help='path to the directory where the boost libs are e.g. /usr/local/lib')
++
++def string_to_version(s):
++	version = s.split('.')
++	if len(version) < 3: return 0
++	return int(version[0])*100000 + int(version[1])*100 + int(version[2])
++
++def version_string(version):
++	major = version / 100000
++	minor = version / 100 % 1000
++	minor_minor = version % 100
++	if minor_minor == 0:
++		return "%d_%d" % (major, minor)
++	else:
++		return "%d_%d_%d" % (major, minor, minor_minor)
++
++def libfiles(lib, pattern, lib_paths):
++	result = []
++	for lib_path in lib_paths:
++		libname = pattern % ('boost_%s[!_]*' % lib)
++		result += glob.glob(os.path.join(lib_path, libname))
++	return result
++
++@conf
++def get_boost_version_number(self, dir):
++	"""silently retrieve the boost version number"""
++	try:
++		return self.run_c_code(compiler='cxx', code=boost_code, includes=dir, execute=1, env=self.env.copy(), type='cprogram', compile_mode='cxx', compile_filename='test.cpp')
++	except Configure.ConfigurationError, e:
++		return -1
++
++def set_default(kw, var, val):
++	if not var in kw:
++		kw[var] = val
++
++def tags_score(tags, kw):
++	"""
++	checks library tags
++
++	see http://www.boost.org/doc/libs/1_35_0/more/getting_started/unix-variants.html 6.1
++	"""
++	score = 0
++	needed_tags = {
++		'threading': kw['tag_threading'],
++		'abi':       kw['tag_abi'],
++		'toolset':   kw['tag_toolset'],
++		'version':   kw['tag_version'],
++		'python':    kw['tag_python']
++	}
++
++	if kw['tag_toolset'] is None:
++		v = kw['env']
++		toolset = v['CXX_NAME']
++		if v['CXX_VERSION']:
++			version_no = v['CXX_VERSION'].split('.')
++			toolset += version_no[0]
++			if len(version_no) > 1:
++				toolset += version_no[1]
++		needed_tags['toolset'] = toolset
++
++	found_tags = {}
++	for tag in tags:
++		if is_versiontag.match(tag): found_tags['version'] = tag
++		if is_threadingtag.match(tag): found_tags['threading'] = tag
++		if is_abitag.match(tag): found_tags['abi'] = tag
++		if is_toolsettag.match(tag): found_tags['toolset'] = tag
++		if is_pythontag.match(tag): found_tags['python'] = tag
++
++	for tagname in needed_tags.iterkeys():
++		if needed_tags[tagname] is not None and tagname in found_tags:
++			if re.compile(needed_tags[tagname]).match(found_tags[tagname]):
++				score += kw['score_' + tagname][0]
++			else:
++				score += kw['score_' + tagname][1]
++	return score
++
++@conf
++def validate_boost(self, kw):
++	ver = kw.get('version', '')
++
++	for x in 'min_version max_version version'.split():
++		set_default(kw, x, ver)
++
++	set_default(kw, 'lib', '')
++	kw['lib'] = Utils.to_list(kw['lib'])
++
++	set_default(kw, 'env', self.env)
++
++	set_default(kw, 'libpath', boost_libpath)
++	set_default(kw, 'cpppath', boost_cpppath)
++
++	for x in 'tag_threading tag_version tag_toolset'.split():
++		set_default(kw, x, None)
++	set_default(kw, 'tag_abi', '^[^d]*$')
++
++	set_default(kw, 'python', str(sys.version_info[0]) + str(sys.version_info[1]) )
++	set_default(kw, 'tag_python', '^py' + kw['python'] + '$')
++
++	set_default(kw, 'score_threading', (10, -10))
++	set_default(kw, 'score_abi', (10, -10))
++	set_default(kw, 'score_python', (10,-10))
++	set_default(kw, 'score_toolset', (1, -1))
++	set_default(kw, 'score_version', (100, -100))
++
++	set_default(kw, 'score_min', 0)
++	set_default(kw, 'static', STATIC_NOSTATIC)
++	set_default(kw, 'found_includes', False)
++	set_default(kw, 'min_score', 0)
++
++	set_default(kw, 'errmsg', 'not found')
++	set_default(kw, 'okmsg', 'ok')
++
++@conf
++def find_boost_includes(self, kw):
++	"""
++	check every path in kw['cpppath'] for subdir
++	that either starts with boost- or is named boost.
++
++	Then the version is checked and selected accordingly to
++	min_version/max_version. The highest possible version number is
++	selected!
++
++	If no versiontag is set the versiontag is set accordingly to the
++	selected library and CPPPATH_BOOST is set.
++	"""
++	boostPath = getattr(Options.options, 'boostincludes', '')
++	if boostPath:
++		boostPath = [os.path.normpath(os.path.expandvars(os.path.expanduser(boostPath)))]
++	else:
++		boostPath = Utils.to_list(kw['cpppath'])
++
++	min_version = string_to_version(kw.get('min_version', ''))
++	max_version = string_to_version(kw.get('max_version', '')) or (sys.maxint - 1)
++
++	version = 0
++	for include_path in boostPath:
++		boost_paths = [p for p in glob.glob(os.path.join(include_path, 'boost*')) if os.path.isdir(p)]
++		debug('BOOST Paths: %r' % boost_paths)
++		for path in boost_paths:
++			pathname = os.path.split(path)[-1]
++			ret = -1
++			if pathname == 'boost':
++				path = include_path
++				ret = self.get_boost_version_number(path)
++			elif pathname.startswith('boost-'):
++				ret = self.get_boost_version_number(path)
++			ret = int(ret)
++
++			if ret != -1 and ret >= min_version and ret <= max_version and ret > version:
++				boost_path = path
++				version = ret
++	if not version:
++		self.fatal('boost headers not found! (required version min: %s max: %s)'
++			  % (kw['min_version'], kw['max_version']))
++		return False
++
++	found_version = version_string(version)
++	versiontag = '^' + found_version + '$'
++	if kw['tag_version'] is None:
++		kw['tag_version'] = versiontag
++	elif kw['tag_version'] != versiontag:
++		warn('boost header version %r and tag_version %r do not match!' % (versiontag, kw['tag_version']))
++	env = self.env
++	env['CPPPATH_BOOST'] = boost_path
++	env['BOOST_VERSION'] = found_version
++	self.found_includes = 1
++	ret = 'Version %s (%s)' % (found_version, boost_path)
++	return ret
++
++@conf
++def find_boost_library(self, lib, kw):
++
++	def find_library_from_list(lib, files):
++		lib_pattern = re.compile('.*boost_(.*?)\..*')
++		result = (None, None)
++		resultscore = kw['min_score'] - 1
++		for file in files:
++			m = lib_pattern.search(file, 1)
++			if m:
++				libname = m.group(1)
++				libtags = libname.split('-')[1:]
++				currentscore = tags_score(libtags, kw)
++				if currentscore > resultscore:
++					result = (libname, file)
++					resultscore = currentscore
++		return result
++
++	lib_paths = getattr(Options.options, 'boostlibs', '')
++	if lib_paths:
++		lib_paths = [os.path.normpath(os.path.expandvars(os.path.expanduser(lib_paths)))]
++	else:
++		lib_paths = Utils.to_list(kw['libpath'])
++
++	v = kw.get('env', self.env)
++
++	(libname, file) = (None, None)
++	if kw['static'] in [STATIC_NOSTATIC, STATIC_BOTH]:
++		st_env_prefix = 'LIB'
++		files = libfiles(lib, v['shlib_PATTERN'], lib_paths)
++		(libname, file) = find_library_from_list(lib, files)
++	if libname is None and kw['static'] in [STATIC_ONLYSTATIC, STATIC_BOTH]:
++		st_env_prefix = 'STATICLIB'
++		staticLibPattern = v['staticlib_PATTERN']
++		if self.env['CC_NAME'] == 'msvc':
++			staticLibPattern = 'lib' + staticLibPattern
++		files = libfiles(lib, staticLibPattern, lib_paths)
++		(libname, file) = find_library_from_list(lib, files)
++	if libname is not None:
++		v['LIBPATH_BOOST_' + lib.upper()] = [os.path.split(file)[0]]
++		if self.env['CC_NAME'] == 'msvc' and os.path.splitext(file)[1] == '.lib':
++			v[st_env_prefix + '_BOOST_' + lib.upper()] = ['libboost_'+libname]
++		else:
++			v[st_env_prefix + '_BOOST_' + lib.upper()] = ['boost_'+libname]
++		return
++	self.fatal('lib boost_' + lib + ' not found!')
++
++@conf
++def check_boost(self, *k, **kw):
++	"""
++	This should be the main entry point
++
++- min_version
++- max_version
++- version
++- include_path
++- lib_path
++- lib
++- toolsettag   - None or a regexp
++- threadingtag - None or a regexp
++- abitag       - None or a regexp
++- versiontag   - WARNING: you should rather use version or min_version/max_version
++- static       - look for static libs (values:
++	  'nostatic'   or STATIC_NOSTATIC   - ignore static libs (default)
++	  'both'       or STATIC_BOTH       - find static libs, too
++	  'onlystatic' or STATIC_ONLYSTATIC - find only static libs
++- score_version
++- score_abi
++- scores_threading
++- score_toolset
++ * the scores are tuples (match_score, nomatch_score)
++   match_score is the added to the score if the tag is matched
++   nomatch_score is added when a tag is found and does not match
++- min_score
++	"""
++
++	if not self.env['CXX']:
++		self.fatal('load a c++ compiler tool first, for example conf.check_tool("g++")')
++	self.validate_boost(kw)
++	ret = None
++	try:
++		if not kw.get('found_includes', None):
++			self.check_message_1(kw.get('msg_includes', 'boost headers'))
++			ret = self.find_boost_includes(kw)
++
++	except Configure.ConfigurationError, e:
++		if 'errmsg' in kw:
++			self.check_message_2(kw['errmsg'], 'YELLOW')
++		if 'mandatory' in kw:
++			if Logs.verbose > 1:
++				raise
++			else:
++				self.fatal('the configuration failed (see %r)' % self.log.name)
++	else:
++		if 'okmsg' in kw:
++			self.check_message_2(kw.get('okmsg_includes', ret))
++
++	for lib in kw['lib']:
++		self.check_message_1('library boost_'+lib)
++		try:
++			self.find_boost_library(lib, kw)
++		except Configure.ConfigurationError, e:
++			ret = False
++			if 'errmsg' in kw:
++				self.check_message_2(kw['errmsg'], 'YELLOW')
++			if 'mandatory' in kw:
++				if Logs.verbose > 1:
++					raise
++				else:
++					self.fatal('the configuration failed (see %r)' % self.log.name)
++		else:
++			if 'okmsg' in kw:
++				self.check_message_2(kw['okmsg'])
++
++	return ret
++
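
The version arithmetic in string_to_version()/version_string() follows the
BOOST_VERSION macro: major * 100000 + minor * 100 + patch. A worked example
(the helper name is illustrative, not part of the patch):

  def encode_boost_version(major, minor, patch):
      # Same encoding the two helpers above convert to and from.
      return major * 100000 + minor * 100 + patch

  assert encode_boost_version(1, 35, 0) == 103500   # version_string: "1_35"
  assert encode_boost_version(1, 42, 1) == 104201   # version_string: "1_42_1"
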
+diff --git a/buildtools/wafadmin/3rdparty/fluid.py b/buildtools/wafadmin/3rdparty/fluid.py
+new file mode 100644
+index 0000000..117edef
+--- /dev/null
++++ b/buildtools/wafadmin/3rdparty/fluid.py
+@@ -0,0 +1,27 @@
++#!/usr/bin/python
++# encoding: utf-8
++# Grygoriy Fuchedzhy 2009
++
++"""
++Compile fluid files (fltk graphic library). Use the 'fluid' feature in conjunction with the 'cxx' feature.
++"""
++
++import Task
++from TaskGen import extension
++
++Task.simple_task_type('fluid', '${FLUID} -c -o ${TGT[0].abspath(env)} -h ${TGT[1].abspath(env)} ${SRC}', 'BLUE', shell=False, ext_out='.cxx')
++
++@extension('.fl')
++def fluid(self, node):
++	"""add the .fl to the source list; the cxx file generated will be compiled when possible"""
++	cpp = node.change_ext('.cpp')
++	hpp = node.change_ext('.hpp')
++	self.create_task('fluid', node, [cpp, hpp])
++
++	if 'cxx' in self.features:
++		self.allnodes.append(cpp)
++
++def detect(conf):
++    fluid = conf.find_program('fluid', var='FLUID', mandatory=True)
++    conf.check_cfg(path='fltk-config', package='', args='--cxxflags --ldflags', uselib_store='FLTK', mandatory=True)
++
+diff --git a/buildtools/wafadmin/3rdparty/gccdeps.py b/buildtools/wafadmin/3rdparty/gccdeps.py
+new file mode 100644
+index 0000000..6600c9c
+--- /dev/null
++++ b/buildtools/wafadmin/3rdparty/gccdeps.py
+@@ -0,0 +1,128 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2008-2010 (ita)
++
++"""
++Execute the tasks with gcc -MD, read the dependencies from the .d file
++and prepare the dependency calculation for the next run
++"""
++
++import os, re, threading
++import Task, Logs, Utils, preproc
++from TaskGen import before, after, feature
++
++lock = threading.Lock()
++
++preprocessor_flag = '-MD'
++
++@feature('cc')
++@before('apply_core')
++def add_mmd_cc(self):
++	if self.env.get_flat('CCFLAGS').find(preprocessor_flag) < 0:
++		self.env.append_value('CCFLAGS', preprocessor_flag)
++
++@feature('cxx')
++@before('apply_core')
++def add_mmd_cxx(self):
++	if self.env.get_flat('CXXFLAGS').find(preprocessor_flag) < 0:
++		self.env.append_value('CXXFLAGS', preprocessor_flag)
++
++def scan(self):
++	"the scanner does not do anything initially"
++	nodes = self.generator.bld.node_deps.get(self.unique_id(), [])
++	names = []
++	return (nodes, names)
++
++re_o = re.compile("\.o$")
++re_src = re.compile("^(\.\.)[\\/](.*)$")
++
++def post_run(self):
++	# The following code is executed by threads, it is not safe, so a lock is needed...
++
++	if getattr(self, 'cached', None):
++		return Task.Task.post_run(self)
++
++	name = self.outputs[0].abspath(self.env)
++	name = re_o.sub('.d', name)
++	txt = Utils.readf(name)
++	#os.unlink(name)
++
++	txt = txt.replace('\\\n', '')
++
++	lst = txt.strip().split(':')
++	val = ":".join(lst[1:])
++	val = val.split()
++
++	nodes = []
++	bld = self.generator.bld
++
++	f = re.compile("^("+self.env.variant()+"|\.\.)[\\/](.*)$")
++	for x in val:
++		if os.path.isabs(x):
++
++			if not preproc.go_absolute:
++				continue
++
++			lock.acquire()
++			try:
++				node = bld.root.find_resource(x)
++			finally:
++				lock.release()
++		else:
++			g = re.search(re_src, x)
++			if g:
++				x = g.group(2)
++				lock.acquire()
++				try:
++					node = bld.bldnode.parent.find_resource(x)
++				finally:
++					lock.release()
++			else:
++				g = re.search(f, x)
++				if g:
++					x = g.group(2)
++					lock.acquire()
++					try:
++						node = bld.srcnode.find_resource(x)
++					finally:
++						lock.release()
++
++		if id(node) == id(self.inputs[0]):
++			# ignore the source file, it is already in the dependencies
++			# this way, successful config tests may be retrieved from the cache
++			continue
++
++		if not node:
++			raise ValueError('could not find %r for %r' % (x, self))
++		else:
++			nodes.append(node)
++
++	Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))
++
++	bld.node_deps[self.unique_id()] = nodes
++	bld.raw_deps[self.unique_id()] = []
++
++	try:
++		del self.cache_sig
++	except:
++		pass
++
++	Task.Task.post_run(self)
++
++import Constants, Utils
++def sig_implicit_deps(self):
++	try:
++		return Task.Task.sig_implicit_deps(self)
++	except Utils.WafError:
++		return Constants.SIG_NIL
++
++for name in 'cc cxx'.split():
++	try:
++		cls = Task.TaskBase.classes[name]
++	except KeyError:
++		pass
++	else:
++		cls.post_run = post_run
++		cls.scan = scan
++		cls.sig_implicit_deps = sig_implicit_deps
++
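
For reference, post_run() above is parsing the make-style dependency files
that 'gcc -MD' writes next to each object ('foo.o: foo.c foo.h ...', with
backslash-newline continuations). A condensed standalone equivalent of that
parsing step, illustrative and not part of the patch:

  def parse_depfile(text):
      # 'target.o: dep1 dep2 ...' possibly wrapped with backslash-newlines
      text = text.replace('\\\n', '')
      target, _, deps = text.partition(':')
      return target.strip(), deps.split()

  assert parse_depfile('foo.o: foo.c \\\n foo.h\n') == ('foo.o', ['foo.c', 'foo.h'])
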
+diff --git a/buildtools/wafadmin/3rdparty/go.py b/buildtools/wafadmin/3rdparty/go.py
+new file mode 100644
+index 0000000..2d8df0d
+--- /dev/null
++++ b/buildtools/wafadmin/3rdparty/go.py
+@@ -0,0 +1,111 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# go.py - Waf tool for the Go programming language
++# By: Tom Wambold <tom5760@gmail.com>
++
++import platform, os
++
++import Task
++import Utils
++from TaskGen import feature, extension, after
++
++Task.simple_task_type('gocompile', '${GOC} ${GOCFLAGS} -o ${TGT} ${SRC}', shell=False)
++Task.simple_task_type('gopack', '${GOP} grc ${TGT} ${SRC}', shell=False)
++Task.simple_task_type('golink', '${GOL} ${GOLFLAGS} -o ${TGT} ${SRC}', shell=False)
++
++def detect(conf):
++
++	def set_def(var, val):
++		if not conf.env[var]:
++			conf.env[var] = val
++
++	goarch = os.getenv("GOARCH")
++
++	if goarch == '386':
++		set_def('GO_PLATFORM', 'i386')
++	elif goarch == 'amd64':
++		set_def('GO_PLATFORM', 'x86_64')
++	elif goarch == 'arm':
++		set_def('GO_PLATFORM', 'arm')
++	else:
++		set_def('GO_PLATFORM', platform.machine())
++
++	if conf.env.GO_PLATFORM == 'x86_64':
++		set_def('GO_COMPILER', '6g')
++		set_def('GO_LINKER', '6l')
++		set_def('GO_EXTENSION', '.6')
++	elif conf.env.GO_PLATFORM in ['i386', 'i486', 'i586', 'i686']:
++		set_def('GO_COMPILER', '8g')
++		set_def('GO_LINKER', '8l')
++		set_def('GO_EXTENSION', '.8')
++	elif conf.env.GO_PLATFORM == 'arm':
++		set_def('GO_COMPILER', '5g')
++		set_def('GO_LINKER', '5l')
++		set_def('GO_EXTENSION', '.5')
++
++	if not (conf.env.GO_COMPILER or conf.env.GO_LINKER or conf.env.GO_EXTENSION):
++		conf.fatal('Unsupported platform ' + platform.machine())
++
++	set_def('GO_PACK', 'gopack')
++	set_def('GO_PACK_EXTENSION', '.a')
++
++	conf.find_program(conf.env.GO_COMPILER, var='GOC', mandatory=True)
++	conf.find_program(conf.env.GO_LINKER,   var='GOL', mandatory=True)
++	conf.find_program(conf.env.GO_PACK,     var='GOP', mandatory=True)
++	conf.find_program('cgo',                var='CGO', mandatory=True)
++
++@extension('.go')
++def compile_go(self, node):
++	try:
++		self.go_nodes.append(node)
++	except AttributeError:
++		self.go_nodes = [node]
++
++@feature('go')
++@after('apply_core')
++def apply_compile_go(self):
++	try:
++		nodes = self.go_nodes
++	except AttributeError:
++		self.go_compile_task = None
++	else:
++		self.go_compile_task = self.create_task('gocompile',
++			nodes,
++			[self.path.find_or_declare(self.target + self.env.GO_EXTENSION)])
++
++@feature('gopackage', 'goprogram')
++@after('apply_compile_go')
++def apply_goinc(self):
++	if not getattr(self, 'go_compile_task', None):
++		return
++
++	names = self.to_list(getattr(self, 'uselib_local', []))
++	for name in names:
++		obj = self.name_to_obj(name)
++		if not obj:
++			raise Utils.WafError('object %r was not found in uselib_local '
++					'(required by %r)' % (name, self.name))
++		obj.post()
++		self.go_compile_task.set_run_after(obj.go_package_task)
++		self.go_compile_task.dep_nodes.extend(obj.go_package_task.outputs)
++		self.env.append_unique('GOCFLAGS', '-I' + obj.path.abspath(obj.env))
++		self.env.append_unique('GOLFLAGS', '-L' + obj.path.abspath(obj.env))
++
++@feature('gopackage')
++@after('apply_goinc')
++def apply_gopackage(self):
++	self.go_package_task = self.create_task('gopack',
++			self.go_compile_task.outputs[0],
++			self.path.find_or_declare(self.target + self.env.GO_PACK_EXTENSION))
++	self.go_package_task.set_run_after(self.go_compile_task)
++	self.go_package_task.dep_nodes.extend(self.go_compile_task.outputs)
++
++@feature('goprogram')
++@after('apply_goinc')
++def apply_golink(self):
++	self.go_link_task = self.create_task('golink',
++			self.go_compile_task.outputs[0],
++			self.path.find_or_declare(self.target))
++	self.go_link_task.set_run_after(self.go_compile_task)
++	self.go_link_task.dep_nodes.extend(self.go_compile_task.outputs)
++
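
The platform detection above targets the pre-"go tool" gc toolchain, where
each architecture had its own compiler/linker pair and object extension. The
same mapping as a table, keyed on the GO_PLATFORM values detect() derives
from GOARCH (sketch, illustrative, not part of the patch):

  TOOLCHAINS = {
      # GO_PLATFORM: (compiler, linker, object extension)
      'x86_64': ('6g', '6l', '.6'),
      'i386':   ('8g', '8l', '.8'),
      'arm':    ('5g', '5l', '.5'),
  }

  def toolchain_for(platform_name):
      # detect() treats i486/i586/i686 like i386
      if platform_name in ('i486', 'i586', 'i686'):
          platform_name = 'i386'
      return TOOLCHAINS[platform_name]
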
+diff --git a/buildtools/wafadmin/3rdparty/lru_cache.py b/buildtools/wafadmin/3rdparty/lru_cache.py
+new file mode 100644
+index 0000000..5b00abc
+--- /dev/null
++++ b/buildtools/wafadmin/3rdparty/lru_cache.py
+@@ -0,0 +1,97 @@
++#! /usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy 2011
++
++import os, shutil, re
++import Options, Build, Logs
++
++"""
++Apply a least recently used policy to the Waf cache.
++
++For performance reasons, it is called after the build is complete.
++
++We assume that the folders are written atomically
++
++Do export WAFCACHE=/tmp/foo-xyz where xyz represents the cache size in bytes
++If missing, the default cache size will be set to 10GB
++"""
++
++re_num = re.compile('[a-zA-Z_]+(\d+)')
++
++CACHESIZE = 10*1024*1024*1024 # in bytes
++CLEANRATIO = 0.8
++DIRSIZE = 4096
++
++def compile(self):
++	if Options.cache_global and not Options.options.nocache:
++		try:
++			os.makedirs(Options.cache_global)
++		except:
++			pass
++
++	try:
++		self.raw_compile()
++	finally:
++		if Options.cache_global and not Options.options.nocache:
++			self.sweep()
++
++def sweep(self):
++	global CACHESIZE
++	CACHEDIR = Options.cache_global
++
++	# get the cache max size from the WAFCACHE filename
++	re_num = re.compile('[a-zA-Z_]+(\d+)')
++	val = re_num.sub('\\1', os.path.basename(Options.cache_global))
++	try:
++		CACHESIZE = int(val)
++	except:
++		pass
++
++	# map folder names to timestamps
++	flist = {}
++	for x in os.listdir(CACHEDIR):
++		j = os.path.join(CACHEDIR, x)
++		if os.path.isdir(j) and len(x) == 32: # dir names are md5 hexdigests
++			flist[x] = [os.stat(j).st_mtime, 0]
++
++	for (x, v) in flist.items():
++		cnt = DIRSIZE # each entry takes 4kB
++		d = os.path.join(CACHEDIR, x)
++		for k in os.listdir(d):
++			cnt += os.stat(os.path.join(d, k)).st_size
++		flist[x][1] = cnt
++
++	total = sum([x[1] for x in flist.values()])
++	Logs.debug('lru: Cache size is %r' % total)
++
++	if total >= CACHESIZE:
++		Logs.debug('lru: Trimming the cache since %r > %r' % (total, CACHESIZE))
++
++		# make a list to sort the folders by timestamp
++		lst = [(p, v[0], v[1]) for (p, v) in flist.items()]
++		lst.sort(key=lambda x: x[1]) # sort by timestamp
++		lst.reverse()
++
++		while total >= CACHESIZE * CLEANRATIO:
++			(k, t, s) = lst.pop()
++			p = os.path.join(CACHEDIR, k)
++			v = p + '.del'
++			try:
++				os.rename(p, v)
++			except:
++				# someone already did it
++				pass
++			else:
++				try:
++					shutil.rmtree(v)
++				except:
++					# this should not happen, but who knows?
++					Logs.warn('If you ever see this message, report it (%r)' % v)
++			total -= s
++			del flist[k]
++	Logs.debug('lru: Total at the end %r' % total)
++
++Build.BuildContext.raw_compile = Build.BuildContext.compile
++Build.BuildContext.compile = compile
++Build.BuildContext.sweep = sweep
++
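
Reduced to its core, sweep() sizes every cache folder, and once the total
crosses the limit it deletes the least recently written folders until the
total falls below CACHESIZE * CLEANRATIO. A condensed standalone sketch
(illustrative, not part of the patch; DIRSIZE models the 4 kB per-entry
overhead counted above):

  import os, shutil

  DIRSIZE = 4096

  def trim_cache(cachedir, max_bytes, clean_ratio=0.8):
      entries, total = [], 0
      for name in os.listdir(cachedir):
          path = os.path.join(cachedir, name)
          if not os.path.isdir(path):
              continue
          size = DIRSIZE + sum(os.path.getsize(os.path.join(path, f))
                               for f in os.listdir(path))
          entries.append((os.stat(path).st_mtime, size, path))
          total += size
      entries.sort()   # least recently written first
      while entries and total >= max_bytes * clean_ratio:
          mtime, size, path = entries.pop(0)
          shutil.rmtree(path, ignore_errors=True)
          total -= size
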
+diff --git a/buildtools/wafadmin/3rdparty/paranoid.py b/buildtools/wafadmin/3rdparty/paranoid.py
+new file mode 100644
+index 0000000..ead64ea
+--- /dev/null
++++ b/buildtools/wafadmin/3rdparty/paranoid.py
+@@ -0,0 +1,35 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# ita 2010
++
++import Logs, Utils, Build, Task
++
++def say(txt):
++	Logs.warn("^o^: %s" % txt)
++
++try:
++	ret = Utils.cmd_output('which cowsay 2> /dev/null').strip()
++except Exception, e:
++	pass
++else:
++	def say(txt):
++		f = Utils.cmd_output([ret, txt])
++		Utils.pprint('PINK', f)
++
++say('you make the errors, we detect them')
++
++def check_task_classes(self):
++	for name, cls in Task.TaskBase.classes.items():
++		if issubclass(cls, Task.Task):
++			if not (getattr(cls, 'ext_in', None) or getattr(cls, 'before', None)):
++				say('class %s has no precedence constraints (ext_in/before)' % name)
++			if not (getattr(cls, 'ext_out', None) or getattr(cls, 'after', None)):
++				say('class %s has no precedence constraints (ext_out/after)' % name)
++
++comp = Build.BuildContext.compile
++def compile(self):
++	if not getattr(self, 'magic', None):
++		check_task_classes(self)
++	return comp(self)
++Build.BuildContext.compile = compile
++
+diff --git a/buildtools/wafadmin/3rdparty/swig.py b/buildtools/wafadmin/3rdparty/swig.py
+new file mode 100644
+index 0000000..c0a4108
+--- /dev/null
++++ b/buildtools/wafadmin/3rdparty/swig.py
+@@ -0,0 +1,190 @@
++#! /usr/bin/env python
++# encoding: UTF-8
++# Petar Forai
++# Thomas Nagy 2008
++
++import re
++import Task, Utils, Logs
++from TaskGen import extension
++from Configure import conf
++import preproc
++
++"""
++Welcome to the hell of adding tasks dynamically
++
++swig interface files may be created at runtime, the module name may be unknown in advance
++
++rev 5859 is much simpler
++"""
++
++SWIG_EXTS = ['.swig', '.i']
++
++swig_str = '${SWIG} ${SWIGFLAGS} ${_CCINCFLAGS} ${_CXXINCFLAGS} ${_CCDEFFLAGS} ${_CXXDEFFLAGS} ${SRC}'
++cls = Task.simple_task_type('swig', swig_str, color='BLUE', ext_in='.i .h', ext_out='.o .c .cxx', shell=False)
++
++def runnable_status(self):
++	for t in self.run_after:
++		if not t.hasrun:
++			return Task.ASK_LATER
++
++	if not getattr(self, 'init_outputs', None):
++		self.init_outputs = True
++		if not getattr(self, 'module', None):
++			# search the module name
++			txt = self.inputs[0].read(self.env)
++			m = re_module.search(txt)
++			if not m:
++				raise ValueError("could not find the swig module name")
++			self.module = m.group(1)
++
++		swig_c(self)
++
++		# add the language-specific output files as nodes
++		# call funs in the dict swig_langs
++		for x in self.env['SWIGFLAGS']:
++			# obtain the language
++			x = x[1:]
++			try:
++				fun = swig_langs[x]
++			except KeyError:
++				pass
++			else:
++				fun(self)
++
++	return Task.Task.runnable_status(self)
++setattr(cls, 'runnable_status', runnable_status)
++
++re_module = re.compile('%module(?:\s*\(.*\))?\s+(.+)', re.M)
++
++re_1 = re.compile(r'^%module.*?\s+([\w]+)\s*?$', re.M)
++re_2 = re.compile('%include "(.*)"', re.M)
++re_3 = re.compile('#include "(.*)"', re.M)
++
++def scan(self):
++	"scan for swig dependencies, climb the .i files"
++	env = self.env
++
++	lst_src = []
++
++	seen = []
++	to_see = [self.inputs[0]]
++
++	while to_see:
++		node = to_see.pop(0)
++		if node.id in seen:
++			continue
++		seen.append(node.id)
++		lst_src.append(node)
++
++		# read the file
++		code = node.read(env)
++		code = preproc.re_nl.sub('', code)
++		code = preproc.re_cpp.sub(preproc.repl, code)
++
++		# find .i files and project headers
++		names = re_2.findall(code) + re_3.findall(code)
++		for n in names:
++			for d in self.generator.env.INC_PATHS + [node.parent]:
++				u = d.find_resource(n)
++				if u:
++					to_see.append(u)
++					break
++			else:
++				Logs.warn('could not find %r' % n)
++
++	# list of nodes this one depends on, and module name if present
++	if Logs.verbose:
++		Logs.debug('deps: deps for %s: %s' % (str(self), str(lst_src)))
++	return (lst_src, [])
++cls.scan = scan
++
++# provide additional language processing
++swig_langs = {}
++def swig(fun):
++	swig_langs[fun.__name__.replace('swig_', '')] = fun
++
++def swig_c(self):
++	ext = '.swigwrap_%d.c' % self.generator.idx
++	flags = self.env['SWIGFLAGS']
++	if '-c++' in flags:
++		ext += 'xx'
++	out_node = self.inputs[0].parent.find_or_declare(self.module + ext)
++
++	try:
++		if '-c++' in flags:
++			fun = self.generator.cxx_hook
++		else:
++			fun = self.generator.c_hook
++	except AttributeError:
++		raise Utils.WafError('No c%s compiler was found to process swig files' % ('-c++' in flags and '++' or ''))
++
++	task = fun(out_node)
++	task.set_run_after(self)
++
++	ge = self.generator.bld.generator
++	ge.outstanding.insert(0, task)
++	ge.total += 1
++
++	try:
++		ltask = self.generator.link_task
++	except AttributeError:
++		pass
++	else:
++		ltask.inputs.append(task.outputs[0])
++
++	self.outputs.append(out_node)
++
++	if not '-o' in self.env['SWIGFLAGS']:
++		self.env.append_value('SWIGFLAGS', '-o')
++		self.env.append_value('SWIGFLAGS', self.outputs[0].abspath(self.env))
++
++@swig
++def swig_python(tsk):
++	tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.py'))
++
++@swig
++def swig_ocaml(tsk):
++	tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.ml'))
++	tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.mli'))
++
++@extension(SWIG_EXTS)
++def i_file(self, node):
++	# the task instance
++	tsk = self.create_task('swig')
++	tsk.set_inputs(node)
++	tsk.module = getattr(self, 'swig_module', None)
++
++	flags = self.to_list(getattr(self, 'swig_flags', []))
++	self.env.append_value('SWIGFLAGS', flags)
++
++	if not '-outdir' in flags:
++		flags.append('-outdir')
++		flags.append(node.parent.abspath(self.env))
++
++@conf
++def check_swig_version(conf, minver=None):
++	"""Check for a minimum swig version like conf.check_swig_version('1.3.28')
++	or conf.check_swig_version((1,3,28)) """
++	reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)
++
++	swig_out = Utils.cmd_output('%s -version' % conf.env['SWIG'])
++
++	swigver = [int(s) for s in reg_swig.findall(swig_out)[0].split('.')]
++	if isinstance(minver, basestring):
++		minver = [int(s) for s in minver.split(".")]
++	if isinstance(minver, tuple):
++		minver = [int(s) for s in minver]
++	result = (minver is None) or (minver[:3] <= swigver[:3])
++	swigver_full = '.'.join(map(str, swigver))
++	if result:
++		conf.env['SWIG_VERSION'] = swigver_full
++	if minver is None:
++		conf.check_message_custom('swig version', '', swigver_full)
++	else:
++		minver_str = '.'.join(map(str, minver))
++		conf.check_message('swig version', '>= %s' % (minver_str,), result, option=swigver_full)
++	return result
++
++def detect(conf):
++	swig = conf.find_program('swig', var='SWIG', mandatory=True)
++
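
The module-name discovery that runnable_status() performs can be exercised in
isolation; re_module below is the same regular expression defined above (the
wrapper name is illustrative, not part of the patch):

  import re

  re_module = re.compile(r'%module(?:\s*\(.*\))?\s+(.+)', re.M)

  def swig_module_name(interface_text):
      # First %module declaration in the .i file wins, as in runnable_status()
      m = re_module.search(interface_text)
      if not m:
          raise ValueError('could not find the swig module name')
      return m.group(1)

  assert swig_module_name('%module example\n%include "std_string.i"\n') == 'example'
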
+diff --git a/buildtools/wafadmin/3rdparty/valadoc.py b/buildtools/wafadmin/3rdparty/valadoc.py
+new file mode 100644
+index 0000000..d0a9fe8
+--- /dev/null
++++ b/buildtools/wafadmin/3rdparty/valadoc.py
+@@ -0,0 +1,113 @@
++#! /usr/bin/env python
++# encoding: UTF-8
++# Nicolas Joseph 2009
++
++from fnmatch import fnmatchcase
++import os, os.path, re, stat
++import Task, Utils, Node, Constants
++from TaskGen import feature, extension, after
++from Logs import debug, warn, error
++
++VALADOC_STR = '${VALADOC}'
++
++class valadoc_task(Task.Task):
++
++  vars = ['VALADOC', 'VALADOCFLAGS']
++  color = 'BLUE'
++  after = 'cxx_link cc_link'
++  quiet = True
++
++  output_dir = ''
++  doclet = ''
++  package_name = ''
++  package_version = ''
++  files = []
++  protected = True
++  private = False
++  inherit = False
++  deps = False
++  enable_non_null_experimental = False
++  force = False
++
++  def runnable_status(self):
++    return True
++
++  def run(self):
++    if self.env['VALADOC']:
++      if not self.env['VALADOCFLAGS']:
++        self.env['VALADOCFLAGS'] = ''
++      cmd = [Utils.subst_vars(VALADOC_STR, self.env)]
++      cmd.append ('-o %s' % self.output_dir)
++      if getattr(self, 'doclet', None):
++        cmd.append ('--doclet %s' % self.doclet)
++      cmd.append ('--package-name %s' % self.package_name)
++      if getattr(self, 'version', None):
++        cmd.append ('--package-version %s' % self.package_version)
++      if getattr(self, 'packages', None):
++        for package in self.packages:
++          cmd.append ('--pkg %s' % package)
++      if getattr(self, 'vapi_dirs', None):
++        for vapi_dir in self.vapi_dirs:
++          cmd.append ('--vapidir %s' % vapi_dir)
++      if not getattr(self, 'protected', None):
++        cmd.append ('--no-protected')
++      if getattr(self, 'private', None):
++        cmd.append ('--private')
++      if getattr(self, 'inherit', None):
++        cmd.append ('--inherit')
++      if getattr(self, 'deps', None):
++        cmd.append ('--deps')
++      if getattr(self, 'enable_non_null_experimental', None):
++        cmd.append ('--enable-non-null-experimental')
++      if getattr(self, 'force', None):
++        cmd.append ('--force')
++      cmd.append (' '.join ([x.relpath_gen (self.generator.bld.bldnode) for x in self.files]))
++      return self.generator.bld.exec_command(' '.join(cmd))
++    else:
++      error ('You must install valadoc <http://live.gnome.org/Valadoc> to generate the API documentation')
++      return -1
++
++@feature('valadoc')
++def process_valadoc(self):
++  task = getattr(self, 'task', None)
++  if not task:
++    task = self.create_task('valadoc')
++    self.task = task
++    if getattr(self, 'output_dir', None):
++      task.output_dir = self.output_dir
++    else:
++      raise Utils.WafError('no output directory')
++    if getattr(self, 'doclet', None):
++      task.doclet = self.doclet
++    else:
++      raise Utils.WafError('no doclet directory')
++    if getattr(self, 'package_name', None):
++      task.package_name = self.package_name
++    else:
++      raise Utils.WafError('no package name')
++    if getattr(self, 'package_version', None):
++      task.package_version = self.package_version
++    if getattr(self, 'packages', None):
++      task.packages = Utils.to_list(self.packages)
++    if getattr(self, 'vapi_dirs', None):
++      task.vapi_dirs = Utils.to_list(self.vapi_dirs)
++    if getattr(self, 'files', None):
++      task.files = self.files
++    else:
++      raise Utils.WafError('no input file')
++    if getattr(self, 'protected', None):
++      task.protected = self.protected
++    if getattr(self, 'private', None):
++      task.private = self.private
++    if getattr(self, 'inherit', None):
++      task.inherit = self.inherit
++    if getattr(self, 'deps', None):
++      task.deps = self.deps
++    if getattr(self, 'enable_non_null_experimental', None):
++      task.enable_non_null_experimental = self.enable_non_null_experimental
++    if getattr(self, 'force', None):
++      task.force = self.force
++
++def detect(conf):
++  conf.find_program('valadoc', var='VALADOC', mandatory=False)
++
+diff --git a/buildtools/wafadmin/Build.py b/buildtools/wafadmin/Build.py
+new file mode 100644
+index 0000000..8e7c72c
+--- /dev/null
++++ b/buildtools/wafadmin/Build.py
+@@ -0,0 +1,1033 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2005 (ita)
++
++"""
++Dependency tree holder
++
++The class Build holds all the info related to a build:
++* file system representation (tree of Node instances)
++* various cached objects (task signatures, file scan results, ..)
++
++There is only one Build object at a time (bld singleton)
++"""
++
++import os, sys, errno, re, glob, gc, datetime, shutil
++try: import cPickle
++except: import pickle as cPickle
++import Runner, TaskGen, Node, Scripting, Utils, Environment, Task, Logs, Options
++from Logs import debug, error, info
++from Constants import *
++
++SAVED_ATTRS = 'root srcnode bldnode node_sigs node_deps raw_deps task_sigs id_nodes'.split()
++"Build class members to save"
++
++bld = None
++"singleton - safe to use when Waf is not used as a library"
++
++class BuildError(Utils.WafError):
++	def __init__(self, b=None, t=[]):
++		self.bld = b
++		self.tasks = t
++		self.ret = 1
++		Utils.WafError.__init__(self, self.format_error())
++
++	def format_error(self):
++		lst = ['Build failed:']
++		for tsk in self.tasks:
++			txt = tsk.format_error()
++			if txt: lst.append(txt)
++		sep = ' '
++		if len(lst) > 2:
++			sep = '\n'
++		return sep.join(lst)
++
++def group_method(fun):
++	"""
++	sets a build context method to execute after the current group has finished executing
++	this is useful for installing build files:
++	* calling install_files/install_as will fail if called too early
++	* people do not want to define install method in their task classes
++
++	TODO: try it
++	"""
++	def f(*k, **kw):
++		if not k[0].is_install:
++			return False
++
++		postpone = True
++		if 'postpone' in kw:
++			postpone = kw['postpone']
++			del kw['postpone']
++
++		# TODO waf 1.6 in theory there should be no reference to the TaskManager internals here
++		if postpone:
++			m = k[0].task_manager
++			if not m.groups: m.add_group()
++			m.groups[m.current_group].post_funs.append((fun, k, kw))
++			if not 'cwd' in kw:
++				kw['cwd'] = k[0].path
++		else:
++			fun(*k, **kw)
++	return f
++
++class BuildContext(Utils.Context):
++	"holds the dependency tree"
++	def __init__(self):
++
++		# not a singleton, but provided for compatibility
++		global bld
++		bld = self
++
++		self.task_manager = Task.TaskManager()
++
++		# instead of hashing the nodes, we assign them a unique id when they are created
++		self.id_nodes = 0
++		self.idx = {}
++
++		# map names to environments, the 'default' must be defined
++		self.all_envs = {}
++
++		# ======================================= #
++		# code for reading the scripts
++
++		# project build directory - do not reset() from load_dirs()
++		self.bdir = ''
++
++		# the current directory from which the code is run
++		# the folder changes every time a wscript is read
++		self.path = None
++
++		# Manual dependencies.
++		self.deps_man = Utils.DefaultDict(list)
++
++		# ======================================= #
++		# cache variables
++
++		# local cache for absolute paths - cache_node_abspath[variant][node]
++		self.cache_node_abspath = {}
++
++		# list of folders that are already scanned
++		# so that we do not need to stat them one more time
++		self.cache_scanned_folders = {}
++
++		# list of targets to uninstall for removing the empty folders after uninstalling
++		self.uninstall = []
++
++		# ======================================= #
++		# tasks and objects
++
++		# build dir variants (release, debug, ..)
++		for v in 'cache_node_abspath task_sigs node_deps raw_deps node_sigs'.split():
++			var = {}
++			setattr(self, v, var)
++
++		self.cache_dir_contents = {}
++
++		self.all_task_gen = []
++		self.task_gen_cache_names = {}
++		self.cache_sig_vars = {}
++		self.log = None
++
++		self.root = None
++		self.srcnode = None
++		self.bldnode = None
++
++		# bind the build context to the nodes in use
++		# this means better encapsulation and no build context singleton
++		class node_class(Node.Node):
++			pass
++		self.node_class = node_class
++		self.node_class.__module__ = "Node"
++		self.node_class.__name__ = "Nodu"
++		self.node_class.bld = self
++
++		self.is_install = None
++
++	def __copy__(self):
++		"nodes are not supposed to be copied"
++		raise Utils.WafError('build contexts are not supposed to be cloned')
++
++	def load(self):
++		"load the cache from the disk"
++		try:
++			env = Environment.Environment(os.path.join(self.cachedir, 'build.config.py'))
++		except (IOError, OSError):
++			pass
++		else:
++			if env['version'] < HEXVERSION:
++				raise Utils.WafError('Version mismatch! reconfigure the project')
++			for t in env['tools']:
++				self.setup(**t)
++
++		try:
++			gc.disable()
++			f = data = None
++
++			Node.Nodu = self.node_class
++
++			try:
++				f = open(os.path.join(self.bdir, DBFILE), 'rb')
++			except (IOError, EOFError):
++				# handle missing file/empty file
++				pass
++
++			try:
++				if f: data = cPickle.load(f)
++			except AttributeError:
++				# handle file of an old Waf version
++				# that has an attribute which no longer exist
++				# (e.g. AttributeError: 'module' object has no attribute 'BuildDTO')
++				if Logs.verbose > 1: raise
++
++			if data:
++				for x in SAVED_ATTRS: setattr(self, x, data[x])
++			else:
++				debug('build: Build cache loading failed')
++
++		finally:
++			if f: f.close()
++			gc.enable()
++
++	def save(self):
++		"store the cache on disk, see self.load"
++		gc.disable()
++		self.root.__class__.bld = None
++
++		# some people are very nervous with ctrl+c so we have to make a temporary file
++		Node.Nodu = self.node_class
++		db = os.path.join(self.bdir, DBFILE)
++		file = open(db + '.tmp', 'wb')
++		data = {}
++		for x in SAVED_ATTRS: data[x] = getattr(self, x)
++		cPickle.dump(data, file, -1)
++		file.close()
++
++		# do not use shutil.move
++		try: os.unlink(db)
++		except OSError: pass
++		os.rename(db + '.tmp', db)
++		self.root.__class__.bld = self
++		gc.enable()
++
++	# ======================================= #
++
++	def clean(self):
++		debug('build: clean called')
++
++		# does not clean files created during the configuration
++		precious = set([])
++		for env in self.all_envs.values():
++			for x in env[CFG_FILES]:
++				node = self.srcnode.find_resource(x)
++				if node:
++					precious.add(node.id)
++
++		def clean_rec(node):
++			for x in list(node.childs.keys()):
++				nd = node.childs[x]
++
++				tp = nd.id & 3
++				if tp == Node.DIR:
++					clean_rec(nd)
++				elif tp == Node.BUILD:
++					if nd.id in precious: continue
++					for env in self.all_envs.values():
++						try: os.remove(nd.abspath(env))
++						except OSError: pass
++					node.childs.__delitem__(x)
++
++		clean_rec(self.srcnode)
++
++		for v in 'node_sigs node_deps task_sigs raw_deps cache_node_abspath'.split():
++			setattr(self, v, {})
++
++	def compile(self):
++		"""The cache file is not written if nothing was build at all (build is up to date)"""
++		debug('build: compile called')
++
++		"""
++		import cProfile, pstats
++		cProfile.run("import Build\nBuild.bld.flush()", 'profi.txt')
++		p = pstats.Stats('profi.txt')
++		p.sort_stats('cumulative').print_stats(80)
++		"""
++		self.flush()
++		#"""
++
++		self.generator = Runner.Parallel(self, Options.options.jobs)
++
++		def dw(on=True):
++			if Options.options.progress_bar:
++				if on: sys.stderr.write(Logs.colors.cursor_on)
++				else: sys.stderr.write(Logs.colors.cursor_off)
++
++		debug('build: executor starting')
++
++		back = os.getcwd()
++		os.chdir(self.bldnode.abspath())
++
++		try:
++			try:
++				dw(on=False)
++				self.generator.start()
++			except KeyboardInterrupt:
++				dw()
++				# if self.generator.processed != 1: TODO
++				self.save()
++				raise
++			except Exception:
++				dw()
++				# do not store anything, because something bad happened
++				raise
++			else:
++				dw()
++				#if self.generator.processed != 1: TODO
++				self.save()
++
++			if self.generator.error:
++				raise BuildError(self, self.task_manager.tasks_done)
++
++		finally:
++			os.chdir(back)
++
++	def install(self):
++		"this function is called for both install and uninstall"
++		debug('build: install called')
++
++		self.flush()
++
++		# remove empty folders after uninstalling
++		if self.is_install < 0:
++			lst = []
++			for x in self.uninstall:
++				dir = os.path.dirname(x)
++				if not dir in lst: lst.append(dir)
++			lst.sort()
++			lst.reverse()
++
++			nlst = []
++			for y in lst:
++				x = y
++				while len(x) > 4:
++					if not x in nlst: nlst.append(x)
++					x = os.path.dirname(x)
++
++			nlst.sort()
++			nlst.reverse()
++			for x in nlst:
++				try: os.rmdir(x)
++				except OSError: pass
++
++	def new_task_gen(self, *k, **kw):
++		if self.task_gen_cache_names:
++			self.task_gen_cache_names = {}
++
++		kw['bld'] = self
++		if len(k) == 0:
++			ret = TaskGen.task_gen(*k, **kw)
++		else:
++			cls_name = k[0]
++
++			try: cls = TaskGen.task_gen.classes[cls_name]
++			except KeyError: raise Utils.WscriptError('%s is not a valid task generator -> %s' %
++				(cls_name, [x for x in TaskGen.task_gen.classes]))
++			ret = cls(*k, **kw)
++		return ret
++
++	def __call__(self, *k, **kw):
++		if self.task_gen_cache_names:
++			self.task_gen_cache_names = {}
++
++		kw['bld'] = self
++		return TaskGen.task_gen(*k, **kw)
++
++	def load_envs(self):
++		try:
++			lst = Utils.listdir(self.cachedir)
++		except OSError, e:
++			if e.errno == errno.ENOENT:
++				raise Utils.WafError('The project was not configured: run "waf configure" first!')
++			else:
++				raise
++
++		if not lst:
++			raise Utils.WafError('The cache directory is empty: reconfigure the project')
++
++		for file in lst:
++			if file.endswith(CACHE_SUFFIX):
++				env = Environment.Environment(os.path.join(self.cachedir, file))
++				name = file[:-len(CACHE_SUFFIX)]
++
++				self.all_envs[name] = env
++
++		self.init_variants()
++
++		for env in self.all_envs.values():
++			for f in env[CFG_FILES]:
++				newnode = self.path.find_or_declare(f)
++				try:
++					hash = Utils.h_file(newnode.abspath(env))
++				except (IOError, AttributeError):
++					error("cannot find "+f)
++					hash = SIG_NIL
++				self.node_sigs[env.variant()][newnode.id] = hash
++
++		# TODO: hmmm, these nodes are removed from the tree when calling rescan()
++		self.bldnode = self.root.find_dir(self.bldnode.abspath())
++		self.path = self.srcnode = self.root.find_dir(self.srcnode.abspath())
++		self.cwd = self.bldnode.abspath()
++
++	def setup(self, tool, tooldir=None, funs=None):
++		"setup tools for build process"
++		if isinstance(tool, list):
++			for i in tool: self.setup(i, tooldir)
++			return
++
++		if not tooldir: tooldir = Options.tooldir
++
++		module = Utils.load_tool(tool, tooldir)
++		if hasattr(module, "setup"): module.setup(self)
++
++	def init_variants(self):
++		debug('build: init variants')
++
++		lstvariants = []
++		for env in self.all_envs.values():
++			if not env.variant() in lstvariants:
++				lstvariants.append(env.variant())
++		self.lst_variants = lstvariants
++
++		debug('build: list of variants is %r', lstvariants)
++
++		for name in lstvariants+[0]:
++			for v in 'node_sigs cache_node_abspath'.split():
++				var = getattr(self, v)
++				if not name in var:
++					var[name] = {}
++
++	# ======================================= #
++	# node and folder handling
++
++	# this should be the main entry point
++	def load_dirs(self, srcdir, blddir, load_cache=1):
++		"this functions should be the start of everything"
++
++		assert(os.path.isabs(srcdir))
++		assert(os.path.isabs(blddir))
++
++		self.cachedir = os.path.join(blddir, CACHE_DIR)
++
++		if srcdir == blddir:
++			raise Utils.WafError("build dir must be different from srcdir: %s <-> %s " % (srcdir, blddir))
++
++		self.bdir = blddir
++
++		# try to load the cache file, if it does not exist, nothing happens
++		self.load()
++
++		if not self.root:
++			Node.Nodu = self.node_class
++			self.root = Node.Nodu('', None, Node.DIR)
++
++		if not self.srcnode:
++			self.srcnode = self.root.ensure_dir_node_from_path(srcdir)
++		debug('build: srcnode is %s and srcdir %s', self.srcnode.name, srcdir)
++
++		self.path = self.srcnode
++
++		# create this build dir if necessary
++		try: os.makedirs(blddir)
++		except OSError: pass
++
++		if not self.bldnode:
++			self.bldnode = self.root.ensure_dir_node_from_path(blddir)
++
++		self.init_variants()
++
++	def rescan(self, src_dir_node):
++		"""
++		look at the contents of a (folder) node and update its list of childs
++
++		The intent is to perform the following steps
++		* remove the nodes for the files that have disappeared
++		* remove the signatures for the build files that have disappeared
++		* cache the results of os.listdir
++		* create the build folder equivalent (mkdir) for each variant
++		src/bar -> build/default/src/bar, build/release/src/bar
++
++		when a folder in the source directory is removed, we do not check recursively
++		to remove the unused nodes. To do that, call 'waf clean' and build again.
++		"""
++
++		# do not rescan over and over again
++		# TODO use a single variable in waf 1.6
++		if self.cache_scanned_folders.get(src_dir_node.id, None): return
++		self.cache_scanned_folders[src_dir_node.id] = True
++
++		# TODO remove in waf 1.6
++		if hasattr(self, 'repository'): self.repository(src_dir_node)
++
++		if not src_dir_node.name and sys.platform == 'win32':
++			# the root has no name, contains drive letters, and cannot be listed
++			return
++
++
++		# first, take the case of the source directory
++		parent_path = src_dir_node.abspath()
++		try:
++			lst = set(Utils.listdir(parent_path))
++		except OSError:
++			lst = set([])
++
++		# TODO move this at the bottom
++		self.cache_dir_contents[src_dir_node.id] = lst
++
++		# hash the existing source files, remove the others
++		cache = self.node_sigs[0]
++		for x in src_dir_node.childs.values():
++			if x.id & 3 != Node.FILE: continue
++			if x.name in lst:
++				try:
++					cache[x.id] = Utils.h_file(x.abspath())
++				except IOError:
++					raise Utils.WafError('The file %s is not readable or has become a dir' % x.abspath())
++			else:
++				try: del cache[x.id]
++				except KeyError: pass
++
++				del src_dir_node.childs[x.name]
++
++
++		# next, obtain the differences between srcnode and src_dir_node
++		h1 = self.srcnode.height()
++		h2 = src_dir_node.height()
++
++		lst = []
++		child = src_dir_node
++		while h2 > h1:
++			lst.append(child.name)
++			child = child.parent
++			h2 -= 1
++		lst.reverse()
++
++		# list the files in the build dirs
++		try:
++			for variant in self.lst_variants:
++				sub_path = os.path.join(self.bldnode.abspath(), variant , *lst)
++				self.listdir_bld(src_dir_node, sub_path, variant)
++		except OSError:
++
++			# listdir failed, remove the build node signatures for all variants
++			for node in src_dir_node.childs.values():
++				if node.id & 3 != Node.BUILD:
++					continue
++
++				for dct in self.node_sigs.values():
++					if node.id in dct:
++						dct.__delitem__(node.id)
++
++				# the policy is to avoid removing nodes representing directories
++				src_dir_node.childs.__delitem__(node.name)
++
++			for variant in self.lst_variants:
++				sub_path = os.path.join(self.bldnode.abspath(), variant , *lst)
++				try:
++					os.makedirs(sub_path)
++				except OSError:
++					pass
++
++	# ======================================= #
++	def listdir_src(self, parent_node):
++		"""do not use, kept for compatibility"""
++		pass
++
++	def remove_node(self, node):
++		"""do not use, kept for compatibility"""
++		pass
++
++	def listdir_bld(self, parent_node, path, variant):
++		"""in this method we do not add timestamps but we remove them
++		when the files no longer exist (file removed in the build dir)"""
++
++		i_existing_nodes = [x for x in parent_node.childs.values() if x.id & 3 == Node.BUILD]
++
++		lst = set(Utils.listdir(path))
++		node_names = set([x.name for x in i_existing_nodes])
++		remove_names = node_names - lst
++
++		# remove the stamps of the build nodes that no longer exist on the filesystem
++		ids_to_remove = [x.id for x in i_existing_nodes if x.name in remove_names]
++		cache = self.node_sigs[variant]
++		for nid in ids_to_remove:
++			if nid in cache:
++				cache.__delitem__(nid)
++
++	def get_env(self):
++		return self.env_of_name('default')
++	def set_env(self, name, val):
++		self.all_envs[name] = val
++
++	env = property(get_env, set_env)
++
++	def add_manual_dependency(self, path, value):
++		if isinstance(path, Node.Node):
++			node = path
++		elif os.path.isabs(path):
++			node = self.root.find_resource(path)
++		else:
++			node = self.path.find_resource(path)
++		self.deps_man[node.id].append(value)
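++	# illustrative sketch: record an extra token that the signature of main.c
++	# should depend on (path may be a string or a Node):
++	#   bld.add_manual_dependency('main.c', 'rev-1234')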
++
++	def launch_node(self):
++		"""return the launch directory as a node"""
++		# p_ln is kind of private, but kept public just in case
++		try:
++			return self.p_ln
++		except AttributeError:
++			self.p_ln = self.root.find_dir(Options.launch_dir)
++			return self.p_ln
++
++	def glob(self, pattern, relative=True):
++		"files matching the pattern, seen from the current folder"
++		path = self.path.abspath()
++		files = [self.root.find_resource(x) for x in glob.glob(path+os.sep+pattern)]
++		if relative:
++			files = [x.path_to_parent(self.path) for x in files if x]
++		else:
++			files = [x.abspath() for x in files if x]
++		return files
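++	# illustrative usage from a wscript's build function:
++	#   srcs = bld.glob('*.c')                  # paths relative to bld.path
++	#   full = bld.glob('*.c', relative=False)  # absolute paths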
++
++	## the following methods are candidates for the stable apis ##
++
++	def add_group(self, *k):
++		self.task_manager.add_group(*k)
++
++	def set_group(self, *k, **kw):
++		self.task_manager.set_group(*k, **kw)
++
++	def hash_env_vars(self, env, vars_lst):
++		"""hash environment variables
++		['CXX', ..] -> [env['CXX'], ..] -> md5()"""
++
++		# ccroot objects use the same environment for building the .o at once
++		# the same environment and the same variables are used
++
++		idx = str(id(env)) + str(vars_lst)
++		try: return self.cache_sig_vars[idx]
++		except KeyError: pass
++
++		lst = [str(env[a]) for a in vars_lst]
++		ret = Utils.h_list(lst)
++		debug('envhash: %r %r', ret, lst)
++
++		# next time
++		self.cache_sig_vars[idx] = ret
++		return ret
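++	# illustrative: hash only the variables that matter for a given tool
++	#   sig = bld.hash_env_vars(bld.env, ['CC', 'CCFLAGS'])
++	# repeated calls with the same (env, vars) pair return the cached signature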
++
++	def name_to_obj(self, name, env):
++		"""retrieve a task generator from its name or its target name
++		remember that names must be unique"""
++		cache = self.task_gen_cache_names
++		if not cache:
++			# create the index lazily
++			for x in self.all_task_gen:
++				vt = x.env.variant() + '_'
++				if x.name:
++					cache[vt + x.name] = x
++				else:
++					if isinstance(x.target, str):
++						target = x.target
++					else:
++						target = ' '.join(x.target)
++					v = vt + target
++					if not cache.get(v, None):
++						cache[v] = x
++		return cache.get(env.variant() + '_' + name, None)
++
++	def flush(self, all=1):
++		"""tell the task generators to create the tasks"""
++
++		self.ini = datetime.datetime.now()
++		# force the initialization of the mapping name->object in flush
++		# name_to_obj can be used in userland scripts, in that case beware of incomplete mapping
++		self.task_gen_cache_names = {}
++		self.name_to_obj('', self.env)
++
++		debug('build: delayed operation TaskGen.flush() called')
++
++		if Options.options.compile_targets:
++			debug('task_gen: posting objects %r listed in compile_targets', Options.options.compile_targets)
++
++			mana = self.task_manager
++			to_post = []
++			min_grp = 0
++
++			# ensure the target names exist, fail before any post()
++			target_objects = Utils.DefaultDict(list)
++			for target_name in Options.options.compile_targets.split(','):
++				# trim target_name (handle cases when the user added spaces to targets)
++				target_name = target_name.strip()
++				for env in self.all_envs.values():
++					tg = self.name_to_obj(target_name, env)
++					if tg:
++						target_objects[target_name].append(tg)
++
++						m = mana.group_idx(tg)
++						if m > min_grp:
++							min_grp = m
++							to_post = [tg]
++						elif m == min_grp:
++							to_post.append(tg)
++
++				if not target_name in target_objects and all:
++					raise Utils.WafError("target '%s' does not exist" % target_name)
++
++			debug('group: Forcing up to group %s for target %s', mana.group_name(min_grp), Options.options.compile_targets)
++
++			# post all the task generators in previous groups
++			for i in xrange(len(mana.groups)):
++				mana.current_group = i
++				if i == min_grp:
++					break
++				g = mana.groups[i]
++				debug('group: Forcing group %s', mana.group_name(g))
++				for t in g.tasks_gen:
++					debug('group: Posting %s', t.name or t.target)
++					t.post()
++
++			# then post the task generators listed in compile_targets in the last group
++			for t in to_post:
++				t.post()
++
++		else:
++			debug('task_gen: posting objects (normal)')
++			ln = self.launch_node()
++			# if the build is started from the build directory, behave as if it had been started from the top level
++			# for the pretty-printing (Node.py), the two lines below cannot be moved to Build::launch_node
++			if ln.is_child_of(self.bldnode) or not ln.is_child_of(self.srcnode):
++				ln = self.srcnode
++
++			# if the project file is located under the source directory, build all targets by default
++			# else 'waf configure build' does nothing
++			proj_node = self.root.find_dir(os.path.split(Utils.g_module.root_path)[0])
++			if proj_node.id != self.srcnode.id:
++				ln = self.srcnode
++
++			for i in xrange(len(self.task_manager.groups)):
++				g = self.task_manager.groups[i]
++				self.task_manager.current_group = i
++				if Logs.verbose:
++					groups = [x for x in self.task_manager.groups_names if id(self.task_manager.groups_names[x]) == id(g)]
++					name = groups and groups[0] or 'unnamed'
++					Logs.debug('group: group %s', name)
++				for tg in g.tasks_gen:
++					if not tg.path.is_child_of(ln):
++						continue
++					if Logs.verbose:
++						Logs.debug('group: %s' % tg)
++					tg.post()
++
++	def env_of_name(self, name):
++		try:
++			return self.all_envs[name]
++		except KeyError:
++			error('no such environment: '+name)
++			return None
++
++	def progress_line(self, state, total, col1, col2):
++		n = len(str(total))
++
++		Utils.rot_idx += 1
++		ind = Utils.rot_chr[Utils.rot_idx % 4]
++
++		ini = self.ini
++
++		pc = (100.*state)/total
++		eta = Utils.get_elapsed_time(ini)
++		fs = "[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s][" % (n, n, ind)
++		left = fs % (state, total, col1, pc, col2)
++		right = '][%s%s%s]' % (col1, eta, col2)
++
++		cols = Utils.get_term_cols() - len(left) - len(right) + 2*len(col1) + 2*len(col2)
++		if cols < 7: cols = 7
++
++		ratio = int((cols*state)/total) - 1
++
++		bar = ('='*ratio+'>').ljust(cols)
++		msg = Utils.indicator % (left, bar, right)
++
++		return msg
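++	# illustrative output (spinner, percentage, bar and elapsed time; exact
++	# widths depend on the terminal):
++	#   [ 15/120][ 12%][-][=========>           ][00:00:07]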
++
++
++	# do_install is only used by the install methods below
++	def do_install(self, src, tgt, chmod=O644):
++		"""returns true if the file was effectively installed or uninstalled, false otherwise"""
++		if self.is_install > 0:
++			if not Options.options.force:
++				# check if the file is already there to avoid a copy
++				try:
++					st1 = os.stat(tgt)
++					st2 = os.stat(src)
++				except OSError:
++					pass
++				else:
++					# same size and identical timestamps -> make no copy
++					if st1.st_mtime >= st2.st_mtime and st1.st_size == st2.st_size:
++						return False
++
++			srclbl = src.replace(self.srcnode.abspath(None)+os.sep, '')
++			info("* installing %s as %s" % (srclbl, tgt))
++
++			# following is for shared libs and stale inodes (-_-)
++			try: os.remove(tgt)
++			except OSError: pass
++
++			try:
++				shutil.copy2(src, tgt)
++				os.chmod(tgt, chmod)
++			except IOError:
++				try:
++					os.stat(src)
++				except (OSError, IOError):
++					error('File %r does not exist' % src)
++				raise Utils.WafError('Could not install the file %r' % tgt)
++			return True
++
++		elif self.is_install < 0:
++			info("* uninstalling %s" % tgt)
++
++			self.uninstall.append(tgt)
++
++			try:
++				os.remove(tgt)
++			except OSError, e:
++				if e.errno != errno.ENOENT:
++					if not getattr(self, 'uninstall_error', None):
++						self.uninstall_error = True
++						Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
++					if Logs.verbose > 1:
++						Logs.warn('could not remove %s (error code %r)' % (e.filename, e.errno))
++			return True
++
++	red = re.compile(r"^([A-Za-z]:)?[/\\\\]*")
++	def get_install_path(self, path, env=None):
++		"installation path prefixed by the destdir, the variables like in '${PREFIX}/bin' are substituted"
++		if not env: env = self.env
++		destdir = env.get_destdir()
++		path = path.replace('/', os.sep)
++		destpath = Utils.subst_vars(path, env)
++		if destdir:
++			destpath = os.path.join(destdir, self.red.sub('', destpath))
++		return destpath
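++	# illustrative: with env['PREFIX'] = '/usr/local' and --destdir=/tmp/stage,
++	#   bld.get_install_path('${PREFIX}/bin')  ->  '/tmp/stage/usr/local/bin'
++	# (self.red strips the leading '/' of the substituted path before joining)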
++
++	def install_dir(self, path, env=None):
++		"""
++		create empty folders for the installation (very rarely used)
++		"""
++		if env:
++			assert isinstance(env, Environment.Environment), "invalid parameter"
++		else:
++			env = self.env
++
++		if not path:
++			return []
++
++		destpath = self.get_install_path(path, env)
++
++		if self.is_install > 0:
++			info('* creating %s' % destpath)
++			Utils.check_dir(destpath)
++		elif self.is_install < 0:
++			info('* removing %s' % destpath)
++			self.uninstall.append(destpath + '/xxx') # yes, ugly
++
++	def install_files(self, path, files, env=None, chmod=O644, relative_trick=False, cwd=None):
++		"""To install files only after they have been built, put the calls in a method named
++		post_build on the top-level wscript
++
++		The files must be a list and contain paths as strings or as Nodes
++
++		The relative_trick flag can be set to install folders, use bld.path.ant_glob() with it
++		"""
++		if env:
++			assert isinstance(env, Environment.Environment), "invalid parameter"
++		else:
++			env = self.env
++
++		if not path: return []
++
++		if not cwd:
++			cwd = self.path
++
++		if isinstance(files, str) and '*' in files:
++			gl = cwd.abspath() + os.sep + files
++			lst = glob.glob(gl)
++		else:
++			lst = Utils.to_list(files)
++
++		if not getattr(lst, '__iter__', False):
++			lst = [lst]
++
++		destpath = self.get_install_path(path, env)
++
++		Utils.check_dir(destpath)
++
++		installed_files = []
++		for filename in lst:
++			if isinstance(filename, str) and os.path.isabs(filename):
++				alst = Utils.split_path(filename)
++				destfile = os.path.join(destpath, alst[-1])
++			else:
++				if isinstance(filename, Node.Node):
++					nd = filename
++				else:
++					nd = cwd.find_resource(filename)
++				if not nd:
++					raise Utils.WafError("Unable to install the file %r (not found in %s)" % (filename, cwd))
++
++				if relative_trick:
++					destfile = os.path.join(destpath, filename)
++					Utils.check_dir(os.path.dirname(destfile))
++				else:
++					destfile = os.path.join(destpath, nd.name)
++
++				filename = nd.abspath(env)
++
++			if self.do_install(filename, destfile, chmod):
++				installed_files.append(destfile)
++		return installed_files
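++	# illustrative usage from a wscript build() function:
++	#   bld.install_files('${PREFIX}/include', ['a.h', 'b.h'])
++	# via the group_method wrapper applied at the end of this class, the copy
++	# is postponed until the current task group has finished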
++
++	def install_as(self, path, srcfile, env=None, chmod=O644, cwd=None):
++		"""
++		srcfile may be a string or a Node representing the file to install
++
++		returns True if the file was effectively installed, False otherwise
++		"""
++		if env:
++			assert isinstance(env, Environment.Environment), "invalid parameter"
++		else:
++			env = self.env
++
++		if not path:
++			raise Utils.WafError("where do you want to install %r? (%r?)" % (srcfile, path))
++
++		if not cwd:
++			cwd = self.path
++
++		destpath = self.get_install_path(path, env)
++
++		dir, name = os.path.split(destpath)
++		Utils.check_dir(dir)
++
++		# the source path
++		if isinstance(srcfile, Node.Node):
++			src = srcfile.abspath(env)
++		else:
++			src = srcfile
++			if not os.path.isabs(srcfile):
++				node = cwd.find_resource(srcfile)
++				if not node:
++					raise Utils.WafError("Unable to install the file %r (not found in %s)" % (srcfile, cwd))
++				src = node.abspath(env)
++
++		return self.do_install(src, destpath, chmod)
++
++	def symlink_as(self, path, src, env=None, cwd=None):
++		"""example:  bld.symlink_as('${PREFIX}/lib/libfoo.so', 'libfoo.so.1.2.3') """
++
++		if sys.platform == 'win32':
++			# well, this *cannot* work
++			return
++
++		if not path:
++			raise Utils.WafError("where do you want to install %r? (%r?)" % (src, path))
++
++		tgt = self.get_install_path(path, env)
++
++		dir, name = os.path.split(tgt)
++		Utils.check_dir(dir)
++
++		if self.is_install > 0:
++			link = False
++			if not os.path.islink(tgt):
++				link = True
++			elif os.readlink(tgt) != src:
++				link = True
++
++			if link:
++				try: os.remove(tgt)
++				except OSError: pass
++
++				info('* symlink %s (-> %s)' % (tgt, src))
++				os.symlink(src, tgt)
++			return 0
++
++		else: # UNINSTALL
++			try:
++				info('* removing %s' % (tgt))
++				os.remove(tgt)
++				return 0
++			except OSError:
++				return 1
++
++	def exec_command(self, cmd, **kw):
++		# 'runner' zone is printed out for waf -v, see wafadmin/Options.py
++		debug('runner: system command -> %s', cmd)
++		if self.log:
++			self.log.write('%s\n' % cmd)
++			kw['log'] = self.log
++		try:
++			if not kw.get('cwd', None):
++				kw['cwd'] = self.cwd
++		except AttributeError:
++			self.cwd = kw['cwd'] = self.bldnode.abspath()
++		return Utils.exec_command(cmd, **kw)
++
++	def printout(self, s):
++		f = self.log or sys.stderr
++		f.write(s)
++		f.flush()
++
++	def add_subdirs(self, dirs):
++		self.recurse(dirs, 'build')
++
++	def pre_recurse(self, name_or_mod, path, nexdir):
++		if not hasattr(self, 'oldpath'):
++			self.oldpath = []
++		self.oldpath.append(self.path)
++		self.path = self.root.find_dir(nexdir)
++		return {'bld': self, 'ctx': self}
++
++	def post_recurse(self, name_or_mod, path, nexdir):
++		self.path = self.oldpath.pop()
++
++	###### user-defined behaviour
++
++	def pre_build(self):
++		if hasattr(self, 'pre_funs'):
++			for m in self.pre_funs:
++				m(self)
++
++	def post_build(self):
++		if hasattr(self, 'post_funs'):
++			for m in self.post_funs:
++				m(self)
++
++	def add_pre_fun(self, meth):
++		try: self.pre_funs.append(meth)
++		except AttributeError: self.pre_funs = [meth]
++
++	def add_post_fun(self, meth):
++		try: self.post_funs.append(meth)
++		except AttributeError: self.post_funs = [meth]
++
++	def use_the_magic(self):
++		Task.algotype = Task.MAXPARALLEL
++		Task.file_deps = Task.extract_deps
++		self.magic = True
++
++	install_as = group_method(install_as)
++	install_files = group_method(install_files)
++	symlink_as = group_method(symlink_as)
++
+diff --git a/buildtools/wafadmin/Configure.py b/buildtools/wafadmin/Configure.py
+new file mode 100644
+index 0000000..35b4e51
+--- /dev/null
++++ b/buildtools/wafadmin/Configure.py
+@@ -0,0 +1,444 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2005-2008 (ita)
++
++"""
++Configuration system
++
++A configuration instance is created when "waf configure" is called; it is used to:
++* create data dictionaries (Environment instances)
++* store the list of modules to import
++
++The old model (copied from Scons) was to store logic (mapping file extensions to functions)
++along with the data. In Waf a way was found to separate that logic by adding an indirection
++layer (storing the names in the Environment instances)
++
++In the new model, the logic is more object-oriented, and the user scripts provide the
++logic. The data files (Environments) must contain configuration data only (flags, ..).
++
++Note: the c/c++ related code is in the module config_c
++"""
++
++import os, shlex, sys, time
++try: import cPickle
++except ImportError: import pickle as cPickle
++import Environment, Utils, Options, Logs
++from Logs import warn
++from Constants import *
++
++try:
++	from urllib import request
++except:
++	from urllib import urlopen
++else:
++	urlopen = request.urlopen
++
++conf_template = '''# project %(app)s configured on %(now)s by
++# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
++# using %(args)s
++#
++'''
++
++class ConfigurationError(Utils.WscriptError):
++	pass
++
++autoconfig = False
++"reconfigure the project automatically"
++
++def find_file(filename, path_list):
++	"""find a file in a list of paths
++	@param filename: name of the file to search for
++	@param path_list: list of directories to search
++	@return: the first directory containing filename, or '' if it could not be found
++"""
++	for directory in Utils.to_list(path_list):
++		if os.path.exists(os.path.join(directory, filename)):
++			return directory
++	return ''
++
++def find_program_impl(env, filename, path_list=[], var=None, environ=None):
++	"""find a program in folders path_lst, and sets env[var]
++	@param env: environment
++	@param filename: name of the program to search for
++	@param path_list: list of directories to search for filename
++	@param var: environment value to be checked for in env or os.environ
++	@return: either the value referenced by [var] in env or os.environ,
++         or the full path of the first occurrence of filename, or '' if it could not be found
++"""
++
++	if not environ:
++		environ = os.environ
++
++	try: path_list = path_list.split()
++	except AttributeError: pass
++
++	if var:
++		if env[var]: return env[var]
++		if var in environ: env[var] = environ[var]
++
++	if not path_list: path_list = environ.get('PATH', '').split(os.pathsep)
++
++	ext = (Options.platform == 'win32') and '.exe,.com,.bat,.cmd' or ''
++	for y in [filename+x for x in ext.split(',')]:
++		for directory in path_list:
++			x = os.path.join(directory, y)
++			if os.path.isfile(x):
++				if var: env[var] = x
++				return x
++	return ''
++
++class ConfigurationContext(Utils.Context):
++	tests = {}
++	error_handlers = []
++	def __init__(self, env=None, blddir='', srcdir=''):
++		self.env = None
++		self.envname = ''
++
++		self.environ = dict(os.environ)
++
++		self.line_just = 40
++
++		self.blddir = blddir
++		self.srcdir = srcdir
++		self.all_envs = {}
++
++		# curdir: necessary for recursion
++		self.cwd = self.curdir = os.getcwd()
++
++		self.tools = [] # tools loaded in the configuration, and that will be loaded when building
++
++		self.setenv(DEFAULT)
++
++		self.lastprog = ''
++
++		self.hash = 0
++		self.files = []
++
++		self.tool_cache = []
++
++		if self.blddir:
++			self.post_init()
++
++	def post_init(self):
++
++		self.cachedir = os.path.join(self.blddir, CACHE_DIR)
++
++		path = os.path.join(self.blddir, WAF_CONFIG_LOG)
++		try: os.unlink(path)
++		except (OSError, IOError): pass
++
++		try:
++			self.log = open(path, 'w')
++		except (OSError, IOError):
++			self.fatal('could not open %r for writing' % path)
++
++		app = Utils.g_module.APPNAME
++		if app:
++			ver = getattr(Utils.g_module, 'VERSION', '')
++			if ver:
++				app = "%s (%s)" % (app, ver)
++
++		now = time.ctime()
++		pyver = sys.hexversion
++		systype = sys.platform
++		args = " ".join(sys.argv)
++		wafver = WAFVERSION
++		abi = ABI
++		self.log.write(conf_template % vars())
++
++	def __del__(self):
++		"""cleanup function: close config.log"""
++
++		# may be run by the gc, not always after initialization
++		if hasattr(self, 'log') and self.log:
++			self.log.close()
++
++	def fatal(self, msg):
++		raise ConfigurationError(msg)
++
++	def check_tool(self, input, tooldir=None, funs=None):
++		"load a waf tool"
++
++		tools = Utils.to_list(input)
++		if tooldir: tooldir = Utils.to_list(tooldir)
++		for tool in tools:
++			tool = tool.replace('++', 'xx')
++			if tool == 'java': tool = 'javaw'
++			if tool.lower() == 'unittest': tool = 'unittestw'
++			# avoid loading the same tool more than once with the same functions
++			# used by composite projects
++
++			mag = (tool, id(self.env), funs)
++			if mag in self.tool_cache:
++				continue
++			self.tool_cache.append(mag)
++
++			module = None
++			try:
++				module = Utils.load_tool(tool, tooldir)
++			except Exception, e:
++				ex = e
++				if Options.options.download:
++					_3rdparty = os.path.normpath(Options.tooldir[0] + os.sep + '..' + os.sep + '3rdparty')
++
++					# try to download the tool from the repository then
++					# the default is set to false
++					for x in Utils.to_list(Options.remote_repo):
++						for sub in ['branches/waf-%s/wafadmin/3rdparty' % WAFVERSION, 'trunk/wafadmin/3rdparty']:
++							url = '/'.join((x, sub, tool + '.py'))
++							try:
++								web = urlopen(url)
++								if web.getcode() != 200:
++									continue
++							except Exception, e:
++								# on python3 urlopen throws an exception
++								continue
++							else:
++								loc = None
++								try:
++									loc = open(_3rdparty + os.sep + tool + '.py', 'wb')
++									loc.write(web.read())
++									web.close()
++								finally:
++									if loc:
++										loc.close()
++								Logs.warn('downloaded %s from %s' % (tool, url))
++								try:
++									module = Utils.load_tool(tool, tooldir)
++								except:
++									Logs.warn('module %s from %s is unusable' % (tool, url))
++									try:
++										os.unlink(_3rdparty + os.sep + tool + '.py')
++									except:
++										pass
++									continue
++						else:
++							break
++
++					if not module:
++						Logs.error('Could not load the tool %r or download a suitable replacement from the repository (sys.path %r)\n%s' % (tool, sys.path, e))
++						raise ex
++				else:
++					Logs.error('Could not load the tool %r in %r (try the --download option?):\n%s' % (tool, sys.path, e))
++					raise ex
++
++			if funs is not None:
++				self.eval_rules(funs)
++			else:
++				func = getattr(module, 'detect', None)
++				if func:
++					if type(func) is type(find_file): func(self)
++					else: self.eval_rules(func)
++
++			self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
++
++	def sub_config(self, k):
++		"executes the configure function of a wscript module"
++		self.recurse(k, name='configure')
++
++	def pre_recurse(self, name_or_mod, path, nexdir):
++		return {'conf': self, 'ctx': self}
++
++	def post_recurse(self, name_or_mod, path, nexdir):
++		if not autoconfig:
++			return
++		self.hash = hash((self.hash, getattr(name_or_mod, 'waf_hash_val', name_or_mod)))
++		self.files.append(path)
++
++	def store(self, file=''):
++		"save the config results into the cache file"
++		if not os.path.isdir(self.cachedir):
++			os.makedirs(self.cachedir)
++
++		if not file:
++			file = open(os.path.join(self.cachedir, 'build.config.py'), 'w')
++		file.write('version = 0x%x\n' % HEXVERSION)
++		file.write('tools = %r\n' % self.tools)
++		file.close()
++
++		if not self.all_envs:
++			self.fatal('nothing to store in the configuration context!')
++		for key in self.all_envs:
++			tmpenv = self.all_envs[key]
++			tmpenv.store(os.path.join(self.cachedir, key + CACHE_SUFFIX))
++
++	def set_env_name(self, name, env):
++		"add a new environment called name"
++		self.all_envs[name] = env
++		return env
++
++	def retrieve(self, name, fromenv=None):
++		"retrieve an environment called name"
++		try:
++			env = self.all_envs[name]
++		except KeyError:
++			env = Environment.Environment()
++			env['PREFIX'] = os.path.abspath(os.path.expanduser(Options.options.prefix))
++			self.all_envs[name] = env
++		else:
++			if fromenv: warn("The environment %s may have been configured already" % name)
++		return env
++
++	def setenv(self, name):
++		"enable the environment called name"
++		self.env = self.retrieve(name)
++		self.envname = name
++
++	def add_os_flags(self, var, dest=None):
++		# do not use 'get': a missing variable must raise KeyError here
++		try: self.env.append_value(dest or var, Utils.to_list(self.environ[var]))
++		except KeyError: pass
++
++	def check_message_1(self, sr):
++		self.line_just = max(self.line_just, len(sr))
++		for x in ('\n', self.line_just * '-', '\n', sr, '\n'):
++			self.log.write(x)
++		Utils.pprint('NORMAL', "%s :" % sr.ljust(self.line_just), sep='')
++
++	def check_message_2(self, sr, color='GREEN'):
++		self.log.write(sr)
++		self.log.write('\n')
++		Utils.pprint(color, sr)
++
++	def check_message(self, th, msg, state, option=''):
++		sr = 'Checking for %s %s' % (th, msg)
++		self.check_message_1(sr)
++		p = self.check_message_2
++		if state: p('ok ' + str(option))
++		else: p('not found', 'YELLOW')
++
++	# FIXME remove in waf 1.6
++	# the parameter 'option' is not used (kept for compatibility)
++	def check_message_custom(self, th, msg, custom, option='', color='PINK'):
++		sr = 'Checking for %s %s' % (th, msg)
++		self.check_message_1(sr)
++		self.check_message_2(custom, color)
++
++	def msg(self, msg, result, color=None):
++		"""Prints a configuration message 'Checking for xxx: ok'"""
++		self.start_msg('Checking for ' + msg)
++
++		if not isinstance(color, str):
++			color = result and 'GREEN' or 'YELLOW'
++
++		self.end_msg(result, color)
++
++	def start_msg(self, msg):
++		try:
++			if self.in_msg:
++				return
++		except:
++			self.in_msg = 0
++		self.in_msg += 1
++
++		self.line_just = max(self.line_just, len(msg))
++		for x in ('\n', self.line_just * '-', '\n', msg, '\n'):
++			self.log.write(x)
++		Utils.pprint('NORMAL', "%s :" % msg.ljust(self.line_just), sep='')
++
++	def end_msg(self, result, color):
++		self.in_msg -= 1
++		if self.in_msg:
++			return
++
++		if not color:
++			color = 'GREEN'
++		if result == True:
++			msg = 'ok'
++		elif result == False:
++			msg = 'not found'
++			color = 'YELLOW'
++		else:
++			msg = str(result)
++
++		self.log.write(msg)
++		self.log.write('\n')
++		Utils.pprint(color, msg)
++
++	def find_program(self, filename, path_list=[], var=None, mandatory=False):
++		"wrapper that adds a configuration message"
++
++		ret = None
++		if var:
++			if self.env[var]:
++				ret = self.env[var]
++			elif var in os.environ:
++				ret = os.environ[var]
++
++		if not isinstance(filename, list): filename = [filename]
++		if not ret:
++			for x in filename:
++				ret = find_program_impl(self.env, x, path_list, var, environ=self.environ)
++				if ret: break
++
++		self.check_message_1('Checking for program %s' % ' or '.join(filename))
++		self.log.write('  find program=%r paths=%r var=%r\n  -> %r\n' % (filename, path_list, var, ret))
++		if ret:
++			Utils.pprint('GREEN', str(ret))
++		else:
++			Utils.pprint('YELLOW', 'not found')
++			if mandatory:
++				self.fatal('The program %r is required' % filename)
++
++		if var:
++			self.env[var] = ret
++		return ret
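++	# illustrative: locate a compiler and record it in the environment
++	#   cc = conf.find_program('gcc', var='CC', mandatory=True)
++	# with mandatory=True, a missing program aborts via self.fatal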
++
++	def cmd_to_list(self, cmd):
++		"commands may be written in pseudo shell like 'ccache g++'"
++		if isinstance(cmd, str) and cmd.find(' '):
++			try:
++				os.stat(cmd)
++			except OSError:
++				return shlex.split(cmd)
++			else:
++				return [cmd]
++		return cmd
++
++	def __getattr__(self, name):
++		r = self.__class__.__dict__.get(name, None)
++		if r: return r
++		if name and name.startswith('require_'):
++
++			for k in ['check_', 'find_']:
++				n = name.replace('require_', k)
++				ret = self.__class__.__dict__.get(n, None)
++				if ret:
++					def run(*k, **kw):
++						r = ret(self, *k, **kw)
++						if not r:
++							self.fatal('requirement failure')
++						return r
++					return run
++		self.fatal('No such method %r' % name)
++
++	def eval_rules(self, rules):
++		self.rules = Utils.to_list(rules)
++		for x in self.rules:
++			f = getattr(self, x)
++			if not f: self.fatal("No such method '%s'." % x)
++			try:
++				f()
++			except Exception, e:
++				ret = self.err_handler(x, e)
++				if ret == BREAK:
++					break
++				elif ret == CONTINUE:
++					continue
++				else:
++					self.fatal(e)
++
++	def err_handler(self, fun, error):
++		pass
++
++def conf(f):
++	"decorator: attach new configuration functions"
++	setattr(ConfigurationContext, f.__name__, f)
++	return f
++
++def conftest(f):
++	"decorator: attach new configuration tests (registered as strings)"
++	ConfigurationContext.tests[f.__name__] = f
++	return conf(f)
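++# illustrative: extending the configuration context with a new method
++#   @conf
++#   def check_something(conf):
++#       conf.check_message('something', '', True)
++# a wscript's configure() can then call conf.check_something()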
++
++
+diff --git a/buildtools/wafadmin/Constants.py b/buildtools/wafadmin/Constants.py
+new file mode 100644
+index 0000000..e67dda6
+--- /dev/null
++++ b/buildtools/wafadmin/Constants.py
+@@ -0,0 +1,76 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Yinon dot me gmail 2008
++
++"""
++these constants are somewhat public, try not to mess with them
++
++maintainer: the version number is updated from the top-level wscript file
++"""
++
++# do not touch these three lines, they are updated automatically
++HEXVERSION=0x105019
++WAFVERSION="1.5.19"
++WAFREVISION = "9709M"
++ABI = 7
++
++# permissions
++O644 = 420
++O755 = 493
++
++MAXJOBS = 99999999
++
++CACHE_DIR          = 'c4che'
++CACHE_SUFFIX       = '.cache.py'
++DBFILE             = '.wafpickle-%d' % ABI
++WSCRIPT_FILE       = 'wscript'
++WSCRIPT_BUILD_FILE = 'wscript_build'
++WAF_CONFIG_LOG     = 'config.log'
++WAF_CONFIG_H       = 'config.h'
++
++SIG_NIL = 'iluvcuteoverload'
++
++VARIANT = '_VARIANT_'
++DEFAULT = 'default'
++
++SRCDIR  = 'srcdir'
++BLDDIR  = 'blddir'
++APPNAME = 'APPNAME'
++VERSION = 'VERSION'
++
++DEFINES = 'defines'
++UNDEFINED = ()
++
++BREAK = "break"
++CONTINUE = "continue"
++
++# task scheduler options
++JOBCONTROL = "JOBCONTROL"
++MAXPARALLEL = "MAXPARALLEL"
++NORMAL = "NORMAL"
++
++# task state
++NOT_RUN = 0
++MISSING = 1
++CRASHED = 2
++EXCEPTION = 3
++SKIPPED = 8
++SUCCESS = 9
++
++ASK_LATER = -1
++SKIP_ME = -2
++RUN_ME = -3
++
++
++LOG_FORMAT = "%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
++HOUR_FORMAT = "%H:%M:%S"
++
++TEST_OK = True
++
++CFG_FILES = 'cfg_files'
++
++# positive '->' install
++# negative '<-' uninstall
++INSTALL = 1337
++UNINSTALL = -1337
++
+diff --git a/buildtools/wafadmin/Environment.py b/buildtools/wafadmin/Environment.py
+new file mode 100644
+index 0000000..52c83b4
+--- /dev/null
++++ b/buildtools/wafadmin/Environment.py
+@@ -0,0 +1,210 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2005 (ita)
++
++"""Environment representation
++
++There is one gotcha: getitem returns [] if the value evaluates to False
++This means env['foo'] = {}; print env['foo'] will print [] not {}
++"""
++
++import os, copy, re
++import Logs, Options, Utils
++from Constants import *
++re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
++
++class Environment(object):
++	"""A safe-to-use dictionary, but do not attach functions to it please (break cPickle)
++	An environment instance can be stored into a file and loaded easily
++	"""
++	__slots__ = ("table", "parent")
++	def __init__(self, filename=None):
++		self.table = {}
++		#self.parent = None
++
++		if filename:
++			self.load(filename)
++
++	def __contains__(self, key):
++		if key in self.table: return True
++		try: return self.parent.__contains__(key)
++		except AttributeError: return False # parent may not exist
++
++	def __str__(self):
++		keys = set()
++		cur = self
++		while cur:
++			keys.update(cur.table.keys())
++			cur = getattr(cur, 'parent', None)
++		keys = list(keys)
++		keys.sort()
++		return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in keys])
++
++	def __getitem__(self, key):
++		try:
++			while 1:
++				x = self.table.get(key, None)
++				if not x is None:
++					return x
++				self = self.parent
++		except AttributeError:
++			return []
++
++	def __setitem__(self, key, value):
++		self.table[key] = value
++
++	def __delitem__(self, key):
++		del self.table[key]
++
++	def pop(self, key, *args):
++		if len(args):
++			return self.table.pop(key, *args)
++		return self.table.pop(key)
++
++	def set_variant(self, name):
++		self.table[VARIANT] = name
++
++	def variant(self):
++		try:
++			while 1:
++				x = self.table.get(VARIANT, None)
++				if not x is None:
++					return x
++				self = self.parent
++		except AttributeError:
++			return DEFAULT
++
++	def copy(self):
++		# TODO waf 1.6 rename this method derive, #368
++		newenv = Environment()
++		newenv.parent = self
++		return newenv
++
++	def detach(self):
++		"""TODO try it
++		modifying the original env will not change the copy"""
++		tbl = self.get_merged_dict()
++		try:
++			delattr(self, 'parent')
++		except AttributeError:
++			pass
++		else:
++			keys = tbl.keys()
++			for x in keys:
++				tbl[x] = copy.deepcopy(tbl[x])
++			self.table = tbl
++
++	def get_flat(self, key):
++		s = self[key]
++		if isinstance(s, str): return s
++		return ' '.join(s)
++
++	def _get_list_value_for_modification(self, key):
++		"""Gets a value that must be a list for further modification.  The
++		list may be modified inplace and there is no need to
++		"self.table[var] = value" afterwards.
++		"""
++		try:
++			value = self.table[key]
++		except KeyError:
++			try: value = self.parent[key]
++			except AttributeError: value = []
++			if isinstance(value, list):
++				value = value[:]
++			else:
++				value = [value]
++		else:
++			if not isinstance(value, list):
++				value = [value]
++		self.table[key] = value
++		return value
++
++	def append_value(self, var, value):
++		current_value = self._get_list_value_for_modification(var)
++
++		if isinstance(value, list):
++			current_value.extend(value)
++		else:
++			current_value.append(value)
++
++	def prepend_value(self, var, value):
++		current_value = self._get_list_value_for_modification(var)
++
++		if isinstance(value, list):
++			current_value = value + current_value
++			# a new list: update the dictionary entry
++			self.table[var] = current_value
++		else:
++			current_value.insert(0, value)
++
++	# prepend unique would be ambiguous
++	def append_unique(self, var, value):
++		current_value = self._get_list_value_for_modification(var)
++
++		if isinstance(value, list):
++			for value_item in value:
++				if value_item not in current_value:
++					current_value.append(value_item)
++		else:
++			if value not in current_value:
++				current_value.append(value)
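++	# illustrative semantics of the three list mutators:
++	#   env.append_value('CCFLAGS', '-g')       # may create duplicates
++	#   env.append_unique('CCFLAGS', '-g')      # skipped if already present
++	#   env.prepend_value('CCFLAGS', ['-O2'])   # inserted at the front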
++
++	def get_merged_dict(self):
++		"""compute a merged table"""
++		table_list = []
++		env = self
++		while 1:
++			table_list.insert(0, env.table)
++			try: env = env.parent
++			except AttributeError: break
++		merged_table = {}
++		for table in table_list:
++			merged_table.update(table)
++		return merged_table
++
++	def store(self, filename):
++		"Write the variables into a file"
++		file = open(filename, 'w')
++		merged_table = self.get_merged_dict()
++		keys = list(merged_table.keys())
++		keys.sort()
++		for k in keys: file.write('%s = %r\n' % (k, merged_table[k]))
++		file.close()
++
++	def load(self, filename):
++		"Retrieve the variables from a file"
++		tbl = self.table
++		code = Utils.readf(filename)
++		for m in re_imp.finditer(code):
++			g = m.group
++			tbl[g(2)] = eval(g(3))
++		Logs.debug('env: %s', self.table)
++
++	def get_destdir(self):
++		"return the destdir, useful for installing"
++		if self.__getitem__('NOINSTALL'): return ''
++		return Options.options.destdir
++
++	def update(self, d):
++		for k, v in d.iteritems():
++			self[k] = v
++
++
++	def __getattr__(self, name):
++		if name in self.__slots__:
++			return object.__getattr__(self, name)
++		else:
++			return self[name]
++
++	def __setattr__(self, name, value):
++		if name in self.__slots__:
++			object.__setattr__(self, name, value)
++		else:
++			self[name] = value
++
++	def __delattr__(self, name):
++		if name in self.__slots__:
++			object.__delattr__(self, name)
++		else:
++			del self[name]
++
+diff --git a/buildtools/wafadmin/Logs.py b/buildtools/wafadmin/Logs.py
+new file mode 100644
+index 0000000..c160b37
+--- /dev/null
++++ b/buildtools/wafadmin/Logs.py
+@@ -0,0 +1,134 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2005 (ita)
++
++import ansiterm
++import os, re, logging, traceback, sys
++from Constants import *
++
++zones = ''
++verbose = 0
++
++colors_lst = {
++'USE' : True,
++'BOLD'  :'\x1b[01;1m',
++'RED'   :'\x1b[01;31m',
++'GREEN' :'\x1b[32m',
++'YELLOW':'\x1b[33m',
++'PINK'  :'\x1b[35m',
++'BLUE'  :'\x1b[01;34m',
++'CYAN'  :'\x1b[36m',
++'NORMAL':'\x1b[0m',
++'cursor_on'  :'\x1b[?25h',
++'cursor_off' :'\x1b[?25l',
++}
++
++got_tty = False
++term = os.environ.get('TERM', 'dumb')
++if not term in ['dumb', 'emacs']:
++	try:
++		got_tty = sys.stderr.isatty() or (sys.platform == 'win32' and term in ['xterm', 'msys'])
++	except AttributeError:
++		pass
++
++import Utils
++
++if not got_tty or 'NOCOLOR' in os.environ:
++	colors_lst['USE'] = False
++
++# test
++#if sys.platform == 'win32':
++#	colors_lst['USE'] = True
++
++def get_color(cl):
++	if not colors_lst['USE']: return ''
++	return colors_lst.get(cl, '')
++
++class foo(object):
++	def __getattr__(self, a):
++		return get_color(a)
++	def __call__(self, a):
++		return get_color(a)
++
++colors = foo()
++
++re_log = re.compile(r'(\w+): (.*)', re.M)
++class log_filter(logging.Filter):
++	def __init__(self, name=None):
++		pass
++
++	def filter(self, rec):
++		rec.c1 = colors.PINK
++		rec.c2 = colors.NORMAL
++		rec.zone = rec.module
++		if rec.levelno >= logging.INFO:
++			if rec.levelno >= logging.ERROR:
++				rec.c1 = colors.RED
++			elif rec.levelno >= logging.WARNING:
++				rec.c1 = colors.YELLOW
++			else:
++				rec.c1 = colors.GREEN
++			return True
++
++		zone = ''
++		m = re_log.match(rec.msg)
++		if m:
++			zone = rec.zone = m.group(1)
++			rec.msg = m.group(2)
++
++		if zones:
++			return getattr(rec, 'zone', '') in zones or '*' in zones
++		elif not verbose > 2:
++			return False
++		return True
++
++class formatter(logging.Formatter):
++	def __init__(self):
++		logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT)
++
++	def format(self, rec):
++		if rec.levelno >= logging.WARNING or rec.levelno == logging.INFO:
++			try:
++				return '%s%s%s' % (rec.c1, rec.msg.decode('utf-8'), rec.c2)
++			except:
++				return rec.c1+rec.msg+rec.c2
++		return logging.Formatter.format(self, rec)
++
++def debug(*k, **kw):
++	if verbose:
++		k = list(k)
++		k[0] = k[0].replace('\n', ' ')
++		logging.debug(*k, **kw)
++
++def error(*k, **kw):
++	logging.error(*k, **kw)
++	if verbose > 1:
++		if isinstance(k[0], Utils.WafError):
++			st = k[0].stack
++		else:
++			st = traceback.extract_stack()
++		if st:
++			st = st[:-1]
++			buf = []
++			for filename, lineno, name, line in st:
++				buf.append('  File "%s", line %d, in %s' % (filename, lineno, name))
++				if line:
++					buf.append('	%s' % line.strip())
++			if buf: logging.error("\n".join(buf))
++
++warn = logging.warn
++info = logging.info
++
++def init_log():
++	log = logging.getLogger()
++	log.handlers = []
++	log.filters = []
++	hdlr = logging.StreamHandler()
++	hdlr.setFormatter(formatter())
++	log.addHandler(hdlr)
++	log.addFilter(log_filter())
++	log.setLevel(logging.DEBUG)
++
++# may be initialized more than once
++init_log()
++
+diff --git a/buildtools/wafadmin/Node.py b/buildtools/wafadmin/Node.py
+new file mode 100644
+index 0000000..236dd0d
+--- /dev/null
++++ b/buildtools/wafadmin/Node.py
+@@ -0,0 +1,695 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2005 (ita)
++
++"""
++Node: filesystem structure, contains lists of nodes
++
++IMPORTANT:
++1. Each file/folder is represented by exactly one node.
++
++2. Most would-be class properties are stored in Build: nodes to depend on, signature, flags, ..
++unused class members increase the .wafpickle file size noticeably when there are many objects.
++
++3. The build is launched from the top of the build dir (for example, in _build_/).
++
++4. Node should not be instantiated directly.
++Each instance of Build.BuildContext has a Node subclass.
++(aka: 'Nodu', see BuildContext initializer)
++The BuildContext is referenced here as self.__class__.bld
++Its Node class is referenced here as self.__class__
++
++The public and advertised apis are the following:
++${TGT}                 -> dir/to/file.ext
++${TGT[0].base()}       -> dir/to/file
++${TGT[0].dir(env)}     -> dir/to
++${TGT[0].file()}       -> file.ext
++${TGT[0].file_base()}  -> file
++${TGT[0].suffix()}     -> .ext
++${TGT[0].abspath(env)} -> /path/to/dir/to/file.ext
++
++"""
++
++import os, sys, fnmatch, re, stat
++import Utils, Constants
++
++UNDEFINED = 0
++DIR = 1
++FILE = 2
++BUILD = 3
++
++type_to_string = {UNDEFINED: "unk", DIR: "dir", FILE: "src", BUILD: "bld"}
++
++# These fnmatch expressions are used by default to prune the directory tree
++# while doing the recursive traversal in the find_iter method of the Node class.
++prune_pats = '.git .bzr .hg .svn _MTN _darcs CVS SCCS'.split()
++
++# These fnmatch expressions are used by default to exclude files and dirs
++# while doing the recursive traversal in the find_iter method of the Node class.
++exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split()
++
++# These Utils.jar_regexp expressions are used by default to exclude files and dirs and also prune the directory tree
++# while doing the recursive traversal in the ant_glob method of the Node class.
++exclude_regs = '''
++**/*~
++**/#*#
++**/.#*
++**/%*%
++**/._*
++**/CVS
++**/CVS/**
++**/.cvsignore
++**/SCCS
++**/SCCS/**
++**/vssver.scc
++**/.svn
++**/.svn/**
++**/.git
++**/.git/**
++**/.gitignore
++**/.bzr
++**/.bzr/**
++**/.hg
++**/.hg/**
++**/_MTN
++**/_MTN/**
++**/_darcs
++**/_darcs/**
++**/.DS_Store'''
++
++class Node(object):
++	__slots__ = ("name", "parent", "id", "childs")
++	def __init__(self, name, parent, node_type = UNDEFINED):
++		self.name = name
++		self.parent = parent
++
++		# assumption: one build object at a time
++		self.__class__.bld.id_nodes += 4
++		self.id = self.__class__.bld.id_nodes + node_type
++
++		if node_type == DIR: self.childs = {}
++
++		# We do not want to add another type attribute (memory)
++		# use the id to find out: type = id & 3
++		# for setting: new type = type + x - type & 3
++
++		if parent and name in parent.childs:
++			raise Utils.WafError('node %s exists in the parent files %r already' % (name, parent))
++
++		if parent: parent.childs[name] = self
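++		# illustrative: ids advance in steps of 4, so the two low bits encode
++		# the node type, e.g. a node with id 13 has type 13 & 3 == 1 (DIR)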
++
++	def __setstate__(self, data):
++		if len(data) == 4:
++			(self.parent, self.name, self.id, self.childs) = data
++		else:
++			(self.parent, self.name, self.id) = data
++
++	def __getstate__(self):
++		if getattr(self, 'childs', None) is None:
++			return (self.parent, self.name, self.id)
++		else:
++			return (self.parent, self.name, self.id, self.childs)
++
++	def __str__(self):
++		if not self.parent: return ''
++		return "%s://%s" % (type_to_string[self.id & 3], self.abspath())
++
++	def __repr__(self):
++		return self.__str__()
++
++	def __hash__(self):
++		"expensive, make certain it is not used"
++		raise Utils.WafError('nodes, you are doing it wrong')
++
++	def __copy__(self):
++		"nodes are not supposed to be copied"
++		raise Utils.WafError('nodes are not supposed to be cloned')
++
++	def get_type(self):
++		return self.id & 3
++
++	def set_type(self, t):
++		"dangerous, you are not supposed to use this"
++		self.id = self.id + t - self.id & 3
++
++	def dirs(self):
++		return [x for x in self.childs.values() if x.id & 3 == DIR]
++
++	def files(self):
++		return [x for x in self.childs.values() if x.id & 3 == FILE]
++
++	def get_dir(self, name, default=None):
++		node = self.childs.get(name, None)
++		if not node or node.id & 3 != DIR: return default
++		return  node
++
++	def get_file(self, name, default=None):
++		node = self.childs.get(name, None)
++		if not node or node.id & 3 != FILE: return default
++		return node
++
++	def get_build(self, name, default=None):
++		node = self.childs.get(name, None)
++		if not node or node.id & 3 != BUILD: return default
++		return node
++
++	def find_resource(self, lst):
++		"Find an existing input file: either a build node declared previously or a source node"
++		if isinstance(lst, str):
++			lst = Utils.split_path(lst)
++
++		if len(lst) == 1:
++			parent = self
++		else:
++			parent = self.find_dir(lst[:-1])
++			if not parent: return None
++		self.__class__.bld.rescan(parent)
++
++		name = lst[-1]
++		node = parent.childs.get(name, None)
++		if node:
++			tp = node.id & 3
++			if tp == FILE or tp == BUILD:
++				return node
++			else:
++				return None
++
++		tree = self.__class__.bld
++		if not name in tree.cache_dir_contents[parent.id]:
++			return None
++
++		path = parent.abspath() + os.sep + name
++		try:
++			st = Utils.h_file(path)
++		except IOError:
++			return None
++
++		child = self.__class__(name, parent, FILE)
++		tree.node_sigs[0][child.id] = st
++		return child
++
++	def find_or_declare(self, lst):
++		"Used for declaring a build node representing a file being built"
++		if isinstance(lst, str):
++			lst = Utils.split_path(lst)
++
++		if len(lst) == 1:
++			parent = self
++		else:
++			parent = self.find_dir(lst[:-1])
++			if not parent: return None
++		self.__class__.bld.rescan(parent)
++
++		name = lst[-1]
++		node = parent.childs.get(name, None)
++		if node:
++			tp = node.id & 3
++			if tp != BUILD:
++				raise Utils.WafError('find_or_declare found a source file where a build file was expected %r' % '/'.join(lst))
++			return node
++		node = self.__class__(name, parent, BUILD)
++		return node
++
++	def find_dir(self, lst):
++		"search a folder in the filesystem"
++
++		if isinstance(lst, str):
++			lst = Utils.split_path(lst)
++
++		current = self
++		for name in lst:
++			self.__class__.bld.rescan(current)
++			prev = current
++
++			if not current.parent and name == current.name:
++				continue
++			elif not name:
++				continue
++			elif name == '.':
++				continue
++			elif name == '..':
++				current = current.parent or current
++			else:
++				current = prev.childs.get(name, None)
++				if current is None:
++					dir_cont = self.__class__.bld.cache_dir_contents
++					if prev.id in dir_cont and name in dir_cont[prev.id]:
++						if not prev.name:
++							if os.sep == '/':
++								# cygwin //machine/share
++								dirname = os.sep + name
++							else:
++								# windows c:
++								dirname = name
++						else:
++							# regular path
++							dirname = prev.abspath() + os.sep + name
++						if not os.path.isdir(dirname):
++							return None
++						current = self.__class__(name, prev, DIR)
++					elif (not prev.name and len(name) == 2 and name[1] == ':') or name.startswith('\\\\'):
++						# drive letter or \\ path for windows
++						current = self.__class__(name, prev, DIR)
++					else:
++						return None
++				else:
++					if current.id & 3 != DIR:
++						return None
++		return current
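++	# usage sketch (illustrative, not upstream):
++	#   node = bld.path.find_dir('src/lib')
++	# returns the corresponding directory node, or None if the folder is missing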
++
++	def ensure_dir_node_from_path(self, lst):
++		"used very rarely; forces the construction of a branch of node instances representing folders"
++
++		if isinstance(lst, str):
++			lst = Utils.split_path(lst)
++
++		current = self
++		for name in lst:
++			if not name:
++				continue
++			elif name == '.':
++				continue
++			elif name == '..':
++				current = current.parent or current
++			else:
++				prev = current
++				current = prev.childs.get(name, None)
++				if current is None:
++					current = self.__class__(name, prev, DIR)
++		return current
++
++	def exclusive_build_node(self, path):
++		"""
++		create a hierarchy in the build dir (no source folders) for ill-behaving compilers
++		the node is not hashed, so you must do it manually
++
++		after declaring such a node, find_dir and find_resource should work as expected
++		"""
++		lst = Utils.split_path(path)
++		name = lst[-1]
++		if len(lst) > 1:
++			parent = None
++			try:
++				parent = self.find_dir(lst[:-1])
++			except OSError:
++				pass
++			if not parent:
++				parent = self.ensure_dir_node_from_path(lst[:-1])
++				self.__class__.bld.rescan(parent)
++			else:
++				try:
++					self.__class__.bld.rescan(parent)
++				except OSError:
++					pass
++		else:
++			parent = self
++
++		node = parent.childs.get(name, None)
++		if not node:
++			node = self.__class__(name, parent, BUILD)
++
++		return node
++
++	def path_to_parent(self, parent):
++		"path relative to a direct ancestor, as string"
++		lst = []
++		p = self
++		h1 = parent.height()
++		h2 = p.height()
++		while h2 > h1:
++			h2 -= 1
++			lst.append(p.name)
++			p = p.parent
++		if lst:
++			lst.reverse()
++			ret = os.path.join(*lst)
++		else:
++			ret = ''
++		return ret
++
++	def find_ancestor(self, node):
++		"find a common ancestor for two nodes - for the shortest path in hierarchy"
++		dist = self.height() - node.height()
++		if dist < 0: return node.find_ancestor(self)
++		# now the real code
++		cand = self
++		while dist > 0:
++			cand = cand.parent
++			dist -= 1
++		if cand == node: return cand
++		cursor = node
++		while cand.parent:
++			cand = cand.parent
++			cursor = cursor.parent
++			if cand == cursor: return cand
++
++	def relpath_gen(self, from_node):
++		"string representing a relative path between self and another node"
++
++		if self == from_node: return '.'
++		if from_node.parent == self: return '..'
++
++		# up_path is '../../../' and down_path is 'dir/subdir/subdir/file'
++		ancestor = self.find_ancestor(from_node)
++		lst = []
++		cand = self
++		while not cand.id == ancestor.id:
++			lst.append(cand.name)
++			cand = cand.parent
++		cand = from_node
++		while not cand.id == ancestor.id:
++			lst.append('..')
++			cand = cand.parent
++		lst.reverse()
++		return os.sep.join(lst)
++
++	def nice_path(self, env=None):
++		"path as printed in the console, so files can be opened easily from the launch directory"
++		tree = self.__class__.bld
++		ln = tree.launch_node()
++
++		if self.id & 3 == FILE: return self.relpath_gen(ln)
++		else: return os.path.join(tree.bldnode.relpath_gen(ln), env.variant(), self.relpath_gen(tree.srcnode))
++
++	def is_child_of(self, node):
++		"does this node belong to the subtree rooted at the given node"
++		p = self
++		diff = self.height() - node.height()
++		while diff > 0:
++			diff -= 1
++			p = p.parent
++		return p.id == node.id
++
++	def variant(self, env):
++		"variant, or output directory for this node; a source node has variant 0"
++		if not env: return 0
++		elif self.id & 3 == FILE: return 0
++		else: return env.variant()
++
++	def height(self):
++		"number of parents"
++		# README a cache can be added here if necessary
++		d = self
++		val = -1
++		while d:
++			d = d.parent
++			val += 1
++		return val
++
++	# helpers for building things
++
++	def abspath(self, env=None):
++		"""
++		absolute path
++		@param env [Environment]:
++			* obligatory for build nodes: build/variant/src/dir/bar.o
++			* optional for dirs: get either src/dir or build/variant/src/dir
++			* excluded for source nodes: src/dir/bar.c
++
++		Instead of recomputing the absolute path each time,
++		store the already-computed absolute paths in one of (variants+1) dictionaries:
++		bld.cache_node_abspath[0] holds absolute paths for source nodes.
++		bld.cache_node_abspath[variant] holds the absolute path for the build nodes
++		which reside in the variant given by env.
++		"""
++		## absolute path - hot zone, so do not touch
++
++		# less expensive
++		variant = (env and (self.id & 3 != FILE) and env.variant()) or 0
++
++		ret = self.__class__.bld.cache_node_abspath[variant].get(self.id, None)
++		if ret: return ret
++
++		if not variant:
++			# source directory
++			if not self.parent:
++				val = os.sep == '/' and os.sep or ''
++			elif not self.parent.name: # root
++				val = (os.sep == '/' and os.sep or '') + self.name
++			else:
++				val = self.parent.abspath() + os.sep + self.name
++		else:
++			# build directory
++			val = os.sep.join((self.__class__.bld.bldnode.abspath(), variant, self.path_to_parent(self.__class__.bld.srcnode)))
++		self.__class__.bld.cache_node_abspath[variant][self.id] = val
++		return val
++
++	def change_ext(self, ext):
++		"node of the same path, but with a different extension - hot zone so do not touch"
++		name = self.name
++		k = name.rfind('.')
++		if k >= 0:
++			name = name[:k] + ext
++		else:
++			name = name + ext
++
++		return self.parent.find_or_declare([name])
++
++	def src_dir(self, env):
++		"src path without the file name"
++		return self.parent.srcpath(env)
++
++	def bld_dir(self, env):
++		"build path without the file name"
++		return self.parent.bldpath(env)
++
++	def bld_base(self, env):
++		"build path without the extension: src/dir/foo(.cpp)"
++		s = os.path.splitext(self.name)[0]
++		return os.path.join(self.bld_dir(env), s)
++
++	def bldpath(self, env=None):
++		"path seen from the build dir default/src/foo.cpp"
++		if self.id & 3 == FILE:
++			return self.relpath_gen(self.__class__.bld.bldnode)
++		p = self.path_to_parent(self.__class__.bld.srcnode)
++		if p != '':
++			return env.variant() + os.sep + p
++		return env.variant()
++
++	def srcpath(self, env=None):
++		"path in the srcdir from the build dir ../src/foo.cpp"
++		if self.id & 3 == BUILD:
++			return self.bldpath(env)
++		return self.relpath_gen(self.__class__.bld.bldnode)
++
++	def read(self, env):
++		"get the contents of a file; not used anywhere for the moment"
++		return Utils.readf(self.abspath(env))
++
++	def dir(self, env):
++		"scons-like"
++		return self.parent.abspath(env)
++
++	def file(self):
++		"scons-like"
++		return self.name
++
++	def file_base(self):
++		"scons-like"
++		return os.path.splitext(self.name)[0]
++
++	def suffix(self):
++		"scons-like - hot zone so do not touch"
++		k = max(0, self.name.rfind('.'))
++		return self.name[k:]
++
++	def find_iter_impl(self, src=True, bld=True, dir=True, accept_name=None, is_prune=None, maxdepth=25):
++		"""find nodes in the filesystem hierarchy, try to instantiate the nodes passively; same gotcha as ant_glob"""
++		bld_ctx = self.__class__.bld
++		bld_ctx.rescan(self)
++		for name in bld_ctx.cache_dir_contents[self.id]:
++			if accept_name(self, name):
++				node = self.find_resource(name)
++				if node:
++					if src and node.id & 3 == FILE:
++						yield node
++				else:
++					node = self.find_dir(name)
++					if node and node.id != bld_ctx.bldnode.id:
++						if dir:
++							yield node
++						if not is_prune(self, name):
++							if maxdepth:
++								for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
++									yield k
++			else:
++				if not is_prune(self, name):
++					node = self.find_resource(name)
++					if not node:
++						# not a file, it is a dir
++						node = self.find_dir(name)
++						if node and node.id != bld_ctx.bldnode.id:
++							if maxdepth:
++								for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
++									yield k
++
++		if bld:
++			for node in self.childs.values():
++				if node.id == bld_ctx.bldnode.id:
++					continue
++				if node.id & 3 == BUILD:
++					if accept_name(self, node.name):
++						yield node
++		raise StopIteration
++
++	def find_iter(self, in_pat=['*'], ex_pat=exclude_pats, prune_pat=prune_pats, src=True, bld=True, dir=False, maxdepth=25, flat=False):
++		"""find nodes recursively; this returns everything but folders by default (same gotcha as ant_glob)"""
++
++		if not (src or bld or dir):
++			raise StopIteration
++
++		if self.id & 3 != DIR:
++			raise StopIteration
++
++		in_pat = Utils.to_list(in_pat)
++		ex_pat = Utils.to_list(ex_pat)
++		prune_pat = Utils.to_list(prune_pat)
++
++		def accept_name(node, name):
++			for pat in ex_pat:
++				if fnmatch.fnmatchcase(name, pat):
++					return False
++			for pat in in_pat:
++				if fnmatch.fnmatchcase(name, pat):
++					return True
++			return False
++
++		def is_prune(node, name):
++			for pat in prune_pat:
++				if fnmatch.fnmatchcase(name, pat):
++					return True
++			return False
++
++		ret = self.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth)
++		if flat:
++			return " ".join([x.relpath_gen(self) for x in ret])
++
++		return ret
++
++	def ant_glob(self, *k, **kw):
++		"""
++		known gotcha: will enumerate the files, but only if the folder exists in the source directory
++		"""
++
++		src=kw.get('src', 1)
++		bld=kw.get('bld', 0)
++		dir=kw.get('dir', 0)
++		excl = kw.get('excl', exclude_regs)
++		incl = k and k[0] or kw.get('incl', '**')
++
++		def to_pat(s):
++			lst = Utils.to_list(s)
++			ret = []
++			for x in lst:
++				x = x.replace('//', '/')
++				if x.endswith('/'):
++					x += '**'
++				lst2 = x.split('/')
++				accu = []
++				for k in lst2:
++					if k == '**':
++						accu.append(k)
++					else:
++						k = k.replace('.', '[.]').replace('*', '.*').replace('?', '.')
++						k = '^%s$' % k
++						#print "pattern", k
++						accu.append(re.compile(k))
++				ret.append(accu)
++			return ret
++
++		def filtre(name, nn):
++			ret = []
++			for lst in nn:
++				if not lst:
++					pass
++				elif lst[0] == '**':
++					ret.append(lst)
++					if len(lst) > 1:
++						if lst[1].match(name):
++							ret.append(lst[2:])
++					else:
++						ret.append([])
++				elif lst[0].match(name):
++					ret.append(lst[1:])
++			return ret
++
++		def accept(name, pats):
++			nacc = filtre(name, pats[0])
++			nrej = filtre(name, pats[1])
++			if [] in nrej:
++				nacc = []
++			return [nacc, nrej]
++
++		def ant_iter(nodi, maxdepth=25, pats=[]):
++			nodi.__class__.bld.rescan(nodi)
++			tmp = list(nodi.__class__.bld.cache_dir_contents[nodi.id])
++			tmp.sort()
++			for name in tmp:
++				npats = accept(name, pats)
++				if npats and npats[0]:
++					accepted = [] in npats[0]
++					#print accepted, nodi, name
++
++					node = nodi.find_resource(name)
++					if node and accepted:
++						if src and node.id & 3 == FILE:
++							yield node
++					else:
++						node = nodi.find_dir(name)
++						if node and node.id != nodi.__class__.bld.bldnode.id:
++							if accepted and dir:
++								yield node
++							if maxdepth:
++								for k in ant_iter(node, maxdepth=maxdepth - 1, pats=npats):
++									yield k
++			if bld:
++				for node in nodi.childs.values():
++					if node.id == nodi.__class__.bld.bldnode.id:
++						continue
++					if node.id & 3 == BUILD:
++						npats = accept(node.name, pats)
++						if npats and npats[0] and [] in npats[0]:
++							yield node
++			raise StopIteration
++
++		ret = [x for x in ant_iter(self, pats=[to_pat(incl), to_pat(excl)])]
++
++		if kw.get('flat', True):
++			return " ".join([x.relpath_gen(self) for x in ret])
++
++		return ret
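++	# usage sketch (illustrative, not upstream): from a wscript,
++	#   bld(source=bld.path.ant_glob('*.c'), ...)
++	# with the gotcha above: only folders present in the source dir are scanned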
++
++	def update_build_dir(self, env=None):
++
++		if not env:
++			for env in self.__class__.bld.all_envs.values():
++				self.update_build_dir(env)
++			return
++
++		path = self.abspath(env)
++
++		lst = Utils.listdir(path)
++		try:
++			self.__class__.bld.cache_dir_contents[self.id].update(lst)
++		except KeyError:
++			self.__class__.bld.cache_dir_contents[self.id] = set(lst)
++		self.__class__.bld.cache_scanned_folders[self.id] = True
++
++		for k in lst:
++			npath = path + os.sep + k
++			st = os.stat(npath)
++			if stat.S_ISREG(st[stat.ST_MODE]):
++				ick = self.find_or_declare(k)
++				if not (ick.id in self.__class__.bld.node_sigs[env.variant()]):
++					self.__class__.bld.node_sigs[env.variant()][ick.id] = Constants.SIG_NIL
++			elif stat.S_ISDIR(st[stat.ST_MODE]):
++				child = self.find_dir(k)
++				if not child:
++					child = self.ensure_dir_node_from_path(k)
++				child.update_build_dir(env)
++
++
++class Nodu(Node):
++	pass
++
+diff --git a/buildtools/wafadmin/Options.py b/buildtools/wafadmin/Options.py
+new file mode 100644
+index 0000000..c9ddcfe
+--- /dev/null
++++ b/buildtools/wafadmin/Options.py
+@@ -0,0 +1,288 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Scott Newton, 2005 (scottn)
++# Thomas Nagy, 2006 (ita)
++
++"Custom command-line options"
++
++import os, sys, imp, types, tempfile, optparse
++import Logs, Utils
++from Constants import *
++
++cmds = 'distclean configure build install clean uninstall check dist distcheck'.split()
++
++# TODO remove in waf 1.6 the following two
++commands = {}
++is_install = False
++
++options = {}
++arg_line = []
++launch_dir = ''
++tooldir = ''
++lockfile = os.environ.get('WAFLOCK', '.lock-wscript')
++try: cache_global = os.path.abspath(os.environ['WAFCACHE'])
++except KeyError: cache_global = ''
++platform = Utils.unversioned_sys_platform()
++conf_file = 'conf-runs-%s-%d.pickle' % (platform, ABI)
++
++remote_repo = ['http://waf.googlecode.com/svn/']
++"""remote directory for the plugins"""
++
++
++# Such a command-line should work:  JOBS=4 PREFIX=/opt/ DESTDIR=/tmp/ahoj/ waf configure
++default_prefix = os.environ.get('PREFIX')
++if not default_prefix:
++	if platform == 'win32':
++		d = tempfile.gettempdir()
++		default_prefix = d[0].upper() + d[1:]
++		# win32 preserves the case, but gettempdir does not
++	else: default_prefix = '/usr/local/'
++
++default_jobs = int(os.environ.get('JOBS', -1))
++if default_jobs < 1:
++	try:
++		if 'SC_NPROCESSORS_ONLN' in os.sysconf_names:
++			default_jobs = os.sysconf('SC_NPROCESSORS_ONLN')
++		else:
++			default_jobs = int(Utils.cmd_output(['sysctl', '-n', 'hw.ncpu']))
++	except:
++		if os.name == 'java': # platform.system() == 'Java'
++			from java.lang import Runtime
++			default_jobs = Runtime.getRuntime().availableProcessors()
++		else:
++			# environment var defined on win32
++			default_jobs = int(os.environ.get('NUMBER_OF_PROCESSORS', 1))
++
++default_destdir = os.environ.get('DESTDIR', '')
++
++def get_usage(self):
++	cmds_str = []
++	module = Utils.g_module
++	if module:
++		# create the help messages for commands
++		tbl = module.__dict__
++		keys = list(tbl.keys())
++		keys.sort()
++
++		if 'build' in tbl:
++			if not module.build.__doc__:
++				module.build.__doc__ = 'builds the project'
++		if 'configure' in tbl:
++			if not module.configure.__doc__:
++				module.configure.__doc__ = 'configures the project'
++
++		ban = ['set_options', 'init', 'shutdown']
++
++		optlst = [x for x in keys if not x in ban
++			and type(tbl[x]) is type(parse_args_impl)
++			and tbl[x].__doc__
++			and not x.startswith('_')]
++
++		just = max([len(x) for x in optlst])
++
++		for x in optlst:
++			cmds_str.append('  %s: %s' % (x.ljust(just), tbl[x].__doc__))
++		ret = '\n'.join(cmds_str)
++	else:
++		ret = ' '.join(cmds)
++	return '''waf [command] [options]
++
++Main commands (example: ./waf build -j4)
++%s
++''' % ret
++
++
++setattr(optparse.OptionParser, 'get_usage', get_usage)
++
++def create_parser(module=None):
++	Logs.debug('options: create_parser is called')
++	parser = optparse.OptionParser(conflict_handler="resolve", version = 'waf %s (%s)' % (WAFVERSION, WAFREVISION))
++
++	parser.formatter.width = Utils.get_term_cols()
++	p = parser.add_option
++
++	p('-j', '--jobs',
++		type    = 'int',
++		default = default_jobs,
++		help    = 'number of parallel jobs (%r)' % default_jobs,
++		dest    = 'jobs')
++
++	p('-k', '--keep',
++		action  = 'store_true',
++		default = False,
++		help    = 'keep running happily on independent task groups',
++		dest    = 'keep')
++
++	p('-v', '--verbose',
++		action  = 'count',
++		default = 0,
++		help    = 'verbosity level -v -vv or -vvv [default: 0]',
++		dest    = 'verbose')
++
++	p('--nocache',
++		action  = 'store_true',
++		default = False,
++		help    = 'ignore the WAFCACHE (if set)',
++		dest    = 'nocache')
++
++	p('--zones',
++		action  = 'store',
++		default = '',
++		help    = 'debugging zones (task_gen, deps, tasks, etc)',
++		dest    = 'zones')
++
++	p('-p', '--progress',
++		action  = 'count',
++		default = 0,
++		help    = '-p: progress bar; -pp: ide output',
++		dest    = 'progress_bar')
++
++	p('--targets',
++		action  = 'store',
++		default = '',
++		help    = 'build given task generators, e.g. "target1,target2"',
++		dest    = 'compile_targets')
++
++	gr = optparse.OptionGroup(parser, 'configuration options')
++	parser.add_option_group(gr)
++	gr.add_option('-b', '--blddir',
++		action  = 'store',
++		default = '',
++		help    = 'out dir for the project (configuration)',
++		dest    = 'blddir')
++	gr.add_option('-s', '--srcdir',
++		action  = 'store',
++		default = '',
++		help    = 'top dir for the project (configuration)',
++		dest    = 'srcdir')
++	gr.add_option('--prefix',
++		help    = 'installation prefix (configuration) [default: %r]' % default_prefix,
++		default = default_prefix,
++		dest    = 'prefix')
++
++	gr.add_option('--download',
++		action  = 'store_true',
++		default = False,
++		help    = 'try to download the tools if missing',
++		dest    = 'download')
++
++	gr = optparse.OptionGroup(parser, 'installation options')
++	parser.add_option_group(gr)
++	gr.add_option('--destdir',
++		help    = 'installation root [default: %r]' % default_destdir,
++		default = default_destdir,
++		dest    = 'destdir')
++	gr.add_option('-f', '--force',
++		action  = 'store_true',
++		default = False,
++		help    = 'force file installation',
++		dest    = 'force')
++
++	return parser
++
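++# usage sketch (illustrative, not upstream): project wscripts extend the parser
++# built above through Handler.add_option, e.g.
++#   def set_options(opt):
++#       opt.add_option('--foo', action='store_true', default=False, dest='foo')
++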
++def parse_args_impl(parser, _args=None):
++	global options, commands, arg_line
++	(options, args) = parser.parse_args(args=_args)
++
++	arg_line = args
++	#arg_line = args[:] # copy
++
++	# By default, 'waf' is equivalent to 'waf build'
++	commands = {}
++	for var in cmds: commands[var] = 0
++	if not args:
++		commands['build'] = 1
++		args.append('build')
++
++	# Parse the command arguments
++	for arg in args:
++		commands[arg] = True
++
++	# the check thing depends on the build
++	if 'check' in args:
++		idx = args.index('check')
++		try:
++			bidx = args.index('build')
++			if bidx > idx:
++				raise ValueError('build before check')
++		except ValueError, e:
++			args.insert(idx, 'build')
++
++	if args[0] != 'init':
++		args.insert(0, 'init')
++
++	# TODO -k => -j0
++	if options.keep: options.jobs = 1
++	if options.jobs < 1: options.jobs = 1
++
++	if 'install' in sys.argv or 'uninstall' in sys.argv:
++		# absolute path only if set
++		options.destdir = options.destdir and os.path.abspath(os.path.expanduser(options.destdir))
++
++	Logs.verbose = options.verbose
++	Logs.init_log()
++
++	if options.zones:
++		Logs.zones = options.zones.split(',')
++		if not Logs.verbose: Logs.verbose = 1
++	elif Logs.verbose > 0:
++		Logs.zones = ['runner']
++	if Logs.verbose > 2:
++		Logs.zones = ['*']
++
++# TODO waf 1.6
++# 1. rename the class to OptionsContext
++# 2. instead of a class attribute, use a module (static 'parser')
++# 3. parse_args_impl was made in times when we did not know about binding new methods to classes
++
++class Handler(Utils.Context):
++	"""loads wscript modules in folders for adding options
++	This class should be named 'OptionsContext'
++	A method named 'recurse' is bound when used by the module Scripting"""
++
++	parser = None
++	# make it possible to access the reference, like Build.bld
++
++	def __init__(self, module=None):
++		self.parser = create_parser(module)
++		self.cwd = os.getcwd()
++		Handler.parser = self
++
++	def add_option(self, *k, **kw):
++		self.parser.add_option(*k, **kw)
++
++	def add_option_group(self, *k, **kw):
++		return self.parser.add_option_group(*k, **kw)
++
++	def get_option_group(self, opt_str):
++		return self.parser.get_option_group(opt_str)
++
++	def sub_options(self, *k, **kw):
++		if not k: raise Utils.WscriptError('folder expected')
++		self.recurse(k[0], name='set_options')
++
++	def tool_options(self, *k, **kw):
++		Utils.python_24_guard()
++
++		if not k[0]:
++			raise Utils.WscriptError('invalid tool_options call %r %r' % (k, kw))
++		tools = Utils.to_list(k[0])
++
++		# TODO waf 1.6 remove the global variable tooldir
++		path = Utils.to_list(kw.get('tdir', kw.get('tooldir', tooldir)))
++
++		for tool in tools:
++			tool = tool.replace('++', 'xx')
++			if tool == 'java': tool = 'javaw'
++			if tool.lower() == 'unittest': tool = 'unittestw'
++			module = Utils.load_tool(tool, path)
++			try:
++				fun = module.set_options
++			except AttributeError:
++				pass
++			else:
++				fun(kw.get('option_group', self))
++
++	def parse_args(self, args=None):
++		parse_args_impl(self.parser, args)
++
+diff --git a/buildtools/wafadmin/Runner.py b/buildtools/wafadmin/Runner.py
+new file mode 100644
+index 0000000..94db0fb
+--- /dev/null
++++ b/buildtools/wafadmin/Runner.py
+@@ -0,0 +1,236 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2005-2008 (ita)
++
++"Execute the tasks"
++
++import os, sys, random, time, threading, traceback
++try: from Queue import Queue
++except ImportError: from queue import Queue
++import Build, Utils, Logs, Options
++from Logs import debug, error
++from Constants import *
++
++GAP = 15
++
++run_old = threading.Thread.run
++def run(*args, **kwargs):
++	try:
++		run_old(*args, **kwargs)
++	except (KeyboardInterrupt, SystemExit):
++		raise
++	except:
++		sys.excepthook(*sys.exc_info())
++threading.Thread.run = run
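++# note (descriptive, not upstream): the override above routes uncaught exceptions
++# in worker threads through sys.excepthook, so tracebacks are reported uniformly
++# instead of being swallowed by the default thread machinery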
++
++def process_task(tsk):
++
++	m = tsk.master
++	if m.stop:
++		m.out.put(tsk)
++		return
++
++	try:
++		tsk.generator.bld.printout(tsk.display())
++		if tsk.__class__.stat: ret = tsk.__class__.stat(tsk)
++		# actual call to task's run() function
++		else: ret = tsk.call_run()
++	except Exception, e:
++		tsk.err_msg = Utils.ex_stack()
++		tsk.hasrun = EXCEPTION
++
++		# TODO cleanup
++		m.error_handler(tsk)
++		m.out.put(tsk)
++		return
++
++	if ret:
++		tsk.err_code = ret
++		tsk.hasrun = CRASHED
++	else:
++		try:
++			tsk.post_run()
++		except Utils.WafError:
++			pass
++		except Exception:
++			tsk.err_msg = Utils.ex_stack()
++			tsk.hasrun = EXCEPTION
++		else:
++			tsk.hasrun = SUCCESS
++	if tsk.hasrun != SUCCESS:
++		m.error_handler(tsk)
++
++	m.out.put(tsk)
++
++class TaskConsumer(threading.Thread):
++	ready = Queue(0)
++	consumers = []
++
++	def __init__(self):
++		threading.Thread.__init__(self)
++		self.setDaemon(1)
++		self.start()
++
++	def run(self):
++		try:
++			self.loop()
++		except:
++			pass
++
++	def loop(self):
++		while 1:
++			tsk = TaskConsumer.ready.get()
++			process_task(tsk)
++
++class Parallel(object):
++	"""
++	keep the consumer threads busy, and avoid consuming cpu cycles
++	when no more tasks can be added (end of the build, etc)
++	"""
++	def __init__(self, bld, j=2):
++
++		# number of consumers
++		self.numjobs = j
++
++		self.manager = bld.task_manager
++		self.manager.current_group = 0
++
++		self.total = self.manager.total()
++
++		# tasks waiting to be processed - IMPORTANT
++		self.outstanding = []
++		self.maxjobs = MAXJOBS
++
++		# tasks that are waiting for another task to complete
++		self.frozen = []
++
++		# tasks returned by the consumers
++		self.out = Queue(0)
++
++		self.count = 0 # tasks not in the producer area
++
++		self.processed = 1 # progress indicator
++
++		self.stop = False # error condition to stop the build
++		self.error = False # error flag
++
++	def get_next(self):
++		"override this method to schedule the tasks in a particular order"
++		if not self.outstanding:
++			return None
++		return self.outstanding.pop(0)
++
++	def postpone(self, tsk):
++		"override this method to schedule the tasks in a particular order"
++		# TODO consider using a deque instead
++		if random.randint(0, 1):
++			self.frozen.insert(0, tsk)
++		else:
++			self.frozen.append(tsk)
++
++	def refill_task_list(self):
++		"called to set the next group of tasks"
++
++		while self.count > self.numjobs + GAP or self.count >= self.maxjobs:
++			self.get_out()
++
++		while not self.outstanding:
++			if self.count:
++				self.get_out()
++
++			if self.frozen:
++				self.outstanding += self.frozen
++				self.frozen = []
++			elif not self.count:
++				(jobs, tmp) = self.manager.get_next_set()
++				if jobs != None: self.maxjobs = jobs
++				if tmp: self.outstanding += tmp
++				break
++
++	def get_out(self):
++		"tasks handed to the consumer threads are collected back using get_out"
++		ret = self.out.get()
++		self.manager.add_finished(ret)
++		if not self.stop and getattr(ret, 'more_tasks', None):
++			self.outstanding += ret.more_tasks
++			self.total += len(ret.more_tasks)
++		self.count -= 1
++
++	def error_handler(self, tsk):
++		"by default, errors make the build stop (not thread safe so be careful)"
++		if not Options.options.keep:
++			self.stop = True
++		self.error = True
++
++	def start(self):
++		"execute the tasks"
++
++		if TaskConsumer.consumers:
++			# the worker pool is usually loaded lazily (see below)
++			# in case it is re-used with a different value of numjobs:
++			while len(TaskConsumer.consumers) < self.numjobs:
++				TaskConsumer.consumers.append(TaskConsumer())
++
++		while not self.stop:
++
++			self.refill_task_list()
++
++			# consider the next task
++			tsk = self.get_next()
++			if not tsk:
++				if self.count:
++					# tasks may add new ones after they are run
++					continue
++				else:
++					# no tasks to run, no tasks running, time to exit
++					break
++
++			if tsk.hasrun:
++				# if the task is marked as "run", just skip it
++				self.processed += 1
++				self.manager.add_finished(tsk)
++				continue
++
++			try:
++				st = tsk.runnable_status()
++			except Exception, e:
++				self.processed += 1
++				if self.stop and not Options.options.keep:
++					tsk.hasrun = SKIPPED
++					self.manager.add_finished(tsk)
++					continue
++				self.error_handler(tsk)
++				self.manager.add_finished(tsk)
++				tsk.hasrun = EXCEPTION
++				tsk.err_msg = Utils.ex_stack()
++				continue
++
++			if st == ASK_LATER:
++				self.postpone(tsk)
++			elif st == SKIP_ME:
++				self.processed += 1
++				tsk.hasrun = SKIPPED
++				self.manager.add_finished(tsk)
++			else:
++				# run me: put the task in ready queue
++				tsk.position = (self.processed, self.total)
++				self.count += 1
++				tsk.master = self
++				self.processed += 1
++
++				if self.numjobs == 1:
++					process_task(tsk)
++				else:
++					TaskConsumer.ready.put(tsk)
++					# create the consumer threads only if there is something to consume
++					if not TaskConsumer.consumers:
++						TaskConsumer.consumers = [TaskConsumer() for i in xrange(self.numjobs)]
++
++		# self.count represents the tasks that have been made available to the consumer threads
++		# collect all the tasks after an error, else the message may be incomplete
++		while self.error and self.count:
++			self.get_out()
++
++		#print loop
++		assert (self.count == 0 or self.stop)
++
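++# usage sketch (illustrative, not upstream): the build context typically drives
++# this scheduler with something like
++#   p = Parallel(bld, Options.options.jobs)
++#   p.start()
++# and raises a build error if p.error is set afterwards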
+diff --git a/buildtools/wafadmin/Scripting.py b/buildtools/wafadmin/Scripting.py
+new file mode 100644
+index 0000000..d975bd9
+--- /dev/null
++++ b/buildtools/wafadmin/Scripting.py
+@@ -0,0 +1,586 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2005 (ita)
++
++"Module called for configuring, compiling and installing targets"
++
++import os, sys, shutil, traceback, datetime, inspect, errno
++
++import Utils, Configure, Build, Logs, Options, Environment, Task
++from Logs import error, warn, info
++from Constants import *
++
++g_gz = 'bz2'
++commands = []
++
++def prepare_impl(t, cwd, ver, wafdir):
++	Options.tooldir = [t]
++	Options.launch_dir = cwd
++
++	# some command-line options can be processed immediately
++	if '--version' in sys.argv:
++		opt_obj = Options.Handler()
++		opt_obj.curdir = cwd
++		opt_obj.parse_args()
++		sys.exit(0)
++
++	# now find the wscript file
++	msg1 = 'Waf: Please run waf from a directory containing a file named "%s" or run distclean' % WSCRIPT_FILE
++
++	# in theory projects can be configured in an autotool-like manner:
++	# mkdir build && cd build && ../waf configure && ../waf
++	build_dir_override = None
++	candidate = None
++
++	lst = os.listdir(cwd)
++
++	search_for_candidate = True
++	if WSCRIPT_FILE in lst:
++		candidate = cwd
++
++	elif 'configure' in sys.argv and not WSCRIPT_BUILD_FILE in lst:
++		# autotool-like configuration
++		calldir = os.path.abspath(os.path.dirname(sys.argv[0]))
++		if WSCRIPT_FILE in os.listdir(calldir):
++			candidate = calldir
++			search_for_candidate = False
++		else:
++			error('arg[0] directory does not contain a wscript file')
++			sys.exit(1)
++		build_dir_override = cwd
++
++	# climb up to find a script if it is not found
++	while search_for_candidate:
++		if len(cwd) <= 3:
++			break # stop at / or c:
++		dirlst = os.listdir(cwd)
++		if WSCRIPT_FILE in dirlst:
++			candidate = cwd
++		if 'configure' in sys.argv and candidate:
++			break
++		if Options.lockfile in dirlst:
++			env = Environment.Environment()
++			try:
++				env.load(os.path.join(cwd, Options.lockfile))
++			except:
++				error('could not load %r' % Options.lockfile)
++			try:
++				os.stat(env['cwd'])
++			except:
++				candidate = cwd
++			else:
++				candidate = env['cwd']
++			break
++		cwd = os.path.dirname(cwd) # climb up
++
++	if not candidate:
++		# check if the user only wanted to display the help
++		if '-h' in sys.argv or '--help' in sys.argv:
++			warn('No wscript file found: the help message may be incomplete')
++			opt_obj = Options.Handler()
++			opt_obj.curdir = cwd
++			opt_obj.parse_args()
++		else:
++			error(msg1)
++		sys.exit(0)
++
++	# We have found wscript, but there is no guarantee that it is valid
++	try:
++		os.chdir(candidate)
++	except OSError:
++		raise Utils.WafError("the folder %r is unreadable" % candidate)
++
++	# define the main module containing the functions init, shutdown, ..
++	Utils.set_main_module(os.path.join(candidate, WSCRIPT_FILE))
++
++	if build_dir_override:
++		d = getattr(Utils.g_module, BLDDIR, None)
++		if d:
++			# test if user has set the blddir in wscript.
++			msg = ' Overriding build directory %s with %s' % (d, build_dir_override)
++			warn(msg)
++		Utils.g_module.blddir = build_dir_override
++
++	# bind a few methods and classes by default
++
++	def set_def(obj, name=''):
++		n = name or obj.__name__
++		if not n in Utils.g_module.__dict__:
++			setattr(Utils.g_module, n, obj)
++
++	for k in [dist, distclean, distcheck, clean, install, uninstall]:
++		set_def(k)
++
++	set_def(Configure.ConfigurationContext, 'configure_context')
++
++	for k in ['build', 'clean', 'install', 'uninstall']:
++		set_def(Build.BuildContext, k + '_context')
++
++	# now parse the options from the user wscript file
++	opt_obj = Options.Handler(Utils.g_module)
++	opt_obj.curdir = candidate
++	try:
++		f = Utils.g_module.set_options
++	except AttributeError:
++		pass
++	else:
++		opt_obj.sub_options([''])
++	opt_obj.parse_args()
++
++	if not 'init' in Utils.g_module.__dict__:
++		Utils.g_module.init = Utils.nada
++	if not 'shutdown' in Utils.g_module.__dict__:
++		Utils.g_module.shutdown = Utils.nada
++
++	main()
++
++def prepare(t, cwd, ver, wafdir):
++	if WAFVERSION != ver:
++		msg = 'Version mismatch: waf %s <> wafadmin %s (wafdir %s)' % (ver, WAFVERSION, wafdir)
++		print('\033[91mError: %s\033[0m' % msg)
++		sys.exit(1)
++
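++	# note (descriptive, not upstream): the lines below form a comment toggle; with
++	# the leading #""" the try block runs and the cProfile variant stays inside an
++	# unused string literal; swapping the markers profiles the run instead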
++	#"""
++	try:
++		prepare_impl(t, cwd, ver, wafdir)
++	except Utils.WafError, e:
++		error(str(e))
++		sys.exit(1)
++	except KeyboardInterrupt:
++		Utils.pprint('RED', 'Interrupted')
++		sys.exit(68)
++	"""
++	import cProfile, pstats
++	cProfile.runctx("import Scripting; Scripting.prepare_impl(t, cwd, ver, wafdir)", {},
++		{'t': t, 'cwd':cwd, 'ver':ver, 'wafdir':wafdir},
++		 'profi.txt')
++	p = pstats.Stats('profi.txt')
++	p.sort_stats('time').print_stats(45)
++	#"""
++
++def main():
++	global commands
++	commands = Options.arg_line[:]
++
++	while commands:
++		x = commands.pop(0)
++
++		ini = datetime.datetime.now()
++		if x == 'configure':
++			fun = configure
++		elif x == 'build':
++			fun = build
++		else:
++			fun = getattr(Utils.g_module, x, None)
++
++		if not fun:
++			raise Utils.WscriptError('No such command %r' % x)
++
++		ctx = getattr(Utils.g_module, x + '_context', Utils.Context)()
++
++		if x in ['init', 'shutdown', 'dist', 'distclean', 'distcheck']:
++			# compatibility TODO remove in waf 1.6
++			try:
++				fun(ctx)
++			except TypeError:
++				fun()
++		else:
++			fun(ctx)
++
++		ela = ''
++		if not Options.options.progress_bar:
++			ela = ' (%s)' % Utils.get_elapsed_time(ini)
++
++		if x != 'init' and x != 'shutdown':
++			info('%r finished successfully%s' % (x, ela))
++
++		if not commands and x != 'shutdown':
++			commands.append('shutdown')
++
++def configure(conf):
++
++	src = getattr(Options.options, SRCDIR, None)
++	if not src: src = getattr(Utils.g_module, SRCDIR, None)
++	if not src: src = getattr(Utils.g_module, 'top', None)
++	if not src:
++		src = '.'
++		incomplete_src = 1
++	src = os.path.abspath(src)
++
++	bld = getattr(Options.options, BLDDIR, None)
++	if not bld: bld = getattr(Utils.g_module, BLDDIR, None)
++	if not bld: bld = getattr(Utils.g_module, 'out', None)
++	if not bld:
++		bld = 'build'
++		incomplete_bld = 1
++	if bld == '.':
++		raise Utils.WafError('Setting blddir="." may cause distclean problems')
++	bld = os.path.abspath(bld)
++
++	try: os.makedirs(bld)
++	except OSError: pass
++
++	# It is not possible to compile specific targets in the configuration
++	# this may cause configuration errors if autoconfig is set
++	targets = Options.options.compile_targets
++	Options.options.compile_targets = None
++	Options.is_install = False
++
++	conf.srcdir = src
++	conf.blddir = bld
++	conf.post_init()
++
++	if 'incomplete_src' in vars():
++		conf.check_message_1('Setting srcdir to')
++		conf.check_message_2(src)
++	if 'incomplete_bld' in vars():
++		conf.check_message_1('Setting blddir to')
++		conf.check_message_2(bld)
++
++	# call the main wscript's configure()
++	conf.sub_config([''])
++
++	conf.store()
++
++	# this will write a configure lock so that subsequent builds will
++	# consider the current path as the root directory (see prepare_impl).
++	# to remove: use 'waf distclean'
++	env = Environment.Environment()
++	env[BLDDIR] = bld
++	env[SRCDIR] = src
++	env['argv'] = sys.argv
++	env['commands'] = Options.commands
++	env['options'] = Options.options.__dict__
++
++	# conf.hash & conf.files hold wscript files paths and hash
++	# (used only by Configure.autoconfig)
++	env['hash'] = conf.hash
++	env['files'] = conf.files
++	env['environ'] = dict(conf.environ)
++	env['cwd'] = os.path.split(Utils.g_module.root_path)[0]
++
++	if Utils.g_module.root_path != src:
++		# in case the source dir is somewhere else
++		env.store(os.path.join(src, Options.lockfile))
++
++	env.store(Options.lockfile)
++
++	Options.options.compile_targets = targets
++
++def clean(bld):
++	'''removes the build files'''
++	try:
++		proj = Environment.Environment(Options.lockfile)
++	except IOError:
++		raise Utils.WafError('Nothing to clean (project not configured)')
++
++	bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
++	bld.load_envs()
++
++	bld.is_install = 0 # False
++
++	# read the scripts - and set the path to the wscript path (useful for srcdir='/foo/bar')
++	bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
++
++	try:
++		bld.clean()
++	finally:
++		bld.save()
++
++def check_configured(bld):
++	if not Configure.autoconfig:
++		return bld
++
++	conf_cls = getattr(Utils.g_module, 'configure_context', Utils.Context)
++	bld_cls = getattr(Utils.g_module, 'build_context', Utils.Context)
++
++	def reconf(proj):
++		back = (Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose)
++
++		Options.commands = proj['commands']
++		Options.options.__dict__ = proj['options']
++		conf = conf_cls()
++		conf.environ = proj['environ']
++		configure(conf)
++
++		(Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose) = back
++
++	try:
++		proj = Environment.Environment(Options.lockfile)
++	except IOError:
++		conf = conf_cls()
++		configure(conf)
++	else:
++		try:
++			bld = bld_cls()
++			bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
++			bld.load_envs()
++		except Utils.WafError:
++			reconf(proj)
++			return bld_cls()
++
++	try:
++		proj = Environment.Environment(Options.lockfile)
++	except IOError:
++		raise Utils.WafError('Auto-config: project does not configure (bug)')
++
++	h = 0
++	try:
++		for file in proj['files']:
++			if file.endswith('configure'):
++				h = hash((h, Utils.readf(file)))
++			else:
++				mod = Utils.load_module(file)
++				h = hash((h, mod.waf_hash_val))
++	except (OSError, IOError):
++		warn('Reconfiguring the project: a file is unavailable')
++		reconf(proj)
++	else:
++		if (h != proj['hash']):
++			warn('Reconfiguring the project: the configuration has changed')
++			reconf(proj)
++
++	return bld_cls()
++
++def install(bld):
++	'''installs the build files'''
++	bld = check_configured(bld)
++
++	Options.commands['install'] = True
++	Options.commands['uninstall'] = False
++	Options.is_install = True
++
++	bld.is_install = INSTALL
++
++	build_impl(bld)
++	bld.install()
++
++def uninstall(bld):
++	'''removes the installed files'''
++	Options.commands['install'] = False
++	Options.commands['uninstall'] = True
++	Options.is_install = True
++
++	bld.is_install = UNINSTALL
++
++	try:
++		def runnable_status(self):
++			return SKIP_ME
++		setattr(Task.Task, 'runnable_status_back', Task.Task.runnable_status)
++		setattr(Task.Task, 'runnable_status', runnable_status)
++
++		build_impl(bld)
++		bld.install()
++	finally:
++		setattr(Task.Task, 'runnable_status', Task.Task.runnable_status_back)
++
++def build(bld):
++	bld = check_configured(bld)
++
++	Options.commands['install'] = False
++	Options.commands['uninstall'] = False
++	Options.is_install = False
++
++	bld.is_install = 0 # False
++
++	return build_impl(bld)
++
++def build_impl(bld):
++	# compile the project and/or install the files
++	try:
++		proj = Environment.Environment(Options.lockfile)
++	except IOError:
++		raise Utils.WafError("Project not configured (run 'waf configure' first)")
++
++	bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
++	bld.load_envs()
++
++	info("Waf: Entering directory `%s'" % bld.bldnode.abspath())
++	bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
++
++	# execute something immediately before the build starts
++	bld.pre_build()
++
++	try:
++		bld.compile()
++	finally:
++		if Options.options.progress_bar: print('')
++		info("Waf: Leaving directory `%s'" % bld.bldnode.abspath())
++
++	# execute something immediately after a successful build
++	bld.post_build()
++
++	bld.install()
++
++excludes = '.bzr .bzrignore .git .gitignore .svn CVS .cvsignore .arch-ids {arch} SCCS BitKeeper .hg _MTN _darcs Makefile Makefile.in config.log .gitattributes .hgignore .hgtags'.split()
++dist_exts = '~ .rej .orig .pyc .pyo .bak .tar.bz2 .tar.gz .zip .swp'.split()
++def dont_dist(name, src, build_dir):
++	global excludes, dist_exts
++
++	if (name.startswith(',,')
++		or name.startswith('++')
++		or name.startswith('.waf')
++		or (src == '.' and name == Options.lockfile)
++		or name in excludes
++		or name == build_dir
++		):
++		return True
++
++	for ext in dist_exts:
++		if name.endswith(ext):
++			return True
++
++	return False
++
++# like shutil.copytree, but excludes some files
++# and raises exceptions immediately
++def copytree(src, dst, build_dir):
++	names = os.listdir(src)
++	os.makedirs(dst)
++	for name in names:
++		srcname = os.path.join(src, name)
++		dstname = os.path.join(dst, name)
++
++		if dont_dist(name, src, build_dir):
++			continue
++
++		if os.path.isdir(srcname):
++			copytree(srcname, dstname, build_dir)
++		else:
++			shutil.copy2(srcname, dstname)
++
++# TODO in waf 1.6, change this method if "srcdir == blddir" is allowed
++def distclean(ctx=None):
++	'''removes the build directory'''
++	global commands
++	lst = os.listdir('.')
++	for f in lst:
++		if f == Options.lockfile:
++			try:
++				proj = Environment.Environment(f)
++			except:
++				Logs.warn('could not read %r' % f)
++				continue
++
++			try:
++				shutil.rmtree(proj[BLDDIR])
++			except IOError:
++				pass
++			except OSError, e:
++				if e.errno != errno.ENOENT:
++					Logs.warn('project %r cannot be removed' % proj[BLDDIR])
++
++			try:
++				os.remove(f)
++			except OSError, e:
++				if e.errno != errno.ENOENT:
++					Logs.warn('file %r cannot be removed' % f)
++
++		# remove the local waf cache
++		if not commands and f.startswith('.waf'):
++			shutil.rmtree(f, ignore_errors=True)
++
++# FIXME waf 1.6: use a single ctx parameter, and remove the optional appname and version
++def dist(appname='', version=''):
++	'''makes a tarball for redistributing the sources'''
++	# returns the archive file name
++	import tarfile
++
++	if not appname: appname = Utils.g_module.APPNAME
++	if not version: version = Utils.g_module.VERSION
++
++	tmp_folder = appname + '-' + version
++	if g_gz in ['gz', 'bz2']:
++		arch_name = tmp_folder + '.tar.' + g_gz
++	else:
++		arch_name = tmp_folder + '.zip'
++
++	# remove the previous dir
++	try:
++		shutil.rmtree(tmp_folder)
++	except (OSError, IOError):
++		pass
++
++	# remove the previous archive
++	try:
++		os.remove(arch_name)
++	except (OSError, IOError):
++		pass
++
++	# copy the files into the temporary folder
++	blddir = getattr(Utils.g_module, BLDDIR, None)
++	if not blddir:
++		blddir = getattr(Utils.g_module, 'out', None)
++	copytree('.', tmp_folder, blddir)
++
++	# undocumented hook for additional cleanup
++	dist_hook = getattr(Utils.g_module, 'dist_hook', None)
++	if dist_hook:
++		back = os.getcwd()
++		os.chdir(tmp_folder)
++		try:
++			dist_hook()
++		finally:
++			# go back to the root directory
++			os.chdir(back)
++
++	if g_gz in ['gz', 'bz2']:
++		tar = tarfile.open(arch_name, 'w:' + g_gz)
++		tar.add(tmp_folder)
++		tar.close()
++	else:
++		Utils.zip_folder(tmp_folder, arch_name, tmp_folder)
++
++	try: from hashlib import sha1 as sha
++	except ImportError: from sha import sha
++	try:
++		digest = " (sha=%r)" % sha(Utils.readf(arch_name)).hexdigest()
++	except:
++		digest = ''
++
++	info('New archive created: %s%s' % (arch_name, digest))
++
++	if os.path.exists(tmp_folder): shutil.rmtree(tmp_folder)
++	return arch_name
++
++# FIXME waf 1.6: use a single ctx parameter, and remove the optional appname and version
++def distcheck(appname='', version='', subdir=''):
++	'''checks if the sources compile (tarball from 'dist')'''
++	import tempfile, tarfile
++
++	if not appname: appname = Utils.g_module.APPNAME
++	if not version: version = Utils.g_module.VERSION
++
++	waf = os.path.abspath(sys.argv[0])
++	tarball = dist(appname, version)
++
++	path = appname + '-' + version
++
++	# remove any previous instance
++	if os.path.exists(path):
++		shutil.rmtree(path)
++
++	t = tarfile.open(tarball)
++	for x in t: t.extract(x)
++	t.close()
++
++	# build_path is the directory for the waf invocation
++	if subdir:
++		build_path = os.path.join(path, subdir)
++	else:
++		build_path = path
++
++	instdir = tempfile.mkdtemp('.inst', '%s-%s' % (appname, version))
++	ret = Utils.pproc.Popen([waf, 'configure', 'build', 'install', 'uninstall', '--destdir=' + instdir], cwd=build_path).wait()
++	if ret:
++		raise Utils.WafError('distcheck failed with code %i' % ret)
++
++	if os.path.exists(instdir):
++		raise Utils.WafError('distcheck succeeded, but files were left in %s' % instdir)
++
++	shutil.rmtree(path)
++
++# FIXME remove in Waf 1.6 (kept for compatibility)
++def add_subdir(dir, bld):
++	bld.recurse(dir, 'build')
++
+diff --git a/buildtools/wafadmin/Task.py b/buildtools/wafadmin/Task.py
+new file mode 100644
+index 0000000..5cda2ec
+--- /dev/null
++++ b/buildtools/wafadmin/Task.py
+@@ -0,0 +1,1200 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2005-2008 (ita)
++
++"""
++Running tasks in parallel is a simple problem, but in practice it is more complicated:
++* dependencies discovered during the build (dynamic task creation)
++* dependencies discovered after files are compiled
++* the amount of tasks and dependencies (graph size) can be huge
++
++This is why the dependency management is split into three different levels:
++1. groups of tasks that run all after another group of tasks
++2. groups of tasks that can be run in parallel
++3. tasks that can run in parallel, but with possible unknown ad-hoc dependencies
++
++The point #1 represents a strict sequential order between groups of tasks, for example a compiler is produced
++and used to compile the rest, whereas #2 and #3 represent partial order constraints where #2 applies to the kind of task
++and #3 applies to the task instances.
++
++#1 is held by the task manager: ordered list of TaskGroups (see bld.add_group)
++#2 is held by the task groups and the task types: precedence after/before (topological sort),
++   and the constraints extracted from file extensions
++#3 is held by the tasks individually (attribute run_after),
++   and the scheduler (Runner.py) uses Task::runnable_status to reorder the tasks
++
++--
++
++To try, use something like this in your code:
++import Constants, Task
++Task.algotype = Constants.MAXPARALLEL
++
++--
++
++There are two concepts with the tasks (individual units of change):
++* dependency (if 1 is recompiled, recompile 2)
++* order (run 2 after 1)
++
++example 1: if t1 depends on t2 and t2 depends on t3 it is not necessary to make t1 depend on t3 (dependency is transitive)
++example 2: if t1 depends on a node produced by t2, it is not immediately obvious that t1 must run after t2 (order is not obvious)
++
++The role of the Task Manager is to give the tasks in order (groups of tasks that may be run in parallel, one group after the other)
++
++"""
++
++import os, shutil, sys, re, random, datetime, tempfile, shlex
++from Utils import md5
++import Build, Runner, Utils, Node, Logs, Options
++from Logs import debug, warn, error
++from Constants import *
++
++algotype = NORMAL
++#algotype = JOBCONTROL
++#algotype = MAXPARALLEL
++
++COMPILE_TEMPLATE_SHELL = '''
++def f(task):
++	env = task.env
++	wd = getattr(task, 'cwd', None)
++	p = env.get_flat
++	cmd = \'\'\' %s \'\'\' % s
++	return task.exec_command(cmd, cwd=wd)
++'''
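++# note (assumption, not upstream): '% s' above is not a typo; the template is
++# %-formatted with two values by compile_fun_shell further down in this module,
++# and '% s' (the space flag) acts as the second conversion slot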
++
++COMPILE_TEMPLATE_NOSHELL = '''
++def f(task):
++	env = task.env
++	wd = getattr(task, 'cwd', None)
++	def to_list(xx):
++		if isinstance(xx, str): return [xx]
++		return xx
++	lst = []
++	%s
++	lst = [x for x in lst if x]
++	return task.exec_command(lst, cwd=wd)
++'''
++
++
++"""
++Enable different kinds of dependency algorithms:
++1 make groups: first compile all cpps and then compile all links (NORMAL)
++2 parallelize all (each link task run after its dependencies) (MAXPARALLEL)
++3 like 1 but provide additional constraints for the parallelization (MAXJOBS)
++
++In theory 1 will be faster than 2 for waf, but might be slower for builds
++Scheme 2 will not allow running tasks one by one, so it can cause disk thrashing on huge builds
++"""
++
++file_deps = Utils.nada
++"""
++Additional dependency pre-check may be added by replacing the function file_deps.
++e.g. extract_outputs, extract_deps below.
++"""
++
++class TaskManager(object):
++	"""The manager is attached to the build object, it holds a list of TaskGroup"""
++	def __init__(self):
++		self.groups = []
++		self.tasks_done = []
++		self.current_group = 0
++		self.groups_names = {}
++
++	def group_name(self, g):
++		"""name for the group g (utility)"""
++		if not isinstance(g, TaskGroup):
++			g = self.groups[g]
++		for x in self.groups_names:
++			if id(self.groups_names[x]) == id(g):
++				return x
++		return ''
++
++	def group_idx(self, tg):
++		"""group the task generator tg is in"""
++		se = id(tg)
++		for i in range(len(self.groups)):
++			g = self.groups[i]
++			for t in g.tasks_gen:
++				if id(t) == se:
++					return i
++		return None
++
++	def get_next_set(self):
++		"""return the next set of tasks to execute;
++		the first element of the returned tuple is the maximum number of parallel jobs"""
++		ret = None
++		while not ret and self.current_group < len(self.groups):
++			ret = self.groups[self.current_group].get_next_set()
++			if ret: return ret
++			else:
++				self.groups[self.current_group].process_install()
++				self.current_group += 1
++		return (None, None)
++
++	def add_group(self, name=None, set=True):
++		#if self.groups and not self.groups[0].tasks:
++		#	error('add_group: an empty group is already present')
++		g = TaskGroup()
++
++		if name and name in self.groups_names:
++			error('add_group: name %s already present' % name)
++		self.groups_names[name] = g
++		self.groups.append(g)
++		if set:
++			self.current_group = len(self.groups) - 1
++
++	def set_group(self, idx):
++		if isinstance(idx, str):
++			g = self.groups_names[idx]
++			for x in xrange(len(self.groups)):
++				if id(g) == id(self.groups[x]):
++					self.current_group = x
++		else:
++			self.current_group = idx
++
++	def add_task_gen(self, tgen):
++		if not self.groups: self.add_group()
++		self.groups[self.current_group].tasks_gen.append(tgen)
++
++	def add_task(self, task):
++		if not self.groups: self.add_group()
++		self.groups[self.current_group].tasks.append(task)
++
++	def total(self):
++		total = 0
++		if not self.groups: return 0
++		for group in self.groups:
++			total += len(group.tasks)
++		return total
++
++	def add_finished(self, tsk):
++		self.tasks_done.append(tsk)
++		bld = tsk.generator.bld
++		if bld.is_install:
++			f = None
++			if 'install' in tsk.__dict__:
++				f = tsk.__dict__['install']
++				# install=0 to prevent installation
++				if f: f(tsk)
++			else:
++				tsk.install()
++
++class TaskGroup(object):
++	"the compilation of one group does not begin until the previous group has finished (in the manager)"
++	def __init__(self):
++		self.tasks = [] # this list will be consumed
++		self.tasks_gen = []
++
++		self.cstr_groups = Utils.DefaultDict(list) # tasks having equivalent constraints
++		self.cstr_order = Utils.DefaultDict(set) # partial order between the cstr groups
++		self.temp_tasks = [] # tasks put on hold
++		self.ready = 0
++		self.post_funs = []
++
++	def reset(self):
++		"clears the state of the object (put back the tasks into self.tasks)"
++		for x in self.cstr_groups:
++			self.tasks += self.cstr_groups[x]
++		self.tasks = self.temp_tasks + self.tasks
++		self.temp_tasks = []
++		self.cstr_groups = Utils.DefaultDict(list)
++		self.cstr_order = Utils.DefaultDict(set)
++		self.ready = 0
++
++	def process_install(self):
++		for (f, k, kw) in self.post_funs:
++			f(*k, **kw)
++
++	def prepare(self):
++		"prepare the scheduling"
++		self.ready = 1
++		file_deps(self.tasks)
++		self.make_cstr_groups()
++		self.extract_constraints()
++
++	def get_next_set(self):
++		"next list of tasks to execute using max job settings, returns (maxjobs, task_list)"
++		global algotype
++		if algotype == NORMAL:
++			tasks = self.tasks_in_parallel()
++			maxj = MAXJOBS
++		elif algotype == JOBCONTROL:
++			(maxj, tasks) = self.tasks_by_max_jobs()
++		elif algotype == MAXPARALLEL:
++			tasks = self.tasks_with_inner_constraints()
++			maxj = MAXJOBS
++		else:
++			raise Utils.WafError("unknown algorithm type %s" % (algotype))
++
++		if not tasks: return ()
++		return (maxj, tasks)
++
++	def make_cstr_groups(self):
++		"unite the tasks that have similar constraints"
++		self.cstr_groups = Utils.DefaultDict(list)
++		for x in self.tasks:
++			h = x.hash_constraints()
++			self.cstr_groups[h].append(x)
++
++	def set_order(self, a, b):
++		self.cstr_order[a].add(b)
++
++	def compare_exts(self, t1, t2):
++		"extension production"
++		x = "ext_in"
++		y = "ext_out"
++		in_ = t1.attr(x, ())
++		out_ = t2.attr(y, ())
++		for k in in_:
++			if k in out_:
++				return -1
++		in_ = t2.attr(x, ())
++		out_ = t1.attr(y, ())
++		for k in in_:
++			if k in out_:
++				return 1
++		return 0
++
++	def compare_partial(self, t1, t2):
++		"partial relations after/before"
++		m = "after"
++		n = "before"
++		name = t2.__class__.__name__
++		if name in Utils.to_list(t1.attr(m, ())): return -1
++		elif name in Utils.to_list(t1.attr(n, ())): return 1
++		name = t1.__class__.__name__
++		if name in Utils.to_list(t2.attr(m, ())): return 1
++		elif name in Utils.to_list(t2.attr(n, ())): return -1
++		return 0
++
++	def extract_constraints(self):
++		"extract the parallelization constraints from the tasks with different constraints"
++		keys = self.cstr_groups.keys()
++		max = len(keys)
++		# hopefully the length of this list is short
++		for i in xrange(max):
++			t1 = self.cstr_groups[keys[i]][0]
++			for j in xrange(i + 1, max):
++				t2 = self.cstr_groups[keys[j]][0]
++
++				# add the constraints based on the comparisons
++				val = (self.compare_exts(t1, t2)
++					or self.compare_partial(t1, t2)
++					)
++				if val > 0:
++					self.set_order(keys[i], keys[j])
++				elif val < 0:
++					self.set_order(keys[j], keys[i])
++
++	def tasks_in_parallel(self):
++		"(NORMAL) next list of tasks that may be executed in parallel"
++
++		if not self.ready: self.prepare()
++
++		keys = self.cstr_groups.keys()
++
++		unconnected = []
++		remainder = []
++
++		for u in keys:
++			for k in self.cstr_order.values():
++				if u in k:
++					remainder.append(u)
++					break
++			else:
++				unconnected.append(u)
++
++		toreturn = []
++		for y in unconnected:
++			toreturn.extend(self.cstr_groups[y])
++
++		# remove stuff only after
++		for y in unconnected:
++			try: self.cstr_order.__delitem__(y)
++			except KeyError: pass
++			self.cstr_groups.__delitem__(y)
++
++		if not toreturn and remainder:
++			raise Utils.WafError("circular order constraint detected %r" % remainder)
++
++		return toreturn
++
++	def tasks_by_max_jobs(self):
++		"(JOBCONTROL) returns the tasks that can run in parallel with the max amount of jobs"
++		if not self.ready: self.prepare()
++		if not self.temp_tasks: self.temp_tasks = self.tasks_in_parallel()
++		if not self.temp_tasks: return (None, None)
++
++		maxjobs = MAXJOBS
++		ret = []
++		remaining = []
++		for t in self.temp_tasks:
++			m = getattr(t, "maxjobs", getattr(self.__class__, "maxjobs", MAXJOBS))
++			if m > maxjobs:
++				remaining.append(t)
++			elif m < maxjobs:
++				remaining += ret
++				ret = [t]
++				maxjobs = m
++			else:
++				ret.append(t)
++		self.temp_tasks = remaining
++		return (maxjobs, ret)
++
++	def tasks_with_inner_constraints(self):
++		"""(MAXPARALLEL) returns all tasks in this group, but adds the constraints on each task instance;
++		as an optimization, it might be desirable to discard the tasks which do not have to run"""
++		if not self.ready: self.prepare()
++
++		if getattr(self, "done", None): return None
++
++		for p in self.cstr_order:
++			for v in self.cstr_order[p]:
++				for m in self.cstr_groups[p]:
++					for n in self.cstr_groups[v]:
++						n.set_run_after(m)
++		self.cstr_order = Utils.DefaultDict(set)
++		self.cstr_groups = Utils.DefaultDict(list)
++		self.done = 1
++		return self.tasks[:] # make a copy
++
++class store_task_type(type):
++	"store the task types that have a name ending in _task into a map (remember the existing task types)"
++	def __init__(cls, name, bases, dict):
++		super(store_task_type, cls).__init__(name, bases, dict)
++		name = cls.__name__
++
++		if name.endswith('_task'):
++			name = name.replace('_task', '')
++		if name != 'TaskBase':
++			TaskBase.classes[name] = cls
++
++class TaskBase(object):
++	"""Base class for all Waf tasks
++
++	The most important methods are (by usual order of call):
++	1 runnable_status: ask the task if it should be run, skipped, or if we have to ask later
++	2 __str__: string to display to the user
++	3 run: execute the task
++	4 post_run: after the task is run, update the cache about the task
++
++	This class should be seen as an interface; it provides the very minimum necessary for the scheduler,
++	so it does not do much.
++
++	For illustration purposes, TaskBase instances try to execute self.fun (if provided)
++	"""
++
++	__metaclass__ = store_task_type
++
++	color = "GREEN"
++	maxjobs = MAXJOBS
++	classes = {}
++	stat = None
++
++	def __init__(self, *k, **kw):
++		self.hasrun = NOT_RUN
++
++		try:
++			self.generator = kw['generator']
++		except KeyError:
++			self.generator = self
++			self.bld = Build.bld
++
++		if kw.get('normal', 1):
++			self.generator.bld.task_manager.add_task(self)
++
++	def __repr__(self):
++		"used for debugging"
++		return '\n\t{task: %s %s}' % (self.__class__.__name__, str(getattr(self, "fun", "")))
++
++	def __str__(self):
++		"string to display to the user"
++		if hasattr(self, 'fun'):
++			return 'executing: %s\n' % self.fun.__name__
++		return self.__class__.__name__ + '\n'
++
++	def exec_command(self, *k, **kw):
++		"use this for executing commands from tasks"
++		# TODO in waf 1.6, eliminate bld.exec_command, and move the cwd processing to here
++		if self.env['env']:
++			kw['env'] = self.env['env']
++		return self.generator.bld.exec_command(*k, **kw)
++
++	def runnable_status(self):
++		"RUN_ME SKIP_ME or ASK_LATER"
++		return RUN_ME
++
++	def can_retrieve_cache(self):
++		return False
++
++	def call_run(self):
++		if self.can_retrieve_cache():
++			return 0
++		return self.run()
++
++	def run(self):
++		"called if the task must run"
++		if hasattr(self, 'fun'):
++			return self.fun(self)
++		return 0
++
++	def post_run(self):
++		"update the dependency tree (node stats)"
++		pass
++
++	def display(self):
++		"print either the description (using __str__) or the progress bar or the ide output"
++		col1 = Logs.colors(self.color)
++		col2 = Logs.colors.NORMAL
++
++		if Options.options.progress_bar == 1:
++			return self.generator.bld.progress_line(self.position[0], self.position[1], col1, col2)
++
++		if Options.options.progress_bar == 2:
++			ela = Utils.get_elapsed_time(self.generator.bld.ini)
++			try:
++				ins  = ','.join([n.name for n in self.inputs])
++			except AttributeError:
++				ins = ''
++			try:
++				outs = ','.join([n.name for n in self.outputs])
++			except AttributeError:
++				outs = ''
++			return '|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n' % (self.position[1], self.position[0], ins, outs, ela)
++
++		total = self.position[1]
++		n = len(str(total))
++		fs = '[%%%dd/%%%dd] %%s%%s%%s' % (n, n)
++		return fs % (self.position[0], self.position[1], col1, str(self), col2)
++
++	def attr(self, att, default=None):
++		"retrieve an attribute from the instance or from the class (micro-optimization here)"
++		ret = getattr(self, att, self)
++		if ret is self: return getattr(self.__class__, att, default)
++		return ret
++
++	def hash_constraints(self):
++		"identify a task type for all the constraints relevant for the scheduler: precedence, file production"
++		a = self.attr
++		sum = hash((self.__class__.__name__,
++			str(a('before', '')),
++			str(a('after', '')),
++			str(a('ext_in', '')),
++			str(a('ext_out', '')),
++			self.__class__.maxjobs))
++		return sum
++
++	def format_error(self):
++		"error message to display to the user (when a build fails)"
++		if getattr(self, "err_msg", None):
++			return self.err_msg
++		elif self.hasrun == CRASHED:
++			try:
++				return " -> task failed (err #%d): %r" % (self.err_code, self)
++			except AttributeError:
++				return " -> task failed: %r" % self
++		elif self.hasrun == MISSING:
++			return " -> missing files: %r" % self
++		else:
++			return ''
++
++	def install(self):
++		"""
++		installation is performed by looking at the task attributes:
++		* install_path: installation path like "${PREFIX}/bin"
++	* filename: install the first node in the outputs as a file with a particular name (be certain to use os.sep in it)
++		* chmod: permissions
++		"""
++		bld = self.generator.bld
++		d = self.attr('install')
++
++		if self.attr('install_path'):
++			lst = [a.relpath_gen(bld.srcnode) for a in self.outputs]
++			perm = self.attr('chmod', O644)
++			if self.attr('src'):
++				# if src is given, install the sources too
++				lst += [a.relpath_gen(bld.srcnode) for a in self.inputs]
++			if self.attr('filename'):
++				dir = self.install_path.rstrip(os.sep) + os.sep + self.attr('filename')
++				bld.install_as(dir, lst[0], self.env, perm)
++			else:
++				bld.install_files(self.install_path, lst, self.env, perm)
++
++class Task(TaskBase):
++	"""The parent class is quite limited, in this version:
++	* file system interaction: input and output nodes
++	* persistence: do not re-execute tasks that have already run
++	* caching: same files can be saved and retrieved from a cache directory
++	* dependencies:
++		implicit, like .c files depending on .h files
++		explicit, like the input nodes or the dep_nodes
++		environment variables, like the CXXFLAGS in self.env
++	"""
++	vars = []
++	def __init__(self, env, **kw):
++		TaskBase.__init__(self, **kw)
++		self.env = env
++
++		# inputs and outputs are nodes
++		# use setters when possible
++		self.inputs  = []
++		self.outputs = []
++
++		self.dep_nodes = []
++		self.run_after = []
++
++		# Additionally, you may define the following
++		#self.dep_vars  = 'PREFIX DATADIR'
++
++	def __str__(self):
++		"string to display to the user"
++		env = self.env
++		src_str = ' '.join([a.nice_path(env) for a in self.inputs])
++		tgt_str = ' '.join([a.nice_path(env) for a in self.outputs])
++		if self.outputs: sep = ' -> '
++		else: sep = ''
++		return '%s: %s%s%s\n' % (self.__class__.__name__.replace('_task', ''), src_str, sep, tgt_str)
++
++	def __repr__(self):
++		return "".join(['\n\t{task: ', self.__class__.__name__, " ", ",".join([x.name for x in self.inputs]), " -> ", ",".join([x.name for x in self.outputs]), '}'])
++
++	def unique_id(self):
++		"get a unique id: hash the node paths, the variant, the class, the function"
++		try:
++			return self.uid
++		except AttributeError:
++			"this is not a real hot zone, but we want to avoid surprises here"
++			m = md5()
++			up = m.update
++			up(self.__class__.__name__)
++			up(self.env.variant())
++			p = None
++			for x in self.inputs + self.outputs:
++				if p != x.parent.id:
++					p = x.parent.id
++					up(x.parent.abspath())
++				up(x.name)
++			self.uid = m.digest()
++			return self.uid
++
++	def set_inputs(self, inp):
++		if isinstance(inp, list): self.inputs += inp
++		else: self.inputs.append(inp)
++
++	def set_outputs(self, out):
++		if isinstance(out, list): self.outputs += out
++		else: self.outputs.append(out)
++
++	def set_run_after(self, task):
++		"set (scheduler) order on another task"
++		# TODO: handle list or object
++		assert isinstance(task, TaskBase)
++		self.run_after.append(task)
++
++	def add_file_dependency(self, filename):
++		"TODO user-provided file dependencies"
++		node = self.generator.bld.path.find_resource(filename)
++		self.dep_nodes.append(node)
++
++	def signature(self):
++		# compute the result once, and assume that the scan signature will give the right result
++		try: return self.cache_sig[0]
++		except AttributeError: pass
++
++		self.m = md5()
++
++		# explicit deps
++		exp_sig = self.sig_explicit_deps()
++
++		# env vars
++		var_sig = self.sig_vars()
++
++		# implicit deps
++
++		imp_sig = SIG_NIL
++		if self.scan:
++			try:
++				imp_sig = self.sig_implicit_deps()
++			except ValueError:
++				return self.signature()
++
++		# we now have the signature (first element) and the details (for debugging)
++		ret = self.m.digest()
++		self.cache_sig = (ret, exp_sig, imp_sig, var_sig)
++		return ret
++
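++	# Editor's note: the md5 above is updated in the order explicit deps,
++	# environment variables, then implicit deps, while cache_sig stores the
++	# partial digests as (total, explicit, implicit, vars) -- the same order
++	# debug_why() walks when reporting which part changed
++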
++	def runnable_status(self):
++		"SKIP_ME RUN_ME or ASK_LATER"
++		#return 0 # benchmarking
++
++		if self.inputs and (not self.outputs):
++			if not getattr(self.__class__, 'quiet', None):
++				warn("invalid task (no inputs OR outputs): override in a Task subclass or set the attribute 'quiet' %r" % self)
++
++		for t in self.run_after:
++			if not t.hasrun:
++				return ASK_LATER
++
++		env = self.env
++		bld = self.generator.bld
++
++		# first compute the signature
++		new_sig = self.signature()
++
++		# compare the signature to a signature computed previously
++		key = self.unique_id()
++		try:
++			prev_sig = bld.task_sigs[key][0]
++		except KeyError:
++			debug("task: task %r must run as it was never run before or the task code changed", self)
++			return RUN_ME
++
++		# compare the signatures of the outputs
++		for node in self.outputs:
++			variant = node.variant(env)
++			try:
++				if bld.node_sigs[variant][node.id] != new_sig:
++					return RUN_ME
++			except KeyError:
++				debug("task: task %r must run as the output nodes do not exist", self)
++				return RUN_ME
++
++		# debug if asked to
++		if Logs.verbose: self.debug_why(bld.task_sigs[key])
++
++		if new_sig != prev_sig:
++			return RUN_ME
++		return SKIP_ME
++
++	def post_run(self):
++		"called after a successful task run"
++		bld = self.generator.bld
++		env = self.env
++		sig = self.signature()
++		ssig = sig.encode('hex')
++
++		variant = env.variant()
++		for node in self.outputs:
++			# check if the node exists ..
++			try:
++				os.stat(node.abspath(env))
++			except OSError:
++				self.hasrun = MISSING
++				self.err_msg = '-> missing file: %r' % node.abspath(env)
++				raise Utils.WafError
++
++			# important, store the signature for the next run
++			bld.node_sigs[variant][node.id] = sig
++		bld.task_sigs[self.unique_id()] = self.cache_sig
++
++		# file caching, if possible
++		# try to avoid data corruption as much as possible
++		if not Options.cache_global or Options.options.nocache or not self.outputs:
++			return None
++
++		if getattr(self, 'cached', None):
++			return None
++
++		dname = os.path.join(Options.cache_global, ssig)
++		tmpdir = tempfile.mkdtemp(prefix=Options.cache_global + os.sep + 'waf')
++
++		try:
++			shutil.rmtree(dname)
++		except:
++			pass
++
++		try:
++			i = 0
++			for node in self.outputs:
++				variant = node.variant(env)
++				dest = os.path.join(tmpdir, str(i) + node.name)
++				shutil.copy2(node.abspath(env), dest)
++				i += 1
++		except (OSError, IOError):
++			try:
++				shutil.rmtree(tmpdir)
++			except:
++				pass
++		else:
++			try:
++				os.rename(tmpdir, dname)
++			except OSError:
++				try:
++					shutil.rmtree(tmpdir)
++				except:
++					pass
++			else:
++				try:
++					os.chmod(dname, O755)
++				except:
++					pass
++
++	def can_retrieve_cache(self):
++		"""
++		Retrieve build nodes from the cache
++		update the file timestamps to help clean the least-used entries from the cache
++		additionally, set an attribute 'cached' to avoid re-creating the same cache files
++
++		suppose there are files in cache/dir1/file1 and cache/dir2/file2
++		first, read the timestamp of dir1
++		then try to copy the files
++		then look at the timestamp again; if it has changed, the data may have been corrupted (cache updated by another process)
++		should an exception occur, ignore the data
++		"""
++		if not Options.cache_global or Options.options.nocache or not self.outputs:
++			return None
++
++		env = self.env
++		sig = self.signature()
++		ssig = sig.encode('hex')
++
++		# first try to access the cache folder for the task
++		dname = os.path.join(Options.cache_global, ssig)
++		try:
++			t1 = os.stat(dname).st_mtime
++		except OSError:
++			return None
++
++		i = 0
++		for node in self.outputs:
++			variant = node.variant(env)
++
++			orig = os.path.join(dname, str(i) + node.name)
++			try:
++				shutil.copy2(orig, node.abspath(env))
++				# mark the cache file as used recently (modified)
++				os.utime(orig, None)
++			except (OSError, IOError):
++				debug('task: failed retrieving file')
++				return None
++			i += 1
++
++		# is it the same folder?
++		try:
++			t2 = os.stat(dname).st_mtime
++		except OSError:
++			return None
++
++		if t1 != t2:
++			return None
++
++		for node in self.outputs:
++			self.generator.bld.node_sigs[variant][node.id] = sig
++			if Options.options.progress_bar < 1:
++				self.generator.bld.printout('restoring from cache %r\n' % node.bldpath(env))
++
++		self.cached = True
++		return 1
++
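++	# Cache layout recap (editor's note, derived from the code above): a task
++	# with hex signature ssig producing out1.o and out2.o is stored as
++	#	<Options.cache_global>/<ssig>/0out1.o
++	#	<Options.cache_global>/<ssig>/1out2.o
++	# post_run() fills the folder atomically via os.rename(); this method
++	# copies the files back and re-reads the folder mtime to detect a
++	# concurrent update
++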
++	def debug_why(self, old_sigs):
++		"explains why a task is run"
++
++		new_sigs = self.cache_sig
++		def v(x):
++			return x.encode('hex')
++
++		debug("Task %r", self)
++		msgs = ['Task must run', '* Source file or manual dependency', '* Implicit dependency', '* Environment variable']
++		tmp = 'task: -> %s: %s %s'
++		for x in xrange(len(msgs)):
++			if (new_sigs[x] != old_sigs[x]):
++				debug(tmp, msgs[x], v(old_sigs[x]), v(new_sigs[x]))
++
++	def sig_explicit_deps(self):
++		bld = self.generator.bld
++		up = self.m.update
++
++		# the inputs
++		for x in self.inputs + getattr(self, 'dep_nodes', []):
++			if not x.parent.id in bld.cache_scanned_folders:
++				bld.rescan(x.parent)
++
++			variant = x.variant(self.env)
++			try:
++				up(bld.node_sigs[variant][x.id])
++			except KeyError:
++				raise Utils.WafError('Missing node signature for %r (required by %r)' % (x, self))
++
++		# manual dependencies, they can slow down the builds
++		if bld.deps_man:
++			additional_deps = bld.deps_man
++			for x in self.inputs + self.outputs:
++				try:
++					d = additional_deps[x.id]
++				except KeyError:
++					continue
++
++				for v in d:
++					if isinstance(v, Node.Node):
++						bld.rescan(v.parent)
++						variant = v.variant(self.env)
++						try:
++							v = bld.node_sigs[variant][v.id]
++						except KeyError:
++							raise Utils.WafError('Missing node signature for %r (required by %r)' % (v, self))
++					elif hasattr(v, '__call__'):
++						v = v() # dependency is a function, call it
++					up(v)
++
++		for x in self.dep_nodes:
++			v = bld.node_sigs[x.variant(self.env)][x.id]
++			up(v)
++
++		return self.m.digest()
++
++	def sig_vars(self):
++		bld = self.generator.bld
++		env = self.env
++
++		# dependencies on the environment vars
++		act_sig = bld.hash_env_vars(env, self.__class__.vars)
++		self.m.update(act_sig)
++
++		# additional variable dependencies, if provided
++		dep_vars = getattr(self, 'dep_vars', None)
++		if dep_vars:
++			self.m.update(bld.hash_env_vars(env, dep_vars))
++
++		return self.m.digest()
++
++	#def scan(self, node):
++	#	"""this method returns a tuple containing:
++	#	* a list of nodes corresponding to real files
++	#	* a list of names for files not found in path_lst
++	#	the input parameters may have more parameters than the ones used below
++	#	"""
++	#	return ((), ())
++	scan = None
++
++	# compute the signature, recompute it if there is no match in the cache
++	def sig_implicit_deps(self):
++		"the signature obtained may be stale if the files have changed, so we do this in two steps"
++
++		bld = self.generator.bld
++
++		# get the task signatures from previous runs
++		key = self.unique_id()
++		prev_sigs = bld.task_sigs.get(key, ())
++		if prev_sigs:
++			try:
++				# for issue #379
++				if prev_sigs[2] == self.compute_sig_implicit_deps():
++					return prev_sigs[2]
++			except (KeyError, OSError):
++				pass
++			del bld.task_sigs[key]
++			raise ValueError('rescan')
++
++		# no previous run or the signature of the dependencies has changed, rescan the dependencies
++		(nodes, names) = self.scan()
++		if Logs.verbose:
++			debug('deps: scanner for %s returned %s %s', str(self), str(nodes), str(names))
++
++		# store the dependencies in the cache
++		bld.node_deps[key] = nodes
++		bld.raw_deps[key] = names
++
++		# recompute the signature and return it
++		try:
++			sig = self.compute_sig_implicit_deps()
++		except KeyError:
++			try:
++				nodes = []
++				for k in bld.node_deps.get(self.unique_id(), []):
++					if k.id & 3 == 2: # Node.FILE:
++						if not k.id in bld.node_sigs[0]:
++							nodes.append(k)
++					else:
++						if not k.id in bld.node_sigs[self.env.variant()]:
++							nodes.append(k)
++			except:
++				nodes = '?'
++			raise Utils.WafError('Missing node signature for %r (for implicit dependencies %r)' % (nodes, self))
++
++		return sig
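++
++	# Editor's note: the ValueError('rescan') raised above is caught in
++	# signature(), which calls this method again with the stale cache entry
++	# removed -- that is the "two steps" the docstring refers to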
++
++	def compute_sig_implicit_deps(self):
++		"""it is intended for .cpp and inferred .h files
++		there is a single list (no tree traversal)
++		this is the hot spot so ... do not touch"""
++		upd = self.m.update
++
++		bld = self.generator.bld
++		tstamp = bld.node_sigs
++		env = self.env
++
++		for k in bld.node_deps.get(self.unique_id(), []):
++			# unlikely but necessary if it happens
++			if not k.parent.id in bld.cache_scanned_folders:
++				# if the parent folder is removed, an OSError may be thrown
++				bld.rescan(k.parent)
++
++			# if the parent folder is removed, a KeyError will be thrown
++			if k.id & 3 == 2: # Node.FILE:
++				upd(tstamp[0][k.id])
++			else:
++				upd(tstamp[env.variant()][k.id])
++
++		return self.m.digest()
++
++def funex(c):
++	dc = {}
++	exec(c, dc)
++	return dc['f']
++
++reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})", re.M)
++def compile_fun_shell(name, line):
++	"""Compiles a string (once) into a function, eg:
++	simple_task_type('c++', '${CXX} -o ${TGT[0]} ${SRC} -I ${SRC[0].parent.bldpath()}')
++
++	The env variables (CXX, ..) on the task must not hold dicts (order)
++	The reserved keywords TGT and SRC represent the task input and output nodes
++
++	quick test:
++	bld(source='wscript', rule='echo "foo\\${SRC[0].name}\\bar"')
++	"""
++
++	extr = []
++	def repl(match):
++		g = match.group
++		if g('dollar'): return "$"
++		elif g('backslash'): return '\\\\'
++		elif g('subst'): extr.append((g('var'), g('code'))); return "%s"
++		return None
++
++	line = reg_act.sub(repl, line) or line
++
++	parm = []
++	dvars = []
++	app = parm.append
++	for (var, meth) in extr:
++		if var == 'SRC':
++			if meth: app('task.inputs%s' % meth)
++			else: app('" ".join([a.srcpath(env) for a in task.inputs])')
++		elif var == 'TGT':
++			if meth: app('task.outputs%s' % meth)
++			else: app('" ".join([a.bldpath(env) for a in task.outputs])')
++		else:
++			if not var in dvars: dvars.append(var)
++			app("p('%s')" % var)
++	if parm: parm = "%% (%s) " % (',\n\t\t'.join(parm))
++	else: parm = ''
++
++	c = COMPILE_TEMPLATE_SHELL % (line, parm)
++
++	debug('action: %s', c)
++	return (funex(c), dvars)
++
++def compile_fun_noshell(name, line):
++
++	extr = []
++	def repl(match):
++		g = match.group
++		if g('dollar'): return "$"
++		elif g('subst'): extr.append((g('var'), g('code'))); return "<<|@|>>"
++		return None
++
++	line2 = reg_act.sub(repl, line)
++	params = line2.split('<<|@|>>')
++
++	buf = []
++	dvars = []
++	app = buf.append
++	for x in xrange(len(extr)):
++		params[x] = params[x].strip()
++		if params[x]:
++			app("lst.extend(%r)" % params[x].split())
++		(var, meth) = extr[x]
++		if var == 'SRC':
++			if meth: app('lst.append(task.inputs%s)' % meth)
++			else: app("lst.extend([a.srcpath(env) for a in task.inputs])")
++		elif var == 'TGT':
++			if meth: app('lst.append(task.outputs%s)' % meth)
++			else: app("lst.extend([a.bldpath(env) for a in task.outputs])")
++		else:
++			app('lst.extend(to_list(env[%r]))' % var)
++			if not var in dvars: dvars.append(var)
++
++	if params[-1]:
++		app("lst.extend(%r)" % shlex.split(params[-1]))
++
++	fun = COMPILE_TEMPLATE_NOSHELL % "\n\t".join(buf)
++	debug('action: %s', fun)
++	return (funex(fun), dvars)
++
++def compile_fun(name, line, shell=None):
++	"commands can be launched by the shell or not"
++	if line.find('<') > 0 or line.find('>') > 0 or line.find('&&') > 0:
++		shell = True
++	#else:
++	#	shell = False
++
++	if shell is None:
++		if sys.platform == 'win32':
++			shell = False
++		else:
++			shell = True
++
++	if shell:
++		return compile_fun_shell(name, line)
++	else:
++		return compile_fun_noshell(name, line)
++
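++# Worked example (editor's illustration): for a rule such as
++#	'${CC} ${CCFLAGS} ${SRC} -o ${TGT}'
++# reg_act extracts [('CC', ''), ('CCFLAGS', ''), ('SRC', ''), ('TGT', '')];
++# compile_fun_shell builds one format string executed through the shell,
++# compile_fun_noshell builds an argument list passed to exec_command
++# directly, and only CC and CCFLAGS end up in dvars (SRC/TGT are handled
++# specially), so only they influence the task signature
++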
++def simple_task_type(name, line, color='GREEN', vars=[], ext_in=[], ext_out=[], before=[], after=[], shell=None):
++	"""return a new Task subclass with the function run compiled from the line given"""
++	(fun, dvars) = compile_fun(name, line, shell)
++	fun.code = line
++	return task_type_from_func(name, fun, vars or dvars, color, ext_in, ext_out, before, after)
++
++def task_type_from_func(name, func, vars=[], color='GREEN', ext_in=[], ext_out=[], before=[], after=[]):
++	"""return a new Task subclass whose run method is set to the function given"""
++	params = {
++		'run': func,
++		'vars': vars,
++		'color': color,
++		'name': name,
++		'ext_in': Utils.to_list(ext_in),
++		'ext_out': Utils.to_list(ext_out),
++		'before': Utils.to_list(before),
++		'after': Utils.to_list(after),
++	}
++
++	cls = type(Task)(name, (Task,), params)
++	TaskBase.classes[name] = cls
++	return cls
++
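++# Example usage from a waf tool (editor's sketch; the function and the names
++# are illustrative, not upstream code):
++#	def run_strip(task):
++#		return task.exec_command(['strip', task.inputs[0].abspath(task.env)])
++#	cls = Task.task_type_from_func('strip', run_strip, color='BLUE', ext_in='.bin')
++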
++def always_run(cls):
++	"""Set all task instances of this class to be executed whenever a build is started
++	The task signature is calculated, but the result of the comparison between
++	task signatures is bypassed
++	"""
++	old = cls.runnable_status
++	def always(self):
++		ret = old(self)
++		if ret == SKIP_ME:
++			return RUN_ME
++		return ret
++	cls.runnable_status = always
++
++def update_outputs(cls):
++	"""When a command is always run, it is possible that the outputs only change
++	sometimes. By default the build nodes have as hash the signature of the task,
++	which may not change. With this, the output nodes (produced) are hashed,
++	and the hashes are set on the build nodes
++
++	This may avoid unnecessary recompilations, but it uses more resources
++	(hashing the output files) so it is not used by default
++	"""
++	old_post_run = cls.post_run
++	def post_run(self):
++		old_post_run(self)
++		bld = self.generator.bld
++		for output in self.outputs:
++			bld.node_sigs[self.env.variant()][output.id] = Utils.h_file(output.abspath(self.env))
++			bld.task_sigs[output.id] = self.unique_id()
++	cls.post_run = post_run
++
++	old_runnable_status = cls.runnable_status
++	def runnable_status(self):
++		status = old_runnable_status(self)
++		if status != RUN_ME:
++			return status
++
++		uid = self.unique_id()
++		try:
++			bld = self.outputs[0].__class__.bld
++			new_sig  = self.signature()
++			prev_sig = bld.task_sigs[uid][0]
++			if prev_sig == new_sig:
++				for x in self.outputs:
++					if not x.id in bld.node_sigs[self.env.variant()]:
++						return RUN_ME
++					if bld.task_sigs[x.id] != uid: # ensure the outputs are associated with *this* task
++						return RUN_ME
++				return SKIP_ME
++		except KeyError:
++			pass
++		except IndexError:
++			pass
++		return RUN_ME
++	cls.runnable_status = runnable_status
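++
++# Editor's note: always_run and update_outputs back the 'always' and
++# 'on_results' attributes handled by TaskGen.exec_rule (see the TaskGen.py
++# hunk below in this patch)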
++
++def extract_outputs(tasks):
++	"""file_deps: Infer additional dependencies from task input and output nodes
++	"""
++	v = {}
++	for x in tasks:
++		try:
++			(ins, outs) = v[x.env.variant()]
++		except KeyError:
++			ins = {}
++			outs = {}
++			v[x.env.variant()] = (ins, outs)
++
++		for a in getattr(x, 'inputs', []):
++			try: ins[a.id].append(x)
++			except KeyError: ins[a.id] = [x]
++		for a in getattr(x, 'outputs', []):
++			try: outs[a.id].append(x)
++			except KeyError: outs[a.id] = [x]
++
++	for (ins, outs) in v.values():
++		links = set(ins.iterkeys()).intersection(outs.iterkeys())
++		for k in links:
++			for a in ins[k]:
++				for b in outs[k]:
++					a.set_run_after(b)
++
++def extract_deps(tasks):
++	"""file_deps: Infer additional dependencies from task input and output nodes and from implicit dependencies
++	returned by the scanners - that will only work if all tasks are created
++
++	this is aimed at people who have pathological builds and who do not care enough
++	to implement the build dependencies properly
++
++	with two loops over the list of tasks, do not expect this to be really fast
++	"""
++
++	# first reuse the function above
++	extract_outputs(tasks)
++
++	# map the output nodes to the tasks producing them
++	out_to_task = {}
++	for x in tasks:
++		v = x.env.variant()
++		try:
++			lst = x.outputs
++		except AttributeError:
++			pass
++		else:
++			for node in lst:
++				out_to_task[(v, node.id)] = x
++
++	# map the dependencies found to the tasks compiled
++	dep_to_task = {}
++	for x in tasks:
++		try:
++			x.signature()
++		except: # this is on purpose
++			pass
++
++		v = x.env.variant()
++		key = x.unique_id()
++		for k in x.generator.bld.node_deps.get(x.unique_id(), []):
++			try: dep_to_task[(v, k.id)].append(x)
++			except KeyError: dep_to_task[(v, k.id)] = [x]
++
++	# now get the intersection
++	deps = set(dep_to_task.keys()).intersection(set(out_to_task.keys()))
++
++	# and add the dependencies from task to task
++	for idx in deps:
++		for k in dep_to_task[idx]:
++			k.set_run_after(out_to_task[idx])
++
++	# cleanup, remove the signatures
++	for x in tasks:
++		try:
++			delattr(x, 'cache_sig')
++		except AttributeError:
++			pass
++
+diff --git a/buildtools/wafadmin/TaskGen.py b/buildtools/wafadmin/TaskGen.py
+new file mode 100644
+index 0000000..ae1834a
+--- /dev/null
++++ b/buildtools/wafadmin/TaskGen.py
+@@ -0,0 +1,612 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2005-2008 (ita)
++
++"""
++The class task_gen encapsulates the creation of task objects (low-level code)
++The instances can have various parameters, but the creation of task nodes (Task.py)
++is delayed. To achieve this, various methods are called from the method "apply"
++
++The class task_gen contains lots of methods, and a configuration table:
++* the methods to call (self.meths) can be specified dynamically (removing, adding, ..)
++* the order of the methods (self.prec or by default task_gen.prec) is configurable
++* new methods can be inserted dynamically without pasting old code
++
++Additionally, task_gen provides the method apply_core
++* file extensions are mapped to methods: def meth(self, name_or_node)
++* if a mapping is not found in self.mappings, it is searched in task_gen.mappings
++* when called, the functions may modify self.allnodes to re-add source to process
++* the mappings can map an extension or a filename (see the code below)
++
++WARNING: subclasses must reimplement the clone method
++"""
++
++import os, traceback, copy
++import Build, Task, Utils, Logs, Options
++from Logs import debug, error, warn
++from Constants import *
++
++typos = {
++'sources':'source',
++'targets':'target',
++'include':'includes',
++'define':'defines',
++'importpath':'importpaths',
++'install_var':'install_path',
++'install_subdir':'install_path',
++'inst_var':'install_path',
++'inst_dir':'install_path',
++'feature':'features',
++}
++
++class register_obj(type):
++	"""no decorators for classes, so we use a metaclass
++	we store into task_gen.classes the classes that inherit task_gen
++	and whose names end in '_taskgen'
++	"""
++	def __init__(cls, name, bases, dict):
++		super(register_obj, cls).__init__(name, bases, dict)
++		name = cls.__name__
++		suffix = '_taskgen'
++		if name.endswith(suffix):
++			task_gen.classes[name.replace(suffix, '')] = cls
++
++class task_gen(object):
++	"""
++	Most methods are of the form 'def meth(self):' without any parameters
++	there are many of them, and they do many different things:
++	* task creation
++	* task results installation
++	* environment modification
++	* attribute addition/removal
++
++	The inheritance approach is complicated
++	* mixing several languages at once
++	* subclassing is needed even for small changes
++	* inserting new methods is complicated
++
++	This new class uses a configuration table:
++	* adding new methods easily
++	* obtaining the order in which to call the methods
++	* postponing the method calls (post() -> apply)
++
++	Additionally, a 'traits' static attribute is provided:
++	* this list contains methods
++	* the methods can remove or add methods from self.meths
++	Example1: the attribute 'staticlib' is set on an instance
++	a method set in the list of traits is executed when the
++	instance is posted, it finds that flag and adds another method for execution
++	Example2: a method set in the list of traits finds the msvc
++	compiler (from self.env['MSVC']==1); more methods are added to self.meths
++	"""
++
++	__metaclass__ = register_obj
++	mappings = {}
++	mapped = {}
++	prec = Utils.DefaultDict(list)
++	traits = Utils.DefaultDict(set)
++	classes = {}
++
++	def __init__(self, *kw, **kwargs):
++		self.prec = Utils.DefaultDict(list)
++		"map precedence of function names to call"
++		# so we will have to play with directed acyclic graphs
++		# detect cycles, etc
++
++		self.source = ''
++		self.target = ''
++
++		# list of methods to execute - do not touch it by hand unless you know what you are doing
++		self.meths = []
++
++		# list of mappings extension -> function
++		self.mappings = {}
++
++		# list of features (see the documentation on traits)
++		self.features = list(kw)
++
++		# not always a good idea
++		self.tasks = []
++
++		self.default_chmod = O644
++		self.default_install_path = None
++
++		# kind of private, beware of what you put in it, also, the contents are consumed
++		self.allnodes = []
++
++		self.bld = kwargs.get('bld', Build.bld)
++		self.env = self.bld.env.copy()
++
++		self.path = self.bld.path # emulate chdir when reading scripts
++		self.name = '' # give a name to the target (static+shlib with the same targetname ambiguity)
++
++		# provide a unique id
++		self.idx = self.bld.idx[self.path.id] = self.bld.idx.get(self.path.id, 0) + 1
++
++		for key, val in kwargs.iteritems():
++			setattr(self, key, val)
++
++		self.bld.task_manager.add_task_gen(self)
++		self.bld.all_task_gen.append(self)
++
++	def __str__(self):
++		return ("<task_gen '%s' of type %s defined in %s>"
++			% (self.name or self.target, self.__class__.__name__, str(self.path)))
++
++	def __setattr__(self, name, attr):
++		real = typos.get(name, name)
++		if real != name:
++			warn('typo %s -> %s' % (name, real))
++			if Logs.verbose > 0:
++				traceback.print_stack()
++		object.__setattr__(self, real, attr)
++
++	def to_list(self, value):
++		"helper: returns a list"
++		if isinstance(value, str): return value.split()
++		else: return value
++
++	def apply(self):
++		"order the methods to execute using self.prec or task_gen.prec"
++		keys = set(self.meths)
++
++		# add the methods listed in the features
++		self.features = Utils.to_list(self.features)
++		for x in self.features + ['*']:
++			st = task_gen.traits[x]
++			if not st:
++				warn('feature %r does not exist - bind at least one method to it' % x)
++			keys.update(st)
++
++		# copy the precedence table
++		prec = {}
++		prec_tbl = self.prec or task_gen.prec
++		for x in prec_tbl:
++			if x in keys:
++				prec[x] = prec_tbl[x]
++
++		# elements disconnected
++		tmp = []
++		for a in keys:
++			for x in prec.values():
++				if a in x: break
++			else:
++				tmp.append(a)
++
++		# topological sort
++		out = []
++		while tmp:
++			e = tmp.pop()
++			if e in keys: out.append(e)
++			try:
++				nlst = prec[e]
++			except KeyError:
++				pass
++			else:
++				del prec[e]
++				for x in nlst:
++					for y in prec:
++						if x in prec[y]:
++							break
++					else:
++						tmp.append(x)
++
++		if prec: raise Utils.WafError("graph has a cycle %s" % str(prec))
++		out.reverse()
++		self.meths = out
++
++		# then we run the methods in order
++		debug('task_gen: posting %s %d', self, id(self))
++		for x in out:
++			try:
++				v = getattr(self, x)
++			except AttributeError:
++				raise Utils.WafError("tried to retrieve %s which is not a valid method" % x)
++			debug('task_gen: -> %s (%d)', x, id(self))
++			v()
++
++	def post(self):
++		"runs the code to create the tasks, do not subclass"
++		if not self.name:
++			if isinstance(self.target, list):
++				self.name = ' '.join(self.target)
++			else:
++				self.name = self.target
++
++		if getattr(self, 'posted', None):
++			#error("OBJECT ALREADY POSTED" + str( self))
++			return
++
++		self.apply()
++		self.posted = True
++		debug('task_gen: posted %s', self.name)
++
++	def get_hook(self, ext):
++		try: return self.mappings[ext]
++		except KeyError:
++			try: return task_gen.mappings[ext]
++			except KeyError: return None
++
++	# TODO waf 1.6: always set the environment
++	# TODO waf 1.6: create_task(self, name, inputs, outputs)
++	def create_task(self, name, src=None, tgt=None, env=None):
++		env = env or self.env
++		task = Task.TaskBase.classes[name](env.copy(), generator=self)
++		if src:
++			task.set_inputs(src)
++		if tgt:
++			task.set_outputs(tgt)
++		self.tasks.append(task)
++		return task
++
++	def name_to_obj(self, name):
++		return self.bld.name_to_obj(name, self.env)
++
++	def find_sources_in_dirs(self, dirnames, excludes=[], exts=[]):
++		"""
++		The attributes "excludes" and "exts" must be lists to avoid the confusion
++		find_sources_in_dirs('a', 'b', 'c') <-> find_sources_in_dirs('a b c')
++
++		do not use absolute paths
++		do not use paths outside of the source tree
++		files or folders whose names begin with '.' are not returned
++
++		# TODO: remove in Waf 1.6
++		"""
++
++		err_msg = "'%s' attribute must be a list"
++		if not isinstance(excludes, list):
++			raise Utils.WscriptError(err_msg % 'excludes')
++		if not isinstance(exts, list):
++			raise Utils.WscriptError(err_msg % 'exts')
++
++		lst = []
++
++		# make sure dirnames is a list; this helps with dirnames containing spaces
++		dirnames = self.to_list(dirnames)
++
++		ext_lst = exts or list(self.mappings.keys()) + list(task_gen.mappings.keys())
++
++		for name in dirnames:
++			anode = self.path.find_dir(name)
++
++			if not anode or not anode.is_child_of(self.bld.srcnode):
++				raise Utils.WscriptError("Unable to use '%s' - either because it's not a relative path" \
++					 ", or it's not a child of '%s'." % (name, self.bld.srcnode))
++
++			self.bld.rescan(anode)
++			for name in self.bld.cache_dir_contents[anode.id]:
++
++				# ignore hidden files
++				if name.startswith('.'):
++					continue
++
++				(base, ext) = os.path.splitext(name)
++				if ext in ext_lst and not name in lst and not name in excludes:
++					lst.append((anode.relpath_gen(self.path) or '.') + os.path.sep + name)
++
++		lst.sort()
++		self.source = self.to_list(self.source)
++		if not self.source: self.source = lst
++		else: self.source += lst
++
++	def clone(self, env):
++		"""when creating a clone in a task generator method, 
++		make sure to set posted=False on the clone 
++		else the other task generator will not create its tasks"""
++		newobj = task_gen(bld=self.bld)
++		for x in self.__dict__:
++			if x in ['env', 'bld']:
++				continue
++			elif x in ["path", "features"]:
++				setattr(newobj, x, getattr(self, x))
++			else:
++				setattr(newobj, x, copy.copy(getattr(self, x)))
++
++		newobj.__class__ = self.__class__
++		if isinstance(env, str):
++			newobj.env = self.bld.all_envs[env].copy()
++		else:
++			newobj.env = env.copy()
++
++		return newobj
++
++	def get_inst_path(self):
++		return getattr(self, '_install_path', getattr(self, 'default_install_path', ''))
++
++	def set_inst_path(self, val):
++		self._install_path = val
++
++	install_path = property(get_inst_path, set_inst_path)
++
++
++	def get_chmod(self):
++		return getattr(self, '_chmod', getattr(self, 'default_chmod', O644))
++
++	def set_chmod(self, val):
++		self._chmod = val
++
++	chmod = property(get_chmod, set_chmod)
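++
++	# Example (editor's sketch): the two properties let a wscript declare
++	#	bld(features='cc cprogram', source='main.c', target='app',
++	#		install_path='${PREFIX}/sbin', chmod=O755)
++	# with unset values falling back to default_install_path / default_chmod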
++
++def declare_extension(var, func):
++	try:
++		for x in Utils.to_list(var):
++			task_gen.mappings[x] = func
++	except:
++		raise Utils.WscriptError('declare_extension takes either a list or a string %r' % var)
++	task_gen.mapped[func.__name__] = func
++
++def declare_order(*k):
++	assert(len(k) > 1)
++	n = len(k) - 1
++	for i in xrange(n):
++		f1 = k[i]
++		f2 = k[i+1]
++		if not f1 in task_gen.prec[f2]:
++			task_gen.prec[f2].append(f1)
++
++def declare_chain(name='', action='', ext_in='', ext_out='', reentrant=True, color='BLUE',
++	install=0, before=[], after=[], decider=None, rule=None, scan=None):
++	"""
++	see Tools/flex.py for an example
++	while I do not like such wrappers, some people really do
++	"""
++
++	action = action or rule
++	if isinstance(action, str):
++		act = Task.simple_task_type(name, action, color=color)
++	else:
++		act = Task.task_type_from_func(name, action, color=color)
++	act.ext_in = tuple(Utils.to_list(ext_in))
++	act.ext_out = tuple(Utils.to_list(ext_out))
++	act.before = Utils.to_list(before)
++	act.after = Utils.to_list(after)
++	act.scan = scan
++
++	def x_file(self, node):
++		if decider:
++			ext = decider(self, node)
++		else:
++			ext = ext_out
++
++		if isinstance(ext, str):
++			out_source = node.change_ext(ext)
++			if reentrant:
++				self.allnodes.append(out_source)
++		elif isinstance(ext, list):
++			out_source = [node.change_ext(x) for x in ext]
++			if reentrant:
++				for i in xrange((reentrant is True) and len(out_source) or reentrant):
++					self.allnodes.append(out_source[i])
++		else:
++			# XXX: useless: it will fail on Utils.to_list above...
++			raise Utils.WafError("do not know how to process %s" % str(ext))
++
++		tsk = self.create_task(name, node, out_source)
++
++		if node.__class__.bld.is_install:
++			tsk.install = install
++
++	declare_extension(act.ext_in, x_file)
++	return x_file
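++
++# Example (editor's sketch, modelled on Tools/flex.py): a one-line chain like
++#	TaskGen.declare_chain(name='flex', rule='${FLEX} -o${TGT} ${FLEXFLAGS} ${SRC}',
++#		ext_in='.l', ext_out='.c', before='cc')
++# maps '.l' sources to a 'flex' task whose '.c' output is processed in turn
++# (reentrant=True by default)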
++
++def bind_feature(name, methods):
++	lst = Utils.to_list(methods)
++	task_gen.traits[name].update(lst)
++
++"""
++All the following decorators are registration decorators, i.e. they add an attribute to the current class
++(task_gen and its derivatives), with the same name as func, which points to func itself.
++For example:
++   @taskgen
++   def sayHi(self):
++        print("hi")
++Now task_gen.sayHi() may be called
++
++If python were really smart, it could infer the order of the methods by itself by looking at the
++attributes. A prerequisite for execution is to have the attribute set before.
++Intelligent compilers binding aspect-oriented programming and parallelization, what a nice topic for studies.
++"""
++def taskgen(func):
++	"""
++	register a method as a task generator method
++	"""
++	setattr(task_gen, func.__name__, func)
++	return func
++
++def feature(*k):
++	"""
++	declare a task generator method that will be executed when the
++	object attribute 'feature' contains the corresponding key(s)
++	"""
++	def deco(func):
++		setattr(task_gen, func.__name__, func)
++		for name in k:
++			task_gen.traits[name].update([func.__name__])
++		return func
++	return deco
++
++def before(*k):
++	"""
++	declare a task generator method which will be executed
++	before the functions of given name(s)
++	"""
++	def deco(func):
++		setattr(task_gen, func.__name__, func)
++		for fun_name in k:
++			if not func.__name__ in task_gen.prec[fun_name]:
++				task_gen.prec[fun_name].append(func.__name__)
++		return func
++	return deco
++
++def after(*k):
++	"""
++	declare a task generator method which will be executed
++	after the functions of given name(s)
++	"""
++	def deco(func):
++		setattr(task_gen, func.__name__, func)
++		for fun_name in k:
++			if not fun_name in task_gen.prec[func.__name__]:
++				task_gen.prec[func.__name__].append(fun_name)
++		return func
++	return deco
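++
++# Ordering recap (editor's note): task_gen.prec maps a method name to the
++# methods that must run before it, so @before('apply_core') on f appends
++# f.__name__ to prec['apply_core'], while @after('default_cc') on f appends
++# 'default_cc' to prec[f.__name__]; apply() above sorts self.meths with it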
++
++def extension(var):
++	"""
++	declare a task generator method which will be invoked during
++	the processing of source files for the extension given
++	"""
++	def deco(func):
++		setattr(task_gen, func.__name__, func)
++		try:
++			for x in Utils.to_list(var):
++				task_gen.mappings[x] = func
++		except:
++			raise Utils.WafError('extension takes either a list or a string %r' % var)
++		task_gen.mapped[func.__name__] = func
++		return func
++	return deco
++
++# TODO make certain the decorators may be used here
++
++def apply_core(self):
++	"""Process the attribute source
++	transform the names into file nodes
++	try to process the files by name first, later by extension"""
++	# get the list of folders to use by the scanners
++	# all our objects share the same include paths anyway
++	find_resource = self.path.find_resource
++
++	for filename in self.to_list(self.source):
++		# if self.mappings or task_gen.mappings contains a file of the same name
++		x = self.get_hook(filename)
++		if x:
++			x(self, filename)
++		else:
++			node = find_resource(filename)
++			if not node: raise Utils.WafError("source not found: '%s' in '%s'" % (filename, str(self.path)))
++			self.allnodes.append(node)
++
++	for node in self.allnodes:
++		# self.mappings or task_gen.mappings map the file extension to a function
++		x = self.get_hook(node.suffix())
++
++		if not x:
++			raise Utils.WafError("Cannot guess how to process %s (got mappings %r in %r) -> try conf.check_tool(..)?" % \
++				(str(node), self.__class__.mappings.keys(), self.__class__))
++		x(self, node)
++feature('*')(apply_core)
++
++def exec_rule(self):
++	"""Process the attribute rule; when it is provided, the method apply_core will be disabled
++	"""
++	if not getattr(self, 'rule', None):
++		return
++
++	# someone may have removed it already
++	try:
++		self.meths.remove('apply_core')
++	except ValueError:
++		pass
++
++	# get the function and the variables
++	func = self.rule
++
++	vars2 = []
++	if isinstance(func, str):
++		# use the shell by default for user-defined commands
++		(func, vars2) = Task.compile_fun('', self.rule, shell=getattr(self, 'shell', True))
++		func.code = self.rule
++
++	# create the task class
++	name = getattr(self, 'name', None) or self.target or self.rule
++	if not isinstance(name, str):
++		name = str(self.idx)
++	cls = Task.task_type_from_func(name, func, getattr(self, 'vars', vars2))
++	cls.color = getattr(self, 'color', 'BLUE')
++
++	# now create one instance
++	tsk = self.create_task(name)
++
++	dep_vars = getattr(self, 'dep_vars', ['ruledeps'])
++	if dep_vars:
++		tsk.dep_vars = dep_vars
++	if isinstance(self.rule, str):
++		tsk.env.ruledeps = self.rule
++	else:
++		# only works if the function is in a global module such as a waf tool
++		tsk.env.ruledeps = Utils.h_fun(self.rule)
++
++	# we assume that the user knows what they are doing when there are no inputs or outputs
++	#if not getattr(self, 'target', None) and not getattr(self, 'source', None):
++	#	cls.quiet = True
++
++	if getattr(self, 'target', None):
++		cls.quiet = True
++		tsk.outputs = [self.path.find_or_declare(x) for x in self.to_list(self.target)]
++
++	if getattr(self, 'source', None):
++		cls.quiet = True
++		tsk.inputs = []
++		for x in self.to_list(self.source):
++			y = self.path.find_resource(x)
++			if not y:
++				raise Utils.WafError('input file %r could not be found (%r)' % (x, self.path.abspath()))
++			tsk.inputs.append(y)
++
++	if self.allnodes:
++		tsk.inputs.extend(self.allnodes)
++
++	if getattr(self, 'scan', None):
++		cls.scan = self.scan
++
++	if getattr(self, 'install_path', None):
++		tsk.install_path = self.install_path
++
++	if getattr(self, 'cwd', None):
++		tsk.cwd = self.cwd
++
++	if getattr(self, 'on_results', None):
++		Task.update_outputs(cls)
++
++	if getattr(self, 'always', None):
++		Task.always_run(cls)
++
++	for x in ['after', 'before', 'ext_in', 'ext_out']:
++		setattr(cls, x, getattr(self, x, []))
++feature('*')(exec_rule)
++before('apply_core')(exec_rule)
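++
++# Example (editor's sketch): exec_rule is what turns a declaration such as
++#	bld(rule='cp ${SRC} ${TGT}', source='in.txt', target='out.txt')
++# into a dedicated task class named after the target; storing the rule in
++# env.ruledeps (with dep_vars=['ruledeps']) makes edits to the rule string
++# re-trigger the task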
++
++def sequence_order(self):
++	"""
++	add a strict sequential constraint between the tasks generated by task generators
++	it uses the fact that task generators are posted in order
++	it will not post objects which belong to other folders
++	there is also an awesome trick for executing the method in last position
++
++	to use:
++	bld(features='javac seq')
++	bld(features='jar seq')
++
++	to start a new sequence, set the attribute seq_start, for example:
++	obj.seq_start = True
++	"""
++	if self.meths and self.meths[-1] != 'sequence_order':
++		self.meths.append('sequence_order')
++		return
++
++	if getattr(self, 'seq_start', None):
++		return
++
++	# all the tasks previously declared must be run before these
++	if getattr(self.bld, 'prev', None):
++		self.bld.prev.post()
++		for x in self.bld.prev.tasks:
++			for y in self.tasks:
++				y.set_run_after(x)
++
++	self.bld.prev = self
++
++feature('seq')(sequence_order)
++
+diff --git a/buildtools/wafadmin/Tools/__init__.py b/buildtools/wafadmin/Tools/__init__.py
+new file mode 100644
+index 0000000..bc6ca23
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/__init__.py
+@@ -0,0 +1,4 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006 (ita)
++
+diff --git a/buildtools/wafadmin/Tools/ar.py b/buildtools/wafadmin/Tools/ar.py
+new file mode 100644
+index 0000000..af9b17f
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/ar.py
+@@ -0,0 +1,36 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006-2008 (ita)
++# Ralf Habacker, 2006 (rh)
++
++"ar and ranlib"
++
++import os, sys
++import Task, Utils
++from Configure import conftest
++
++ar_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
++cls = Task.simple_task_type('static_link', ar_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
++cls.maxjobs = 1
++cls.install = Utils.nada
++
++# remove the output in case it already exists
++old = cls.run
++def wrap(self):
++	try: os.remove(self.outputs[0].abspath(self.env))
++	except OSError: pass
++	return old(self)
++setattr(cls, 'run', wrap)
++
++def detect(conf):
++	conf.find_program('ar', var='AR')
++	conf.find_program('ranlib', var='RANLIB')
++	conf.env.ARFLAGS = 'rcs'
++
++@conftest
++def find_ar(conf):
++	v = conf.env
++	conf.check_tool('ar')
++	if not v['AR']: conf.fatal('ar is required for static libraries - not found')
++
++
+diff --git a/buildtools/wafadmin/Tools/bison.py b/buildtools/wafadmin/Tools/bison.py
+new file mode 100644
+index 0000000..49c6051
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/bison.py
+@@ -0,0 +1,38 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# John O'Meara, 2006
++# Thomas Nagy 2009
++
++"Bison processing"
++
++import Task
++from TaskGen import extension
++
++bison = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
++cls = Task.simple_task_type('bison', bison, 'GREEN', ext_in='.yc .y .yy', ext_out='.c .cxx .h .l', shell=False)
++
++@extension(['.y', '.yc', '.yy'])
++def big_bison(self, node):
++	"""when it becomes complicated (unlike flex), the old recipes work better (cwd)"""
++	has_h = '-d' in self.env['BISONFLAGS']
++
++	outs = []
++	if node.name.endswith('.yc'):
++		outs.append(node.change_ext('.tab.cc'))
++		if has_h:
++			outs.append(node.change_ext('.tab.hh'))
++	else:
++		outs.append(node.change_ext('.tab.c'))
++		if has_h:
++			outs.append(node.change_ext('.tab.h'))
++
++	tsk = self.create_task('bison', node, outs)
++	tsk.cwd = node.bld_dir(tsk.env)
++
++	# and the c/cxx file must be compiled too
++	self.allnodes.append(outs[0])
++
++def detect(conf):
++	bison = conf.find_program('bison', var='BISON', mandatory=True)
++	conf.env['BISONFLAGS'] = '-d'
++
+diff --git a/buildtools/wafadmin/Tools/cc.py b/buildtools/wafadmin/Tools/cc.py
+new file mode 100644
+index 0000000..903a1c5
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/cc.py
+@@ -0,0 +1,100 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006 (ita)
++
++"Base for c programs/libraries"
++
++import os
++import TaskGen, Build, Utils, Task
++from Logs import debug
++import ccroot
++from TaskGen import feature, before, extension, after
++
++g_cc_flag_vars = [
++'CCDEPS', 'FRAMEWORK', 'FRAMEWORKPATH',
++'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH',
++'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CCDEFINES']
++
++EXT_CC = ['.c']
++
++g_cc_type_vars = ['CCFLAGS', 'LINKFLAGS']
++
++# TODO remove in waf 1.6
++class cc_taskgen(ccroot.ccroot_abstract):
++	pass
++
++@feature('cc')
++@before('apply_type_vars')
++@after('default_cc')
++def init_cc(self):
++	self.p_flag_vars = set(self.p_flag_vars).union(g_cc_flag_vars)
++	self.p_type_vars = set(self.p_type_vars).union(g_cc_type_vars)
++
++	if not self.env['CC_NAME']:
++		raise Utils.WafError("At least one compiler (gcc, ..) must be selected")
++
++@feature('cc')
++@after('apply_incpaths')
++def apply_obj_vars_cc(self):
++	"""after apply_incpaths for INC_PATHS"""
++	env = self.env
++	app = env.append_unique
++	cpppath_st = env['CPPPATH_ST']
++
++	# local flags come first
++	# set the user-defined includes paths
++	for i in env['INC_PATHS']:
++		app('_CCINCFLAGS', cpppath_st % i.bldpath(env))
++		app('_CCINCFLAGS', cpppath_st % i.srcpath(env))
++
++	# set the library include paths
++	for i in env['CPPPATH']:
++		app('_CCINCFLAGS', cpppath_st % i)
++
++@feature('cc')
++@after('apply_lib_vars')
++def apply_defines_cc(self):
++	"""after uselib is set for CCDEFINES"""
++	self.defines = getattr(self, 'defines', [])
++	lst = self.to_list(self.defines) + self.to_list(self.env['CCDEFINES'])
++	milst = []
++
++	# now process the local defines
++	for defi in lst:
++		if not defi in milst:
++			milst.append(defi)
++
++	# CCDEFINES_
++	libs = self.to_list(self.uselib)
++	for l in libs:
++		val = self.env['CCDEFINES_'+l]
++		if val: milst += val
++	self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]]
++	y = self.env['CCDEFINES_ST']
++	self.env.append_unique('_CCDEFFLAGS', [y%x for x in milst])
++
++@extension(EXT_CC)
++def c_hook(self, node):
++	# create the compilation task: cpp or cc
++	if getattr(self, 'obj_ext', None):
++		obj_ext = self.obj_ext
++	else:
++		obj_ext = '_%d.o' % self.idx
++
++	task = self.create_task('cc', node, node.change_ext(obj_ext))
++	try:
++		self.compiled_tasks.append(task)
++	except AttributeError:
++		raise Utils.WafError('Have you forgotten to set the feature "cc" on %s?' % str(self))
++	return task
++
++cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}'
++cls = Task.simple_task_type('cc', cc_str, 'GREEN', ext_out='.o', ext_in='.c', shell=False)
++cls.scan = ccroot.scan
++cls.vars.append('CCDEPS')
++
++link_str = '${LINK_CC} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}'
++cls = Task.simple_task_type('cc_link', link_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
++cls.maxjobs = 1
++cls.install = Utils.nada
++
+diff --git a/buildtools/wafadmin/Tools/ccroot.py b/buildtools/wafadmin/Tools/ccroot.py
+new file mode 100644
+index 0000000..f54c82f
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/ccroot.py
+@@ -0,0 +1,629 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2005-2008 (ita)
++
++"base for all c/c++ programs and libraries"
++
++import os, sys, re
++import TaskGen, Task, Utils, preproc, Logs, Build, Options
++from Logs import error, debug, warn
++from Utils import md5
++from TaskGen import taskgen, after, before, feature
++from Constants import *
++from Configure import conftest
++try:
++	from cStringIO import StringIO
++except ImportError:
++	from io import StringIO
++
++import config_c # <- necessary for the configuration, do not touch
++
++USE_TOP_LEVEL = False
++
++def get_cc_version(conf, cc, gcc=False, icc=False):
++
++	cmd = cc + ['-dM', '-E', '-']
++	try:
++		p = Utils.pproc.Popen(cmd, stdin=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
++		p.stdin.write('\n')
++		out = p.communicate()[0]
++	except:
++		conf.fatal('could not determine the compiler version %r' % cmd)
++
++	# PY3K: do not touch
++	out = str(out)
++
++	if gcc:
++		if out.find('__INTEL_COMPILER') >= 0:
++			conf.fatal('The intel compiler pretends to be gcc')
++		if out.find('__GNUC__') < 0:
++			conf.fatal('Could not determine the compiler type')
++
++	if icc and out.find('__INTEL_COMPILER') < 0:
++		conf.fatal('Not icc/icpc')
++
++	k = {}
++	if icc or gcc:
++		out = out.split('\n')
++		import shlex
++
++		for line in out:
++			lst = shlex.split(line)
++			if len(lst)>2:
++				key = lst[1]
++				val = lst[2]
++				k[key] = val
++
++		def isD(var):
++			return var in k
++
++		def isT(var):
++			return var in k and k[var] != '0'
++
++		# Some documentation is available at http://predef.sourceforge.net
++		# The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
++		mp1 = {
++			'__linux__'   : 'linux',
++			'__GNU__'     : 'gnu',
++			'__FreeBSD__' : 'freebsd',
++			'__NetBSD__'  : 'netbsd',
++			'__OpenBSD__' : 'openbsd',
++			'__sun'       : 'sunos',
++			'__hpux'      : 'hpux',
++			'__sgi'       : 'irix',
++			'_AIX'        : 'aix',
++			'__CYGWIN__'  : 'cygwin',
++			'__MSYS__'    : 'msys',
++			'_UWIN'       : 'uwin',
++			'_WIN64'      : 'win32',
++			'_WIN32'      : 'win32',
++			'__POWERPC__' : 'powerpc',
++			}
++
++		for i in mp1:
++			if isD(i):
++				conf.env.DEST_OS = mp1[i]
++				break
++		else:
++			if isD('__APPLE__') and isD('__MACH__'):
++				conf.env.DEST_OS = 'darwin'
++			elif isD('__unix__'): # unix must be tested last as it's a generic fallback
++				conf.env.DEST_OS = 'generic'
++
++		if isD('__ELF__'):
++			conf.env.DEST_BINFMT = 'elf'
++		elif isD('__WINNT__') or isD('__CYGWIN__'):
++			conf.env.DEST_BINFMT = 'pe'
++		elif isD('__APPLE__'):
++			conf.env.DEST_BINFMT = 'mac-o'
++
++		mp2 = {
++				'__x86_64__'  : 'x86_64',
++				'__i386__'    : 'x86',
++				'__ia64__'    : 'ia',
++				'__mips__'    : 'mips',
++				'__sparc__'   : 'sparc',
++				'__alpha__'   : 'alpha',
++				'__arm__'     : 'arm',
++				'__hppa__'    : 'hppa',
++				'__powerpc__' : 'powerpc',
++				}
++		for i in mp2:
++			if isD(i):
++				conf.env.DEST_CPU = mp2[i]
++				break
++
++		debug('ccroot: dest platform: ' + ' '.join([conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')]))
++		conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])
++	return k
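++
++# Editor's note: `cc -dM -E -` dumps the built-in defines, one per line, e.g.
++#	#define __GNUC__ 4
++# which the shlex loop above turns into k['__GNUC__'] = '4'; the isD/isT
++# helpers then probe those macros to fill in DEST_OS, DEST_BINFMT and
++# DEST_CPU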
++
++class DEBUG_LEVELS:
++	"""Will disappear in waf 1.6"""
++	ULTRADEBUG = "ultradebug"
++	DEBUG = "debug"
++	RELEASE = "release"
++	OPTIMIZED = "optimized"
++	CUSTOM = "custom"
++
++	ALL = [ULTRADEBUG, DEBUG, RELEASE, OPTIMIZED, CUSTOM]
++
++def scan(self):
++	"look for .h the .cpp need"
++	debug('ccroot: _scan_preprocessor(self, node, env, path_lst)')
++
++	# TODO waf 1.6 - assume the default input has exactly one file
++
++	if len(self.inputs) == 1:
++		node = self.inputs[0]
++		(nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS'])
++		if Logs.verbose:
++			debug('deps: deps for %s: %r; unresolved %r', str(node), nodes, names)
++		return (nodes, names)
++
++	all_nodes = []
++	all_names = []
++	seen = set()
++	for node in self.inputs:
++		(nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS'])
++		if Logs.verbose:
++			debug('deps: deps for %s: %r; unresolved %r', str(node), nodes, names)
++		for x in nodes:
++			if id(x) in seen: continue
++			seen.add(id(x))
++			all_nodes.append(x)
++		for x in names:
++			if not x in all_names:
++				all_names.append(x)
++	return (all_nodes, all_names)
++
++class ccroot_abstract(TaskGen.task_gen):
++	"Parent class for programs and libraries in languages c, c++ and moc (Qt)"
++	def __init__(self, *k, **kw):
++		# COMPAT remove in waf 1.6 TODO
++		if len(k) > 1:
++			k = list(k)
++			if k[1][0] != 'c':
++				k[1] = 'c' + k[1]
++		TaskGen.task_gen.__init__(self, *k, **kw)
++
++def get_target_name(self):
++	tp = 'program'
++	for x in self.features:
++		if x in ['cshlib', 'cstaticlib']:
++			tp = x.lstrip('c')
++
++	pattern = self.env[tp + '_PATTERN']
++	if not pattern: pattern = '%s'
++
++	dir, name = os.path.split(self.target)
++
++	if self.env.DEST_BINFMT == 'pe' and getattr(self, 'vnum', None) and 'cshlib' in self.features:
++		# include the version in the dll file name,
++		# the import lib file name stays unversioned.
++		name = name + '-' + self.vnum.split('.')[0]
++
++	return os.path.join(dir, pattern % name)
++
++@feature('cc', 'cxx')
++@before('apply_core')
++def default_cc(self):
++	"""compiled_tasks attribute must be set before the '.c->.o' tasks can be created"""
++	Utils.def_attrs(self,
++		includes = '',
++		defines= '',
++		rpaths = '',
++		uselib = '',
++		uselib_local = '',
++		add_objects = '',
++		p_flag_vars = [],
++		p_type_vars = [],
++		compiled_tasks = [],
++		link_task = None)
++
++	# The only thing we need for cross-compilation is DEST_BINFMT.
++	# At some point, we may reach a case where DEST_BINFMT is not enough, but for now it's sufficient.
++	# Currently, cross-compilation is auto-detected only for the gnu and intel compilers.
++	if not self.env.DEST_BINFMT:
++		# Infer the binary format from the os name.
++		self.env.DEST_BINFMT = Utils.unversioned_sys_platform_to_binary_format(
++			self.env.DEST_OS or Utils.unversioned_sys_platform())
++
++	if not self.env.BINDIR: self.env.BINDIR = Utils.subst_vars('${PREFIX}/bin', self.env)
++	if not self.env.LIBDIR: self.env.LIBDIR = Utils.subst_vars('${PREFIX}/lib${LIB_EXT}', self.env)
++
++@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib')
++def apply_verif(self):
++	"""no particular order, used for diagnostic"""
++	if not (self.source or getattr(self, 'add_objects', None) or getattr(self, 'uselib_local', None) or getattr(self, 'obj_files', None)):
++		raise Utils.WafError('no source files specified for %s' % self)
++	if not self.target:
++		raise Utils.WafError('no target for %s' % self)
++
++# TODO reference the d programs, shlibs in d.py, not here
++
++@feature('cprogram', 'dprogram')
++@after('default_cc')
++@before('apply_core')
++def vars_target_cprogram(self):
++	self.default_install_path = self.env.BINDIR
++	self.default_chmod = O755
++
++@after('default_cc')
++@feature('cshlib', 'dshlib')
++@before('apply_core')
++def vars_target_cshlib(self):
++	if self.env.DEST_BINFMT == 'pe':
++		#   set execute bit on libs to avoid 'permission denied' (issue 283)
++		self.default_chmod = O755
++		self.default_install_path = self.env.BINDIR
++	else:
++		self.default_install_path = self.env.LIBDIR
++
++@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib')
++@after('apply_link', 'vars_target_cprogram', 'vars_target_cshlib')
++def default_link_install(self):
++	"""you may kill this method to inject your own installation for the first element
++	any other install should only process its own nodes and not those from the others"""
++	if self.install_path:
++		self.bld.install_files(self.install_path, self.link_task.outputs[0], env=self.env, chmod=self.chmod)
++
++@feature('cc', 'cxx')
++@after('apply_type_vars', 'apply_lib_vars', 'apply_core')
++def apply_incpaths(self):
++	"""used by the scanner
++	after processing the uselib for CPPPATH
++	after apply_core because some processing may add include paths
++	"""
++	lst = []
++	# TODO move the uselib processing out of here
++	for lib in self.to_list(self.uselib):
++		for path in self.env['CPPPATH_' + lib]:
++			if not path in lst:
++				lst.append(path)
++	if preproc.go_absolute:
++		for path in preproc.standard_includes:
++			if not path in lst:
++				lst.append(path)
++
++	for path in self.to_list(self.includes):
++		if not path in lst:
++			if preproc.go_absolute or not os.path.isabs(path):
++				lst.append(path)
++			else:
++				self.env.prepend_value('CPPPATH', path)
++
++	for path in lst:
++		node = None
++		if os.path.isabs(path):
++			if preproc.go_absolute:
++				node = self.bld.root.find_dir(path)
++		elif path[0] == '#':
++			node = self.bld.srcnode
++			if len(path) > 1:
++				node = node.find_dir(path[1:])
++		else:
++			node = self.path.find_dir(path)
++
++		if node:
++			self.env.append_value('INC_PATHS', node)
++
++	# TODO WAF 1.6
++	if USE_TOP_LEVEL:
++		self.env.append_value('INC_PATHS', self.bld.srcnode)
++
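++# Path-resolution sketch (illustrative comment only): with
++# includes = '. #include/public /usr/include', the loop above resolves '.'
++# against the current wscript directory, '#include/public' against the
++# top-level source directory (srcnode), and - unless preproc.go_absolute is
++# set - routes the absolute '/usr/include' to CPPPATH instead of INC_PATHS.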
++@feature('cc', 'cxx')
++@after('init_cc', 'init_cxx')
++@before('apply_lib_vars')
++def apply_type_vars(self):
++	"""before apply_lib_vars because we modify uselib
++	after init_cc and init_cxx because we need p_type_vars
++	"""
++	for x in self.features:
++		if not x in ['cprogram', 'cstaticlib', 'cshlib']:
++			continue
++		x = x.lstrip('c')
++
++		# if the type defines uselib to add, add them
++		st = self.env[x + '_USELIB']
++		if st: self.uselib = self.uselib + ' ' + st
++
++		# each compiler defines variables like 'shlib_CXXFLAGS', 'shlib_LINKFLAGS', etc
++		# so when we make a task generator of the type shlib, CXXFLAGS are modified accordingly
++		for var in self.p_type_vars:
++			compvar = '%s_%s' % (x, var)
++			#print compvar
++			value = self.env[compvar]
++			if value: self.env.append_value(var, value)
++
++@feature('cprogram', 'cshlib', 'cstaticlib')
++@after('apply_core')
++def apply_link(self):
++	"""executes after apply_core for collecting 'compiled_tasks'
++	use a custom linker if specified (self.link='name-of-custom-link-task')"""
++	link = getattr(self, 'link', None)
++	if not link:
++		if 'cstaticlib' in self.features: link = 'static_link'
++		elif 'cxx' in self.features: link = 'cxx_link'
++		else: link = 'cc_link'
++
++	tsk = self.create_task(link)
++	outputs = [t.outputs[0] for t in self.compiled_tasks]
++	tsk.set_inputs(outputs)
++	tsk.set_outputs(self.path.find_or_declare(get_target_name(self)))
++
++	self.link_task = tsk
++
++@feature('cc', 'cxx')
++@after('apply_link', 'init_cc', 'init_cxx', 'apply_core')
++def apply_lib_vars(self):
++	"""after apply_link because of 'link_task'
++	after default_cc because of the attribute 'uselib'"""
++
++	# after 'apply_core' for the case of 'cc' task generators that have no link task
++
++	env = self.env
++
++	# 1. the case of the libs defined in the project (visit ancestors first)
++	# the ancestors external libraries (uselib) will be prepended
++	self.uselib = self.to_list(self.uselib)
++	names = self.to_list(self.uselib_local)
++
++	seen = set([])
++	tmp = Utils.deque(names) # consume a copy of the list of names
++	while tmp:
++		lib_name = tmp.popleft()
++		# visit dependencies only once
++		if lib_name in seen:
++			continue
++
++		y = self.name_to_obj(lib_name)
++		if not y:
++			raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name))
++		y.post()
++		seen.add(lib_name)
++
++		# object has ancestors to process (shared libraries): add them to the end of the list
++		if getattr(y, 'uselib_local', None):
++			lst = y.to_list(y.uselib_local)
++			if 'cshlib' in y.features or 'cprogram' in y.features:
++				lst = [x for x in lst if not 'cstaticlib' in self.name_to_obj(x).features]
++			tmp.extend(lst)
++
++		# link task and flags
++		if getattr(y, 'link_task', None):
++
++			link_name = y.target[y.target.rfind(os.sep) + 1:]
++			if 'cstaticlib' in y.features:
++				env.append_value('STATICLIB', link_name)
++			elif 'cshlib' in y.features or 'cprogram' in y.features:
++				# WARNING some linkers can link against programs
++				env.append_value('LIB', link_name)
++
++			# the order
++			self.link_task.set_run_after(y.link_task)
++
++			# for the recompilation
++			dep_nodes = getattr(self.link_task, 'dep_nodes', [])
++			self.link_task.dep_nodes = dep_nodes + y.link_task.outputs
++
++			# add the link path too
++			tmp_path = y.link_task.outputs[0].parent.bldpath(self.env)
++			if not tmp_path in env['LIBPATH']: env.prepend_value('LIBPATH', tmp_path)
++
++		# add ancestors uselib too - but only propagate those that have no staticlib
++		for v in self.to_list(y.uselib):
++			if not env['STATICLIB_' + v]:
++				if not v in self.uselib:
++					self.uselib.insert(0, v)
++
++		# if the library task generator provides 'export_incdirs', add to the include path
++		# the export_incdirs must be a list of paths relative to the other library
++		if getattr(y, 'export_incdirs', None):
++			for x in self.to_list(y.export_incdirs):
++				node = y.path.find_dir(x)
++				if not node:
++					raise Utils.WafError('object %r: invalid folder %r in export_incdirs' % (y.target, x))
++				self.env.append_unique('INC_PATHS', node)
++
++	# 2. the case of the libs defined outside
++	for x in self.uselib:
++		for v in self.p_flag_vars:
++			val = self.env[v + '_' + x]
++			if val: self.env.append_value(v, val)
++
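++# Traversal sketch (illustrative comment only, hypothetical names): if 'prog'
++# declares uselib_local = 'mylib' and 'mylib' declares uselib = 'Z', the loop
++# above posts 'mylib', records it in LIB or STATICLIB, orders prog's link task
++# after mylib's, adds mylib's build directory to LIBPATH, and prepends 'Z' to
++# prog.uselib (unless STATICLIB_Z is set) so the LIB_Z/CPPPATH_Z/... variables
++# are applied by the uselib loop in step 2.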
++@feature('cprogram', 'cstaticlib', 'cshlib')
++@after('init_cc', 'init_cxx', 'apply_link')
++def apply_objdeps(self):
++	"add the .o files produced by some other object files in the same manner as uselib_local"
++	if not getattr(self, 'add_objects', None): return
++
++	seen = []
++	names = self.to_list(self.add_objects)
++	while names:
++		x = names[0]
++
++		# visit dependencies only once
++		if x in seen:
++			names = names[1:]
++			continue
++
++		# object does not exist ?
++		y = self.name_to_obj(x)
++		if not y:
++			raise Utils.WafError('object %r was not found in uselib_local (required by add_objects %r)' % (x, self.name))
++
++		# object has ancestors to process first ? update the list of names
++		if getattr(y, 'add_objects', None):
++			added = 0
++			lst = y.to_list(y.add_objects)
++			lst.reverse()
++			for u in lst:
++				if u in seen: continue
++				added = 1
++				names = [u]+names
++			if added: continue # list of names modified, loop
++
++		# safe to process the current object
++		y.post()
++		seen.append(x)
++
++		for t in y.compiled_tasks:
++			self.link_task.inputs.extend(t.outputs)
++
++@feature('cprogram', 'cshlib', 'cstaticlib')
++@after('apply_lib_vars')
++def apply_obj_vars(self):
++	"""after apply_lib_vars for uselib"""
++	v = self.env
++	lib_st           = v['LIB_ST']
++	staticlib_st     = v['STATICLIB_ST']
++	libpath_st       = v['LIBPATH_ST']
++	staticlibpath_st = v['STATICLIBPATH_ST']
++	rpath_st         = v['RPATH_ST']
++
++	app = v.append_unique
++
++	if v['FULLSTATIC']:
++		v.append_value('LINKFLAGS', v['FULLSTATIC_MARKER'])
++
++	for i in v['RPATH']:
++		if i and rpath_st:
++			app('LINKFLAGS', rpath_st % i)
++
++	for i in v['LIBPATH']:
++		app('LINKFLAGS', libpath_st % i)
++		app('LINKFLAGS', staticlibpath_st % i)
++
++	if v['STATICLIB']:
++		v.append_value('LINKFLAGS', v['STATICLIB_MARKER'])
++		k = [(staticlib_st % i) for i in v['STATICLIB']]
++		app('LINKFLAGS', k)
++
++	# fully static binaries ?
++	if not v['FULLSTATIC']:
++		if v['STATICLIB'] or v['LIB']:
++			v.append_value('LINKFLAGS', v['SHLIB_MARKER'])
++
++	app('LINKFLAGS', [lib_st % i for i in v['LIB']])
++
++@after('apply_link')
++def process_obj_files(self):
++	if not hasattr(self, 'obj_files'): return
++	for x in self.obj_files:
++		node = self.path.find_resource(x)
++		self.link_task.inputs.append(node)
++
++@taskgen
++def add_obj_file(self, file):
++	"""Small example on how to link object files as if they were source
++	obj = bld.create_obj('cc')
++	obj.add_obj_file('foo.o')"""
++	if not hasattr(self, 'obj_files'): self.obj_files = []
++	if not 'process_obj_files' in self.meths: self.meths.append('process_obj_files')
++	self.obj_files.append(file)
++
++c_attrs = {
++'cxxflag' : 'CXXFLAGS',
++'cflag' : 'CCFLAGS',
++'ccflag' : 'CCFLAGS',
++'linkflag' : 'LINKFLAGS',
++'ldflag' : 'LINKFLAGS',
++'lib' : 'LIB',
++'libpath' : 'LIBPATH',
++'staticlib': 'STATICLIB',
++'staticlibpath': 'STATICLIBPATH',
++'rpath' : 'RPATH',
++'framework' : 'FRAMEWORK',
++'frameworkpath' : 'FRAMEWORKPATH'
++}
++
++@feature('cc', 'cxx')
++@before('init_cxx', 'init_cc')
++@before('apply_lib_vars', 'apply_obj_vars', 'apply_incpaths', 'init_cc')
++def add_extra_flags(self):
++	"""case and plural insensitive
++	before apply_obj_vars for processing the library attributes
++	"""
++	for x in self.__dict__.keys():
++		y = x.lower()
++		if y[-1] == 's':
++			y = y[:-1]
++		if c_attrs.get(y, None):
++			self.env.append_unique(c_attrs[y], getattr(self, x))
++
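++# Mapping sketch (illustrative comment only): the lookup is case- and
++# plural-insensitive, so a task generator created with, e.g.,
++#   bld(features='cc cprogram', source='x.c', target='x',
++#       cflags=['-O2'], libs=['m', 'z'])
++# gets '-O2' appended to CCFLAGS and 'm'/'z' appended to LIB via c_attrs.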
++# ============ the code above must not know anything about import libs ==========
++
++@feature('cshlib')
++@after('apply_link', 'default_cc')
++@before('apply_lib_vars', 'apply_objdeps', 'default_link_install')
++def apply_implib(self):
++	"""On mswindows, handle dlls and their import libs
++	the .dll.a is the import lib and it is required for linking so it is installed too
++	"""
++	if not self.env.DEST_BINFMT == 'pe':
++		return
++
++	self.meths.remove('default_link_install')
++
++	bindir = self.install_path
++	if not bindir: return
++
++	# install the dll in the bin dir
++	dll = self.link_task.outputs[0]
++	self.bld.install_files(bindir, dll, self.env, self.chmod)
++
++	# add linker flags to generate the import lib
++	implib = self.env['implib_PATTERN'] % os.path.split(self.target)[1]
++
++	implib = dll.parent.find_or_declare(implib)
++	self.link_task.outputs.append(implib)
++	self.bld.install_as('${LIBDIR}/%s' % implib.name, implib, self.env)
++
++	self.env.append_value('LINKFLAGS', (self.env['IMPLIB_ST'] % implib.bldpath(self.env)).split())
++
++# ============ the code above must not know anything about vnum processing on unix platforms =========
++
++@feature('cshlib')
++@after('apply_link')
++@before('apply_lib_vars', 'default_link_install')
++def apply_vnum(self):
++	"""
++	libfoo.so is installed as libfoo.so.1.2.3
++	"""
++	if not getattr(self, 'vnum', '') or not 'cshlib' in self.features or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'):
++		return
++
++	self.meths.remove('default_link_install')
++
++	link = self.link_task
++	nums = self.vnum.split('.')
++	node = link.outputs[0]
++
++	libname = node.name
++	if libname.endswith('.dylib'):
++		name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
++		name2 = libname.replace('.dylib', '.%s.dylib' % nums[0])
++	else:
++		name3 = libname + '.' + self.vnum
++		name2 = libname + '.' + nums[0]
++
++	if self.env.SONAME_ST:
++		v = self.env.SONAME_ST % name2
++		self.env.append_value('LINKFLAGS', v.split())
++
++	bld = self.bld
++	nums = self.vnum.split('.')
++
++	path = self.install_path
++	if not path: return
++
++	bld.install_as(path + os.sep + name3, node, env=self.env)
++	bld.symlink_as(path + os.sep + name2, name3)
++	bld.symlink_as(path + os.sep + libname, name3)
++
++	# the following task is just to enable execution from the build dir :-/
++	self.create_task('vnum', node, [node.parent.find_or_declare(name2), node.parent.find_or_declare(name3)])
++
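++# Versioning sketch (illustrative comment only): with vnum = '1.2.3' and an
++# ELF output libfoo.so, the code above installs and links
++#   <install_path>/libfoo.so.1.2.3            (the real file)
++#   <install_path>/libfoo.so.1 -> libfoo.so.1.2.3
++#   <install_path>/libfoo.so   -> libfoo.so.1.2.3
++# and, when SONAME_ST is set, passes a soname of libfoo.so.1 to the linker.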
++def exec_vnum_link(self):
++	for x in self.outputs:
++		path = x.abspath(self.env)
++		try:
++			os.remove(path)
++		except OSError:
++			pass
++
++		try:
++			os.symlink(self.inputs[0].name, path)
++		except OSError:
++			return 1
++
++cls = Task.task_type_from_func('vnum', func=exec_vnum_link, ext_in='.bin', color='CYAN')
++cls.quiet = 1
++
++# ============ the --as-needed flag should be added during the configuration, not at runtime =========
++
++@conftest
++def add_as_needed(conf):
++	if conf.env.DEST_BINFMT == 'elf' and 'gcc' in (conf.env.CXX_NAME, conf.env.CC_NAME):
++		conf.env.append_unique('LINKFLAGS', '--as-needed')
++
+diff --git a/buildtools/wafadmin/Tools/compiler_cc.py b/buildtools/wafadmin/Tools/compiler_cc.py
+new file mode 100644
+index 0000000..0421503
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/compiler_cc.py
+@@ -0,0 +1,67 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Matthias Jahn jahn dôt matthias ât freenet dôt de, 2007 (pmarat)
++
++import os, sys, imp, types, ccroot
++import optparse
++import Utils, Configure, Options
++from Logs import debug
++
++c_compiler = {
++	'win32':  ['msvc', 'gcc'],
++	'cygwin': ['gcc'],
++	'darwin': ['gcc'],
++	'aix':    ['xlc', 'gcc'],
++	'linux':  ['gcc', 'icc', 'suncc'],
++	'sunos':  ['gcc', 'suncc'],
++	'irix':   ['gcc'],
++	'hpux':   ['gcc'],
++	'gnu':    ['gcc'],
++	'default': ['gcc']
++}
++
++def __list_possible_compiler(platform):
++	try:
++		return c_compiler[platform]
++	except KeyError:
++		return c_compiler["default"]
++
++def detect(conf):
++	"""
++	for each compiler for the platform, try to configure the compiler
++	in theory the tools should raise a configuration error if the compiler
++	pretends to be something it is not (setting CC=icc and trying to configure gcc)
++	"""
++	try: test_for_compiler = Options.options.check_c_compiler
++	except AttributeError: conf.fatal("Add set_options(opt): opt.tool_options('compiler_cc')")
++	orig = conf.env
++	for compiler in test_for_compiler.split():
++		conf.env = orig.copy()
++		try:
++			conf.check_tool(compiler)
++		except Configure.ConfigurationError, e:
++			debug('compiler_cc: %r' % e)
++		else:
++			if conf.env['CC']:
++				orig.table = conf.env.get_merged_dict()
++				conf.env = orig
++				conf.check_message(compiler, '', True)
++				conf.env['COMPILER_CC'] = compiler
++				break
++			conf.check_message(compiler, '', False)
++			break
++	else:
++		conf.fatal('could not configure a c compiler!')
++
++def set_options(opt):
++	build_platform = Utils.unversioned_sys_platform()
++	possible_compiler_list = __list_possible_compiler(build_platform)
++	test_for_compiler = ' '.join(possible_compiler_list)
++	cc_compiler_opts = opt.add_option_group("C Compiler Options")
++	cc_compiler_opts.add_option('--check-c-compiler', default="%s" % test_for_compiler,
++		help='On this platform (%s) the following C-Compiler will be checked by default: "%s"' % (build_platform, test_for_compiler),
++		dest="check_c_compiler")
++
++	for c_compiler in test_for_compiler.split():
++		opt.tool_options('%s' % c_compiler, option_group=cc_compiler_opts)
++
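++# Usage sketch (illustrative comment only, hypothetical wscript):
++#   def set_options(opt):
++#       opt.tool_options('compiler_cc')
++#   def configure(conf):
++#       conf.check_tool('compiler_cc')
++# Running './waf configure --check-c-compiler="gcc icc"' then limits the
++# candidates tried by detect() above to gcc followed by icc.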
+diff --git a/buildtools/wafadmin/Tools/compiler_cxx.py b/buildtools/wafadmin/Tools/compiler_cxx.py
+new file mode 100644
+index 0000000..5308ea9
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/compiler_cxx.py
+@@ -0,0 +1,62 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Matthias Jahn jahn dôt matthias ât freenet dôt de 2007 (pmarat)
++
++import os, sys, imp, types, ccroot
++import optparse
++import Utils, Configure, Options
++from Logs import debug
++
++cxx_compiler = {
++'win32':  ['msvc', 'g++'],
++'cygwin': ['g++'],
++'darwin': ['g++'],
++'aix':    ['xlc++', 'g++'],
++'linux':  ['g++', 'icpc', 'sunc++'],
++'sunos':  ['g++', 'sunc++'],
++'irix':   ['g++'],
++'hpux':   ['g++'],
++'gnu':    ['g++'],
++'default': ['g++']
++}
++
++def __list_possible_compiler(platform):
++	try:
++		return cxx_compiler[platform]
++	except KeyError:
++		return cxx_compiler["default"]
++
++def detect(conf):
++	try: test_for_compiler = Options.options.check_cxx_compiler
++	except AttributeError: raise Configure.ConfigurationError("Add set_options(opt): opt.tool_options('compiler_cxx')")
++	orig = conf.env
++	for compiler in test_for_compiler.split():
++		try:
++			conf.env = orig.copy()
++			conf.check_tool(compiler)
++		except Configure.ConfigurationError, e:
++			debug('compiler_cxx: %r' % e)
++		else:
++			if conf.env['CXX']:
++				orig.table = conf.env.get_merged_dict()
++				conf.env = orig
++				conf.check_message(compiler, '', True)
++				conf.env['COMPILER_CXX'] = compiler
++				break
++			conf.check_message(compiler, '', False)
++			break
++	else:
++		conf.fatal('could not configure a cxx compiler!')
++
++def set_options(opt):
++	build_platform = Utils.unversioned_sys_platform()
++	possible_compiler_list = __list_possible_compiler(build_platform)
++	test_for_compiler = ' '.join(possible_compiler_list)
++	cxx_compiler_opts = opt.add_option_group('C++ Compiler Options')
++	cxx_compiler_opts.add_option('--check-cxx-compiler', default="%s" % test_for_compiler,
++		help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"' % (build_platform, test_for_compiler),
++		dest="check_cxx_compiler")
++
++	for cxx_compiler in test_for_compiler.split():
++		opt.tool_options('%s' % cxx_compiler, option_group=cxx_compiler_opts)
++
+diff --git a/buildtools/wafadmin/Tools/compiler_d.py b/buildtools/wafadmin/Tools/compiler_d.py
+new file mode 100644
+index 0000000..1ea5efa
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/compiler_d.py
+@@ -0,0 +1,33 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Carlos Rafael Giani, 2007 (dv)
++
++import os, sys, imp, types
++import Utils, Configure, Options
++
++def detect(conf):
++	if getattr(Options.options, 'check_dmd_first', None):
++		test_for_compiler = ['dmd', 'gdc']
++	else:
++		test_for_compiler = ['gdc', 'dmd']
++
++	for d_compiler in test_for_compiler:
++		try:
++			conf.check_tool(d_compiler)
++		except:
++			pass
++		else:
++			break
++	else:
++		conf.fatal('no suitable d compiler was found')
++
++def set_options(opt):
++	d_compiler_opts = opt.add_option_group('D Compiler Options')
++	d_compiler_opts.add_option('--check-dmd-first', action='store_true',
++			help='checks for the gdc compiler before dmd (default is the other way round)',
++			dest='check_dmd_first',
++			default=False)
++
++	for d_compiler in ['gdc', 'dmd']:
++		opt.tool_options('%s' % d_compiler, option_group=d_compiler_opts)
++
+diff --git a/buildtools/wafadmin/Tools/config_c.py b/buildtools/wafadmin/Tools/config_c.py
+new file mode 100644
+index 0000000..a32d8aa
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/config_c.py
+@@ -0,0 +1,736 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2005-2008 (ita)
++
++"""
++c/c++ configuration routines
++"""
++
++import os, imp, sys, shlex, shutil
++from Utils import md5
++import Build, Utils, Configure, Task, Options, Logs, TaskGen
++from Constants import *
++from Configure import conf, conftest
++
++cfg_ver = {
++	'atleast-version': '>=',
++	'exact-version': '==',
++	'max-version': '<=',
++}
++
++SNIP1 = '''
++	int main() {
++	void *p;
++	p=(void*)(%s);
++	return 0;
++}
++'''
++
++SNIP2 = '''
++int main() {
++	if ((%(type_name)s *) 0) return 0;
++	if (sizeof (%(type_name)s)) return 0;
++}
++'''
++
++SNIP3 = '''
++int main() {
++	return 0;
++}
++'''
++
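++# How the snippets above are used (illustrative comment only): SNIP1 % 'malloc'
++# produces a program that assigns malloc to a void pointer, so the check fails
++# unless the symbol is declared and linkable; SNIP2 % {'type_name': 'size_t'}
++# exercises a cast and sizeof for type checks; SNIP3 is the trivial program
++# compiled for plain header and flag checks (see validate_c below).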
++def parse_flags(line, uselib, env):
++	"""pkg-config still has bugs on some platforms, and there are many -config programs, parsing flags is necessary :-/"""
++
++	lst = shlex.split(line)
++	while lst:
++		x = lst.pop(0)
++		st = x[:2]
++		ot = x[2:]
++		app = env.append_value
++		if st == '-I' or st == '/I':
++			if not ot: ot = lst.pop(0)
++			app('CPPPATH_' + uselib, ot)
++		elif st == '-D':
++			if not ot: ot = lst.pop(0)
++			app('CXXDEFINES_' + uselib, ot)
++			app('CCDEFINES_' + uselib, ot)
++		elif st == '-l':
++			if not ot: ot = lst.pop(0)
++			app('LIB_' + uselib, ot)
++		elif st == '-L':
++			if not ot: ot = lst.pop(0)
++			app('LIBPATH_' + uselib, ot)
++		elif x == '-pthread' or x.startswith('+'):
++			app('CCFLAGS_' + uselib, x)
++			app('CXXFLAGS_' + uselib, x)
++			app('LINKFLAGS_' + uselib, x)
++		elif x == '-framework':
++			app('FRAMEWORK_' + uselib, lst.pop(0))
++		elif x.startswith('-F'):
++			app('FRAMEWORKPATH_' + uselib, x[2:])
++		elif x.startswith('-std'):
++			app('CCFLAGS_' + uselib, x)
++			app('CXXFLAGS_' + uselib, x)
++			app('LINKFLAGS_' + uselib, x)
++		elif x.startswith('-Wl'):
++			app('LINKFLAGS_' + uselib, x)
++		elif x.startswith('-m') or x.startswith('-f'):
++			app('CCFLAGS_' + uselib, x)
++			app('CXXFLAGS_' + uselib, x)
++
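++# Parsing sketch (illustrative comment only, hypothetical pkg-config output):
++# feeding the line
++#   '-I/usr/include/glib-2.0 -DG_DISABLE_DEPRECATED -L/usr/lib -lglib-2.0 -pthread'
++# to parse_flags(line, 'GLIB', env) appends '/usr/include/glib-2.0' to
++# CPPPATH_GLIB, the define to CCDEFINES_GLIB and CXXDEFINES_GLIB, '/usr/lib'
++# to LIBPATH_GLIB, 'glib-2.0' to LIB_GLIB, and '-pthread' to CCFLAGS_GLIB,
++# CXXFLAGS_GLIB and LINKFLAGS_GLIB.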
++@conf
++def ret_msg(self, f, kw):
++	"""execute a function, when provided"""
++	if isinstance(f, str):
++		return f
++	return f(kw)
++
++@conf
++def validate_cfg(self, kw):
++	if not 'path' in kw:
++		kw['path'] = 'pkg-config --errors-to-stdout --print-errors'
++
++	# pkg-config version
++	if 'atleast_pkgconfig_version' in kw:
++		if not 'msg' in kw:
++			kw['msg'] = 'Checking for pkg-config version >= %s' % kw['atleast_pkgconfig_version']
++		return
++
++	# pkg-config --modversion
++	if 'modversion' in kw:
++		return
++
++	if 'variables' in kw:
++		if not 'msg' in kw:
++			kw['msg'] = 'Checking for %s variables' % kw['package']
++		return
++
++	# checking for the version of a module, for the moment, one thing at a time
++	for x in cfg_ver.keys():
++		y = x.replace('-', '_')
++		if y in kw:
++			if not 'package' in kw:
++				raise ValueError('%s requires a package' % x)
++
++			if not 'msg' in kw:
++				kw['msg'] = 'Checking for %s %s %s' % (kw['package'], cfg_ver[x], kw[y])
++			return
++
++	if not 'msg' in kw:
++		kw['msg'] = 'Checking for %s' % (kw['package'] or kw['path'])
++	if not 'okmsg' in kw:
++		kw['okmsg'] = 'yes'
++	if not 'errmsg' in kw:
++		kw['errmsg'] = 'not found'
++
++@conf
++def cmd_and_log(self, cmd, kw):
++	Logs.debug('runner: %s\n' % cmd)
++	if self.log:
++		self.log.write('%s\n' % cmd)
++
++	try:
++		p = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE, shell=True)
++		(out, err) = p.communicate()
++	except OSError, e:
++		self.log.write('error %r' % e)
++		self.fatal(str(e))
++
++	# placeholder, don't touch
++	out = str(out)
++	err = str(err)
++
++	if self.log:
++		self.log.write(out)
++		self.log.write(err)
++
++	if p.returncode:
++		if not kw.get('errmsg', ''):
++			if kw.get('mandatory', False):
++				kw['errmsg'] = out.strip()
++			else:
++				kw['errmsg'] = 'no'
++		self.fatal('fail')
++	return out
++
++@conf
++def exec_cfg(self, kw):
++
++	# pkg-config version
++	if 'atleast_pkgconfig_version' in kw:
++		cmd = '%s --atleast-pkgconfig-version=%s' % (kw['path'], kw['atleast_pkgconfig_version'])
++		self.cmd_and_log(cmd, kw)
++		if not 'okmsg' in kw:
++			kw['okmsg'] = 'yes'
++		return
++
++	# checking for the version of a module
++	for x in cfg_ver:
++		y = x.replace('-', '_')
++		if y in kw:
++			self.cmd_and_log('%s --%s=%s %s' % (kw['path'], x, kw[y], kw['package']), kw)
++			if not 'okmsg' in kw:
++				kw['okmsg'] = 'yes'
++			self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
++			break
++
++	# retrieving the version of a module
++	if 'modversion' in kw:
++		version = self.cmd_and_log('%s --modversion %s' % (kw['path'], kw['modversion']), kw).strip()
++		self.define('%s_VERSION' % Utils.quote_define_name(kw.get('uselib_store', kw['modversion'])), version)
++		return version
++
++	# retrieving variables of a module
++	if 'variables' in kw:
++		env = kw.get('env', self.env)
++		uselib = kw.get('uselib_store', kw['package'].upper())
++		vars = Utils.to_list(kw['variables'])
++		for v in vars:
++			val = self.cmd_and_log('%s --variable=%s %s' % (kw['path'], v, kw['package']), kw).strip()
++			var = '%s_%s' % (uselib, v)
++			env[var] = val
++		if not 'okmsg' in kw:
++			kw['okmsg'] = 'yes'
++		return
++
++	lst = [kw['path']]
++
++
++	defi = kw.get('define_variable', None)
++	if not defi:
++		defi = self.env.PKG_CONFIG_DEFINES or {}
++	for key, val in defi.iteritems():
++		lst.append('--define-variable=%s=%s' % (key, val))
++
++	lst.append(kw.get('args', ''))
++	lst.append(kw['package'])
++
++	# so we assume the command-line will output flags to be parsed afterwards
++	cmd = ' '.join(lst)
++	ret = self.cmd_and_log(cmd, kw)
++	if not 'okmsg' in kw:
++		kw['okmsg'] = 'yes'
++
++	self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
++	parse_flags(ret, kw.get('uselib_store', kw['package'].upper()), kw.get('env', self.env))
++	return ret
++
++@conf
++def check_cfg(self, *k, **kw):
++	"""
++	for pkg-config mostly, but also all the -config tools
++	conf.check_cfg(path='mpicc', args='--showme:compile --showme:link', package='', uselib_store='OPEN_MPI')
++	conf.check_cfg(package='dbus-1', variables='system_bus_default_address session_bus_services_dir')
++	"""
++
++	self.validate_cfg(kw)
++	if 'msg' in kw:
++		self.check_message_1(kw['msg'])
++	ret = None
++	try:
++		ret = self.exec_cfg(kw)
++	except Configure.ConfigurationError, e:
++		if 'errmsg' in kw:
++			self.check_message_2(kw['errmsg'], 'YELLOW')
++		if 'mandatory' in kw and kw['mandatory']:
++			if Logs.verbose > 1:
++				raise
++			else:
++				self.fatal('the configuration failed (see %r)' % self.log.name)
++	else:
++		kw['success'] = ret
++		if 'okmsg' in kw:
++			self.check_message_2(self.ret_msg(kw['okmsg'], kw))
++
++	return ret
++
++# the idea is the following: now that we are certain
++# that all the code here is only for c or c++, it is
++# easy to put all the logic in one function
++#
++# this should prevent code duplication (ita)
++
++# env: an optional environment (modified -> provide a copy)
++# compiler: cc or cxx - it tries to guess what is best
++# type: cprogram, cshlib, cstaticlib
++# code: a c code to execute
++# uselib_store: where to add the variables
++# uselib: parameters to use for building
++# define: define to set, like FOO in #define FOO, if not set, add /* #undef FOO */
++# execute: True or False - will return the result of the execution
++
++@conf
++def validate_c(self, kw):
++	"""validate the parameters for the test method"""
++
++	if not 'env' in kw:
++		kw['env'] = self.env.copy()
++
++	env = kw['env']
++	if not 'compiler' in kw:
++		kw['compiler'] = 'cc'
++		if env['CXX_NAME'] and Task.TaskBase.classes.get('cxx', None):
++			kw['compiler'] = 'cxx'
++			if not self.env['CXX']:
++				self.fatal('a c++ compiler is required')
++		else:
++			if not self.env['CC']:
++				self.fatal('a c compiler is required')
++
++	if not 'type' in kw:
++		kw['type'] = 'cprogram'
++
++	assert not(kw['type'] != 'cprogram' and kw.get('execute', 0)), 'can only execute programs'
++
++
++	#if kw['type'] != 'program' and kw.get('execute', 0):
++	#	raise ValueError, 'can only execute programs'
++
++	def to_header(dct):
++		if 'header_name' in dct:
++			dct = Utils.to_list(dct['header_name'])
++			return ''.join(['#include <%s>\n' % x for x in dct])
++		return ''
++
++	# set the file name
++	if not 'compile_mode' in kw:
++		kw['compile_mode'] = (kw['compiler'] == 'cxx') and 'cxx' or 'cc'
++
++	if not 'compile_filename' in kw:
++		kw['compile_filename'] = 'test.c' + ((kw['compile_mode'] == 'cxx') and 'pp' or '')
++
++	#OSX
++	if 'framework_name' in kw:
++		try: TaskGen.task_gen.create_task_macapp
++		except AttributeError: self.fatal('frameworks require the osx tool')
++
++		fwkname = kw['framework_name']
++		if not 'uselib_store' in kw:
++			kw['uselib_store'] = fwkname.upper()
++
++		if not kw.get('no_header', False):
++			if not 'header_name' in kw:
++				kw['header_name'] = []
++			fwk = '%s/%s.h' % (fwkname, fwkname)
++			if kw.get('remove_dot_h', None):
++				fwk = fwk[:-2]
++			kw['header_name'] = Utils.to_list(kw['header_name']) + [fwk]
++
++		kw['msg'] = 'Checking for framework %s' % fwkname
++		kw['framework'] = fwkname
++		#kw['frameworkpath'] = set it yourself
++
++	if 'function_name' in kw:
++		fu = kw['function_name']
++		if not 'msg' in kw:
++			kw['msg'] = 'Checking for function %s' % fu
++		kw['code'] = to_header(kw) + SNIP1 % fu
++		if not 'uselib_store' in kw:
++			kw['uselib_store'] = fu.upper()
++		if not 'define_name' in kw:
++			kw['define_name'] = self.have_define(fu)
++
++	elif 'type_name' in kw:
++		tu = kw['type_name']
++		if not 'msg' in kw:
++			kw['msg'] = 'Checking for type %s' % tu
++		if not 'header_name' in kw:
++			kw['header_name'] = 'stdint.h'
++		kw['code'] = to_header(kw) + SNIP2 % {'type_name' : tu}
++		if not 'define_name' in kw:
++			kw['define_name'] = self.have_define(tu.upper())
++
++	elif 'header_name' in kw:
++		if not 'msg' in kw:
++			kw['msg'] = 'Checking for header %s' % kw['header_name']
++
++		l = Utils.to_list(kw['header_name'])
++		assert len(l)>0, 'list of headers in header_name is empty'
++
++		kw['code'] = to_header(kw) + SNIP3
++
++		if not 'uselib_store' in kw:
++			kw['uselib_store'] = l[0].upper()
++
++		if not 'define_name' in kw:
++			kw['define_name'] = self.have_define(l[0])
++
++	if 'lib' in kw:
++		if not 'msg' in kw:
++			kw['msg'] = 'Checking for library %s' % kw['lib']
++		if not 'uselib_store' in kw:
++			kw['uselib_store'] = kw['lib'].upper()
++
++	if 'staticlib' in kw:
++		if not 'msg' in kw:
++			kw['msg'] = 'Checking for static library %s' % kw['staticlib']
++		if not 'uselib_store' in kw:
++			kw['uselib_store'] = kw['staticlib'].upper()
++
++	if 'fragment' in kw:
++		# an additional code fragment may be provided to replace the predefined code
++		# in custom headers
++		kw['code'] = kw['fragment']
++		if not 'msg' in kw:
++			kw['msg'] = 'Checking for custom code'
++		if not 'errmsg' in kw:
++			kw['errmsg'] = 'no'
++
++	for (flagsname,flagstype) in [('cxxflags','compiler'), ('cflags','compiler'), ('linkflags','linker')]:
++		if flagsname in kw:
++			if not 'msg' in kw:
++				kw['msg'] = 'Checking for %s flags %s' % (flagstype, kw[flagsname])
++			if not 'errmsg' in kw:
++				kw['errmsg'] = 'no'
++
++	if not 'execute' in kw:
++		kw['execute'] = False
++
++	if not 'errmsg' in kw:
++		kw['errmsg'] = 'not found'
++
++	if not 'okmsg' in kw:
++		kw['okmsg'] = 'yes'
++
++	if not 'code' in kw:
++		kw['code'] = SNIP3
++
++	if not kw.get('success'): kw['success'] = None
++
++	assert 'msg' in kw, 'invalid parameters, read http://freehackers.org/~tnagy/wafbook/single.html#config_helpers_c'
++
++@conf
++def post_check(self, *k, **kw):
++	"set the variables after a test was run successfully"
++
++	is_success = False
++	if kw['execute']:
++		if kw['success'] is not None:
++			is_success = True
++	else:
++		is_success = (kw['success'] == 0)
++
++	if 'define_name' in kw:
++		if 'header_name' in kw or 'function_name' in kw or 'type_name' in kw or 'fragment' in kw:
++			if kw['execute']:
++				key = kw['success']
++				if isinstance(key, str):
++					if key:
++						self.define(kw['define_name'], key, quote=kw.get('quote', 1))
++					else:
++						self.define_cond(kw['define_name'], True)
++				else:
++					self.define_cond(kw['define_name'], False)
++			else:
++				self.define_cond(kw['define_name'], is_success)
++
++	if is_success and 'uselib_store' in kw:
++		import cc, cxx
++		for k in set(cc.g_cc_flag_vars).union(cxx.g_cxx_flag_vars):
++			lk = k.lower()
++			# inconsistency: includes -> CPPPATH
++			if k == 'CPPPATH': lk = 'includes'
++			if k == 'CXXDEFINES': lk = 'defines'
++			if k == 'CCDEFINES': lk = 'defines'
++			if lk in kw:
++				val = kw[lk]
++				# remove trailing slash
++				if isinstance(val, str):
++					val = val.rstrip(os.path.sep)
++				self.env.append_unique(k + '_' + kw['uselib_store'], val)
++
++@conf
++def check(self, *k, **kw):
++	# so this will be the generic function
++	# it will be safer to use check_cxx or check_cc
++	self.validate_c(kw)
++	self.check_message_1(kw['msg'])
++	ret = None
++	try:
++		ret = self.run_c_code(*k, **kw)
++	except Configure.ConfigurationError, e:
++		self.check_message_2(kw['errmsg'], 'YELLOW')
++		if 'mandatory' in kw and kw['mandatory']:
++			if Logs.verbose > 1:
++				raise
++			else:
++				self.fatal('the configuration failed (see %r)' % self.log.name)
++	else:
++		kw['success'] = ret
++		self.check_message_2(self.ret_msg(kw['okmsg'], kw))
++
++	self.post_check(*k, **kw)
++	if not kw.get('execute', False):
++		return ret == 0
++	return ret
++
++@conf
++def run_c_code(self, *k, **kw):
++	test_f_name = kw['compile_filename']
++
++	k = 0
++	while k < 10000:
++		# make certain to use a fresh folder - necessary for win32
++		dir = os.path.join(self.blddir, '.conf_check_%d' % k)
++
++		# if the folder already exists, remove it
++		try:
++			shutil.rmtree(dir)
++		except OSError:
++			pass
++
++		try:
++			os.stat(dir)
++		except OSError:
++			break
++
++		k += 1
++
++	try:
++		os.makedirs(dir)
++	except:
++		self.fatal('cannot create a configuration test folder %r' % dir)
++
++	try:
++		os.stat(dir)
++	except:
++		self.fatal('cannot use the configuration test folder %r' % dir)
++
++	bdir = os.path.join(dir, 'testbuild')
++
++	if not os.path.exists(bdir):
++		os.makedirs(bdir)
++
++	env = kw['env']
++
++	dest = open(os.path.join(dir, test_f_name), 'w')
++	dest.write(kw['code'])
++	dest.close()
++
++	back = os.path.abspath('.')
++
++	bld = Build.BuildContext()
++	bld.log = self.log
++	bld.all_envs.update(self.all_envs)
++	bld.all_envs['default'] = env
++	bld.lst_variants = bld.all_envs.keys()
++	bld.load_dirs(dir, bdir)
++
++	os.chdir(dir)
++
++	bld.rescan(bld.srcnode)
++
++	if not 'features' in kw:
++		# conf.check(features='cc cprogram pyext', ...)
++		kw['features'] = [kw['compile_mode'], kw['type']] # "cprogram cc"
++
++	o = bld(features=kw['features'], source=test_f_name, target='testprog')
++
++	for k, v in kw.iteritems():
++		setattr(o, k, v)
++
++	self.log.write("==>\n%s\n<==\n" % kw['code'])
++
++	# compile the program
++	try:
++		bld.compile()
++	except Utils.WafError:
++		ret = Utils.ex_stack()
++	else:
++		ret = 0
++
++	# chdir before returning
++	os.chdir(back)
++
++	if ret:
++		self.log.write('command returned %r' % ret)
++		self.fatal(str(ret))
++
++	# if we need to run the program, try to get its result
++	# keep the name of the program to execute
++	if kw['execute']:
++		lastprog = o.link_task.outputs[0].abspath(env)
++
++		args = Utils.to_list(kw.get('exec_args', []))
++		proc = Utils.pproc.Popen([lastprog] + args, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
++		(out, err) = proc.communicate()
++		w = self.log.write
++		w(str(out))
++		w('\n')
++		w(str(err))
++		w('\n')
++		w('returncode %r' % proc.returncode)
++		w('\n')
++		if proc.returncode:
++			self.fatal(Utils.ex_stack())
++		ret = out
++
++	return ret
++
++@conf
++def check_cxx(self, *k, **kw):
++	kw['compiler'] = 'cxx'
++	return self.check(*k, **kw)
++
++@conf
++def check_cc(self, *k, **kw):
++	kw['compiler'] = 'cc'
++	return self.check(*k, **kw)
++
++@conf
++def define(self, define, value, quote=1):
++	"""store a single define and its state into an internal list for later
++	   writing to a config header file.  Value can only be
++	   a string or int; other types not supported.  String
++	   values will appear properly quoted in the generated
++	   header file."""
++	assert define and isinstance(define, str)
++
++	# ordered_dict is for writing the configuration header in order
++	tbl = self.env[DEFINES] or Utils.ordered_dict()
++
++	# the user forgot to tell if the value is quoted or not
++	if isinstance(value, str):
++		if quote:
++			tbl[define] = '"%s"' % repr('"'+value)[2:-1].replace('"', '\\"')
++		else:
++			tbl[define] = value
++	elif isinstance(value, int):
++		tbl[define] = value
++	else:
++		raise TypeError('define %r -> %r must be a string or an int' % (define, value))
++
++	# add later to make reconfiguring faster
++	self.env[DEFINES] = tbl
++	self.env[define] = value # <- not certain this is necessary
++
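++# Define sketch (illustrative comment only): conf.define('PACKAGE', 'samba')
++# is later emitted as   #define PACKAGE "samba"   while
++# conf.define('NDEBUG', 1) yields   #define NDEBUG 1   - see
++# get_config_header() below for the actual emission.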
++@conf
++def undefine(self, define):
++	"""store a single define and its state into an internal list
++	   for later writing to a config header file"""
++	assert define and isinstance(define, str)
++
++	tbl = self.env[DEFINES] or Utils.ordered_dict()
++
++	value = UNDEFINED
++	tbl[define] = value
++
++	# add later to make reconfiguring faster
++	self.env[DEFINES] = tbl
++	self.env[define] = value
++
++@conf
++def define_cond(self, name, value):
++	"""Conditionally define a name.
++	Formally equivalent to: if value: define(name, 1) else: undefine(name)"""
++	if value:
++		self.define(name, 1)
++	else:
++		self.undefine(name)
++
++@conf
++def is_defined(self, key):
++	defines = self.env[DEFINES]
++	if not defines:
++		return False
++	try:
++		value = defines[key]
++	except KeyError:
++		return False
++	else:
++		return value != UNDEFINED
++
++@conf
++def get_define(self, define):
++	"get the value of a previously stored define"
++	try: return self.env[DEFINES][define]
++	except KeyError: return None
++
++@conf
++def have_define(self, name):
++	"prefix the define with 'HAVE_' and make sure it has valid characters."
++	return self.__dict__.get('HAVE_PAT', 'HAVE_%s') % Utils.quote_define_name(name)
++
++@conf
++def write_config_header(self, configfile='', env='', guard='', top=False):
++	"save the defines into a file"
++	if not configfile: configfile = WAF_CONFIG_H
++	waf_guard = guard or '_%s_WAF' % Utils.quote_define_name(configfile)
++
++	# configfile -> absolute path
++	# there is a good reason to concatenate first and to split afterwards
++	if not env: env = self.env
++	if top:
++		diff = ''
++	else:
++		diff = Utils.diff_path(self.srcdir, self.curdir)
++	full = os.sep.join([self.blddir, env.variant(), diff, configfile])
++	full = os.path.normpath(full)
++	(dir, base) = os.path.split(full)
++
++	try: os.makedirs(dir)
++	except: pass
++
++	dest = open(full, 'w')
++	dest.write('/* Configuration header created by Waf - do not edit */\n')
++	dest.write('#ifndef %s\n#define %s\n\n' % (waf_guard, waf_guard))
++
++	dest.write(self.get_config_header())
++
++	# config files are not removed on "waf clean"
++	env.append_unique(CFG_FILES, os.path.join(diff, configfile))
++
++	dest.write('\n#endif /* %s */\n' % waf_guard)
++	dest.close()
++
++@conf
++def get_config_header(self):
++	"""Fill-in the contents of the config header. Override when you need to write your own config header."""
++	config_header = []
++
++	tbl = self.env[DEFINES] or Utils.ordered_dict()
++	for key in tbl.allkeys:
++		value = tbl[key]
++		if value is None:
++			config_header.append('#define %s' % key)
++		elif value is UNDEFINED:
++			config_header.append('/* #undef %s */' % key)
++		else:
++			config_header.append('#define %s %s' % (key, value))
++	return "\n".join(config_header)
++
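++# Output sketch (illustrative comment only): for a stored HAVE_STDINT_H=1 and
++# an undefine()d HAVE_FOO, write_config_header('config.h') produces roughly:
++#   /* Configuration header created by Waf - do not edit */
++#   #ifndef _CONFIG_H_WAF
++#   #define _CONFIG_H_WAF
++#   #define HAVE_STDINT_H 1
++#   /* #undef HAVE_FOO */
++#   #endif /* _CONFIG_H_WAF */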
++@conftest
++def find_cpp(conf):
++	v = conf.env
++	cpp = []
++	if v['CPP']: cpp = v['CPP']
++	elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
++	if not cpp: cpp = conf.find_program('cpp', var='CPP')
++	#if not cpp: cpp = v['CC']
++	#if not cpp: cpp = v['CXX']
++	v['CPP'] = cpp
++
++@conftest
++def cc_add_flags(conf):
++	conf.add_os_flags('CFLAGS', 'CCFLAGS')
++	conf.add_os_flags('CPPFLAGS')
++
++@conftest
++def cxx_add_flags(conf):
++	conf.add_os_flags('CXXFLAGS')
++	conf.add_os_flags('CPPFLAGS')
++
++@conftest
++def link_add_flags(conf):
++	conf.add_os_flags('LINKFLAGS')
++	conf.add_os_flags('LDFLAGS', 'LINKFLAGS')
++
++@conftest
++def cc_load_tools(conf):
++	conf.check_tool('cc')
++
++@conftest
++def cxx_load_tools(conf):
++	conf.check_tool('cxx')
++
+diff --git a/buildtools/wafadmin/Tools/cs.py b/buildtools/wafadmin/Tools/cs.py
+new file mode 100644
+index 0000000..4354485
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/cs.py
+@@ -0,0 +1,68 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006 (ita)
++
++"C# support"
++
++import TaskGen, Utils, Task, Options
++from Logs import error
++from TaskGen import before, after, taskgen, feature
++
++flag_vars= ['FLAGS', 'ASSEMBLIES']
++
++@feature('cs')
++def init_cs(self):
++	Utils.def_attrs(self,
++		flags = '',
++		assemblies = '',
++		resources = '',
++		uselib = '')
++
++@feature('cs')
++@after('init_cs')
++def apply_uselib_cs(self):
++	if not self.uselib:
++		return
++	global flag_vars
++	for var in self.to_list(self.uselib):
++		for v in self.flag_vars:
++			val = self.env[v+'_'+var]
++			if val: self.env.append_value(v, val)
++
++@feature('cs')
++@after('apply_uselib_cs')
++@before('apply_core')
++def apply_cs(self):
++	try: self.meths.remove('apply_core')
++	except ValueError: pass
++
++	# process the flags for the assemblies
++	for i in self.to_list(self.assemblies) + self.env['ASSEMBLIES']:
++		self.env.append_unique('_ASSEMBLIES', '/r:'+i)
++
++	# process the flags for the resources
++	for i in self.to_list(self.resources):
++		self.env.append_unique('_RESOURCES', '/resource:'+i)
++
++	# what kind of assembly are we generating?
++	self.env['_TYPE'] = getattr(self, 'type', 'exe')
++
++	# additional flags
++	self.env.append_unique('_FLAGS', self.to_list(self.flags))
++	self.env.append_unique('_FLAGS', self.env.FLAGS)
++
++	# process the sources
++	nodes = [self.path.find_resource(i) for i in self.to_list(self.source)]
++	self.create_task('mcs', nodes, self.path.find_or_declare(self.target))
++
++Task.simple_task_type('mcs', '${MCS} ${SRC} /target:${_TYPE} /out:${TGT} ${_FLAGS} ${_ASSEMBLIES} ${_RESOURCES}', color='YELLOW')
++
++def detect(conf):
++	csc = getattr(Options.options, 'cscbinary', None)
++	if csc:
++		conf.env.MCS = csc
++	conf.find_program(['gmcs', 'mcs'], var='MCS')
++
++def set_options(opt):
++	opt.add_option('--with-csc-binary', type='string', dest='cscbinary')
++
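++# Usage sketch (illustrative comment only, hypothetical wscript):
++#   bld(features='cs', source='main.cs util.cs', target='hello.exe',
++#       type='exe', assemblies='System.Xml', flags='/optimize+')
++# compiles with gmcs/mcs into hello.exe; type='library' would build a dll
++# instead, and each assembly is passed to the compiler as /r:<name>.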
+diff --git a/buildtools/wafadmin/Tools/cxx.py b/buildtools/wafadmin/Tools/cxx.py
+new file mode 100644
+index 0000000..719b821
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/cxx.py
+@@ -0,0 +1,104 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2005 (ita)
++
++"Base for c++ programs and libraries"
++
++import TaskGen, Task, Utils
++from Logs import debug
++import ccroot # <- do not remove
++from TaskGen import feature, before, extension, after
++
++g_cxx_flag_vars = [
++'CXXDEPS', 'FRAMEWORK', 'FRAMEWORKPATH',
++'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH',
++'CXXFLAGS', 'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CXXDEFINES']
++"main cpp variables"
++
++EXT_CXX = ['.cpp', '.cc', '.cxx', '.C', '.c++']
++
++g_cxx_type_vars=['CXXFLAGS', 'LINKFLAGS']
++
++# TODO remove in waf 1.6
++class cxx_taskgen(ccroot.ccroot_abstract):
++	pass
++
++@feature('cxx')
++@before('apply_type_vars')
++@after('default_cc')
++def init_cxx(self):
++	if not 'cc' in self.features:
++		self.mappings['.c'] = TaskGen.task_gen.mappings['.cxx']
++
++	self.p_flag_vars = set(self.p_flag_vars).union(g_cxx_flag_vars)
++	self.p_type_vars = set(self.p_type_vars).union(g_cxx_type_vars)
++
++	if not self.env['CXX_NAME']:
++		raise Utils.WafError("At least one compiler (g++, ..) must be selected")
++
++@feature('cxx')
++@after('apply_incpaths')
++def apply_obj_vars_cxx(self):
++	"""after apply_incpaths for INC_PATHS"""
++	env = self.env
++	app = env.append_unique
++	cxxpath_st = env['CPPPATH_ST']
++
++	# local flags come first
++	# set the user-defined includes paths
++	for i in env['INC_PATHS']:
++		app('_CXXINCFLAGS', cxxpath_st % i.bldpath(env))
++		app('_CXXINCFLAGS', cxxpath_st % i.srcpath(env))
++
++	# set the library include paths
++	for i in env['CPPPATH']:
++		app('_CXXINCFLAGS', cxxpath_st % i)
++
++@feature('cxx')
++@after('apply_lib_vars')
++def apply_defines_cxx(self):
++	"""after uselib is set for CXXDEFINES"""
++	self.defines = getattr(self, 'defines', [])
++	lst = self.to_list(self.defines) + self.to_list(self.env['CXXDEFINES'])
++	milst = []
++
++	# now process the local defines
++	for defi in lst:
++		if not defi in milst:
++			milst.append(defi)
++
++	# CXXDEFINES_USELIB
++	libs = self.to_list(self.uselib)
++	for l in libs:
++		val = self.env['CXXDEFINES_'+l]
++		if val: milst += self.to_list(val)
++
++	self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]]
++	y = self.env['CXXDEFINES_ST']
++	self.env.append_unique('_CXXDEFFLAGS', [y%x for x in milst])
++
++@extension(EXT_CXX)
++def cxx_hook(self, node):
++	# create the c++ compilation task
++	if getattr(self, 'obj_ext', None):
++		obj_ext = self.obj_ext
++	else:
++		obj_ext = '_%d.o' % self.idx
++
++	task = self.create_task('cxx', node, node.change_ext(obj_ext))
++	try:
++		self.compiled_tasks.append(task)
++	except AttributeError:
++		raise Utils.WafError('Have you forgotten to set the feature "cxx" on %s?' % str(self))
++	return task
++
++cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}'
++cls = Task.simple_task_type('cxx', cxx_str, color='GREEN', ext_out='.o', ext_in='.cxx', shell=False)
++cls.scan = ccroot.scan
++cls.vars.append('CXXDEPS')
++
++link_str = '${LINK_CXX} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}'
++cls = Task.simple_task_type('cxx_link', link_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
++cls.maxjobs = 1
++cls.install = Utils.nada
++
+diff --git a/buildtools/wafadmin/Tools/d.py b/buildtools/wafadmin/Tools/d.py
+new file mode 100644
+index 0000000..1a22821
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/d.py
+@@ -0,0 +1,535 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Carlos Rafael Giani, 2007 (dv)
++# Thomas Nagy, 2007-2008 (ita)
++
++import os, sys, re, optparse
++import ccroot # <- leave this
++import TaskGen, Utils, Task, Configure, Logs, Build
++from Logs import debug, error
++from TaskGen import taskgen, feature, after, before, extension
++from Configure import conftest
++
++EXT_D = ['.d', '.di', '.D']
++D_METHS = ['apply_core', 'apply_vnum', 'apply_objdeps'] # additional d methods
++
++DLIB = """
++version(D_Version2) {
++	import std.stdio;
++	int main() {
++		writefln("phobos2");
++		return 0;
++	}
++} else {
++	version(Tango) {
++		import tango.stdc.stdio;
++		int main() {
++			printf("tango");
++			return 0;
++		}
++	} else {
++		import std.stdio;
++		int main() {
++			writefln("phobos1");
++			return 0;
++		}
++	}
++}
++"""
++
++def filter_comments(filename):
++	txt = Utils.readf(filename)
++	i = 0
++	buf = []
++	max = len(txt)
++	begin = 0
++	while i < max:
++		c = txt[i]
++		if c == '"' or c == "'":  # skip a string or character literal
++			buf.append(txt[begin:i])
++			delim = c
++			i += 1
++			while i < max:
++				c = txt[i]
++				if c == delim: break
++				elif c == '\\':  # skip the character following backslash
++					i += 1
++				i += 1
++			i += 1
++			begin = i
++		elif c == '/':  # try to replace a comment with whitespace
++			buf.append(txt[begin:i])
++			i += 1
++			if i == max: break
++			c = txt[i]
++			if c == '+':  # eat nesting /+ +/ comment
++				i += 1
++				nesting = 1
++				c = None
++				while i < max:
++					prev = c
++					c = txt[i]
++					if prev == '/' and c == '+':
++						nesting += 1
++						c = None
++					elif prev == '+' and c == '/':
++						nesting -= 1
++						if nesting == 0: break
++						c = None
++					i += 1
++			elif c == '*':  # eat /* */ comment
++				i += 1
++				c = None
++				while i < max:
++					prev = c
++					c = txt[i]
++					if prev == '*' and c == '/': break
++					i += 1
++			elif c == '/':  # eat // comment
++				i += 1
++				while i < max and txt[i] != '\n':
++					i += 1
++			else:  # no comment
++				begin = i - 1
++				continue
++			i += 1
++			begin = i
++			buf.append(' ')
++		else:
++			i += 1
++	buf.append(txt[begin:])
++	return buf
++
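++# Filtering sketch (illustrative comment only): given the D source line
++#   int a; /+ outer /+ nested +/ +/ string s = "/+ not a comment +/"; // tail
++# filter_comments() keeps the declarations and the string literal intact and
++# replaces each comment (including the nested /+ +/ form) with whitespace, so
++# the import scanner below never matches text inside comments or strings.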
++class d_parser(object):
++	def __init__(self, env, incpaths):
++		#self.code = ''
++		#self.module = ''
++		#self.imports = []
++
++		self.allnames = []
++
++		self.re_module = re.compile("module\s+([^;]+)")
++		self.re_import = re.compile("import\s+([^;]+)")
++		self.re_import_bindings = re.compile("([^:]+):(.*)")
++		self.re_import_alias = re.compile("[^=]+=(.+)")
++
++		self.env = env
++
++		self.nodes = []
++		self.names = []
++
++		self.incpaths = incpaths
++
++	def tryfind(self, filename):
++		found = 0
++		for n in self.incpaths:
++			found = n.find_resource(filename.replace('.', '/') + '.d')
++			if found:
++				self.nodes.append(found)
++				self.waiting.append(found)
++				break
++		if not found:
++			if not filename in self.names:
++				self.names.append(filename)
++
++	def get_strings(self, code):
++		#self.imports = []
++		self.module = ''
++		lst = []
++
++		# get the module name (if present)
++
++		mod_name = self.re_module.search(code)
++		if mod_name:
++			self.module = re.sub('\s+', '', mod_name.group(1)) # strip all whitespaces
++
++		# go through the code, have a look at all import occurrences
++
++		# first, let's look at anything beginning with "import" and ending with ";"
++		import_iterator = self.re_import.finditer(code)
++		if import_iterator:
++			for import_match in import_iterator:
++				import_match_str = re.sub('\s+', '', import_match.group(1)) # strip all whitespaces
++
++				# does this end with an import bindings declaration?
++				# (import bindings always terminate the list of imports)
++				bindings_match = self.re_import_bindings.match(import_match_str)
++				if bindings_match:
++					import_match_str = bindings_match.group(1)
++					# if so, extract the part before the ":" (since the module declaration(s) is/are located there)
++
++				# split the matching string into a bunch of strings, separated by a comma
++				matches = import_match_str.split(',')
++
++				for match in matches:
++					alias_match = self.re_import_alias.match(match)
++					if alias_match:
++						# is this an alias declaration? (alias = module name) if so, extract the module name
++						match = alias_match.group(1)
++
++					lst.append(match)
++		return lst
++
++	def start(self, node):
++		self.waiting = [node]
++		# while the stack is not empty, add the dependencies
++		while self.waiting:
++			nd = self.waiting.pop(0)
++			self.iter(nd)
++
++	def iter(self, node):
++		path = node.abspath(self.env) # obtain the absolute path
++		code = "".join(filter_comments(path)) # read the file and filter the comments
++		names = self.get_strings(code) # obtain the import strings
++		for x in names:
++			# optimization
++			if x in self.allnames: continue
++			self.allnames.append(x)
++
++			# for each name, see if it is like a node or not
++			self.tryfind(x)
++
++def scan(self):
++	"look for .d/.di the .d source need"
++	env = self.env
++	gruik = d_parser(env, env['INC_PATHS'])
++	gruik.start(self.inputs[0])
++
++	if Logs.verbose:
++		debug('deps: nodes found for %s: %s %s' % (str(self.inputs[0]), str(gruik.nodes), str(gruik.names)))
++		#debug("deps found for %s: %s" % (str(node), str(gruik.deps)), 'deps')
++	return (gruik.nodes, gruik.names)
++
++def get_target_name(self):
++	"for d programs and libs"
++	v = self.env
++	tp = 'program'
++	for x in self.features:
++		if x in ['dshlib', 'dstaticlib']:
++			tp = x.lstrip('d')
++	return v['D_%s_PATTERN' % tp] % self.target
++
++d_params = {
++'dflags': '',
++'importpaths':'',
++'libs':'',
++'libpaths':'',
++'generate_headers':False,
++}
++
++@feature('d')
++@before('apply_type_vars')
++def init_d(self):
++	for x in d_params:
++		setattr(self, x, getattr(self, x, d_params[x]))
++
++class d_taskgen(TaskGen.task_gen):
++	def __init__(self, *k, **kw):
++		TaskGen.task_gen.__init__(self, *k, **kw)
++
++		# COMPAT
++		if len(k) > 1:
++			self.features.append('d' + k[1])
++
++# okay, we borrow a few methods from ccroot
++TaskGen.bind_feature('d', D_METHS)
++
++@feature('d')
++@before('apply_d_libs')
++def init_d(self):
++	Utils.def_attrs(self,
++		dflags='',
++		importpaths='',
++		libs='',
++		libpaths='',
++		uselib='',
++		uselib_local='',
++		generate_headers=False, # set to true if you want .di files as well as .o
++		compiled_tasks=[],
++		add_objects=[],
++		link_task=None)
++
++@feature('d')
++@after('apply_d_link', 'init_d')
++@before('apply_vnum', 'apply_d_vars')
++def apply_d_libs(self):
++	"""after apply_link because of 'link_task'
++	after default_cc because of the attribute 'uselib'"""
++	env = self.env
++
++	# 1. the case of the libs defined in the project (visit ancestors first)
++	# the ancestors external libraries (uselib) will be prepended
++	self.uselib = self.to_list(self.uselib)
++	names = self.to_list(self.uselib_local)
++
++	seen = set([])
++	tmp = Utils.deque(names) # consume a copy of the list of names
++	while tmp:
++		lib_name = tmp.popleft()
++		# visit dependencies only once
++		if lib_name in seen:
++			continue
++
++		y = self.name_to_obj(lib_name)
++		if not y:
++			raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name))
++		y.post()
++		seen.add(lib_name)
++
++		# object has ancestors to process (shared libraries): add them to the end of the list
++		if getattr(y, 'uselib_local', None):
++			lst = y.to_list(y.uselib_local)
++			if 'dshlib' in y.features or 'dprogram' in y.features:
++				lst = [x for x in lst if not 'dstaticlib' in self.name_to_obj(x).features]
++			tmp.extend(lst)
++
++		# link task and flags
++		if getattr(y, 'link_task', None):
++
++			link_name = y.target[y.target.rfind(os.sep) + 1:]
++			if 'dstaticlib' in y.features or 'dshlib' in y.features:
++				env.append_unique('DLINKFLAGS', env.DLIB_ST % link_name)
++				env.append_unique('DLINKFLAGS', env.DLIBPATH_ST % y.link_task.outputs[0].parent.bldpath(env))
++
++			# the order
++			self.link_task.set_run_after(y.link_task)
++
++			# for the recompilation
++			dep_nodes = getattr(self.link_task, 'dep_nodes', [])
++			self.link_task.dep_nodes = dep_nodes + y.link_task.outputs
++
++		# add ancestors uselib too - but only propagate those that have no staticlib
++		for v in self.to_list(y.uselib):
++			if not v in self.uselib:
++				self.uselib.insert(0, v)
++
++		# if the library task generator provides 'export_incdirs', add to the include path
++		# the export_incdirs must be a list of paths relative to the other library
++		if getattr(y, 'export_incdirs', None):
++			for x in self.to_list(y.export_incdirs):
++				node = y.path.find_dir(x)
++				if not node:
++					raise Utils.WafError('object %r: invalid folder %r in export_incdirs' % (y.target, x))
++				self.env.append_unique('INC_PATHS', node)
++
++@feature('dprogram', 'dshlib', 'dstaticlib')
++@after('apply_core')
++def apply_d_link(self):
++	link = getattr(self, 'link', None)
++	if not link:
++		if 'dstaticlib' in self.features: link = 'static_link'
++		else: link = 'd_link'
++
++	outputs = [t.outputs[0] for t in self.compiled_tasks]
++	self.link_task = self.create_task(link, outputs, self.path.find_or_declare(get_target_name(self)))
++
++@feature('d')
++@after('apply_core')
++def apply_d_vars(self):
++	env = self.env
++	dpath_st   = env['DPATH_ST']
++	lib_st	 = env['DLIB_ST']
++	libpath_st = env['DLIBPATH_ST']
++
++	importpaths = self.to_list(self.importpaths)
++	libpaths = []
++	libs = []
++	uselib = self.to_list(self.uselib)
++
++	for i in uselib:
++		if env['DFLAGS_' + i]:
++			env.append_unique('DFLAGS', env['DFLAGS_' + i])
++
++	for x in self.features:
++		if not x in ['dprogram', 'dstaticlib', 'dshlib']:
++			continue
++		x = x.lstrip('d')
++		d_shlib_dflags = env['D_' + x + '_DFLAGS']
++		if d_shlib_dflags:
++			env.append_unique('DFLAGS', d_shlib_dflags)
++
++	# add import paths
++	for i in uselib:
++		if env['DPATH_' + i]:
++			for entry in self.to_list(env['DPATH_' + i]):
++				if not entry in importpaths:
++					importpaths.append(entry)
++
++	# now process the import paths
++	for path in importpaths:
++		if os.path.isabs(path):
++			env.append_unique('_DIMPORTFLAGS', dpath_st % path)
++		else:
++			node = self.path.find_dir(path)
++			self.env.append_unique('INC_PATHS', node)
++			env.append_unique('_DIMPORTFLAGS', dpath_st % node.srcpath(env))
++			env.append_unique('_DIMPORTFLAGS', dpath_st % node.bldpath(env))
++
++	# add library paths
++	for i in uselib:
++		if env['LIBPATH_' + i]:
++			for entry in self.to_list(env['LIBPATH_' + i]):
++				if not entry in libpaths:
++					libpaths.append(entry)
++	libpaths = self.to_list(self.libpaths) + libpaths
++
++	# now process the library paths
++	# apply same path manipulation as used with import paths
++	for path in libpaths:
++		if not os.path.isabs(path):
++			node = self.path.find_resource(path)
++			if not node:
++				raise Utils.WafError('could not find libpath %r from %r' % (path, self))
++			path = node.abspath(self.env)
++
++		env.append_unique('DLINKFLAGS', libpath_st % path)
++
++	# add libraries
++	for i in uselib:
++		if env['LIB_' + i]:
++			for entry in self.to_list(env['LIB_' + i]):
++				if not entry in libs:
++					libs.append(entry)
++	libs.extend(self.to_list(self.libs))
++
++	# process user flags
++	for flag in self.to_list(self.dflags):
++		env.append_unique('DFLAGS', flag)
++
++	# now process the libraries
++	for lib in libs:
++		env.append_unique('DLINKFLAGS', lib_st % lib)
++
++	# add linker flags
++	for i in uselib:
++		dlinkflags = env['DLINKFLAGS_' + i]
++		if dlinkflags:
++			for linkflag in dlinkflags:
++				env.append_unique('DLINKFLAGS', linkflag)
++
++@feature('dshlib')
++@after('apply_d_vars')
++def add_shlib_d_flags(self):
++	for linkflag in self.env['D_shlib_LINKFLAGS']:
++		self.env.append_unique('DLINKFLAGS', linkflag)
++
++@extension(EXT_D)
++def d_hook(self, node):
++	# create the compilation task: cpp or cc
++	task = self.create_task(self.generate_headers and 'd_with_header' or 'd')
++	try: obj_ext = self.obj_ext
++	except AttributeError: obj_ext = '_%d.o' % self.idx
++
++	task.inputs = [node]
++	task.outputs = [node.change_ext(obj_ext)]
++	self.compiled_tasks.append(task)
++
++	if self.generate_headers:
++		header_node = node.change_ext(self.env['DHEADER_ext'])
++		task.outputs += [header_node]
++
++d_str = '${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} ${D_SRC_F}${SRC} ${D_TGT_F}${TGT}'
++d_with_header_str = '${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} \
++${D_HDR_F}${TGT[1].bldpath(env)} \
++${D_SRC_F}${SRC} \
++${D_TGT_F}${TGT[0].bldpath(env)}'
++link_str = '${D_LINKER} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F}${TGT} ${DLINKFLAGS}'
++
++def override_exec(cls):
++	"""stupid dmd wants -of stuck to the file name"""
++	old_exec = cls.exec_command
++	def exec_command(self, *k, **kw):
++		if isinstance(k[0], list):
++			lst = k[0]
++			for i in xrange(len(lst)):
++				if lst[i] == '-of':
++					del lst[i]
++					lst[i] = '-of' + lst[i]
++					break
++		return old_exec(self, *k, **kw)
++	cls.exec_command = exec_command
++
++cls = Task.simple_task_type('d', d_str, 'GREEN', before='static_link d_link', shell=False)
++cls.scan = scan
++override_exec(cls)
++
++cls = Task.simple_task_type('d_with_header', d_with_header_str, 'GREEN', before='static_link d_link', shell=False)
++override_exec(cls)
++
++cls = Task.simple_task_type('d_link', link_str, color='YELLOW', shell=False)
++override_exec(cls)
++
++# for feature request #104
++@taskgen
++def generate_header(self, filename, install_path):
++	if not hasattr(self, 'header_lst'): self.header_lst = []
++	self.meths.append('process_header')
++	self.header_lst.append([filename, install_path])
++
++@before('apply_core')
++def process_header(self):
++	env = self.env
++	for i in getattr(self, 'header_lst', []):
++		node = self.path.find_resource(i[0])
++
++		if not node:
++			raise Utils.WafError('file not found on d obj '+i[0])
++
++		task = self.create_task('d_header')
++		task.set_inputs(node)
++		task.set_outputs(node.change_ext('.di'))
++
++d_header_str = '${D_COMPILER} ${D_HEADER} ${SRC}'
++Task.simple_task_type('d_header', d_header_str, color='BLUE', shell=False)
++
++@conftest
++def d_platform_flags(conf):
++	v = conf.env
++	binfmt = v.DEST_BINFMT or Utils.unversioned_sys_platform_to_binary_format(
++		v.DEST_OS or Utils.unversioned_sys_platform())
++	if binfmt == 'pe':
++		v['D_program_PATTERN']   = '%s.exe'
++		v['D_shlib_PATTERN']	 = 'lib%s.dll'
++		v['D_staticlib_PATTERN'] = 'lib%s.a'
++	else:
++		v['D_program_PATTERN']   = '%s'
++		v['D_shlib_PATTERN']	 = 'lib%s.so'
++		v['D_staticlib_PATTERN'] = 'lib%s.a'
++
++@conftest
++def check_dlibrary(conf):
++	ret = conf.check_cc(features='d dprogram', fragment=DLIB, mandatory=True, compile_filename='test.d', execute=True)
++	conf.env.DLIBRARY = ret.strip()
++
++# quick test #
++if __name__ == "__main__":
++	#Logs.verbose = 2
++
++	try: arg = sys.argv[1]
++	except IndexError: arg = "file.d"
++
++	print("".join(filter_comments(arg)))
++	# TODO
++	paths = ['.']
++
++	#gruik = filter()
++	#gruik.start(arg)
++
++	#code = "".join(gruik.buf)
++
++	#print "we have found the following code"
++	#print code
++
++	#print "now parsing"
++	#print "-------------------------------------------"
++	"""
++	parser_ = d_parser()
++	parser_.start(arg)
++
++	print "module: %s" % parser_.module
++	print "imports: ",
++	for imp in parser_.imports:
++		print imp + " ",
++	print
++"""
++
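
A minimal sketch of how the d tool above is driven from a project wscript,
assuming waf 1.5 conventions (file and target names are hypothetical):

    def configure(conf):
        conf.check_tool('dmd')   # or 'gdc'; both load the 'd' tool in turn

    def build(bld):
        # 'd' triggers apply_d_vars, 'dprogram' triggers apply_d_link
        bld(features='d dprogram', source='main.d', target='hello')
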
+diff --git a/buildtools/wafadmin/Tools/dbus.py b/buildtools/wafadmin/Tools/dbus.py
+new file mode 100644
+index 0000000..3179999
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/dbus.py
+@@ -0,0 +1,34 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Ali Sabil, 2007
++
++import Task, Utils
++from TaskGen import taskgen, before, after, feature
++
++@taskgen
++def add_dbus_file(self, filename, prefix, mode):
++	if not hasattr(self, 'dbus_lst'):
++		self.dbus_lst = []
++	self.meths.append('process_dbus')
++	self.dbus_lst.append([filename, prefix, mode])
++
++@before('apply_core')
++def process_dbus(self):
++	for filename, prefix, mode in getattr(self, 'dbus_lst', []):
++		node = self.path.find_resource(filename)
++
++		if not node:
++			raise Utils.WafError('file not found ' + filename)
++
++		tsk = self.create_task('dbus_binding_tool', node, node.change_ext('.h'))
++
++		tsk.env.DBUS_BINDING_TOOL_PREFIX = prefix
++		tsk.env.DBUS_BINDING_TOOL_MODE   = mode
++
++Task.simple_task_type('dbus_binding_tool',
++	'${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}',
++	color='BLUE', before='cc')
++
++def detect(conf):
++	dbus_binding_tool = conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')
++
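
A hedged usage sketch for add_dbus_file (file and prefix names hypothetical):
the method only records the request; process_dbus then creates the header
generation task before the C sources are compiled:

    def build(bld):
        obj = bld(features='cc cprogram', source='main.c', target='service')
        # produces my-service.h from the introspection XML before any cc task
        obj.add_dbus_file('my-service.xml', 'my_service', 'glib-server')
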
+diff --git a/buildtools/wafadmin/Tools/dmd.py b/buildtools/wafadmin/Tools/dmd.py
+new file mode 100644
+index 0000000..9c74908
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/dmd.py
+@@ -0,0 +1,64 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Carlos Rafael Giani, 2007 (dv)
++# Thomas Nagy, 2008 (ita)
++
++import sys
++import Utils, ar
++from Configure import conftest
++
++@conftest
++def find_dmd(conf):
++	conf.find_program(['dmd', 'ldc'], var='D_COMPILER', mandatory=True)
++
++@conftest
++def common_flags_ldc(conf):
++	v = conf.env
++	v['DFLAGS']         = ['-d-version=Posix']
++	v['DLINKFLAGS']     = []
++	v['D_shlib_DFLAGS'] = ['-relocation-model=pic']
++
++@conftest
++def common_flags_dmd(conf):
++	v = conf.env
++
++	# _DFLAGS _DIMPORTFLAGS
++
++	# Compiler is dmd so 'gdc' part will be ignored, just
++	# ensure key is there, so wscript can append flags to it
++	v['DFLAGS']            = ['-version=Posix']
++
++	v['D_SRC_F']           = ''
++	v['D_TGT_F']           = ['-c', '-of']
++	v['DPATH_ST']          = '-I%s' # template for adding import paths
++
++	# linker
++	v['D_LINKER']          = v['D_COMPILER']
++	v['DLNK_SRC_F']        = ''
++	v['DLNK_TGT_F']        = '-of'
++
++	v['DLIB_ST']           = '-L-l%s' # template for adding libs
++	v['DLIBPATH_ST']       = '-L-L%s' # template for adding libpaths
++
++	# linker debug levels
++	v['DFLAGS_OPTIMIZED']  = ['-O']
++	v['DFLAGS_DEBUG']      = ['-g', '-debug']
++	v['DFLAGS_ULTRADEBUG'] = ['-g', '-debug']
++	v['DLINKFLAGS']        = ['-quiet']
++
++	v['D_shlib_DFLAGS']    = ['-fPIC']
++	v['D_shlib_LINKFLAGS'] = ['-L-shared']
++
++	v['DHEADER_ext']       = '.di'
++	v['D_HDR_F']           = ['-H', '-Hf']
++
++def detect(conf):
++	conf.find_dmd()
++	conf.check_tool('ar')
++	conf.check_tool('d')
++	conf.common_flags_dmd()
++	conf.d_platform_flags()
++
++	if conf.env.D_COMPILER.find('ldc') > -1:
++		conf.common_flags_ldc()
++
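
The doubled -L in DLIB_ST and DLIBPATH_ST above is deliberate: dmd forwards
anything after -L verbatim to the platform linker. Roughly, apply_d_vars
expands the templates like this:

    env['DLIB_ST'] % 'phobos'        # -> '-L-lphobos'  (dmd passes -lphobos on)
    env['DLIBPATH_ST'] % '/opt/lib'  # -> '-L-L/opt/lib'
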
+diff --git a/buildtools/wafadmin/Tools/flex.py b/buildtools/wafadmin/Tools/flex.py
+new file mode 100644
+index 0000000..5ce9f22
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/flex.py
+@@ -0,0 +1,25 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# John O'Meara, 2006
++# Thomas Nagy, 2006-2008
++
++"Flex processing"
++
++import TaskGen
++
++def decide_ext(self, node):
++	if 'cxx' in self.features: return '.lex.cc'
++	else: return '.lex.c'
++
++TaskGen.declare_chain(
++	name = 'flex',
++	rule = '${FLEX} -o${TGT} ${FLEXFLAGS} ${SRC}',
++	ext_in = '.l',
++	ext_out = '.c .cxx',
++	decider = decide_ext
++)
++
++def detect(conf):
++	conf.find_program('flex', var='FLEX', mandatory=True)
++	conf.env['FLEXFLAGS'] = ''
++
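
Because declare_chain registers the transformation globally, no explicit task
is needed; listing a .l file among the sources is enough. A sketch (file names
hypothetical):

    def configure(conf):
        conf.check_tool('gcc flex')

    def build(bld):
        # scanner.l -> scanner.lex.c (via decide_ext), then compiled as C
        bld(features='cc cprogram', source='scanner.l main.c', target='scan')
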
+diff --git a/buildtools/wafadmin/Tools/gas.py b/buildtools/wafadmin/Tools/gas.py
+new file mode 100644
+index 0000000..c983b0a
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/gas.py
+@@ -0,0 +1,38 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2008 (ita)
++
++"as and gas"
++
++import os, sys
++import Task
++from TaskGen import extension, taskgen, after, before
++
++EXT_ASM = ['.s', '.S', '.asm', '.ASM', '.spp', '.SPP']
++
++as_str = '${AS} ${ASFLAGS} ${_ASINCFLAGS} ${SRC} -o ${TGT}'
++Task.simple_task_type('asm', as_str, 'PINK', ext_out='.o', shell=False)
++
++@extension(EXT_ASM)
++def asm_hook(self, node):
++	# create the compilation task: cpp or cc
++	try: obj_ext = self.obj_ext
++	except AttributeError: obj_ext = '_%d.o' % self.idx
++
++	task = self.create_task('asm', node, node.change_ext(obj_ext))
++	self.compiled_tasks.append(task)
++	self.meths.append('asm_incflags')
++
++@after('apply_obj_vars_cc')
++@after('apply_obj_vars_cxx')
++@before('apply_link')
++def asm_incflags(self):
++	self.env.append_value('_ASINCFLAGS', self.env.ASINCFLAGS)
++	var = ('cxx' in self.features) and 'CXX' or 'CC'
++	self.env.append_value('_ASINCFLAGS', self.env['_%sINCFLAGS' % var])
++
++def detect(conf):
++	conf.find_program(['gas', 'as'], var='AS')
++	if not conf.env.AS: conf.env.AS = conf.env.CC
++	#conf.env.ASFLAGS = ['-c'] <- may be necessary for .S files
++
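
Assembly sources are picked up the same way through the extension hook; a
sketch, assuming a C project (file names hypothetical):

    def configure(conf):
        conf.check_tool('gcc gas')   # gas falls back to CC when no 'as' exists

    def build(bld):
        bld(features='cc cprogram', source='start.S main.c', target='prog')
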
+diff --git a/buildtools/wafadmin/Tools/gcc.py b/buildtools/wafadmin/Tools/gcc.py
+new file mode 100644
+index 0000000..420b44f
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/gcc.py
+@@ -0,0 +1,135 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006-2008 (ita)
++# Ralf Habacker, 2006 (rh)
++# Yinon Ehrlich, 2009
++
++import os, sys
++import Configure, Options, Utils
++import ccroot, ar
++from Configure import conftest
++
++@conftest
++def find_gcc(conf):
++	cc = conf.find_program(['gcc', 'cc'], var='CC', mandatory=True)
++	cc = conf.cmd_to_list(cc)
++	ccroot.get_cc_version(conf, cc, gcc=True)
++	conf.env.CC_NAME = 'gcc'
++	conf.env.CC      = cc
++
++@conftest
++def gcc_common_flags(conf):
++	v = conf.env
++
++	# CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
++
++	v['CCFLAGS_DEBUG'] = ['-g']
++
++	v['CCFLAGS_RELEASE'] = ['-O2']
++
++	v['CC_SRC_F']            = ''
++	v['CC_TGT_F']            = ['-c', '-o', ''] # shell hack for -MD
++	v['CPPPATH_ST']          = '-I%s' # template for adding include paths
++
++	# linker
++	if not v['LINK_CC']: v['LINK_CC'] = v['CC']
++	v['CCLNK_SRC_F']         = ''
++	v['CCLNK_TGT_F']         = ['-o', ''] # shell hack for -MD
++
++	v['LIB_ST']              = '-l%s' # template for adding libs
++	v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
++	v['STATICLIB_ST']        = '-l%s'
++	v['STATICLIBPATH_ST']    = '-L%s'
++	v['RPATH_ST']            = '-Wl,-rpath,%s'
++	v['CCDEFINES_ST']        = '-D%s'
++
++	v['SONAME_ST']           = '-Wl,-h,%s'
++	v['SHLIB_MARKER']        = '-Wl,-Bdynamic'
++	v['STATICLIB_MARKER']    = '-Wl,-Bstatic'
++	v['FULLSTATIC_MARKER']   = '-static'
++
++	# program
++	v['program_PATTERN']     = '%s'
++
++	# shared library
++	v['shlib_CCFLAGS']       = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC already defines the __PIC__ macro
++	v['shlib_LINKFLAGS']     = ['-shared']
++	v['shlib_PATTERN']       = 'lib%s.so'
++
++	# static lib
++	v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic']
++	v['staticlib_PATTERN']   = 'lib%s.a'
++
++	# osx stuff
++	v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
++	v['CCFLAGS_MACBUNDLE']   = ['-fPIC']
++	v['macbundle_PATTERN']   = '%s.bundle'
++
++@conftest
++def gcc_modifier_win32(conf):
++	v = conf.env
++	v['program_PATTERN']     = '%s.exe'
++
++	v['shlib_PATTERN']       = '%s.dll'
++	v['implib_PATTERN']      = 'lib%s.dll.a'
++	v['IMPLIB_ST']           = '-Wl,--out-implib,%s'
++
++	dest_arch = v['DEST_CPU']
++	v['shlib_CCFLAGS'] = ['-DPIC']
++
++	v.append_value('shlib_CCFLAGS', '-DDLL_EXPORT') # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea
++
++	# Auto-import is enabled by default even without this option,
++	# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
++	# that the linker emits otherwise.
++	v.append_value('LINKFLAGS', '-Wl,--enable-auto-import')
++
++@conftest
++def gcc_modifier_cygwin(conf):
++	gcc_modifier_win32(conf)
++	v = conf.env
++	v['shlib_PATTERN']       = 'cyg%s.dll'
++	v.append_value('shlib_LINKFLAGS', '-Wl,--enable-auto-image-base')
++
++@conftest
++def gcc_modifier_darwin(conf):
++	v = conf.env
++	v['shlib_CCFLAGS']       = ['-fPIC', '-compatibility_version', '1', '-current_version', '1']
++	v['shlib_LINKFLAGS']     = ['-dynamiclib']
++	v['shlib_PATTERN']       = 'lib%s.dylib'
++
++	v['staticlib_LINKFLAGS'] = []
++
++	v['SHLIB_MARKER']        = ''
++	v['STATICLIB_MARKER']    = ''
++	v['SONAME_ST']           = ''
++
++@conftest
++def gcc_modifier_aix(conf):
++	v = conf.env
++	v['program_LINKFLAGS']   = ['-Wl,-brtl']
++
++	v['shlib_LINKFLAGS']     = ['-shared','-Wl,-brtl,-bexpfull']
++
++	v['SHLIB_MARKER']        = ''
++
++@conftest
++def gcc_modifier_platform(conf):
++	# * set configurations specific for a platform.
++	# * the destination platform is detected automatically by looking at the macros the compiler predefines,
++	#   and if it's not recognised, it falls back to sys.platform.
++	dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
++	gcc_modifier_func = globals().get('gcc_modifier_' + dest_os)
++	if gcc_modifier_func:
++			gcc_modifier_func(conf)
++
++def detect(conf):
++	conf.find_gcc()
++	conf.find_cpp()
++	conf.find_ar()
++	conf.gcc_common_flags()
++	conf.gcc_modifier_platform()
++	conf.cc_load_tools()
++	conf.cc_add_flags()
++	conf.link_add_flags()
++
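
Most of gcc_common_flags consists of '%s' templates that ccroot expands when
assembling the command line; roughly:

    env['LIB_ST'] % 'z'             # -> '-lz'
    env['LIBPATH_ST'] % '/opt/lib'  # -> '-L/opt/lib'
    env['shlib_PATTERN'] % 'foo'    # -> 'libfoo.so' ('%s.dll' after gcc_modifier_win32)
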
+diff --git a/buildtools/wafadmin/Tools/gdc.py b/buildtools/wafadmin/Tools/gdc.py
+new file mode 100644
+index 0000000..4d2a321
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/gdc.py
+@@ -0,0 +1,52 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Carlos Rafael Giani, 2007 (dv)
++
++import sys
++import Utils, ar
++from Configure import conftest
++
++@conftest
++def find_gdc(conf):
++	conf.find_program('gdc', var='D_COMPILER', mandatory=True)
++
++@conftest
++def common_flags_gdc(conf):
++	v = conf.env
++
++	# _DFLAGS _DIMPORTFLAGS
++
++	# for more info about the meaning of this dict see dmd.py
++	v['DFLAGS']            = []
++
++	v['D_SRC_F']           = ''
++	v['D_TGT_F']           = ['-c', '-o', '']
++	v['DPATH_ST']          = '-I%s' # template for adding import paths
++
++	# linker
++	v['D_LINKER']          = v['D_COMPILER']
++	v['DLNK_SRC_F']        = ''
++	v['DLNK_TGT_F']        = ['-o', '']
++
++	v['DLIB_ST']           = '-l%s' # template for adding libs
++	v['DLIBPATH_ST']       = '-L%s' # template for adding libpaths
++
++	# debug levels
++	v['DLINKFLAGS']        = []
++	v['DFLAGS_OPTIMIZED']  = ['-O3']
++	v['DFLAGS_DEBUG']      = ['-O0']
++	v['DFLAGS_ULTRADEBUG'] = ['-O0']
++
++	v['D_shlib_DFLAGS']    = []
++	v['D_shlib_LINKFLAGS'] = ['-shared']
++
++	v['DHEADER_ext']       = '.di'
++	v['D_HDR_F']           = '-fintfc -fintfc-file='
++
++def detect(conf):
++	conf.find_gdc()
++	conf.check_tool('ar')
++	conf.check_tool('d')
++	conf.common_flags_gdc()
++	conf.d_platform_flags()
++
+diff --git a/buildtools/wafadmin/Tools/glib2.py b/buildtools/wafadmin/Tools/glib2.py
+new file mode 100644
+index 0000000..042d612
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/glib2.py
+@@ -0,0 +1,164 @@
++#! /usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006-2008 (ita)
++
++"GLib2 support"
++
++import Task, Utils
++from TaskGen import taskgen, before, after, feature
++
++#
++# glib-genmarshal
++#
++
++@taskgen
++def add_marshal_file(self, filename, prefix):
++	if not hasattr(self, 'marshal_list'):
++		self.marshal_list = []
++	self.meths.append('process_marshal')
++	self.marshal_list.append((filename, prefix))
++
++@before('apply_core')
++def process_marshal(self):
++	for f, prefix in getattr(self, 'marshal_list', []):
++		node = self.path.find_resource(f)
++
++		if not node:
++			raise Utils.WafError('file not found %r' % f)
++
++		h_node = node.change_ext('.h')
++		c_node = node.change_ext('.c')
++
++		task = self.create_task('glib_genmarshal', node, [h_node, c_node])
++		task.env.GLIB_GENMARSHAL_PREFIX = prefix
++		self.allnodes.append(c_node) # schedule the generated .c for compilation, once per marshal file
++
++def genmarshal_func(self):
++
++	bld = self.inputs[0].__class__.bld
++
++	get = self.env.get_flat
++	cmd1 = "%s %s --prefix=%s --header > %s" % (
++		get('GLIB_GENMARSHAL'),
++		self.inputs[0].srcpath(self.env),
++		get('GLIB_GENMARSHAL_PREFIX'),
++		self.outputs[0].abspath(self.env)
++	)
++
++	ret = bld.exec_command(cmd1)
++	if ret: return ret
++
++	#print self.outputs[1].abspath(self.env)
++	f = open(self.outputs[1].abspath(self.env), 'wb')
++	c = '''#include "%s"\n''' % self.outputs[0].name
++	f.write(c)
++	f.close()
++
++	cmd2 = "%s %s --prefix=%s --body >> %s" % (
++		get('GLIB_GENMARSHAL'),
++		self.inputs[0].srcpath(self.env),
++		get('GLIB_GENMARSHAL_PREFIX'),
++		self.outputs[1].abspath(self.env)
++	)
++	ret = Utils.exec_command(cmd2)
++	if ret: return ret
++
++#
++# glib-mkenums
++#
++
++@taskgen
++def add_enums_from_template(self, source='', target='', template='', comments=''):
++	if not hasattr(self, 'enums_list'):
++		self.enums_list = []
++	self.meths.append('process_enums')
++	self.enums_list.append({'source': source,
++	                        'target': target,
++	                        'template': template,
++	                        'file-head': '',
++	                        'file-prod': '',
++	                        'file-tail': '',
++	                        'enum-prod': '',
++	                        'value-head': '',
++	                        'value-prod': '',
++	                        'value-tail': '',
++	                        'comments': comments})
++
++@taskgen
++def add_enums(self, source='', target='',
++              file_head='', file_prod='', file_tail='', enum_prod='',
++              value_head='', value_prod='', value_tail='', comments=''):
++	if not hasattr(self, 'enums_list'):
++		self.enums_list = []
++	self.meths.append('process_enums')
++	self.enums_list.append({'source': source,
++	                        'template': '',
++	                        'target': target,
++	                        'file-head': file_head,
++	                        'file-prod': file_prod,
++	                        'file-tail': file_tail,
++	                        'enum-prod': enum_prod,
++	                        'value-head': value_head,
++	                        'value-prod': value_prod,
++	                        'value-tail': value_tail,
++	                        'comments': comments})
++
++@before('apply_core')
++def process_enums(self):
++	for enum in getattr(self, 'enums_list', []):
++		task = self.create_task('glib_mkenums')
++		env = task.env
++
++		inputs = []
++
++		# process the source
++		source_list = self.to_list(enum['source'])
++		if not source_list:
++			raise Utils.WafError('missing source ' + str(enum))
++		source_list = [self.path.find_resource(k) for k in source_list]
++		inputs += source_list
++		env['GLIB_MKENUMS_SOURCE'] = [k.srcpath(env) for k in source_list]
++
++		# find the target
++		if not enum['target']:
++			raise Utils.WafError('missing target ' + str(enum))
++		tgt_node = self.path.find_or_declare(enum['target'])
++		if tgt_node.name.endswith('.c'):
++			self.allnodes.append(tgt_node)
++		env['GLIB_MKENUMS_TARGET'] = tgt_node.abspath(env)
++
++
++		options = []
++
++		if enum['template']: # template, if provided
++			template_node = self.path.find_resource(enum['template'])
++			options.append('--template %s' % (template_node.abspath(env)))
++			inputs.append(template_node)
++		params = {'file-head' : '--fhead',
++		           'file-prod' : '--fprod',
++		           'file-tail' : '--ftail',
++		           'enum-prod' : '--eprod',
++		           'value-head' : '--vhead',
++		           'value-prod' : '--vprod',
++		           'value-tail' : '--vtail',
++		           'comments': '--comments'}
++		for param, option in params.iteritems():
++			if enum[param]:
++				options.append('%s %r' % (option, enum[param]))
++
++		env['GLIB_MKENUMS_OPTIONS'] = ' '.join(options)
++
++		# update the task instance
++		task.set_inputs(inputs)
++		task.set_outputs(tgt_node)
++
++Task.task_type_from_func('glib_genmarshal', func=genmarshal_func, vars=['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL'],
++	color='BLUE', before='cc cxx')
++Task.simple_task_type('glib_mkenums',
++	'${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}',
++	color='PINK', before='cc cxx')
++
++def detect(conf):
++	glib_genmarshal = conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL')
++	mk_enums_tool = conf.find_program('glib-mkenums', var='GLIB_MKENUMS')
++
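
Both helpers follow the same record-then-process pattern as add_dbus_file
earlier; a usage sketch (file and prefix names hypothetical):

    def build(bld):
        obj = bld(features='cc cshlib', source='main.c', target='gui')
        obj.add_marshal_file('marshal.list', 'my_marshal')
        obj.add_enums_from_template(source='my-enums.h', target='my-enums.c',
                                    template='my-enums.c.in')
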
+diff --git a/buildtools/wafadmin/Tools/gnome.py b/buildtools/wafadmin/Tools/gnome.py
+new file mode 100644
+index 0000000..c098a41
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/gnome.py
+@@ -0,0 +1,223 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006-2008 (ita)
++
++"Gnome support"
++
++import os, re
++import TaskGen, Utils, Runner, Task, Build, Options, Logs
++import cc
++from Logs import error
++from TaskGen import taskgen, before, after, feature
++
++n1_regexp = re.compile('<refentrytitle>(.*)</refentrytitle>', re.M)
++n2_regexp = re.compile('<manvolnum>(.*)</manvolnum>', re.M)
++
++def postinstall_schemas(prog_name):
++	if Build.bld.is_install:
++		dir = Build.bld.get_install_path('${PREFIX}/etc/gconf/schemas/%s.schemas' % prog_name)
++		if not Options.options.destdir:
++			# add the gconf schema
++			Utils.pprint('YELLOW', 'Installing GConf schema')
++			command = 'gconftool-2 --install-schema-file=%s 1> /dev/null' % dir
++			ret = Utils.exec_command(command)
++		else:
++			Utils.pprint('YELLOW', 'GConf schema not installed. After install, run this:')
++			Utils.pprint('YELLOW', 'gconftool-2 --install-schema-file=%s' % dir)
++
++def postinstall_icons():
++	dir = Build.bld.get_install_path('${DATADIR}/icons/hicolor')
++	if Build.bld.is_install:
++		if not Options.options.destdir:
++			# update the pixmap cache directory
++			Utils.pprint('YELLOW', "Updating Gtk icon cache.")
++			command = 'gtk-update-icon-cache -q -f -t %s' % dir
++			ret = Utils.exec_command(command)
++		else:
++			Utils.pprint('YELLOW', 'Icon cache not updated. After install, run this:')
++			Utils.pprint('YELLOW', 'gtk-update-icon-cache -q -f -t %s' % dir)
++
++def postinstall_scrollkeeper(prog_name):
++	if Build.bld.is_install:
++		# now the scrollkeeper update if we can write to the log file
++		if os.access('/var/log/scrollkeeper.log', os.W_OK):
++			dir1 = Build.bld.get_install_path('${PREFIX}/var/scrollkeeper')
++			dir2 = Build.bld.get_install_path('${DATADIR}/omf/%s' % prog_name)
++			command = 'scrollkeeper-update -q -p %s -o %s' % (dir1, dir2)
++			ret = Utils.exec_command(command)
++
++def postinstall(prog_name='myapp', schemas=1, icons=1, scrollkeeper=1):
++	if schemas: postinstall_schemas(prog_name)
++	if icons: postinstall_icons()
++	if scrollkeeper: postinstall_scrollkeeper(prog_name)
++
++# OBSOLETE
++class gnome_doc_taskgen(TaskGen.task_gen):
++	def __init__(self, *k, **kw):
++		TaskGen.task_gen.__init__(self, *k, **kw)
++
++@feature('gnome_doc')
++def init_gnome_doc(self):
++	self.default_install_path = '${PREFIX}/share'
++
++@feature('gnome_doc')
++@after('init_gnome_doc')
++def apply_gnome_doc(self):
++	self.env['APPNAME'] = self.doc_module
++	lst = self.to_list(self.doc_linguas)
++	bld = self.bld
++	lst.append('C')
++
++	for x in lst:
++		if not x == 'C':
++			tsk = self.create_task('xml2po')
++			node = self.path.find_resource(x+'/'+x+'.po')
++			src = self.path.find_resource('C/%s.xml' % self.doc_module)
++			out = self.path.find_or_declare('%s/%s.xml' % (x, self.doc_module))
++			tsk.set_inputs([node, src])
++			tsk.set_outputs(out)
++		else:
++			out = self.path.find_resource('%s/%s.xml' % (x, self.doc_module))
++
++		tsk2 = self.create_task('xsltproc2po')
++		out2 = self.path.find_or_declare('%s/%s-%s.omf' % (x, self.doc_module, x))
++		tsk2.set_outputs(out2)
++		node = self.path.find_resource(self.doc_module+".omf.in")
++		tsk2.inputs = [node, out]
++
++		tsk2.run_after.append(tsk)
++
++		if bld.is_install:
++			path = self.install_path + '/gnome/help/%s/%s' % (self.doc_module, x)
++			bld.install_files(self.install_path + '/omf', out2, env=self.env)
++			for y in self.to_list(self.doc_figures):
++				try:
++					os.stat(self.path.abspath() + '/' + x + '/' + y)
++					bld.install_as(path + '/' + y, self.path.abspath() + '/' + x + '/' + y)
++				except:
++					bld.install_as(path + '/' + y, self.path.abspath() + '/C/' + y)
++			bld.install_as(path + '/%s.xml' % self.doc_module, out.abspath(self.env))
++			if x == 'C':
++				xmls = self.to_list(self.doc_includes)
++				xmls.append(self.doc_entities)
++				for z in xmls:
++					out = self.path.find_resource('%s/%s' % (x, z))
++					bld.install_as(path + '/%s' % z, out.abspath(self.env))
++
++# OBSOLETE
++class xml_to_taskgen(TaskGen.task_gen):
++	def __init__(self, *k, **kw):
++		TaskGen.task_gen.__init__(self, *k, **kw)
++
++@feature('xml_to')
++def init_xml_to(self):
++	Utils.def_attrs(self,
++		source = 'xmlfile',
++		xslt = 'xlsltfile',
++		target = 'hey',
++		default_install_path = '${PREFIX}',
++		task_created = None)
++
++@feature('xml_to')
++@after('init_xml_to')
++def apply_xml_to(self):
++	xmlfile = self.path.find_resource(self.source)
++	xsltfile = self.path.find_resource(self.xslt)
++	tsk = self.create_task('xmlto', [xmlfile, xsltfile], xmlfile.change_ext('html'))
++	tsk.install_path = self.install_path
++
++def sgml_scan(self):
++	node = self.inputs[0]
++
++	env = self.env
++	variant = node.variant(env)
++
++	fi = open(node.abspath(env), 'r')
++	content = fi.read()
++	fi.close()
++
++	# we should use a sgml parser :-/
++	name = n1_regexp.findall(content)[0]
++	num = n2_regexp.findall(content)[0]
++
++	doc_name = name+'.'+num
++
++	if not self.outputs:
++		self.outputs = [self.generator.path.find_or_declare(doc_name)]
++
++	return ([], [doc_name])
++
++class gnome_sgml2man_taskgen(TaskGen.task_gen):
++	def __init__(self, *k, **kw):
++		TaskGen.task_gen.__init__(self, *k, **kw)
++
++@feature('gnome_sgml2man')
++def apply_gnome_sgml2man(self):
++	"""
++	we could make it more complicated, but for now we just scan the document each time
++	"""
++	assert(getattr(self, 'appname', None))
++
++	def install_result(task):
++		out = task.outputs[0]
++		name = out.name
++		ext = name[-1]
++		env = task.env
++		self.bld.install_files('${DATADIR}/man/man%s/' % ext, out, env)
++
++	self.bld.rescan(self.path)
++	for name in self.bld.cache_dir_contents[self.path.id]:
++		base, ext = os.path.splitext(name)
++		if ext != '.sgml': continue
++
++		task = self.create_task('sgml2man')
++		task.set_inputs(self.path.find_resource(name))
++		task.task_generator = self
++		if self.bld.is_install: task.install = install_result
++		# no outputs, the scanner does it
++		# no caching for now, this is not a time-critical feature
++		# in the future the scanner can be used to do more things (find dependencies, etc)
++		task.scan()
++
++cls = Task.simple_task_type('sgml2man', '${SGML2MAN} -o ${TGT[0].bld_dir(env)} ${SRC}  > /dev/null', color='BLUE')
++cls.scan = sgml_scan
++cls.quiet = 1
++
++Task.simple_task_type('xmlto', '${XMLTO} html -m ${SRC[1].abspath(env)} ${SRC[0].abspath(env)}')
++
++Task.simple_task_type('xml2po', '${XML2PO} ${XML2POFLAGS} ${SRC} > ${TGT}', color='BLUE')
++
++# how do you expect someone to understand this?!
++xslt_magic = """${XSLTPROC2PO} -o ${TGT[0].abspath(env)} \
++--stringparam db2omf.basename ${APPNAME} \
++--stringparam db2omf.format docbook \
++--stringparam db2omf.lang ${TGT[0].abspath(env)[:-4].split('-')[-1]} \
++--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
++--stringparam db2omf.omf_dir ${PREFIX}/share/omf \
++--stringparam db2omf.help_dir ${PREFIX}/share/gnome/help \
++--stringparam db2omf.omf_in ${SRC[0].abspath(env)} \
++--stringparam db2omf.scrollkeeper_cl ${SCROLLKEEPER_DATADIR}/Templates/C/scrollkeeper_cl.xml \
++${DB2OMF} ${SRC[1].abspath(env)}"""
++
++#--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
++Task.simple_task_type('xsltproc2po', xslt_magic, color='BLUE')
++
++def detect(conf):
++	conf.check_tool('gnu_dirs glib2 dbus')
++	sgml2man = conf.find_program('docbook2man', var='SGML2MAN')
++
++	def getstr(varname):
++		return getattr(Options.options, varname, '')
++
++	# conf.define also sets the variable in the env
++	conf.define('GNOMELOCALEDIR', os.path.join(conf.env['DATADIR'], 'locale'))
++
++	xml2po = conf.find_program('xml2po', var='XML2PO')
++	xsltproc2po = conf.find_program('xsltproc', var='XSLTPROC2PO')
++	conf.env['XML2POFLAGS'] = '-e -p'
++	conf.env['SCROLLKEEPER_DATADIR'] = Utils.cmd_output("scrollkeeper-config --pkgdatadir", silent=1).strip()
++	conf.env['DB2OMF'] = Utils.cmd_output("/usr/bin/pkg-config --variable db2omf gnome-doc-utils", silent=1).strip()
++
++def set_options(opt):
++	opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')
++
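
The gnome_sgml2man feature needs only an appname; the scanner derives the
output names from the refentrytitle and manvolnum tags. A sketch:

    def build(bld):
        # picks up every .sgml file in the current directory
        bld(features='gnome_sgml2man', appname='myapp')
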
+diff --git a/buildtools/wafadmin/Tools/gnu_dirs.py b/buildtools/wafadmin/Tools/gnu_dirs.py
+new file mode 100644
+index 0000000..856e4a7
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/gnu_dirs.py
+@@ -0,0 +1,111 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Ali Sabil, 2007
++
++"""
++To use this module do not forget to call
++opt.tool_options('gnu_dirs')
++AND
++conf.check_tool('gnu_dirs')
++
++Add options for the standard GNU directories, this tool will add the options
++found in autotools, and will update the environment with the following
++installation variables:
++
++ * PREFIX : architecture-independent files [/usr/local]
++ * EXEC_PREFIX : architecture-dependent files [PREFIX]
++ * BINDIR : user executables [EXEC_PREFIX/bin]
++ * SBINDIR : user executables [EXEC_PREFIX/sbin]
++ * LIBEXECDIR : program executables [EXEC_PREFIX/libexec]
++ * SYSCONFDIR : read-only single-machine data [PREFIX/etc]
++ * SHAREDSTATEDIR : modifiable architecture-independent data [PREFIX/com]
++ * LOCALSTATEDIR : modifiable single-machine data [PREFIX/var]
++ * LIBDIR : object code libraries [EXEC_PREFIX/lib]
++ * INCLUDEDIR : C header files [PREFIX/include]
++ * OLDINCLUDEDIR : C header files for non-gcc [/usr/include]
++ * DATAROOTDIR : read-only arch.-independent data root [PREFIX/share]
++ * DATADIR : read-only architecture-independent data [DATAROOTDIR]
++ * INFODIR : info documentation [DATAROOTDIR/info]
++ * LOCALEDIR : locale-dependent data [DATAROOTDIR/locale]
++ * MANDIR : man documentation [DATAROOTDIR/man]
++ * DOCDIR : documentation root [DATAROOTDIR/doc/PACKAGE]
++ * HTMLDIR : html documentation [DOCDIR]
++ * DVIDIR : dvi documentation [DOCDIR]
++ * PDFDIR : pdf documentation [DOCDIR]
++ * PSDIR : ps documentation [DOCDIR]
++"""
++
++import Utils, Options
++
++_options = [x.split(', ') for x in '''
++bindir, user executables, ${EXEC_PREFIX}/bin
++sbindir, system admin executables, ${EXEC_PREFIX}/sbin
++libexecdir, program executables, ${EXEC_PREFIX}/libexec
++sysconfdir, read-only single-machine data, ${PREFIX}/etc
++sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com
++localstatedir, modifiable single-machine data, ${PREFIX}/var
++libdir, object code libraries, ${EXEC_PREFIX}/lib
++includedir, C header files, ${PREFIX}/include
++oldincludedir, C header files for non-gcc, /usr/include
++datarootdir, read-only arch.-independent data root, ${PREFIX}/share
++datadir, read-only architecture-independent data, ${DATAROOTDIR}
++infodir, info documentation, ${DATAROOTDIR}/info
++localedir, locale-dependent data, ${DATAROOTDIR}/locale
++mandir, man documentation, ${DATAROOTDIR}/man
++docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
++htmldir, html documentation, ${DOCDIR}
++dvidir, dvi documentation, ${DOCDIR}
++pdfdir, pdf documentation, ${DOCDIR}
++psdir, ps documentation, ${DOCDIR}
++'''.split('\n') if x]
++
++def detect(conf):
++	def get_param(varname, default):
++		return getattr(Options.options, varname, '') or default
++
++	env = conf.env
++	env['EXEC_PREFIX'] = get_param('EXEC_PREFIX', env['PREFIX'])
++	env['PACKAGE'] = Utils.g_module.APPNAME
++
++	complete = False
++	iter = 0
++	while not complete and iter < len(_options) + 1:
++		iter += 1
++		complete = True
++		for name, help, default in _options:
++			name = name.upper()
++			if not env[name]:
++				try:
++					env[name] = Utils.subst_vars(get_param(name, default), env)
++				except TypeError:
++					complete = False
++	if not complete:
++		lst = [name for name, _, _ in _options if not env[name.upper()]]
++		raise Utils.WafError('Variable substitution failure %r' % lst)
++
++def set_options(opt):
++
++	inst_dir = opt.add_option_group('Installation directories',
++'By default, "waf install" will put the files in\
++ "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
++ than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
++
++	for k in ('--prefix', '--destdir'):
++		option = opt.parser.get_option(k)
++		if option:
++			opt.parser.remove_option(k)
++			inst_dir.add_option(option)
++
++	inst_dir.add_option('--exec-prefix',
++		help = 'installation prefix [Default: ${PREFIX}]',
++		default = '',
++		dest = 'EXEC_PREFIX')
++
++	dirs_options = opt.add_option_group('Pre-defined installation directories', '')
++
++	for name, help, default in _options:
++		option_name = '--' + name
++		str_default = default
++		str_help = '%s [Default: %s]' % (help, str_default)
++		dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper())
++
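
A sketch of the two calls the module docstring asks for, and of the result
(values shown assume the defaults):

    def set_options(opt):
        opt.tool_options('gnu_dirs')

    def configure(conf):
        conf.check_tool('gnu_dirs')
        # '/usr/local/lib' unless --libdir or --exec-prefix was given
        print(conf.env['LIBDIR'])
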
+diff --git a/buildtools/wafadmin/Tools/gob2.py b/buildtools/wafadmin/Tools/gob2.py
+new file mode 100644
+index 0000000..00aaa32
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/gob2.py
+@@ -0,0 +1,18 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Ali Sabil, 2007
++
++import TaskGen
++
++TaskGen.declare_chain(
++	name = 'gob2',
++	rule = '${GOB2} -o ${TGT[0].bld_dir(env)} ${GOB2FLAGS} ${SRC}',
++	ext_in = '.gob',
++	ext_out = '.c'
++)
++
++def detect(conf):
++	gob2 = conf.find_program('gob2', var='GOB2', mandatory=True)
++	conf.env['GOB2'] = gob2
++	conf.env['GOB2FLAGS'] = ''
++
+diff --git a/buildtools/wafadmin/Tools/gxx.py b/buildtools/wafadmin/Tools/gxx.py
+new file mode 100644
+index 0000000..8f4a0bf
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/gxx.py
+@@ -0,0 +1,133 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006 (ita)
++# Ralf Habacker, 2006 (rh)
++# Yinon Ehrlich, 2009
++
++import os, sys
++import Configure, Options, Utils
++import ccroot, ar
++from Configure import conftest
++
++@conftest
++def find_gxx(conf):
++	cxx = conf.find_program(['g++', 'c++'], var='CXX', mandatory=True)
++	cxx = conf.cmd_to_list(cxx)
++	ccroot.get_cc_version(conf, cxx, gcc=True)
++	conf.env.CXX_NAME = 'gcc'
++	conf.env.CXX      = cxx
++
++@conftest
++def gxx_common_flags(conf):
++	v = conf.env
++
++	# CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
++	v['CXXFLAGS_DEBUG'] = ['-g']
++	v['CXXFLAGS_RELEASE'] = ['-O2']
++
++	v['CXX_SRC_F']           = ''
++	v['CXX_TGT_F']           = ['-c', '-o', ''] # shell hack for -MD
++	v['CPPPATH_ST']          = '-I%s' # template for adding include paths
++
++	# linker
++	if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
++	v['CXXLNK_SRC_F']        = ''
++	v['CXXLNK_TGT_F']        = ['-o', ''] # shell hack for -MD
++
++	v['LIB_ST']              = '-l%s' # template for adding libs
++	v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
++	v['STATICLIB_ST']        = '-l%s'
++	v['STATICLIBPATH_ST']    = '-L%s'
++	v['RPATH_ST']            = '-Wl,-rpath,%s'
++	v['CXXDEFINES_ST']       = '-D%s'
++
++	v['SONAME_ST']           = '-Wl,-h,%s'
++	v['SHLIB_MARKER']        = '-Wl,-Bdynamic'
++	v['STATICLIB_MARKER']    = '-Wl,-Bstatic'
++	v['FULLSTATIC_MARKER']   = '-static'
++
++	# program
++	v['program_PATTERN']     = '%s'
++
++	# shared library
++	v['shlib_CXXFLAGS']      = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC already defines the __PIC__ macro
++	v['shlib_LINKFLAGS']     = ['-shared']
++	v['shlib_PATTERN']       = 'lib%s.so'
++
++	# static lib
++	v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic']
++	v['staticlib_PATTERN']   = 'lib%s.a'
++
++	# osx stuff
++	v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
++	v['CCFLAGS_MACBUNDLE']   = ['-fPIC']
++	v['macbundle_PATTERN']   = '%s.bundle'
++
++@conftest
++def gxx_modifier_win32(conf):
++	v = conf.env
++	v['program_PATTERN']     = '%s.exe'
++
++	v['shlib_PATTERN']       = '%s.dll'
++	v['implib_PATTERN']      = 'lib%s.dll.a'
++	v['IMPLIB_ST']           = '-Wl,--out-implib,%s'
++
++	dest_arch = v['DEST_CPU']
++	v['shlib_CXXFLAGS'] = []
++
++	v.append_value('shlib_CXXFLAGS', '-DDLL_EXPORT') # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea
++
++	# Auto-import is enabled by default even without this option,
++	# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
++	# that the linker emits otherwise.
++	v.append_value('LINKFLAGS', '-Wl,--enable-auto-import')
++
++@conftest
++def gxx_modifier_cygwin(conf):
++	gxx_modifier_win32(conf)
++	v = conf.env
++	v['shlib_PATTERN']       = 'cyg%s.dll'
++	v.append_value('shlib_LINKFLAGS', '-Wl,--enable-auto-image-base')
++
++@conftest
++def gxx_modifier_darwin(conf):
++	v = conf.env
++	v['shlib_CXXFLAGS']      = ['-fPIC', '-compatibility_version', '1', '-current_version', '1']
++	v['shlib_LINKFLAGS']     = ['-dynamiclib']
++	v['shlib_PATTERN']       = 'lib%s.dylib'
++
++	v['staticlib_LINKFLAGS'] = []
++
++	v['SHLIB_MARKER']        = ''
++	v['STATICLIB_MARKER']    = ''
++	v['SONAME_ST']		 = ''	
++
++@conftest
++def gxx_modifier_aix(conf):
++	v = conf.env
++	v['program_LINKFLAGS']   = ['-Wl,-brtl']
++
++	v['shlib_LINKFLAGS']     = ['-shared', '-Wl,-brtl,-bexpfull']
++
++	v['SHLIB_MARKER']        = ''
++
++@conftest
++def gxx_modifier_platform(conf):
++	# * set configurations specific for a platform.
++	# * the destination platform is detected automatically by looking at the macros the compiler predefines,
++	#   and if it's not recognised, it falls back to sys.platform.
++	dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
++	gxx_modifier_func = globals().get('gxx_modifier_' + dest_os)
++	if gxx_modifier_func:
++			gxx_modifier_func(conf)
++
++def detect(conf):
++	conf.find_gxx()
++	conf.find_cpp()
++	conf.find_ar()
++	conf.gxx_common_flags()
++	conf.gxx_modifier_platform()
++	conf.cxx_load_tools()
++	conf.cxx_add_flags()
++	conf.link_add_flags()
++
+diff --git a/buildtools/wafadmin/Tools/icc.py b/buildtools/wafadmin/Tools/icc.py
+new file mode 100644
+index 0000000..9c9a926
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/icc.py
+@@ -0,0 +1,37 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Stian Selnes, 2008
++# Thomas Nagy 2009
++
++import os, sys
++import Configure, Options, Utils
++import ccroot, ar, gcc
++from Configure import conftest
++
++@conftest
++def find_icc(conf):
++	if sys.platform == 'cygwin':
++		conf.fatal('The Intel compiler does not work on Cygwin')
++
++	v = conf.env
++	cc = None
++	if v['CC']: cc = v['CC']
++	elif 'CC' in conf.environ: cc = conf.environ['CC']
++	if not cc: cc = conf.find_program('icc', var='CC')
++	if not cc: cc = conf.find_program('ICL', var='CC')
++	if not cc: conf.fatal('Intel C Compiler (icc) was not found')
++	cc = conf.cmd_to_list(cc)
++
++	ccroot.get_cc_version(conf, cc, icc=True)
++	v['CC'] = cc
++	v['CC_NAME'] = 'icc'
++
++detect = '''
++find_icc
++find_ar
++gcc_common_flags
++gcc_modifier_platform
++cc_load_tools
++cc_add_flags
++link_add_flags
++'''
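
Note that detect is a string here rather than a function: waf 1.5 accepts a
whitespace-separated list of conftest names and runs them in order, so the
string above is equivalent to:

    def detect(conf):
        conf.find_icc()
        conf.find_ar()
        conf.gcc_common_flags()
        conf.gcc_modifier_platform()
        conf.cc_load_tools()
        conf.cc_add_flags()
        conf.link_add_flags()
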
+diff --git a/buildtools/wafadmin/Tools/icpc.py b/buildtools/wafadmin/Tools/icpc.py
+new file mode 100644
+index 0000000..7d79c57
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/icpc.py
+@@ -0,0 +1,35 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy 2009
++
++import os, sys
++import Configure, Options, Utils
++import ccroot, ar, gxx
++from Configure import conftest
++
++@conftest
++def find_icpc(conf):
++	if sys.platform == 'cygwin':
++		conf.fatal('The Intel compiler does not work on Cygwin')
++
++	v = conf.env
++	cxx = None
++	if v['CXX']: cxx = v['CXX']
++	elif 'CXX' in conf.environ: cxx = conf.environ['CXX']
++	if not cxx: cxx = conf.find_program('icpc', var='CXX')
++	if not cxx: conf.fatal('Intel C++ Compiler (icpc) was not found')
++	cxx = conf.cmd_to_list(cxx)
++
++	ccroot.get_cc_version(conf, cxx, icc=True)
++	v['CXX'] = cxx
++	v['CXX_NAME'] = 'icc'
++
++detect = '''
++find_icpc
++find_ar
++gxx_common_flags
++gxx_modifier_platform
++cxx_load_tools
++cxx_add_flags
++link_add_flags
++'''
+diff --git a/buildtools/wafadmin/Tools/intltool.py b/buildtools/wafadmin/Tools/intltool.py
+new file mode 100644
+index 0000000..deb8f4a
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/intltool.py
+@@ -0,0 +1,139 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006 (ita)
++
++"intltool support"
++
++import os, re
++import Configure, TaskGen, Task, Utils, Runner, Options, Build, config_c
++from TaskGen import feature, before, taskgen
++from Logs import error
++
++"""
++Usage:
++
++bld(features='intltool_in', source='a.po b.po', podir='po', cache='.intlcache', flags='')
++
++"""
++
++class intltool_in_taskgen(TaskGen.task_gen):
++	"""deprecated"""
++	def __init__(self, *k, **kw):
++		TaskGen.task_gen.__init__(self, *k, **kw)
++
++@before('apply_core')
++@feature('intltool_in')
++def iapply_intltool_in_f(self):
++	try: self.meths.remove('apply_core')
++	except ValueError: pass
++
++	for i in self.to_list(self.source):
++		node = self.path.find_resource(i)
++
++		podir = getattr(self, 'podir', 'po')
++		podirnode = self.path.find_dir(podir)
++		if not podirnode:
++			error("could not find the podir %r" % podir)
++			continue
++
++		cache = getattr(self, 'intlcache', '.intlcache')
++		self.env['INTLCACHE'] = os.path.join(self.path.bldpath(self.env), podir, cache)
++		self.env['INTLPODIR'] = podirnode.srcpath(self.env)
++		self.env['INTLFLAGS'] = getattr(self, 'flags', ['-q', '-u', '-c'])
++
++		task = self.create_task('intltool', node, node.change_ext(''))
++		task.install_path = self.install_path
++
++class intltool_po_taskgen(TaskGen.task_gen):
++	"""deprecated"""
++	def __init__(self, *k, **kw):
++		TaskGen.task_gen.__init__(self, *k, **kw)
++
++
++@feature('intltool_po')
++def apply_intltool_po(self):
++	try: self.meths.remove('apply_core')
++	except ValueError: pass
++
++	self.default_install_path = '${LOCALEDIR}'
++	appname = getattr(self, 'appname', 'set_your_app_name')
++	podir = getattr(self, 'podir', '')
++
++	def install_translation(task):
++		out = task.outputs[0]
++		filename = out.name
++		(langname, ext) = os.path.splitext(filename)
++		inst_file = langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo'
++		self.bld.install_as(os.path.join(self.install_path, inst_file), out, self.env, self.chmod)
++
++	linguas = self.path.find_resource(os.path.join(podir, 'LINGUAS'))
++	if linguas:
++		# scan LINGUAS file for locales to process
++		file = open(linguas.abspath())
++		langs = []
++		for line in file.readlines():
++			# ignore lines containing comments
++			if not line.startswith('#'):
++				langs += line.split()
++		file.close()
++		re_linguas = re.compile('[-a-zA-Z_@.]+')
++		for lang in langs:
++			# Make sure that we only process lines which contain locales
++			if re_linguas.match(lang):
++				node = self.path.find_resource(os.path.join(podir, re_linguas.match(lang).group() + '.po'))
++				task = self.create_task('po')
++				task.set_inputs(node)
++				task.set_outputs(node.change_ext('.mo'))
++				if self.bld.is_install: task.install = install_translation
++	else:
++		Utils.pprint('RED', "Error: no LINGUAS file found in the po directory")
++
++Task.simple_task_type('po', '${POCOM} -o ${TGT} ${SRC}', color='BLUE', shell=False)
++Task.simple_task_type('intltool',
++	'${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}',
++	color='BLUE', after="cc_link cxx_link", shell=False)
++
++def detect(conf):
++	pocom = conf.find_program('msgfmt')
++	if not pocom:
++		# if msgfmt should not be mandatory, catch the thrown exception in your wscript
++		conf.fatal('The program msgfmt (gettext) is mandatory!')
++	conf.env['POCOM'] = pocom
++
++	# NOTE: it is possible to set INTLTOOL in the environment, but it must not have spaces in it
++
++	intltool = conf.find_program('intltool-merge', var='INTLTOOL')
++	if not intltool:
++		# if intltool-merge should not be mandatory, catch the thrown exception in your wscript
++		if Options.platform == 'win32':
++			perl = conf.find_program('perl', var='PERL')
++			if not perl:
++				conf.fatal('The program perl (required by intltool) could not be found')
++
++			intltooldir = Configure.find_file('intltool-merge', os.environ['PATH'].split(os.pathsep))
++			if not intltooldir:
++				conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')
++
++			conf.env['INTLTOOL'] = Utils.to_list(conf.env['PERL']) + [intltooldir + os.sep + 'intltool-merge']
++			conf.check_message('intltool', '', True, ' '.join(conf.env['INTLTOOL']))
++		else:
++			conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')
++
++	def getstr(varname):
++		return getattr(Options.options, varname, '')
++
++	prefix  = conf.env['PREFIX']
++	datadir = getstr('datadir')
++	if not datadir: datadir = os.path.join(prefix,'share')
++
++	conf.define('LOCALEDIR', os.path.join(datadir, 'locale'))
++	conf.define('DATADIR', datadir)
++
++	if conf.env['CC'] or conf.env['CXX']:
++		# Define to 1 if <locale.h> is present
++		conf.check(header_name='locale.h')
++
++def set_options(opt):
++	opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')
++	opt.add_option('--datadir', type='string', default='', dest='datadir', help='read-only application data')
++
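
The intltool_po counterpart to the intltool_in usage shown in the module
docstring (appname hypothetical):

    def build(bld):
        # compiles every locale listed in po/LINGUAS and installs the result
        # as ${LOCALEDIR}/<lang>/LC_MESSAGES/myapp.mo
        bld(features='intltool_po', appname='myapp', podir='po')
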
+diff --git a/buildtools/wafadmin/Tools/javaw.py b/buildtools/wafadmin/Tools/javaw.py
+new file mode 100644
+index 0000000..301ebc4
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/javaw.py
+@@ -0,0 +1,255 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006-2008 (ita)
++
++"""
++Java support
++
++Javac is one of the few compilers that behaves very badly:
++* it outputs files where it wants to (-d is only for the package root)
++* it recompiles files silently behind your back
++* it outputs an undefined amount of files (inner classes)
++
++Fortunately, the convention makes it possible to use the build dir without
++too many problems for the moment
++
++Inner classes must be located and cleaned when a problem arises;
++for the moment waf does not track the production of inner classes.
++
++Adding all the files to a single task and re-executing it whenever any
++input file changes would only hurt compilation times.
++
++Compilation can be run using Jython[1] rather than regular Python. Instead of
++running one of the following commands:
++    ./waf configure
++    python waf configure
++You would have to run:
++    java -jar /path/to/jython.jar waf configure
++
++[1] http://www.jython.org/
++"""
++
++import os, re
++from Configure import conf
++import TaskGen, Task, Utils, Options, Build
++from TaskGen import feature, before, taskgen
++
++class_check_source = '''
++public class Test {
++	public static void main(String[] argv) {
++		Class lib;
++		if (argv.length < 1) {
++			System.err.println("Missing argument");
++			System.exit(77);
++		}
++		try {
++			lib = Class.forName(argv[0]);
++		} catch (ClassNotFoundException e) {
++			System.err.println("ClassNotFoundException");
++			System.exit(1);
++		}
++		lib = null;
++		System.exit(0);
++	}
++}
++'''
++
++@feature('jar')
++@before('apply_core')
++def jar_files(self):
++	basedir = getattr(self, 'basedir', '.')
++	destfile = getattr(self, 'destfile', 'test.jar')
++	jaropts = getattr(self, 'jaropts', [])
++	jarcreate = getattr(self, 'jarcreate', 'cf')
++
++	dir = self.path.find_dir(basedir)
++	if not dir: raise Utils.WafError('basedir %r not found' % basedir)
++
++	jaropts.append('-C')
++	jaropts.append(dir.abspath(self.env))
++	jaropts.append('.')
++
++	out = self.path.find_or_declare(destfile)
++
++	tsk = self.create_task('jar_create')
++	tsk.set_outputs(out)
++	tsk.inputs = [x for x in dir.find_iter(src=0, bld=1) if x.id != out.id]
++	tsk.env['JAROPTS'] = jaropts
++	tsk.env['JARCREATE'] = jarcreate
++
++@feature('javac')
++@before('apply_core')
++def apply_java(self):
++	Utils.def_attrs(self, jarname='', jaropts='', classpath='',
++		sourcepath='.', srcdir='.', source_re='**/*.java',
++		jar_mf_attributes={}, jar_mf_classpath=[])
++
++	if getattr(self, 'source_root', None):
++		# old stuff
++		self.srcdir = self.source_root
++
++
++	nodes_lst = []
++
++	if not self.classpath:
++		if not self.env['CLASSPATH']:
++			self.env['CLASSPATH'] = '..' + os.pathsep + '.'
++	else:
++		self.env['CLASSPATH'] = self.classpath
++
++	srcdir_node = self.path.find_dir(self.srcdir)
++	if not srcdir_node:
++		raise Utils.WafError('could not find srcdir %r' % self.srcdir)
++
++	src_nodes = [x for x in srcdir_node.ant_glob(self.source_re, flat=False)]
++	bld_nodes = [x.change_ext('.class') for x in src_nodes]
++
++	self.env['OUTDIR'] = [srcdir_node.bldpath(self.env)]
++
++	tsk = self.create_task('javac')
++	tsk.set_inputs(src_nodes)
++	tsk.set_outputs(bld_nodes)
++
++	if getattr(self, 'compat', None):
++		tsk.env.append_value('JAVACFLAGS', ['-source', self.compat])
++
++	if hasattr(self, 'sourcepath'):
++		fold = [self.path.find_dir(x) for x in self.to_list(self.sourcepath)]
++		names = os.pathsep.join([x.srcpath() for x in fold])
++	else:
++		names = srcdir_node.srcpath()
++
++	if names:
++		tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])
++
++	if self.jarname:
++		jtsk = self.create_task('jar_create', bld_nodes, self.path.find_or_declare(self.jarname))
++		jtsk.set_run_after(tsk)
++
++		if not self.env.JAROPTS:
++			if self.jaropts:
++				self.env.JAROPTS = self.jaropts
++			else:
++				dirs = '.'
++				self.env.JAROPTS = ['-C', ''.join(self.env['OUTDIR']), dirs]
++
++Task.simple_task_type('jar_create', '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}', color='GREEN', shell=False)
++cls = Task.simple_task_type('javac', '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}', shell=False)
++cls.color = 'BLUE'
++def post_run_javac(self):
++	"""this is for cleaning the folder
++	javac creates single files for inner classes
++	but it is not possible to know which inner classes in advance"""
++
++	par = {}
++	for x in self.inputs:
++		par[x.parent.id] = x.parent
++
++	inner = {}
++	for k in par.values():
++		path = k.abspath(self.env)
++		lst = os.listdir(path)
++
++		for u in lst:
++			if u.find('$') >= 0:
++				inner_class_node = k.find_or_declare(u)
++				inner[inner_class_node.id] = inner_class_node
++
++	to_add = set(inner.keys()) - set([x.id for x in self.outputs])
++	for x in to_add:
++		self.outputs.append(inner[x])
++
++	self.cached = True # disable the cache here - inner classes are a problem
++	return Task.Task.post_run(self)
++cls.post_run = post_run_javac
++
++def detect(conf):
++	# If JAVA_PATH is set, we prepend it to the path list
++	java_path = conf.environ['PATH'].split(os.pathsep)
++	v = conf.env
++
++	if 'JAVA_HOME' in conf.environ:
++		java_path = [os.path.join(conf.environ['JAVA_HOME'], 'bin')] + java_path
++		conf.env['JAVA_HOME'] = [conf.environ['JAVA_HOME']]
++
++	for x in 'javac java jar'.split():
++		conf.find_program(x, var=x.upper(), path_list=java_path)
++		conf.env[x.upper()] = conf.cmd_to_list(conf.env[x.upper()])
++	v['JAVA_EXT'] = ['.java']
++
++	if 'CLASSPATH' in conf.environ:
++		v['CLASSPATH'] = conf.environ['CLASSPATH']
++
++	if not v['JAR']: conf.fatal('jar is required for making java packages')
++	if not v['JAVAC']: conf.fatal('javac is required for compiling java classes')
++	v['JARCREATE'] = 'cf' # can use cvf
++
++@conf
++def check_java_class(self, classname, with_classpath=None):
++	"""Check if the specified java class is installed"""
++
++	import shutil
++
++	javatestdir = '.waf-javatest'
++
++	classpath = javatestdir
++	if self.env['CLASSPATH']:
++		classpath += os.pathsep + self.env['CLASSPATH']
++	if isinstance(with_classpath, str):
++		classpath += os.pathsep + with_classpath
++
++	shutil.rmtree(javatestdir, True)
++	os.mkdir(javatestdir)
++
++	java_file = open(os.path.join(javatestdir, 'Test.java'), 'w')
++	java_file.write(class_check_source)
++	java_file.close()
++
++	# Compile the source
++	Utils.exec_command(self.env['JAVAC'] + [os.path.join(javatestdir, 'Test.java')], shell=False)
++
++	# Try to run the app
++	cmd = self.env['JAVA'] + ['-cp', classpath, 'Test', classname]
++	self.log.write("%s\n" % str(cmd))
++	found = Utils.exec_command(cmd, shell=False, log=self.log)
++
++	self.check_message('Java class %s' % classname, "", not found)
++
++	shutil.rmtree(javatestdir, True)
++
++	return found
++
++@conf
++def check_jni_headers(conf):
++	"""
++	Check for jni headers and libraries
++
++	On success the environment variable xxx_JAVA is added for uselib
++	"""
++
++	if not conf.env.CC_NAME and not conf.env.CXX_NAME:
++		conf.fatal('load a compiler first (gcc, g++, ..)')
++
++	if not conf.env.JAVA_HOME:
++		conf.fatal('set JAVA_HOME in the system environment')
++
++	# jni requires the jvm
++	javaHome = conf.env['JAVA_HOME'][0]
++
++	b = Build.BuildContext()
++	b.load_dirs(conf.srcdir, conf.blddir)
++	dir = b.root.find_dir(conf.env.JAVA_HOME[0] + '/include')
++	f = dir.ant_glob('**/(jni|jni_md).h', flat=False)
++	incDirs = [x.parent.abspath() for x in f]
++
++	dir = b.root.find_dir(conf.env.JAVA_HOME[0])
++	f = dir.ant_glob('**/*jvm.(so|dll)', flat=False)
++	libDirs = [x.parent.abspath() for x in f] or [javaHome]
++
++	for i, d in enumerate(libDirs):
++		if conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm',
++				libpath=d, includes=incDirs, uselib_store='JAVA', uselib='JAVA'):
++			break
++	else:
++		conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs)
++
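
A minimal build sketch for the javac feature (paths hypothetical); when
jarname is set, apply_java chains a jar_create task after the compilation:

    def build(bld):
        bld(features='javac', srcdir='src', compat='1.5', jarname='myapp.jar')
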
+diff --git a/buildtools/wafadmin/Tools/kde4.py b/buildtools/wafadmin/Tools/kde4.py
+new file mode 100644
+index 0000000..f480929
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/kde4.py
+@@ -0,0 +1,74 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006 (ita)
++
++import os, sys, re
++import Options, TaskGen, Task, Utils
++from TaskGen import taskgen, feature, after
++
++class msgfmt_taskgen(TaskGen.task_gen):
++	def __init__(self, *k, **kw):
++		TaskGen.task_gen.__init__(self, *k, **kw)
++
++@feature('msgfmt')
++def init_msgfmt(self):
++	#langs = '' # for example "foo/fr foo/br"
++	self.default_install_path = '${KDE4_LOCALE_INSTALL_DIR}'
++
++@feature('msgfmt')
++@after('init_msgfmt')
++def apply_msgfmt(self):
++	for lang in self.to_list(self.langs):
++		node = self.path.find_resource(lang+'.po')
++		task = self.create_task('msgfmt', node, node.change_ext('.mo'))
++
++		if not self.bld.is_install: continue
++		langname = lang.split('/')
++		langname = langname[-1]
++		task.install_path = self.install_path + os.sep + langname + os.sep + 'LC_MESSAGES'
++		task.filename = getattr(self, 'appname', 'set_your_appname') + '.mo'
++		task.chmod = self.chmod
++
++def detect(conf):
++	kdeconfig = conf.find_program('kde4-config')
++	if not kdeconfig:
++		conf.fatal('we need kde4-config')
++	prefix = Utils.cmd_output('%s --prefix' % kdeconfig, silent=True).strip()
++	file = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
++	try: os.stat(file)
++	except OSError:
++		file = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
++		try: os.stat(file)
++		except OSError: conf.fatal('could not open %s' % file)
++
++	try:
++		txt = Utils.readf(file)
++	except (OSError, IOError):
++		conf.fatal('could not read %s' % file)
++
++	txt = txt.replace('\\\n', '\n')
++	fu = re.compile('#(.*)\n')
++	txt = fu.sub('', txt)
++
++	setregexp = re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
++	found = setregexp.findall(txt)
++
++	for (_, key, val) in found:
++		#print key, val
++		conf.env[key] = val
++
++	# well well, i could just write an interpreter for cmake files
++	conf.env['LIB_KDECORE']='kdecore'
++	conf.env['LIB_KDEUI']  ='kdeui'
++	conf.env['LIB_KIO']    ='kio'
++	conf.env['LIB_KHTML']  ='khtml'
++	conf.env['LIB_KPARTS'] ='kparts'
++
++	conf.env['LIBPATH_KDECORE'] = conf.env['KDE4_LIB_INSTALL_DIR']
++	conf.env['CPPPATH_KDECORE'] = conf.env['KDE4_INCLUDE_INSTALL_DIR']
++	conf.env.append_value('CPPPATH_KDECORE', conf.env['KDE4_INCLUDE_INSTALL_DIR']+"/KDE")
++
++	conf.env['MSGFMT'] = conf.find_program('msgfmt')
++
++Task.simple_task_type('msgfmt', '${MSGFMT} ${SRC} -o ${TGT}', color='BLUE', shell=False)
++
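++# Illustrative example of the parsing performed by detect() above: a line
++# such as
++#   set(KDE4_LIB_INSTALL_DIR "/usr/lib/kde4")
++# in KDELibsDependencies.cmake ends up as
++#   conf.env['KDE4_LIB_INSTALL_DIR'] == '/usr/lib/kde4'
++# (the path shown is a hypothetical value).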
+diff --git a/buildtools/wafadmin/Tools/libtool.py b/buildtools/wafadmin/Tools/libtool.py
+new file mode 100644
+index 0000000..47fa906
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/libtool.py
+@@ -0,0 +1,330 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Matthias Jahn, 2008, jahn matthias ath freenet punto de
++# Thomas Nagy, 2008 (ita)
++
++import sys, re, os, optparse
++
++import TaskGen, Task, Utils, preproc
++from Logs import error, debug, warn
++from TaskGen import taskgen, after, before, feature
++
++REVISION="0.1.3"
++
++"""
++if you want to use the code here, you must use something like this:
++obj = obj.create(...)
++obj.features.append("libtool")
++obj.vnum = "1.2.3" # optional, but versioned libraries are common
++"""
++
++# fake libtool files
++fakelibtool_vardeps = ['CXX', 'PREFIX']
++def fakelibtool_build(task):
++	# Writes a .la file, used by libtool
++	env = task.env
++	dest  = open(task.outputs[0].abspath(env), 'w')
++	sname = task.inputs[0].name
++	fu = dest.write
++	fu("# Generated by ltmain.sh - GNU libtool 1.5.18 - (pwn3d by BKsys II code name WAF)\n")
++	if env['vnum']:
++		nums = env['vnum'].split('.')
++		libname = task.inputs[0].name
++		name3 = libname+'.'+env['vnum']
++		name2 = libname+'.'+nums[0]
++		name1 = libname
++		fu("dlname='%s'\n" % name2)
++		strn = " ".join([name3, name2, name1])
++		fu("library_names='%s'\n" % (strn) )
++	else:
++		fu("dlname='%s'\n" % sname)
++		fu("library_names='%s %s %s'\n" % (sname, sname, sname) )
++	fu("old_library=''\n")
++	vars = ' '.join(env['libtoolvars']+env['LINKFLAGS'])
++	fu("dependency_libs='%s'\n" % vars)
++	fu("current=0\n")
++	fu("age=0\nrevision=0\ninstalled=yes\nshouldnotlink=no\n")
++	fu("dlopen=''\ndlpreopen=''\n")
++	fu("libdir='%s/lib'\n" % env['PREFIX'])
++	dest.close()
++	return 0
++
++def read_la_file(path):
++	sp = re.compile(r'^([^=]+)=\'(.*)\'$')
++	dc={}
++	file = open(path, "r")
++	for line in file.readlines():
++		try:
++			#print sp.split(line.strip())
++			_, left, right, _ = sp.split(line.strip())
++			dc[left]=right
++		except ValueError:
++			pass
++	file.close()
++	return dc
++
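++# Illustrative example (hypothetical library names): a .la file containing
++#   dlname='libfoo.so.1'
++#   dependency_libs='-L/usr/lib -lbar'
++# is parsed by read_la_file() into
++#   {'dlname': 'libfoo.so.1', 'dependency_libs': '-L/usr/lib -lbar'}
++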
++ at feature("libtool")
++ at after('apply_link')
++def apply_link_libtool(self):
++	if self.type != 'program':
++		linktask = self.link_task
++		self.latask = self.create_task('fakelibtool', linktask.outputs, linktask.outputs[0].change_ext('.la'))
++
++	if self.bld.is_install:
++		self.bld.install_files('${PREFIX}/lib', linktask.outputs[0], self.env)
++
++ at feature("libtool")
++ at before('apply_core')
++def apply_libtool(self):
++	self.env['vnum']=self.vnum
++
++	paths=[]
++	libs=[]
++	libtool_files=[]
++	libtool_vars=[]
++
++	for l in self.env['LINKFLAGS']:
++		if l[:2]=='-L':
++			paths.append(l[2:])
++		elif l[:2]=='-l':
++			libs.append(l[2:])
++
++	for l in libs:
++		for p in paths:
++			dict = read_la_file(p+'/lib'+l+'.la')
++			linkflags2 = dict.get('dependency_libs', '')
++			for v in linkflags2.split():
++				if v.endswith('.la'):
++					libtool_files.append(v)
++					libtool_vars.append(v)
++					continue
++				self.env.append_unique('LINKFLAGS', v)
++				break
++
++	self.env['libtoolvars']=libtool_vars
++
++	while libtool_files:
++		file = libtool_files.pop()
++		dict = read_la_file(file)
++		for v in dict['dependency_libs'].split():
++			if v[-3:] == '.la':
++				libtool_files.append(v)
++				continue
++			self.env.append_unique('LINKFLAGS', v)
++
++Task.task_type_from_func('fakelibtool', vars=fakelibtool_vardeps, func=fakelibtool_build, color='BLUE', after="cc_link cxx_link static_link")
++
++class libtool_la_file:
++	def __init__ (self, la_filename):
++		self.__la_filename = la_filename
++		#remove path and .la suffix
++		self.linkname = str(os.path.split(la_filename)[-1])[:-3]
++		if self.linkname.startswith("lib"):
++			self.linkname = self.linkname[3:]
++		# The name that we can dlopen(3).
++		self.dlname = None
++		# Names of this library
++		self.library_names = None
++		# The name of the static archive.
++		self.old_library = None
++		# Libraries that this one depends upon.
++		self.dependency_libs = None
++		# Version information for the library.
++		self.current = None
++		self.age = None
++		self.revision = None
++		# Is this an already installed library?
++		self.installed = None
++		# Should we warn about portability when linking against -modules?
++		self.shouldnotlink = None
++		# Files to dlopen/dlpreopen
++		self.dlopen = None
++		self.dlpreopen = None
++		# Directory that this library needs to be installed in:
++		self.libdir = '/usr/lib'
++		if not self.__parse():
++			raise ValueError("file %s not found!!" %(la_filename))
++
++	def __parse(self):
++		"Retrieve the variables from a file"
++		if not os.path.isfile(self.__la_filename): return 0
++		la_file=open(self.__la_filename, 'r')
++		for line in la_file:
++			ln = line.strip()
++			if not ln: continue
++			if ln[0]=='#': continue
++			(key, value) = str(ln).split('=', 1)
++			key = key.strip()
++			value = value.strip()
++			if value == "no": value = False
++			elif value == "yes": value = True
++			else:
++				try: value = int(value)
++				except ValueError: value = value.strip("'")
++			setattr(self, key, value)
++		la_file.close()
++		return 1
++
++	def get_libs(self):
++		"""return linkflags for this lib"""
++		libs = []
++		if self.dependency_libs:
++			libs = str(self.dependency_libs).strip().split()
++		if libs == None:
++			libs = []
++		# add la lib and libdir
++		libs.insert(0, "-l%s" % self.linkname.strip())
++		libs.insert(0, "-L%s" % self.libdir.strip())
++		return libs
++
++	def __str__(self):
++		return '''\
++dlname = "%(dlname)s"
++library_names = "%(library_names)s"
++old_library = "%(old_library)s"
++dependency_libs = "%(dependency_libs)s"
++version = %(current)s.%(age)s.%(revision)s
++installed = "%(installed)s"
++shouldnotlink = "%(shouldnotlink)s"
++dlopen = "%(dlopen)s"
++dlpreopen = "%(dlpreopen)s"
++libdir = "%(libdir)s"''' % self.__dict__
++
++class libtool_config:
++	def __init__ (self, la_filename):
++		self.__libtool_la_file = libtool_la_file(la_filename)
++		tmp = self.__libtool_la_file
++		self.__version = [int(tmp.current), int(tmp.age), int(tmp.revision)]
++		self.__sub_la_files = []
++		self.__sub_la_files.append(la_filename)
++		self.__libs = None
++
++	def __cmp__(self, other):
++		"""make it compareable with X.Y.Z versions (Y and Z are optional)"""
++		if not other:
++			return 1
++		othervers = [int(s) for s in str(other).split(".")]
++		selfvers = self.__version
++		return cmp(selfvers, othervers)
++
++	def __str__(self):
++		return "\n".join([
++			str(self.__libtool_la_file),
++			' '.join(self.__libtool_la_file.get_libs()),
++			'* New getlibs:',
++			' '.join(self.get_libs())
++		])
++
++	def __get_la_libs(self, la_filename):
++		return libtool_la_file(la_filename).get_libs()
++
++	def get_libs(self):
++		"""return the complete uniqe linkflags that do not
++		contain .la files anymore"""
++		libs_list = list(self.__libtool_la_file.get_libs())
++		libs_map = {}
++		while len(libs_list) > 0:
++			entry = libs_list.pop(0)
++			if entry:
++				if str(entry).endswith(".la"):
++					## prevents duplicate .la checks
++					if entry not in self.__sub_la_files:
++						self.__sub_la_files.append(entry)
++						libs_list.extend(self.__get_la_libs(entry))
++				else:
++					libs_map[entry]=1
++		self.__libs = libs_map.keys()
++		return self.__libs
++
++	def get_libs_only_L(self):
++		if not self.__libs: self.get_libs()
++		libs = self.__libs
++		libs = [s for s in libs if str(s).startswith('-L')]
++		return libs
++
++	def get_libs_only_l(self):
++		if not self.__libs: self.get_libs()
++		libs = self.__libs
++		libs = [s for s in libs if str(s).startswith('-l')]
++		return libs
++
++	def get_libs_only_other(self):
++		if not self.__libs: self.get_libs()
++		libs = self.__libs
++		libs = [s for s in libs if not(str(s).startswith('-L')or str(s).startswith('-l'))]
++		return libs
++
++def useCmdLine():
++	"""parse cmdline args and control build"""
++	usage = '''Usage: %prog [options] PathToFile.la
++example: %prog --atleast-version=2.0.0 /usr/lib/libIlmImf.la
++or: %prog --libs /usr/lib/libamarok.la'''
++	parser = optparse.OptionParser(usage)
++	a = parser.add_option
++	a("--version", dest = "versionNumber",
++		action = "store_true", default = False,
++		help = "output version of libtool-config"
++		)
++	a("--debug", dest = "debug",
++		action = "store_true", default = False,
++		help = "enable debug"
++		)
++	a("--libs", dest = "libs",
++		action = "store_true", default = False,
++		help = "output all linker flags"
++		)
++	a("--libs-only-l", dest = "libs_only_l",
++		action = "store_true", default = False,
++		help = "output -l flags"
++		)
++	a("--libs-only-L", dest = "libs_only_L",
++		action = "store_true", default = False,
++		help = "output -L flags"
++		)
++	a("--libs-only-other", dest = "libs_only_other",
++		action = "store_true", default = False,
++		help = "output other libs (e.g. -pthread)"
++		)
++	a("--atleast-version", dest = "atleast_version",
++		default=None,
++		help = "return 0 if the module is at least version ATLEAST_VERSION"
++		)
++	a("--exact-version", dest = "exact_version",
++		default=None,
++		help = "return 0 if the module is exactly version EXACT_VERSION"
++		)
++	a("--max-version", dest = "max_version",
++		default=None,
++		help = "return 0 if the module is at no newer than version MAX_VERSION"
++		)
++
++	(options, args) = parser.parse_args()
++	if len(args) != 1 and not options.versionNumber:
++		parser.error("incorrect number of arguments")
++	if options.versionNumber:
++		print("libtool-config version %s" % REVISION)
++		return 0
++	ltf = libtool_config(args[0])
++	if options.debug:
++		print(ltf)
++	if options.atleast_version:
++		if ltf >= options.atleast_version: return 0
++		sys.exit(1)
++	if options.exact_version:
++		if ltf == options.exact_version: return 0
++		sys.exit(1)
++	if options.max_version:
++		if ltf <= options.max_version: return 0
++		sys.exit(1)
++
++	def p(x):
++		print(" ".join(x))
++	if options.libs: p(ltf.get_libs())
++	elif options.libs_only_l: p(ltf.get_libs_only_l())
++	elif options.libs_only_L: p(ltf.get_libs_only_L())
++	elif options.libs_only_other: p(ltf.get_libs_only_other())
++	return 0
++
++if __name__ == '__main__':
++	useCmdLine()
++
+diff --git a/buildtools/wafadmin/Tools/lua.py b/buildtools/wafadmin/Tools/lua.py
+new file mode 100644
+index 0000000..5b181e1
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/lua.py
+@@ -0,0 +1,25 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Sebastian Schlingmann, 2008
++# Thomas Nagy, 2008 (ita)
++
++import TaskGen
++from TaskGen import taskgen, feature
++from Constants import *
++
++TaskGen.declare_chain(
++	name = 'luac',
++	rule = '${LUAC} -s -o ${TGT} ${SRC}',
++	ext_in = '.lua',
++	ext_out = '.luac',
++	reentrant = False,
++	install = 'LUADIR', # env variable
++)
++
++@feature('lua')
++def init_lua(self):
++	self.default_chmod = O755
++
++def detect(conf):
++	conf.find_program('luac', var='LUAC', mandatory = True)
++
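++# A minimal wscript sketch (the source file name is an assumption): the
++# chain declared above compiles each .lua source with luac.
++#
++#   def build(bld):
++#       bld(features='lua', source='init.lua')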
+diff --git a/buildtools/wafadmin/Tools/misc.py b/buildtools/wafadmin/Tools/misc.py
+new file mode 100644
+index 0000000..9903ee4
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/misc.py
+@@ -0,0 +1,430 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006 (ita)
++
++"""
++Custom objects:
++ - execute a function every time
++ - copy a file somewhere else
++"""
++
++import shutil, re, os
++import TaskGen, Node, Task, Utils, Build, Constants
++from TaskGen import feature, taskgen, after, before
++from Logs import debug
++
++def copy_func(tsk):
++	"Make a file copy. This might be used to make other kinds of file processing (even calling a compiler is possible)"
++	env = tsk.env
++	infile = tsk.inputs[0].abspath(env)
++	outfile = tsk.outputs[0].abspath(env)
++	try:
++		shutil.copy2(infile, outfile)
++	except (OSError, IOError):
++		return 1
++	else:
++		if tsk.chmod: os.chmod(outfile, tsk.chmod)
++		return 0
++
++def action_process_file_func(tsk):
++	"Ask the function attached to the task to process it"
++	if not tsk.fun: raise Utils.WafError('task must have a function attached to it for copy_func to work!')
++	return tsk.fun(tsk)
++
++class cmd_taskgen(TaskGen.task_gen):
++	def __init__(self, *k, **kw):
++		TaskGen.task_gen.__init__(self, *k, **kw)
++
++@feature('cmd')
++def apply_cmd(self):
++	"call a command everytime"
++	if not self.fun: raise Utils.WafError('cmdobj needs a function!')
++	tsk = Task.TaskBase()
++	tsk.fun = self.fun
++	tsk.env = self.env
++	self.tasks.append(tsk)
++	tsk.install_path = self.install_path
++
++class copy_taskgen(TaskGen.task_gen):
++	"By default, make a file copy, if fun is provided, fun will make the copy (or call a compiler, etc)"
++	def __init__(self, *k, **kw):
++		TaskGen.task_gen.__init__(self, *k, **kw)
++
++@feature('copy')
++@before('apply_core')
++def apply_copy(self):
++	Utils.def_attrs(self, fun=copy_func)
++	self.default_install_path = 0
++
++	lst = self.to_list(self.source)
++	self.meths.remove('apply_core')
++
++	for filename in lst:
++		node = self.path.find_resource(filename)
++		if not node: raise Utils.WafError('cannot find input file %s for processing' % filename)
++
++		target = self.target
++		if not target or len(lst)>1: target = node.name
++
++		# TODO the file path may be incorrect
++		newnode = self.path.find_or_declare(target)
++
++		tsk = self.create_task('copy', node, newnode)
++		tsk.fun = self.fun
++		tsk.chmod = self.chmod
++		tsk.install_path = self.install_path
++
++		if not tsk.env:
++			tsk.debug()
++			raise Utils.WafError('task without an environment')
++
++def subst_func(tsk):
++	"Substitutes variables in a .in file"
++
++	m4_re = re.compile('@(\w+)@', re.M)
++
++	env = tsk.env
++	infile = tsk.inputs[0].abspath(env)
++	outfile = tsk.outputs[0].abspath(env)
++
++	code = Utils.readf(infile)
++
++	# replace all % by %% to prevent errors from % signs in the input file during string formatting
++	code = code.replace('%', '%%')
++
++	s = m4_re.sub(r'%(\1)s', code)
++
++	di = tsk.dict or {}
++	if not di:
++		names = m4_re.findall(code)
++		for i in names:
++			di[i] = env.get_flat(i) or env.get_flat(i.upper())
++
++	file = open(outfile, 'w')
++	file.write(s % di)
++	file.close()
++	if tsk.chmod: os.chmod(outfile, tsk.chmod)
++
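++# Illustrative example: with env['PREFIX'] set to '/usr/local' (a
++# hypothetical value), an input file line
++#   prefix=@PREFIX@
++# is rewritten by subst_func() as
++#   prefix=/usr/local
++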
++class subst_taskgen(TaskGen.task_gen):
++	def __init__(self, *k, **kw):
++		TaskGen.task_gen.__init__(self, *k, **kw)
++
++@feature('subst')
++@before('apply_core')
++def apply_subst(self):
++	Utils.def_attrs(self, fun=subst_func)
++	self.default_install_path = 0
++	lst = self.to_list(self.source)
++	self.meths.remove('apply_core')
++
++	self.dict = getattr(self, 'dict', {})
++
++	for filename in lst:
++		node = self.path.find_resource(filename)
++		if not node: raise Utils.WafError('cannot find input file %s for processing' % filename)
++
++		if self.target:
++			newnode = self.path.find_or_declare(self.target)
++		else:
++			newnode = node.change_ext('')
++
++		try:
++			self.dict = self.dict.get_merged_dict()
++		except AttributeError:
++			pass
++
++		if self.dict and not self.env['DICT_HASH']:
++			self.env = self.env.copy()
++			keys = list(self.dict.keys())
++			keys.sort()
++			lst = [self.dict[x] for x in keys]
++			self.env['DICT_HASH'] = str(Utils.h_list(lst))
++
++		tsk = self.create_task('copy', node, newnode)
++		tsk.fun = self.fun
++		tsk.dict = self.dict
++		tsk.dep_vars = ['DICT_HASH']
++		tsk.install_path = self.install_path
++		tsk.chmod = self.chmod
++
++		if not tsk.env:
++			tsk.debug()
++			raise Utils.WafError('task without an environment')
++
++####################
++## command-output ####
++####################
++
++class cmd_arg(object):
++	"""command-output arguments for representing files or folders"""
++	def __init__(self, name, template='%s'):
++		self.name = name
++		self.template = template
++		self.node = None
++
++class input_file(cmd_arg):
++	def find_node(self, base_path):
++		assert isinstance(base_path, Node.Node)
++		self.node = base_path.find_resource(self.name)
++		if self.node is None:
++			raise Utils.WafError("Input file %s not found in " % (self.name, base_path))
++
++	def get_path(self, env, absolute):
++		if absolute:
++			return self.template % self.node.abspath(env)
++		else:
++			return self.template % self.node.srcpath(env)
++
++class output_file(cmd_arg):
++	def find_node(self, base_path):
++		assert isinstance(base_path, Node.Node)
++		self.node = base_path.find_or_declare(self.name)
++		if self.node is None:
++			raise Utils.WafError("Output file %s not found in " % (self.name, base_path))
++
++	def get_path(self, env, absolute):
++		if absolute:
++			return self.template % self.node.abspath(env)
++		else:
++			return self.template % self.node.bldpath(env)
++
++class cmd_dir_arg(cmd_arg):
++	def find_node(self, base_path):
++		assert isinstance(base_path, Node.Node)
++		self.node = base_path.find_dir(self.name)
++		if self.node is None:
++			raise Utils.WafError("Directory %s not found in " % (self.name, base_path))
++
++class input_dir(cmd_dir_arg):
++	def get_path(self, dummy_env, dummy_absolute):
++		return self.template % self.node.abspath()
++
++class output_dir(cmd_dir_arg):
++	def get_path(self, env, dummy_absolute):
++		return self.template % self.node.abspath(env)
++
++
++class command_output(Task.Task):
++	color = "BLUE"
++	def __init__(self, env, command, command_node, command_args, stdin, stdout, cwd, os_env, stderr):
++		Task.Task.__init__(self, env, normal=1)
++		assert isinstance(command, (str, Node.Node))
++		self.command = command
++		self.command_args = command_args
++		self.stdin = stdin
++		self.stdout = stdout
++		self.cwd = cwd
++		self.os_env = os_env
++		self.stderr = stderr
++
++		if command_node is not None: self.dep_nodes = [command_node]
++		self.dep_vars = [] # additional environment variables to look at
++
++	def run(self):
++		task = self
++		#assert len(task.inputs) > 0
++
++		def input_path(node, template):
++			if task.cwd is None:
++				return template % node.bldpath(task.env)
++			else:
++				return template % node.abspath()
++		def output_path(node, template):
++			fun = node.abspath
++			if task.cwd is None: fun = node.bldpath
++			return template % fun(task.env)
++
++		if isinstance(task.command, Node.Node):
++			argv = [input_path(task.command, '%s')]
++		else:
++			argv = [task.command]
++
++		for arg in task.command_args:
++			if isinstance(arg, str):
++				argv.append(arg)
++			else:
++				assert isinstance(arg, cmd_arg)
++				argv.append(arg.get_path(task.env, (task.cwd is not None)))
++
++		if task.stdin:
++			stdin = open(input_path(task.stdin, '%s'))
++		else:
++			stdin = None
++
++		if task.stdout:
++			stdout = open(output_path(task.stdout, '%s'), "w")
++		else:
++			stdout = None
++
++		if task.stderr:
++			stderr = open(output_path(task.stderr, '%s'), "w")
++		else:
++			stderr = None
++
++		if task.cwd is None:
++			cwd = ('None (actually %r)' % os.getcwd())
++		else:
++			cwd = repr(task.cwd)
++		debug("command-output: cwd=%s, stdin=%r, stdout=%r, argv=%r" %
++			     (cwd, stdin, stdout, argv))
++
++		if task.os_env is None:
++			os_env = os.environ
++		else:
++			os_env = task.os_env
++		command = Utils.pproc.Popen(argv, stdin=stdin, stdout=stdout, stderr=stderr, cwd=task.cwd, env=os_env)
++		return command.wait()
++
++class cmd_output_taskgen(TaskGen.task_gen):
++	def __init__(self, *k, **kw):
++		TaskGen.task_gen.__init__(self, *k, **kw)
++
++@feature('command-output')
++def init_cmd_output(self):
++	Utils.def_attrs(self,
++		stdin = None,
++		stdout = None,
++		stderr = None,
++		# the command to execute
++		command = None,
++
++		# whether it is an external command; otherwise it is assumed
++		# to be an executable binary or script that lives in the
++		# source or build tree.
++		command_is_external = False,
++
++		# extra parameters (argv) to pass to the command (excluding
++		# the command itself)
++		argv = [],
++
++		# dependencies to other objects -> this is probably not what you want (ita)
++		# values must be 'task_gen' instances (not names!)
++		dependencies = [],
++
++		# dependencies on env variable contents
++		dep_vars = [],
++
++		# input files that are implicit, i.e. they are not
++		# stdin, nor are they mentioned explicitly in argv
++		hidden_inputs = [],
++
++		# output files that are implicit, i.e. they are not
++		# stdout, nor are they mentioned explicitly in argv
++		hidden_outputs = [],
++
++		# change the subprocess to this cwd (must use obj.input_dir() or output_dir() here)
++		cwd = None,
++
++		# OS environment variables to pass to the subprocess
++		# if None, use the default environment variables unchanged
++		os_env = None)
++
++@feature('command-output')
++@after('init_cmd_output')
++def apply_cmd_output(self):
++	if self.command is None:
++		raise Utils.WafError("command-output missing command")
++	if self.command_is_external:
++		cmd = self.command
++		cmd_node = None
++	else:
++		cmd_node = self.path.find_resource(self.command)
++		assert cmd_node is not None, ('''Could not find command '%s' in source tree.
++Hint: if this is an external command,
++use command_is_external=True''') % (self.command,)
++		cmd = cmd_node
++
++	if self.cwd is None:
++		cwd = None
++	else:
++		assert isinstance(self.cwd, cmd_dir_arg)
++		self.cwd.find_node(self.path)
++		# resolve to a plain path so it can be handed to Popen(cwd=...)
++		cwd = self.cwd.get_path(self.env, True)
++
++	args = []
++	inputs = []
++	outputs = []
++
++	for arg in self.argv:
++		if isinstance(arg, cmd_arg):
++			arg.find_node(self.path)
++			if isinstance(arg, input_file):
++				inputs.append(arg.node)
++			if isinstance(arg, output_file):
++				outputs.append(arg.node)
++
++	if self.stdout is None:
++		stdout = None
++	else:
++		assert isinstance(self.stdout, str)
++		stdout = self.path.find_or_declare(self.stdout)
++		if stdout is None:
++			raise Utils.WafError("File %s not found" % (self.stdout,))
++		outputs.append(stdout)
++
++	if self.stderr is None:
++		stderr = None
++	else:
++		assert isinstance(self.stderr, str)
++		stderr = self.path.find_or_declare(self.stderr)
++		if stderr is None:
++			raise Utils.WafError("File %s not found" % (self.stderr,))
++		outputs.append(stderr)
++
++	if self.stdin is None:
++		stdin = None
++	else:
++		assert isinstance(self.stdin, str)
++		stdin = self.path.find_resource(self.stdin)
++		if stdin is None:
++			raise Utils.WafError("File %s not found" % (self.stdin,))
++		inputs.append(stdin)
++
++	for hidden_input in self.to_list(self.hidden_inputs):
++		node = self.path.find_resource(hidden_input)
++		if node is None:
++			raise Utils.WafError("File %s not found in dir %s" % (hidden_input, self.path))
++		inputs.append(node)
++
++	for hidden_output in self.to_list(self.hidden_outputs):
++		node = self.path.find_or_declare(hidden_output)
++		if node is None:
++			raise Utils.WafError("File %s not found in dir %s" % (hidden_output, self.path))
++		outputs.append(node)
++
++	if not (inputs or getattr(self, 'no_inputs', None)):
++		raise Utils.WafError('command-output objects must have at least one input file or give self.no_inputs')
++	if not (outputs or getattr(self, 'no_outputs', None)):
++		raise Utils.WafError('command-output objects must have at least one output file or give self.no_outputs')
++
++	task = command_output(self.env, cmd, cmd_node, self.argv, stdin, stdout, cwd, self.os_env, stderr)
++	Utils.copy_attrs(self, task, 'before after ext_in ext_out', only_if_set=True)
++	self.tasks.append(task)
++
++	task.inputs = inputs
++	task.outputs = outputs
++	task.dep_vars = self.to_list(self.dep_vars)
++
++	for dep in self.dependencies:
++		assert dep is not self
++		dep.post()
++		for dep_task in dep.tasks:
++			task.set_run_after(dep_task)
++
++	if not task.inputs:
++		# the case for svnversion, always run, and update the output nodes
++		task.runnable_status = type(Task.TaskBase.run)(runnable_status, task, task.__class__) # always run
++		task.post_run = type(Task.TaskBase.run)(post_run, task, task.__class__)
++
++	# TODO the case with no outputs?
++
++def post_run(self):
++	for x in self.outputs:
++		h = Utils.h_file(x.abspath(self.env))
++		self.generator.bld.node_sigs[self.env.variant()][x.id] = h
++
++def runnable_status(self):
++	return Constants.RUN_ME
++
++Task.task_type_from_func('copy', vars=[], func=action_process_file_func)
++TaskGen.task_gen.classes['command-output'] = cmd_output_taskgen
++
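++# A hedged wscript sketch for the command-output feature (script and file
++# names are assumptions): run a script from the source tree, reading
++# input.txt and capturing stdout into output.txt.
++#
++#   def build(bld):
++#       bld(features='command-output',
++#           command='gen.py',
++#           argv=[input_file('input.txt')],
++#           stdout='output.txt')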
+diff --git a/buildtools/wafadmin/Tools/msvc.py b/buildtools/wafadmin/Tools/msvc.py
+new file mode 100644
+index 0000000..4fde8b1
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/msvc.py
+@@ -0,0 +1,797 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Carlos Rafael Giani, 2006 (dv)
++# Tamas Pal, 2007 (folti)
++# Nicolas Mercier, 2009
++# Microsoft Visual C++/Intel C++ compiler support - beta, needs more testing
++
++# usage:
++#
++# conf.env['MSVC_VERSIONS'] = ['msvc 9.0', 'msvc 8.0', 'wsdk 7.0', 'intel 11', 'PocketPC 9.0', 'Smartphone 8.0']
++# conf.env['MSVC_TARGETS'] = ['x64']
++# conf.check_tool('msvc')
++# OR conf.check_tool('msvc', funs='no_autodetect')
++# conf.check_lib_msvc('gdi32')
++# conf.check_libs_msvc('kernel32 user32', mandatory=True)
++# ...
++# obj.uselib = 'KERNEL32 USER32 GDI32'
++#
++# platforms and targets will be tested in the order they appear;
++# the first good configuration will be used
++# supported platforms :
++# ia64, x64, x86, x86_amd64, x86_ia64
++
++# compilers supported :
++#  msvc       => Visual Studio, versions 7.1 (2003), 8.0 (2005), 9.0 (2008)
++#  wsdk       => Windows SDK, versions 6.0, 6.1, 7.0
++#  icl        => Intel compiler, versions 9, 10, 11
++#  Smartphone => Compiler/SDK for Smartphone devices (armv4/v4i)
++#  PocketPC   => Compiler/SDK for PocketPC devices (armv4/v4i)
++
++
++import os, sys, re, string, optparse
++import Utils, TaskGen, Runner, Configure, Task, Options
++from Logs import debug, info, warn, error
++from TaskGen import after, before, feature
++
++from Configure import conftest, conf
++import ccroot, cc, cxx, ar, winres
++from libtool import read_la_file
++
++try:
++	import _winreg
++except:
++	import winreg as _winreg
++
++pproc = Utils.pproc
++
++# importlibs provided by MSVC/Platform SDK. Do NOT search them....
++g_msvc_systemlibs = """
++aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
++cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs
++credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d
++ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp
++faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glaux glu32 gpedit gpmuuid
++gtrts32w gtrtst32 hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop
++kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi
++mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree
++msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm
++netapi32 nmapi nmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp
++odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlg olepro32 opends60 opengl32
++osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu
++ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm
++rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32
++shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32
++traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg
++version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm
++wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp
++""".split()
++
++
++all_msvc_platforms = [ ('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'), ('x86_amd64', 'amd64'), ('x86_ia64', 'ia64') ]
++all_wince_platforms = [ ('armv4', 'arm'), ('armv4i', 'arm'), ('mipsii', 'mips'), ('mipsii_fp', 'mips'), ('mipsiv', 'mips'), ('mipsiv_fp', 'mips'), ('sh4', 'sh'), ('x86', 'cex86') ]
++all_icl_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')]
++
++def setup_msvc(conf, versions):
++	platforms = Utils.to_list(conf.env['MSVC_TARGETS']) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
++	desired_versions = conf.env['MSVC_VERSIONS'] or [v for v,_ in versions][::-1]
++	versiondict = dict(versions)
++
++	for version in desired_versions:
++		try:
++			targets = dict(versiondict [version])
++			for target in platforms:
++				try:
++					arch,(p1,p2,p3) = targets[target]
++					compiler,revision = version.split()
++					return compiler,revision,p1,p2,p3
++				except KeyError: continue
++		except KeyError: continue
++	conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
++
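++# Illustrative example: with conf.env['MSVC_VERSIONS'] = ['msvc 9.0'] and
++# conf.env['MSVC_TARGETS'] = ['x64'], setup_msvc() returns a tuple like
++# ('msvc', '9.0', bindirs, incdirs, libdirs) for the first installed
++# combination that matches (the path values are placeholders).
++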
++@conf
++def get_msvc_version(conf, compiler, version, target, vcvars):
++	debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)
++	batfile = os.path.join(conf.blddir, 'waf-print-msvc.bat')
++	f = open(batfile, 'w')
++	f.write("""@echo off
++set INCLUDE=
++set LIB=
++call "%s" %s
++echo PATH=%%PATH%%
++echo INCLUDE=%%INCLUDE%%
++echo LIB=%%LIB%%
++""" % (vcvars,target))
++	f.close()
++	sout = Utils.cmd_output(['cmd', '/E:on', '/V:on', '/C', batfile])
++	lines = sout.splitlines()
++
++	for x in ('Setting environment', 'Setting SDK environment', 'Intel(R) C++ Compiler'):
++		if lines[0].find(x) != -1:
++			break
++	else:
++		debug('msvc: get_msvc_version: %r %r %r -> not found', compiler, version, target)
++		conf.fatal('msvc: Impossible to find a valid architecture for building (in get_msvc_version)')
++
++	for line in lines[1:]:
++		if line.startswith('PATH='):
++			path = line[5:]
++			MSVC_PATH = path.split(';')
++		elif line.startswith('INCLUDE='):
++			MSVC_INCDIR = [i for i in line[8:].split(';') if i]
++		elif line.startswith('LIB='):
++			MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
++
++	# Check if the compiler is usable at all.
++	# The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
++	env = {}
++	env.update(os.environ)
++	env.update(PATH = path)
++	compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
++	cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)
++	# delete CL if it exists, because it could contain parameters which can change cl's behaviour rather catastrophically.
++	if env.has_key('CL'):
++		del(env['CL'])
++
++	try:
++		p = pproc.Popen([cxx, '/help'], env=env, stdout=pproc.PIPE, stderr=pproc.PIPE)
++		out, err = p.communicate()
++		if p.returncode != 0:
++			raise Exception('return code: %r: %r' % (p.returncode, err))
++	except Exception, e:
++		debug('msvc: get_msvc_version: %r %r %r -> failure', compiler, version, target)
++		debug(str(e))
++		conf.fatal('msvc: cannot run the compiler (in get_msvc_version)')
++	else:
++		debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target)
++
++	return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
++
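++# Illustrative output parsed by get_msvc_version() above (the paths are
++# hypothetical):
++#   PATH=C:\Program Files\Microsoft Visual Studio 9.0\VC\BIN;...
++#   INCLUDE=C:\Program Files\Microsoft Visual Studio 9.0\VC\INCLUDE;...
++#   LIB=C:\Program Files\Microsoft Visual Studio 9.0\VC\LIB;...
++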
++@conf
++def gather_wsdk_versions(conf, versions):
++	version_pattern = re.compile('^v..?.?\...?.?')
++	try:
++		all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
++	except WindowsError:
++		try:
++			all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
++		except WindowsError:
++			return
++	index = 0
++	while 1:
++		try:
++			version = _winreg.EnumKey(all_versions, index)
++		except WindowsError:
++			break
++		index = index + 1
++		if not version_pattern.match(version):
++			continue
++		try:
++			msvc_version = _winreg.OpenKey(all_versions, version)
++			path,type = _winreg.QueryValueEx(msvc_version,'InstallationFolder')
++		except WindowsError:
++			continue
++		if os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')):
++			targets = []
++			for target,arch in all_msvc_platforms:
++				try:
++					targets.append((target, (arch, conf.get_msvc_version('wsdk', version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd')))))
++				except Configure.ConfigurationError:
++					pass
++			versions.append(('wsdk ' + version[1:], targets))
++
++@conf
++def gather_msvc_versions(conf, versions):
++	# checks SmartPhones SDKs
++	try:
++		ce_sdk = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
++	except WindowsError:
++		try:
++			ce_sdk = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
++		except WindowsError:
++			ce_sdk = ''
++	if ce_sdk:
++		supported_wince_platforms = []
++		ce_index = 0
++		while 1:
++			try:
++				sdk_device = _winreg.EnumKey(ce_sdk, ce_index)
++			except WindowsError:
++				break
++			ce_index = ce_index + 1
++			sdk = _winreg.OpenKey(ce_sdk, sdk_device)
++			path,type = _winreg.QueryValueEx(sdk, 'SDKRootDir')
++			path=str(path)
++			path,device = os.path.split(path)
++			if not device:
++				path,device = os.path.split(path)
++			for arch,compiler in all_wince_platforms:
++				platforms = []
++				if os.path.isdir(os.path.join(path, device, 'Lib', arch)):
++					platforms.append((arch, compiler, os.path.join(path, device, 'Include', arch), os.path.join(path, device, 'Lib', arch)))
++				if platforms:
++					supported_wince_platforms.append((device, platforms))
++	# checks MSVC
++	version_pattern = re.compile('^..?\...?')
++	for vcver,vcvar in [('VCExpress','exp'), ('VisualStudio','')]:
++		try:
++			all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\'+vcver)
++		except WindowsError:
++			try:
++				all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\'+vcver)
++			except WindowsError:
++				continue
++		index = 0
++		while 1:
++			try:
++				version = _winreg.EnumKey(all_versions, index)
++			except WindowsError:
++				break
++			index = index + 1
++			if not version_pattern.match(version):
++				continue
++			try:
++				msvc_version = _winreg.OpenKey(all_versions, version + "\\Setup\\VS")
++				path,type = _winreg.QueryValueEx(msvc_version, 'ProductDir')
++				path=str(path)
++				targets = []
++				if ce_sdk:
++					for device,platforms in supported_wince_platforms:
++						cetargets = []
++						for platform,compiler,include,lib in platforms:
++							winCEpath = os.path.join(path, 'VC', 'ce')
++							if os.path.isdir(winCEpath):
++								common_bindirs,_1,_2 = conf.get_msvc_version('msvc', version, 'x86', os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat'))
++								if os.path.isdir(os.path.join(winCEpath, 'lib', platform)):
++									bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)] + common_bindirs
++									incdirs = [include, os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include')]
++									libdirs = [lib, os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform)]
++									cetargets.append((platform, (platform, (bindirs,incdirs,libdirs))))
++						versions.append((device+' '+version, cetargets))
++				if os.path.isfile(os.path.join(path, 'VC', 'vcvarsall.bat')):
++					for target,realtarget in all_msvc_platforms[::-1]:
++						try:
++							targets.append((target, (realtarget, conf.get_msvc_version('msvc', version, target, os.path.join(path, 'VC', 'vcvarsall.bat')))))
++						except:
++							pass
++				elif os.path.isfile(os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat')):
++					try:
++						targets.append(('x86', ('x86', conf.get_msvc_version('msvc', version, 'x86', os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat')))))
++					except Configure.ConfigurationError:
++						pass
++				versions.append(('msvc '+version, targets))
++
++			except WindowsError:
++				continue
++
++@conf
++def gather_icl_versions(conf, versions):
++	version_pattern = re.compile('^...?.?\....?.?')
++	try:
++		all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
++	except WindowsError:
++		try:
++			all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\C++')
++		except WindowsError:
++			return
++	index = 0
++	while 1:
++		try:
++			version = _winreg.EnumKey(all_versions, index)
++		except WindowsError:
++			break
++		index = index + 1
++		if not version_pattern.match(version):
++			continue
++		targets = []
++		for target,arch in all_icl_platforms:
++			try:
++				icl_version = _winreg.OpenKey(all_versions, version+'\\'+target)
++				path,type = _winreg.QueryValueEx(icl_version,'ProductDir')
++				if os.path.isfile(os.path.join(path, 'bin', 'iclvars.bat')):
++					try:
++						targets.append((target, (arch, conf.get_msvc_version('intel', version, target, os.path.join(path, 'bin', 'iclvars.bat')))))
++					except Configure.ConfigurationError:
++						pass
++			except WindowsError:
++				continue
++		major = version[0:2]
++		versions.append(('intel ' + major, targets))
++
++@conf
++def get_msvc_versions(conf):
++	if not conf.env.MSVC_INSTALLED_VERSIONS:
++		lst = []
++		conf.gather_msvc_versions(lst)
++		conf.gather_wsdk_versions(lst)
++		conf.gather_icl_versions(lst)
++		conf.env.MSVC_INSTALLED_VERSIONS = lst
++	return conf.env.MSVC_INSTALLED_VERSIONS
++
++@conf
++def print_all_msvc_detected(conf):
++	for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']:
++		info(version)
++		for target,l in targets:
++			info("\t"+target)
++
++def detect_msvc(conf):
++	versions = get_msvc_versions(conf)
++	return setup_msvc(conf, versions)
++
++@conf
++def find_lt_names_msvc(self, libname, is_static=False):
++	"""
++	Win32/MSVC specific code to glean information from libtool la files.
++	This function is not attached to the task_gen class.
++	"""
++	lt_names=[
++		'lib%s.la' % libname,
++		'%s.la' % libname,
++	]
++
++	for path in self.env['LIBPATH']:
++		for la in lt_names:
++			laf=os.path.join(path,la)
++			dll=None
++			if os.path.exists(laf):
++				ltdict=read_la_file(laf)
++				lt_libdir=None
++				if ltdict.get('libdir', ''):
++					lt_libdir = ltdict['libdir']
++				if not is_static and ltdict.get('library_names', ''):
++					dllnames=ltdict['library_names'].split()
++					dll=dllnames[0].lower()
++					dll=re.sub('\.dll$', '', dll)
++					return (lt_libdir, dll, False)
++				elif ltdict.get('old_library', ''):
++					olib=ltdict['old_library']
++					if os.path.exists(os.path.join(path,olib)):
++						return (path, olib, True)
++					elif lt_libdir != '' and os.path.exists(os.path.join(lt_libdir,olib)):
++						return (lt_libdir, olib, True)
++					else:
++						return (None, olib, True)
++				else:
++					raise Utils.WafError('invalid libtool object file: %s' % laf)
++	return (None, None, None)
++
++@conf
++def libname_msvc(self, libname, is_static=False, mandatory=False):
++	lib = libname.lower()
++	lib = re.sub('\.lib$','',lib)
++
++	if lib in g_msvc_systemlibs:
++		return lib
++
++	lib=re.sub('^lib','',lib)
++
++	if lib == 'm':
++		return None
++
++	(lt_path, lt_libname, lt_static) = self.find_lt_names_msvc(lib, is_static)
++
++	if lt_path != None and lt_libname != None:
++		if lt_static == True:
++			# file existence check has been made by find_lt_names
++			return os.path.join(lt_path,lt_libname)
++
++	if lt_path != None:
++		_libpaths=[lt_path] + self.env['LIBPATH']
++	else:
++		_libpaths=self.env['LIBPATH']
++
++	static_libs=[
++		'lib%ss.lib' % lib,
++		'lib%s.lib' % lib,
++		'%ss.lib' % lib,
++		'%s.lib' %lib,
++		]
++
++	dynamic_libs=[
++		'lib%s.dll.lib' % lib,
++		'lib%s.dll.a' % lib,
++		'%s.dll.lib' % lib,
++		'%s.dll.a' % lib,
++		'lib%s_d.lib' % lib,
++		'%s_d.lib' % lib,
++		'%s.lib' %lib,
++		]
++
++	libnames=static_libs
++	if not is_static:
++		libnames=dynamic_libs + static_libs
++
++	for path in _libpaths:
++		for libn in libnames:
++			if os.path.exists(os.path.join(path, libn)):
++				debug('msvc: lib found: %s', os.path.join(path,libn))
++				return re.sub('\.lib$', '',libn)
++
++	#if no lib can be found, just return the libname as msvc expects it
++	if mandatory:
++		self.fatal("The library %r could not be found" % libname)
++	return re.sub('\.lib$', '', libname)
++
++@conf
++def check_lib_msvc(self, libname, is_static=False, uselib_store=None, mandatory=False):
++	"This is the api to use"
++	libn = self.libname_msvc(libname, is_static, mandatory)
++
++	if not uselib_store:
++		uselib_store = libname.upper()
++
++	# Note: ideally we should be able to place the lib in the right env var, either STATICLIB or LIB,
++	# but we don't distinguish static libs from shared libs.
++	# This is ok since msvc doesn't have any special linker flag to select static libs (no env['STATICLIB_MARKER'])
++	if False and is_static: # disabled
++		self.env['STATICLIB_' + uselib_store] = [libn]
++	else:
++		self.env['LIB_' + uselib_store] = [libn]
++
++@conf
++def check_libs_msvc(self, libnames, is_static=False, mandatory=False):
++	for libname in Utils.to_list(libnames):
++		self.check_lib_msvc(libname, is_static, mandatory=mandatory)
++
++@conftest
++def no_autodetect(conf):
++	conf.eval_rules(detect.replace('autodetect', ''))
++
++
++detect = '''
++autodetect
++find_msvc
++msvc_common_flags
++cc_load_tools
++cxx_load_tools
++cc_add_flags
++cxx_add_flags
++link_add_flags
++'''
++
++@conftest
++def autodetect(conf):
++	v = conf.env
++	compiler, version, path, includes, libdirs = detect_msvc(conf)
++	v['PATH'] = path
++	v['CPPPATH'] = includes
++	v['LIBPATH'] = libdirs
++	v['MSVC_COMPILER'] = compiler
++
++def _get_prog_names(conf, compiler):
++	if compiler=='intel':
++		compiler_name = 'ICL'
++		linker_name = 'XILINK'
++		lib_name = 'XILIB'
++	else:
++		# assumes CL.exe
++		compiler_name = 'CL'
++		linker_name = 'LINK'
++		lib_name = 'LIB'
++	return compiler_name, linker_name, lib_name
++
++@conftest
++def find_msvc(conf):
++	# due to path format limitations, limit operation only to native Win32. Yeah it sucks.
++	if sys.platform != 'win32':
++		conf.fatal('MSVC module only works under native Win32 Python! cygwin is not supported yet')
++
++	v = conf.env
++
++	compiler, version, path, includes, libdirs = detect_msvc(conf)
++
++	compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
++	has_msvc_manifest = (compiler == 'msvc' and float(version) >= 8) or (compiler == 'wsdk' and float(version) >= 6)	or (compiler == 'intel' and float(version) >= 11)
++
++	# compiler
++	cxx = None
++	if v.CXX: cxx = v.CXX
++	elif 'CXX' in conf.environ: cxx = conf.environ['CXX']
++	if not cxx: cxx = conf.find_program(compiler_name, var='CXX', path_list=path, mandatory=True)
++	cxx = conf.cmd_to_list(cxx)
++
++	# before setting anything, check if the compiler is really msvc
++	env = dict(conf.environ)
++	env.update(PATH = ';'.join(path))
++	if not Utils.cmd_output([cxx, '/nologo', '/?'], silent=True, env=env):
++		conf.fatal('the msvc compiler could not be identified')
++
++	link = v.LINK_CXX
++	if not link:
++		link = conf.find_program(linker_name, path_list=path, mandatory=True)
++	ar = v.AR
++	if not ar:
++		ar = conf.find_program(lib_name, path_list=path, mandatory=True)
++
++	# manifest tool. Not required for VS 2003 and below; required for VS 2005 and later.
++	mt = v.MT
++	if has_msvc_manifest:
++		mt = conf.find_program('MT', path_list=path, mandatory=True)
++
++	# no more possibility of failure means the data state will be consistent
++	# we may store the data safely now
++
++	v.MSVC_MANIFEST = has_msvc_manifest
++	v.PATH = path
++	v.CPPPATH = includes
++	v.LIBPATH = libdirs
++
++	# c/c++ compiler
++	v.CC = v.CXX = cxx
++	v.CC_NAME = v.CXX_NAME = 'msvc'
++
++	v.LINK = v.LINK_CXX = link
++	if not v.LINK_CC:
++		v.LINK_CC = v.LINK_CXX
++
++	v.AR = ar
++	v.MT = mt
++	v.MTFLAGS = v.ARFLAGS = ['/NOLOGO']
++
++
++	conf.check_tool('winres')
++
++	if not conf.env.WINRC:
++		warn('Resource compiler not found. Compiling resource files is disabled')
++
++	# environment flags
++	try: v.prepend_value('CPPPATH', conf.environ['INCLUDE'])
++	except KeyError: pass
++	try: v.prepend_value('LIBPATH', conf.environ['LIB'])
++	except KeyError: pass
++
++@conftest
++def msvc_common_flags(conf):
++	v = conf.env
++
++	v['CPPFLAGS']     = ['/W3', '/nologo']
++
++	v['CCDEFINES_ST']     = '/D%s'
++	v['CXXDEFINES_ST']    = '/D%s'
++
++	# TODO just use _WIN32, which defined by the compiler itself!
++	v['CCDEFINES']    = ['WIN32'] # avoid using this, any compiler predefines the _WIN32 macro anyway
++	v['CXXDEFINES']   = ['WIN32'] # avoid using this, any compiler predefines the _WIN32 macro anyway
++
++	v['_CCINCFLAGS']  = []
++	v['_CCDEFFLAGS']  = []
++	v['_CXXINCFLAGS'] = []
++	v['_CXXDEFFLAGS'] = []
++
++	v['CC_SRC_F']     = ''
++	v['CC_TGT_F']     = ['/c', '/Fo']
++	v['CXX_SRC_F']    = ''
++	v['CXX_TGT_F']    = ['/c', '/Fo']
++
++	v['CPPPATH_ST']   = '/I%s' # template for adding include paths
++
++	v['AR_TGT_F'] = v['CCLNK_TGT_F'] = v['CXXLNK_TGT_F'] = '/OUT:'
++
++	# Subsystem specific flags
++	v['CPPFLAGS_CONSOLE']   = ['/SUBSYSTEM:CONSOLE']
++	v['CPPFLAGS_NATIVE']    = ['/SUBSYSTEM:NATIVE']
++	v['CPPFLAGS_POSIX']     = ['/SUBSYSTEM:POSIX']
++	v['CPPFLAGS_WINDOWS']   = ['/SUBSYSTEM:WINDOWS']
++	v['CPPFLAGS_WINDOWSCE']	= ['/SUBSYSTEM:WINDOWSCE']
++
++	# CRT specific flags
++	v['CPPFLAGS_CRT_MULTITHREADED'] = ['/MT']
++	v['CPPFLAGS_CRT_MULTITHREADED_DLL'] = ['/MD']
++
++	# TODO these are defined by the compiler itself!
++	v['CPPDEFINES_CRT_MULTITHREADED'] = ['_MT'] # this is defined by the compiler itself!
++	v['CPPDEFINES_CRT_MULTITHREADED_DLL'] = ['_MT', '_DLL'] # these are defined by the compiler itself!
++
++	v['CPPFLAGS_CRT_MULTITHREADED_DBG'] = ['/MTd']
++	v['CPPFLAGS_CRT_MULTITHREADED_DLL_DBG'] = ['/MDd']
++
++	# TODO these are defined by the compiler itself!
++	v['CPPDEFINES_CRT_MULTITHREADED_DBG'] = ['_DEBUG', '_MT'] # these are defined by the compiler itself!
++	v['CPPDEFINES_CRT_MULTITHREADED_DLL_DBG'] = ['_DEBUG', '_MT', '_DLL'] # these are defined by the compiler itself!
++
++	# compiler debug levels
++	v['CCFLAGS']            = ['/TC']
++	v['CCFLAGS_OPTIMIZED']  = ['/O2', '/DNDEBUG']
++	v['CCFLAGS_RELEASE']    = ['/O2', '/DNDEBUG']
++	v['CCFLAGS_DEBUG']      = ['/Od', '/RTC1', '/ZI']
++	v['CCFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/ZI']
++
++	v['CXXFLAGS']            = ['/TP', '/EHsc']
++	v['CXXFLAGS_OPTIMIZED']  = ['/O2', '/DNDEBUG']
++	v['CXXFLAGS_RELEASE']    = ['/O2', '/DNDEBUG']
++
++	v['CXXFLAGS_DEBUG']      = ['/Od', '/RTC1', '/ZI']
++	v['CXXFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/ZI']
++
++	# linker
++	v['LIB']              = []
++
++	v['LIB_ST']           = '%s.lib' # template for adding libs
++	v['LIBPATH_ST']       = '/LIBPATH:%s' # template for adding libpaths
++	v['STATICLIB_ST']     = 'lib%s.lib' # Note: to be able to distinguish between a static lib and a dll import lib, it's a good practice to name the static lib 'lib%s.lib' and the dll import lib '%s.lib'
++	v['STATICLIBPATH_ST'] = '/LIBPATH:%s'
++
++	v['LINKFLAGS'] = ['/NOLOGO']
++	if v['MSVC_MANIFEST']:
++		v.append_value('LINKFLAGS', '/MANIFEST')
++	v['LINKFLAGS_DEBUG']      = ['/DEBUG']
++	v['LINKFLAGS_ULTRADEBUG'] = ['/DEBUG']
++
++	# shared library
++	v['shlib_CCFLAGS']  = ['']
++	v['shlib_CXXFLAGS'] = ['']
++	v['shlib_LINKFLAGS']= ['/DLL']
++	v['shlib_PATTERN']  = '%s.dll'
++	v['implib_PATTERN'] = '%s.lib'
++	v['IMPLIB_ST']      = '/IMPLIB:%s'
++
++	# static library
++	v['staticlib_LINKFLAGS'] = ['']
++	v['staticlib_PATTERN']   = 'lib%s.lib' # Note: to be able to distinguish between a static lib and a dll import lib, it's a good practice to name the static lib 'lib%s.lib' and the dll import lib '%s.lib'
++
++	# program
++	v['program_PATTERN']     = '%s.exe'
++
++
++#######################################################################################################
++##### conf above, build below
++
++@after('apply_link')
++@feature('cc', 'cxx')
++def apply_flags_msvc(self):
++	if self.env.CC_NAME != 'msvc' or not self.link_task:
++		return
++
++	subsystem = getattr(self, 'subsystem', '')
++	if subsystem:
++		subsystem = '/subsystem:%s' % subsystem
++		flags = 'cstaticlib' in self.features and 'ARFLAGS' or 'LINKFLAGS'
++		self.env.append_value(flags, subsystem)
++
++	if getattr(self, 'link_task', None) and not 'cstaticlib' in self.features:
++		for f in self.env.LINKFLAGS:
++			d = f.lower()
++			if d[1:] == 'debug':
++				pdbnode = self.link_task.outputs[0].change_ext('.pdb')
++				pdbfile = pdbnode.bldpath(self.env)
++				self.link_task.outputs.append(pdbnode)
++				self.bld.install_files(self.install_path, [pdbnode], env=self.env)
++				break
++
++@feature('cprogram', 'cshlib', 'cstaticlib')
++@after('apply_lib_vars')
++@before('apply_obj_vars')
++def apply_obj_vars_msvc(self):
++	if self.env['CC_NAME'] != 'msvc':
++		return
++
++	try:
++		self.meths.remove('apply_obj_vars')
++	except ValueError:
++		pass
++
++	libpaths = getattr(self, 'libpaths', [])
++	if not libpaths: self.libpaths = libpaths
++
++	env = self.env
++	app = env.append_unique
++
++	cpppath_st       = env['CPPPATH_ST']
++	lib_st           = env['LIB_ST']
++	staticlib_st     = env['STATICLIB_ST']
++	libpath_st       = env['LIBPATH_ST']
++	staticlibpath_st = env['STATICLIBPATH_ST']
++
++	for i in env['LIBPATH']:
++		app('LINKFLAGS', libpath_st % i)
++		if not libpaths.count(i):
++			libpaths.append(i)
++
++	for i in env['LIBPATH']:
++		app('LINKFLAGS', staticlibpath_st % i)
++		if not libpaths.count(i):
++			libpaths.append(i)
++
++	# i doubt that anyone will make a fully static binary anyway
++	if not env['FULLSTATIC']:
++		if env['STATICLIB'] or env['LIB']:
++			app('LINKFLAGS', env['SHLIB_MARKER']) # TODO does SHLIB_MARKER work?
++
++	for i in env['STATICLIB']:
++		app('LINKFLAGS', staticlib_st % i)
++
++	for i in env['LIB']:
++		app('LINKFLAGS', lib_st % i)
++
++# split the manifest file processing from the link task, like for the rc processing
++
++@feature('cprogram', 'cshlib')
++@after('apply_link')
++def apply_manifest(self):
++	"""Special linker for MSVC with support for embedding manifests into DLL's
++	and executables compiled by Visual Studio 2005 or probably later. Without
++	the manifest file, the binaries are unusable.
++	See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx"""
++
++	if self.env.CC_NAME == 'msvc' and self.env.MSVC_MANIFEST:
++		out_node = self.link_task.outputs[0]
++		man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
++		self.link_task.outputs.append(man_node)
++		self.link_task.do_manifest = True
++
++def exec_mf(self):
++	env = self.env
++	mtool = env['MT']
++	if not mtool:
++		return 0
++
++	self.do_manifest = False
++
++	outfile = self.outputs[0].bldpath(env)
++	
++	manifest = None
++	for out_node in self.outputs:
++		if out_node.name.endswith('.manifest'):
++			manifest = out_node.bldpath(env)
++			break
++	if manifest is None:
++		# Should never get here.  If we do, it means the manifest file was 
++		# never added to the outputs list, thus we don't have a manifest file 
++		# to embed, so we just return.
++		return 0
++
++	# embedding mode. Different for EXE's and DLL's.
++	# see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
++	mode = ''
++	if 'cprogram' in self.generator.features:
++		mode = '1'
++	elif 'cshlib' in self.generator.features:
++		mode = '2'
++
++	debug('msvc: embedding manifest')
++	#flags = ' '.join(env['MTFLAGS'] or [])
++
++	lst = []
++	lst.extend([env['MT']])
++	lst.extend(Utils.to_list(env['MTFLAGS']))
++	lst.extend(Utils.to_list("-manifest"))
++	lst.extend(Utils.to_list(manifest))
++	lst.extend(Utils.to_list("-outputresource:%s;%s" % (outfile, mode)))
++
++	#cmd='%s %s -manifest "%s" -outputresource:"%s";#%s' % (mtool, flags,
++	#	manifest, outfile, mode)
++	lst = [lst]
++	return self.exec_command(*lst)
++
++########## stupid evil command modification: concatenate the tokens /Fx, /doc, and /x: with the next token
++
++def exec_command_msvc(self, *k, **kw):
++	"instead of quoting all the paths and keep using the shell, we can just join the options msvc is interested in"
++	if self.env['CC_NAME'] == 'msvc':
++		if isinstance(k[0], list):
++			lst = []
++			carry = ''
++			for a in k[0]:
++				if len(a) == 3 and a.startswith('/F') or a == '/doc' or a[-1] == ':':
++					carry = a
++				else:
++					lst.append(carry + a)
++					carry = ''
++			k = [lst]
++
++		env = dict(os.environ)
++		env.update(PATH = ';'.join(self.env['PATH']))
++		kw['env'] = env
++
++	ret = self.generator.bld.exec_command(*k, **kw)
++	if ret: return ret
++	if getattr(self, 'do_manifest', None):
++		ret = exec_mf(self)
++	return ret
++
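++# Illustrative example of the token joining above:
++#   ['cl', '/Fo', 'main.obj', '/c', 'main.c']
++# becomes
++#   ['cl', '/Fomain.obj', '/c', 'main.c']
++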
++for k in 'cc cxx winrc cc_link cxx_link static_link qxx'.split():
++	cls = Task.TaskBase.classes.get(k, None)
++	if cls:
++		cls.exec_command = exec_command_msvc
++
+diff --git a/buildtools/wafadmin/Tools/nasm.py b/buildtools/wafadmin/Tools/nasm.py
+new file mode 100644
+index 0000000..b99c3c7
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/nasm.py
+@@ -0,0 +1,49 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2008
++
++"""
++Nasm processing
++"""
++
++import os
++import TaskGen, Task, Utils
++from TaskGen import taskgen, before, extension
++
++nasm_str = '${NASM} ${NASM_FLAGS} ${NASM_INCLUDES} ${SRC} -o ${TGT}'
++
++EXT_NASM = ['.s', '.S', '.asm', '.ASM', '.spp', '.SPP']
++
++@before('apply_link')
++def apply_nasm_vars(self):
++
++	# flags
++	if hasattr(self, 'nasm_flags'):
++		for flag in self.to_list(self.nasm_flags):
++			self.env.append_value('NASM_FLAGS', flag)
++
++	# includes - well, if we suppose it works with c processing
++	if hasattr(self, 'includes'):
++		for inc in self.to_list(self.includes):
++			node = self.path.find_dir(inc)
++			if not node:
++				raise Utils.WafError('cannot find the dir ' + inc)
++			self.env.append_value('NASM_INCLUDES', '-I%s' % node.srcpath(self.env))
++			self.env.append_value('NASM_INCLUDES', '-I%s' % node.bldpath(self.env))
++
++@extension(EXT_NASM)
++def nasm_file(self, node):
++	try: obj_ext = self.obj_ext
++	except AttributeError: obj_ext = '_%d.o' % self.idx
++
++	task = self.create_task('nasm', node, node.change_ext(obj_ext))
++	self.compiled_tasks.append(task)
++
++	self.meths.append('apply_nasm_vars')
++
++# create our action here
++Task.simple_task_type('nasm', nasm_str, color='BLUE', ext_out='.o', shell=False)
++
++def detect(conf):
++	nasm = conf.find_program(['nasm', 'yasm'], var='NASM', mandatory=True)
++
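++# A minimal wscript sketch (file names and flags are assumptions): .s
++# sources listed next to C sources are assembled by the rule above.
++#
++#   def build(bld):
++#       bld(features='cc cprogram', source='main.c util.s', target='app',
++#           nasm_flags='-f elf')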
+diff --git a/buildtools/wafadmin/Tools/ocaml.py b/buildtools/wafadmin/Tools/ocaml.py
+new file mode 100644
+index 0000000..20c9269
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/ocaml.py
+@@ -0,0 +1,298 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006 (ita)
++
++"ocaml support"
++
++import os, re
++import TaskGen, Utils, Task, Build
++from Logs import error
++from TaskGen import taskgen, feature, before, after, extension
++
++EXT_MLL = ['.mll']
++EXT_MLY = ['.mly']
++EXT_MLI = ['.mli']
++EXT_MLC = ['.c']
++EXT_ML  = ['.ml']
++
++open_re = re.compile('^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M)
++foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M)
++def filter_comments(txt):
++	meh = [0]
++	def repl(m):
++		if m.group(1): meh[0] += 1
++		elif m.group(2): meh[0] -= 1
++		elif not meh[0]: return m.group(0)
++		return ''
++	return foo.sub(repl, txt)
++
++def scan(self):
++	node = self.inputs[0]
++	code = filter_comments(node.read(self.env))
++
++	global open_re
++	names = []
++	import_iterator = open_re.finditer(code)
++	if import_iterator:
++		for import_match in import_iterator:
++			names.append(import_match.group(1))
++	found_lst = []
++	raw_lst = []
++	for name in names:
++		nd = None
++		for x in self.incpaths:
++			nd = x.find_resource(name.lower()+'.ml')
++			if not nd: nd = x.find_resource(name+'.ml')
++			if nd:
++				found_lst.append(nd)
++				break
++		else:
++			raw_lst.append(name)
++
++	return (found_lst, raw_lst)
++
++native_lst=['native', 'all', 'c_object']
++bytecode_lst=['bytecode', 'all']
++class ocaml_taskgen(TaskGen.task_gen):
++	def __init__(self, *k, **kw):
++		TaskGen.task_gen.__init__(self, *k, **kw)
++
++ at feature('ocaml')
++def init_ml(self):
++	Utils.def_attrs(self,
++		type = 'all',
++		incpaths_lst = [],
++		bld_incpaths_lst = [],
++		mlltasks = [],
++		mlytasks = [],
++		mlitasks = [],
++		native_tasks = [],
++		bytecode_tasks = [],
++		linktasks = [],
++		bytecode_env = None,
++		native_env = None,
++		compiled_tasks = [],
++		includes = '',
++		uselib = '',
++		are_deps_set = 0)
++
++ at feature('ocaml')
++ at after('init_ml')
++def init_envs_ml(self):
++
++	self.islibrary = getattr(self, 'islibrary', False)
++
++	global native_lst, bytecode_lst
++	self.native_env = None
++	if self.type in native_lst:
++		self.native_env = self.env.copy()
++		if self.islibrary: self.native_env['OCALINKFLAGS']   = '-a'
++
++	self.bytecode_env = None
++	if self.type in bytecode_lst:
++		self.bytecode_env = self.env.copy()
++		if self.islibrary: self.bytecode_env['OCALINKFLAGS'] = '-a'
++
++	if self.type == 'c_object':
++		self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')
++
++ at feature('ocaml')
++ at before('apply_vars_ml')
++ at after('init_envs_ml')
++def apply_incpaths_ml(self):
++	inc_lst = self.includes.split()
++	lst = self.incpaths_lst
++	for dir in inc_lst:
++		node = self.path.find_dir(dir)
++		if not node:
++			error("node not found: " + str(dir))
++			continue
++		self.bld.rescan(node)
++		if not node in lst: lst.append(node)
++		self.bld_incpaths_lst.append(node)
++	# now the nodes are added to self.incpaths_lst
++
++ at feature('ocaml')
++ at before('apply_core')
++def apply_vars_ml(self):
++	for i in self.incpaths_lst:
++		if self.bytecode_env:
++			app = self.bytecode_env.append_value
++			app('OCAMLPATH', '-I')
++			app('OCAMLPATH', i.srcpath(self.env))
++			app('OCAMLPATH', '-I')
++			app('OCAMLPATH', i.bldpath(self.env))
++
++		if self.native_env:
++			app = self.native_env.append_value
++			app('OCAMLPATH', '-I')
++			app('OCAMLPATH', i.bldpath(self.env))
++			app('OCAMLPATH', '-I')
++			app('OCAMLPATH', i.srcpath(self.env))
++
++	varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT']
++	for name in self.uselib.split():
++		for vname in varnames:
++			cnt = self.env[vname+'_'+name]
++			if cnt:
++				if self.bytecode_env: self.bytecode_env.append_value(vname, cnt)
++				if self.native_env: self.native_env.append_value(vname, cnt)
++
++ at feature('ocaml')
++ at after('apply_core')
++def apply_link_ml(self):
++
++	if self.bytecode_env:
++		ext = self.islibrary and '.cma' or '.run'
++
++		linktask = self.create_task('ocalink')
++		linktask.bytecode = 1
++		linktask.set_outputs(self.path.find_or_declare(self.target + ext))
++		linktask.obj = self
++		linktask.env = self.bytecode_env
++		self.linktasks.append(linktask)
++
++	if self.native_env:
++		if self.type == 'c_object': ext = '.o'
++		elif self.islibrary: ext = '.cmxa'
++		else: ext = ''
++
++		linktask = self.create_task('ocalinkx')
++		linktask.set_outputs(self.path.find_or_declare(self.target + ext))
++		linktask.obj = self
++		linktask.env = self.native_env
++		self.linktasks.append(linktask)
++
++		# we produce a .o file to be used by gcc
++		self.compiled_tasks.append(linktask)
++
++ at extension(EXT_MLL)
++def mll_hook(self, node):
++	mll_task = self.create_task('ocamllex', node, node.change_ext('.ml'), env=self.native_env)
++	self.mlltasks.append(mll_task)
++
++	self.allnodes.append(mll_task.outputs[0])
++
++ at extension(EXT_MLY)
++def mly_hook(self, node):
++	mly_task = self.create_task('ocamlyacc', node, [node.change_ext('.ml'), node.change_ext('.mli')], env=self.native_env)
++	self.mlytasks.append(mly_task)
++	self.allnodes.append(mly_task.outputs[0])
++
++	task = self.create_task('ocamlcmi', mly_task.outputs[1], mly_task.outputs[1].change_ext('.cmi'), env=self.native_env)
++
++ at extension(EXT_MLI)
++def mli_hook(self, node):
++	task = self.create_task('ocamlcmi', node, node.change_ext('.cmi'), env=self.native_env)
++	self.mlitasks.append(task)
++
++ at extension(EXT_MLC)
++def mlc_hook(self, node):
++	task = self.create_task('ocamlcc', node, node.change_ext('.o'), env=self.native_env)
++	self.compiled_tasks.append(task)
++
++ at extension(EXT_ML)
++def ml_hook(self, node):
++	if self.native_env:
++		task = self.create_task('ocamlx', node, node.change_ext('.cmx'), env=self.native_env)
++		task.obj = self
++		task.incpaths = self.bld_incpaths_lst
++		self.native_tasks.append(task)
++
++	if self.bytecode_env:
++		task = self.create_task('ocaml', node, node.change_ext('.cmo'), env=self.bytecode_env)
++		task.obj = self
++		task.bytecode = 1
++		task.incpaths = self.bld_incpaths_lst
++		self.bytecode_tasks.append(task)
++
++def compile_may_start(self):
++	if not getattr(self, 'flag_deps', ''):
++		self.flag_deps = 1
++
++		# the evil part is that we can only compute the dependencies after the
++		# source files can be read (this means actually producing the source files)
++		if getattr(self, 'bytecode', ''): alltasks = self.obj.bytecode_tasks
++		else: alltasks = self.obj.native_tasks
++
++		self.signature() # ensure that files are scanned - unfortunately
++		tree = self.generator.bld
++		env = self.env
++		for node in self.inputs:
++			lst = tree.node_deps[self.unique_id()]
++			for depnode in lst:
++				for t in alltasks:
++					if t == self: continue
++					if depnode in t.inputs:
++						self.set_run_after(t)
++
++		# TODO necessary to get the signature right - for now
++		delattr(self, 'cache_sig')
++		self.signature()
++
++	return Task.Task.runnable_status(self)
++
++b = Task.simple_task_type
++cls = b('ocamlx', '${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}', color='GREEN', shell=False)
++cls.runnable_status = compile_may_start
++cls.scan = scan
++
++b = Task.simple_task_type
++cls = b('ocaml', '${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}', color='GREEN', shell=False)
++cls.runnable_status = compile_may_start
++cls.scan = scan
++
++
++b('ocamlcmi', '${OCAMLC} ${OCAMLPATH} ${INCLUDES} -o ${TGT} -c ${SRC}', color='BLUE', before="ocaml ocamlcc ocamlx")
++b('ocamlcc', 'cd ${TGT[0].bld_dir(env)} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${INCLUDES} -c ${SRC[0].abspath(env)}', color='GREEN')
++
++b('ocamllex', '${OCAMLLEX} ${SRC} -o ${TGT}', color='BLUE', before="ocamlcmi ocaml ocamlcc")
++b('ocamlyacc', '${OCAMLYACC} -b ${TGT[0].bld_base(env)} ${SRC}', color='BLUE', before="ocamlcmi ocaml ocamlcc")
++
++
++def link_may_start(self):
++	if not getattr(self, 'order', ''):
++
++		# now reorder the inputs given the task dependencies
++		if getattr(self, 'bytecode', 0): alltasks = self.obj.bytecode_tasks
++		else: alltasks = self.obj.native_tasks
++
++		# this part is difficult: we do not have a total order on the tasks
++		# if the dependencies form a cycle, this loop may never terminate
++		seen = []
++		pendant = []+alltasks
++		while pendant:
++			task = pendant.pop(0)
++			if task in seen: continue
++			for x in task.run_after:
++				if not x in seen:
++					pendant.append(task)
++					break
++			else:
++				seen.append(task)
++		self.inputs = [x.outputs[0] for x in seen]
++		self.order = 1
++	return Task.Task.runnable_status(self)
++
++act = b('ocalink', '${OCAMLC} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS} ${SRC}', color='YELLOW', after="ocaml ocamlcc")
++act.runnable_status = link_may_start
++act = b('ocalinkx', '${OCAMLOPT} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS_OPT} ${SRC}', color='YELLOW', after="ocamlx ocamlcc")
++act.runnable_status = link_may_start
++
++def detect(conf):
++	opt = conf.find_program('ocamlopt', var='OCAMLOPT')
++	occ = conf.find_program('ocamlc', var='OCAMLC')
++	if (not opt) or (not occ):
++		conf.fatal('The Objective Caml compiler was not found:\ninstall it or make it available in your PATH')
++
++	v = conf.env
++	v['OCAMLC']       = occ
++	v['OCAMLOPT']     = opt
++	v['OCAMLLEX']     = conf.find_program('ocamllex', var='OCAMLLEX')
++	v['OCAMLYACC']    = conf.find_program('ocamlyacc', var='OCAMLYACC')
++	v['OCAMLFLAGS']   = ''
++	v['OCAMLLIB']     = Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
++	v['LIBPATH_OCAML'] = Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
++	v['CPPPATH_OCAML'] = Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
++	v['LIB_OCAML'] = 'camlrun'
++
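A hypothetical wscript fragment exercising the ocaml tool above (all names are illustrative):

    def configure(conf):
        conf.check_tool('ocaml')   # runs detect(), locating ocamlc/ocamlopt

    def build(bld):
        # 'type' selects the outputs: 'bytecode', 'native', 'all' or 'c_object'
        bld(features='ocaml',
            source='main.ml lexer.mll parser.mly',
            target='calc',
            type='all',
            includes='.')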
+diff --git a/buildtools/wafadmin/Tools/osx.py b/buildtools/wafadmin/Tools/osx.py
+new file mode 100644
+index 0000000..561eca4
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/osx.py
+@@ -0,0 +1,188 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy 2008
++
++"""MacOSX related tools
++
++To compile an executable into a Mac application bundle (a .app), set its 'mac_app' attribute:
++  obj.mac_app = True
++
++To make a bundled shared library (a .bundle), set the 'mac_bundle' attribute:
++  obj.mac_bundle = True
++"""
++
++import os, shutil, sys, platform
++import TaskGen, Task, Build, Options, Utils
++from TaskGen import taskgen, feature, after, before
++from Logs import error, debug
++
++# plist template
++app_info = '''
++<?xml version="1.0" encoding="UTF-8"?>
++<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
++<plist version="0.9">
++<dict>
++	<key>CFBundlePackageType</key>
++	<string>APPL</string>
++	<key>CFBundleGetInfoString</key>
++	<string>Created by Waf</string>
++	<key>CFBundleSignature</key>
++	<string>????</string>
++	<key>NOTE</key>
++	<string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
++	<key>CFBundleExecutable</key>
++	<string>%s</string>
++</dict>
++</plist>
++'''
++
++# see WAF issue 285
++# and also http://trac.macports.org/ticket/17059
++ at feature('cc', 'cxx')
++ at before('apply_lib_vars')
++def set_macosx_deployment_target(self):
++	if self.env['MACOSX_DEPLOYMENT_TARGET']:
++		os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env['MACOSX_DEPLOYMENT_TARGET']
++	elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ:
++		if sys.platform == 'darwin':
++			os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2])
++
++ at feature('cc', 'cxx')
++ at after('apply_lib_vars')
++def apply_framework(self):
++	for x in self.to_list(self.env['FRAMEWORKPATH']):
++		frameworkpath_st = '-F%s'
++		self.env.append_unique('CXXFLAGS', frameworkpath_st % x)
++		self.env.append_unique('CCFLAGS', frameworkpath_st % x)
++		self.env.append_unique('LINKFLAGS', frameworkpath_st % x)
++
++	for x in self.to_list(self.env['FRAMEWORK']):
++		self.env.append_value('LINKFLAGS', ['-framework', x])
++
++ at taskgen
++def create_bundle_dirs(self, name, out):
++	bld = self.bld
++	dir = out.parent.get_dir(name)
++
++	if not dir:
++		dir = out.__class__(name, out.parent, 1)
++		bld.rescan(dir)
++		contents = out.__class__('Contents', dir, 1)
++		bld.rescan(contents)
++		macos = out.__class__('MacOS', contents, 1)
++		bld.rescan(macos)
++	return dir
++
++def bundle_name_for_output(out):
++	name = out.name
++	k = name.rfind('.')
++	if k >= 0:
++		name = name[:k] + '.app'
++	else:
++		name = name + '.app'
++	return name
++
++ at taskgen
++ at after('apply_link')
++ at feature('cprogram')
++def create_task_macapp(self):
++	"""Use env['MACAPP'] to force *all* executables to be transformed into Mac applications
++	or use obj.mac_app = True to build specific targets as Mac apps"""
++	if self.env['MACAPP'] or getattr(self, 'mac_app', False):
++		apptask = self.create_task('macapp')
++		apptask.set_inputs(self.link_task.outputs)
++
++		out = self.link_task.outputs[0]
++
++		name = bundle_name_for_output(out)
++		dir = self.create_bundle_dirs(name, out)
++
++		n1 = dir.find_or_declare(['Contents', 'MacOS', out.name])
++
++		apptask.set_outputs([n1])
++		apptask.chmod = 0755
++		apptask.install_path = os.path.join(self.install_path, name, 'Contents', 'MacOS')
++		self.apptask = apptask
++
++ at after('apply_link')
++ at feature('cprogram')
++def create_task_macplist(self):
++	"""Use env['MACAPP'] to force *all* executables to be transformed into Mac applications
++	or use obj.mac_app = True to build specific targets as Mac apps"""
++	if  self.env['MACAPP'] or getattr(self, 'mac_app', False):
++		# check if the user specified a plist before using our template
++		if not getattr(self, 'mac_plist', False):
++			self.mac_plist = app_info
++
++		plisttask = self.create_task('macplist')
++		plisttask.set_inputs(self.link_task.outputs)
++
++		out = self.link_task.outputs[0]
++		self.mac_plist = self.mac_plist % (out.name)
++
++		name = bundle_name_for_output(out)
++		dir = self.create_bundle_dirs(name, out)
++
++		n1 = dir.find_or_declare(['Contents', 'Info.plist'])
++
++		plisttask.set_outputs([n1])
++		plisttask.mac_plist = self.mac_plist
++		plisttask.install_path = os.path.join(self.install_path, name, 'Contents')
++		self.plisttask = plisttask
++
++ at after('apply_link')
++ at feature('cshlib')
++def apply_link_osx(self):
++	name = self.link_task.outputs[0].name
++	if not self.install_path:
++		return
++	if getattr(self, 'vnum', None):
++		name = name.replace('.dylib', '.%s.dylib' % self.vnum)
++
++	path = os.path.join(Utils.subst_vars(self.install_path, self.env), name)
++	if '-dynamiclib' in self.env['LINKFLAGS']:
++		self.env.append_value('LINKFLAGS', '-install_name')
++		self.env.append_value('LINKFLAGS', path)
++
++ at before('apply_link', 'apply_lib_vars')
++ at feature('cc', 'cxx')
++def apply_bundle(self):
++	"""use env['MACBUNDLE'] to force all shlibs into mac bundles
++	or use obj.mac_bundle = True for specific targets only"""
++	if not ('cshlib' in self.features or 'shlib' in self.features): return
++	if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
++		self.env['shlib_PATTERN'] = self.env['macbundle_PATTERN']
++		uselib = self.uselib = self.to_list(self.uselib)
++		if not 'MACBUNDLE' in uselib: uselib.append('MACBUNDLE')
++
++ at after('apply_link')
++ at feature('cshlib')
++def apply_bundle_remove_dynamiclib(self):
++	if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
++		if not getattr(self, 'vnum', None):
++			try:
++				self.env['LINKFLAGS'].remove('-dynamiclib')
++				self.env['LINKFLAGS'].remove('-single_module')
++			except ValueError:
++				pass
++
++# TODO REMOVE IN 1.6 (global variable)
++app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources']
++
++def app_build(task):
++	env = task.env
++	shutil.copy2(task.inputs[0].srcpath(env), task.outputs[0].abspath(env))
++
++	return 0
++
++def plist_build(task):
++	env = task.env
++	f = open(task.outputs[0].abspath(env), "w")
++	f.write(task.mac_plist)
++	f.close()
++
++	return 0
++
++Task.task_type_from_func('macapp', vars=[], func=app_build, after="cxx_link cc_link static_link")
++Task.task_type_from_func('macplist', vars=[], func=plist_build, after="cxx_link cc_link static_link")
++
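A hypothetical sketch of how the attributes documented above are used from a wscript (names are made up):

    def build(bld):
        # executable wrapped into demo.app (create_task_macapp/create_task_macplist)
        exe = bld(features='cxx cprogram', source='main.cpp', target='demo')
        exe.mac_app = True

        # shared library built as a .bundle instead of a .dylib (apply_bundle)
        lib = bld(features='cxx cshlib', source='plugin.cpp', target='plugin')
        lib.mac_bundle = True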
+diff --git a/buildtools/wafadmin/Tools/perl.py b/buildtools/wafadmin/Tools/perl.py
+new file mode 100644
+index 0000000..a6787a8
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/perl.py
+@@ -0,0 +1,109 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# andersg at 0x63.nu 2007
++
++import os
++import Task, Options, Utils
++from Configure import conf
++from TaskGen import extension, taskgen, feature, before
++
++xsubpp_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
++EXT_XS = ['.xs']
++
++ at before('apply_incpaths', 'apply_type_vars', 'apply_lib_vars')
++ at feature('perlext')
++def init_perlext(self):
++	self.uselib = self.to_list(getattr(self, 'uselib', ''))
++	if not 'PERL' in self.uselib: self.uselib.append('PERL')
++	if not 'PERLEXT' in self.uselib: self.uselib.append('PERLEXT')
++	self.env['shlib_PATTERN'] = self.env['perlext_PATTERN']
++
++ at extension(EXT_XS)
++def xsubpp_file(self, node):
++	outnode = node.change_ext('.c')
++	self.create_task('xsubpp', node, outnode)
++	self.allnodes.append(outnode)
++
++Task.simple_task_type('xsubpp', xsubpp_str, color='BLUE', before='cc cxx', shell=False)
++
++ at conf
++def check_perl_version(conf, minver=None):
++	"""
++	Checks if perl is installed.
++
++	If it is, the variable PERL will be set in the environment.
++
++	The perl binary can be overridden with the --with-perl-binary configure option.
++
++	"""
++
++	if getattr(Options.options, 'perlbinary', None):
++		conf.env.PERL = Options.options.perlbinary
++	else:
++		conf.find_program('perl', var='PERL', mandatory=True)
++
++	try:
++		version = Utils.cmd_output([conf.env.PERL, '-e', 'printf "%vd",$^V'])
++	except:
++		conf.fatal('could not determine the perl version')
++
++	conf.env.PERL_VERSION = version
++	cver = ''
++	if minver:
++		try:
++			ver = tuple(map(int, version.split('.')))
++		except:
++			conf.fatal('unsupported perl version %r' % version)
++		if ver < minver:
++			conf.fatal('perl is too old')
++
++		cver = '.'.join(map(str,minver))
++	conf.check_message('perl', cver, True, version)
++
++ at conf
++def check_perl_module(conf, module):
++	"""
++	Check if the specified perl module is installed.
++
++	A minimum version can be requested by appending it to the module
++	name, like this:
++
++	conf.check_perl_module("Some::Module 2.92")
++	"""
++	cmd = [conf.env['PERL'], '-e', 'use %s' % module]
++	r = Utils.pproc.call(cmd, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE) == 0
++	conf.check_message("perl module %s" % module, "", r)
++	return r
++
++ at conf
++def check_perl_ext_devel(conf):
++	"""
++	Check for configuration needed to build perl extensions.
++
++	Sets different xxx_PERLEXT variables in the environment.
++
++	Also sets the ARCHDIR_PERL variable, useful as an installation path,
++	which can be overridden with --with-perl-archdir.
++	"""
++	if not conf.env.PERL:
++		conf.fatal('perl detection is required first')
++
++	def read_out(cmd):
++		return Utils.to_list(Utils.cmd_output([conf.env.PERL, '-MConfig', '-e', cmd]))
++
++	conf.env.LINKFLAGS_PERLEXT = read_out('print $Config{lddlflags}')
++	conf.env.CPPPATH_PERLEXT   = read_out('print "$Config{archlib}/CORE"')
++	conf.env.CCFLAGS_PERLEXT   = read_out('print "$Config{ccflags} $Config{cccdlflags}"')
++	conf.env.XSUBPP            = read_out('print "$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}"')
++	conf.env.EXTUTILS_TYPEMAP  = read_out('print "$Config{privlib}/ExtUtils/typemap"')
++	conf.env.perlext_PATTERN   = '%s.' + read_out('print $Config{dlext}')[0]
++
++	if getattr(Options.options, 'perlarchdir', None):
++		conf.env.ARCHDIR_PERL = Options.options.perlarchdir
++	else:
++		conf.env.ARCHDIR_PERL = read_out('print $Config{sitearch}')[0]
++
++def set_options(opt):
++	opt.add_option("--with-perl-binary", type="string", dest="perlbinary", help = 'Specify alternate perl binary', default=None)
++	opt.add_option("--with-perl-archdir", type="string", dest="perlarchdir", help = 'Specify directory where to install arch specific files', default=None)
++
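A hypothetical configure/build sketch for the perl support above (the module and file names are made up):

    def set_options(opt):
        opt.tool_options('perl')   # pulls in set_options() above

    def configure(conf):
        conf.check_tool('perl')
        conf.check_perl_version((5, 8, 0))
        conf.check_perl_module('ExtUtils::Embed 1.0')
        conf.check_perl_ext_devel()

    def build(bld):
        # an XS extension: the .xs file is run through xsubpp first
        bld(features='cc cshlib perlext', source='Foo.xs', target='Foo')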
+diff --git a/buildtools/wafadmin/Tools/preproc.py b/buildtools/wafadmin/Tools/preproc.py
+new file mode 100644
+index 0000000..5055456
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/preproc.py
+@@ -0,0 +1,836 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006-2009 (ita)
++
++"""
++C/C++ preprocessor for finding dependencies
++
++Reasons for using the Waf preprocessor by default
++1. Some c/c++ extensions (Qt) require a custom preprocessor for obtaining the dependencies (.moc files)
++2. Not all compilers provide .d files for obtaining the dependencies (portability)
++3. A naive file scanner will not catch constructs such as "#include foo()"
++4. A naive file scanner will catch unnecessary dependencies (change an unused header -> recompile everything)
++
++Regarding the speed concerns:
++a. the preprocessing is performed only when files must be compiled
++b. the macros are evaluated only for #if/#elif/#include
++c. the time penalty is about 10%
++d. system headers are not scanned
++
++Now if you do not want the Waf preprocessor, the tool "gccdeps" uses the .d files produced
++during the compilation to track the dependencies (useful when used with the boost libraries).
++It only works with gcc though, and it cannot be used with Qt builds. A dumb
++file scanner will be added in the future, so we will have most behaviours.
++"""
++# TODO: more varargs, pragma once
++# TODO: dumb file scanner tracking all includes
++
++import re, sys, os, string
++import Logs, Build, Utils
++from Logs import debug, error
++import traceback
++
++class PreprocError(Utils.WafError):
++	pass
++
++POPFILE = '-'
++
++
++recursion_limit = 5000
++"do not loop too much on header inclusion"
++
++go_absolute = 0
++"set to 1 to track headers on files in /usr/include - else absolute paths are ignored"
++
++standard_includes = ['/usr/include']
++if sys.platform == "win32":
++	standard_includes = []
++
++use_trigraphs = 0
++'apply the trigraph rules first'
++
++strict_quotes = 0
++"Keep <> for system includes (do not search for those includes)"
++
++g_optrans = {
++'not':'!',
++'and':'&&',
++'bitand':'&',
++'and_eq':'&=',
++'or':'||',
++'bitor':'|',
++'or_eq':'|=',
++'xor':'^',
++'xor_eq':'^=',
++'compl':'~',
++}
++"these ops are for c++, to reset, set an empty dict"
++
++# ignore #warning and #error
++re_lines = re.compile(\
++	'^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
++	re.IGNORECASE | re.MULTILINE)
++
++re_mac = re.compile("^[a-zA-Z_]\w*")
++re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
++re_pragma_once = re.compile('^\s*once\s*', re.IGNORECASE)
++re_nl = re.compile('\\\\\r*\n', re.MULTILINE)
++re_cpp = re.compile(
++	r"""(/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)|//[^\n]*|("(?:\\.|[^"\\])*"|'(?:\\.|[^'\\])*'|.[^/"'\\]*)""",
++	re.MULTILINE)
++trig_def = [('??'+a, b) for a, b in zip("=-/!'()<>", r'#~\|^[]{}')]
++chr_esc = {'0':0, 'a':7, 'b':8, 't':9, 'n':10, 'f':11, 'v':12, 'r':13, '\\':92, "'":39}
++
++NUM   = 'i'
++OP    = 'O'
++IDENT = 'T'
++STR   = 's'
++CHAR  = 'c'
++
++tok_types = [NUM, STR, IDENT, OP]
++exp_types = [
++	r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",
++	r'L?"([^"\\]|\\.)*"',
++	r'[a-zA-Z_]\w*',
++	r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',
++]
++re_clexer = re.compile('|'.join(["(?P<%s>%s)" % (name, part) for name, part in zip(tok_types, exp_types)]), re.M)
++
++accepted  = 'a'
++ignored   = 'i'
++undefined = 'u'
++skipped   = 's'
++
++def repl(m):
++	if m.group(1):
++		return ' '
++	s = m.group(2)
++	if s is None:
++		return ''
++	return s
++
++def filter_comments(filename):
++	# return a list of tuples : keyword, line
++	code = Utils.readf(filename)
++	if use_trigraphs:
++		for (a, b) in trig_def: code = code.split(a).join(b)
++	code = re_nl.sub('', code)
++	code = re_cpp.sub(repl, code)
++	return [(m.group(2), m.group(3)) for m in re.finditer(re_lines, code)]
++
++prec = {}
++# op -> number, needed for such expressions:   #if 1 && 2 != 0
++ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ',']
++for x in range(len(ops)):
++	syms = ops[x]
++	for u in syms.split():
++		prec[u] = x
++
++def reduce_nums(val_1, val_2, val_op):
++	"""apply arithmetic rules and try to return an integer result"""
++	#print val_1, val_2, val_op
++
++	# now perform the operation, make certain a and b are numeric
++	try:    a = 0 + val_1
++	except TypeError: a = int(val_1)
++	try:    b = 0 + val_2
++	except TypeError: b = int(val_2)
++
++	d = val_op
++	if d == '%':  c = a%b
++	elif d=='+':  c = a+b
++	elif d=='-':  c = a-b
++	elif d=='*':  c = a*b
++	elif d=='/':  c = a/b
++	elif d=='^':  c = a^b
++	elif d=='|':  c = a|b
++	elif d=='||': c = int(a or b)
++	elif d=='&':  c = a&b
++	elif d=='&&': c = int(a and b)
++	elif d=='==': c = int(a == b)
++	elif d=='!=': c = int(a != b)
++	elif d=='<=': c = int(a <= b)
++	elif d=='<':  c = int(a < b)
++	elif d=='>':  c = int(a > b)
++	elif d=='>=': c = int(a >= b)
++	elif d=='^':  c = int(a^b)
++	elif d=='<<': c = a<<b
++	elif d=='>>': c = a>>b
++	else: c = 0
++	return c
++
++def get_num(lst):
++	if not lst: raise PreprocError("empty list for get_num")
++	(p, v) = lst[0]
++	if p == OP:
++		if v == '(':
++			count_par = 1
++			i = 1
++			while i < len(lst):
++				(p, v) = lst[i]
++
++				if p == OP:
++					if v == ')':
++						count_par -= 1
++						if count_par == 0:
++							break
++					elif v == '(':
++						count_par += 1
++				i += 1
++			else:
++				raise PreprocError("rparen expected %r" % lst)
++
++			(num, _) = get_term(lst[1:i])
++			return (num, lst[i+1:])
++
++		elif v == '+':
++			return get_num(lst[1:])
++		elif v == '-':
++			num, lst = get_num(lst[1:])
++			return (reduce_nums('-1', num, '*'), lst)
++		elif v == '!':
++			num, lst = get_num(lst[1:])
++			return (int(not int(num)), lst)
++		elif v == '~':
++			num, lst = get_num(lst[1:]); return (~ int(num), lst)
++		else:
++			raise PreprocError("invalid op token %r for get_num" % lst)
++	elif p == NUM:
++		return v, lst[1:]
++	elif p == IDENT:
++		# all macros should have been replaced, remaining identifiers eval to 0
++		return 0, lst[1:]
++	else:
++		raise PreprocError("invalid token %r for get_num" % lst)
++
++def get_term(lst):
++	if not lst: raise PreprocError("empty list for get_term")
++	num, lst = get_num(lst)
++	if not lst:
++		return (num, [])
++	(p, v) = lst[0]
++	if p == OP:
++		if v == '&&' and not num:
++			return (num, [])
++		elif v == '||' and num:
++			return (num, [])
++		elif v == ',':
++			# skip
++			return get_term(lst[1:])
++		elif v == '?':
++			count_par = 0
++			i = 1
++			while i < len(lst):
++				(p, v) = lst[i]
++
++				if p == OP:
++					if v == ')':
++						count_par -= 1
++					elif v == '(':
++						count_par += 1
++					elif v == ':':
++						if count_par == 0:
++							break
++				i += 1
++			else:
++				raise PreprocError("rparen expected %r" % lst)
++
++			if int(num):
++				return get_term(lst[1:i])
++			else:
++				return get_term(lst[i+1:])
++
++		else:
++			num2, lst = get_num(lst[1:])
++
++			if not lst:
++				# no more tokens to process
++				num2 = reduce_nums(num, num2, v)
++				return get_term([(NUM, num2)] + lst)
++
++			# operator precedence
++			p2, v2 = lst[0]
++			if p2 != OP:
++				raise PreprocError("op expected %r" % lst)
++
++			if prec[v2] >= prec[v]:
++				num2 = reduce_nums(num, num2, v)
++				return get_term([(NUM, num2)] + lst)
++			else:
++				num3, lst = get_num(lst[1:])
++				num3 = reduce_nums(num2, num3, v2)
++				return get_term([(NUM, num), (p, v), (NUM, num3)] + lst)
++
++
++	raise PreprocError("cannot reduce %r" % lst)
++
++def reduce_eval(lst):
++	"""take a list of tokens and output true or false (#if/#elif conditions)"""
++	num, lst = get_term(lst)
++	return (NUM, num)
++
++def stringize(lst):
++	"""use for converting a list of tokens to a string"""
++	lst = [str(v2) for (p2, v2) in lst]
++	return "".join(lst)
++
++def paste_tokens(t1, t2):
++	"""
++	here is what we can paste:
++	 a ## b  ->  ab
++	 > ## =  ->  >=
++	 a ## 2  ->  a2
++	"""
++	p1 = None
++	if t1[0] == OP and t2[0] == OP:
++		p1 = OP
++	elif t1[0] == IDENT and (t2[0] == IDENT or t2[0] == NUM):
++		p1 = IDENT
++	elif t1[0] == NUM and t2[0] == NUM:
++		p1 = NUM
++	if not p1:
++		raise PreprocError('tokens do not make a valid paste %r and %r' % (t1, t2))
++	return (p1, t1[1] + t2[1])
++
++def reduce_tokens(lst, defs, ban=[]):
++	"""replace the tokens in lst, using the macros provided in defs, and a list of macros that cannot be re-applied"""
++	i = 0
++
++	while i < len(lst):
++		(p, v) = lst[i]
++
++		if p == IDENT and v == "defined":
++			del lst[i]
++			if i < len(lst):
++				(p2, v2) = lst[i]
++				if p2 == IDENT:
++					if v2 in defs:
++						lst[i] = (NUM, 1)
++					else:
++						lst[i] = (NUM, 0)
++				elif p2 == OP and v2 == '(':
++					del lst[i]
++					(p2, v2) = lst[i]
++					del lst[i] # remove the ident, and change the ) for the value
++					if v2 in defs:
++						lst[i] = (NUM, 1)
++					else:
++						lst[i] = (NUM, 0)
++				else:
++					raise PreprocError("invalid define expression %r" % lst)
++
++		elif p == IDENT and v in defs:
++
++			if isinstance(defs[v], str):
++				a, b = extract_macro(defs[v])
++				defs[v] = b
++			macro_def = defs[v]
++			to_add = macro_def[1]
++
++			if isinstance(macro_def[0], list):
++				# macro without arguments
++				del lst[i]
++				for x in xrange(len(to_add)):
++					lst.insert(i, to_add[x])
++					i += 1
++			else:
++				# collect the arguments for the funcall
++
++				args = []
++				del lst[i]
++
++				if i >= len(lst):
++					raise PreprocError("expected '(' after %r (got nothing)" % v)
++
++				(p2, v2) = lst[i]
++				if p2 != OP or v2 != '(':
++					raise PreprocError("expected '(' after %r" % v)
++
++				del lst[i]
++
++				one_param = []
++				count_paren = 0
++				while i < len(lst):
++					p2, v2 = lst[i]
++
++					del lst[i]
++					if p2 == OP and count_paren == 0:
++						if v2 == '(':
++							one_param.append((p2, v2))
++							count_paren += 1
++						elif v2 == ')':
++							if one_param: args.append(one_param)
++							break
++						elif v2 == ',':
++							if not one_param: raise PreprocError("empty param in funcall %s" % v)
++							args.append(one_param)
++							one_param = []
++						else:
++							one_param.append((p2, v2))
++					else:
++						one_param.append((p2, v2))
++						if   v2 == '(': count_paren += 1
++						elif v2 == ')': count_paren -= 1
++				else:
++					raise PreprocError('malformed macro')
++
++				# substitute the arguments within the define expression
++				accu = []
++				arg_table = macro_def[0]
++				j = 0
++				while j < len(to_add):
++					(p2, v2) = to_add[j]
++
++					if p2 == OP and v2 == '#':
++						# stringize is for arguments only
++						if j+1 < len(to_add) and to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
++							toks = args[arg_table[to_add[j+1][1]]]
++							accu.append((STR, stringize(toks)))
++							j += 1
++						else:
++							accu.append((p2, v2))
++					elif p2 == OP and v2 == '##':
++						# token pasting - how could anyone come up with such a complicated system?
++						if accu and j+1 < len(to_add):
++							# we have at least two tokens
++
++							t1 = accu[-1]
++
++							if to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
++								toks = args[arg_table[to_add[j+1][1]]]
++
++								if toks:
++									accu[-1] = paste_tokens(t1, toks[0]) #(IDENT, accu[-1][1] + toks[0][1])
++									accu.extend(toks[1:])
++								else:
++									# error, case "a##"
++									accu.append((p2, v2))
++									accu.extend(toks)
++							elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__':
++								# TODO not sure
++								# first collect the tokens
++								va_toks = []
++								st = len(macro_def[0])
++								pt = len(args)
++								for x in args[pt-st+1:]:
++									va_toks.extend(x)
++									va_toks.append((OP, ','))
++								if va_toks: va_toks.pop() # extra comma
++								if len(accu)>1:
++									(p3, v3) = accu[-1]
++									(p4, v4) = accu[-2]
++									if v3 == '##':
++										# remove the token paste
++										accu.pop()
++										if v4 == ',' and pt < st:
++											# remove the comma
++											accu.pop()
++								accu += va_toks
++							else:
++								accu[-1] = paste_tokens(t1, to_add[j+1])
++
++							j += 1
++						else:
++							# invalid paste, case    "##a" or "b##"
++							accu.append((p2, v2))
++
++					elif p2 == IDENT and v2 in arg_table:
++						toks = args[arg_table[v2]]
++						reduce_tokens(toks, defs, ban+[v])
++						accu.extend(toks)
++					else:
++						accu.append((p2, v2))
++
++					j += 1
++
++
++				reduce_tokens(accu, defs, ban+[v])
++
++				for x in xrange(len(accu)-1, -1, -1):
++					lst.insert(i, accu[x])
++
++		i += 1
++
++
++def eval_macro(lst, adefs):
++	"""reduce the tokens from the list lst, and try to return a 0/1 result"""
++	reduce_tokens(lst, adefs, [])
++	if not lst: raise PreprocError("missing tokens to evaluate")
++	(p, v) = reduce_eval(lst)
++	return int(v) != 0
++
++def extract_macro(txt):
++	"""process a macro definition from "#define f(x, y) x * y" into a function or a simple macro without arguments"""
++	t = tokenize(txt)
++	if re_fun.search(txt):
++		p, name = t[0]
++
++		p, v = t[1]
++		if p != OP: raise PreprocError("expected open parenthesis")
++
++		i = 1
++		pindex = 0
++		params = {}
++		prev = '('
++
++		while 1:
++			i += 1
++			p, v = t[i]
++
++			if prev == '(':
++				if p == IDENT:
++					params[v] = pindex
++					pindex += 1
++					prev = p
++				elif p == OP and v == ')':
++					break
++				else:
++					raise PreprocError("unexpected token (3)")
++			elif prev == IDENT:
++				if p == OP and v == ',':
++					prev = v
++				elif p == OP and v == ')':
++					break
++				else:
++					raise PreprocError("comma or ... expected")
++			elif prev == ',':
++				if p == IDENT:
++					params[v] = pindex
++					pindex += 1
++					prev = p
++				elif p == OP and v == '...':
++					raise PreprocError("not implemented (1)")
++				else:
++					raise PreprocError("comma or ... expected (2)")
++			elif prev == '...':
++				raise PreprocError("not implemented (2)")
++			else:
++				raise PreprocError("unexpected else")
++
++		#~ print (name, [params, t[i+1:]])
++		return (name, [params, t[i+1:]])
++	else:
++		(p, v) = t[0]
++		return (v, [[], t[1:]])
++
++re_include = re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
++def extract_include(txt, defs):
++	"""process a line in the form "#include foo" to return a string representing the file"""
++	m = re_include.search(txt)
++	if m:
++		if m.group('a'): return '<', m.group('a')
++		if m.group('b'): return '"', m.group('b')
++
++	# perform preprocessing and look at the result, it must match an include
++	toks = tokenize(txt)
++	reduce_tokens(toks, defs, ['waf_include'])
++
++	if not toks:
++		raise PreprocError("could not parse include %s" % txt)
++
++	if len(toks) == 1:
++		if toks[0][0] == STR:
++			return '"', toks[0][1]
++	else:
++		if toks[0][1] == '<' and toks[-1][1] == '>':
++			return '<', stringize(toks).lstrip('<').rstrip('>')
++
++	raise PreprocError("could not parse include %s." % txt)
++
++def parse_char(txt):
++	if not txt: raise PreprocError("attempted to parse a null char")
++	if txt[0] != '\\':
++		return ord(txt)
++	c = txt[1]
++	if c == 'x':
++		# hex escape: everything after the \x is the hex value
++		return int(txt[2:], 16)
++	elif c.isdigit():
++		if c == '0' and len(txt)==2: return 0
++		for i in 3, 2, 1:
++			if len(txt) > i and txt[1:1+i].isdigit():
++				return int(txt[1:1+i], 8)
++	else:
++		try: return chr_esc[c]
++		except KeyError: raise PreprocError("could not parse char literal '%s'" % txt)
++
++ at Utils.run_once
++def tokenize(s):
++	"""convert a string into a list of tokens (shlex.split does not apply to c/c++/d)"""
++	ret = []
++	for match in re_clexer.finditer(s):
++		m = match.group
++		for name in tok_types:
++			v = m(name)
++			if v:
++				if name == IDENT:
++					try: v = g_optrans[v]; name = OP
++					except KeyError:
++						# c++ specific
++						if v.lower() == "true":
++							v = 1
++							name = NUM
++						elif v.lower() == "false":
++							v = 0
++							name = NUM
++				elif name == NUM:
++					if m('oct'): v = int(v, 8)
++					elif m('hex'): v = int(m('hex'), 16)
++					elif m('n0'): v = m('n0')
++					else:
++						v = m('char')
++						if v: v = parse_char(v)
++						else: v = m('n2') or m('n4')
++				elif name == OP:
++					if v == '%:': v = '#'
++					elif v == '%:%:': v = '##'
++				elif name == STR:
++					# remove the quotes around the string
++					v = v[1:-1]
++				ret.append((name, v))
++				break
++	return ret
++
++ at Utils.run_once
++def define_name(line):
++	return re_mac.match(line).group(0)
++
++class c_parser(object):
++	def __init__(self, nodepaths=None, defines=None):
++		#self.lines = txt.split('\n')
++		self.lines = []
++
++		if defines is None:
++			self.defs  = {}
++		else:
++			self.defs  = dict(defines) # make a copy
++		self.state = []
++
++		self.env   = None # needed for the variant when searching for files
++
++		self.count_files = 0
++		self.currentnode_stack = []
++
++		self.nodepaths = nodepaths or []
++
++		self.nodes = []
++		self.names = []
++
++		# file added
++		self.curfile = ''
++		self.ban_includes = set([])
++
++	def cached_find_resource(self, node, filename):
++		try:
++			nd = node.bld.cache_nd
++		except:
++			nd = node.bld.cache_nd = {}
++
++		tup = (node.id, filename)
++		try:
++			return nd[tup]
++		except KeyError:
++			ret = node.find_resource(filename)
++			nd[tup] = ret
++			return ret
++
++	def tryfind(self, filename):
++		self.curfile = filename
++
++		# for msvc it should be a for loop on the whole stack
++		found = self.cached_find_resource(self.currentnode_stack[-1], filename)
++
++		for n in self.nodepaths:
++			if found:
++				break
++			found = self.cached_find_resource(n, filename)
++
++		if found:
++			self.nodes.append(found)
++			if filename[-4:] != '.moc':
++				self.addlines(found)
++		else:
++			if not filename in self.names:
++				self.names.append(filename)
++		return found
++
++	def addlines(self, node):
++
++		self.currentnode_stack.append(node.parent)
++		filepath = node.abspath(self.env)
++
++		self.count_files += 1
++		if self.count_files > recursion_limit: raise PreprocError("recursion limit exceeded")
++		pc = self.parse_cache
++		debug('preproc: reading file %r', filepath)
++		try:
++			lns = pc[filepath]
++		except KeyError:
++			pass
++		else:
++			self.lines.extend(lns)
++			return
++
++		try:
++			lines = filter_comments(filepath)
++			lines.append((POPFILE, ''))
++			lines.reverse()
++			pc[filepath] = lines # cache the lines filtered
++			self.lines.extend(lines)
++		except IOError:
++			raise PreprocError("could not read the file %s" % filepath)
++		except Exception:
++			if Logs.verbose > 0:
++				error("parsing %s failed" % filepath)
++				traceback.print_exc()
++
++	def start(self, node, env):
++		debug('preproc: scanning %s (in %s)', node.name, node.parent.name)
++
++		self.env = env
++		variant = node.variant(env)
++		bld = node.__class__.bld
++		try:
++			self.parse_cache = bld.parse_cache
++		except AttributeError:
++			bld.parse_cache = {}
++			self.parse_cache = bld.parse_cache
++
++		self.addlines(node)
++		if env['DEFLINES']:
++			lst = [('define', x) for x in env['DEFLINES']]
++			lst.reverse()
++			self.lines.extend(lst)
++
++		while self.lines:
++			(kind, line) = self.lines.pop()
++			if kind == POPFILE:
++				self.currentnode_stack.pop()
++				continue
++			try:
++				self.process_line(kind, line)
++			except Exception, e:
++				if Logs.verbose:
++					debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack())
++
++	def process_line(self, token, line):
++		"""
++		WARNING: each if/ifdef/ifndef pushes a new state, and the matching endif pops it
++		"""
++		ve = Logs.verbose
++		if ve: debug('preproc: line is %s - %s state is %s', token, line, self.state)
++		state = self.state
++
++		# make certain we define the state if we are about to enter in an if block
++		if token in ['ifdef', 'ifndef', 'if']:
++			state.append(undefined)
++		elif token == 'endif':
++			state.pop()
++
++		# skip lines when in a dead 'if' branch, wait for the endif
++		if not token in ['else', 'elif', 'endif']:
++			if skipped in self.state or ignored in self.state:
++				return
++
++		if token == 'if':
++			ret = eval_macro(tokenize(line), self.defs)
++			if ret: state[-1] = accepted
++			else: state[-1] = ignored
++		elif token == 'ifdef':
++			m = re_mac.match(line)
++			if m and m.group(0) in self.defs: state[-1] = accepted
++			else: state[-1] = ignored
++		elif token == 'ifndef':
++			m = re_mac.match(line)
++			if m and m.group(0) in self.defs: state[-1] = ignored
++			else: state[-1] = accepted
++		elif token == 'include' or token == 'import':
++			(kind, inc) = extract_include(line, self.defs)
++			if inc in self.ban_includes: return
++			if token == 'import': self.ban_includes.add(inc)
++			if ve: debug('preproc: include found %s    (%s) ', inc, kind)
++			if kind == '"' or not strict_quotes:
++				self.tryfind(inc)
++		elif token == 'elif':
++			if state[-1] == accepted:
++				state[-1] = skipped
++			elif state[-1] == ignored:
++				if eval_macro(tokenize(line), self.defs):
++					state[-1] = accepted
++		elif token == 'else':
++			if state[-1] == accepted: state[-1] = skipped
++			elif state[-1] == ignored: state[-1] = accepted
++		elif token == 'define':
++			try:
++				self.defs[define_name(line)] = line
++			except:
++				raise PreprocError("invalid define line %s" % line)
++		elif token == 'undef':
++			m = re_mac.match(line)
++			if m and m.group(0) in self.defs:
++				self.defs.__delitem__(m.group(0))
++				#print "undef %s" % name
++		elif token == 'pragma':
++			if re_pragma_once.match(line.lower()):
++				self.ban_includes.add(self.curfile)
++
++def get_deps(node, env, nodepaths=[]):
++	"""
++	Get the dependencies using a c/c++ preprocessor; this is required for finding dependencies of the kind
++	#include some_macro()
++	"""
++
++	gruik = c_parser(nodepaths)
++	gruik.start(node, env)
++	return (gruik.nodes, gruik.names)
++
++#################### dumb dependency scanner
++
++re_inc = re.compile(\
++	'^[ \t]*(#|%:)[ \t]*(include)[ \t]*(.*)\r*$',
++	re.IGNORECASE | re.MULTILINE)
++
++def lines_includes(filename):
++	code = Utils.readf(filename)
++	if use_trigraphs:
++		for (a, b) in trig_def: code = code.split(a).join(b)
++	code = re_nl.sub('', code)
++	code = re_cpp.sub(repl, code)
++	return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]
++
++def get_deps_simple(node, env, nodepaths=[], defines={}):
++	"""
++	Get the dependencies by just looking recursively at the #include statements
++	"""
++
++	nodes = []
++	names = []
++
++	def find_deps(node):
++		lst = lines_includes(node.abspath(env))
++
++		for (_, line) in lst:
++			(t, filename) = extract_include(line, defines)
++			if filename in names:
++				continue
++
++			if filename.endswith('.moc'):
++				names.append(filename)
++
++			found = None
++			for n in nodepaths:
++				if found:
++					break
++				found = n.find_resource(filename)
++
++			if not found:
++				if not filename in names:
++					names.append(filename)
++			elif not found in nodes:
++				nodes.append(found)
++				find_deps(found)
++
++	find_deps(node)
++	return (nodes, names)
++
++
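To make the token machinery above concrete, a small sketch of how the public helpers behave, assuming wafadmin/Tools is on sys.path so that the bare import works (these modules import each other by name):

    import preproc

    # evaluate an #if-style condition against a macro table; the values
    # are '#define' bodies in the form stored by process_line()
    defs = {'FOO': 'FOO 1', 'BAR': 'BAR 2'}
    toks = preproc.tokenize('defined(FOO) && BAR > 1')
    print(preproc.eval_macro(toks, defs))            # -> True

    # extract_include returns (kind, path) for both quoting styles
    print(preproc.extract_include('"config.h"', {})) # -> ('"', 'config.h')
    print(preproc.extract_include('<stdio.h>', {}))  # -> ('<', 'stdio.h')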
+diff --git a/buildtools/wafadmin/Tools/python.py b/buildtools/wafadmin/Tools/python.py
+new file mode 100644
+index 0000000..4f73081
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/python.py
+@@ -0,0 +1,413 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2007 (ita)
++# Gustavo Carneiro (gjc), 2007
++
++"Python support"
++
++import os, sys
++import TaskGen, Utils, Runner, Options, Build
++from Logs import debug, warn, info
++from TaskGen import extension, taskgen, before, after, feature
++from Configure import conf
++
++EXT_PY = ['.py']
++FRAG_2 = '''
++#include "Python.h"
++#ifdef __cplusplus
++extern "C" {
++#endif
++	void Py_Initialize(void);
++	void Py_Finalize(void);
++#ifdef __cplusplus
++}
++#endif
++int main()
++{
++   Py_Initialize();
++   Py_Finalize();
++   return 0;
++}
++'''
++
++ at feature('pyext')
++ at before('apply_incpaths', 'apply_lib_vars', 'apply_type_vars', 'apply_bundle')
++ at after('vars_target_cshlib')
++def init_pyext(self):
++	self.default_install_path = '${PYTHONARCHDIR}'
++	self.uselib = self.to_list(getattr(self, 'uselib', ''))
++	if not 'PYEXT' in self.uselib:
++		self.uselib.append('PYEXT')
++	self.env['MACBUNDLE'] = True
++
++ at before('apply_link', 'apply_lib_vars', 'apply_type_vars')
++ at after('apply_bundle')
++ at feature('pyext')
++def pyext_shlib_ext(self):
++	# override shlib_PATTERN set by the osx module
++	self.env['shlib_PATTERN'] = self.env['pyext_PATTERN']
++
++ at before('apply_incpaths', 'apply_lib_vars', 'apply_type_vars')
++ at feature('pyembed')
++def init_pyembed(self):
++	self.uselib = self.to_list(getattr(self, 'uselib', ''))
++	if not 'PYEMBED' in self.uselib:
++		self.uselib.append('PYEMBED')
++
++ at extension(EXT_PY)
++def process_py(self, node):
++	if not (self.bld.is_install and self.install_path):
++		return
++	def inst_py(ctx):
++		install_pyfile(self, node)
++	self.bld.add_post_fun(inst_py)
++
++def install_pyfile(self, node):
++	path = self.bld.get_install_path(self.install_path + os.sep + node.name, self.env)
++
++	self.bld.install_files(self.install_path, [node], self.env, self.chmod, postpone=False)
++	if self.bld.is_install < 0:
++		info("* removing byte compiled python files")
++		for x in 'co':
++			try:
++				os.remove(path + x)
++			except OSError:
++				pass
++
++	if self.bld.is_install > 0:
++		if self.env['PYC'] or self.env['PYO']:
++			info("* byte compiling %r" % path)
++
++		if self.env['PYC']:
++			program = ("""
++import sys, py_compile
++for pyfile in sys.argv[1:]:
++	py_compile.compile(pyfile, pyfile + 'c')
++""")
++			argv = [self.env['PYTHON'], '-c', program, path]
++			ret = Utils.pproc.Popen(argv).wait()
++			if ret:
++				raise Utils.WafError('bytecode compilation failed %r' % path)
++
++		if self.env['PYO']:
++			program = ("""
++import sys, py_compile
++for pyfile in sys.argv[1:]:
++	py_compile.compile(pyfile, pyfile + 'o')
++""")
++			argv = [self.env['PYTHON'], self.env['PYFLAGS_OPT'], '-c', program, path]
++			ret = Utils.pproc.Popen(argv).wait()
++			if ret:
++				raise Utils.WafError('bytecode compilation failed %r' % path)
++
++# COMPAT
++class py_taskgen(TaskGen.task_gen):
++	def __init__(self, *k, **kw):
++		TaskGen.task_gen.__init__(self, *k, **kw)
++
++ at before('apply_core')
++ at after('vars_target_cprogram', 'vars_target_cshlib')
++ at feature('py')
++def init_py(self):
++	self.default_install_path = '${PYTHONDIR}'
++
++def _get_python_variables(python_exe, variables, imports=['import sys']):
++	"""Run a python interpreter and print some variables"""
++	program = list(imports)
++	program.append('')
++	for v in variables:
++		program.append("print(repr(%s))" % v)
++	os_env = dict(os.environ)
++	try:
++		del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
++	except KeyError:
++		pass
++	proc = Utils.pproc.Popen([python_exe, "-c", '\n'.join(program)], stdout=Utils.pproc.PIPE, env=os_env)
++	output = proc.communicate()[0].split("\n") # do not touch, python3
++	if proc.returncode:
++		if Options.options.verbose:
++			warn("Python program to extract python configuration variables failed:\n%s"
++				       % '\n'.join(["line %03i: %s" % (lineno+1, line) for lineno, line in enumerate(program)]))
++		raise RuntimeError
++	return_values = []
++	for s in output:
++		s = s.strip()
++		if not s:
++			continue
++		if s == 'None':
++			return_values.append(None)
++		elif s[0] == "'" and s[-1] == "'":
++			return_values.append(s[1:-1])
++		elif s[0].isdigit():
++			return_values.append(int(s))
++		else: break
++	return return_values
++
++ at conf
++def check_python_headers(conf, mandatory=True):
++	"""Check for headers and libraries necessary to extend or embed python.
++
++	On success the environment variables xxx_PYEXT and xxx_PYEMBED are added for uselib
++
++	PYEXT: for compiling python extensions
++	PYEMBED: for embedding a python interpreter"""
++
++	if not conf.env['CC_NAME'] and not conf.env['CXX_NAME']:
++		conf.fatal('load a compiler first (gcc, g++, ..)')
++
++	if not conf.env['PYTHON_VERSION']:
++		conf.check_python_version()
++
++	env = conf.env
++	python = env['PYTHON']
++	if not python:
++		conf.fatal('could not find the python executable')
++
++	## On Mac OSX we need to use mac bundles for python plugins
++	if Options.platform == 'darwin':
++		conf.check_tool('osx')
++
++	try:
++		# Get some python configuration variables using distutils
++		v = 'prefix SO SYSLIBS LDFLAGS SHLIBS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET'.split()
++		(python_prefix, python_SO, python_SYSLIBS, python_LDFLAGS, python_SHLIBS,
++		 python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED,
++		 python_MACOSX_DEPLOYMENT_TARGET) = \
++			_get_python_variables(python, ["get_config_var('%s')" % x for x in v],
++					      ['from distutils.sysconfig import get_config_var'])
++	except RuntimeError:
++		conf.fatal("Python development headers not found (-v for details).")
++
++	conf.log.write("""Configuration returned from %r:
++python_prefix = %r
++python_SO = %r
++python_SYSLIBS = %r
++python_LDFLAGS = %r
++python_SHLIBS = %r
++python_LIBDIR = %r
++python_LIBPL = %r
++INCLUDEPY = %r
++Py_ENABLE_SHARED = %r
++MACOSX_DEPLOYMENT_TARGET = %r
++""" % (python, python_prefix, python_SO, python_SYSLIBS, python_LDFLAGS, python_SHLIBS,
++	python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED, python_MACOSX_DEPLOYMENT_TARGET))
++
++	if python_MACOSX_DEPLOYMENT_TARGET:
++		conf.env['MACOSX_DEPLOYMENT_TARGET'] = python_MACOSX_DEPLOYMENT_TARGET
++		conf.environ['MACOSX_DEPLOYMENT_TARGET'] = python_MACOSX_DEPLOYMENT_TARGET
++
++	env['pyext_PATTERN'] = '%s'+python_SO
++
++	# Check for python libraries for embedding
++	if python_SYSLIBS is not None:
++		for lib in python_SYSLIBS.split():
++			if lib.startswith('-l'):
++				lib = lib[2:] # strip '-l'
++			env.append_value('LIB_PYEMBED', lib)
++
++	if python_SHLIBS is not None:
++		for lib in python_SHLIBS.split():
++			if lib.startswith('-l'):
++				env.append_value('LIB_PYEMBED', lib[2:]) # strip '-l'
++			else:
++				env.append_value('LINKFLAGS_PYEMBED', lib)
++
++	if Options.platform != 'darwin' and python_LDFLAGS:
++		env.append_value('LINKFLAGS_PYEMBED', python_LDFLAGS.split())
++
++	result = False
++	name = 'python' + env['PYTHON_VERSION']
++
++	if python_LIBDIR is not None:
++		path = [python_LIBDIR]
++		conf.log.write("\n\n# Trying LIBDIR: %r\n" % path)
++		result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
++
++	if not result and python_LIBPL is not None:
++		conf.log.write("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
++		path = [python_LIBPL]
++		result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
++
++	if not result:
++		conf.log.write("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
++		path = [os.path.join(python_prefix, "libs")]
++		name = 'python' + env['PYTHON_VERSION'].replace('.', '')
++		result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
++
++	if result:
++		env['LIBPATH_PYEMBED'] = path
++		env.append_value('LIB_PYEMBED', name)
++	else:
++		conf.log.write("\n\n### LIB NOT FOUND\n")
++
++	# under certain conditions, python extensions must link to
++	# python libraries, not just python embedding programs.
++	if (sys.platform == 'win32' or sys.platform.startswith('os2')
++		or sys.platform == 'darwin' or Py_ENABLE_SHARED):
++		env['LIBPATH_PYEXT'] = env['LIBPATH_PYEMBED']
++		env['LIB_PYEXT'] = env['LIB_PYEMBED']
++
++	# We check that pythonX.Y-config exists, and if it exists we
++	# use it to get only the includes, else fall back to distutils.
++	python_config = conf.find_program(
++		'python%s-config' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])),
++		var='PYTHON_CONFIG')
++	if not python_config:
++		python_config = conf.find_program(
++			'python-config-%s' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])),
++			var='PYTHON_CONFIG')
++
++	includes = []
++	if python_config:
++		for incstr in Utils.cmd_output("%s %s --includes" % (python, python_config)).strip().split():
++			# strip the -I or /I
++			if (incstr.startswith('-I')
++			    or incstr.startswith('/I')):
++				incstr = incstr[2:]
++			# append include path, unless already given
++			if incstr not in includes:
++				includes.append(incstr)
++		conf.log.write("Include path for Python extensions "
++			       "(found via python-config --includes): %r\n" % (includes,))
++		env['CPPPATH_PYEXT'] = includes
++		env['CPPPATH_PYEMBED'] = includes
++	else:
++		conf.log.write("Include path for Python extensions "
++			       "(found via distutils module): %r\n" % (INCLUDEPY,))
++		env['CPPPATH_PYEXT'] = [INCLUDEPY]
++		env['CPPPATH_PYEMBED'] = [INCLUDEPY]
++
++	# Code using the Python API needs to be compiled with -fno-strict-aliasing
++	if env['CC_NAME'] == 'gcc':
++		env.append_value('CCFLAGS_PYEMBED', '-fno-strict-aliasing')
++		env.append_value('CCFLAGS_PYEXT', '-fno-strict-aliasing')
++	if env['CXX_NAME'] == 'gcc':
++		env.append_value('CXXFLAGS_PYEMBED', '-fno-strict-aliasing')
++		env.append_value('CXXFLAGS_PYEXT', '-fno-strict-aliasing')
++
++	# See if it compiles
++	conf.check(define_name='HAVE_PYTHON_H',
++		   uselib='PYEMBED', fragment=FRAG_2,
++		   errmsg='Could not find the python development headers', mandatory=mandatory)
++
++ at conf
++def check_python_version(conf, minver=None):
++	"""
++	Check if the python interpreter is found matching a given minimum version.
++	minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver.
++
++	If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR'
++	(eg. '2.4') of the actual python version found, and PYTHONDIR is
++	defined, pointing to the site-packages directory appropriate for
++	this python version, where modules/packages/extensions should be
++	installed.
++	"""
++	assert minver is None or isinstance(minver, tuple)
++	python = conf.env['PYTHON']
++	if not python:
++		conf.fatal('could not find the python executable')
++
++	# Get python version string
++	cmd = [python, "-c", "import sys\nfor x in sys.version_info: print(str(x))"]
++	debug('python: Running python command %r' % cmd)
++	proc = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE)
++	lines = proc.communicate()[0].split()
++	assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines)
++	pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))
++
++	# compare python version with the minimum required
++	result = (minver is None) or (pyver_tuple >= minver)
++
++	if result:
++		# define useful environment variables
++		pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
++		conf.env['PYTHON_VERSION'] = pyver
++
++		if 'PYTHONDIR' in conf.environ:
++			pydir = conf.environ['PYTHONDIR']
++		else:
++			if sys.platform == 'win32':
++				(python_LIBDEST, pydir) = \
++						_get_python_variables(python,
++											  ["get_config_var('LIBDEST')",
++											   "get_python_lib(standard_lib=0, prefix=%r)" % conf.env['PREFIX']],
++											  ['from distutils.sysconfig import get_config_var, get_python_lib'])
++			else:
++				python_LIBDEST = None
++				(pydir,) = \
++						_get_python_variables(python,
++											  ["get_python_lib(standard_lib=0, prefix=%r)" % conf.env['PREFIX']],
++											  ['from distutils.sysconfig import get_config_var, get_python_lib'])
++			if python_LIBDEST is None:
++				if conf.env['LIBDIR']:
++					python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver)
++				else:
++					python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver)
++
++		if 'PYTHONARCHDIR' in conf.environ:
++			pyarchdir = conf.environ['PYTHONARCHDIR']
++		else:
++			(pyarchdir,) = _get_python_variables(python,
++											["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r)" % conf.env['PREFIX']],
++											['from distutils.sysconfig import get_config_var, get_python_lib'])
++			if not pyarchdir:
++				pyarchdir = pydir
++
++		if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist
++			conf.define('PYTHONDIR', pydir)
++			conf.define('PYTHONARCHDIR', pyarchdir)
++
++		conf.env['PYTHONDIR'] = pydir
++
++	# Feedback
++	pyver_full = '.'.join(map(str, pyver_tuple[:3]))
++	if minver is None:
++		conf.check_message_custom('Python version', '', pyver_full)
++	else:
++		minver_str = '.'.join(map(str, minver))
++		conf.check_message('Python version', ">= %s" % minver_str, result, option=pyver_full)
++
++	if not result:
++		conf.fatal('The python version is too old (%r)' % pyver_full)
++
++ at conf
++def check_python_module(conf, module_name):
++	"""
++	Check if the selected python interpreter can import the given python module.
++	"""
++	result = not Utils.pproc.Popen([conf.env['PYTHON'], "-c", "import %s" % module_name],
++			   stderr=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE).wait()
++	conf.check_message('Python module', module_name, result)
++	if not result:
++		conf.fatal('Could not find the python module %r' % module_name)
++
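++# Illustrative use (assumes the 'python' tool was loaded at configure time;
++# the module name is just an example):
++#
++#   conf.check_python_module('xml.sax')
++#
++# this runs "python -c 'import xml.sax'" and aborts the configuration on failure.
++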
++def detect(conf):
++
++	if not conf.env.PYTHON:
++		conf.env.PYTHON = sys.executable
++
++	python = conf.find_program('python', var='PYTHON')
++	if not python:
++		conf.fatal('Could not find the path of the python executable')
++
++	v = conf.env
++
++	v['PYCMD'] = '"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"'
++	v['PYFLAGS'] = ''
++	v['PYFLAGS_OPT'] = '-O'
++
++	v['PYC'] = getattr(Options.options, 'pyc', 1)
++	v['PYO'] = getattr(Options.options, 'pyo', 1)
++
++def set_options(opt):
++	opt.add_option('--nopyc',
++			action='store_false',
++			default=1,
++			help = 'Do not install bytecode compiled .pyc files (configuration) [Default:install]',
++			dest = 'pyc')
++	opt.add_option('--nopyo',
++			action='store_false',
++			default=1,
++			help='Do not install optimised compiled .pyo files (configuration) [Default:install]',
++			dest='pyo')
++
+diff --git a/buildtools/wafadmin/Tools/qt4.py b/buildtools/wafadmin/Tools/qt4.py
+new file mode 100644
+index 0000000..84d121a
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/qt4.py
+@@ -0,0 +1,505 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006 (ita)
++
++"""
++Qt4 support
++
++If QT4_ROOT is given (absolute path), the configuration will look in it first
++
++This module also demonstrates how to add tasks dynamically (when the build has started)
++"""
++
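++# Illustrative wscript usage (target name, sources and uselib flags are
++# examples only):
++#
++#   def build(bld):
++#       bld.new_task_gen(features='qt4 cxx cprogram',
++#                        source='main.cpp resources.qrc dialog.ui',
++#                        target='app', uselib='QTCORE QTGUI')
++#
++# the .qrc and .ui files are picked up by the @extension hooks defined below.
++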
++try:
++	from xml.sax import make_parser
++	from xml.sax.handler import ContentHandler
++except ImportError:
++	has_xml = False
++	ContentHandler = object
++else:
++	has_xml = True
++
++import os, sys
++import ccroot, cxx
++import TaskGen, Task, Utils, Runner, Options, Node, Configure
++from TaskGen import taskgen, feature, after, extension
++from Logs import error
++from Constants import *
++
++MOC_H = ['.h', '.hpp', '.hxx', '.hh']
++EXT_RCC = ['.qrc']
++EXT_UI  = ['.ui']
++EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C']
++
++class qxx_task(Task.Task):
++	"A cpp task that may create a moc task dynamically"
++
++	before = ['cxx_link', 'static_link']
++
++	def __init__(self, *k, **kw):
++		Task.Task.__init__(self, *k, **kw)
++		self.moc_done = 0
++
++	def scan(self):
++		(nodes, names) = ccroot.scan(self)
++		# for some reason (variants), the moc node may end up in the list of node deps
++		for x in nodes:
++			if x.name.endswith('.moc'):
++				nodes.remove(x)
++				names.append(x.relpath_gen(self.inputs[0].parent))
++		return (nodes, names)
++
++	def runnable_status(self):
++		if self.moc_done:
++			# if there is a moc task, delay the computation of the file signature
++			for t in self.run_after:
++				if not t.hasrun:
++					return ASK_LATER
++			# the moc file enters the dependency calculation
++			# so we need to recompute the signature when the moc file is present
++			self.signature()
++			return Task.Task.runnable_status(self)
++		else:
++			# yes, really, there are people who generate cxx files
++			for t in self.run_after:
++				if not t.hasrun:
++					return ASK_LATER
++			self.add_moc_tasks()
++			return ASK_LATER
++
++	def add_moc_tasks(self):
++
++		node = self.inputs[0]
++		tree = node.__class__.bld
++
++		try:
++			# compute the signature once to know if there is a moc file to create
++			self.signature()
++		except KeyError:
++			# the moc file may be referenced somewhere else
++			pass
++		else:
++			# remove the signature, it must be recomputed with the moc task
++			delattr(self, 'cache_sig')
++
++		moctasks=[]
++		mocfiles=[]
++		variant = node.variant(self.env)
++		try:
++			tmp_lst = tree.raw_deps[self.unique_id()]
++			tree.raw_deps[self.unique_id()] = []
++		except KeyError:
++			tmp_lst = []
++		for d in tmp_lst:
++			if not d.endswith('.moc'): continue
++			# paranoid check
++			if d in mocfiles:
++				error("paranoia owns")
++				continue
++
++			# process each base.moc only once
++			mocfiles.append(d)
++
++			# find the extension (performed only when the .cpp has changed)
++			base2 = d[:-4]
++			for path in [node.parent] + self.generator.env['INC_PATHS']:
++				tree.rescan(path)
++				vals = getattr(Options.options, 'qt_header_ext', '') or MOC_H
++				for ex in vals:
++					h_node = path.find_resource(base2 + ex)
++					if h_node:
++						break
++				else:
++					continue
++				break
++			else:
++				raise Utils.WafError("no header found for %s which is a moc file" % str(d))
++
++			m_node = h_node.change_ext('.moc')
++			tree.node_deps[(self.inputs[0].parent.id, self.env.variant(), m_node.name)] = h_node
++
++			# create the task
++			task = Task.TaskBase.classes['moc'](self.env, normal=0)
++			task.set_inputs(h_node)
++			task.set_outputs(m_node)
++
++			generator = tree.generator
++			generator.outstanding.insert(0, task)
++			generator.total += 1
++
++			moctasks.append(task)
++
++		# remove raw deps except the moc files to save space (optimization)
++		tmp_lst = tree.raw_deps[self.unique_id()] = mocfiles
++
++		# look at the file inputs, it is set right above
++		lst = tree.node_deps.get(self.unique_id(), ())
++		for d in lst:
++			name = d.name
++			if name.endswith('.moc'):
++				task = Task.TaskBase.classes['moc'](self.env, normal=0)
++				task.set_inputs(tree.node_deps[(self.inputs[0].parent.id, self.env.variant(), name)]) # 1st element in a tuple
++				task.set_outputs(d)
++
++				generator = tree.generator
++				generator.outstanding.insert(0, task)
++				generator.total += 1
++
++				moctasks.append(task)
++
++		# simple scheduler dependency: run the moc task before others
++		self.run_after = moctasks
++		self.moc_done = 1
++
++	run = Task.TaskBase.classes['cxx'].__dict__['run']
++
++def translation_update(task):
++	outs = [a.abspath(task.env) for a in task.outputs]
++	outs = " ".join(outs)
++	lupdate = task.env['QT_LUPDATE']
++
++	for x in task.inputs:
++		file = x.abspath(task.env)
++		cmd = "%s %s -ts %s" % (lupdate, file, outs)
++		Utils.pprint('BLUE', cmd)
++		task.generator.bld.exec_command(cmd)
++
++class XMLHandler(ContentHandler):
++	def __init__(self):
++		self.buf = []
++		self.files = []
++	def startElement(self, name, attrs):
++		if name == 'file':
++			self.buf = []
++	def endElement(self, name):
++		if name == 'file':
++			self.files.append(''.join(self.buf))
++	def characters(self, cars):
++		self.buf.append(cars)
++
++def scan(self):
++	"add the dependency on the files referenced in the qrc"
++	node = self.inputs[0]
++	parser = make_parser()
++	curHandler = XMLHandler()
++	parser.setContentHandler(curHandler)
++	fi = open(self.inputs[0].abspath(self.env))
++	parser.parse(fi)
++	fi.close()
++
++	nodes = []
++	names = []
++	root = self.inputs[0].parent
++	for x in curHandler.files:
++		nd = root.find_resource(x)
++		if nd: nodes.append(nd)
++		else: names.append(x)
++
++	return (nodes, names)
++
++@extension(EXT_RCC)
++def create_rcc_task(self, node):
++	"hook for rcc files"
++	rcnode = node.change_ext('_rc.cpp')
++	rcctask = self.create_task('rcc', node, rcnode)
++	cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
++	self.compiled_tasks.append(cpptask)
++	return cpptask
++
++@extension(EXT_UI)
++def create_uic_task(self, node):
++	"hook for uic tasks"
++	uictask = self.create_task('ui4', node)
++	uictask.outputs = [self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])]
++	return uictask
++
++class qt4_taskgen(cxx.cxx_taskgen):
++	def __init__(self, *k, **kw):
++		cxx.cxx_taskgen.__init__(self, *k, **kw)
++		self.features.append('qt4')
++
++@extension('.ts')
++def add_lang(self, node):
++	"""add all the .ts file into self.lang"""
++	self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
++
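++# Illustrative sketch of the translation attributes consumed by apply_qt4
++# below (file and target names are made up):
++#
++#   bld.new_task_gen(features='qt4 cxx cprogram', source='main.cpp',
++#                    target='app', lang='linguist/fr linguist/de',
++#                    langname='app')
++#
++# each 'lang' entry resolves to a .ts file compiled to a .qm file; 'langname'
++# embeds the .qm files through a generated .qrc.
++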
++@feature('qt4')
++@after('apply_link')
++def apply_qt4(self):
++	if getattr(self, 'lang', None):
++		update = getattr(self, 'update', None)
++		lst=[]
++		trans=[]
++		for l in self.to_list(self.lang):
++
++			if not isinstance(l, Node.Node):
++				l = self.path.find_resource(l+'.ts')
++
++			t = self.create_task('ts2qm', l, l.change_ext('.qm'))
++			lst.append(t.outputs[0])
++
++			if update:
++				trans.append(t.inputs[0])
++
++		trans_qt4 = getattr(Options.options, 'trans_qt4', False)
++		if update and trans_qt4:
++			# we need the cpp files given, excluding those of the rcc task created afterwards
++			# FIXME may be broken
++			u = Task.TaskCmd(translation_update, self.env, 2)
++			u.inputs = [a.inputs[0] for a in self.compiled_tasks]
++			u.outputs = trans
++
++		if getattr(self, 'langname', None):
++			t = Task.TaskBase.classes['qm2rcc'](self.env)
++			t.set_inputs(lst)
++			t.set_outputs(self.path.find_or_declare(self.langname+'.qrc'))
++			t.path = self.path
++			k = create_rcc_task(self, t.outputs[0])
++			self.link_task.inputs.append(k.outputs[0])
++
++	self.env.append_value('MOC_FLAGS', self.env._CXXDEFFLAGS)
++	self.env.append_value('MOC_FLAGS', self.env._CXXINCFLAGS)
++
++@extension(EXT_QT4)
++def cxx_hook(self, node):
++	# create the compilation task: cpp or cc
++	try: obj_ext = self.obj_ext
++	except AttributeError: obj_ext = '_%d.o' % self.idx
++
++	task = self.create_task('qxx', node, node.change_ext(obj_ext))
++	self.compiled_tasks.append(task)
++	return task
++
++def process_qm2rcc(task):
++	outfile = task.outputs[0].abspath(task.env)
++	f = open(outfile, 'w')
++	f.write('<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n')
++	for k in task.inputs:
++		f.write(' <file>')
++		#f.write(k.name)
++		f.write(k.path_to_parent(task.path))
++		f.write('</file>\n')
++	f.write('</qresource>\n</RCC>')
++	f.close()
++
++b = Task.simple_task_type
++b('moc', '${QT_MOC} ${MOC_FLAGS} ${SRC} ${MOC_ST} ${TGT}', color='BLUE', vars=['QT_MOC', 'MOC_FLAGS'], shell=False)
++cls = b('rcc', '${QT_RCC} -name ${SRC[0].name} ${SRC[0].abspath(env)} ${RCC_ST} -o ${TGT}', color='BLUE', before='cxx moc qxx_task', after="qm2rcc", shell=False)
++cls.scan = scan
++b('ui4', '${QT_UIC} ${SRC} -o ${TGT}', color='BLUE', before='cxx moc qxx_task', shell=False)
++b('ts2qm', '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}', color='BLUE', before='qm2rcc', shell=False)
++
++Task.task_type_from_func('qm2rcc', vars=[], func=process_qm2rcc, color='BLUE', before='rcc', after='ts2qm')
++
++def detect_qt4(conf):
++	env = conf.env
++	opt = Options.options
++
++	qtdir = getattr(opt, 'qtdir', '')
++	qtbin = getattr(opt, 'qtbin', '')
++	qtlibs = getattr(opt, 'qtlibs', '')
++	useframework = getattr(opt, 'use_qt4_osxframework', True)
++
++	paths = []
++
++	# the path to qmake has been given explicitly
++	if qtbin:
++		paths = [qtbin]
++
++	# no qtdir was given - try QT4_ROOT from the environment to deduce the qt binary path
++	if not qtdir:
++		qtdir = conf.environ.get('QT4_ROOT', '')
++		qtbin = os.path.join(qtdir, 'bin')
++		paths = [qtbin]
++
++	# no qtdir, look in the path and in /usr/local/Trolltech
++	if not qtdir:
++		paths = os.environ.get('PATH', '').split(os.pathsep)
++		paths.append('/usr/share/qt4/bin/')
++		try:
++			lst = os.listdir('/usr/local/Trolltech/')
++		except OSError:
++			pass
++		else:
++			if lst:
++				lst.sort()
++				lst.reverse()
++
++				# keep the highest version
++				qtdir = '/usr/local/Trolltech/%s/' % lst[0]
++				qtbin = os.path.join(qtdir, 'bin')
++				paths.append(qtbin)
++
++	# at the end, try to find qmake in the paths given
++	# keep the one with the highest version
++	cand = None
++	prev_ver = [4, 0, 0]
++	for qmk in ['qmake-qt4', 'qmake4', 'qmake']:
++		qmake = conf.find_program(qmk, path_list=paths)
++		if qmake:
++			try:
++				version = Utils.cmd_output([qmake, '-query', 'QT_VERSION']).strip()
++			except ValueError:
++				pass
++			else:
++				if version:
++					# compare version components numerically so that e.g. 4.10 ranks above 4.9
++					new_ver = [int(x) for x in version.split('.')]
++					if new_ver > prev_ver:
++						cand = qmake
++						prev_ver = new_ver
++	if cand:
++		qmake = cand
++	else:
++		conf.fatal('could not find qmake for qt4')
++
++	conf.env.QMAKE = qmake
++	qtincludes = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_HEADERS']).strip()
++	qtdir = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
++	qtbin = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_BINS']).strip() + os.sep
++
++	if not qtlibs:
++		try:
++			qtlibs = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_LIBS']).strip() + os.sep
++		except ValueError:
++			qtlibs = os.path.join(qtdir, 'lib')
++
++	def find_bin(lst, var):
++		for f in lst:
++			ret = conf.find_program(f, path_list=paths)
++			if ret:
++				env[var]=ret
++				break
++
++	vars = "QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtWebKit Qt3Support".split()
++
++	find_bin(['uic-qt3', 'uic3'], 'QT_UIC3')
++	find_bin(['uic-qt4', 'uic'], 'QT_UIC')
++	if not env['QT_UIC']:
++		conf.fatal('cannot find the uic compiler for qt4')
++
++	try:
++		version = Utils.cmd_output(env['QT_UIC'] + " -version 2>&1").strip()
++	except ValueError:
++		conf.fatal('your uic compiler is for qt3, add uic for qt4 to your path')
++
++	version = version.replace('Qt User Interface Compiler ','')
++	version = version.replace('User Interface Compiler for Qt', '')
++	if version.find(" 3.") != -1:
++		conf.check_message('uic version', '(too old)', 0, option='(%s)'%version)
++		sys.exit(1)
++	conf.check_message('uic version', '', 1, option='(%s)'%version)
++
++	find_bin(['moc-qt4', 'moc'], 'QT_MOC')
++	find_bin(['rcc'], 'QT_RCC')
++	find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE')
++	find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE')
++
++	env['UIC3_ST']= '%s -o %s'
++	env['UIC_ST'] = '%s -o %s'
++	env['MOC_ST'] = '-o'
++	env['ui_PATTERN'] = 'ui_%s.h'
++	env['QT_LRELEASE_FLAGS'] = ['-silent']
++
++	vars_debug = [a+'_debug' for a in vars]
++
++	try:
++		conf.find_program('pkg-config', var='pkgconfig', path_list=paths, mandatory=True)
++
++	except Configure.ConfigurationError:
++
++		for lib in vars_debug+vars:
++			uselib = lib.upper()
++
++			d = (lib.find('_debug') > 0) and 'd' or ''
++
++			# original author seems to prefer static to shared libraries
++			for (pat, kind) in ((conf.env.staticlib_PATTERN, 'STATIC'), (conf.env.shlib_PATTERN, '')):
++
++				conf.check_message_1('Checking for %s %s' % (lib, kind))
++
++				for ext in ['', '4']:
++					path = os.path.join(qtlibs, pat % (lib + d + ext))
++					if os.path.exists(path):
++						env.append_unique(kind + 'LIB_' + uselib, lib + d + ext)
++						conf.check_message_2('ok ' + path, 'GREEN')
++						break
++					path = os.path.join(qtbin, pat % (lib + d + ext))
++					if os.path.exists(path):
++						env.append_unique(kind + 'LIB_' + uselib, lib + d + ext)
++						conf.check_message_2('ok ' + path, 'GREEN')
++						break
++				else:
++					conf.check_message_2('not found', 'YELLOW')
++					continue
++				break
++
++			env.append_unique('LIBPATH_' + uselib, qtlibs)
++			env.append_unique('CPPPATH_' + uselib, qtincludes)
++			env.append_unique('CPPPATH_' + uselib, qtincludes + os.sep + lib)
++	else:
++		for i in vars_debug+vars:
++			try:
++				conf.check_cfg(package=i, args='--cflags --libs --silence-errors', path=conf.env.pkgconfig)
++			except ValueError:
++				pass
++
++	# the libpaths are set nicely, unfortunately they make really long command-lines
++	# remove the qtcore ones from qtgui, etc
++	def process_lib(vars_, coreval):
++		for d in vars_:
++			var = d.upper()
++			if var == 'QTCORE': continue
++
++			value = env['LIBPATH_'+var]
++			if value:
++				core = env[coreval]
++				accu = []
++				for lib in value:
++					if lib in core: continue
++					accu.append(lib)
++				env['LIBPATH_'+var] = accu
++
++	process_lib(vars, 'LIBPATH_QTCORE')
++	process_lib(vars_debug, 'LIBPATH_QTCORE_DEBUG')
++
++	# rpath if wanted
++	want_rpath = getattr(Options.options, 'want_rpath', 1)
++	if want_rpath:
++		def process_rpath(vars_, coreval):
++			for d in vars_:
++				var = d.upper()
++				value = env['LIBPATH_'+var]
++				if value:
++					core = env[coreval]
++					accu = []
++					for lib in value:
++						if var != 'QTCORE':
++							if lib in core:
++								continue
++						accu.append('-Wl,--rpath='+lib)
++					env['RPATH_'+var] = accu
++		process_rpath(vars, 'LIBPATH_QTCORE')
++		process_rpath(vars_debug, 'LIBPATH_QTCORE_DEBUG')
++
++	env['QTLOCALE'] = str(env['PREFIX'])+'/share/locale'
++
++def detect(conf):
++	detect_qt4(conf)
++
++def set_options(opt):
++	opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')
++
++	opt.add_option('--header-ext',
++		type='string',
++		default='',
++		help='header extension for moc files',
++		dest='qt_header_ext')
++
++	for i in 'qtdir qtbin qtlibs'.split():
++		opt.add_option('--'+i, type='string', default='', dest=i)
++
++	if sys.platform == "darwin":
++		opt.add_option('--no-qt4-framework', action="store_false", help='do not use the framework version of Qt4 in OS X', dest='use_qt4_osxframework',default=True)
++
++	opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False)
++
+diff --git a/buildtools/wafadmin/Tools/ruby.py b/buildtools/wafadmin/Tools/ruby.py
+new file mode 100644
+index 0000000..d3b7569
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/ruby.py
+@@ -0,0 +1,120 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# daniel.svensson@purplescout.se 2008
++
++import os
++import Task, Options, Utils
++from TaskGen import before, feature, after
++from Configure import conf
++
++@feature('rubyext')
++@before('apply_incpaths', 'apply_type_vars', 'apply_lib_vars', 'apply_bundle')
++@after('default_cc', 'vars_target_cshlib')
++def init_rubyext(self):
++	self.default_install_path = '${ARCHDIR_RUBY}'
++	self.uselib = self.to_list(getattr(self, 'uselib', ''))
++	if not 'RUBY' in self.uselib:
++		self.uselib.append('RUBY')
++	if not 'RUBYEXT' in self.uselib:
++		self.uselib.append('RUBYEXT')
++
++@feature('rubyext')
++@before('apply_link')
++def apply_ruby_so_name(self):
++	self.env['shlib_PATTERN'] = self.env['rubyext_PATTERN']
++
++@conf
++def check_ruby_version(conf, minver=()):
++	"""
++	Checks if ruby is installed.
++	If it is, the RUBY variable will be set in the environment.
++	The ruby binary can be overridden with the --with-ruby-binary configuration option.
++	"""
++
++	if Options.options.rubybinary:
++		conf.env.RUBY = Options.options.rubybinary
++	else:
++		conf.find_program("ruby", var="RUBY", mandatory=True)
++
++	ruby = conf.env.RUBY
++
++	try:
++		version = Utils.cmd_output([ruby, '-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
++	except Exception:
++		conf.fatal('could not determine ruby version')
++	conf.env.RUBY_VERSION = version
++
++	try:
++		ver = tuple(map(int, version.split(".")))
++	except Exception:
++		conf.fatal('unsupported ruby version %r' % version)
++
++	cver = ''
++	if minver:
++		if ver < minver:
++			conf.fatal('ruby is too old')
++		cver = ".".join([str(x) for x in minver])
++
++	conf.check_message('ruby', cver, True, version)
++
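++# Illustrative configure sequence for a ruby extension (a sketch only; a C
++# compiler tool must be loaded first, and the option names follow set_options
++# below):
++#
++#   def configure(conf):
++#       conf.check_tool('gcc')
++#       conf.check_tool('ruby')
++#       conf.check_ruby_version((1, 8, 0))
++#       conf.check_ruby_ext_devel()
++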
++@conf
++def check_ruby_ext_devel(conf):
++	if not conf.env.RUBY:
++		conf.fatal('ruby detection is required first')
++
++	if not conf.env.CC_NAME and not conf.env.CXX_NAME:
++		conf.fatal('load a c/c++ compiler first')
++
++	version = tuple(map(int, conf.env.RUBY_VERSION.split(".")))
++
++	def read_out(cmd):
++		return Utils.to_list(Utils.cmd_output([conf.env.RUBY, '-rrbconfig', '-e', cmd]))
++
++	def read_config(key):
++		return read_out('puts Config::CONFIG[%r]' % key)
++
++	ruby = conf.env['RUBY']
++	archdir = read_config('archdir')
++	cpppath = archdir
++	if version >= (1, 9, 0):
++		ruby_hdrdir = read_config('rubyhdrdir')
++		cpppath += ruby_hdrdir
++		cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])]
++
++	conf.check(header_name='ruby.h', includes=cpppath, mandatory=True, errmsg='could not find ruby header file')
++
++	conf.env.LIBPATH_RUBYEXT = read_config('libdir')
++	conf.env.LIBPATH_RUBYEXT += archdir
++	conf.env.CPPPATH_RUBYEXT = cpppath
++	conf.env.CCFLAGS_RUBYEXT = read_config("CCDLFLAGS")
++	conf.env.rubyext_PATTERN = '%s.' + read_config('DLEXT')[0]
++
++	# LDSHARED combines the link command with its flags; skip the leading
++	# command words so that only the flags remain
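++	# e.g. LDSHARED may look like "gcc -shared ..." (the exact value varies by
++	# platform and ruby build), in which case only "-shared ..." is kept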
++	flags = read_config('LDSHARED')
++	while flags and flags[0][0] != '-':
++		flags = flags[1:]
++
++	# we also want to strip out the deprecated ppc flags
++	if len(flags) > 1 and flags[1] == "ppc":
++		flags = flags[2:]
++
++	conf.env.LINKFLAGS_RUBYEXT = flags
++	conf.env.LINKFLAGS_RUBYEXT += read_config("LIBS")
++	conf.env.LINKFLAGS_RUBYEXT += read_config("LIBRUBYARG_SHARED")
++
++	if Options.options.rubyarchdir:
++		conf.env.ARCHDIR_RUBY = Options.options.rubyarchdir
++	else:
++		conf.env.ARCHDIR_RUBY = read_config('sitearchdir')[0]
++
++	if Options.options.rubylibdir:
++		conf.env.LIBDIR_RUBY = Options.options.rubylibdir
++	else:
++		conf.env.LIBDIR_RUBY = read_config('sitelibdir')[0]
++
++def set_options(opt):
++	opt.add_option('--with-ruby-archdir', type='string', dest='rubyarchdir', help='Specify directory where to install arch specific files')
++	opt.add_option('--with-ruby-libdir', type='string', dest='rubylibdir', help='Specify alternate ruby library path')
++	opt.add_option('--with-ruby-binary', type='string', dest='rubybinary', help='Specify alternate ruby binary')
++
+diff --git a/buildtools/wafadmin/Tools/suncc.py b/buildtools/wafadmin/Tools/suncc.py
+new file mode 100644
+index 0000000..b1a2aad
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/suncc.py
+@@ -0,0 +1,76 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006 (ita)
++# Ralf Habacker, 2006 (rh)
++
++import os, optparse
++import Utils, Options, Configure
++import ccroot, ar
++from Configure import conftest
++
++@conftest
++def find_scc(conf):
++	v = conf.env
++	cc = None
++	if v['CC']: cc = v['CC']
++	elif 'CC' in conf.environ: cc = conf.environ['CC']
++	#if not cc: cc = conf.find_program('gcc', var='CC')
++	if not cc: cc = conf.find_program('cc', var='CC')
++	if not cc: conf.fatal('suncc was not found')
++	cc = conf.cmd_to_list(cc)
++
++	try:
++		if not Utils.cmd_output(cc + ['-flags']):
++			conf.fatal('suncc %r was not found' % cc)
++	except ValueError:
++		conf.fatal('suncc -flags could not be executed')
++
++	v['CC']  = cc
++	v['CC_NAME'] = 'sun'
++
++@conftest
++def scc_common_flags(conf):
++	v = conf.env
++
++	# CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
++
++	v['CC_SRC_F']            = ''
++	v['CC_TGT_F']            = ['-c', '-o', '']
++	v['CPPPATH_ST']          = '-I%s' # template for adding include paths
++
++	# linker
++	if not v['LINK_CC']: v['LINK_CC'] = v['CC']
++	v['CCLNK_SRC_F']         = ''
++	v['CCLNK_TGT_F']         = ['-o', ''] # solaris hack, separate the -o from the target
++
++	v['LIB_ST']              = '-l%s' # template for adding libs
++	v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
++	v['STATICLIB_ST']        = '-l%s'
++	v['STATICLIBPATH_ST']    = '-L%s'
++	v['CCDEFINES_ST']        = '-D%s'
++
++	v['SONAME_ST']           = '-Wl,-h -Wl,%s'
++	v['SHLIB_MARKER']        = '-Bdynamic'
++	v['STATICLIB_MARKER']    = '-Bstatic'
++
++	# program
++	v['program_PATTERN']     = '%s'
++
++	# shared library
++	v['shlib_CCFLAGS']       = ['-Kpic', '-DPIC']
++	v['shlib_LINKFLAGS']     = ['-G']
++	v['shlib_PATTERN']       = 'lib%s.so'
++
++	# static lib
++	v['staticlib_LINKFLAGS'] = ['-Bstatic']
++	v['staticlib_PATTERN']   = 'lib%s.a'
++
++detect = '''
++find_scc
++find_cpp
++find_ar
++scc_common_flags
++cc_load_tools
++cc_add_flags
++link_add_flags
++'''
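++
++# Note: 'detect' may be a plain string of @conftest names, as above, rather
++# than a function; waf's configuration engine then runs each named method in
++# order (find_scc, find_cpp, find_ar, ...). suncxx.py below uses the same pattern.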
+diff --git a/buildtools/wafadmin/Tools/suncxx.py b/buildtools/wafadmin/Tools/suncxx.py
+new file mode 100644
+index 0000000..8754b6c
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/suncxx.py
+@@ -0,0 +1,75 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006 (ita)
++# Ralf Habacker, 2006 (rh)
++
++import os, optparse
++import Utils, Options, Configure
++import ccroot, ar
++from Configure import conftest
++
++@conftest
++def find_sxx(conf):
++	v = conf.env
++	cc = None
++	if v['CXX']: cc = v['CXX']
++	elif 'CXX' in conf.environ: cc = conf.environ['CXX']
++	if not cc: cc = conf.find_program('c++', var='CXX')
++	if not cc: conf.fatal('sunc++ was not found')
++	cc = conf.cmd_to_list(cc)
++
++	try:
++		if not Utils.cmd_output(cc + ['-flags']):
++			conf.fatal('sunc++ %r was not found' % cc)
++	except ValueError:
++		conf.fatal('sunc++ -flags could not be executed')
++
++	v['CXX']  = cc
++	v['CXX_NAME'] = 'sun'
++
++@conftest
++def sxx_common_flags(conf):
++	v = conf.env
++
++	# CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
++
++	v['CXX_SRC_F']           = ''
++	v['CXX_TGT_F']           = ['-c', '-o', '']
++	v['CPPPATH_ST']          = '-I%s' # template for adding include paths
++
++	# linker
++	if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
++	v['CXXLNK_SRC_F']        = ''
++	v['CXXLNK_TGT_F']        = ['-o', ''] # solaris hack, separate the -o from the target
++
++	v['LIB_ST']              = '-l%s' # template for adding libs
++	v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
++	v['STATICLIB_ST']        = '-l%s'
++	v['STATICLIBPATH_ST']    = '-L%s'
++	v['CXXDEFINES_ST']       = '-D%s'
++
++	v['SONAME_ST']           = '-Wl,-h -Wl,%s'
++	v['SHLIB_MARKER']        = '-Bdynamic'
++	v['STATICLIB_MARKER']    = '-Bstatic'
++
++	# program
++	v['program_PATTERN']     = '%s'
++
++	# shared library
++	v['shlib_CXXFLAGS']      = ['-Kpic', '-DPIC']
++	v['shlib_LINKFLAGS']     = ['-G']
++	v['shlib_PATTERN']       = 'lib%s.so'
++
++	# static lib
++	v['staticlib_LINKFLAGS'] = ['-Bstatic']
++	v['staticlib_PATTERN']   = 'lib%s.a'
++
++detect = '''
++find_sxx
++find_cpp
++find_ar
++sxx_common_flags
++cxx_load_tools
++cxx_add_flags
++link_add_flags
++'''
+diff --git a/buildtools/wafadmin/Tools/tex.py b/buildtools/wafadmin/Tools/tex.py
+new file mode 100644
+index 0000000..2dd748b
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/tex.py
+@@ -0,0 +1,251 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006 (ita)
++
++"TeX/LaTeX/PDFLaTeX support"
++
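++# Illustrative wscript usage (source and target names are examples only):
++#
++#   def build(bld):
++#       bld.new_task_gen(features='tex', type='latex',
++#                        source='report.tex', outs='ps pdf')
++#
++# 'type' may be 'latex' or 'pdflatex' (the default, see apply_tex below) and
++# 'outs' lists the post-processing outputs wanted ('ps' and/or 'pdf').
++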
++import os, re
++import Utils, TaskGen, Task, Runner, Build
++from TaskGen import feature, before
++from Logs import error, warn, debug
++
++re_tex = re.compile(r'\\(?P<type>include|input|import|bringin|lstinputlisting){(?P<file>[^{}]*)}', re.M)
++def scan(self):
++	node = self.inputs[0]
++	env = self.env
++
++	nodes = []
++	names = []
++	if not node: return (nodes, names)
++
++	code = Utils.readf(node.abspath(env))
++
++	curdirnode = self.curdirnode
++	abs = curdirnode.abspath()
++	for match in re_tex.finditer(code):
++		path = match.group('file')
++		if path:
++			for k in ['', '.tex', '.ltx']:
++				# add another loop for the tex include paths?
++				debug('tex: trying %s%s' % (path, k))
++				try:
++					os.stat(abs+os.sep+path+k)
++				except OSError:
++					continue
++				found = path+k
++				node = curdirnode.find_resource(found)
++				if node:
++					nodes.append(node)
++				break
++			else:
++				debug('tex: could not find %s' % path)
++				names.append(path)
++
++	debug("tex: found the following : %s and names %s" % (nodes, names))
++	return (nodes, names)
++
++latex_fun, _ = Task.compile_fun('latex', '${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
++pdflatex_fun, _ = Task.compile_fun('pdflatex', '${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
++bibtex_fun, _ = Task.compile_fun('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}', shell=False)
++makeindex_fun, _ = Task.compile_fun('bibtex', '${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}', shell=False)
++
++g_bibtex_re = re.compile('bibdata', re.M)
++def tex_build(task, command='LATEX'):
++	env = task.env
++	bld = task.generator.bld
++
++	if not env['PROMPT_LATEX']:
++		env.append_value('LATEXFLAGS', '-interaction=batchmode')
++		env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
++
++	fun = latex_fun
++	if command == 'PDFLATEX':
++		fun = pdflatex_fun
++
++	node = task.inputs[0]
++	reldir  = node.bld_dir(env)
++
++	#lst = []
++	#for c in Utils.split_path(reldir):
++	#	if c: lst.append('..')
++	#srcfile = os.path.join(*(lst + [node.srcpath(env)]))
++	#sr2 = os.path.join(*(lst + [node.parent.srcpath(env)]))
++	srcfile = node.abspath(env)
++	sr2 = node.parent.abspath() + os.pathsep + node.parent.abspath(env) + os.pathsep
++
++	aux_node = node.change_ext('.aux')
++	idx_node = node.change_ext('.idx')
++
++	nm = aux_node.name
++	docuname = nm[ : len(nm) - 4 ] # 4 is the size of ".aux"
++
++	# important, set the cwd for everybody
++	task.cwd = task.inputs[0].parent.abspath(task.env)
++
++
++	warn('first pass on %s' % command)
++
++	task.env.env = {'TEXINPUTS': sr2}
++	task.env.SRCFILE = srcfile
++	ret = fun(task)
++	if ret:
++		return ret
++
++	# look in the .aux file if there is a bibfile to process
++	try:
++		ct = Utils.readf(aux_node.abspath(env))
++	except (OSError, IOError):
++		error('could not read the aux file for the bibtex scan')
++	else:
++		fo = g_bibtex_re.findall(ct)
++
++		# there is a .aux file to process
++		if fo:
++			warn('calling bibtex')
++
++			task.env.env = {'BIBINPUTS': sr2, 'BSTINPUTS': sr2}
++			task.env.SRCFILE = docuname
++			ret = bibtex_fun(task)
++			if ret:
++				error('error when calling bibtex %s' % docuname)
++				return ret
++
++	# look on the filesystem if there is a .idx file to process
++	try:
++		idx_path = idx_node.abspath(env)
++		os.stat(idx_path)
++	except OSError:
++		error('error file.idx scan')
++	else:
++		warn('calling makeindex')
++
++		task.env.SRCFILE = idx_node.name
++		task.env.env = {}
++		ret = makeindex_fun(task)
++		if ret:
++			error('error when calling makeindex %s' % idx_path)
++			return ret
++
++
++	hash = ''
++	i = 0
++	while i < 10:
++		# guard against infinite loops - one never knows
++		i += 1
++
++		# watch the contents of file.aux
++		prev_hash = hash
++		try:
++			hash = Utils.h_file(aux_node.abspath(env))
++		except (IOError, OSError):
++			error('could not read the aux file -> %s' % aux_node.abspath(env))
++
++		# debug
++		#print "hash is, ", hash, " ", old_hash
++
++		# stop if file.aux does not change anymore
++		if hash and hash == prev_hash:
++			break
++
++		# run the command
++		warn('calling %s' % command)
++
++		task.env.env = {'TEXINPUTS': sr2 + os.pathsep}
++		task.env.SRCFILE = srcfile
++		ret = fun(task)
++		if ret:
++			error('error when calling %s %s' % (command, srcfile))
++			return ret
++
++	return None # ok
++
++latex_vardeps  = ['LATEX', 'LATEXFLAGS']
++def latex_build(task):
++	return tex_build(task, 'LATEX')
++
++pdflatex_vardeps  = ['PDFLATEX', 'PDFLATEXFLAGS']
++def pdflatex_build(task):
++	return tex_build(task, 'PDFLATEX')
++
++class tex_taskgen(TaskGen.task_gen):
++	def __init__(self, *k, **kw):
++		TaskGen.task_gen.__init__(self, *k, **kw)
++
++@feature('tex')
++@before('apply_core')
++def apply_tex(self):
++	if not getattr(self, 'type', None) in ['latex', 'pdflatex']:
++		self.type = 'pdflatex'
++
++	tree = self.bld
++	outs = Utils.to_list(getattr(self, 'outs', []))
++
++	# prompt for incomplete files (else the batchmode is used)
++	self.env['PROMPT_LATEX'] = getattr(self, 'prompt', 1)
++
++	deps_lst = []
++
++	if getattr(self, 'deps', None):
++		deps = self.to_list(self.deps)
++		for filename in deps:
++			n = self.path.find_resource(filename)
++			if not n in deps_lst: deps_lst.append(n)
++
++	self.source = self.to_list(self.source)
++	for filename in self.source:
++		base, ext = os.path.splitext(filename)
++
++		node = self.path.find_resource(filename)
++		if not node: raise Utils.WafError('cannot find %s' % filename)
++
++		if self.type == 'latex':
++			task = self.create_task('latex', node, node.change_ext('.dvi'))
++		elif self.type == 'pdflatex':
++			task = self.create_task('pdflatex', node, node.change_ext('.pdf'))
++
++		task.env = self.env
++		task.curdirnode = self.path
++
++		# add the manual dependencies
++		if deps_lst:
++			variant = node.variant(self.env)
++			try:
++				lst = tree.node_deps[task.unique_id()]
++				for n in deps_lst:
++					if not n in lst:
++						lst.append(n)
++			except KeyError:
++				tree.node_deps[task.unique_id()] = deps_lst
++
++		if self.type == 'latex':
++			if 'ps' in outs:
++				tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps'))
++				tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + os.pathsep + self.path.abspath() + os.pathsep + self.path.abspath(self.env)}
++			if 'pdf' in outs:
++				tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
++				tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + os.pathsep + self.path.abspath() + os.pathsep + self.path.abspath(self.env)}
++		elif self.type == 'pdflatex':
++			if 'ps' in outs:
++				self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
++	self.source = []
++
++def detect(conf):
++	v = conf.env
++	for p in 'tex latex pdflatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
++		conf.find_program(p, var=p.upper())
++		v[p.upper()+'FLAGS'] = ''
++	v['DVIPSFLAGS'] = '-Ppdf'
++
++b = Task.simple_task_type
++b('tex', '${TEX} ${TEXFLAGS} ${SRC}', color='BLUE', shell=False) # not used anywhere
++b('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRC}', color='BLUE', shell=False) # not used anywhere
++b('dvips', '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False)
++b('dvipdf', '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False)
++b('pdf2ps', '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}', color='BLUE', after="dvipdf pdflatex", shell=False)
++
++b = Task.task_type_from_func
++cls = b('latex', latex_build, vars=latex_vardeps)
++cls.scan = scan
++cls = b('pdflatex', pdflatex_build, vars=pdflatex_vardeps)
++cls.scan = scan
++
+diff --git a/buildtools/wafadmin/Tools/unittestw.py b/buildtools/wafadmin/Tools/unittestw.py
+new file mode 100644
+index 0000000..0e30a51
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/unittestw.py
+@@ -0,0 +1,310 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Carlos Rafael Giani, 2006
++
++"""
++Unit tests are run in the shutdown() method, and are meant for c/c++ programs
++
++The programs under test should NOT require any command-line parameters
++
++In the shutdown method, add the following code:
++
++	>>> def shutdown():
++	...	ut = UnitTest.unit_test()
++	...	ut.run()
++	...	ut.print_results()
++
++
++Each object to use as a unit test must be a program and must have X{obj.unit_test=1}
++"""
++import os, sys
++import Build, TaskGen, Utils, Options, Logs, Task
++from TaskGen import before, after, feature
++from Constants import *
++
++class unit_test(object):
++	"Unit test representation"
++	def __init__(self):
++		self.returncode_ok = 0		# Unit test returncode considered OK. All returncodes differing from this one
++						# will cause the unit test to be marked as "FAILED".
++
++		# The following variables are filled with data by run().
++
++		# print_results() uses these for printing the unit test summary,
++		# but if there is need for direct access to the results,
++		# they can be retrieved here, after calling run().
++
++		self.num_tests_ok = 0		# Number of successful unit tests
++		self.num_tests_failed = 0	# Number of failed unit tests
++		self.num_tests_err = 0		# Tests that have not even run
++		self.total_num_tests = 0	# Total number of unit tests
++		self.max_label_length = 0	# Maximum label length (pretty-print the output)
++
++		self.unit_tests = Utils.ordered_dict()		# Unit test dictionary. Key: the label (unit test filename relative
++						# to the build dir), value: unit test filename with absolute path
++		self.unit_test_results = {}	# Dictionary containing the unit test results.
++						# Key: the label, value: result (true = success, false = failure)
++		self.unit_test_erroneous = {}	# Dictionary indicating erroneous unit tests.
++						# Key: the label, value: true = unit test has an error, false = unit test is ok
++		self.change_to_testfile_dir = False #True if the test file needs to be executed from the same dir
++		self.want_to_see_test_output = False #True to see the stdout from the testfile (for example check suites)
++		self.want_to_see_test_error = False #True to see the stderr from the testfile (for example check suites)
++		self.run_if_waf_does = 'check' #build was the old default
++
++	def run(self):
++		"Run the unit tests and gather results (note: no output here)"
++
++		self.num_tests_ok = 0
++		self.num_tests_failed = 0
++		self.num_tests_err = 0
++		self.total_num_tests = 0
++		self.max_label_length = 0
++
++		self.unit_tests = Utils.ordered_dict()
++		self.unit_test_results = {}
++		self.unit_test_erroneous = {}
++
++		ld_library_path = []
++
++		# If waf is not building, don't run anything
++		if not Options.commands[self.run_if_waf_does]: return
++
++		# Get the paths for the shared libraries, and obtain the unit tests to execute
++		for obj in Build.bld.all_task_gen:
++			try:
++				link_task = obj.link_task
++			except AttributeError:
++				pass
++			else:
++				lib_path = link_task.outputs[0].parent.abspath(obj.env)
++				if lib_path not in ld_library_path:
++					ld_library_path.append(lib_path)
++
++			unit_test = getattr(obj, 'unit_test', '')
++			if unit_test and 'cprogram' in obj.features:
++				try:
++					output = obj.path
++					filename = os.path.join(output.abspath(obj.env), obj.target)
++					srcdir = output.abspath()
++					label = os.path.join(output.bldpath(obj.env), obj.target)
++					self.max_label_length = max(self.max_label_length, len(label))
++					self.unit_tests[label] = (filename, srcdir)
++				except KeyError:
++					pass
++		self.total_num_tests = len(self.unit_tests)
++		# Now run the unit tests
++		Utils.pprint('GREEN', 'Running the unit tests')
++		count = 0
++		result = 1
++
++		for label in self.unit_tests.allkeys:
++			file_and_src = self.unit_tests[label]
++			filename = file_and_src[0]
++			srcdir = file_and_src[1]
++			count += 1
++			line = Build.bld.progress_line(count, self.total_num_tests, Logs.colors.GREEN, Logs.colors.NORMAL)
++			if Options.options.progress_bar and line:
++				sys.stderr.write(line)
++				sys.stderr.flush()
++			try:
++				kwargs = {}
++				kwargs['env'] = os.environ.copy()
++				if self.change_to_testfile_dir:
++					kwargs['cwd'] = srcdir
++				if not self.want_to_see_test_output:
++					kwargs['stdout'] = Utils.pproc.PIPE  # PIPE for ignoring output
++				if not self.want_to_see_test_error:
++					kwargs['stderr'] = Utils.pproc.PIPE  # PIPE for ignoring output
++				if ld_library_path:
++					v = kwargs['env']
++					def add_path(dct, path, var):
++						dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])
++					if sys.platform == 'win32':
++						add_path(v, ld_library_path, 'PATH')
++					elif sys.platform == 'darwin':
++						add_path(v, ld_library_path, 'DYLD_LIBRARY_PATH')
++						add_path(v, ld_library_path, 'LD_LIBRARY_PATH')
++					else:
++						add_path(v, ld_library_path, 'LD_LIBRARY_PATH')
++
++				pp = Utils.pproc.Popen(filename, **kwargs)
++				(out, err) = pp.communicate() # uh, and the output is ignored?? - fortunately this is going to disappear
++
++				result = int(pp.returncode == self.returncode_ok)
++
++				if result:
++					self.num_tests_ok += 1
++				else:
++					self.num_tests_failed += 1
++
++				self.unit_test_results[label] = result
++				self.unit_test_erroneous[label] = 0
++			except OSError:
++				self.unit_test_erroneous[label] = 1
++				self.num_tests_err += 1
++			except KeyboardInterrupt:
++				pass
++		if Options.options.progress_bar: sys.stdout.write(Logs.colors.cursor_on)
++
++	def print_results(self):
++		"Pretty-prints a summary of all unit tests, along with some statistics"
++
++		# If waf is not building, don't output anything
++		if not Options.commands[self.run_if_waf_does]: return
++
++		p = Utils.pprint
++		# Early quit if no tests were performed
++		if self.total_num_tests == 0:
++			p('YELLOW', 'No unit tests present')
++			return
++
++		for label in self.unit_tests.allkeys:
++			filename = self.unit_tests[label]
++			err = 0
++			result = 0
++
++			try: err = self.unit_test_erroneous[label]
++			except KeyError: pass
++
++			try: result = self.unit_test_results[label]
++			except KeyError: pass
++
++			n = self.max_label_length - len(label)
++			if err: n += 4
++			elif result: n += 7
++			else: n += 3
++
++			line = '%s %s' % (label, '.' * n)
++
++			if err: p('RED', '%sERROR' % line)
++			elif result: p('GREEN', '%sOK' % line)
++			else: p('YELLOW', '%sFAILED' % line)
++
++		percentage_ok = float(self.num_tests_ok) / float(self.total_num_tests) * 100.0
++		percentage_failed = float(self.num_tests_failed) / float(self.total_num_tests) * 100.0
++		percentage_erroneous = float(self.num_tests_err) / float(self.total_num_tests) * 100.0
++
++		p('NORMAL', '''
++Successful tests:      %i (%.1f%%)
++Failed tests:          %i (%.1f%%)
++Erroneous tests:       %i (%.1f%%)
++
++Total number of tests: %i
++''' % (self.num_tests_ok, percentage_ok, self.num_tests_failed, percentage_failed,
++		self.num_tests_err, percentage_erroneous, self.total_num_tests))
++		p('GREEN', 'Unit tests finished')
++
++
++############################################################################################
++
++"""
++New unit test system
++
++The targets with feature 'test' are executed after they are built
++bld(features='cprogram cc test', ...)
++
++To display the results:
++import UnitTest
++bld.add_post_fun(UnitTest.summary)
++"""
++
++import threading
++testlock = threading.Lock()
++
++def set_options(opt):
++	opt.add_option('--alltests', action='store_true', default=True, help='Exec all unit tests', dest='all_tests')
++
++@feature('test')
++@after('apply_link', 'vars_target_cprogram')
++def make_test(self):
++	if not 'cprogram' in self.features:
++		Logs.error('test cannot be executed %s' % self)
++		return
++
++	self.default_install_path = None
++	self.create_task('utest', self.link_task.outputs)
++
++def exec_test(self):
++
++	status = 0
++
++	variant = self.env.variant()
++
++	filename = self.inputs[0].abspath(self.env)
++	self.ut_exec = getattr(self, 'ut_exec', [filename])
++	if getattr(self.generator, 'ut_fun', None):
++		self.generator.ut_fun(self)
++
++	try:
++		fu = getattr(self.generator.bld, 'all_test_paths')
++	except AttributeError:
++		fu = os.environ.copy()
++		self.generator.bld.all_test_paths = fu
++
++		lst = []
++		for obj in self.generator.bld.all_task_gen:
++			link_task = getattr(obj, 'link_task', None)
++			if link_task and link_task.env.variant() == variant:
++				lst.append(link_task.outputs[0].parent.abspath(obj.env))
++
++		def add_path(dct, path, var):
++			dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])
++
++		if sys.platform == 'win32':
++			add_path(fu, lst, 'PATH')
++		elif sys.platform == 'darwin':
++			add_path(fu, lst, 'DYLD_LIBRARY_PATH')
++			add_path(fu, lst, 'LD_LIBRARY_PATH')
++		else:
++			add_path(fu, lst, 'LD_LIBRARY_PATH')
++
++
++	cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath(self.env)
++	proc = Utils.pproc.Popen(self.ut_exec, cwd=cwd, env=fu, stderr=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE)
++	(stdout, stderr) = proc.communicate()
++
++	tup = (filename, proc.returncode, stdout, stderr)
++	self.generator.utest_result = tup
++
++	testlock.acquire()
++	try:
++		bld = self.generator.bld
++		Logs.debug("ut: %r", tup)
++		try:
++			bld.utest_results.append(tup)
++		except AttributeError:
++			bld.utest_results = [tup]
++	finally:
++		testlock.release()
++
++cls = Task.task_type_from_func('utest', func=exec_test, color='PINK', ext_in='.bin')
++
++old = cls.runnable_status
++def test_status(self):
++	ret = old(self)
++	if ret == SKIP_ME and getattr(Options.options, 'all_tests', False):
++		return RUN_ME
++	return ret
++
++cls.runnable_status = test_status
++cls.quiet = 1
++
++def summary(bld):
++	lst = getattr(bld, 'utest_results', [])
++	if lst:
++		Utils.pprint('CYAN', 'execution summary')
++
++		total = len(lst)
++		tfail = len([x for x in lst if x[1]])
++
++		Utils.pprint('CYAN', '  tests that pass %d/%d' % (total-tfail, total))
++		for (f, code, out, err) in lst:
++			if not code:
++				Utils.pprint('CYAN', '    %s' % f)
++
++		Utils.pprint('CYAN', '  tests that fail %d/%d' % (tfail, total))
++		for (f, code, out, err) in lst:
++			if code:
++				Utils.pprint('CYAN', '    %s' % f)
++
++
+diff --git a/buildtools/wafadmin/Tools/vala.py b/buildtools/wafadmin/Tools/vala.py
+new file mode 100644
+index 0000000..753ee8d
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/vala.py
+@@ -0,0 +1,308 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Ali Sabil, 2007
++
++import os.path, shutil
++import Task, Runner, Utils, Logs, Build, Node, Options
++from TaskGen import extension, after, before
++
++EXT_VALA = ['.vala', '.gs']
++
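++# Illustrative wscript usage (package and target names are examples only):
++# vala sources are simply listed on a c task generator, e.g.
++#
++#   bld.new_task_gen(features='cc cprogram', source='main.vala',
++#                    packages='gtk+-2.0', uselib='GTK', target='hello')
++#
++# the @extension hook below turns each .vala file into generated C code.
++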
++class valac_task(Task.Task):
++
++	vars = ("VALAC", "VALAC_VERSION", "VALAFLAGS")
++	before = ("cc", "cxx")
++
++	def run(self):
++		env = self.env
++		inputs = [a.srcpath(env) for a in self.inputs]
++		valac = env['VALAC']
++		vala_flags = env.get_flat('VALAFLAGS')
++		top_src = self.generator.bld.srcnode.abspath()
++		top_bld = self.generator.bld.srcnode.abspath(env)
++
++		if env['VALAC_VERSION'] > (0, 1, 6):
++			cmd = [valac, '-C', '--quiet', vala_flags]
++		else:
++			cmd = [valac, '-C', vala_flags]
++
++		if self.threading:
++			cmd.append('--thread')
++
++		if self.profile:
++			cmd.append('--profile=%s' % self.profile)
++
++		if self.target_glib:
++			cmd.append('--target-glib=%s' % self.target_glib)
++
++		features = self.generator.features
++
++		if 'cshlib' in features or 'cstaticlib' in features:
++			output_dir = self.outputs[0].bld_dir(env)
++			cmd.append('--library ' + self.target)
++			if env['VALAC_VERSION'] >= (0, 7, 0):
++				for x in self.outputs:
++					if x.name.endswith('.h'):
++						cmd.append('--header ' + x.bldpath(self.env))
++			cmd.append('--basedir ' + top_src)
++			cmd.append('-d ' + top_bld)
++			if env['VALAC_VERSION'] > (0, 7, 2) and hasattr(self, 'gir'):
++				cmd.append('--gir=%s.gir' % self.gir)
++
++		else:
++			output_dir = self.outputs[0].bld_dir(env)
++			cmd.append('-d %s' % output_dir)
++
++		for vapi_dir in self.vapi_dirs:
++			cmd.append('--vapidir=%s' % vapi_dir)
++
++		for package in self.packages:
++			cmd.append('--pkg %s' % package)
++
++		for package in self.packages_private:
++			cmd.append('--pkg %s' % package)
++
++		cmd.append(" ".join(inputs))
++		result = self.generator.bld.exec_command(" ".join(cmd))
++
++		if not 'cprogram' in features:
++			# generate the .deps file
++			if self.packages:
++				filename = os.path.join(self.generator.path.abspath(env), "%s.deps" % self.target)
++				deps = open(filename, 'w')
++				for package in self.packages:
++					deps.write(package + '\n')
++				deps.close()
++
++			# handle vala 0.1.6 which doesn't honor --directory for the generated .vapi
++			self._fix_output("../%s.vapi" % self.target)
++			# handle vala >= 0.1.7 which has a weird definition for --directory
++			self._fix_output("%s.vapi" % self.target)
++			# handle vala >= 0.2.0 which doesn't honor --directory for the generated .gidl
++			self._fix_output("%s.gidl" % self.target)
++			# handle vala >= 0.3.6 which doesn't honor --directory for the generated .gir
++			self._fix_output("%s.gir" % self.target)
++			if hasattr(self, 'gir'):
++				self._fix_output("%s.gir" % self.gir)
++
++		first = None
++		for node in self.outputs:
++			if not first:
++				first = node
++			else:
++				if first.parent.id != node.parent.id:
++					# issue #483
++					if env['VALAC_VERSION'] < (0, 7, 0):
++						shutil.move(first.parent.abspath(self.env) + os.sep + node.name, node.abspath(self.env))
++		return result
++
++	def install(self):
++		bld = self.generator.bld
++		features = self.generator.features
++
++		if self.attr("install_path") and ("cshlib" in features or "cstaticlib" in features):
++			headers_list = [o for o in self.outputs if o.suffix() == ".h"]
++			vapi_list = [o for o in self.outputs if (o.suffix() in (".vapi", ".deps"))]
++			gir_list = [o for o in self.outputs if o.suffix() == ".gir"]
++
++			for header in headers_list:
++				top_src = self.generator.bld.srcnode
++				package = self.env['PACKAGE']
++				try:
++					api_version = Utils.g_module.API_VERSION
++				except AttributeError:
++					version = Utils.g_module.VERSION.split(".")
++					if version[0] == "0":
++						api_version = "0." + version[1]
++					else:
++						api_version = version[0] + ".0"
++				install_path = '${INCLUDEDIR}/%s-%s/%s' % (package, api_version, header.relpath_gen(top_src))
++				bld.install_as(install_path, header, self.env)
++			bld.install_files('${DATAROOTDIR}/vala/vapi', vapi_list, self.env)
++			bld.install_files('${DATAROOTDIR}/gir-1.0', gir_list, self.env)
++
++	def _fix_output(self, output):
++		top_bld = self.generator.bld.srcnode.abspath(self.env)
++		try:
++			src = os.path.join(top_bld, output)
++			dst = self.generator.path.abspath (self.env)
++			shutil.move(src, dst)
++		except (IOError, OSError, shutil.Error):
++			pass
++
++@extension(EXT_VALA)
++def vala_file(self, node):
++	valatask = getattr(self, "valatask", None)
++	# there is only one vala task and it compiles all vala files .. :-/
++	if not valatask:
++		valatask = self.create_task('valac')
++		self.valatask = valatask
++		self.includes = Utils.to_list(getattr(self, 'includes', []))
++		self.uselib = self.to_list(self.uselib)
++		valatask.packages = []
++		valatask.packages_private = Utils.to_list(getattr(self, 'packages_private', []))
++		valatask.vapi_dirs = []
++		valatask.target = self.target
++		valatask.threading = False
++		valatask.install_path = self.install_path
++		valatask.profile = getattr (self, 'profile', 'gobject')
++		valatask.target_glib = None #Deprecated
++
++		packages = Utils.to_list(getattr(self, 'packages', []))
++		vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
++		includes =  []
++
++		if hasattr(self, 'uselib_local'):
++			local_packages = Utils.to_list(self.uselib_local)
++			seen = []
++			while len(local_packages) > 0:
++				package = local_packages.pop()
++				if package in seen:
++					continue
++				seen.append(package)
++
++				# check if the package exists
++				package_obj = self.name_to_obj(package)
++				if not package_obj:
++					raise Utils.WafError("object '%s' was not found in uselib_local (required by '%s')" % (package, self.name))
++
++				package_name = package_obj.target
++				package_node = package_obj.path
++				package_dir = package_node.relpath_gen(self.path)
++
++				for task in package_obj.tasks:
++					for output in task.outputs:
++						if output.name == package_name + ".vapi":
++							valatask.set_run_after(task)
++							if package_name not in packages:
++								packages.append(package_name)
++							if package_dir not in vapi_dirs:
++								vapi_dirs.append(package_dir)
++							if package_dir not in includes:
++								includes.append(package_dir)
++
++				if hasattr(package_obj, 'uselib_local'):
++					lst = self.to_list(package_obj.uselib_local)
++					lst.reverse()
++					local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages
++
++		valatask.packages = packages
++		for vapi_dir in vapi_dirs:
++			try:
++				valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath())
++				valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath(self.env))
++			except AttributeError:
++				Logs.warn("Unable to locate Vala API directory: '%s'" % vapi_dir)
++
++		self.includes.append(node.bld.srcnode.abspath())
++		self.includes.append(node.bld.srcnode.abspath(self.env))
++		for include in includes:
++			try:
++				self.includes.append(self.path.find_dir(include).abspath())
++				self.includes.append(self.path.find_dir(include).abspath(self.env))
++			except AttributeError:
++				Logs.warn("Unable to locate include directory: '%s'" % include)
++
++		if valatask.profile == 'gobject':
++			if hasattr(self, 'target_glib'):
++				Logs.warn('target_glib on vala tasks is deprecated; use --vala-target-glib=MAJOR.MINOR from the vala tool options')
++
++			if getattr(Options.options, 'vala_target_glib', None):
++				valatask.target_glib = Options.options.vala_target_glib
++
++			if not 'GOBJECT' in self.uselib:
++				self.uselib.append('GOBJECT')
++
++		if hasattr(self, 'threading'):
++			if valatask.profile == 'gobject':
++				valatask.threading = self.threading
++				if not 'GTHREAD' in self.uselib:
++					self.uselib.append('GTHREAD')
++			else:
++				# Vala doesn't have threading support for the dova or posix profiles
++				Logs.warn("Profile %s does not have threading support" % valatask.profile)
++
++		if hasattr(self, 'gir'):
++			valatask.gir = self.gir
++
++	env = valatask.env
++
++	output_nodes = []
++
++	c_node = node.change_ext('.c')
++	output_nodes.append(c_node)
++	self.allnodes.append(c_node)
++
++	if env['VALAC_VERSION'] < (0, 7, 0):
++		output_nodes.append(node.change_ext('.h'))
++	else:
++		if not 'cprogram' in self.features:
++			output_nodes.append(self.path.find_or_declare('%s.h' % self.target))
++
++	if not 'cprogram' in self.features:
++		output_nodes.append(self.path.find_or_declare('%s.vapi' % self.target))
++		if env['VALAC_VERSION'] > (0, 7, 2):
++			if hasattr(self, 'gir'):
++				output_nodes.append(self.path.find_or_declare('%s.gir' % self.gir))
++		elif env['VALAC_VERSION'] > (0, 3, 5):
++			output_nodes.append(self.path.find_or_declare('%s.gir' % self.target))
++		elif env['VALAC_VERSION'] > (0, 1, 7):
++			output_nodes.append(self.path.find_or_declare('%s.gidl' % self.target))
++		if valatask.packages:
++			output_nodes.append(self.path.find_or_declare('%s.deps' % self.target))
++
++	valatask.inputs.append(node)
++	valatask.outputs.extend(output_nodes)
++
++def detect(conf):
++	min_version = (0, 1, 6)
++	min_version_str = "%d.%d.%d" % min_version
++
++	valac = conf.find_program('valac', var='VALAC', mandatory=True)
++
++	if not conf.env["HAVE_GOBJECT"]:
++		pkg_args = {'package':      'gobject-2.0',
++		            'uselib_store': 'GOBJECT',
++		            'args':         '--cflags --libs'}
++		if getattr(Options.options, 'vala_target_glib', None):
++			pkg_args['atleast_version'] = Options.options.vala_target_glib
++
++		conf.check_cfg(**pkg_args)
++
++	if not conf.env["HAVE_GTHREAD"]:
++		pkg_args = {'package':      'gthread-2.0',
++		            'uselib_store': 'GTHREAD',
++		            'args':         '--cflags --libs'}
++		if getattr(Options.options, 'vala_target_glib', None):
++			pkg_args['atleast_version'] = Options.options.vala_target_glib
++
++		conf.check_cfg(**pkg_args)
++
++	try:
++		output = Utils.cmd_output(valac + " --version", silent=True)
++		version = output.split(' ', 1)[-1].strip().split(".")[0:3]
++		version = [int(x) for x in version]
++		valac_version = tuple(version)
++	except Exception:
++		valac_version = (0, 0, 0)
++
++	conf.check_message('program version',
++			'valac >= ' + min_version_str,
++			valac_version >= min_version,
++			"%d.%d.%d" % valac_version)
++
++	conf.check_tool('gnu_dirs')
++
++	if valac_version < min_version:
++		conf.fatal("valac version too old to be used with this tool")
++		return
++
++	conf.env['VALAC_VERSION'] = valac_version
++	conf.env['VALAFLAGS'] = ''
++
++def set_options (opt):
++	valaopts = opt.add_option_group('Vala Compiler Options')
++	valaopts.add_option ('--vala-target-glib', default=None,
++	                     dest='vala_target_glib', metavar='MAJOR.MINOR',
++	                     help='Target version of glib for Vala GObject code generation')
++
+diff --git a/buildtools/wafadmin/Tools/winres.py b/buildtools/wafadmin/Tools/winres.py
+new file mode 100644
+index 0000000..2500d43
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/winres.py
+@@ -0,0 +1,45 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Brant Young, 2007
++
++"This hook is called when the class cpp/cc task generator encounters a '.rc' file: X{.rc -> [.res|.rc.o]}"
++
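++# Illustrative usage (file names are examples only): listing a .rc file among
++# the sources of a c/c++ target is enough to trigger the hook below, e.g.
++#
++#   bld.new_task_gen(features='cc cprogram', source='main.c app.rc', target='app')
++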
++import os, sys, re
++import TaskGen, Task
++from Utils import quote_whitespace
++from TaskGen import extension
++
++EXT_WINRC = ['.rc']
++
++winrc_str = '${WINRC} ${_CPPDEFFLAGS} ${_CCDEFFLAGS} ${WINRCFLAGS} ${_CPPINCFLAGS} ${_CCINCFLAGS} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
++
++@extension(EXT_WINRC)
++def rc_file(self, node):
++	obj_ext = '.rc.o'
++	if self.env['WINRC_TGT_F'] == '/fo': obj_ext = '.res'
++
++	rctask = self.create_task('winrc', node, node.change_ext(obj_ext))
++	self.compiled_tasks.append(rctask)
++
++# create our action, for use with rc file
++Task.simple_task_type('winrc', winrc_str, color='BLUE', before='cc cxx', shell=False)
++
++def detect(conf):
++	v = conf.env
++
++	winrc = v['WINRC']
++	v['WINRC_TGT_F'] = '-o'
++	v['WINRC_SRC_F'] = '-i'
++	# find rc.exe
++	if not winrc:
++		if v['CC_NAME'] in ['gcc', 'cc', 'g++', 'c++']:
++			winrc = conf.find_program('windres', var='WINRC', path_list = v['PATH'])
++		elif v['CC_NAME'] == 'msvc':
++			winrc = conf.find_program('RC', var='WINRC', path_list = v['PATH'])
++			v['WINRC_TGT_F'] = '/fo'
++			v['WINRC_SRC_F'] = ''
++	if not winrc:
++		conf.fatal('winrc was not found!')
++
++	v['WINRCFLAGS'] = ''
++
+diff --git a/buildtools/wafadmin/Tools/xlc.py b/buildtools/wafadmin/Tools/xlc.py
+new file mode 100644
+index 0000000..e33b7a1
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/xlc.py
+@@ -0,0 +1,78 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006-2008 (ita)
++# Ralf Habacker, 2006 (rh)
++# Yinon Ehrlich, 2009
++# Michael Kuhn, 2009
++
++import os, sys
++import Configure, Options, Utils
++import ccroot, ar
++from Configure import conftest
++
++@conftest
++def find_xlc(conf):
++	cc = conf.find_program(['xlc_r', 'xlc'], var='CC', mandatory=True)
++	cc = conf.cmd_to_list(cc)
++	conf.env.CC_NAME = 'xlc'
++	conf.env.CC      = cc
++
++@conftest
++def find_cpp(conf):
++	v = conf.env
++	cpp = None
++	if v['CPP']: cpp = v['CPP']
++	elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
++	#if not cpp: cpp = v['CC']
++	v['CPP'] = cpp
++
++@conftest
++def xlc_common_flags(conf):
++	v = conf.env
++
++	# CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
++	v['CCFLAGS_DEBUG'] = ['-g']
++	v['CCFLAGS_RELEASE'] = ['-O2']
++
++	v['CC_SRC_F']            = ''
++	v['CC_TGT_F']            = ['-c', '-o', ''] # shell hack for -MD
++	v['CPPPATH_ST']          = '-I%s' # template for adding include paths
++
++	# linker
++	if not v['LINK_CC']: v['LINK_CC'] = v['CC']
++	v['CCLNK_SRC_F']         = ''
++	v['CCLNK_TGT_F']         = ['-o', ''] # shell hack for -MD
++
++	v['LIB_ST']              = '-l%s' # template for adding libs
++	v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
++	v['STATICLIB_ST']        = '-l%s'
++	v['STATICLIBPATH_ST']    = '-L%s'
++	v['RPATH_ST']            = '-Wl,-rpath,%s'
++	v['CCDEFINES_ST']        = '-D%s'
++
++	v['SONAME_ST']           = ''
++	v['SHLIB_MARKER']        = ''
++	v['STATICLIB_MARKER']    = ''
++	v['FULLSTATIC_MARKER']   = '-static'
++
++	# program
++	v['program_LINKFLAGS']   = ['-Wl,-brtl']
++	v['program_PATTERN']     = '%s'
++
++	# shared library
++	v['shlib_CCFLAGS']       = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC already defines the __PIC__ macro
++	v['shlib_LINKFLAGS']     = ['-G', '-Wl,-brtl,-bexpfull']
++	v['shlib_PATTERN']       = 'lib%s.so'
++
++	# static lib
++	v['staticlib_LINKFLAGS'] = ''
++	v['staticlib_PATTERN']   = 'lib%s.a'
++
++def detect(conf):
++	conf.find_xlc()
++	conf.find_cpp()
++	conf.find_ar()
++	conf.xlc_common_flags()
++	conf.cc_load_tools()
++	conf.cc_add_flags()
++	conf.link_add_flags()
+diff --git a/buildtools/wafadmin/Tools/xlcxx.py b/buildtools/wafadmin/Tools/xlcxx.py
+new file mode 100644
+index 0000000..6e84662
+--- /dev/null
++++ b/buildtools/wafadmin/Tools/xlcxx.py
+@@ -0,0 +1,78 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2006 (ita)
++# Ralf Habacker, 2006 (rh)
++# Yinon Ehrlich, 2009
++# Michael Kuhn, 2009
++
++import os, sys
++import Configure, Options, Utils
++import ccroot, ar
++from Configure import conftest
++
++@conftest
++def find_xlcxx(conf):
++	cxx = conf.find_program(['xlc++_r', 'xlc++'], var='CXX', mandatory=True)
++	cxx = conf.cmd_to_list(cxx)
++	conf.env.CXX_NAME = 'xlc++'
++	conf.env.CXX      = cxx
++
++@conftest
++def find_cpp(conf):
++	v = conf.env
++	cpp = None
++	if v['CPP']: cpp = v['CPP']
++	elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
++	#if not cpp: cpp = v['CXX']
++	v['CPP'] = cpp
++
++@conftest
++def xlcxx_common_flags(conf):
++	v = conf.env
++
++	# CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
++	v['CXXFLAGS_DEBUG'] = ['-g']
++	v['CXXFLAGS_RELEASE'] = ['-O2']
++
++	v['CXX_SRC_F']           = ''
++	v['CXX_TGT_F']           = ['-c', '-o', ''] # shell hack for -MD
++	v['CPPPATH_ST']          = '-I%s' # template for adding include paths
++
++	# linker
++	if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
++	v['CXXLNK_SRC_F']        = ''
++	v['CXXLNK_TGT_F']        = ['-o', ''] # shell hack for -MD
++
++	v['LIB_ST']              = '-l%s' # template for adding libs
++	v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
++	v['STATICLIB_ST']        = '-l%s'
++	v['STATICLIBPATH_ST']    = '-L%s'
++	v['RPATH_ST']            = '-Wl,-rpath,%s'
++	v['CXXDEFINES_ST']       = '-D%s'
++
++	v['SONAME_ST']           = ''
++	v['SHLIB_MARKER']        = ''
++	v['STATICLIB_MARKER']    = ''
++	v['FULLSTATIC_MARKER']   = '-static'
++
++	# program
++	v['program_LINKFLAGS']   = ['-Wl,-brtl']
++	v['program_PATTERN']     = '%s'
++
++	# shared library
++	v['shlib_CXXFLAGS']      = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC already defines the __PIC__ macro
++	v['shlib_LINKFLAGS']     = ['-G', '-Wl,-brtl,-bexpfull']
++	v['shlib_PATTERN']       = 'lib%s.so'
++
++	# static lib
++	v['staticlib_LINKFLAGS'] = ''
++	v['staticlib_PATTERN']   = 'lib%s.a'
++
++def detect(conf):
++	conf.find_xlcxx()
++	conf.find_cpp()
++	conf.find_ar()
++	conf.xlcxx_common_flags()
++	conf.cxx_load_tools()
++	conf.cxx_add_flags()
++	conf.link_add_flags()
+diff --git a/buildtools/wafadmin/Utils.py b/buildtools/wafadmin/Utils.py
+new file mode 100644
+index 0000000..41dad57
+--- /dev/null
++++ b/buildtools/wafadmin/Utils.py
+@@ -0,0 +1,726 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2005 (ita)
++
++"""
++Utilities, the stable ones are the following:
++
++* h_file: compute a unique value (hash) for a file; it uses
++  the module fnv if it is installed (see waf/utils/fnv & http://code.google.com/p/waf/wiki/FAQ),
++  otherwise md5 (see the python docs)
++
++  For large projects (projects with more than 15000 files) or slow hard disks and filesystems (HFS)
++  it is possible to use a hashing based on the path and the size (may give broken cache results)
++  The method h_file MUST raise an OSError if the file is a folder
++
++	import stat
++	def h_file(filename):
++		st = os.stat(filename)
++		if stat.S_ISDIR(st[stat.ST_MODE]): raise OSError('not a file')
++		m = Utils.md5()
++		m.update(str(st.st_mtime))
++		m.update(str(st.st_size))
++		m.update(filename)
++		return m.digest()
++
++	To replace the function in your project, use something like this:
++	import Utils
++	Utils.h_file = h_file
++
++* h_list
++* h_fun
++* get_term_cols
++* ordered_dict
++
++"""
++
++import os, sys, imp, string, errno, traceback, inspect, re, shutil, datetime, gc
++
++# In python 3.0 we can get rid of all this
++try: from UserDict import UserDict
++except ImportError: from collections import UserDict
++if sys.hexversion >= 0x2060000 or os.name == 'java':
++	import subprocess as pproc
++else:
++	import pproc
++import Logs
++from Constants import *
++
++try:
++	from collections import deque
++except ImportError:
++	class deque(list):
++		def popleft(self):
++			return self.pop(0)
++
++is_win32 = sys.platform == 'win32'
++
++try:
++	# defaultdict in python 2.5
++	from collections import defaultdict as DefaultDict
++except ImportError:
++	class DefaultDict(dict):
++		def __init__(self, default_factory):
++			super(DefaultDict, self).__init__()
++			self.default_factory = default_factory
++		def __getitem__(self, key):
++			try:
++				return super(DefaultDict, self).__getitem__(key)
++			except KeyError:
++				value = self.default_factory()
++				self[key] = value
++				return value
++
++class WafError(Exception):
++	def __init__(self, *args):
++		self.args = args
++		try:
++			self.stack = traceback.extract_stack()
++		except:
++			pass
++		Exception.__init__(self, *args)
++	def __str__(self):
++		return str(len(self.args) == 1 and self.args[0] or self.args)
++
++class WscriptError(WafError):
++	def __init__(self, message, wscript_file=None):
++		if wscript_file:
++			self.wscript_file = wscript_file
++			self.wscript_line = None
++		else:
++			try:
++				(self.wscript_file, self.wscript_line) = self.locate_error()
++			except:
++				(self.wscript_file, self.wscript_line) = (None, None)
++
++		msg_file_line = ''
++		if self.wscript_file:
++			msg_file_line = "%s:" % self.wscript_file
++			if self.wscript_line:
++				msg_file_line += "%s:" % self.wscript_line
++		err_message = "%s error: %s" % (msg_file_line, message)
++		WafError.__init__(self, err_message)
++
++	def locate_error(self):
++		stack = traceback.extract_stack()
++		stack.reverse()
++		for frame in stack:
++			file_name = os.path.basename(frame[0])
++			is_wscript = (file_name == WSCRIPT_FILE or file_name == WSCRIPT_BUILD_FILE)
++			if is_wscript:
++				return (frame[0], frame[1])
++		return (None, None)
++
++indicator = is_win32 and '\x1b[A\x1b[K%s%s%s\r' or '\x1b[K%s%s%s\r'
++
++try:
++	from fnv import new as md5
++	import Constants
++	Constants.SIG_NIL = 'signofnv'
++
++	def h_file(filename):
++		m = md5()
++		try:
++			m.hfile(filename)
++			x = m.digest()
++			if x is None: raise OSError("not a file")
++			return x
++		except SystemError:
++			raise OSError("not a file" + filename)
++
++except ImportError:
++	try:
++		try:
++			from hashlib import md5
++		except ImportError:
++			from md5 import md5
++
++		def h_file(filename):
++			# read and hash the file in 100kB chunks
++			f = open(filename, 'rb')
++			m = md5()
++			data = f.read(100000)
++			while data:
++				m.update(data)
++				data = f.read(100000)
++			f.close()
++			return m.digest()
++	except ImportError:
++		# portability fixes may be added elsewhere (although, md5 should be everywhere by now)
++		md5 = None
++
++class ordered_dict(UserDict):
++	def __init__(self, dict = None):
++		self.allkeys = []
++		UserDict.__init__(self, dict)
++
++	def __delitem__(self, key):
++		self.allkeys.remove(key)
++		UserDict.__delitem__(self, key)
++
++	def __setitem__(self, key, item):
++		if key not in self.allkeys: self.allkeys.append(key)
++		UserDict.__setitem__(self, key, item)
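++
++# Illustrative use of ordered_dict (insertion order is kept in .allkeys):
++#   d = ordered_dict(); d['b'] = 1; d['a'] = 2   # d.allkeys == ['b', 'a']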
++
++def exec_command(s, **kw):
++	if 'log' in kw:
++		kw['stdout'] = kw['stderr'] = kw['log']
++		del(kw['log'])
++	kw['shell'] = isinstance(s, str)
++
++	try:
++		proc = pproc.Popen(s, **kw)
++		return proc.wait()
++	except OSError:
++		return -1
++
++if is_win32:
++	def exec_command(s, **kw):
++		if 'log' in kw:
++			kw['stdout'] = kw['stderr'] = kw['log']
++			del(kw['log'])
++		kw['shell'] = isinstance(s, str)
++
++		if len(s) > 2000:
++			startupinfo = pproc.STARTUPINFO()
++			startupinfo.dwFlags |= pproc.STARTF_USESHOWWINDOW
++			kw['startupinfo'] = startupinfo
++
++		try:
++			if 'stdout' not in kw:
++				kw['stdout'] = pproc.PIPE
++				kw['stderr'] = pproc.PIPE
++				kw['universal_newlines'] = True
++				proc = pproc.Popen(s,**kw)
++				(stdout, stderr) = proc.communicate()
++				Logs.info(stdout)
++				if stderr:
++					Logs.error(stderr)
++				return proc.returncode
++			else:
++				proc = pproc.Popen(s,**kw)
++				return proc.wait()
++		except OSError:
++			return -1
++
++listdir = os.listdir
++if is_win32:
++	def listdir_win32(s):
++		if re.match('^[A-Za-z]:$', s):
++			# os.path.isdir fails if s contains only the drive name... (x:)
++			s += os.sep
++		if not os.path.isdir(s):
++			e = OSError()
++			e.errno = errno.ENOENT
++			raise e
++		return os.listdir(s)
++	listdir = listdir_win32
++
++def waf_version(mini = 0x010000, maxi = 0x100000):
++	"Halts if the waf version is wrong"
++	ver = HEXVERSION
++	try: min_val = mini + 0
++	except TypeError: min_val = int(mini.replace('.', '0'), 16)
++
++	if min_val > ver:
++		Logs.error("waf version should be at least %s (%s found)" % (mini, ver))
++		sys.exit(1)
++
++	try: max_val = maxi + 0
++	except TypeError: max_val = int(maxi.replace('.', '0'), 16)
++
++	if max_val < ver:
++		Logs.error("waf version should be at most %s (%s found)" % (maxi, ver))
++		sys.exit(1)
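++
++# e.g. waf_version(mini='1.5.0', maxi='1.6.0') accepts HEXVERSION values from
++# 0x10500 to 0x10600 (a string like '1.5.0' is parsed as the hex number 0x10500)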
++
++def python_24_guard():
++	if sys.hexversion < 0x20400f0 or sys.hexversion >= 0x3000000:
++		raise ImportError("Waf requires Python >= 2.3 but the raw source requires Python 2.4, 2.5 or 2.6")
++
++def ex_stack():
++	exc_type, exc_value, tb = sys.exc_info()
++	if Logs.verbose > 1:
++		exc_lines = traceback.format_exception(exc_type, exc_value, tb)
++		return ''.join(exc_lines)
++	return str(exc_value)
++
++def to_list(sth):
++	if isinstance(sth, str):
++		return sth.split()
++	else:
++		return sth
++
++g_loaded_modules = {}
++"index modules by absolute path"
++
++g_module=None
++"the main module is special"
++
++def load_module(file_path, name=WSCRIPT_FILE):
++	"this function requires an absolute path"
++	try:
++		return g_loaded_modules[file_path]
++	except KeyError:
++		pass
++
++	module = imp.new_module(name)
++
++	try:
++		code = readf(file_path, m='rU')
++	except (IOError, OSError):
++		raise WscriptError('Could not read the file %r' % file_path)
++
++	module.waf_hash_val = code
++
++	dt = os.path.dirname(file_path)
++	sys.path.insert(0, dt)
++	try:
++		exec(compile(code, file_path, 'exec'), module.__dict__)
++	except Exception:
++		exc_type, exc_value, tb = sys.exc_info()
++		raise WscriptError("".join(traceback.format_exception(exc_type, exc_value, tb)), file_path)
++	sys.path.remove(dt)
++
++	g_loaded_modules[file_path] = module
++
++	return module
++
++def set_main_module(file_path):
++	"Load custom options, if defined"
++	global g_module
++	g_module = load_module(file_path, 'wscript_main')
++	g_module.root_path = file_path
++
++	try:
++		g_module.APPNAME
++	except:
++		g_module.APPNAME = 'noname'
++	try:
++		g_module.VERSION
++	except:
++		g_module.VERSION = '1.0'
++
++	# note: to register the module globally, use the following:
++	# sys.modules['wscript_main'] = g_module
++
++def to_hashtable(s):
++	"used for importing env files"
++	tbl = {}
++	lst = s.split('\n')
++	for line in lst:
++		if not line: continue
++		mems = line.split('=')
++		tbl[mems[0]] = mems[1]
++	return tbl
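++
++# e.g. to_hashtable('CC=gcc\nCFLAGS=-O2') -> {'CC': 'gcc', 'CFLAGS': '-O2'}
++# (for each line only the text between the first two '=' is kept as the value)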
++
++def get_term_cols():
++	"console width"
++	return 80
++try:
++	import struct, fcntl, termios
++except ImportError:
++	pass
++else:
++	if Logs.got_tty:
++		def myfun():
++			dummy_lines, cols = struct.unpack("HHHH", \
++			fcntl.ioctl(sys.stderr.fileno(),termios.TIOCGWINSZ , \
++			struct.pack("HHHH", 0, 0, 0, 0)))[:2]
++			return cols
++		# we actually try the function once to see if it is suitable
++		try:
++			myfun()
++		except:
++			pass
++		else:
++			get_term_cols = myfun
++
++rot_idx = 0
++rot_chr = ['\\', '|', '/', '-']
++"the rotation character in the progress bar"
++
++
++def split_path(path):
++	return path.split('/')
++
++def split_path_cygwin(path):
++	if path.startswith('//'):
++		ret = path.split('/')[2:]
++		ret[0] = '/' + ret[0]
++		return ret
++	return path.split('/')
++
++re_sp = re.compile('[/\\\\]')
++def split_path_win32(path):
++	if path.startswith('\\\\'):
++		ret = re.split(re_sp, path)[2:]
++		ret[0] = '\\' + ret[0]
++		return ret
++	return re.split(re_sp, path)
++
++if sys.platform == 'cygwin':
++	split_path = split_path_cygwin
++elif is_win32:
++	split_path = split_path_win32
++
++def copy_attrs(orig, dest, names, only_if_set=False):
++	for a in to_list(names):
++		u = getattr(orig, a, ())
++		if u or not only_if_set:
++			setattr(dest, a, u)
++
++def def_attrs(cls, **kw):
++	'''
++	set attributes for class.
++	@param cls [any class]: the class to update the given attributes in.
++	@param kw [dictionary]: dictionary of attributes names and values.
++
++	if the given class is missing one (or more) of these attributes, add the attribute with its value to the class.
++	'''
++	for k, v in kw.iteritems():
++		if not hasattr(cls, k):
++			setattr(cls, k, v)
++
++def quote_define_name(path):
++	fu = re.compile("[^a-zA-Z0-9]").sub("_", path)
++	fu = fu.upper()
++	return fu
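++
++# e.g. quote_define_name('have/foo-bar.h') -> 'HAVE_FOO_BAR_H'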
++
++def quote_whitespace(path):
++	return (path.strip().find(' ') > 0 and '"%s"' % path or path).replace('""', '"')
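++
++# e.g. quote_whitespace('a path/with space') -> '"a path/with space"'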
++
++def trimquotes(s):
++	if not s: return ''
++	s = s.rstrip()
++	if s[0] == "'" and s[-1] == "'": return s[1:-1]
++	return s
++
++def h_list(lst):
++	m = md5()
++	m.update(str(lst))
++	return m.digest()
++
++def h_fun(fun):
++	try:
++		return fun.code
++	except AttributeError:
++		try:
++			h = inspect.getsource(fun)
++		except IOError:
++			h = "nocode"
++		try:
++			fun.code = h
++		except AttributeError:
++			pass
++		return h
++
++def pprint(col, str, label='', sep='\n'):
++	"print messages in color"
++	sys.stderr.write("%s%s%s %s%s" % (Logs.colors(col), str, Logs.colors.NORMAL, label, sep))
++
++def check_dir(dir):
++	"""If a folder doesn't exists, create it."""
++	try:
++		os.stat(dir)
++	except OSError:
++		try:
++			os.makedirs(dir)
++		except OSError, e:
++			raise WafError("Cannot create folder '%s' (original error: %s)" % (dir, e))
++
++def cmd_output(cmd, **kw):
++
++	silent = False
++	if 'silent' in kw:
++		silent = kw['silent']
++		del(kw['silent'])
++
++	if 'e' in kw:
++		tmp = kw['e']
++		del(kw['e'])
++		kw['env'] = tmp
++
++	kw['shell'] = isinstance(cmd, str)
++	kw['stdout'] = pproc.PIPE
++	if silent:
++		kw['stderr'] = pproc.PIPE
++
++	try:
++		p = pproc.Popen(cmd, **kw)
++		output = p.communicate()[0]
++	except OSError, e:
++		raise ValueError(str(e))
++
++	if p.returncode:
++		if not silent:
++			msg = "command execution failed: %s -> %r" % (cmd, str(output))
++			raise ValueError(msg)
++		output = ''
++	return output
++
++reg_subst = re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
++def subst_vars(expr, params):
++	"substitute ${PREFIX}/bin in /usr/local/bin"
++	def repl_var(m):
++		if m.group(1):
++			return '\\'
++		if m.group(2):
++			return '$'
++		try:
++			# environments may contain lists
++			return params.get_flat(m.group(3))
++		except AttributeError:
++			return params[m.group(3)]
++	return reg_subst.sub(repl_var, expr)
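++
++# e.g. subst_vars('${PREFIX}/bin', {'PREFIX': '/usr/local'}) -> '/usr/local/bin'
++# '$$' escapes a literal dollar sign: subst_vars('$${X}', {}) -> '${X}'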
++
++def unversioned_sys_platform_to_binary_format(unversioned_sys_platform):
++	"infers the binary format from the unversioned_sys_platform name."
++
++	if unversioned_sys_platform in ('linux', 'freebsd', 'netbsd', 'openbsd', 'sunos', 'gnu'):
++		return 'elf'
++	elif unversioned_sys_platform == 'darwin':
++		return 'mac-o'
++	elif unversioned_sys_platform in ('win32', 'cygwin', 'uwin', 'msys'):
++		return 'pe'
++	# TODO we assume all other operating systems are elf, which is not true.
++	# we may set this to 'unknown' and have ccroot and other tools handle the case "gracefully" (whatever that means).
++	return 'elf'
++
++def unversioned_sys_platform():
++	"""returns an unversioned name from sys.platform.
++	sys.platform is not very well defined and depends directly on the python source tree.
++	The version appended to the names is unreliable as it's taken from the build environment at the time python was built,
++	i.e., it's possible to get freebsd7 on a freebsd8 system.
++	So we remove the version from the name, except for special cases where the os has a stupid name like os2 or win32.
++	Some possible values of sys.platform are, amongst others:
++		aix3 aix4 atheos beos5 darwin freebsd2 freebsd3 freebsd4 freebsd5 freebsd6 freebsd7
++		generic gnu0 irix5 irix6 linux2 mac netbsd1 next3 os2emx riscos sunos5 unixware7
++	Investigating the python source tree may reveal more values.
++	"""
++	s = sys.platform
++	if s == 'java':
++		# The real OS is hidden under the JVM.
++		from java.lang import System
++		s = System.getProperty('os.name')
++		# see http://lopica.sourceforge.net/os.html for a list of possible values
++		if s == 'Mac OS X':
++			return 'darwin'
++		elif s.startswith('Windows '):
++			return 'win32'
++		elif s == 'OS/2':
++			return 'os2'
++		elif s == 'HP-UX':
++			return 'hpux'
++		elif s in ('SunOS', 'Solaris'):
++			return 'sunos'
++		else: s = s.lower()
++	if s == 'win32' or s.endswith('os2') and s != 'sunos2': return s
++	return re.split('\d+$', s)[0]
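++
++# e.g. 'linux2' -> 'linux' and 'sunos5' -> 'sunos', but 'win32' is kept as-is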
++
++#@deprecated('use unversioned_sys_platform instead')
++def detect_platform():
++	"""this function has been in the Utils module for some time.
++	It's hard to guess what people have used it for.
++	It seems its goal is to return an unversioned sys.platform, but it's not handling all platforms.
++	For example, the version is not removed on freebsd and netbsd, amongst others.
++	"""
++	s = sys.platform
++
++	# known POSIX
++	for x in 'cygwin linux irix sunos hpux aix darwin gnu'.split():
++		# sys.platform may be linux2
++		if s.find(x) >= 0:
++			return x
++
++	# unknown POSIX
++	if os.name in 'posix java os2'.split():
++		return os.name
++
++	return s
++
++def load_tool(tool, tooldir=None):
++	'''
++	load_tool: import a Python module, optionally using several directories.
++	@param tool [string]: name of tool to import.
++	@param tooldir [list]: directories to look for the tool.
++	@return: the loaded module.
++
++	Warning: this function is not thread-safe: it modifies sys.path,
++	so calls must run in sequence.
++	'''
++	if tooldir:
++		assert isinstance(tooldir, list)
++		sys.path = tooldir + sys.path
++	else:
++		tooldir = []
++	try:
++		return __import__(tool)
++	finally:
++		for dt in tooldir:
++			sys.path.remove(dt)
++
++def readf(fname, m='r'):
++	"get the contents of a file, it is not used anywhere for the moment"
++	f = open(fname, m)
++	try:
++		txt = f.read()
++	finally:
++		f.close()
++	return txt
++
++def nada(*k, **kw):
++	"""A function that does nothing"""
++	pass
++
++def diff_path(top, subdir):
++	"""difference between two absolute paths"""
++	top = os.path.normpath(top).replace('\\', '/').split('/')
++	subdir = os.path.normpath(subdir).replace('\\', '/').split('/')
++	if len(top) == len(subdir): return ''
++	diff = subdir[len(top) - len(subdir):]
++	return os.path.join(*diff)
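++
++# e.g. diff_path('/a/b', '/a/b/c/d') -> 'c/d' on POSIX ('\' is normalized to '/')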
++
++class Context(object):
++	"""A base class for commands to be executed from Waf scripts"""
++
++	def set_curdir(self, dir):
++		self.curdir_ = dir
++
++	def get_curdir(self):
++		try:
++			return self.curdir_
++		except AttributeError:
++			self.curdir_ = os.getcwd()
++			return self.get_curdir()
++
++	curdir = property(get_curdir, set_curdir)
++
++	def recurse(self, dirs, name=''):
++		"""The function for calling scripts from folders, it tries to call wscript + function_name
++		and if that file does not exist, it will call the method 'function_name' from a file named wscript
++		the dirs can be a list of folders or a string containing space-separated folder paths
++		"""
++		if not name:
++			name = inspect.stack()[1][3]
++
++		if isinstance(dirs, str):
++			dirs = to_list(dirs)
++
++		for x in dirs:
++			if os.path.isabs(x):
++				nexdir = x
++			else:
++				nexdir = os.path.join(self.curdir, x)
++
++			base = os.path.join(nexdir, WSCRIPT_FILE)
++			file_path = base + '_' + name
++
++			try:
++				txt = readf(file_path, m='rU')
++			except (OSError, IOError):
++				try:
++					module = load_module(base)
++				except OSError:
++					raise WscriptError('No such script %s' % base)
++
++				try:
++					f = module.__dict__[name]
++				except KeyError:
++					raise WscriptError('No function %s defined in %s' % (name, base))
++
++				if getattr(self.__class__, 'pre_recurse', None):
++					self.pre_recurse(f, base, nexdir)
++				old = self.curdir
++				self.curdir = nexdir
++				try:
++					f(self)
++				finally:
++					self.curdir = old
++				if getattr(self.__class__, 'post_recurse', None):
++					self.post_recurse(module, base, nexdir)
++			else:
++				dc = {'ctx': self}
++				if getattr(self.__class__, 'pre_recurse', None):
++					dc = self.pre_recurse(txt, file_path, nexdir)
++				old = self.curdir
++				self.curdir = nexdir
++				try:
++					try:
++						exec(compile(txt, file_path, 'exec'), dc)
++					except Exception:
++						exc_type, exc_value, tb = sys.exc_info()
++						raise WscriptError("".join(traceback.format_exception(exc_type, exc_value, tb)), base)
++				finally:
++					self.curdir = old
++				if getattr(self.__class__, 'post_recurse', None):
++					self.post_recurse(txt, file_path, nexdir)
++
++if is_win32:
++	old = shutil.copy2
++	def copy2(src, dst):
++		old(src, dst)
++		shutil.copystat(src, dst)
++	setattr(shutil, 'copy2', copy2)
++
++def zip_folder(dir, zip_file_name, prefix):
++	"""
++	prefix represents the app to add in the archive
++	"""
++	import zipfile
++	zip = zipfile.ZipFile(zip_file_name, 'w', compression=zipfile.ZIP_DEFLATED)
++	base = os.path.abspath(dir)
++
++	if prefix:
++		if prefix[-1] != os.sep:
++			prefix += os.sep
++
++	n = len(base)
++	for root, dirs, files in os.walk(base):
++		for f in files:
++			archive_name = prefix + root[n:] + os.sep + f
++			zip.write(root + os.sep + f, archive_name, zipfile.ZIP_DEFLATED)
++	zip.close()
++
++def get_elapsed_time(start):
++	"Format a time delta (datetime.timedelta) using the format DdHhMmS.MSs"
++	delta = datetime.datetime.now() - start
++	# cast to int necessary for python 3.0
++	days = int(delta.days)
++	hours = int(delta.seconds / 3600)
++	minutes = int((delta.seconds - hours * 3600) / 60)
++	seconds = delta.seconds - hours * 3600 - minutes * 60 \
++		+ float(delta.microseconds) / 1000 / 1000
++	result = ''
++	if days:
++		result += '%dd' % days
++	if days or hours:
++		result += '%dh' % hours
++	if days or hours or minutes:
++		result += '%dm' % minutes
++	return '%s%.3fs' % (result, seconds)
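++
++# e.g. a delta of 3661.5 seconds is formatted as '1h1m1.500s'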
++
++if os.name == 'java':
++	# For Jython (they should really fix the inconsistency)
++	try:
++		gc.disable()
++		gc.enable()
++	except NotImplementedError:
++		gc.disable = gc.enable
++
++def run_once(fun):
++	"""
++	decorator, make a function cache its results, use like this:
++
++	@run_once
++	def foo(k):
++		return 345*2343
++	"""
++	cache = {}
++	def wrap(k):
++		try:
++			return cache[k]
++		except KeyError:
++			ret = fun(k)
++			cache[k] = ret
++			return ret
++	wrap.__cache__ = cache
++	return wrap
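++
++# note: wrap() caches on a single hashable argument, so run_once only suits
++# one-argument functions; a multi-argument function would need a tuple key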
++
+diff --git a/buildtools/wafadmin/__init__.py b/buildtools/wafadmin/__init__.py
+new file mode 100644
+index 0000000..01273cf
+--- /dev/null
++++ b/buildtools/wafadmin/__init__.py
+@@ -0,0 +1,3 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2005 (ita)
+diff --git a/buildtools/wafadmin/ansiterm.py b/buildtools/wafadmin/ansiterm.py
+new file mode 100644
+index 0000000..720b79c
+--- /dev/null
++++ b/buildtools/wafadmin/ansiterm.py
+@@ -0,0 +1,236 @@
++import sys, os
++try:
++	if (not sys.stderr.isatty()) or (not sys.stdout.isatty()):
++		raise ValueError('not a tty')
++
++	from ctypes import *
++
++	class COORD(Structure):
++		_fields_ = [("X", c_short), ("Y", c_short)]
++
++	class SMALL_RECT(Structure):
++		_fields_ = [("Left", c_short), ("Top", c_short), ("Right", c_short), ("Bottom", c_short)]
++
++	class CONSOLE_SCREEN_BUFFER_INFO(Structure):
++		_fields_ = [("Size", COORD), ("CursorPosition", COORD), ("Attributes", c_short), ("Window", SMALL_RECT), ("MaximumWindowSize", COORD)]
++
++	class CONSOLE_CURSOR_INFO(Structure):
++		_fields_ = [('dwSize',c_ulong), ('bVisible', c_int)]
++
++	sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
++	csinfo = CONSOLE_CURSOR_INFO()
++	hconsole = windll.kernel32.GetStdHandle(-11)
++	windll.kernel32.GetConsoleScreenBufferInfo(hconsole, byref(sbinfo))
++	if sbinfo.Size.X < 10 or sbinfo.Size.Y < 10: raise Exception('small console')
++	windll.kernel32.GetConsoleCursorInfo(hconsole, byref(csinfo))
++except Exception:
++	pass
++else:
++	import re, threading
++
++	to_int = lambda number, default: number and int(number) or default
++	wlock = threading.Lock()
++
++	STD_OUTPUT_HANDLE = -11
++	STD_ERROR_HANDLE = -12
++
++	class AnsiTerm(object):
++		def __init__(self):
++			self.hconsole = windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
++			self.cursor_history = []
++			self.orig_sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
++			self.orig_csinfo = CONSOLE_CURSOR_INFO()
++			windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self.orig_sbinfo))
++			windll.kernel32.GetConsoleCursorInfo(hconsole, byref(self.orig_csinfo))
++
++
++		def screen_buffer_info(self):
++			sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
++			windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(sbinfo))
++			return sbinfo
++
++		def clear_line(self, param):
++			mode = param and int(param) or 0
++			sbinfo = self.screen_buffer_info()
++			if mode == 1: # Clear from beginning of line to cursor position
++				line_start = COORD(0, sbinfo.CursorPosition.Y)
++				line_length = sbinfo.Size.X
++			elif mode == 2: # Clear entire line
++				line_start = COORD(sbinfo.CursorPosition.X, sbinfo.CursorPosition.Y)
++				line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
++			else: # Clear from cursor position to end of line
++				line_start = sbinfo.CursorPosition
++				line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
++			chars_written = c_int()
++			windll.kernel32.FillConsoleOutputCharacterA(self.hconsole, c_char(' '), line_length, line_start, byref(chars_written))
++			windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, line_length, line_start, byref(chars_written))
++
++		def clear_screen(self, param):
++			mode = to_int(param, 0)
++			sbinfo = self.screen_buffer_info()
++			if mode == 1: # Clear from beginning of screen to cursor position
++				clear_start = COORD(0, 0)
++				clear_length = sbinfo.CursorPosition.X * sbinfo.CursorPosition.Y
++			elif mode == 2: # Clear entire screen and return cursor to home
++				clear_start = COORD(0, 0)
++				clear_length = sbinfo.Size.X * sbinfo.Size.Y
++				windll.kernel32.SetConsoleCursorPosition(self.hconsole, clear_start)
++			else: # Clear from cursor position to end of screen
++				clear_start = sbinfo.CursorPosition
++				clear_length = ((sbinfo.Size.X - sbinfo.CursorPosition.X) + sbinfo.Size.X * (sbinfo.Size.Y - sbinfo.CursorPosition.Y))
++			chars_written = c_int()
++			windll.kernel32.FillConsoleOutputCharacterA(self.hconsole, c_char(' '), clear_length, clear_start, byref(chars_written))
++			windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, clear_length, clear_start, byref(chars_written))
++
++		def push_cursor(self, param):
++			sbinfo = self.screen_buffer_info()
++			self.cursor_history.append(sbinfo.CursorPosition)
++
++		def pop_cursor(self, param):
++			if self.cursor_history:
++				old_pos = self.cursor_history.pop()
++				windll.kernel32.SetConsoleCursorPosition(self.hconsole, old_pos)
++
++		def set_cursor(self, param):
++			x, sep, y = param.partition(';')
++			x = to_int(x, 1) - 1
++			y = to_int(y, 1) - 1
++			sbinfo = self.screen_buffer_info()
++			new_pos = COORD(
++				min(max(0, x), sbinfo.Size.X),
++				min(max(0, y), sbinfo.Size.Y)
++			)
++			windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
++
++		def set_column(self, param):
++			x = to_int(param, 1) - 1
++			sbinfo = self.screen_buffer_info()
++			new_pos = COORD(
++				min(max(0, x), sbinfo.Size.X),
++				sbinfo.CursorPosition.Y
++			)
++			windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
++
++		def move_cursor(self, x_offset=0, y_offset=0):
++			sbinfo = self.screen_buffer_info()
++			new_pos = COORD(
++				min(max(0, sbinfo.CursorPosition.X + x_offset), sbinfo.Size.X),
++				min(max(0, sbinfo.CursorPosition.Y + y_offset), sbinfo.Size.Y)
++			)
++			windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
++
++		def move_up(self, param):
++			self.move_cursor(y_offset = -to_int(param, 1))
++
++		def move_down(self, param):
++			self.move_cursor(y_offset = to_int(param, 1))
++
++		def move_left(self, param):
++			self.move_cursor(x_offset = -to_int(param, 1))
++
++		def move_right(self, param):
++			self.move_cursor(x_offset = to_int(param, 1))
++
++		def next_line(self, param):
++			sbinfo = self.screen_buffer_info()
++			self.move_cursor(
++				x_offset = -sbinfo.CursorPosition.X,
++				y_offset = to_int(param, 1)
++			)
++
++		def prev_line(self, param):
++			sbinfo = self.screen_buffer_info()
++			self.move_cursor(
++				x_offset = -sbinfo.CursorPosition.X,
++				y_offset = -to_int(param, 1)
++			)
++
++		escape_to_color = { (0, 30): 0x0,			 #black
++							(0, 31): 0x4,			 #red
++							(0, 32): 0x2,			 #green
++							(0, 33): 0x4+0x2,		 #dark yellow
++							(0, 34): 0x1,			 #blue
++							(0, 35): 0x1+0x4,		 #purple
++							(0, 36): 0x2+0x4,		 #cyan
++							(0, 37): 0x1+0x2+0x4,	 #grey
++							(1, 30): 0x1+0x2+0x4,	 #dark gray
++							(1, 31): 0x4+0x8,		 #red
++							(1, 32): 0x2+0x8,		 #light green
++							(1, 33): 0x4+0x2+0x8,	 #yellow
++							(1, 34): 0x1+0x8,		 #light blue
++							(1, 35): 0x1+0x4+0x8,	 #light purple
++							(1, 36): 0x1+0x2+0x8,	 #light cyan
++							(1, 37): 0x1+0x2+0x4+0x8, #white
++						   }
++
++		def set_color(self, param):
++			cols = param.split(';')
++			attr = self.orig_sbinfo.Attributes
++			for c in cols:
++				c = to_int(c, 0)
++				if c in range(30,38):
++					attr = (attr & 0xf0) | (self.escape_to_color.get((0,c), 0x7))
++				elif c in range(40,48):
++					attr = (attr & 0x0f) | (self.escape_to_color.get((0,c), 0x7) << 8)
++				elif c in range(90,98):
++					attr = (attr & 0xf0) | (self.escape_to_color.get((1,c-60), 0x7))
++				elif c in range(100,108):
++					attr = (attr & 0x0f) | (self.escape_to_color.get((1,c-60), 0x7) << 8)
++				elif c == 1:
++					attr |= 0x08
++			windll.kernel32.SetConsoleTextAttribute(self.hconsole, attr)
++
++		def show_cursor(self,param):
++			csinfo.bVisible = 1
++			windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))
++
++		def hide_cursor(self,param):
++			csinfo.bVisible = 0
++			windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))
++
++		ansi_command_table = {
++			'A': move_up,
++			'B': move_down,
++			'C': move_right,
++			'D': move_left,
++			'E': next_line,
++			'F': prev_line,
++			'G': set_column,
++			'H': set_cursor,
++			'f': set_cursor,
++			'J': clear_screen,
++			'K': clear_line,
++			'h': show_cursor,
++			'l': hide_cursor,
++			'm': set_color,
++			's': push_cursor,
++			'u': pop_cursor,
++		}
++		# Match either the escape sequence or text not containing escape sequence
++		ansi_tokens = re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
++		def write(self, text):
++			try:
++				wlock.acquire()
++				for param, cmd, txt in self.ansi_tokens.findall(text):
++					if cmd:
++						cmd_func = self.ansi_command_table.get(cmd)
++						if cmd_func:
++							cmd_func(self, param)
++					else:
++						chars_written = c_int()
++						if isinstance(txt, unicode):
++							windll.kernel32.WriteConsoleW(self.hconsole, txt, len(txt), byref(chars_written), None)
++						else:
++							windll.kernel32.WriteConsoleA(self.hconsole, txt, len(txt), byref(chars_written), None)
++			finally:
++				wlock.release()
++
++		def flush(self):
++			pass
++
++		def isatty(self):
++			return True
++
++	sys.stderr = sys.stdout = AnsiTerm()
++	os.environ['TERM'] = 'vt100'
++
+diff --git a/buildtools/wafadmin/pproc.py b/buildtools/wafadmin/pproc.py
+new file mode 100644
+index 0000000..cb15178
+--- /dev/null
++++ b/buildtools/wafadmin/pproc.py
+@@ -0,0 +1,620 @@
++# borrowed from python 2.5.2c1
++# Copyright (c) 2003-2005 by Peter Astrand <astrand at lysator.liu.se>
++# Licensed to PSF under a Contributor Agreement.
++
++import sys
++mswindows = (sys.platform == "win32")
++
++import os
++import types
++import traceback
++import gc
++
++class CalledProcessError(Exception):
++    def __init__(self, returncode, cmd):
++        self.returncode = returncode
++        self.cmd = cmd
++    def __str__(self):
++        return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
++
++if mswindows:
++    import threading
++    import msvcrt
++    if 0:
++        import pywintypes
++        from win32api import GetStdHandle, STD_INPUT_HANDLE, \
++                             STD_OUTPUT_HANDLE, STD_ERROR_HANDLE
++        from win32api import GetCurrentProcess, DuplicateHandle, \
++                             GetModuleFileName, GetVersion
++        from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE
++        from win32pipe import CreatePipe
++        from win32process import CreateProcess, STARTUPINFO, \
++                                 GetExitCodeProcess, STARTF_USESTDHANDLES, \
++                                 STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE
++        from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0
++    else:
++        from _subprocess import *
++        class STARTUPINFO:
++            dwFlags = 0
++            hStdInput = None
++            hStdOutput = None
++            hStdError = None
++            wShowWindow = 0
++        class pywintypes:
++            error = IOError
++else:
++    import select
++    import errno
++    import fcntl
++    import pickle
++
++__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "CalledProcessError"]
++
++try:
++    MAXFD = os.sysconf("SC_OPEN_MAX")
++except:
++    MAXFD = 256
++
++try:
++    False
++except NameError:
++    False = 0
++    True = 1
++
++_active = []
++
++def _cleanup():
++    for inst in _active[:]:
++        if inst.poll(_deadstate=sys.maxint) >= 0:
++            try:
++                _active.remove(inst)
++            except ValueError:
++                pass
++
++PIPE = -1
++STDOUT = -2
++
++
++def call(*popenargs, **kwargs):
++    return Popen(*popenargs, **kwargs).wait()
++
++def check_call(*popenargs, **kwargs):
++    retcode = call(*popenargs, **kwargs)
++    cmd = kwargs.get("args")
++    if cmd is None:
++        cmd = popenargs[0]
++    if retcode:
++        raise CalledProcessError(retcode, cmd)
++    return retcode
++
++
++def list2cmdline(seq):
++    result = []
++    needquote = False
++    for arg in seq:
++        bs_buf = []
++
++        if result:
++            result.append(' ')
++
++        needquote = (" " in arg) or ("\t" in arg) or arg == ""
++        if needquote:
++            result.append('"')
++
++        for c in arg:
++            if c == '\\':
++                bs_buf.append(c)
++            elif c == '"':
++                result.append('\\' * len(bs_buf)*2)
++                bs_buf = []
++                result.append('\\"')
++            else:
++                if bs_buf:
++                    result.extend(bs_buf)
++                    bs_buf = []
++                result.append(c)
++
++        if bs_buf:
++            result.extend(bs_buf)
++
++        if needquote:
++            result.extend(bs_buf)
++            result.append('"')
++
++    return ''.join(result)
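++
++# e.g. list2cmdline(['echo', 'hello world']) -> 'echo "hello world"'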
++
++class Popen(object):
++    def __init__(self, args, bufsize=0, executable=None,
++                 stdin=None, stdout=None, stderr=None,
++                 preexec_fn=None, close_fds=False, shell=False,
++                 cwd=None, env=None, universal_newlines=False,
++                 startupinfo=None, creationflags=0):
++        _cleanup()
++
++        self._child_created = False
++        if not isinstance(bufsize, (int, long)):
++            raise TypeError("bufsize must be an integer")
++
++        if mswindows:
++            if preexec_fn is not None:
++                raise ValueError("preexec_fn is not supported on Windows platforms")
++            if close_fds:
++                raise ValueError("close_fds is not supported on Windows platforms")
++        else:
++            if startupinfo is not None:
++                raise ValueError("startupinfo is only supported on Windows platforms")
++            if creationflags != 0:
++                raise ValueError("creationflags is only supported on Windows platforms")
++
++        self.stdin = None
++        self.stdout = None
++        self.stderr = None
++        self.pid = None
++        self.returncode = None
++        self.universal_newlines = universal_newlines
++
++        (p2cread, p2cwrite,
++         c2pread, c2pwrite,
++         errread, errwrite) = self._get_handles(stdin, stdout, stderr)
++
++        self._execute_child(args, executable, preexec_fn, close_fds,
++                            cwd, env, universal_newlines,
++                            startupinfo, creationflags, shell,
++                            p2cread, p2cwrite,
++                            c2pread, c2pwrite,
++                            errread, errwrite)
++
++        if mswindows:
++            if stdin is None and p2cwrite is not None:
++                os.close(p2cwrite)
++                p2cwrite = None
++            if stdout is None and c2pread is not None:
++                os.close(c2pread)
++                c2pread = None
++            if stderr is None and errread is not None:
++                os.close(errread)
++                errread = None
++
++        if p2cwrite:
++            self.stdin = os.fdopen(p2cwrite, 'wb', bufsize)
++        if c2pread:
++            if universal_newlines:
++                self.stdout = os.fdopen(c2pread, 'rU', bufsize)
++            else:
++                self.stdout = os.fdopen(c2pread, 'rb', bufsize)
++        if errread:
++            if universal_newlines:
++                self.stderr = os.fdopen(errread, 'rU', bufsize)
++            else:
++                self.stderr = os.fdopen(errread, 'rb', bufsize)
++
++
++    def _translate_newlines(self, data):
++        data = data.replace("\r\n", "\n")
++        data = data.replace("\r", "\n")
++        return data
++
++
++    def __del__(self, sys=sys):
++        if not self._child_created:
++            return
++        self.poll(_deadstate=sys.maxint)
++        if self.returncode is None and _active is not None:
++            _active.append(self)
++
++
++    def communicate(self, input=None):
++        if [self.stdin, self.stdout, self.stderr].count(None) >= 2:
++            stdout = None
++            stderr = None
++            if self.stdin:
++                if input:
++                    self.stdin.write(input)
++                self.stdin.close()
++            elif self.stdout:
++                stdout = self.stdout.read()
++            elif self.stderr:
++                stderr = self.stderr.read()
++            self.wait()
++            return (stdout, stderr)
++
++        return self._communicate(input)
++
++
++    if mswindows:
++        def _get_handles(self, stdin, stdout, stderr):
++            if stdin is None and stdout is None and stderr is None:
++                return (None, None, None, None, None, None)
++
++            p2cread, p2cwrite = None, None
++            c2pread, c2pwrite = None, None
++            errread, errwrite = None, None
++
++            if stdin is None:
++                p2cread = GetStdHandle(STD_INPUT_HANDLE)
++            if p2cread is not None:
++                pass
++            elif stdin is None or stdin == PIPE:
++                p2cread, p2cwrite = CreatePipe(None, 0)
++                p2cwrite = p2cwrite.Detach()
++                p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0)
++            elif isinstance(stdin, int):
++                p2cread = msvcrt.get_osfhandle(stdin)
++            else:
++                p2cread = msvcrt.get_osfhandle(stdin.fileno())
++            p2cread = self._make_inheritable(p2cread)
++
++            if stdout is None:
++                c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE)
++            if c2pwrite is not None:
++                pass
++            elif stdout is None or stdout == PIPE:
++                c2pread, c2pwrite = CreatePipe(None, 0)
++                c2pread = c2pread.Detach()
++                c2pread = msvcrt.open_osfhandle(c2pread, 0)
++            elif isinstance(stdout, int):
++                c2pwrite = msvcrt.get_osfhandle(stdout)
++            else:
++                c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
++            c2pwrite = self._make_inheritable(c2pwrite)
++
++            if stderr is None:
++                errwrite = GetStdHandle(STD_ERROR_HANDLE)
++            if errwrite is not None:
++                pass
++            elif stderr is None or stderr == PIPE:
++                errread, errwrite = CreatePipe(None, 0)
++                errread = errread.Detach()
++                errread = msvcrt.open_osfhandle(errread, 0)
++            elif stderr == STDOUT:
++                errwrite = c2pwrite
++            elif isinstance(stderr, int):
++                errwrite = msvcrt.get_osfhandle(stderr)
++            else:
++                errwrite = msvcrt.get_osfhandle(stderr.fileno())
++            errwrite = self._make_inheritable(errwrite)
++
++            return (p2cread, p2cwrite,
++                    c2pread, c2pwrite,
++                    errread, errwrite)
++        def _make_inheritable(self, handle):
++            return DuplicateHandle(GetCurrentProcess(), handle, GetCurrentProcess(), 0, 1, DUPLICATE_SAME_ACCESS)
++
++        def _find_w9xpopen(self):
++            w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)), "w9xpopen.exe")
++            if not os.path.exists(w9xpopen):
++                w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), "w9xpopen.exe")
++                if not os.path.exists(w9xpopen):
++                    raise RuntimeError("Cannot locate w9xpopen.exe, which is needed for Popen to work with your shell or platform.")
++            return w9xpopen
++
++        def _execute_child(self, args, executable, preexec_fn, close_fds,
++                           cwd, env, universal_newlines,
++                           startupinfo, creationflags, shell,
++                           p2cread, p2cwrite,
++                           c2pread, c2pwrite,
++                           errread, errwrite):
++
++            if not isinstance(args, types.StringTypes):
++                args = list2cmdline(args)
++
++            if startupinfo is None:
++                startupinfo = STARTUPINFO()
++            if None not in (p2cread, c2pwrite, errwrite):
++                startupinfo.dwFlags |= STARTF_USESTDHANDLES
++                startupinfo.hStdInput = p2cread
++                startupinfo.hStdOutput = c2pwrite
++                startupinfo.hStdError = errwrite
++
++            if shell:
++                startupinfo.dwFlags |= STARTF_USESHOWWINDOW
++                startupinfo.wShowWindow = SW_HIDE
++                comspec = os.environ.get("COMSPEC", "cmd.exe")
++                args = comspec + " /c " + args
++                if (GetVersion() >= 0x80000000L or
++                        os.path.basename(comspec).lower() == "command.com"):
++                    w9xpopen = self._find_w9xpopen()
++                    args = '"%s" %s' % (w9xpopen, args)
++                    creationflags |= CREATE_NEW_CONSOLE
++
++            try:
++                hp, ht, pid, tid = CreateProcess(executable, args, None, None, 1, creationflags, env, cwd, startupinfo)
++            except pywintypes.error, e:
++                raise WindowsError(*e.args)
++
++            self._child_created = True
++            self._handle = hp
++            self.pid = pid
++            ht.Close()
++
++            if p2cread is not None:
++                p2cread.Close()
++            if c2pwrite is not None:
++                c2pwrite.Close()
++            if errwrite is not None:
++                errwrite.Close()
++
++
++        def poll(self, _deadstate=None):
++            if self.returncode is None:
++                if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0:
++                    self.returncode = GetExitCodeProcess(self._handle)
++            return self.returncode
++
++
++        def wait(self):
++            if self.returncode is None:
++                obj = WaitForSingleObject(self._handle, INFINITE)
++                self.returncode = GetExitCodeProcess(self._handle)
++            return self.returncode
++
++        def _readerthread(self, fh, buffer):
++            buffer.append(fh.read())
++
++        def _communicate(self, input):
++            stdout = None
++            stderr = None
++
++            if self.stdout:
++                stdout = []
++                stdout_thread = threading.Thread(target=self._readerthread, args=(self.stdout, stdout))
++                stdout_thread.setDaemon(True)
++                stdout_thread.start()
++            if self.stderr:
++                stderr = []
++                stderr_thread = threading.Thread(target=self._readerthread, args=(self.stderr, stderr))
++                stderr_thread.setDaemon(True)
++                stderr_thread.start()
++
++            if self.stdin:
++                if input is not None:
++                    self.stdin.write(input)
++                self.stdin.close()
++
++            if self.stdout:
++                stdout_thread.join()
++            if self.stderr:
++                stderr_thread.join()
++
++            if stdout is not None:
++                stdout = stdout[0]
++            if stderr is not None:
++                stderr = stderr[0]
++
++            if self.universal_newlines and hasattr(file, 'newlines'):
++                if stdout:
++                    stdout = self._translate_newlines(stdout)
++                if stderr:
++                    stderr = self._translate_newlines(stderr)
++
++            self.wait()
++            return (stdout, stderr)
++
++    else:
++        def _get_handles(self, stdin, stdout, stderr):
++            p2cread, p2cwrite = None, None
++            c2pread, c2pwrite = None, None
++            errread, errwrite = None, None
++
++            if stdin is None:
++                pass
++            elif stdin == PIPE:
++                p2cread, p2cwrite = os.pipe()
++            elif isinstance(stdin, int):
++                p2cread = stdin
++            else:
++                p2cread = stdin.fileno()
++
++            if stdout is None:
++                pass
++            elif stdout == PIPE:
++                c2pread, c2pwrite = os.pipe()
++            elif isinstance(stdout, int):
++                c2pwrite = stdout
++            else:
++                c2pwrite = stdout.fileno()
++
++            if stderr is None:
++                pass
++            elif stderr == PIPE:
++                errread, errwrite = os.pipe()
++            elif stderr == STDOUT:
++                errwrite = c2pwrite
++            elif isinstance(stderr, int):
++                errwrite = stderr
++            else:
++                errwrite = stderr.fileno()
++
++            return (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite)
++
++        def _set_cloexec_flag(self, fd):
++            try:
++                cloexec_flag = fcntl.FD_CLOEXEC
++            except AttributeError:
++                cloexec_flag = 1
++
++            old = fcntl.fcntl(fd, fcntl.F_GETFD)
++            fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag)
++
++        def _close_fds(self, but):
++            for i in xrange(3, MAXFD):
++                if i == but:
++                    continue
++                try:
++                    os.close(i)
++                except:
++                    pass
++
++        def _execute_child(self, args, executable, preexec_fn, close_fds,
++                           cwd, env, universal_newlines, startupinfo, creationflags, shell,
++                           p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite):
++
++            if isinstance(args, types.StringTypes):
++                args = [args]
++            else:
++                args = list(args)
++
++            if shell:
++                args = ["/bin/sh", "-c"] + args
++
++            if executable is None:
++                executable = args[0]
++
++            errpipe_read, errpipe_write = os.pipe()
++            self._set_cloexec_flag(errpipe_write)
++
++            gc_was_enabled = gc.isenabled()
++            gc.disable()
++            try:
++                self.pid = os.fork()
++            except:
++                if gc_was_enabled:
++                    gc.enable()
++                raise
++            self._child_created = True
++            if self.pid == 0:
++                try:
++                    if p2cwrite:
++                        os.close(p2cwrite)
++                    if c2pread:
++                        os.close(c2pread)
++                    if errread:
++                        os.close(errread)
++                    os.close(errpipe_read)
++
++                    if p2cread:
++                        os.dup2(p2cread, 0)
++                    if c2pwrite:
++                        os.dup2(c2pwrite, 1)
++                    if errwrite:
++                        os.dup2(errwrite, 2)
++
++                    if p2cread and p2cread not in (0,):
++                        os.close(p2cread)
++                    if c2pwrite and c2pwrite not in (p2cread, 1):
++                        os.close(c2pwrite)
++                    if errwrite and errwrite not in (p2cread, c2pwrite, 2):
++                        os.close(errwrite)
++
++                    if close_fds:
++                        self._close_fds(but=errpipe_write)
++
++                    if cwd is not None:
++                        os.chdir(cwd)
++
++                    if preexec_fn:
++                        apply(preexec_fn)
++
++                    if env is None:
++                        os.execvp(executable, args)
++                    else:
++                        os.execvpe(executable, args, env)
++
++                except:
++                    exc_type, exc_value, tb = sys.exc_info()
++                    exc_lines = traceback.format_exception(exc_type, exc_value, tb)
++                    exc_value.child_traceback = ''.join(exc_lines)
++                    os.write(errpipe_write, pickle.dumps(exc_value))
++
++                os._exit(255)
++
++            if gc_was_enabled:
++                gc.enable()
++            os.close(errpipe_write)
++            if p2cread and p2cwrite:
++                os.close(p2cread)
++            if c2pwrite and c2pread:
++                os.close(c2pwrite)
++            if errwrite and errread:
++                os.close(errwrite)
++
++            data = os.read(errpipe_read, 1048576)
++            os.close(errpipe_read)
++            if data != "":
++                os.waitpid(self.pid, 0)
++                child_exception = pickle.loads(data)
++                raise child_exception
++
++        def _handle_exitstatus(self, sts):
++            if os.WIFSIGNALED(sts):
++                self.returncode = -os.WTERMSIG(sts)
++            elif os.WIFEXITED(sts):
++                self.returncode = os.WEXITSTATUS(sts)
++            else:
++                raise RuntimeError("Unknown child exit status!")
++
++        def poll(self, _deadstate=None):
++            if self.returncode is None:
++                try:
++                    pid, sts = os.waitpid(self.pid, os.WNOHANG)
++                    if pid == self.pid:
++                        self._handle_exitstatus(sts)
++                except os.error:
++                    if _deadstate is not None:
++                        self.returncode = _deadstate
++            return self.returncode
++
++        def wait(self):
++            if self.returncode is None:
++                pid, sts = os.waitpid(self.pid, 0)
++                self._handle_exitstatus(sts)
++            return self.returncode
++
++        def _communicate(self, input):
++            read_set = []
++            write_set = []
++            stdout = None
++            stderr = None
++
++            if self.stdin:
++                self.stdin.flush()
++                if input:
++                    write_set.append(self.stdin)
++                else:
++                    self.stdin.close()
++            if self.stdout:
++                read_set.append(self.stdout)
++                stdout = []
++            if self.stderr:
++                read_set.append(self.stderr)
++                stderr = []
++
++            input_offset = 0
++            while read_set or write_set:
++                rlist, wlist, xlist = select.select(read_set, write_set, [])
++
++                if self.stdin in wlist:
++                    bytes_written = os.write(self.stdin.fileno(), buffer(input, input_offset, 512))
++                    input_offset += bytes_written
++                    if input_offset >= len(input):
++                        self.stdin.close()
++                        write_set.remove(self.stdin)
++
++                if self.stdout in rlist:
++                    data = os.read(self.stdout.fileno(), 1024)
++                    if data == "":
++                        self.stdout.close()
++                        read_set.remove(self.stdout)
++                    stdout.append(data)
++
++                if self.stderr in rlist:
++                    data = os.read(self.stderr.fileno(), 1024)
++                    if data == "":
++                        self.stderr.close()
++                        read_set.remove(self.stderr)
++                    stderr.append(data)
++
++            if stdout is not None:
++                stdout = ''.join(stdout)
++            if stderr is not None:
++                stderr = ''.join(stderr)
++
++            if self.universal_newlines and hasattr(file, 'newlines'):
++                if stdout:
++                    stdout = self._translate_newlines(stdout)
++                if stderr:
++                    stderr = self._translate_newlines(stderr)
++
++            self.wait()
++            return (stdout, stderr)
++
+diff --git a/buildtools/wafadmin/py3kfixes.py b/buildtools/wafadmin/py3kfixes.py
+new file mode 100644
+index 0000000..2f3c9c2
+--- /dev/null
++++ b/buildtools/wafadmin/py3kfixes.py
+@@ -0,0 +1,130 @@
++#!/usr/bin/env python
++# encoding: utf-8
++# Thomas Nagy, 2009 (ita)
++
++"""
++Fixes for py3k go here
++"""
++
++import os
++
++all_modifs = {}
++
++def modif(dir, name, fun):
++	if name == '*':
++		lst = []
++		for y in '. Tools 3rdparty'.split():
++			for x in os.listdir(os.path.join(dir, y)):
++				if x.endswith('.py'):
++					lst.append(y + os.sep + x)
++		#lst = [y + os.sep + x for x in os.listdir(os.path.join(dir, y)) for y in '. Tools 3rdparty'.split() if x.endswith('.py')]
++		for x in lst:
++			modif(dir, x, fun)
++		return
++
++	filename = os.path.join(dir, name)
++	f = open(filename, 'r')
++	txt = f.read()
++	f.close()
++
++	txt = fun(txt)
++
++	f = open(filename, 'w')
++	f.write(txt)
++	f.close()
++
++def subst(filename):
++	def do_subst(fun):
++		global all_modifs
++		try:
++			all_modifs[filename].append(fun)
++		except KeyError:
++			all_modifs[filename] = [fun]
++		return fun
++	return do_subst
++
++@subst('Constants.py')
++def r1(code):
++	code = code.replace("'iluvcuteoverload'", "b'iluvcuteoverload'")
++	code = code.replace("ABI=7", "ABI=37")
++	return code
++
++@subst('Tools/ccroot.py')
++def r2(code):
++	code = code.replace("p.stdin.write('\\n')", "p.stdin.write(b'\\n')")
++	code = code.replace('p.communicate()[0]', 'p.communicate()[0].decode("utf-8")')
++	return code
++
++@subst('Utils.py')
++def r3(code):
++	code = code.replace("m.update(str(lst))", "m.update(str(lst).encode())")
++	code = code.replace('p.communicate()[0]', 'p.communicate()[0].decode("utf-8")')
++	return code
++
++@subst('ansiterm.py')
++def r33(code):
++	code = code.replace('unicode', 'str')
++	return code
++
++@subst('Task.py')
++def r4(code):
++	code = code.replace("up(self.__class__.__name__)", "up(self.__class__.__name__.encode())")
++	code = code.replace("up(self.env.variant())", "up(self.env.variant().encode())")
++	code = code.replace("up(x.parent.abspath())", "up(x.parent.abspath().encode())")
++	code = code.replace("up(x.name)", "up(x.name.encode())")
++	code = code.replace('class TaskBase(object):\n\t__metaclass__=store_task_type', 'import binascii\n\nclass TaskBase(object, metaclass=store_task_type):')
++	code = code.replace('keys=self.cstr_groups.keys()', 'keys=list(self.cstr_groups.keys())')
++	code = code.replace("sig.encode('hex')", 'binascii.hexlify(sig)')
++	code = code.replace("os.path.join(Options.cache_global,ssig)", "os.path.join(Options.cache_global,ssig.decode())")
++	return code
++
++@subst('Build.py')
++def r5(code):
++	code = code.replace("cPickle.dump(data,file,-1)", "cPickle.dump(data,file)")
++	code = code.replace('for node in src_dir_node.childs.values():', 'for node in list(src_dir_node.childs.values()):')
++	return code
++
++@subst('*')
++def r6(code):
++	code = code.replace('xrange', 'range')
++	code = code.replace('iteritems', 'items')
++	code = code.replace('maxint', 'maxsize')
++	code = code.replace('iterkeys', 'keys')
++	code = code.replace('Error,e:', 'Error as e:')
++	code = code.replace('Exception,e:', 'Exception as e:')
++	return code
++
++@subst('TaskGen.py')
++def r7(code):
++	code = code.replace('class task_gen(object):\n\t__metaclass__=register_obj', 'class task_gen(object, metaclass=register_obj):')
++	return code
++
++@subst('Tools/python.py')
++def r8(code):
++	code = code.replace('proc.communicate()[0]', 'proc.communicate()[0].decode("utf-8")')
++	return code
++
++@subst('Tools/glib2.py')
++def r9(code):
++	code = code.replace('f.write(c)', 'f.write(c.encode("utf-8"))')
++	return code
++
++@subst('Tools/config_c.py')
++def r10(code):
++	code = code.replace("key=kw['success']", "key=kw['success']\n\t\t\t\ttry:\n\t\t\t\t\tkey=key.decode('utf-8')\n\t\t\t\texcept:\n\t\t\t\t\tpass")
++	code = code.replace('out=str(out)','out=out.decode("utf-8")')
++	code = code.replace('err=str(err)','err=err.decode("utf-8")')
++	return code
++
++@subst('Tools/d.py')
++def r11(code):
++	code = code.replace('ret.strip()', 'ret.strip().decode("utf-8")')
++	return code
++
++def fixdir(dir):
++	global all_modifs
++	for k in all_modifs:
++		for v in all_modifs[k]:
++			modif(os.path.join(dir, 'wafadmin'), k, v)
++	#print('substitutions finished')
++
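
A note on the py3kfixes.py module added above: rather than maintaining two
code bases, waf 1.5 rewrites its own sources in place when run under
Python 3. Each @subst-decorated function registers a textual transform for
one file (or '*' for all of them), and fixdir() applies the registered
transforms. A minimal standalone sketch of the same pattern (the file name,
fixer and registry names below are illustrative, not part of the patch):

  import os

  all_fixes = {}

  def fixer(filename):
      def register(fun):
          # Collect transforms per target file, in registration order
          all_fixes.setdefault(filename, []).append(fun)
          return fun
      return register

  @fixer('example.py')
  def fix_except_syntax(code):
      # py2 "except Error,e:" -> py3 "except Error as e:"
      return code.replace('Error,e:', 'Error as e:')

  def apply_fixes(dir):
      # Read each registered file once, run all of its transforms, write back
      for filename, funs in all_fixes.items():
          path = os.path.join(dir, filename)
          with open(path) as f:
              code = f.read()
          for fun in funs:
              code = fun(code)
          with open(path, 'w') as f:
              f.write(code)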

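The _communicate() method in the subprocess backport above multiplexes the
child's stdin/stdout/stderr with select() so that no single pipe can
deadlock the parent on a full buffer. A compact sketch of the same read
loop, in Python 3 syntax (the function name and buffer size are
illustrative):

  import os, select, subprocess

  def read_both(cmd):
      p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
      read_set = [p.stdout, p.stderr]
      chunks = {p.stdout: [], p.stderr: []}
      while read_set:
          # Block until at least one pipe has data or hits EOF
          rlist, _, _ = select.select(read_set, [], [])
          for f in rlist:
              data = os.read(f.fileno(), 1024)
              if not data:
                  # EOF on this pipe: close it and stop watching it
                  f.close()
                  read_set.remove(f)
              else:
                  chunks[f].append(data)
      p.wait()
      return b''.join(chunks[p.stdout]), b''.join(chunks[p.stderr])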

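Likewise, _handle_exitstatus() above unpacks the status word returned by
os.waitpid(): death by signal is reported as a negative signal number,
normal termination as the plain exit code. The same decoding in isolation
(illustrative helper name):

  import os

  def decode_status(sts):
      if os.WIFSIGNALED(sts):
          return -os.WTERMSIG(sts)    # child killed by a signal
      elif os.WIFEXITED(sts):
          return os.WEXITSTATUS(sts)  # normal termination: exit code
      raise RuntimeError("Unknown child exit status!")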