summaryrefslogtreecommitdiff
path: root/buildtools/wafadmin/3rdparty
diff options
context:
space:
mode:
Diffstat (limited to 'buildtools/wafadmin/3rdparty')
-rw-r--r--buildtools/wafadmin/3rdparty/ParallelDebug.py299
-rw-r--r--buildtools/wafadmin/3rdparty/batched_cc.py183
-rw-r--r--buildtools/wafadmin/3rdparty/boost.py343
-rw-r--r--buildtools/wafadmin/3rdparty/fluid.py27
-rw-r--r--buildtools/wafadmin/3rdparty/gccdeps.py128
-rw-r--r--buildtools/wafadmin/3rdparty/go.py111
-rw-r--r--buildtools/wafadmin/3rdparty/lru_cache.py97
-rw-r--r--buildtools/wafadmin/3rdparty/paranoid.py35
-rw-r--r--buildtools/wafadmin/3rdparty/swig.py190
-rw-r--r--buildtools/wafadmin/3rdparty/valadoc.py113
10 files changed, 1526 insertions, 0 deletions
diff --git a/buildtools/wafadmin/3rdparty/ParallelDebug.py b/buildtools/wafadmin/3rdparty/ParallelDebug.py
new file mode 100644
index 0000000000..9d0493e5e1
--- /dev/null
+++ b/buildtools/wafadmin/3rdparty/ParallelDebug.py
@@ -0,0 +1,299 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2007-2010 (ita)
+
+"""
+debugging helpers for parallel compilation, outputs
+an svg file in the build directory
+"""
+
+import os, time, sys, threading
+try: from Queue import Queue
+except: from queue import Queue
+import Runner, Options, Utils, Task, Logs
+from Constants import *
+
+#import random
+#random.seed(100)
+
+def set_options(opt):
+ opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv),
+ help='title for the svg diagram', dest='dtitle')
+ opt.add_option('--dwidth', action='store', type='int', help='diagram width', default=1000, dest='dwidth')
+ opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime')
+ opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband')
+ opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')
+
+# red #ff4d4d
+# green #4da74d
+# lila #a751ff
+
+color2code = {
+ 'GREEN' : '#4da74d',
+ 'YELLOW' : '#fefe44',
+ 'PINK' : '#a751ff',
+ 'RED' : '#cc1d1d',
+ 'BLUE' : '#6687bb',
+ 'CYAN' : '#34e2e2',
+
+}
+
+mp = {}
+info = [] # list of (text,color)
+
+def map_to_color(name):
+ if name in mp:
+ return mp[name]
+ try:
+ cls = Task.TaskBase.classes[name]
+ except KeyError:
+ return color2code['RED']
+ if cls.color in mp:
+ return mp[cls.color]
+ if cls.color in color2code:
+ return color2code[cls.color]
+ return color2code['RED']
+
+def loop(self):
+ while 1:
+ tsk=Runner.TaskConsumer.ready.get()
+ tsk.master.set_running(1, id(threading.currentThread()), tsk)
+ Runner.process_task(tsk)
+ tsk.master.set_running(-1, id(threading.currentThread()), tsk)
+Runner.TaskConsumer.loop = loop
+
+
+old_start = Runner.Parallel.start
+def do_start(self):
+ print Options.options
+ try:
+ Options.options.dband
+ except AttributeError:
+ raise ValueError('use def options(opt): opt.load("parallel_debug")!')
+
+ self.taskinfo = Queue()
+ old_start(self)
+ process_colors(self)
+Runner.Parallel.start = do_start
+
+def set_running(self, by, i, tsk):
+ self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by) )
+Runner.Parallel.set_running = set_running
+
+def name2class(name):
+ return name.replace(' ', '_').replace('.', '_')
+
+def process_colors(producer):
+ # first, cast the parameters
+ tmp = []
+ try:
+ while True:
+ tup = producer.taskinfo.get(False)
+ tmp.append(list(tup))
+ except:
+ pass
+
+ try:
+ ini = float(tmp[0][2])
+ except:
+ return
+
+ if not info:
+ seen = []
+ for x in tmp:
+ name = x[3]
+ if not name in seen:
+ seen.append(name)
+ else:
+ continue
+
+ info.append((name, map_to_color(name)))
+ info.sort(key=lambda x: x[0])
+
+ thread_count = 0
+ acc = []
+ for x in tmp:
+ thread_count += x[6]
+ acc.append("%d %d %f %r %d %d %d" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count))
+ f = open('pdebug.dat', 'w')
+ #Utils.write('\n'.join(acc))
+ f.write('\n'.join(acc))
+
+ tmp = [lst[:2] + [float(lst[2]) - ini] + lst[3:] for lst in tmp]
+
+ st = {}
+ for l in tmp:
+ if not l[0] in st:
+ st[l[0]] = len(st.keys())
+ tmp = [ [st[lst[0]]] + lst[1:] for lst in tmp ]
+ THREAD_AMOUNT = len(st.keys())
+
+ st = {}
+ for l in tmp:
+ if not l[1] in st:
+ st[l[1]] = len(st.keys())
+ tmp = [ [lst[0]] + [st[lst[1]]] + lst[2:] for lst in tmp ]
+
+
+ BAND = Options.options.dband
+
+ seen = {}
+ acc = []
+ for x in range(len(tmp)):
+ line = tmp[x]
+ id = line[1]
+
+ if id in seen:
+ continue
+ seen[id] = True
+
+ begin = line[2]
+ thread_id = line[0]
+ for y in range(x + 1, len(tmp)):
+ line = tmp[y]
+ if line[1] == id:
+ end = line[2]
+ #print id, thread_id, begin, end
+ #acc.append( ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) )
+ acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3]) )
+ break
+
+ if Options.options.dmaxtime < 0.1:
+ gwidth = 1
+ for x in tmp:
+ m = BAND * x[2]
+ if m > gwidth:
+ gwidth = m
+ else:
+ gwidth = BAND * Options.options.dmaxtime
+
+ ratio = float(Options.options.dwidth) / gwidth
+ gwidth = Options.options.dwidth
+
+ gheight = BAND * (THREAD_AMOUNT + len(info) + 1.5)
+
+ out = []
+
+ out.append("""<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>
+<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.0//EN\"
+\"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd\">
+<svg xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" version=\"1.0\"
+ x=\"%r\" y=\"%r\" width=\"%r\" height=\"%r\"
+ id=\"svg602\" xml:space=\"preserve\">
+
+<style type='text/css' media='screen'>
+ g.over rect { stroke:#FF0000; fill-opacity:0.4 }
+</style>
+
+<script type='text/javascript'><![CDATA[
+ var svg = document.getElementsByTagName('svg')[0];
+ var svgNS = svg.getAttribute('xmlns');
+ svg.addEventListener('mouseover',function(e){
+ var g = e.target.parentNode;
+ var x = document.getElementById('r_'+g.id);
+ if (x) {
+ g.setAttribute('class', g.getAttribute('class')+' over');
+ x.setAttribute('class', x.getAttribute('class')+' over');
+ showInfo(e, g.id);
+ }
+ },false);
+ svg.addEventListener('mouseout',function(e){
+ var g = e.target.parentNode;
+ var x = document.getElementById('r_'+g.id);
+ if (x) {
+ g.setAttribute('class',g.getAttribute('class').replace(' over',''));
+ x.setAttribute('class',x.getAttribute('class').replace(' over',''));
+ hideInfo(e);
+ }
+ },false);
+
+function showInfo(evt, txt) {
+ tooltip = document.getElementById('tooltip');
+
+ var t = document.getElementById('tooltiptext');
+ t.firstChild.data = txt;
+
+ var x = evt.clientX+10;
+ if (x > 200) { x -= t.getComputedTextLength() + 16; }
+ var y = evt.clientY+30;
+ tooltip.setAttribute("transform", "translate(" + x + "," + y + ")");
+ tooltip.setAttributeNS(null,"visibility","visible");
+
+ var r = document.getElementById('tooltiprect');
+ r.setAttribute('width', t.getComputedTextLength()+6)
+}
+
+
+function hideInfo(evt) {
+ tooltip = document.getElementById('tooltip');
+ tooltip.setAttributeNS(null,"visibility","hidden");
+}
+
+]]></script>
+
+<!-- inkscape requires a big rectangle or it will not export the pictures properly -->
+<rect
+ x='%r' y='%r'
+ width='%r' height='%r' z-index='10'
+ style=\"font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;\"
+ />\n
+
+""" % (0, 0, gwidth + 4, gheight + 4, 0, 0, gwidth + 4, gheight + 4))
+
+ # main title
+ if Options.options.dtitle:
+ out.append("""<text x="%d" y="%d" style="font-size:15px; text-anchor:middle; font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans">%s</text>
+""" % (gwidth/2, gheight - 5, Options.options.dtitle))
+
+ # the rectangles
+ groups = {}
+ for (x, y, w, h, clsname) in acc:
+ try:
+ groups[clsname].append((x, y, w, h))
+ except:
+ groups[clsname] = [(x, y, w, h)]
+
+ for cls in groups:
+
+ out.append("<g id='%s'>\n" % name2class(cls))
+
+ for (x, y, w, h) in groups[cls]:
+ out.append(""" <rect
+ x='%r' y='%r'
+ width='%r' height='%r' z-index='11'
+ style=\"font-size:10;fill:%s;fill-rule:evenodd;stroke:#000000;stroke-width:0.2px;\"
+ />\n""" % (2 + x*ratio, 2 + y, w*ratio, h, map_to_color(cls)))
+
+ out.append("</g>\n")
+
+ # output the caption
+ cnt = THREAD_AMOUNT
+
+ for (text, color) in info:
+ # caption box
+ b = BAND/2
+ out.append("""<g id='r_%s'><rect
+ x='%r' y='%r'
+ width='%r' height='%r'
+ style=\"font-size:10;fill:%s;fill-rule:evenodd;stroke:#000000;stroke-width:0.2px;\"
+ />\n""" % (name2class(text), 2 + BAND, 5 + (cnt + 0.5) * BAND, b, b, color))
+
+ # caption text
+ out.append("""<text
+ style="font-size:12px;font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans"
+ x="%r" y="%d">%s</text></g>\n""" % (2 + 2 * BAND, 5 + (cnt + 0.5) * BAND + 10, text))
+ cnt += 1
+
+ out.append("""
+<g transform="translate(0,0)" visibility="hidden" id="tooltip">
+ <rect id="tooltiprect" y="-15" x="-3" width="1" height="20" style="stroke:black;fill:#edefc2;stroke-width:1"/>
+ <text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;"> </text>
+</g>""")
+
+ out.append("\n</svg>")
+
+ #node = producer.bld.path.make_node('pdebug.svg')
+ f = open('pdebug.svg', 'w')
+ f.write("".join(out))
+
+
diff --git a/buildtools/wafadmin/3rdparty/batched_cc.py b/buildtools/wafadmin/3rdparty/batched_cc.py
new file mode 100644
index 0000000000..8e310745c6
--- /dev/null
+++ b/buildtools/wafadmin/3rdparty/batched_cc.py
@@ -0,0 +1,183 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006 (ita)
+
+"""
+Batched builds - compile faster
+instead of compiling object files one by one, c/c++ compilers are often able to compile at once:
+cc -c ../file1.c ../file2.c ../file3.c
+
+Files are output in the directory where the compiler is called, and dependencies are more difficult
+to track (do not run the command on all source files if only one file changes)
+
+As such, we act as if the files were compiled one by one, but no command is actually run:
+replace each cc/cpp Task by a TaskSlave
+A new task called TaskMaster collects the signatures from each slave and finds out the command-line
+to run.
+
+To set this up, the method ccroot::create_task is replaced by a new version, to enable batched builds
+it is only necessary to import this module in the configuration (no other change required)
+"""
+
+MAX_BATCH = 50
+MAXPARALLEL = False
+
+EXT_C = ['.c', '.cc', '.cpp', '.cxx']
+
+import os, threading
+import TaskGen, Task, ccroot, Build, Logs
+from TaskGen import extension, feature, before
+from Constants import *
+
+cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} -c ${SRCLST}'
+cc_fun = Task.compile_fun_noshell('batched_cc', cc_str)[0]
+
+cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} -c ${SRCLST}'
+cxx_fun = Task.compile_fun_noshell('batched_cxx', cxx_str)[0]
+
+count = 70000
+class batch_task(Task.Task):
+ color = 'RED'
+
+ after = 'cc cxx'
+ before = 'cc_link cxx_link static_link'
+
+ def __str__(self):
+ return '(batch compilation for %d slaves)\n' % len(self.slaves)
+
+ def __init__(self, *k, **kw):
+ Task.Task.__init__(self, *k, **kw)
+ self.slaves = []
+ self.inputs = []
+ self.hasrun = 0
+
+ global count
+ count += 1
+ self.idx = count
+
+ def add_slave(self, slave):
+ self.slaves.append(slave)
+ self.set_run_after(slave)
+
+ def runnable_status(self):
+ for t in self.run_after:
+ if not t.hasrun:
+ return ASK_LATER
+
+ for t in self.slaves:
+ #if t.executed:
+ if t.hasrun != SKIPPED:
+ return RUN_ME
+
+ return SKIP_ME
+
+ def run(self):
+ outputs = []
+ self.outputs = []
+
+ srclst = []
+ slaves = []
+ for t in self.slaves:
+ if t.hasrun != SKIPPED:
+ slaves.append(t)
+ srclst.append(t.inputs[0].abspath(self.env))
+
+ self.env.SRCLST = srclst
+ self.cwd = slaves[0].inputs[0].parent.abspath(self.env)
+
+ env = self.env
+ app = env.append_unique
+ cpppath_st = env['CPPPATH_ST']
+ env._CCINCFLAGS = env.CXXINCFLAGS = []
+
+ # local flags come first
+ # set the user-defined includes paths
+ for i in env['INC_PATHS']:
+ app('_CCINCFLAGS', cpppath_st % i.abspath())
+ app('_CXXINCFLAGS', cpppath_st % i.abspath())
+ app('_CCINCFLAGS', cpppath_st % i.abspath(env))
+ app('_CXXINCFLAGS', cpppath_st % i.abspath(env))
+
+ # set the library include paths
+ for i in env['CPPPATH']:
+ app('_CCINCFLAGS', cpppath_st % i)
+ app('_CXXINCFLAGS', cpppath_st % i)
+
+ if self.slaves[0].__class__.__name__ == 'cc':
+ ret = cc_fun(self)
+ else:
+ ret = cxx_fun(self)
+
+ if ret:
+ return ret
+
+ for t in slaves:
+ t.old_post_run()
+
+from TaskGen import extension, feature, after
+
+import cc, cxx
+def wrap(fun):
+ def foo(self, node):
+ # we cannot control the extension, this sucks
+ self.obj_ext = '.o'
+
+ task = fun(self, node)
+ if not getattr(self, 'masters', None):
+ self.masters = {}
+ self.allmasters = []
+
+ if not node.parent.id in self.masters:
+ m = self.masters[node.parent.id] = self.master = self.create_task('batch')
+ self.allmasters.append(m)
+ else:
+ m = self.masters[node.parent.id]
+ if len(m.slaves) > MAX_BATCH:
+ m = self.masters[node.parent.id] = self.master = self.create_task('batch')
+ self.allmasters.append(m)
+
+ m.add_slave(task)
+ return task
+ return foo
+
+c_hook = wrap(cc.c_hook)
+extension(cc.EXT_CC)(c_hook)
+
+cxx_hook = wrap(cxx.cxx_hook)
+extension(cxx.EXT_CXX)(cxx_hook)
+
+
+@feature('cprogram', 'cshlib', 'cstaticlib')
+@after('apply_link')
+def link_after_masters(self):
+ if getattr(self, 'allmasters', None):
+ for m in self.allmasters:
+ self.link_task.set_run_after(m)
+
+for c in ['cc', 'cxx']:
+ t = Task.TaskBase.classes[c]
+ def run(self):
+ pass
+
+ def post_run(self):
+ #self.executed=1
+ pass
+
+ def can_retrieve_cache(self):
+ if self.old_can_retrieve_cache():
+ for m in self.generator.allmasters:
+ try:
+ m.slaves.remove(self)
+ except ValueError:
+ pass #this task wasn't included in that master
+ return 1
+ else:
+ return None
+
+ setattr(t, 'oldrun', t.__dict__['run'])
+ setattr(t, 'run', run)
+ setattr(t, 'old_post_run', t.post_run)
+ setattr(t, 'post_run', post_run)
+ setattr(t, 'old_can_retrieve_cache', t.can_retrieve_cache)
+ setattr(t, 'can_retrieve_cache', can_retrieve_cache)
+
diff --git a/buildtools/wafadmin/3rdparty/boost.py b/buildtools/wafadmin/3rdparty/boost.py
new file mode 100644
index 0000000000..e690a4e274
--- /dev/null
+++ b/buildtools/wafadmin/3rdparty/boost.py
@@ -0,0 +1,343 @@
+#!/usr/bin/env python
+# encoding: utf-8
+#
+# partially based on boost.py written by Gernot Vormayr
+# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
+# modified by Bjoern Michaelsen, 2008
+# modified by Luca Fossati, 2008
+# rewritten for waf 1.5.1, Thomas Nagy, 2008
+#
+#def set_options(opt):
+# opt.tool_options('boost')
+# # ...
+#
+#def configure(conf):
+# # ... (e.g. conf.check_tool('g++'))
+# conf.check_tool('boost')
+# conf.check_boost(lib='signals filesystem', static='onlystatic', score_version=(-1000, 1000), tag_minscore=1000)
+#
+#def build(bld):
+# bld(source='main.c', target='bar', uselib="BOOST BOOST_SYSTEM")
+#
+#ISSUES:
+# * find_includes should be called only once!
+# * support mandatory
+
+######## boost update ###########
+## ITA: * the method get_boost_version_number does work
+## * the rest of the code has not really been tried
+# * make certain a demo is provided (in demos/adv for example)
+
+# TODO: bad and underdocumented code -> boost.py will be removed in waf 1.6 to be rewritten later
+
+import os.path, glob, types, re, sys
+import Configure, config_c, Options, Utils, Logs
+from Logs import warn, debug
+from Configure import conf
+
+boost_code = '''
+#include <iostream>
+#include <boost/version.hpp>
+int main() { std::cout << BOOST_VERSION << std::endl; }
+'''
+
+boost_libpath = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
+boost_cpppath = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
+
+STATIC_NOSTATIC = 'nostatic'
+STATIC_BOTH = 'both'
+STATIC_ONLYSTATIC = 'onlystatic'
+
+is_versiontag = re.compile('^\d+_\d+_?\d*$')
+is_threadingtag = re.compile('^mt$')
+is_abitag = re.compile('^[sgydpn]+$')
+is_toolsettag = re.compile('^(acc|borland|como|cw|dmc|darwin|gcc|hp_cxx|intel|kylix|vc|mgw|qcc|sun|vacpp)\d*$')
+is_pythontag=re.compile('^py[0-9]{2}$')
+
+def set_options(opt):
+ opt.add_option('--boost-includes', type='string', default='', dest='boostincludes', help='path to the boost directory where the includes are e.g. /usr/local/include/boost-1_35')
+ opt.add_option('--boost-libs', type='string', default='', dest='boostlibs', help='path to the directory where the boost libs are e.g. /usr/local/lib')
+
+def string_to_version(s):
+ version = s.split('.')
+ if len(version) < 3: return 0
+ return int(version[0])*100000 + int(version[1])*100 + int(version[2])
+
+def version_string(version):
+ major = version / 100000
+ minor = version / 100 % 1000
+ minor_minor = version % 100
+ if minor_minor == 0:
+ return "%d_%d" % (major, minor)
+ else:
+ return "%d_%d_%d" % (major, minor, minor_minor)
+
+def libfiles(lib, pattern, lib_paths):
+ result = []
+ for lib_path in lib_paths:
+ libname = pattern % ('boost_%s[!_]*' % lib)
+ result += glob.glob(os.path.join(lib_path, libname))
+ return result
+
+@conf
+def get_boost_version_number(self, dir):
+ """silently retrieve the boost version number"""
+ try:
+ return self.run_c_code(compiler='cxx', code=boost_code, includes=dir, execute=1, env=self.env.copy(), type='cprogram', compile_mode='cxx', compile_filename='test.cpp')
+ except Configure.ConfigurationError, e:
+ return -1
+
+def set_default(kw, var, val):
+ if not var in kw:
+ kw[var] = val
+
+def tags_score(tags, kw):
+ """
+ checks library tags
+
+ see http://www.boost.org/doc/libs/1_35_0/more/getting_started/unix-variants.html 6.1
+ """
+ score = 0
+ needed_tags = {
+ 'threading': kw['tag_threading'],
+ 'abi': kw['tag_abi'],
+ 'toolset': kw['tag_toolset'],
+ 'version': kw['tag_version'],
+ 'python': kw['tag_python']
+ }
+
+ if kw['tag_toolset'] is None:
+ v = kw['env']
+ toolset = v['CXX_NAME']
+ if v['CXX_VERSION']:
+ version_no = v['CXX_VERSION'].split('.')
+ toolset += version_no[0]
+ if len(version_no) > 1:
+ toolset += version_no[1]
+ needed_tags['toolset'] = toolset
+
+ found_tags = {}
+ for tag in tags:
+ if is_versiontag.match(tag): found_tags['version'] = tag
+ if is_threadingtag.match(tag): found_tags['threading'] = tag
+ if is_abitag.match(tag): found_tags['abi'] = tag
+ if is_toolsettag.match(tag): found_tags['toolset'] = tag
+ if is_pythontag.match(tag): found_tags['python'] = tag
+
+ for tagname in needed_tags.iterkeys():
+ if needed_tags[tagname] is not None and tagname in found_tags:
+ if re.compile(needed_tags[tagname]).match(found_tags[tagname]):
+ score += kw['score_' + tagname][0]
+ else:
+ score += kw['score_' + tagname][1]
+ return score
+
+@conf
+def validate_boost(self, kw):
+ ver = kw.get('version', '')
+
+ for x in 'min_version max_version version'.split():
+ set_default(kw, x, ver)
+
+ set_default(kw, 'lib', '')
+ kw['lib'] = Utils.to_list(kw['lib'])
+
+ set_default(kw, 'env', self.env)
+
+ set_default(kw, 'libpath', boost_libpath)
+ set_default(kw, 'cpppath', boost_cpppath)
+
+ for x in 'tag_threading tag_version tag_toolset'.split():
+ set_default(kw, x, None)
+ set_default(kw, 'tag_abi', '^[^d]*$')
+
+ set_default(kw, 'python', str(sys.version_info[0]) + str(sys.version_info[1]) )
+ set_default(kw, 'tag_python', '^py' + kw['python'] + '$')
+
+ set_default(kw, 'score_threading', (10, -10))
+ set_default(kw, 'score_abi', (10, -10))
+ set_default(kw, 'score_python', (10,-10))
+ set_default(kw, 'score_toolset', (1, -1))
+ set_default(kw, 'score_version', (100, -100))
+
+ set_default(kw, 'score_min', 0)
+ set_default(kw, 'static', STATIC_NOSTATIC)
+ set_default(kw, 'found_includes', False)
+ set_default(kw, 'min_score', 0)
+
+ set_default(kw, 'errmsg', 'not found')
+ set_default(kw, 'okmsg', 'ok')
+
+@conf
+def find_boost_includes(self, kw):
+ """
+ check every path in kw['cpppath'] for subdir
+ that either starts with boost- or is named boost.
+
+ Then the version is checked and selected accordingly to
+ min_version/max_version. The highest possible version number is
+ selected!
+
+ If no versiontag is set the versiontag is set accordingly to the
+ selected library and CPPPATH_BOOST is set.
+ """
+ boostPath = getattr(Options.options, 'boostincludes', '')
+ if boostPath:
+ boostPath = [os.path.normpath(os.path.expandvars(os.path.expanduser(boostPath)))]
+ else:
+ boostPath = Utils.to_list(kw['cpppath'])
+
+ min_version = string_to_version(kw.get('min_version', ''))
+ max_version = string_to_version(kw.get('max_version', '')) or (sys.maxint - 1)
+
+ version = 0
+ for include_path in boostPath:
+ boost_paths = [p for p in glob.glob(os.path.join(include_path, 'boost*')) if os.path.isdir(p)]
+ debug('BOOST Paths: %r' % boost_paths)
+ for path in boost_paths:
+ pathname = os.path.split(path)[-1]
+ ret = -1
+ if pathname == 'boost':
+ path = include_path
+ ret = self.get_boost_version_number(path)
+ elif pathname.startswith('boost-'):
+ ret = self.get_boost_version_number(path)
+ ret = int(ret)
+
+ if ret != -1 and ret >= min_version and ret <= max_version and ret > version:
+ boost_path = path
+ version = ret
+ if not version:
+ self.fatal('boost headers not found! (required version min: %s max: %s)'
+ % (kw['min_version'], kw['max_version']))
+ return False
+
+ found_version = version_string(version)
+ versiontag = '^' + found_version + '$'
+ if kw['tag_version'] is None:
+ kw['tag_version'] = versiontag
+ elif kw['tag_version'] != versiontag:
+ warn('boost header version %r and tag_version %r do not match!' % (versiontag, kw['tag_version']))
+ env = self.env
+ env['CPPPATH_BOOST'] = boost_path
+ env['BOOST_VERSION'] = found_version
+ self.found_includes = 1
+ ret = 'Version %s (%s)' % (found_version, boost_path)
+ return ret
+
+@conf
+def find_boost_library(self, lib, kw):
+
+ def find_library_from_list(lib, files):
+ lib_pattern = re.compile('.*boost_(.*?)\..*')
+ result = (None, None)
+ resultscore = kw['min_score'] - 1
+ for file in files:
+ m = lib_pattern.search(file, 1)
+ if m:
+ libname = m.group(1)
+ libtags = libname.split('-')[1:]
+ currentscore = tags_score(libtags, kw)
+ if currentscore > resultscore:
+ result = (libname, file)
+ resultscore = currentscore
+ return result
+
+ lib_paths = getattr(Options.options, 'boostlibs', '')
+ if lib_paths:
+ lib_paths = [os.path.normpath(os.path.expandvars(os.path.expanduser(lib_paths)))]
+ else:
+ lib_paths = Utils.to_list(kw['libpath'])
+
+ v = kw.get('env', self.env)
+
+ (libname, file) = (None, None)
+ if kw['static'] in [STATIC_NOSTATIC, STATIC_BOTH]:
+ st_env_prefix = 'LIB'
+ files = libfiles(lib, v['shlib_PATTERN'], lib_paths)
+ (libname, file) = find_library_from_list(lib, files)
+ if libname is None and kw['static'] in [STATIC_ONLYSTATIC, STATIC_BOTH]:
+ st_env_prefix = 'STATICLIB'
+ staticLibPattern = v['staticlib_PATTERN']
+ if self.env['CC_NAME'] == 'msvc':
+ staticLibPattern = 'lib' + staticLibPattern
+ files = libfiles(lib, staticLibPattern, lib_paths)
+ (libname, file) = find_library_from_list(lib, files)
+ if libname is not None:
+ v['LIBPATH_BOOST_' + lib.upper()] = [os.path.split(file)[0]]
+ if self.env['CC_NAME'] == 'msvc' and os.path.splitext(file)[1] == '.lib':
+ v[st_env_prefix + '_BOOST_' + lib.upper()] = ['libboost_'+libname]
+ else:
+ v[st_env_prefix + '_BOOST_' + lib.upper()] = ['boost_'+libname]
+ return
+ self.fatal('lib boost_' + lib + ' not found!')
+
+@conf
+def check_boost(self, *k, **kw):
+ """
+ This should be the main entry point
+
+- min_version
+- max_version
+- version
+- include_path
+- lib_path
+- lib
+- toolsettag - None or a regexp
+- threadingtag - None or a regexp
+- abitag - None or a regexp
+- versiontag - WARNING: you should rather use version or min_version/max_version
+- static - look for static libs (values:
+ 'nostatic' or STATIC_NOSTATIC - ignore static libs (default)
+ 'both' or STATIC_BOTH - find static libs, too
+ 'onlystatic' or STATIC_ONLYSTATIC - find only static libs
+- score_version
+- score_abi
+- scores_threading
+- score_toolset
+ * the scores are tuples (match_score, nomatch_score)
+ match_score is the added to the score if the tag is matched
+ nomatch_score is added when a tag is found and does not match
+- min_score
+ """
+
+ if not self.env['CXX']:
+ self.fatal('load a c++ compiler tool first, for example conf.check_tool("g++")')
+ self.validate_boost(kw)
+ ret = None
+ try:
+ if not kw.get('found_includes', None):
+ self.check_message_1(kw.get('msg_includes', 'boost headers'))
+ ret = self.find_boost_includes(kw)
+
+ except Configure.ConfigurationError, e:
+ if 'errmsg' in kw:
+ self.check_message_2(kw['errmsg'], 'YELLOW')
+ if 'mandatory' in kw:
+ if Logs.verbose > 1:
+ raise
+ else:
+ self.fatal('the configuration failed (see %r)' % self.log.name)
+ else:
+ if 'okmsg' in kw:
+ self.check_message_2(kw.get('okmsg_includes', ret))
+
+ for lib in kw['lib']:
+ self.check_message_1('library boost_'+lib)
+ try:
+ self.find_boost_library(lib, kw)
+ except Configure.ConfigurationError, e:
+ ret = False
+ if 'errmsg' in kw:
+ self.check_message_2(kw['errmsg'], 'YELLOW')
+ if 'mandatory' in kw:
+ if Logs.verbose > 1:
+ raise
+ else:
+ self.fatal('the configuration failed (see %r)' % self.log.name)
+ else:
+ if 'okmsg' in kw:
+ self.check_message_2(kw['okmsg'])
+
+ return ret
+
diff --git a/buildtools/wafadmin/3rdparty/fluid.py b/buildtools/wafadmin/3rdparty/fluid.py
new file mode 100644
index 0000000000..117edef720
--- /dev/null
+++ b/buildtools/wafadmin/3rdparty/fluid.py
@@ -0,0 +1,27 @@
+#!/usr/bin/python
+# encoding: utf-8
+# Grygoriy Fuchedzhy 2009
+
+"""
+Compile fluid files (fltk graphic library). Use the 'fluid' feature in conjunction with the 'cxx' feature.
+"""
+
+import Task
+from TaskGen import extension
+
+Task.simple_task_type('fluid', '${FLUID} -c -o ${TGT[0].abspath(env)} -h ${TGT[1].abspath(env)} ${SRC}', 'BLUE', shell=False, ext_out='.cxx')
+
+@extension('.fl')
+def fluid(self, node):
+ """add the .fl to the source list; the cxx file generated will be compiled when possible"""
+ cpp = node.change_ext('.cpp')
+ hpp = node.change_ext('.hpp')
+ self.create_task('fluid', node, [cpp, hpp])
+
+ if 'cxx' in self.features:
+ self.allnodes.append(cpp)
+
+def detect(conf):
+ fluid = conf.find_program('fluid', var='FLUID', mandatory=True)
+ conf.check_cfg(path='fltk-config', package='', args='--cxxflags --ldflags', uselib_store='FLTK', mandatory=True)
+
diff --git a/buildtools/wafadmin/3rdparty/gccdeps.py b/buildtools/wafadmin/3rdparty/gccdeps.py
new file mode 100644
index 0000000000..6600c9ca3b
--- /dev/null
+++ b/buildtools/wafadmin/3rdparty/gccdeps.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2008-2010 (ita)
+
+"""
+Execute the tasks with gcc -MD, read the dependencies from the .d file
+and prepare the dependency calculation for the next run
+"""
+
+import os, re, threading
+import Task, Logs, Utils, preproc
+from TaskGen import before, after, feature
+
+lock = threading.Lock()
+
+preprocessor_flag = '-MD'
+
+@feature('cc')
+@before('apply_core')
+def add_mmd_cc(self):
+ if self.env.get_flat('CCFLAGS').find(preprocessor_flag) < 0:
+ self.env.append_value('CCFLAGS', preprocessor_flag)
+
+@feature('cxx')
+@before('apply_core')
+def add_mmd_cxx(self):
+ if self.env.get_flat('CXXFLAGS').find(preprocessor_flag) < 0:
+ self.env.append_value('CXXFLAGS', preprocessor_flag)
+
+def scan(self):
+ "the scanner does not do anything initially"
+ nodes = self.generator.bld.node_deps.get(self.unique_id(), [])
+ names = []
+ return (nodes, names)
+
+re_o = re.compile("\.o$")
+re_src = re.compile("^(\.\.)[\\/](.*)$")
+
+def post_run(self):
+ # The following code is executed by threads, it is not safe, so a lock is needed...
+
+ if getattr(self, 'cached', None):
+ return Task.Task.post_run(self)
+
+ name = self.outputs[0].abspath(self.env)
+ name = re_o.sub('.d', name)
+ txt = Utils.readf(name)
+ #os.unlink(name)
+
+ txt = txt.replace('\\\n', '')
+
+ lst = txt.strip().split(':')
+ val = ":".join(lst[1:])
+ val = val.split()
+
+ nodes = []
+ bld = self.generator.bld
+
+ f = re.compile("^("+self.env.variant()+"|\.\.)[\\/](.*)$")
+ for x in val:
+ if os.path.isabs(x):
+
+ if not preproc.go_absolute:
+ continue
+
+ lock.acquire()
+ try:
+ node = bld.root.find_resource(x)
+ finally:
+ lock.release()
+ else:
+ g = re.search(re_src, x)
+ if g:
+ x = g.group(2)
+ lock.acquire()
+ try:
+ node = bld.bldnode.parent.find_resource(x)
+ finally:
+ lock.release()
+ else:
+ g = re.search(f, x)
+ if g:
+ x = g.group(2)
+ lock.acquire()
+ try:
+ node = bld.srcnode.find_resource(x)
+ finally:
+ lock.release()
+
+ if id(node) == id(self.inputs[0]):
+ # ignore the source file, it is already in the dependencies
+ # this way, successful config tests may be retrieved from the cache
+ continue
+
+ if not node:
+ raise ValueError('could not find %r for %r' % (x, self))
+ else:
+ nodes.append(node)
+
+ Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))
+
+ bld.node_deps[self.unique_id()] = nodes
+ bld.raw_deps[self.unique_id()] = []
+
+ try:
+ del self.cache_sig
+ except:
+ pass
+
+ Task.Task.post_run(self)
+
+import Constants, Utils
+def sig_implicit_deps(self):
+ try:
+ return Task.Task.sig_implicit_deps(self)
+ except Utils.WafError:
+ return Constants.SIG_NIL
+
+for name in 'cc cxx'.split():
+ try:
+ cls = Task.TaskBase.classes[name]
+ except KeyError:
+ pass
+ else:
+ cls.post_run = post_run
+ cls.scan = scan
+ cls.sig_implicit_deps = sig_implicit_deps
+
diff --git a/buildtools/wafadmin/3rdparty/go.py b/buildtools/wafadmin/3rdparty/go.py
new file mode 100644
index 0000000000..2d8df0d2b6
--- /dev/null
+++ b/buildtools/wafadmin/3rdparty/go.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# go.py - Waf tool for the Go programming language
+# By: Tom Wambold <tom5760@gmail.com>
+
+import platform, os
+
+import Task
+import Utils
+from TaskGen import feature, extension, after
+
+Task.simple_task_type('gocompile', '${GOC} ${GOCFLAGS} -o ${TGT} ${SRC}', shell=False)
+Task.simple_task_type('gopack', '${GOP} grc ${TGT} ${SRC}', shell=False)
+Task.simple_task_type('golink', '${GOL} ${GOLFLAGS} -o ${TGT} ${SRC}', shell=False)
+
+def detect(conf):
+
+ def set_def(var, val):
+ if not conf.env[var]:
+ conf.env[var] = val
+
+ goarch = os.getenv("GOARCH")
+
+ if goarch == '386':
+ set_def('GO_PLATFORM', 'i386')
+ elif goarch == 'amd64':
+ set_def('GO_PLATFORM', 'x86_64')
+ elif goarch == 'arm':
+ set_def('GO_PLATFORM', 'arm')
+ else:
+ set_def('GO_PLATFORM', platform.machine())
+
+ if conf.env.GO_PLATFORM == 'x86_64':
+ set_def('GO_COMPILER', '6g')
+ set_def('GO_LINKER', '6l')
+ set_def('GO_EXTENSION', '.6')
+ elif conf.env.GO_PLATFORM in ['i386', 'i486', 'i586', 'i686']:
+ set_def('GO_COMPILER', '8g')
+ set_def('GO_LINKER', '8l')
+ set_def('GO_EXTENSION', '.8')
+ elif conf.env.GO_PLATFORM == 'arm':
+ set_def('GO_COMPILER', '5g')
+ set_def('GO_LINKER', '5l')
+ set_def('GO_EXTENSION', '.5')
+
+ if not (conf.env.GO_COMPILER or conf.env.GO_LINKER or conf.env.GO_EXTENSION):
+ raise conf.fatal('Unsupported platform ' + platform.machine())
+
+ set_def('GO_PACK', 'gopack')
+ set_def('GO_PACK_EXTENSION', '.a')
+
+ conf.find_program(conf.env.GO_COMPILER, var='GOC', mandatory=True)
+ conf.find_program(conf.env.GO_LINKER, var='GOL', mandatory=True)
+ conf.find_program(conf.env.GO_PACK, var='GOP', mandatory=True)
+ conf.find_program('cgo', var='CGO', mandatory=True)
+
+@extension('.go')
+def compile_go(self, node):
+ try:
+ self.go_nodes.append(node)
+ except AttributeError:
+ self.go_nodes = [node]
+
+@feature('go')
+@after('apply_core')
+def apply_compile_go(self):
+ try:
+ nodes = self.go_nodes
+ except AttributeError:
+ self.go_compile_task = None
+ else:
+ self.go_compile_task = self.create_task('gocompile',
+ nodes,
+ [self.path.find_or_declare(self.target + self.env.GO_EXTENSION)])
+
+@feature('gopackage', 'goprogram')
+@after('apply_compile_go')
+def apply_goinc(self):
+ if not getattr(self, 'go_compile_task', None):
+ return
+
+ names = self.to_list(getattr(self, 'uselib_local', []))
+ for name in names:
+ obj = self.name_to_obj(name)
+ if not obj:
+ raise Utils.WafError('object %r was not found in uselib_local '
+ '(required by %r)' % (lib_name, self.name))
+ obj.post()
+ self.go_compile_task.set_run_after(obj.go_package_task)
+ self.go_compile_task.dep_nodes.extend(obj.go_package_task.outputs)
+ self.env.append_unique('GOCFLAGS', '-I' + obj.path.abspath(obj.env))
+ self.env.append_unique('GOLFLAGS', '-L' + obj.path.abspath(obj.env))
+
+@feature('gopackage')
+@after('apply_goinc')
+def apply_gopackage(self):
+ self.go_package_task = self.create_task('gopack',
+ self.go_compile_task.outputs[0],
+ self.path.find_or_declare(self.target + self.env.GO_PACK_EXTENSION))
+ self.go_package_task.set_run_after(self.go_compile_task)
+ self.go_package_task.dep_nodes.extend(self.go_compile_task.outputs)
+
+@feature('goprogram')
+@after('apply_goinc')
+def apply_golink(self):
+ self.go_link_task = self.create_task('golink',
+ self.go_compile_task.outputs[0],
+ self.path.find_or_declare(self.target))
+ self.go_link_task.set_run_after(self.go_compile_task)
+ self.go_link_task.dep_nodes.extend(self.go_compile_task.outputs)
+
diff --git a/buildtools/wafadmin/3rdparty/lru_cache.py b/buildtools/wafadmin/3rdparty/lru_cache.py
new file mode 100644
index 0000000000..5b00abc29b
--- /dev/null
+++ b/buildtools/wafadmin/3rdparty/lru_cache.py
@@ -0,0 +1,97 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy 2011
+
+import os, shutil, re
+import Options, Build, Logs
+
+"""
+Apply a least recently used policy to the Waf cache.
+
+For performance reasons, it is called after the build is complete.
+
We assume that the folders are written atomically
+
+Do export WAFCACHE=/tmp/foo-xyz where xyz represents the cache size in megabytes
+If missing, the default cache size will be set to 10GB
+"""
+
# extracts the trailing digits from the WAFCACHE folder name, e.g. foo-123 -> 123
# (sweep() compiles its own identical local copy of this pattern)
re_num = re.compile('[a-zA-Z_]+(\d+)')

CACHESIZE = 10*1024*1024*1024 # in bytes
CLEANRATIO = 0.8  # after trimming, keep the total below CACHESIZE * CLEANRATIO
DIRSIZE = 4096  # assumed on-disk size of a directory entry, in bytes
+
def compile(self):
	"""Replacement for BuildContext.compile.

	Ensures the global cache folder exists before building, runs the
	original compile (saved as raw_compile), and always sweeps the cache
	afterwards when the global cache is enabled.
	"""
	if Options.cache_global and not Options.options.nocache:
		try:
			os.makedirs(Options.cache_global)
		except OSError:
			# directory already exists (or was created concurrently);
			# the bare 'except:' here also hid KeyboardInterrupt et al.
			pass

	try:
		self.raw_compile()
	finally:
		# sweep even when the build failed, so the cache never grows unbounded
		if Options.cache_global and not Options.options.nocache:
			self.sweep()
+
def sweep(self):
	"""Trim the global cache with a least-recently-used policy.

	If the total size of the cache entries exceeds CACHESIZE, the oldest
	entries (by folder mtime) are removed until the total drops below
	CACHESIZE * CLEANRATIO. Called from compile() after every build.
	"""
	global CACHESIZE
	CACHEDIR = Options.cache_global

	# get the cache max size from the WAFCACHE filename
	# NOTE(review): the module docstring says the suffix is in megabytes,
	# but the digits are used directly as a byte count here - confirm the unit
	re_num = re.compile('[a-zA-Z_]+(\d+)')
	val = re_num.sub('\\1', os.path.basename(Options.cache_global))
	try:
		CACHESIZE = int(val)
	except:
		# no usable numeric suffix: keep the 10GB default
		pass

	# map folder names to [timestamp, size]
	flist = {}
	for x in os.listdir(CACHEDIR):
		j = os.path.join(CACHEDIR, x)
		if os.path.isdir(j) and len(x) == 32: # dir names are md5 hexdigests
			flist[x] = [os.stat(j).st_mtime, 0]

	# second pass: accumulate the size of each entry
	for (x, v) in flist.items():
		cnt = DIRSIZE # each entry takes 4kB
		d = os.path.join(CACHEDIR, x)
		for k in os.listdir(d):
			cnt += os.stat(os.path.join(d, k)).st_size
		flist[x][1] = cnt

	total = sum([x[1] for x in flist.values()])
	Logs.debug('lru: Cache size is %r' % total)

	if total >= CACHESIZE:
		Logs.debug('lru: Trimming the cache since %r > %r' % (total, CACHESIZE))

		# make a list to sort the folders by timestamp
		lst = [(p, v[0], v[1]) for (p, v) in flist.items()]
		lst.sort(key=lambda x: x[1]) # sort by timestamp
		lst.reverse()

		# pop() takes from the end, so the oldest entries go first
		while total >= CACHESIZE * CLEANRATIO:
			(k, t, s) = lst.pop()
			p = os.path.join(CACHEDIR, k)
			v = p + '.del'
			# rename first: only one process can win the rename, so the
			# rmtree below cannot race with another sweeping process
			try:
				os.rename(p, v)
			except:
				# someone already did it
				pass
			else:
				try:
					shutil.rmtree(v)
				except:
					# this should not happen, but who knows?
					Logs.warn('If you ever see this message, report it (%r)' % v)
			# count the entry as freed even if the rename lost the race:
			# the winner is deleting it anyway
			total -= s
			del flist[k]
	Logs.debug('lru: Total at the end %r' % total)
+
# monkey-patch the build context: keep the original compile available as
# raw_compile and install the cache-sweeping wrapper defined above
Build.BuildContext.raw_compile = Build.BuildContext.compile
Build.BuildContext.compile = compile
Build.BuildContext.sweep = sweep
+
diff --git a/buildtools/wafadmin/3rdparty/paranoid.py b/buildtools/wafadmin/3rdparty/paranoid.py
new file mode 100644
index 0000000000..ead64ea5c3
--- /dev/null
+++ b/buildtools/wafadmin/3rdparty/paranoid.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# ita 2010
+
+import Logs, Utils, Build, Task
+
def say(txt):
	"""Print a message through the waf logger; replaced below by a
	cowsay-backed version when the program is available."""
	Logs.warn("^o^: %s" % txt)

try:
	# locate cowsay; any failure (program missing, no shell) keeps the fallback
	ret = Utils.cmd_output('which cowsay 2> /dev/null').strip()
except Exception:
	# was 'except Exception, e': Python-2-only syntax with an unused binding
	pass
else:
	def say(txt):
		f = Utils.cmd_output([ret, txt])
		Utils.pprint('PINK', f)

say('you make the errors, we detect them')
+
def check_task_classes(self):
	"""Warn about Task subclasses that declare no precedence constraints.

	The original version iterated Task.TaskBase.classes directly, which
	yields the class *names* (dict keys); 'isinstance(x, Task.Task)' on a
	string was therefore always False and the checks never ran. It also
	referenced an undefined variable 'cls' and passed no argument to the
	'%s' placeholders in the messages.
	"""
	for name, cls in Task.TaskBase.classes.items():
		if not issubclass(cls, Task.Task):
			continue
		# a class with neither ext_in nor before has no input-side ordering
		if not (getattr(cls, 'ext_in', None) or getattr(cls, 'before', None)):
			say('class %s has no precedence constraints (ext_in/before)' % name)
		if not (getattr(cls, 'ext_out', None) or getattr(cls, 'after', None)):
			say('class %s has no precedence constraints (ext_out/after)' % name)
+
# keep a reference to the original compile, then install the checking wrapper
comp = Build.BuildContext.compile

def compile(self):
	"""Run the paranoid task-class checks (unless 'magic' is set), then build."""
	if getattr(self, 'magic', None):
		return comp(self)
	check_task_classes(self)
	return comp(self)

Build.BuildContext.compile = compile
+
diff --git a/buildtools/wafadmin/3rdparty/swig.py b/buildtools/wafadmin/3rdparty/swig.py
new file mode 100644
index 0000000000..c0a4108700
--- /dev/null
+++ b/buildtools/wafadmin/3rdparty/swig.py
@@ -0,0 +1,190 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+# Petar Forai
+# Thomas Nagy 2008
+
+import re
+import Task, Utils, Logs
+from TaskGen import extension
+from Configure import conf
+import preproc
+
+"""
Welcome to the hell of adding tasks dynamically
+
+swig interface files may be created at runtime, the module name may be unknown in advance
+
rev 5859 is much simpler
+"""
+
# file extensions that trigger the swig task
SWIG_EXTS = ['.swig', '.i']

# command line for the swig wrapper-generation step
swig_str = '${SWIG} ${SWIGFLAGS} ${_CCINCFLAGS} ${_CXXINCFLAGS} ${_CCDEFFLAGS} ${_CXXDEFFLAGS} ${SRC}'
# the class is kept in 'cls' so runnable_status/scan can be attached below
cls = Task.simple_task_type('swig', swig_str, color='BLUE', ext_in='.i .h', ext_out='.o .c .cxx', shell=False)
+
def runnable_status(self):
	"""Decide whether the swig task may run, creating its outputs lazily.

	On the first evaluation: extract the swig module name from the
	interface file (unless set explicitly), schedule the C/C++ wrapper
	compilation via swig_c, and register language-specific outputs through
	the swig_langs handlers.
	"""
	for t in self.run_after:
		if not t.hasrun:
			# was a bare ASK_LATER: this module never imports Constants, so
			# the name was undefined; Task re-exports the constant
			return Task.ASK_LATER

	if not getattr(self, 'init_outputs', None):
		self.init_outputs = True
		if not getattr(self, 'module', None):
			# search the module name in the interface file itself
			txt = self.inputs[0].read(self.env)
			m = re_module.search(txt)
			if not m:
				raise ValueError("could not find the swig module name")
			self.module = m.group(1)

		swig_c(self)

		# add the language-specific output files as nodes
		# call funs in the dict swig_langs
		for x in self.env['SWIGFLAGS']:
			# obtain the language: '-python' -> 'python'
			x = x[1:]
			try:
				fun = swig_langs[x]
			except KeyError:
				pass
			else:
				fun(self)

	return Task.Task.runnable_status(self)
setattr(cls, 'runnable_status', runnable_status)
+
# '%module name' or '%module(options) name' -> captures the module name
re_module = re.compile('%module(?:\s*\(.*\))?\s+(.+)', re.M)

# stricter single-word %module matcher
re_1 = re.compile(r'^%module.*?\s+([\w]+)\s*?$', re.M)
# swig '%include "..."' dependencies
re_2 = re.compile('%include "(.*)"', re.M)
# C/C++ '#include "..."' dependencies
re_3 = re.compile('#include "(.*)"', re.M)
+
def scan(self):
	"""Scan for swig dependencies: climb through the .i files and the
	project headers they reference, returning the nodes found.

	Returns a (nodes, names) tuple as expected by the waf scanner API;
	the names list is always empty here.
	"""
	env = self.env

	lst_src = []

	# breadth-first walk over the interface files, node.id guards against
	# visiting the same file twice
	seen = []
	to_see = [self.inputs[0]]

	while to_see:
		node = to_see.pop(0)
		if node.id in seen:
			continue
		seen.append(node.id)
		lst_src.append(node)

		# read the file and normalise it with the preprocessor helpers
		code = node.read(env)
		code = preproc.re_nl.sub('', code)
		code = preproc.re_cpp.sub(preproc.repl, code)

		# find .i files and project headers
		names = re_2.findall(code) + re_3.findall(code)
		for n in names:
			for d in self.generator.env.INC_PATHS + [node.parent]:
				u = d.find_resource(n)
				if u:
					to_see.append(u)
					break
			else:
				# not found in any include path nor next to the including file
				Logs.warn('could not find %r' % n)

	# list of nodes this one depends on, and module name if present
	if Logs.verbose:
		Logs.debug('deps: deps for %s: %s' % (str(self), str(lst_src)))
	return (lst_src, [])
cls.scan = scan
+
# registry of per-language output handlers, keyed by language name
swig_langs = {}

def swig(fun):
	"""Decorator: register *fun* under its name minus the 'swig_' part.

	Note: nothing is returned, so the decorated module attribute becomes
	None; the handlers are only ever looked up through swig_langs.
	"""
	lang = fun.__name__.replace('swig_', '')
	swig_langs[lang] = fun
+
def swig_c(self):
	"""Declare the generated C/C++ wrapper, schedule its compilation, and
	make swig write its output there by appending '-o <path>' to SWIGFLAGS.

	Raises Utils.WafError when the task generator has no matching c/cxx hook.
	"""
	ext = '.swigwrap_%d.c' % self.generator.idx
	flags = self.env['SWIGFLAGS']
	if '-c++' in flags:
		# '-c++' makes swig emit C++: turn '.c' into '.cxx'
		ext += 'xx'
	out_node = self.inputs[0].parent.find_or_declare(self.module + ext)

	# pick the compilation hook matching the wrapper language
	try:
		if '-c++' in flags:
			fun = self.generator.cxx_hook
		else:
			fun = self.generator.c_hook
	except AttributeError:
		raise Utils.WafError('No c%s compiler was found to process swig files' % ('-c++' in flags and '++' or ''))

	task = fun(out_node)
	task.set_run_after(self)

	# inject the new task into the running scheduler so it is executed
	# during this very build
	ge = self.generator.bld.generator
	ge.outstanding.insert(0, task)
	ge.total += 1

	# feed the wrapper object into the link step, if any
	try:
		ltask = self.generator.link_task
	except AttributeError:
		pass
	else:
		ltask.inputs.append(task.outputs[0])

	self.outputs.append(out_node)

	# only add '-o' once, even if swig_c runs for several interface files
	if not '-o' in self.env['SWIGFLAGS']:
		self.env.append_value('SWIGFLAGS', '-o')
		self.env.append_value('SWIGFLAGS', self.outputs[0].abspath(self.env))
+
@swig
def swig_python(tsk):
	"""Register the generated Python wrapper (<module>.py) as a task output."""
	parent = tsk.inputs[0].parent
	tsk.set_outputs(parent.find_or_declare(tsk.module + '.py'))
+
@swig
def swig_ocaml(tsk):
	"""Register the generated OCaml wrappers (<module>.ml/.mli) as outputs."""
	parent = tsk.inputs[0].parent
	for ext in ('.ml', '.mli'):
		tsk.set_outputs(parent.find_or_declare(tsk.module + ext))
+
@extension(SWIG_EXTS)
def i_file(self, node):
	"""Create a swig task for an interface file and record the swig flags.

	The '-outdir' option is now added to the flag list *before* it is
	pushed into the environment; previously it was appended afterwards, so
	whether it ever reached SWIGFLAGS depended on append_value sharing the
	very same list object.
	"""
	# the task instance
	tsk = self.create_task('swig')
	tsk.set_inputs(node)
	tsk.module = getattr(self, 'swig_module', None)

	flags = self.to_list(getattr(self, 'swig_flags', []))
	if not '-outdir' in flags:
		flags.append('-outdir')
		flags.append(node.parent.abspath(self.env))
	self.env.append_value('SWIGFLAGS', flags)
+
@conf
def check_swig_version(conf, minver=None):
	"""Check for a minimum swig version like conf.check_swig_version('1.3.28')
	or conf.check_swig_version((1,3,28)); with no argument just report the
	detected version.

	Returns True when the requirement is satisfied (or no minimum given).
	"""
	reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)

	swig_out = Utils.cmd_output('%s -version' % conf.env['SWIG'])

	swigver = [int(s) for s in reg_swig.findall(swig_out)[0].split('.')]
	# accept '1.3.28' strings and (1,3,28) tuples alike
	if isinstance(minver, basestring):
		minver = [int(s) for s in minver.split(".")]
	if isinstance(minver, tuple):
		minver = [int(s) for s in minver]
	result = (minver is None) or (minver[:3] <= swigver[:3])
	swigver_full = '.'.join(map(str, swigver))
	if result:
		conf.env['SWIG_VERSION'] = swigver_full
	if minver is None:
		conf.check_message_custom('swig version', '', swigver_full)
	else:
		# build the display string only when a minimum exists: the old code
		# joined map(str, None) unconditionally and raised TypeError for
		# the documented no-argument call
		minver_str = '.'.join(map(str, minver))
		conf.check_message('swig version', '>= %s' % (minver_str,), result, option=swigver_full)
	return result
+
def detect(conf):
	"""Configure step: locate the swig binary and store it as conf.env.SWIG."""
	# the return value was previously bound to an unused local variable
	conf.find_program('swig', var='SWIG', mandatory=True)
+
diff --git a/buildtools/wafadmin/3rdparty/valadoc.py b/buildtools/wafadmin/3rdparty/valadoc.py
new file mode 100644
index 0000000000..d0a9fe80ed
--- /dev/null
+++ b/buildtools/wafadmin/3rdparty/valadoc.py
@@ -0,0 +1,113 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+# Nicolas Joseph 2009
+
+from fnmatch import fnmatchcase
+import os, os.path, re, stat
+import Task, Utils, Node, Constants
+from TaskGen import feature, extension, after
+from Logs import debug, warn, error
+
+VALADOC_STR = '${VALADOC}'
+
class valadoc_task(Task.Task):
	"""Task running valadoc to generate API documentation for vala sources."""

	vars = ['VALADOC', 'VALADOCFLAGS']
	color = 'BLUE'
	after = 'cxx_link cc_link'
	quiet = True

	# class-level defaults; process_valadoc() overrides them per instance
	output_dir = ''
	doclet = ''
	package_name = ''
	package_version = ''
	files = []  # shared default, only read when never assigned per-instance
	protected = True
	private = False
	inherit = False
	deps = False
	enable_non_null_experimental = False
	force = False

	def runnable_status(self):
		# NOTE(review): unconditionally "ready" - presumably meant to force
		# regeneration on every build; waf tasks normally return the
		# RUN_ME/SKIP_ME constants here - confirm
		return True

	def run(self):
		"""Assemble and execute the valadoc command line.

		Returns the exec_command exit status, or -1 when valadoc is missing.
		"""
		if self.env['VALADOC']:
			if not self.env['VALADOCFLAGS']:
				self.env['VALADOCFLAGS'] = ''
			cmd = [Utils.subst_vars(VALADOC_STR, self.env)]
			cmd.append ('-o %s' % self.output_dir)
			if getattr(self, 'doclet', None):
				cmd.append ('--doclet %s' % self.doclet)
			cmd.append ('--package-name %s' % self.package_name)
			# was getattr(self, 'version', None): process_valadoc sets
			# 'package_version', so --package-version was never emitted
			if getattr(self, 'package_version', None):
				cmd.append ('--package-version %s' % self.package_version)
			if getattr(self, 'packages', None):
				for package in self.packages:
					cmd.append ('--pkg %s' % package)
			if getattr(self, 'vapi_dirs', None):
				for vapi_dir in self.vapi_dirs:
					cmd.append ('--vapidir %s' % vapi_dir)
			if not getattr(self, 'protected', None):
				cmd.append ('--no-protected')
			if getattr(self, 'private', None):
				cmd.append ('--private')
			if getattr(self, 'inherit', None):
				cmd.append ('--inherit')
			if getattr(self, 'deps', None):
				cmd.append ('--deps')
			if getattr(self, 'enable_non_null_experimental', None):
				cmd.append ('--enable-non-null-experimental')
			if getattr(self, 'force', None):
				cmd.append ('--force')
			cmd.append (' '.join ([x.relpath_gen (self.generator.bld.bldnode) for x in self.files]))
			return self.generator.bld.exec_command(' '.join(cmd))
		else:
			error ('You must install valadoc <http://live.gnome.org/Valadoc> for generate the API documentation')
			return -1
+
@feature('valadoc')
def process_valadoc(self):
	"""Create/configure the valadoc task from the task generator attributes.

	output_dir, doclet, package_name and files are mandatory; the original
	code constructed Utils.WafError for missing values without raising it,
	so configuration errors were silently ignored.

	Raises Utils.WafError when a mandatory attribute is missing.
	"""
	task = getattr(self, 'task', None)
	if not task:
		task = self.create_task('valadoc')
		self.task = task
	if getattr(self, 'output_dir', None):
		task.output_dir = self.output_dir
	else:
		raise Utils.WafError('no output directory')
	if getattr(self, 'doclet', None):
		task.doclet = self.doclet
	else:
		raise Utils.WafError('no doclet directory')
	if getattr(self, 'package_name', None):
		task.package_name = self.package_name
	else:
		raise Utils.WafError('no package name')
	if getattr(self, 'package_version', None):
		task.package_version = self.package_version
	if getattr(self, 'packages', None):
		task.packages = Utils.to_list(self.packages)
	if getattr(self, 'vapi_dirs', None):
		task.vapi_dirs = Utils.to_list(self.vapi_dirs)
	if getattr(self, 'files', None):
		task.files = self.files
	else:
		raise Utils.WafError('no input file')
	if getattr(self, 'protected', None):
		task.protected = self.protected
	if getattr(self, 'private', None):
		task.private = self.private
	if getattr(self, 'inherit', None):
		task.inherit = self.inherit
	if getattr(self, 'deps', None):
		task.deps = self.deps
	if getattr(self, 'enable_non_null_experimental', None):
		task.enable_non_null_experimental = self.enable_non_null_experimental
	if getattr(self, 'force', None):
		task.force = self.force
+
def detect(conf):
	"""Configure step: look for the optional 'valadoc' program."""
	conf.find_program('valadoc', mandatory=False, var='VALADOC')
+