diff options
Diffstat (limited to 'buildtools/wafadmin/Tools')
48 files changed, 9270 insertions, 0 deletions
diff --git a/buildtools/wafadmin/Tools/__init__.py b/buildtools/wafadmin/Tools/__init__.py new file mode 100644 index 0000000000..bc6ca230b5 --- /dev/null +++ b/buildtools/wafadmin/Tools/__init__.py @@ -0,0 +1,4 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006 (ita) + diff --git a/buildtools/wafadmin/Tools/ar.py b/buildtools/wafadmin/Tools/ar.py new file mode 100644 index 0000000000..af9b17fd6e --- /dev/null +++ b/buildtools/wafadmin/Tools/ar.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006-2008 (ita) +# Ralf Habacker, 2006 (rh) + +"ar and ranlib" + +import os, sys +import Task, Utils +from Configure import conftest + +ar_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}' +cls = Task.simple_task_type('static_link', ar_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False) +cls.maxjobs = 1 +cls.install = Utils.nada + +# remove the output in case it already exists +old = cls.run +def wrap(self): + try: os.remove(self.outputs[0].abspath(self.env)) + except OSError: pass + return old(self) +setattr(cls, 'run', wrap) + +def detect(conf): + conf.find_program('ar', var='AR') + conf.find_program('ranlib', var='RANLIB') + conf.env.ARFLAGS = 'rcs' + +@conftest +def find_ar(conf): + v = conf.env + conf.check_tool('ar') + if not v['AR']: conf.fatal('ar is required for static libraries - not found') + + diff --git a/buildtools/wafadmin/Tools/bison.py b/buildtools/wafadmin/Tools/bison.py new file mode 100644 index 0000000000..49c6051873 --- /dev/null +++ b/buildtools/wafadmin/Tools/bison.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python +# encoding: utf-8 +# John O'Meara, 2006 +# Thomas Nagy 2009 + +"Bison processing" + +import Task +from TaskGen import extension + +bison = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}' +cls = Task.simple_task_type('bison', bison, 'GREEN', ext_in='.yc .y .yy', ext_out='.c .cxx .h .l', shell=False) + +@extension(['.y', '.yc', '.yy']) +def big_bison(self, node): + 
"""when it becomes complicated (unlike flex), the old recipes work better (cwd)""" + has_h = '-d' in self.env['BISONFLAGS'] + + outs = [] + if node.name.endswith('.yc'): + outs.append(node.change_ext('.tab.cc')) + if has_h: + outs.append(node.change_ext('.tab.hh')) + else: + outs.append(node.change_ext('.tab.c')) + if has_h: + outs.append(node.change_ext('.tab.h')) + + tsk = self.create_task('bison', node, outs) + tsk.cwd = node.bld_dir(tsk.env) + + # and the c/cxx file must be compiled too + self.allnodes.append(outs[0]) + +def detect(conf): + bison = conf.find_program('bison', var='BISON', mandatory=True) + conf.env['BISONFLAGS'] = '-d' + diff --git a/buildtools/wafadmin/Tools/cc.py b/buildtools/wafadmin/Tools/cc.py new file mode 100644 index 0000000000..903a1c5038 --- /dev/null +++ b/buildtools/wafadmin/Tools/cc.py @@ -0,0 +1,100 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006 (ita) + +"Base for c programs/libraries" + +import os +import TaskGen, Build, Utils, Task +from Logs import debug +import ccroot +from TaskGen import feature, before, extension, after + +g_cc_flag_vars = [ +'CCDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', +'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH', +'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CCDEFINES'] + +EXT_CC = ['.c'] + +g_cc_type_vars = ['CCFLAGS', 'LINKFLAGS'] + +# TODO remove in waf 1.6 +class cc_taskgen(ccroot.ccroot_abstract): + pass + +@feature('cc') +@before('apply_type_vars') +@after('default_cc') +def init_cc(self): + self.p_flag_vars = set(self.p_flag_vars).union(g_cc_flag_vars) + self.p_type_vars = set(self.p_type_vars).union(g_cc_type_vars) + + if not self.env['CC_NAME']: + raise Utils.WafError("At least one compiler (gcc, ..) 
must be selected") + +@feature('cc') +@after('apply_incpaths') +def apply_obj_vars_cc(self): + """after apply_incpaths for INC_PATHS""" + env = self.env + app = env.append_unique + cpppath_st = env['CPPPATH_ST'] + + # local flags come first + # set the user-defined includes paths + for i in env['INC_PATHS']: + app('_CCINCFLAGS', cpppath_st % i.bldpath(env)) + app('_CCINCFLAGS', cpppath_st % i.srcpath(env)) + + # set the library include paths + for i in env['CPPPATH']: + app('_CCINCFLAGS', cpppath_st % i) + +@feature('cc') +@after('apply_lib_vars') +def apply_defines_cc(self): + """after uselib is set for CCDEFINES""" + self.defines = getattr(self, 'defines', []) + lst = self.to_list(self.defines) + self.to_list(self.env['CCDEFINES']) + milst = [] + + # now process the local defines + for defi in lst: + if not defi in milst: + milst.append(defi) + + # CCDEFINES_ + libs = self.to_list(self.uselib) + for l in libs: + val = self.env['CCDEFINES_'+l] + if val: milst += val + self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]] + y = self.env['CCDEFINES_ST'] + self.env.append_unique('_CCDEFFLAGS', [y%x for x in milst]) + +@extension(EXT_CC) +def c_hook(self, node): + # create the compilation task: cpp or cc + if getattr(self, 'obj_ext', None): + obj_ext = self.obj_ext + else: + obj_ext = '_%d.o' % self.idx + + task = self.create_task('cc', node, node.change_ext(obj_ext)) + try: + self.compiled_tasks.append(task) + except AttributeError: + raise Utils.WafError('Have you forgotten to set the feature "cc" on %s?' 
% str(self)) + return task + +cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}' +cls = Task.simple_task_type('cc', cc_str, 'GREEN', ext_out='.o', ext_in='.c', shell=False) +cls.scan = ccroot.scan +cls.vars.append('CCDEPS') + +link_str = '${LINK_CC} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}' +cls = Task.simple_task_type('cc_link', link_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False) +cls.maxjobs = 1 +cls.install = Utils.nada + diff --git a/buildtools/wafadmin/Tools/ccroot.py b/buildtools/wafadmin/Tools/ccroot.py new file mode 100644 index 0000000000..f54c82f186 --- /dev/null +++ b/buildtools/wafadmin/Tools/ccroot.py @@ -0,0 +1,629 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2005-2008 (ita) + +"base for all c/c++ programs and libraries" + +import os, sys, re +import TaskGen, Task, Utils, preproc, Logs, Build, Options +from Logs import error, debug, warn +from Utils import md5 +from TaskGen import taskgen, after, before, feature +from Constants import * +from Configure import conftest +try: + from cStringIO import StringIO +except ImportError: + from io import StringIO + +import config_c # <- necessary for the configuration, do not touch + +USE_TOP_LEVEL = False + +def get_cc_version(conf, cc, gcc=False, icc=False): + + cmd = cc + ['-dM', '-E', '-'] + try: + p = Utils.pproc.Popen(cmd, stdin=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE) + p.stdin.write('\n') + out = p.communicate()[0] + except: + conf.fatal('could not determine the compiler version %r' % cmd) + + # PY3K: do not touch + out = str(out) + + if gcc: + if out.find('__INTEL_COMPILER') >= 0: + conf.fatal('The intel compiler pretends to be gcc') + if out.find('__GNUC__') < 0: + conf.fatal('Could not determine the compiler type') + + if icc and out.find('__INTEL_COMPILER') < 0: + conf.fatal('Not icc/icpc') + + k = {} + if icc or gcc: + out = out.split('\n') + import shlex + 
+ for line in out: + lst = shlex.split(line) + if len(lst)>2: + key = lst[1] + val = lst[2] + k[key] = val + + def isD(var): + return var in k + + def isT(var): + return var in k and k[var] != '0' + + # Some documentation is available at http://predef.sourceforge.net + # The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns. + mp1 = { + '__linux__' : 'linux', + '__GNU__' : 'gnu', + '__FreeBSD__' : 'freebsd', + '__NetBSD__' : 'netbsd', + '__OpenBSD__' : 'openbsd', + '__sun' : 'sunos', + '__hpux' : 'hpux', + '__sgi' : 'irix', + '_AIX' : 'aix', + '__CYGWIN__' : 'cygwin', + '__MSYS__' : 'msys', + '_UWIN' : 'uwin', + '_WIN64' : 'win32', + '_WIN32' : 'win32', + '__POWERPC__' : 'powerpc', + } + + for i in mp1: + if isD(i): + conf.env.DEST_OS = mp1[i] + break + else: + if isD('__APPLE__') and isD('__MACH__'): + conf.env.DEST_OS = 'darwin' + elif isD('__unix__'): # unix must be tested last as it's a generic fallback + conf.env.DEST_OS = 'generic' + + if isD('__ELF__'): + conf.env.DEST_BINFMT = 'elf' + elif isD('__WINNT__') or isD('__CYGWIN__'): + conf.env.DEST_BINFMT = 'pe' + elif isD('__APPLE__'): + conf.env.DEST_BINFMT = 'mac-o' + + mp2 = { + '__x86_64__' : 'x86_64', + '__i386__' : 'x86', + '__ia64__' : 'ia', + '__mips__' : 'mips', + '__sparc__' : 'sparc', + '__alpha__' : 'alpha', + '__arm__' : 'arm', + '__hppa__' : 'hppa', + '__powerpc__' : 'powerpc', + } + for i in mp2: + if isD(i): + conf.env.DEST_CPU = mp2[i] + break + + debug('ccroot: dest platform: ' + ' '.join([conf.env[x] or '?' 
for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')])) + conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__']) + return k + +class DEBUG_LEVELS: + """Will disappear in waf 1.6""" + ULTRADEBUG = "ultradebug" + DEBUG = "debug" + RELEASE = "release" + OPTIMIZED = "optimized" + CUSTOM = "custom" + + ALL = [ULTRADEBUG, DEBUG, RELEASE, OPTIMIZED, CUSTOM] + +def scan(self): + "look for .h the .cpp need" + debug('ccroot: _scan_preprocessor(self, node, env, path_lst)') + + # TODO waf 1.6 - assume the default input has exactly one file + + if len(self.inputs) == 1: + node = self.inputs[0] + (nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS']) + if Logs.verbose: + debug('deps: deps for %s: %r; unresolved %r', str(node), nodes, names) + return (nodes, names) + + all_nodes = [] + all_names = [] + seen = set() + for node in self.inputs: + (nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS']) + if Logs.verbose: + debug('deps: deps for %s: %r; unresolved %r', str(node), nodes, names) + for x in nodes: + if id(x) in seen: continue + seen.add(id(x)) + all_nodes.append(x) + for x in names: + if not x in all_names: + all_names.append(x) + return (all_nodes, all_names) + +class ccroot_abstract(TaskGen.task_gen): + "Parent class for programs and libraries in languages c, c++ and moc (Qt)" + def __init__(self, *k, **kw): + # COMPAT remove in waf 1.6 TODO + if len(k) > 1: + k = list(k) + if k[1][0] != 'c': + k[1] = 'c' + k[1] + TaskGen.task_gen.__init__(self, *k, **kw) + +def get_target_name(self): + tp = 'program' + for x in self.features: + if x in ['cshlib', 'cstaticlib']: + tp = x.lstrip('c') + + pattern = self.env[tp + '_PATTERN'] + if not pattern: pattern = '%s' + + dir, name = os.path.split(self.target) + + if self.env.DEST_BINFMT == 'pe' and getattr(self, 'vnum', None) and 'cshlib' in self.features: + # include the version in the dll file name, + # the import lib file name stays 
unversionned. + name = name + '-' + self.vnum.split('.')[0] + + return os.path.join(dir, pattern % name) + +@feature('cc', 'cxx') +@before('apply_core') +def default_cc(self): + """compiled_tasks attribute must be set before the '.c->.o' tasks can be created""" + Utils.def_attrs(self, + includes = '', + defines= '', + rpaths = '', + uselib = '', + uselib_local = '', + add_objects = '', + p_flag_vars = [], + p_type_vars = [], + compiled_tasks = [], + link_task = None) + + # The only thing we need for cross-compilation is DEST_BINFMT. + # At some point, we may reach a case where DEST_BINFMT is not enough, but for now it's sufficient. + # Currently, cross-compilation is auto-detected only for the gnu and intel compilers. + if not self.env.DEST_BINFMT: + # Infer the binary format from the os name. + self.env.DEST_BINFMT = Utils.unversioned_sys_platform_to_binary_format( + self.env.DEST_OS or Utils.unversioned_sys_platform()) + + if not self.env.BINDIR: self.env.BINDIR = Utils.subst_vars('${PREFIX}/bin', self.env) + if not self.env.LIBDIR: self.env.LIBDIR = Utils.subst_vars('${PREFIX}/lib${LIB_EXT}', self.env) + +@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib') +def apply_verif(self): + """no particular order, used for diagnostic""" + if not (self.source or getattr(self, 'add_objects', None) or getattr(self, 'uselib_local', None) or getattr(self, 'obj_files', None)): + raise Utils.WafError('no source files specified for %s' % self) + if not self.target: + raise Utils.WafError('no target for %s' % self) + +# TODO reference the d programs, shlibs in d.py, not here + +@feature('cprogram', 'dprogram') +@after('default_cc') +@before('apply_core') +def vars_target_cprogram(self): + self.default_install_path = self.env.BINDIR + self.default_chmod = O755 + +@after('default_cc') +@feature('cshlib', 'dshlib') +@before('apply_core') +def vars_target_cshlib(self): + if self.env.DEST_BINFMT == 'pe': + # set execute bit on libs to avoid 'permission 
denied' (issue 283) + self.default_chmod = O755 + self.default_install_path = self.env.BINDIR + else: + self.default_install_path = self.env.LIBDIR + +@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib') +@after('apply_link', 'vars_target_cprogram', 'vars_target_cshlib') +def default_link_install(self): + """you may kill this method to inject your own installation for the first element + any other install should only process its own nodes and not those from the others""" + if self.install_path: + self.bld.install_files(self.install_path, self.link_task.outputs[0], env=self.env, chmod=self.chmod) + +@feature('cc', 'cxx') +@after('apply_type_vars', 'apply_lib_vars', 'apply_core') +def apply_incpaths(self): + """used by the scanner + after processing the uselib for CPPPATH + after apply_core because some processing may add include paths + """ + lst = [] + # TODO move the uselib processing out of here + for lib in self.to_list(self.uselib): + for path in self.env['CPPPATH_' + lib]: + if not path in lst: + lst.append(path) + if preproc.go_absolute: + for path in preproc.standard_includes: + if not path in lst: + lst.append(path) + + for path in self.to_list(self.includes): + if not path in lst: + if preproc.go_absolute or not os.path.isabs(path): + lst.append(path) + else: + self.env.prepend_value('CPPPATH', path) + + for path in lst: + node = None + if os.path.isabs(path): + if preproc.go_absolute: + node = self.bld.root.find_dir(path) + elif path[0] == '#': + node = self.bld.srcnode + if len(path) > 1: + node = node.find_dir(path[1:]) + else: + node = self.path.find_dir(path) + + if node: + self.env.append_value('INC_PATHS', node) + + # TODO WAF 1.6 + if USE_TOP_LEVEL: + self.env.append_value('INC_PATHS', self.bld.srcnode) + +@feature('cc', 'cxx') +@after('init_cc', 'init_cxx') +@before('apply_lib_vars') +def apply_type_vars(self): + """before apply_lib_vars because we modify uselib + after init_cc and init_cxx because web need p_type_vars 
+ """ + for x in self.features: + if not x in ['cprogram', 'cstaticlib', 'cshlib']: + continue + x = x.lstrip('c') + + # if the type defines uselib to add, add them + st = self.env[x + '_USELIB'] + if st: self.uselib = self.uselib + ' ' + st + + # each compiler defines variables like 'shlib_CXXFLAGS', 'shlib_LINKFLAGS', etc + # so when we make a task generator of the type shlib, CXXFLAGS are modified accordingly + for var in self.p_type_vars: + compvar = '%s_%s' % (x, var) + #print compvar + value = self.env[compvar] + if value: self.env.append_value(var, value) + +@feature('cprogram', 'cshlib', 'cstaticlib') +@after('apply_core') +def apply_link(self): + """executes after apply_core for collecting 'compiled_tasks' + use a custom linker if specified (self.link='name-of-custom-link-task')""" + link = getattr(self, 'link', None) + if not link: + if 'cstaticlib' in self.features: link = 'static_link' + elif 'cxx' in self.features: link = 'cxx_link' + else: link = 'cc_link' + + tsk = self.create_task(link) + outputs = [t.outputs[0] for t in self.compiled_tasks] + tsk.set_inputs(outputs) + tsk.set_outputs(self.path.find_or_declare(get_target_name(self))) + + self.link_task = tsk + +@feature('cc', 'cxx') +@after('apply_link', 'init_cc', 'init_cxx', 'apply_core') +def apply_lib_vars(self): + """after apply_link because of 'link_task' + after default_cc because of the attribute 'uselib'""" + + # after 'apply_core' in case if 'cc' if there is no link + + env = self.env + + # 1. 
the case of the libs defined in the project (visit ancestors first) + # the ancestors external libraries (uselib) will be prepended + self.uselib = self.to_list(self.uselib) + names = self.to_list(self.uselib_local) + + seen = set([]) + tmp = Utils.deque(names) # consume a copy of the list of names + while tmp: + lib_name = tmp.popleft() + # visit dependencies only once + if lib_name in seen: + continue + + y = self.name_to_obj(lib_name) + if not y: + raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name)) + y.post() + seen.add(lib_name) + + # object has ancestors to process (shared libraries): add them to the end of the list + if getattr(y, 'uselib_local', None): + lst = y.to_list(y.uselib_local) + if 'cshlib' in y.features or 'cprogram' in y.features: + lst = [x for x in lst if not 'cstaticlib' in self.name_to_obj(x).features] + tmp.extend(lst) + + # link task and flags + if getattr(y, 'link_task', None): + + link_name = y.target[y.target.rfind(os.sep) + 1:] + if 'cstaticlib' in y.features: + env.append_value('STATICLIB', link_name) + elif 'cshlib' in y.features or 'cprogram' in y.features: + # WARNING some linkers can link against programs + env.append_value('LIB', link_name) + + # the order + self.link_task.set_run_after(y.link_task) + + # for the recompilation + dep_nodes = getattr(self.link_task, 'dep_nodes', []) + self.link_task.dep_nodes = dep_nodes + y.link_task.outputs + + # add the link path too + tmp_path = y.link_task.outputs[0].parent.bldpath(self.env) + if not tmp_path in env['LIBPATH']: env.prepend_value('LIBPATH', tmp_path) + + # add ancestors uselib too - but only propagate those that have no staticlib + for v in self.to_list(y.uselib): + if not env['STATICLIB_' + v]: + if not v in self.uselib: + self.uselib.insert(0, v) + + # if the library task generator provides 'export_incdirs', add to the include path + # the export_incdirs must be a list of paths relative to the other library + if getattr(y, 
'export_incdirs', None): + for x in self.to_list(y.export_incdirs): + node = y.path.find_dir(x) + if not node: + raise Utils.WafError('object %r: invalid folder %r in export_incdirs' % (y.target, x)) + self.env.append_unique('INC_PATHS', node) + + # 2. the case of the libs defined outside + for x in self.uselib: + for v in self.p_flag_vars: + val = self.env[v + '_' + x] + if val: self.env.append_value(v, val) + +@feature('cprogram', 'cstaticlib', 'cshlib') +@after('init_cc', 'init_cxx', 'apply_link') +def apply_objdeps(self): + "add the .o files produced by some other object files in the same manner as uselib_local" + if not getattr(self, 'add_objects', None): return + + seen = [] + names = self.to_list(self.add_objects) + while names: + x = names[0] + + # visit dependencies only once + if x in seen: + names = names[1:] + continue + + # object does not exist ? + y = self.name_to_obj(x) + if not y: + raise Utils.WafError('object %r was not found in uselib_local (required by add_objects %r)' % (x, self.name)) + + # object has ancestors to process first ? 
update the list of names + if getattr(y, 'add_objects', None): + added = 0 + lst = y.to_list(y.add_objects) + lst.reverse() + for u in lst: + if u in seen: continue + added = 1 + names = [u]+names + if added: continue # list of names modified, loop + + # safe to process the current object + y.post() + seen.append(x) + + for t in y.compiled_tasks: + self.link_task.inputs.extend(t.outputs) + +@feature('cprogram', 'cshlib', 'cstaticlib') +@after('apply_lib_vars') +def apply_obj_vars(self): + """after apply_lib_vars for uselib""" + v = self.env + lib_st = v['LIB_ST'] + staticlib_st = v['STATICLIB_ST'] + libpath_st = v['LIBPATH_ST'] + staticlibpath_st = v['STATICLIBPATH_ST'] + rpath_st = v['RPATH_ST'] + + app = v.append_unique + + if v['FULLSTATIC']: + v.append_value('LINKFLAGS', v['FULLSTATIC_MARKER']) + + for i in v['RPATH']: + if i and rpath_st: + app('LINKFLAGS', rpath_st % i) + + for i in v['LIBPATH']: + app('LINKFLAGS', libpath_st % i) + app('LINKFLAGS', staticlibpath_st % i) + + if v['STATICLIB']: + v.append_value('LINKFLAGS', v['STATICLIB_MARKER']) + k = [(staticlib_st % i) for i in v['STATICLIB']] + app('LINKFLAGS', k) + + # fully static binaries ? 
+ if not v['FULLSTATIC']: + if v['STATICLIB'] or v['LIB']: + v.append_value('LINKFLAGS', v['SHLIB_MARKER']) + + app('LINKFLAGS', [lib_st % i for i in v['LIB']]) + +@after('apply_link') +def process_obj_files(self): + if not hasattr(self, 'obj_files'): return + for x in self.obj_files: + node = self.path.find_resource(x) + self.link_task.inputs.append(node) + +@taskgen +def add_obj_file(self, file): + """Small example on how to link object files as if they were source + obj = bld.create_obj('cc') + obj.add_obj_file('foo.o')""" + if not hasattr(self, 'obj_files'): self.obj_files = [] + if not 'process_obj_files' in self.meths: self.meths.append('process_obj_files') + self.obj_files.append(file) + +c_attrs = { +'cxxflag' : 'CXXFLAGS', +'cflag' : 'CCFLAGS', +'ccflag' : 'CCFLAGS', +'linkflag' : 'LINKFLAGS', +'ldflag' : 'LINKFLAGS', +'lib' : 'LIB', +'libpath' : 'LIBPATH', +'staticlib': 'STATICLIB', +'staticlibpath': 'STATICLIBPATH', +'rpath' : 'RPATH', +'framework' : 'FRAMEWORK', +'frameworkpath' : 'FRAMEWORKPATH' +} + +@feature('cc', 'cxx') +@before('init_cxx', 'init_cc') +@before('apply_lib_vars', 'apply_obj_vars', 'apply_incpaths', 'init_cc') +def add_extra_flags(self): + """case and plural insensitive + before apply_obj_vars for processing the library attributes + """ + for x in self.__dict__.keys(): + y = x.lower() + if y[-1] == 's': + y = y[:-1] + if c_attrs.get(y, None): + self.env.append_unique(c_attrs[y], getattr(self, x)) + +# ============ the code above must not know anything about import libs ========== + +@feature('cshlib') +@after('apply_link', 'default_cc') +@before('apply_lib_vars', 'apply_objdeps', 'default_link_install') +def apply_implib(self): + """On mswindows, handle dlls and their import libs + the .dll.a is the import lib and it is required for linking so it is installed too + """ + if not self.env.DEST_BINFMT == 'pe': + return + + self.meths.remove('default_link_install') + + bindir = self.install_path + if not bindir: return + + # install the 
dll in the bin dir + dll = self.link_task.outputs[0] + self.bld.install_files(bindir, dll, self.env, self.chmod) + + # add linker flags to generate the import lib + implib = self.env['implib_PATTERN'] % os.path.split(self.target)[1] + + implib = dll.parent.find_or_declare(implib) + self.link_task.outputs.append(implib) + self.bld.install_as('${LIBDIR}/%s' % implib.name, implib, self.env) + + self.env.append_value('LINKFLAGS', (self.env['IMPLIB_ST'] % implib.bldpath(self.env)).split()) + +# ============ the code above must not know anything about vnum processing on unix platforms ========= + +@feature('cshlib') +@after('apply_link') +@before('apply_lib_vars', 'default_link_install') +def apply_vnum(self): + """ + libfoo.so is installed as libfoo.so.1.2.3 + """ + if not getattr(self, 'vnum', '') or not 'cshlib' in self.features or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'): + return + + self.meths.remove('default_link_install') + + link = self.link_task + nums = self.vnum.split('.') + node = link.outputs[0] + + libname = node.name + if libname.endswith('.dylib'): + name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum) + name2 = libname.replace('.dylib', '.%s.dylib' % nums[0]) + else: + name3 = libname + '.' + self.vnum + name2 = libname + '.' 
+ nums[0] + + if self.env.SONAME_ST: + v = self.env.SONAME_ST % name2 + self.env.append_value('LINKFLAGS', v.split()) + + bld = self.bld + nums = self.vnum.split('.') + + path = self.install_path + if not path: return + + bld.install_as(path + os.sep + name3, node, env=self.env) + bld.symlink_as(path + os.sep + name2, name3) + bld.symlink_as(path + os.sep + libname, name3) + + # the following task is just to enable execution from the build dir :-/ + self.create_task('vnum', node, [node.parent.find_or_declare(name2), node.parent.find_or_declare(name3)]) + +def exec_vnum_link(self): + for x in self.outputs: + path = x.abspath(self.env) + try: + os.remove(path) + except OSError: + pass + + try: + os.symlink(self.inputs[0].name, path) + except OSError: + return 1 + +cls = Task.task_type_from_func('vnum', func=exec_vnum_link, ext_in='.bin', color='CYAN') +cls.quiet = 1 + +# ============ the --as-needed flag should added during the configuration, not at runtime ========= + +@conftest +def add_as_needed(conf): + if conf.env.DEST_BINFMT == 'elf' and 'gcc' in (conf.env.CXX_NAME, conf.env.CC_NAME): + conf.env.append_unique('LINKFLAGS', '--as-needed') + diff --git a/buildtools/wafadmin/Tools/compiler_cc.py b/buildtools/wafadmin/Tools/compiler_cc.py new file mode 100644 index 0000000000..0421503f7c --- /dev/null +++ b/buildtools/wafadmin/Tools/compiler_cc.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Matthias Jahn jahn dôt matthias ât freenet dôt de, 2007 (pmarat) + +import os, sys, imp, types, ccroot +import optparse +import Utils, Configure, Options +from Logs import debug + +c_compiler = { + 'win32': ['msvc', 'gcc'], + 'cygwin': ['gcc'], + 'darwin': ['gcc'], + 'aix': ['xlc', 'gcc'], + 'linux': ['gcc', 'icc', 'suncc'], + 'sunos': ['gcc', 'suncc'], + 'irix': ['gcc'], + 'hpux': ['gcc'], + 'gnu': ['gcc'], + 'default': ['gcc'] +} + +def __list_possible_compiler(platform): + try: + return c_compiler[platform] + except KeyError: + return c_compiler["default"] + 
+def detect(conf): + """ + for each compiler for the platform, try to configure the compiler + in theory the tools should raise a configuration error if the compiler + pretends to be something it is not (setting CC=icc and trying to configure gcc) + """ + try: test_for_compiler = Options.options.check_c_compiler + except AttributeError: conf.fatal("Add set_options(opt): opt.tool_options('compiler_cc')") + orig = conf.env + for compiler in test_for_compiler.split(): + conf.env = orig.copy() + try: + conf.check_tool(compiler) + except Configure.ConfigurationError, e: + debug('compiler_cc: %r' % e) + else: + if conf.env['CC']: + orig.table = conf.env.get_merged_dict() + conf.env = orig + conf.check_message(compiler, '', True) + conf.env['COMPILER_CC'] = compiler + break + conf.check_message(compiler, '', False) + break + else: + conf.fatal('could not configure a c compiler!') + +def set_options(opt): + build_platform = Utils.unversioned_sys_platform() + possible_compiler_list = __list_possible_compiler(build_platform) + test_for_compiler = ' '.join(possible_compiler_list) + cc_compiler_opts = opt.add_option_group("C Compiler Options") + cc_compiler_opts.add_option('--check-c-compiler', default="%s" % test_for_compiler, + help='On this platform (%s) the following C-Compiler will be checked by default: "%s"' % (build_platform, test_for_compiler), + dest="check_c_compiler") + + for c_compiler in test_for_compiler.split(): + opt.tool_options('%s' % c_compiler, option_group=cc_compiler_opts) + diff --git a/buildtools/wafadmin/Tools/compiler_cxx.py b/buildtools/wafadmin/Tools/compiler_cxx.py new file mode 100644 index 0000000000..5308ea9719 --- /dev/null +++ b/buildtools/wafadmin/Tools/compiler_cxx.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Matthias Jahn jahn dôt matthias ât freenet dôt de 2007 (pmarat) + +import os, sys, imp, types, ccroot +import optparse +import Utils, Configure, Options +from Logs import debug + +cxx_compiler = { +'win32': ['msvc', 
'g++'], +'cygwin': ['g++'], +'darwin': ['g++'], +'aix': ['xlc++', 'g++'], +'linux': ['g++', 'icpc', 'sunc++'], +'sunos': ['g++', 'sunc++'], +'irix': ['g++'], +'hpux': ['g++'], +'gnu': ['g++'], +'default': ['g++'] +} + +def __list_possible_compiler(platform): + try: + return cxx_compiler[platform] + except KeyError: + return cxx_compiler["default"] + +def detect(conf): + try: test_for_compiler = Options.options.check_cxx_compiler + except AttributeError: raise Configure.ConfigurationError("Add set_options(opt): opt.tool_options('compiler_cxx')") + orig = conf.env + for compiler in test_for_compiler.split(): + try: + conf.env = orig.copy() + conf.check_tool(compiler) + except Configure.ConfigurationError, e: + debug('compiler_cxx: %r' % e) + else: + if conf.env['CXX']: + orig.table = conf.env.get_merged_dict() + conf.env = orig + conf.check_message(compiler, '', True) + conf.env['COMPILER_CXX'] = compiler + break + conf.check_message(compiler, '', False) + break + else: + conf.fatal('could not configure a cxx compiler!') + +def set_options(opt): + build_platform = Utils.unversioned_sys_platform() + possible_compiler_list = __list_possible_compiler(build_platform) + test_for_compiler = ' '.join(possible_compiler_list) + cxx_compiler_opts = opt.add_option_group('C++ Compiler Options') + cxx_compiler_opts.add_option('--check-cxx-compiler', default="%s" % test_for_compiler, + help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"' % (build_platform, test_for_compiler), + dest="check_cxx_compiler") + + for cxx_compiler in test_for_compiler.split(): + opt.tool_options('%s' % cxx_compiler, option_group=cxx_compiler_opts) + diff --git a/buildtools/wafadmin/Tools/compiler_d.py b/buildtools/wafadmin/Tools/compiler_d.py new file mode 100644 index 0000000000..1ea5efa30e --- /dev/null +++ b/buildtools/wafadmin/Tools/compiler_d.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Carlos Rafael Giani, 2007 (dv) + +import os, sys, imp, 
types +import Utils, Configure, Options + +def detect(conf): + if getattr(Options.options, 'check_dmd_first', None): + test_for_compiler = ['dmd', 'gdc'] + else: + test_for_compiler = ['gdc', 'dmd'] + + for d_compiler in test_for_compiler: + try: + conf.check_tool(d_compiler) + except: + pass + else: + break + else: + conf.fatal('no suitable d compiler was found') + +def set_options(opt): + d_compiler_opts = opt.add_option_group('D Compiler Options') + d_compiler_opts.add_option('--check-dmd-first', action='store_true', + help='checks for the gdc compiler before dmd (default is the other way round)', + dest='check_dmd_first', + default=False) + + for d_compiler in ['gdc', 'dmd']: + opt.tool_options('%s' % d_compiler, option_group=d_compiler_opts) + diff --git a/buildtools/wafadmin/Tools/config_c.py b/buildtools/wafadmin/Tools/config_c.py new file mode 100644 index 0000000000..a32d8aaf1a --- /dev/null +++ b/buildtools/wafadmin/Tools/config_c.py @@ -0,0 +1,736 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2005-2008 (ita) + +""" +c/c++ configuration routines +""" + +import os, imp, sys, shlex, shutil +from Utils import md5 +import Build, Utils, Configure, Task, Options, Logs, TaskGen +from Constants import * +from Configure import conf, conftest + +cfg_ver = { + 'atleast-version': '>=', + 'exact-version': '==', + 'max-version': '<=', +} + +SNIP1 = ''' + int main() { + void *p; + p=(void*)(%s); + return 0; +} +''' + +SNIP2 = ''' +int main() { + if ((%(type_name)s *) 0) return 0; + if (sizeof (%(type_name)s)) return 0; +} +''' + +SNIP3 = ''' +int main() { + return 0; +} +''' + +def parse_flags(line, uselib, env): + """pkg-config still has bugs on some platforms, and there are many -config programs, parsing flags is necessary :-/""" + + lst = shlex.split(line) + while lst: + x = lst.pop(0) + st = x[:2] + ot = x[2:] + app = env.append_value + if st == '-I' or st == '/I': + if not ot: ot = lst.pop(0) + app('CPPPATH_' + uselib, ot) + elif st == '-D': + if not 
ot: ot = lst.pop(0) + app('CXXDEFINES_' + uselib, ot) + app('CCDEFINES_' + uselib, ot) + elif st == '-l': + if not ot: ot = lst.pop(0) + app('LIB_' + uselib, ot) + elif st == '-L': + if not ot: ot = lst.pop(0) + app('LIBPATH_' + uselib, ot) + elif x == '-pthread' or x.startswith('+'): + app('CCFLAGS_' + uselib, x) + app('CXXFLAGS_' + uselib, x) + app('LINKFLAGS_' + uselib, x) + elif x == '-framework': + app('FRAMEWORK_' + uselib, lst.pop(0)) + elif x.startswith('-F'): + app('FRAMEWORKPATH_' + uselib, x[2:]) + elif x.startswith('-std'): + app('CCFLAGS_' + uselib, x) + app('CXXFLAGS_' + uselib, x) + app('LINKFLAGS_' + uselib, x) + elif x.startswith('-Wl'): + app('LINKFLAGS_' + uselib, x) + elif x.startswith('-m') or x.startswith('-f'): + app('CCFLAGS_' + uselib, x) + app('CXXFLAGS_' + uselib, x) + +@conf +def ret_msg(self, f, kw): + """execute a function, when provided""" + if isinstance(f, str): + return f + return f(kw) + +@conf +def validate_cfg(self, kw): + if not 'path' in kw: + kw['path'] = 'pkg-config --errors-to-stdout --print-errors' + + # pkg-config version + if 'atleast_pkgconfig_version' in kw: + if not 'msg' in kw: + kw['msg'] = 'Checking for pkg-config version >= %s' % kw['atleast_pkgconfig_version'] + return + + # pkg-config --modversion + if 'modversion' in kw: + return + + if 'variables' in kw: + if not 'msg' in kw: + kw['msg'] = 'Checking for %s variables' % kw['package'] + return + + # checking for the version of a module, for the moment, one thing at a time + for x in cfg_ver.keys(): + y = x.replace('-', '_') + if y in kw: + if not 'package' in kw: + raise ValueError('%s requires a package' % x) + + if not 'msg' in kw: + kw['msg'] = 'Checking for %s %s %s' % (kw['package'], cfg_ver[x], kw[y]) + return + + if not 'msg' in kw: + kw['msg'] = 'Checking for %s' % (kw['package'] or kw['path']) + if not 'okmsg' in kw: + kw['okmsg'] = 'yes' + if not 'errmsg' in kw: + kw['errmsg'] = 'not found' + +@conf +def cmd_and_log(self, cmd, kw): + 
@conf
def exec_cfg(self, kw):
	"""Run the actual -config/pkg-config command described by *kw*.

	The checks are mutually exclusive and tried in order:
	'atleast_pkgconfig_version' (version of pkg-config itself),
	one of the cfg_ver operators (module version comparison),
	'modversion' (retrieve and define a module version),
	'variables' (read pkg-config variables into env),
	and finally the default: run the command and parse the emitted
	flags with parse_flags().  Failures propagate from cmd_and_log
	(which calls self.fatal).  NOTE: this file is Python 2
	(dict.iteritems below).
	"""

	# pkg-config version
	if 'atleast_pkgconfig_version' in kw:
		cmd = '%s --atleast-pkgconfig-version=%s' % (kw['path'], kw['atleast_pkgconfig_version'])
		self.cmd_and_log(cmd, kw)
		if not 'okmsg' in kw:
			kw['okmsg'] = 'yes'
		return

	# checking for the version of a module - only the first matching
	# operator in cfg_ver is used (note: falls through to the flag
	# parsing below rather than returning)
	for x in cfg_ver:
		y = x.replace('-', '_')
		if y in kw:
			self.cmd_and_log('%s --%s=%s %s' % (kw['path'], x, kw[y], kw['package']), kw)
			if not 'okmsg' in kw:
				kw['okmsg'] = 'yes'
			# define HAVE_<PACKAGE> on success
			self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
			break

	# retrieving the version of a module; defines <NAME>_VERSION
	if 'modversion' in kw:
		version = self.cmd_and_log('%s --modversion %s' % (kw['path'], kw['modversion']), kw).strip()
		self.define('%s_VERSION' % Utils.quote_define_name(kw.get('uselib_store', kw['modversion'])), version)
		return version

	# retrieving variables of a module into env as <USELIB>_<var>
	if 'variables' in kw:
		env = kw.get('env', self.env)
		uselib = kw.get('uselib_store', kw['package'].upper())
		vars = Utils.to_list(kw['variables'])
		for v in vars:
			val = self.cmd_and_log('%s --variable=%s %s' % (kw['path'], v, kw['package']), kw).strip()
			var = '%s_%s' % (uselib, v)
			env[var] = val
		if not 'okmsg' in kw:
			kw['okmsg'] = 'yes'
		return

	# default case: build the full command line
	lst = [kw['path']]

	# explicit 'define_variable' dict wins over env.PKG_CONFIG_DEFINES
	defi = kw.get('define_variable', None)
	if not defi:
		defi = self.env.PKG_CONFIG_DEFINES or {}
	for key, val in defi.iteritems():
		lst.append('--define-variable=%s=%s' % (key, val))

	lst.append(kw.get('args', ''))
	lst.append(kw['package'])

	# so we assume the command-line will output flags to be parsed afterwards
	cmd = ' '.join(lst)
	ret = self.cmd_and_log(cmd, kw)
	if not 'okmsg' in kw:
		kw['okmsg'] = 'yes'

	self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
	# store the parsed flags under uselib_store (default: upper-cased package)
	parse_flags(ret, kw.get('uselib_store', kw['package'].upper()), kw.get('env', self.env))
	return ret
"""validate the parameters for the test method""" + + if not 'env' in kw: + kw['env'] = self.env.copy() + + env = kw['env'] + if not 'compiler' in kw: + kw['compiler'] = 'cc' + if env['CXX_NAME'] and Task.TaskBase.classes.get('cxx', None): + kw['compiler'] = 'cxx' + if not self.env['CXX']: + self.fatal('a c++ compiler is required') + else: + if not self.env['CC']: + self.fatal('a c compiler is required') + + if not 'type' in kw: + kw['type'] = 'cprogram' + + assert not(kw['type'] != 'cprogram' and kw.get('execute', 0)), 'can only execute programs' + + + #if kw['type'] != 'program' and kw.get('execute', 0): + # raise ValueError, 'can only execute programs' + + def to_header(dct): + if 'header_name' in dct: + dct = Utils.to_list(dct['header_name']) + return ''.join(['#include <%s>\n' % x for x in dct]) + return '' + + # set the file name + if not 'compile_mode' in kw: + kw['compile_mode'] = (kw['compiler'] == 'cxx') and 'cxx' or 'cc' + + if not 'compile_filename' in kw: + kw['compile_filename'] = 'test.c' + ((kw['compile_mode'] == 'cxx') and 'pp' or '') + + #OSX + if 'framework_name' in kw: + try: TaskGen.task_gen.create_task_macapp + except AttributeError: self.fatal('frameworks require the osx tool') + + fwkname = kw['framework_name'] + if not 'uselib_store' in kw: + kw['uselib_store'] = fwkname.upper() + + if not kw.get('no_header', False): + if not 'header_name' in kw: + kw['header_name'] = [] + fwk = '%s/%s.h' % (fwkname, fwkname) + if kw.get('remove_dot_h', None): + fwk = fwk[:-2] + kw['header_name'] = Utils.to_list(kw['header_name']) + [fwk] + + kw['msg'] = 'Checking for framework %s' % fwkname + kw['framework'] = fwkname + #kw['frameworkpath'] = set it yourself + + if 'function_name' in kw: + fu = kw['function_name'] + if not 'msg' in kw: + kw['msg'] = 'Checking for function %s' % fu + kw['code'] = to_header(kw) + SNIP1 % fu + if not 'uselib_store' in kw: + kw['uselib_store'] = fu.upper() + if not 'define_name' in kw: + kw['define_name'] = 
self.have_define(fu) + + elif 'type_name' in kw: + tu = kw['type_name'] + if not 'msg' in kw: + kw['msg'] = 'Checking for type %s' % tu + if not 'header_name' in kw: + kw['header_name'] = 'stdint.h' + kw['code'] = to_header(kw) + SNIP2 % {'type_name' : tu} + if not 'define_name' in kw: + kw['define_name'] = self.have_define(tu.upper()) + + elif 'header_name' in kw: + if not 'msg' in kw: + kw['msg'] = 'Checking for header %s' % kw['header_name'] + + l = Utils.to_list(kw['header_name']) + assert len(l)>0, 'list of headers in header_name is empty' + + kw['code'] = to_header(kw) + SNIP3 + + if not 'uselib_store' in kw: + kw['uselib_store'] = l[0].upper() + + if not 'define_name' in kw: + kw['define_name'] = self.have_define(l[0]) + + if 'lib' in kw: + if not 'msg' in kw: + kw['msg'] = 'Checking for library %s' % kw['lib'] + if not 'uselib_store' in kw: + kw['uselib_store'] = kw['lib'].upper() + + if 'staticlib' in kw: + if not 'msg' in kw: + kw['msg'] = 'Checking for static library %s' % kw['staticlib'] + if not 'uselib_store' in kw: + kw['uselib_store'] = kw['staticlib'].upper() + + if 'fragment' in kw: + # an additional code fragment may be provided to replace the predefined code + # in custom headers + kw['code'] = kw['fragment'] + if not 'msg' in kw: + kw['msg'] = 'Checking for custom code' + if not 'errmsg' in kw: + kw['errmsg'] = 'no' + + for (flagsname,flagstype) in [('cxxflags','compiler'), ('cflags','compiler'), ('linkflags','linker')]: + if flagsname in kw: + if not 'msg' in kw: + kw['msg'] = 'Checking for %s flags %s' % (flagstype, kw[flagsname]) + if not 'errmsg' in kw: + kw['errmsg'] = 'no' + + if not 'execute' in kw: + kw['execute'] = False + + if not 'errmsg' in kw: + kw['errmsg'] = 'not found' + + if not 'okmsg' in kw: + kw['okmsg'] = 'yes' + + if not 'code' in kw: + kw['code'] = SNIP3 + + if not kw.get('success'): kw['success'] = None + + assert 'msg' in kw, 'invalid parameters, read http://freehackers.org/~tnagy/wafbook/single.html#config_helpers_c' + 
@conf
def post_check(self, *k, **kw):
	"set the variables after a test was run successfully"

	# success criterion differs by mode: in execute mode kw['success']
	# holds the program output (non-None => it ran); otherwise it holds
	# the run_c_code return value (0 => compiled)
	is_success = False
	if kw['execute']:
		if kw['success'] is not None:
			is_success = True
	else:
		is_success = (kw['success'] == 0)

	if 'define_name' in kw:
		if 'header_name' in kw or 'function_name' in kw or 'type_name' in kw or 'fragment' in kw:
			if kw['execute']:
				key = kw['success']
				if isinstance(key, str):
					if key:
						# non-empty program output: define to that value
						self.define(kw['define_name'], key, quote=kw.get('quote', 1))
					else:
						# ran but printed nothing: plain define
						self.define_cond(kw['define_name'], True)
				else:
					self.define_cond(kw['define_name'], False)
			else:
				self.define_cond(kw['define_name'], is_success)

	if is_success and 'uselib_store' in kw:
		# project-local modules; their g_*_flag_vars list the env vars
		# that may be propagated (LIB, CPPPATH, CXXFLAGS, ...)
		import cc, cxx
		for k in set(cc.g_cc_flag_vars).union(cxx.g_cxx_flag_vars):
			# the kw argument uses the lower-cased variable name
			lk = k.lower()
			# inconsistency: includes -> CPPPATH
			if k == 'CPPPATH': lk = 'includes'
			if k == 'CXXDEFINES': lk = 'defines'
			if k == 'CCDEFINES': lk = 'defines'
			if lk in kw:
				val = kw[lk]
				# remove trailing slash
				if isinstance(val, str):
					val = val.rstrip(os.path.sep)
				self.env.append_unique(k + '_' + kw['uselib_store'], val)
'.conf_check_%d' % k) + + # if the folder already exists, remove it + try: + shutil.rmtree(dir) + except OSError: + pass + + try: + os.stat(dir) + except OSError: + break + + k += 1 + + try: + os.makedirs(dir) + except: + self.fatal('cannot create a configuration test folder %r' % dir) + + try: + os.stat(dir) + except: + self.fatal('cannot use the configuration test folder %r' % dir) + + bdir = os.path.join(dir, 'testbuild') + + if not os.path.exists(bdir): + os.makedirs(bdir) + + env = kw['env'] + + dest = open(os.path.join(dir, test_f_name), 'w') + dest.write(kw['code']) + dest.close() + + back = os.path.abspath('.') + + bld = Build.BuildContext() + bld.log = self.log + bld.all_envs.update(self.all_envs) + bld.all_envs['default'] = env + bld.lst_variants = bld.all_envs.keys() + bld.load_dirs(dir, bdir) + + os.chdir(dir) + + bld.rescan(bld.srcnode) + + if not 'features' in kw: + # conf.check(features='cc cprogram pyext', ...) + kw['features'] = [kw['compile_mode'], kw['type']] # "cprogram cc" + + o = bld(features=kw['features'], source=test_f_name, target='testprog') + + for k, v in kw.iteritems(): + setattr(o, k, v) + + self.log.write("==>\n%s\n<==\n" % kw['code']) + + # compile the program + try: + bld.compile() + except Utils.WafError: + ret = Utils.ex_stack() + else: + ret = 0 + + # chdir before returning + os.chdir(back) + + if ret: + self.log.write('command returned %r' % ret) + self.fatal(str(ret)) + + # if we need to run the program, try to get its result + # keep the name of the program to execute + if kw['execute']: + lastprog = o.link_task.outputs[0].abspath(env) + + args = Utils.to_list(kw.get('exec_args', [])) + proc = Utils.pproc.Popen([lastprog] + args, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE) + (out, err) = proc.communicate() + w = self.log.write + w(str(out)) + w('\n') + w(str(err)) + w('\n') + w('returncode %r' % proc.returncode) + w('\n') + if proc.returncode: + self.fatal(Utils.ex_stack()) + ret = out + + return ret + +@conf +def 
@conf
def define(self, define, value, quote=1):
	"""Store a single define and its state into an internal list for
	later writing to a config header file.  Value can only be a string
	or an int; other types are rejected with TypeError.  String values
	appear properly quoted in the generated header unless quote is
	false.
	"""
	assert define and isinstance(define, str)

	# ordered_dict so the configuration header is written in insertion order
	table = self.env[DEFINES] or Utils.ordered_dict()

	# the user forgot to tell if the value is quoted or not
	if isinstance(value, str):
		if quote:
			stored = '"%s"' % repr('"' + value)[2:-1].replace('"', '\\"')
		else:
			stored = value
	elif isinstance(value, int):
		stored = value
	else:
		raise TypeError('define %r -> %r must be a string or an int' % (define, value))
	table[define] = stored

	# add later to make reconfiguring faster
	self.env[DEFINES] = table
	self.env[define] = value # <- not certain this is necessary
@conf
def get_config_header(self):
	"""Fill-in the contents of the config header. Override when you need to write your own config header."""
	defines = self.env[DEFINES] or Utils.ordered_dict()

	def render(key):
		value = defines[key]
		if value is None:
			# bare define with no value
			return '#define %s' % key
		if value is UNDEFINED:
			# explicitly undefined: keep a commented-out trace
			return '/* #undef %s */' % key
		return '#define %s %s' % (key, value)

	return "\n".join([render(key) for key in defines.allkeys])
def detect(conf):
	"""Configure the C# compiler: a --with-csc-binary override takes
	precedence, then gmcs/mcs are searched on the PATH."""
	override = getattr(Options.options, 'cscbinary', None)
	if override:
		conf.env.MCS = override
	conf.find_program(['gmcs', 'mcs'], var='MCS')
@feature('cxx')
@after('apply_lib_vars')
def apply_defines_cxx(self):
	"""after uselib is set for CXXDEFINES

	Collects the task generator's own defines plus env['CXXDEFINES'],
	deduplicates them, appends the CXXDEFINES_<uselib> values, then
	derives env['DEFLINES'] ("NAME value" pairs) and the final
	'_CXXDEFFLAGS' command-line fragments via CXXDEFINES_ST.
	"""
	self.defines = getattr(self, 'defines', [])
	lst = self.to_list(self.defines) + self.to_list(self.env['CXXDEFINES'])
	milst = []

	# now process the local defines, keeping first occurrence only
	for defi in lst:
		if not defi in milst:
			milst.append(defi)

	# CXXDEFINES_USELIB (note: uselib values are appended without dedup)
	libs = self.to_list(self.uselib)
	for l in libs:
		val = self.env['CXXDEFINES_'+l]
		if val: milst += self.to_list(val)

	# split each 'NAME=value' into "NAME value" with quotes trimmed
	# (Utils.trimquotes is project-local; presumably strips surrounding
	# quote characters - confirm against Utils)
	self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]]
	y = self.env['CXXDEFINES_ST']
	self.env.append_unique('_CXXDEFFLAGS', [y%x for x in milst])
def filter_comments(filename):
	"""Hand-written lexer for D sources: return the file contents as a
	list of code fragments with comments (// .., /* */ and nesting
	/+ +/) replaced by a single space.  String and character literals
	are dropped from the output entirely (the fragment before the
	opening quote is kept, scanning resumes after the closing quote),
	so comment delimiters inside literals cannot confuse the import
	scanner.
	"""
	txt = Utils.readf(filename)
	i = 0
	buf = []
	max = len(txt)
	begin = 0
	while i < max:
		c = txt[i]
		if c == '"' or c == "'": # skip a string or character literal
			buf.append(txt[begin:i])
			delim = c
			i += 1
			while i < max:
				c = txt[i]
				if c == delim: break
				elif c == '\\': # skip the character following backslash
					i += 1
				i += 1
			i += 1
			begin = i
		elif c == '/': # try to replace a comment with whitespace
			buf.append(txt[begin:i])
			i += 1
			if i == max: break
			c = txt[i]
			if c == '+': # eat nesting /+ +/ comment
				i += 1
				nesting = 1
				c = None
				while i < max:
					prev = c
					c = txt[i]
					if prev == '/' and c == '+':
						nesting += 1
						c = None
					elif prev == '+' and c == '/':
						nesting -= 1
						if nesting == 0: break
						c = None
					i += 1
			elif c == '*': # eat /* */ comment
				i += 1
				c = None
				while i < max:
					prev = c
					c = txt[i]
					if prev == '*' and c == '/': break
					i += 1
			elif c == '/': # eat // comment
				i += 1
				while i < max and txt[i] != '\n':
					i += 1
			else: # no comment: a lone '/', back up and emit it
				begin = i - 1
				continue
			i += 1
			begin = i
			buf.append(' ')
		else:
			i += 1
	# trailing fragment after the last literal/comment
	buf.append(txt[begin:])
	return buf
def get_target_name(self):
	"""Build the output file name for d programs and libs by applying
	the env pattern (D_program_PATTERN, D_shlib_PATTERN or
	D_staticlib_PATTERN) to self.target."""
	kind = 'program'
	# the last matching feature wins, as in the original loop
	for feat in self.features:
		if feat in ('dshlib', 'dstaticlib'):
			kind = feat.lstrip('d')
	pattern = self.env['D_%s_PATTERN' % kind]
	return pattern % self.target
+d_params = { +'dflags': '', +'importpaths':'', +'libs':'', +'libpaths':'', +'generate_headers':False, +} + +@feature('d') +@before('apply_type_vars') +def init_d(self): + for x in d_params: + setattr(self, x, getattr(self, x, d_params[x])) + +class d_taskgen(TaskGen.task_gen): + def __init__(self, *k, **kw): + TaskGen.task_gen.__init__(self, *k, **kw) + + # COMPAT + if len(k) > 1: + self.features.append('d' + k[1]) + +# okay, we borrow a few methods from ccroot +TaskGen.bind_feature('d', D_METHS) + +@feature('d') +@before('apply_d_libs') +def init_d(self): + Utils.def_attrs(self, + dflags='', + importpaths='', + libs='', + libpaths='', + uselib='', + uselib_local='', + generate_headers=False, # set to true if you want .di files as well as .o + compiled_tasks=[], + add_objects=[], + link_task=None) + +@feature('d') +@after('apply_d_link', 'init_d') +@before('apply_vnum', 'apply_d_vars') +def apply_d_libs(self): + """after apply_link because of 'link_task' + after default_cc because of the attribute 'uselib'""" + env = self.env + + # 1. 
the case of the libs defined in the project (visit ancestors first) + # the ancestors external libraries (uselib) will be prepended + self.uselib = self.to_list(self.uselib) + names = self.to_list(self.uselib_local) + + seen = set([]) + tmp = Utils.deque(names) # consume a copy of the list of names + while tmp: + lib_name = tmp.popleft() + # visit dependencies only once + if lib_name in seen: + continue + + y = self.name_to_obj(lib_name) + if not y: + raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name)) + y.post() + seen.add(lib_name) + + # object has ancestors to process (shared libraries): add them to the end of the list + if getattr(y, 'uselib_local', None): + lst = y.to_list(y.uselib_local) + if 'dshlib' in y.features or 'dprogram' in y.features: + lst = [x for x in lst if not 'dstaticlib' in self.name_to_obj(x).features] + tmp.extend(lst) + + # link task and flags + if getattr(y, 'link_task', None): + + link_name = y.target[y.target.rfind(os.sep) + 1:] + if 'dstaticlib' in y.features or 'dshlib' in y.features: + env.append_unique('DLINKFLAGS', env.DLIB_ST % link_name) + env.append_unique('DLINKFLAGS', env.DLIBPATH_ST % y.link_task.outputs[0].parent.bldpath(env)) + + # the order + self.link_task.set_run_after(y.link_task) + + # for the recompilation + dep_nodes = getattr(self.link_task, 'dep_nodes', []) + self.link_task.dep_nodes = dep_nodes + y.link_task.outputs + + # add ancestors uselib too - but only propagate those that have no staticlib + for v in self.to_list(y.uselib): + if not v in self.uselib: + self.uselib.insert(0, v) + + # if the library task generator provides 'export_incdirs', add to the include path + # the export_incdirs must be a list of paths relative to the other library + if getattr(y, 'export_incdirs', None): + for x in self.to_list(y.export_incdirs): + node = y.path.find_dir(x) + if not node: + raise Utils.WafError('object %r: invalid folder %r in export_incdirs' % (y.target, x)) + 
@feature('d')
@after('apply_core')
def apply_d_vars(self):
	"""Translate the task generator attributes (importpaths, libpaths,
	libs, dflags) and the uselib variables into the DFLAGS,
	_DIMPORTFLAGS and DLINKFLAGS command-line variables.

	BUG FIX: the per-feature DFLAGS lookup previously discarded the
	result of x.lstrip('d') (a no-op statement), so it queried e.g.
	'D_dshlib_DFLAGS' and never matched the 'D_shlib_DFLAGS' variables
	set by the compiler tools (cf. add_shlib_d_flags in this file,
	which reads 'D_shlib_LINKFLAGS' - same un-prefixed naming).
	"""
	env = self.env
	dpath_st = env['DPATH_ST']
	lib_st = env['DLIB_ST']
	libpath_st = env['DLIBPATH_ST']

	importpaths = self.to_list(self.importpaths)
	libpaths = []
	libs = []
	uselib = self.to_list(self.uselib)

	# per-uselib compiler flags
	for i in uselib:
		if env['DFLAGS_' + i]:
			env.append_unique('DFLAGS', env['DFLAGS_' + i])

	# per-target-kind compiler flags (e.g. -fPIC for shared libs)
	for x in self.features:
		if not x in ['dprogram', 'dstaticlib', 'dshlib']:
			continue
		kind = x.lstrip('d') # 'dshlib' -> 'shlib', etc. (the fix: keep the result)
		d_kind_dflags = env['D_' + kind + '_DFLAGS']
		if d_kind_dflags:
			env.append_unique('DFLAGS', d_kind_dflags)

	# add import paths from the uselib variables
	for i in uselib:
		if env['DPATH_' + i]:
			for entry in self.to_list(env['DPATH_' + i]):
				if not entry in importpaths:
					importpaths.append(entry)

	# now process the import paths: absolute paths are used as-is,
	# relative ones are resolved against the source tree
	for path in importpaths:
		if os.path.isabs(path):
			env.append_unique('_DIMPORTFLAGS', dpath_st % path)
		else:
			node = self.path.find_dir(path)
			self.env.append_unique('INC_PATHS', node)
			env.append_unique('_DIMPORTFLAGS', dpath_st % node.srcpath(env))
			env.append_unique('_DIMPORTFLAGS', dpath_st % node.bldpath(env))

	# add library paths from the uselib variables
	for i in uselib:
		if env['LIBPATH_' + i]:
			for entry in self.to_list(env['LIBPATH_' + i]):
				if not entry in libpaths:
					libpaths.append(entry)
	libpaths = self.to_list(self.libpaths) + libpaths

	# now process the library paths
	# apply same path manipulation as used with import paths
	# NOTE(review): relative paths go through find_resource although they
	# name directories - confirm this is intended vs. find_dir
	for path in libpaths:
		if not os.path.isabs(path):
			node = self.path.find_resource(path)
			if not node:
				raise Utils.WafError('could not find libpath %r from %r' % (path, self))
			path = node.abspath(self.env)

		env.append_unique('DLINKFLAGS', libpath_st % path)

	# add libraries from the uselib variables, then the explicit ones
	for i in uselib:
		if env['LIB_' + i]:
			for entry in self.to_list(env['LIB_' + i]):
				if not entry in libs:
					libs.append(entry)
	libs.extend(self.to_list(self.libs))

	# process user flags
	for flag in self.to_list(self.dflags):
		env.append_unique('DFLAGS', flag)

	# now process the libraries
	for lib in libs:
		env.append_unique('DLINKFLAGS', lib_st % lib)

	# add linker flags
	for i in uselib:
		dlinkflags = env['DLINKFLAGS_' + i]
		if dlinkflags:
			for linkflag in dlinkflags:
				env.append_unique('DLINKFLAGS', linkflag)
**kw) + cls.exec_command = exec_command + +cls = Task.simple_task_type('d', d_str, 'GREEN', before='static_link d_link', shell=False) +cls.scan = scan +override_exec(cls) + +cls = Task.simple_task_type('d_with_header', d_with_header_str, 'GREEN', before='static_link d_link', shell=False) +override_exec(cls) + +cls = Task.simple_task_type('d_link', link_str, color='YELLOW', shell=False) +override_exec(cls) + +# for feature request #104 +@taskgen +def generate_header(self, filename, install_path): + if not hasattr(self, 'header_lst'): self.header_lst = [] + self.meths.append('process_header') + self.header_lst.append([filename, install_path]) + +@before('apply_core') +def process_header(self): + env = self.env + for i in getattr(self, 'header_lst', []): + node = self.path.find_resource(i[0]) + + if not node: + raise Utils.WafError('file not found on d obj '+i[0]) + + task = self.create_task('d_header') + task.set_inputs(node) + task.set_outputs(node.change_ext('.di')) + +d_header_str = '${D_COMPILER} ${D_HEADER} ${SRC}' +Task.simple_task_type('d_header', d_header_str, color='BLUE', shell=False) + +@conftest +def d_platform_flags(conf): + v = conf.env + binfmt = v.DEST_BINFMT or Utils.unversioned_sys_platform_to_binary_format( + v.DEST_OS or Utils.unversioned_sys_platform()) + if binfmt == 'pe': + v['D_program_PATTERN'] = '%s.exe' + v['D_shlib_PATTERN'] = 'lib%s.dll' + v['D_staticlib_PATTERN'] = 'lib%s.a' + else: + v['D_program_PATTERN'] = '%s' + v['D_shlib_PATTERN'] = 'lib%s.so' + v['D_staticlib_PATTERN'] = 'lib%s.a' + +@conftest +def check_dlibrary(conf): + ret = conf.check_cc(features='d dprogram', fragment=DLIB, mandatory=True, compile_filename='test.d', execute=True) + conf.env.DLIBRARY = ret.strip() + +# quick test # +if __name__ == "__main__": + #Logs.verbose = 2 + + try: arg = sys.argv[1] + except IndexError: arg = "file.d" + + print("".join(filter_comments(arg))) + # TODO + paths = ['.'] + + #gruik = filter() + #gruik.start(arg) + + #code = "".join(gruik.buf) 
+ + #print "we have found the following code" + #print code + + #print "now parsing" + #print "-------------------------------------------" + """ + parser_ = d_parser() + parser_.start(arg) + + print "module: %s" % parser_.module + print "imports: ", + for imp in parser_.imports: + print imp + " ", + print +""" + diff --git a/buildtools/wafadmin/Tools/dbus.py b/buildtools/wafadmin/Tools/dbus.py new file mode 100644 index 0000000000..3179999392 --- /dev/null +++ b/buildtools/wafadmin/Tools/dbus.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Ali Sabil, 2007 + +import Task, Utils +from TaskGen import taskgen, before, after, feature + +@taskgen +def add_dbus_file(self, filename, prefix, mode): + if not hasattr(self, 'dbus_lst'): + self.dbus_lst = [] + self.meths.append('process_dbus') + self.dbus_lst.append([filename, prefix, mode]) + +@before('apply_core') +def process_dbus(self): + for filename, prefix, mode in getattr(self, 'dbus_lst', []): + node = self.path.find_resource(filename) + + if not node: + raise Utils.WafError('file not found ' + filename) + + tsk = self.create_task('dbus_binding_tool', node, node.change_ext('.h')) + + tsk.env.DBUS_BINDING_TOOL_PREFIX = prefix + tsk.env.DBUS_BINDING_TOOL_MODE = mode + +Task.simple_task_type('dbus_binding_tool', + '${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}', + color='BLUE', before='cc') + +def detect(conf): + dbus_binding_tool = conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL') + diff --git a/buildtools/wafadmin/Tools/dmd.py b/buildtools/wafadmin/Tools/dmd.py new file mode 100644 index 0000000000..9c7490844d --- /dev/null +++ b/buildtools/wafadmin/Tools/dmd.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Carlos Rafael Giani, 2007 (dv) +# Thomas Nagy, 2008 (ita) + +import sys +import Utils, ar +from Configure import conftest + +@conftest +def find_dmd(conf): + conf.find_program(['dmd', 'ldc'], 
var='D_COMPILER', mandatory=True) + +@conftest +def common_flags_ldc(conf): + v = conf.env + v['DFLAGS'] = ['-d-version=Posix'] + v['DLINKFLAGS'] = [] + v['D_shlib_DFLAGS'] = ['-relocation-model=pic'] + +@conftest +def common_flags_dmd(conf): + v = conf.env + + # _DFLAGS _DIMPORTFLAGS + + # Compiler is dmd so 'gdc' part will be ignored, just + # ensure key is there, so wscript can append flags to it + v['DFLAGS'] = ['-version=Posix'] + + v['D_SRC_F'] = '' + v['D_TGT_F'] = ['-c', '-of'] + v['DPATH_ST'] = '-I%s' # template for adding import paths + + # linker + v['D_LINKER'] = v['D_COMPILER'] + v['DLNK_SRC_F'] = '' + v['DLNK_TGT_F'] = '-of' + + v['DLIB_ST'] = '-L-l%s' # template for adding libs + v['DLIBPATH_ST'] = '-L-L%s' # template for adding libpaths + + # linker debug levels + v['DFLAGS_OPTIMIZED'] = ['-O'] + v['DFLAGS_DEBUG'] = ['-g', '-debug'] + v['DFLAGS_ULTRADEBUG'] = ['-g', '-debug'] + v['DLINKFLAGS'] = ['-quiet'] + + v['D_shlib_DFLAGS'] = ['-fPIC'] + v['D_shlib_LINKFLAGS'] = ['-L-shared'] + + v['DHEADER_ext'] = '.di' + v['D_HDR_F'] = ['-H', '-Hf'] + +def detect(conf): + conf.find_dmd() + conf.check_tool('ar') + conf.check_tool('d') + conf.common_flags_dmd() + conf.d_platform_flags() + + if conf.env.D_COMPILER.find('ldc') > -1: + conf.common_flags_ldc() + diff --git a/buildtools/wafadmin/Tools/flex.py b/buildtools/wafadmin/Tools/flex.py new file mode 100644 index 0000000000..5ce9f2210e --- /dev/null +++ b/buildtools/wafadmin/Tools/flex.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python +# encoding: utf-8 +# John O'Meara, 2006 +# Thomas Nagy, 2006-2008 + +"Flex processing" + +import TaskGen + +def decide_ext(self, node): + if 'cxx' in self.features: return '.lex.cc' + else: return '.lex.c' + +TaskGen.declare_chain( + name = 'flex', + rule = '${FLEX} -o${TGT} ${FLEXFLAGS} ${SRC}', + ext_in = '.l', + ext_out = '.c .cxx', + decider = decide_ext +) + +def detect(conf): + conf.find_program('flex', var='FLEX', mandatory=True) + conf.env['FLEXFLAGS'] = '' + diff --git 
a/buildtools/wafadmin/Tools/gas.py b/buildtools/wafadmin/Tools/gas.py new file mode 100644 index 0000000000..c983b0a395 --- /dev/null +++ b/buildtools/wafadmin/Tools/gas.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2008 (ita) + +"as and gas" + +import os, sys +import Task +from TaskGen import extension, taskgen, after, before + +EXT_ASM = ['.s', '.S', '.asm', '.ASM', '.spp', '.SPP'] + +as_str = '${AS} ${ASFLAGS} ${_ASINCFLAGS} ${SRC} -o ${TGT}' +Task.simple_task_type('asm', as_str, 'PINK', ext_out='.o', shell=False) + +@extension(EXT_ASM) +def asm_hook(self, node): + # create the compilation task: cpp or cc + try: obj_ext = self.obj_ext + except AttributeError: obj_ext = '_%d.o' % self.idx + + task = self.create_task('asm', node, node.change_ext(obj_ext)) + self.compiled_tasks.append(task) + self.meths.append('asm_incflags') + +@after('apply_obj_vars_cc') +@after('apply_obj_vars_cxx') +@before('apply_link') +def asm_incflags(self): + self.env.append_value('_ASINCFLAGS', self.env.ASINCFLAGS) + var = ('cxx' in self.features) and 'CXX' or 'CC' + self.env.append_value('_ASINCFLAGS', self.env['_%sINCFLAGS' % var]) + +def detect(conf): + conf.find_program(['gas', 'as'], var='AS') + if not conf.env.AS: conf.env.AS = conf.env.CC + #conf.env.ASFLAGS = ['-c'] <- may be necesary for .S files + diff --git a/buildtools/wafadmin/Tools/gcc.py b/buildtools/wafadmin/Tools/gcc.py new file mode 100644 index 0000000000..420b44fd8f --- /dev/null +++ b/buildtools/wafadmin/Tools/gcc.py @@ -0,0 +1,135 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006-2008 (ita) +# Ralf Habacker, 2006 (rh) +# Yinon Ehrlich, 2009 + +import os, sys +import Configure, Options, Utils +import ccroot, ar +from Configure import conftest + +@conftest +def find_gcc(conf): + cc = conf.find_program(['gcc', 'cc'], var='CC', mandatory=True) + cc = conf.cmd_to_list(cc) + ccroot.get_cc_version(conf, cc, gcc=True) + conf.env.CC_NAME = 'gcc' + conf.env.CC = cc + +@conftest 
+def gcc_common_flags(conf): + v = conf.env + + # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS + + v['CCFLAGS_DEBUG'] = ['-g'] + + v['CCFLAGS_RELEASE'] = ['-O2'] + + v['CC_SRC_F'] = '' + v['CC_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD + v['CPPPATH_ST'] = '-I%s' # template for adding include paths + + # linker + if not v['LINK_CC']: v['LINK_CC'] = v['CC'] + v['CCLNK_SRC_F'] = '' + v['CCLNK_TGT_F'] = ['-o', ''] # shell hack for -MD + + v['LIB_ST'] = '-l%s' # template for adding libs + v['LIBPATH_ST'] = '-L%s' # template for adding libpaths + v['STATICLIB_ST'] = '-l%s' + v['STATICLIBPATH_ST'] = '-L%s' + v['RPATH_ST'] = '-Wl,-rpath,%s' + v['CCDEFINES_ST'] = '-D%s' + + v['SONAME_ST'] = '-Wl,-h,%s' + v['SHLIB_MARKER'] = '-Wl,-Bdynamic' + v['STATICLIB_MARKER'] = '-Wl,-Bstatic' + v['FULLSTATIC_MARKER'] = '-static' + + # program + v['program_PATTERN'] = '%s' + + # shared library + v['shlib_CCFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC aleady defines the __PIC__ macro + v['shlib_LINKFLAGS'] = ['-shared'] + v['shlib_PATTERN'] = 'lib%s.so' + + # static lib + v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic'] + v['staticlib_PATTERN'] = 'lib%s.a' + + # osx stuff + v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup'] + v['CCFLAGS_MACBUNDLE'] = ['-fPIC'] + v['macbundle_PATTERN'] = '%s.bundle' + +@conftest +def gcc_modifier_win32(conf): + v = conf.env + v['program_PATTERN'] = '%s.exe' + + v['shlib_PATTERN'] = '%s.dll' + v['implib_PATTERN'] = 'lib%s.dll.a' + v['IMPLIB_ST'] = '-Wl,--out-implib,%s' + + dest_arch = v['DEST_CPU'] + v['shlib_CCFLAGS'] = ['-DPIC'] + + v.append_value('shlib_CCFLAGS', '-DDLL_EXPORT') # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea + + # Auto-import is enabled by default even without this option, + # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages + # that the linker emits otherwise. 
+ v.append_value('LINKFLAGS', '-Wl,--enable-auto-import') + +@conftest +def gcc_modifier_cygwin(conf): + gcc_modifier_win32(conf) + v = conf.env + v['shlib_PATTERN'] = 'cyg%s.dll' + v.append_value('shlib_LINKFLAGS', '-Wl,--enable-auto-image-base') + +@conftest +def gcc_modifier_darwin(conf): + v = conf.env + v['shlib_CCFLAGS'] = ['-fPIC', '-compatibility_version', '1', '-current_version', '1'] + v['shlib_LINKFLAGS'] = ['-dynamiclib'] + v['shlib_PATTERN'] = 'lib%s.dylib' + + v['staticlib_LINKFLAGS'] = [] + + v['SHLIB_MARKER'] = '' + v['STATICLIB_MARKER'] = '' + v['SONAME_ST'] = '' + +@conftest +def gcc_modifier_aix(conf): + v = conf.env + v['program_LINKFLAGS'] = ['-Wl,-brtl'] + + v['shlib_LINKFLAGS'] = ['-shared','-Wl,-brtl,-bexpfull'] + + v['SHLIB_MARKER'] = '' + +@conftest +def gcc_modifier_platform(conf): + # * set configurations specific for a platform. + # * the destination platform is detected automatically by looking at the macros the compiler predefines, + # and if it's not recognised, it fallbacks to sys.platform. 
+ dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform() + gcc_modifier_func = globals().get('gcc_modifier_' + dest_os) + if gcc_modifier_func: + gcc_modifier_func(conf) + +def detect(conf): + conf.find_gcc() + conf.find_cpp() + conf.find_ar() + conf.gcc_common_flags() + conf.gcc_modifier_platform() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() + diff --git a/buildtools/wafadmin/Tools/gdc.py b/buildtools/wafadmin/Tools/gdc.py new file mode 100644 index 0000000000..4d2a3216ed --- /dev/null +++ b/buildtools/wafadmin/Tools/gdc.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Carlos Rafael Giani, 2007 (dv) + +import sys +import Utils, ar +from Configure import conftest + +@conftest +def find_gdc(conf): + conf.find_program('gdc', var='D_COMPILER', mandatory=True) + +@conftest +def common_flags_gdc(conf): + v = conf.env + + # _DFLAGS _DIMPORTFLAGS + + # for mory info about the meaning of this dict see dmd.py + v['DFLAGS'] = [] + + v['D_SRC_F'] = '' + v['D_TGT_F'] = ['-c', '-o', ''] + v['DPATH_ST'] = '-I%s' # template for adding import paths + + # linker + v['D_LINKER'] = v['D_COMPILER'] + v['DLNK_SRC_F'] = '' + v['DLNK_TGT_F'] = ['-o', ''] + + v['DLIB_ST'] = '-l%s' # template for adding libs + v['DLIBPATH_ST'] = '-L%s' # template for adding libpaths + + # debug levels + v['DLINKFLAGS'] = [] + v['DFLAGS_OPTIMIZED'] = ['-O3'] + v['DFLAGS_DEBUG'] = ['-O0'] + v['DFLAGS_ULTRADEBUG'] = ['-O0'] + + v['D_shlib_DFLAGS'] = [] + v['D_shlib_LINKFLAGS'] = ['-shared'] + + v['DHEADER_ext'] = '.di' + v['D_HDR_F'] = '-fintfc -fintfc-file=' + +def detect(conf): + conf.find_gdc() + conf.check_tool('ar') + conf.check_tool('d') + conf.common_flags_gdc() + conf.d_platform_flags() + diff --git a/buildtools/wafadmin/Tools/glib2.py b/buildtools/wafadmin/Tools/glib2.py new file mode 100644 index 0000000000..042d612cbe --- /dev/null +++ b/buildtools/wafadmin/Tools/glib2.py @@ -0,0 +1,164 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006-2008 (ita) + +"GLib2 support" + +import Task, Utils +from TaskGen import taskgen, before, after, feature + +# +# glib-genmarshal +# + +@taskgen +def add_marshal_file(self, filename, prefix): + if not hasattr(self, 'marshal_list'): + self.marshal_list = [] + self.meths.append('process_marshal') + self.marshal_list.append((filename, prefix)) + +@before('apply_core') +def process_marshal(self): + for f, prefix in getattr(self, 'marshal_list', []): + node = self.path.find_resource(f) + + if not node: + raise Utils.WafError('file not found %r' % f) + + h_node = node.change_ext('.h') + c_node = node.change_ext('.c') + + task = self.create_task('glib_genmarshal', node, [h_node, c_node]) + task.env.GLIB_GENMARSHAL_PREFIX = prefix + self.allnodes.append(c_node) + +def genmarshal_func(self): + + bld = self.inputs[0].__class__.bld + + get = self.env.get_flat + cmd1 = "%s %s --prefix=%s --header > %s" % ( + get('GLIB_GENMARSHAL'), + self.inputs[0].srcpath(self.env), + get('GLIB_GENMARSHAL_PREFIX'), + self.outputs[0].abspath(self.env) + ) + + ret = bld.exec_command(cmd1) + if ret: return ret + + #print self.outputs[1].abspath(self.env) + f = open(self.outputs[1].abspath(self.env), 'wb') + c = '''#include "%s"\n''' % self.outputs[0].name + f.write(c) + f.close() + + cmd2 = "%s %s --prefix=%s --body >> %s" % ( + get('GLIB_GENMARSHAL'), + self.inputs[0].srcpath(self.env), + get('GLIB_GENMARSHAL_PREFIX'), + self.outputs[1].abspath(self.env) + ) + ret = Utils.exec_command(cmd2) + if ret: return ret + +# +# glib-mkenums +# + +@taskgen +def add_enums_from_template(self, source='', target='', template='', comments=''): + if not hasattr(self, 'enums_list'): + self.enums_list = [] + self.meths.append('process_enums') + self.enums_list.append({'source': source, + 'target': target, + 'template': template, + 'file-head': '', + 'file-prod': '', + 'file-tail': '', + 'enum-prod': '', + 'value-head': '', + 'value-prod': '', + 
'value-tail': '', + 'comments': comments}) + +@taskgen +def add_enums(self, source='', target='', + file_head='', file_prod='', file_tail='', enum_prod='', + value_head='', value_prod='', value_tail='', comments=''): + if not hasattr(self, 'enums_list'): + self.enums_list = [] + self.meths.append('process_enums') + self.enums_list.append({'source': source, + 'template': '', + 'target': target, + 'file-head': file_head, + 'file-prod': file_prod, + 'file-tail': file_tail, + 'enum-prod': enum_prod, + 'value-head': value_head, + 'value-prod': value_prod, + 'value-tail': value_tail, + 'comments': comments}) + +@before('apply_core') +def process_enums(self): + for enum in getattr(self, 'enums_list', []): + task = self.create_task('glib_mkenums') + env = task.env + + inputs = [] + + # process the source + source_list = self.to_list(enum['source']) + if not source_list: + raise Utils.WafError('missing source ' + str(enum)) + source_list = [self.path.find_resource(k) for k in source_list] + inputs += source_list + env['GLIB_MKENUMS_SOURCE'] = [k.srcpath(env) for k in source_list] + + # find the target + if not enum['target']: + raise Utils.WafError('missing target ' + str(enum)) + tgt_node = self.path.find_or_declare(enum['target']) + if tgt_node.name.endswith('.c'): + self.allnodes.append(tgt_node) + env['GLIB_MKENUMS_TARGET'] = tgt_node.abspath(env) + + + options = [] + + if enum['template']: # template, if provided + template_node = self.path.find_resource(enum['template']) + options.append('--template %s' % (template_node.abspath(env))) + inputs.append(template_node) + params = {'file-head' : '--fhead', + 'file-prod' : '--fprod', + 'file-tail' : '--ftail', + 'enum-prod' : '--eprod', + 'value-head' : '--vhead', + 'value-prod' : '--vprod', + 'value-tail' : '--vtail', + 'comments': '--comments'} + for param, option in params.iteritems(): + if enum[param]: + options.append('%s %r' % (option, enum[param])) + + env['GLIB_MKENUMS_OPTIONS'] = ' '.join(options) + + # update the 
task instance + task.set_inputs(inputs) + task.set_outputs(tgt_node) + +Task.task_type_from_func('glib_genmarshal', func=genmarshal_func, vars=['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL'], + color='BLUE', before='cc cxx') +Task.simple_task_type('glib_mkenums', + '${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}', + color='PINK', before='cc cxx') + +def detect(conf): + glib_genmarshal = conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL') + mk_enums_tool = conf.find_program('glib-mkenums', var='GLIB_MKENUMS') + diff --git a/buildtools/wafadmin/Tools/gnome.py b/buildtools/wafadmin/Tools/gnome.py new file mode 100644 index 0000000000..c098a41bb4 --- /dev/null +++ b/buildtools/wafadmin/Tools/gnome.py @@ -0,0 +1,223 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006-2008 (ita) + +"Gnome support" + +import os, re +import TaskGen, Utils, Runner, Task, Build, Options, Logs +import cc +from Logs import error +from TaskGen import taskgen, before, after, feature + +n1_regexp = re.compile('<refentrytitle>(.*)</refentrytitle>', re.M) +n2_regexp = re.compile('<manvolnum>(.*)</manvolnum>', re.M) + +def postinstall_schemas(prog_name): + if Build.bld.is_install: + dir = Build.bld.get_install_path('${PREFIX}/etc/gconf/schemas/%s.schemas' % prog_name) + if not Options.options.destdir: + # add the gconf schema + Utils.pprint('YELLOW', 'Installing GConf schema') + command = 'gconftool-2 --install-schema-file=%s 1> /dev/null' % dir + ret = Utils.exec_command(command) + else: + Utils.pprint('YELLOW', 'GConf schema not installed. 
After install, run this:') + Utils.pprint('YELLOW', 'gconftool-2 --install-schema-file=%s' % dir) + +def postinstall_icons(): + dir = Build.bld.get_install_path('${DATADIR}/icons/hicolor') + if Build.bld.is_install: + if not Options.options.destdir: + # update the pixmap cache directory + Utils.pprint('YELLOW', "Updating Gtk icon cache.") + command = 'gtk-update-icon-cache -q -f -t %s' % dir + ret = Utils.exec_command(command) + else: + Utils.pprint('YELLOW', 'Icon cache not updated. After install, run this:') + Utils.pprint('YELLOW', 'gtk-update-icon-cache -q -f -t %s' % dir) + +def postinstall_scrollkeeper(prog_name): + if Build.bld.is_install: + # now the scrollkeeper update if we can write to the log file + if os.access('/var/log/scrollkeeper.log', os.W_OK): + dir1 = Build.bld.get_install_path('${PREFIX}/var/scrollkeeper') + dir2 = Build.bld.get_install_path('${DATADIR}/omf/%s' % prog_name) + command = 'scrollkeeper-update -q -p %s -o %s' % (dir1, dir2) + ret = Utils.exec_command(command) + +def postinstall(prog_name='myapp', schemas=1, icons=1, scrollkeeper=1): + if schemas: postinstall_schemas(prog_name) + if icons: postinstall_icons() + if scrollkeeper: postinstall_scrollkeeper(prog_name) + +# OBSOLETE +class gnome_doc_taskgen(TaskGen.task_gen): + def __init__(self, *k, **kw): + TaskGen.task_gen.__init__(self, *k, **kw) + +@feature('gnome_doc') +def init_gnome_doc(self): + self.default_install_path = '${PREFIX}/share' + +@feature('gnome_doc') +@after('init_gnome_doc') +def apply_gnome_doc(self): + self.env['APPNAME'] = self.doc_module + lst = self.to_list(self.doc_linguas) + bld = self.bld + lst.append('C') + + for x in lst: + if not x == 'C': + tsk = self.create_task('xml2po') + node = self.path.find_resource(x+'/'+x+'.po') + src = self.path.find_resource('C/%s.xml' % self.doc_module) + out = self.path.find_or_declare('%s/%s.xml' % (x, self.doc_module)) + tsk.set_inputs([node, src]) + tsk.set_outputs(out) + else: + out = self.path.find_resource('%s/%s.xml' 
% (x, self.doc_module)) + + tsk2 = self.create_task('xsltproc2po') + out2 = self.path.find_or_declare('%s/%s-%s.omf' % (x, self.doc_module, x)) + tsk2.set_outputs(out2) + node = self.path.find_resource(self.doc_module+".omf.in") + tsk2.inputs = [node, out] + + tsk2.run_after.append(tsk) + + if bld.is_install: + path = self.install_path + '/gnome/help/%s/%s' % (self.doc_module, x) + bld.install_files(self.install_path + '/omf', out2, env=self.env) + for y in self.to_list(self.doc_figures): + try: + os.stat(self.path.abspath() + '/' + x + '/' + y) + bld.install_as(path + '/' + y, self.path.abspath() + '/' + x + '/' + y) + except: + bld.install_as(path + '/' + y, self.path.abspath() + '/C/' + y) + bld.install_as(path + '/%s.xml' % self.doc_module, out.abspath(self.env)) + if x == 'C': + xmls = self.to_list(self.doc_includes) + xmls.append(self.doc_entities) + for z in xmls: + out = self.path.find_resource('%s/%s' % (x, z)) + bld.install_as(path + '/%s' % z, out.abspath(self.env)) + +# OBSOLETE +class xml_to_taskgen(TaskGen.task_gen): + def __init__(self, *k, **kw): + TaskGen.task_gen.__init__(self, *k, **kw) + +@feature('xml_to') +def init_xml_to(self): + Utils.def_attrs(self, + source = 'xmlfile', + xslt = 'xlsltfile', + target = 'hey', + default_install_path = '${PREFIX}', + task_created = None) + +@feature('xml_to') +@after('init_xml_to') +def apply_xml_to(self): + xmlfile = self.path.find_resource(self.source) + xsltfile = self.path.find_resource(self.xslt) + tsk = self.create_task('xmlto', [xmlfile, xsltfile], xmlfile.change_ext('html')) + tsk.install_path = self.install_path + +def sgml_scan(self): + node = self.inputs[0] + + env = self.env + variant = node.variant(env) + + fi = open(node.abspath(env), 'r') + content = fi.read() + fi.close() + + # we should use a sgml parser :-/ + name = n1_regexp.findall(content)[0] + num = n2_regexp.findall(content)[0] + + doc_name = name+'.'+num + + if not self.outputs: + self.outputs = 
[self.generator.path.find_or_declare(doc_name)] + + return ([], [doc_name]) + +class gnome_sgml2man_taskgen(TaskGen.task_gen): + def __init__(self, *k, **kw): + TaskGen.task_gen.__init__(self, *k, **kw) + +@feature('gnome_sgml2man') +def apply_gnome_sgml2man(self): + """ + we could make it more complicated, but for now we just scan the document each time + """ + assert(getattr(self, 'appname', None)) + + def install_result(task): + out = task.outputs[0] + name = out.name + ext = name[-1] + env = task.env + self.bld.install_files('${DATADIR}/man/man%s/' % ext, out, env) + + self.bld.rescan(self.path) + for name in self.bld.cache_dir_contents[self.path.id]: + base, ext = os.path.splitext(name) + if ext != '.sgml': continue + + task = self.create_task('sgml2man') + task.set_inputs(self.path.find_resource(name)) + task.task_generator = self + if self.bld.is_install: task.install = install_result + # no outputs, the scanner does it + # no caching for now, this is not a time-critical feature + # in the future the scanner can be used to do more things (find dependencies, etc) + task.scan() + +cls = Task.simple_task_type('sgml2man', '${SGML2MAN} -o ${TGT[0].bld_dir(env)} ${SRC} > /dev/null', color='BLUE') +cls.scan = sgml_scan +cls.quiet = 1 + +Task.simple_task_type('xmlto', '${XMLTO} html -m ${SRC[1].abspath(env)} ${SRC[0].abspath(env)}') + +Task.simple_task_type('xml2po', '${XML2PO} ${XML2POFLAGS} ${SRC} > ${TGT}', color='BLUE') + +# how do you expect someone to understand this?! 
+xslt_magic = """${XSLTPROC2PO} -o ${TGT[0].abspath(env)} \ +--stringparam db2omf.basename ${APPNAME} \ +--stringparam db2omf.format docbook \ +--stringparam db2omf.lang ${TGT[0].abspath(env)[:-4].split('-')[-1]} \ +--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \ +--stringparam db2omf.omf_dir ${PREFIX}/share/omf \ +--stringparam db2omf.help_dir ${PREFIX}/share/gnome/help \ +--stringparam db2omf.omf_in ${SRC[0].abspath(env)} \ +--stringparam db2omf.scrollkeeper_cl ${SCROLLKEEPER_DATADIR}/Templates/C/scrollkeeper_cl.xml \ +${DB2OMF} ${SRC[1].abspath(env)}""" + +#--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \ +Task.simple_task_type('xsltproc2po', xslt_magic, color='BLUE') + +def detect(conf): + conf.check_tool('gnu_dirs glib2 dbus') + sgml2man = conf.find_program('docbook2man', var='SGML2MAN') + + def getstr(varname): + return getattr(Options.options, varname, '') + + # addefine also sets the variable to the env + conf.define('GNOMELOCALEDIR', os.path.join(conf.env['DATADIR'], 'locale')) + + xml2po = conf.find_program('xml2po', var='XML2PO') + xsltproc2po = conf.find_program('xsltproc', var='XSLTPROC2PO') + conf.env['XML2POFLAGS'] = '-e -p' + conf.env['SCROLLKEEPER_DATADIR'] = Utils.cmd_output("scrollkeeper-config --pkgdatadir", silent=1).strip() + conf.env['DB2OMF'] = Utils.cmd_output("/usr/bin/pkg-config --variable db2omf gnome-doc-utils", silent=1).strip() + +def set_options(opt): + opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]') + diff --git a/buildtools/wafadmin/Tools/gnu_dirs.py b/buildtools/wafadmin/Tools/gnu_dirs.py new file mode 100644 index 0000000000..856e4a7204 --- /dev/null +++ b/buildtools/wafadmin/Tools/gnu_dirs.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Ali Sabil, 2007 + +""" +To use this module do not forget to call +opt.tool_options('gnu_dirs') +AND +conf.check_tool('gnu_dirs') + +Add options for the standard GNU directories, this 
tool will add the options +found in autotools, and will update the environment with the following +installation variables: + + * PREFIX : architecture-independent files [/usr/local] + * EXEC_PREFIX : architecture-dependent files [PREFIX] + * BINDIR : user executables [EXEC_PREFIX/bin] + * SBINDIR : user executables [EXEC_PREFIX/sbin] + * LIBEXECDIR : program executables [EXEC_PREFIX/libexec] + * SYSCONFDIR : read-only single-machine data [PREFIX/etc] + * SHAREDSTATEDIR : modifiable architecture-independent data [PREFIX/com] + * LOCALSTATEDIR : modifiable single-machine data [PREFIX/var] + * LIBDIR : object code libraries [EXEC_PREFIX/lib] + * INCLUDEDIR : C header files [PREFIX/include] + * OLDINCLUDEDIR : C header files for non-gcc [/usr/include] + * DATAROOTDIR : read-only arch.-independent data root [PREFIX/share] + * DATADIR : read-only architecture-independent data [DATAROOTDIR] + * INFODIR : info documentation [DATAROOTDIR/info] + * LOCALEDIR : locale-dependent data [DATAROOTDIR/locale] + * MANDIR : man documentation [DATAROOTDIR/man] + * DOCDIR : documentation root [DATAROOTDIR/doc/telepathy-glib] + * HTMLDIR : html documentation [DOCDIR] + * DVIDIR : dvi documentation [DOCDIR] + * PDFDIR : pdf documentation [DOCDIR] + * PSDIR : ps documentation [DOCDIR] +""" + +import Utils, Options + +_options = [x.split(', ') for x in ''' +bindir, user executables, ${EXEC_PREFIX}/bin +sbindir, system admin executables, ${EXEC_PREFIX}/sbin +libexecdir, program executables, ${EXEC_PREFIX}/libexec +sysconfdir, read-only single-machine data, ${PREFIX}/etc +sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com +localstatedir, modifiable single-machine data, ${PREFIX}/var +libdir, object code libraries, ${EXEC_PREFIX}/lib +includedir, C header files, ${PREFIX}/include +oldincludedir, C header files for non-gcc, /usr/include +datarootdir, read-only arch.-independent data root, ${PREFIX}/share +datadir, read-only architecture-independent data, ${DATAROOTDIR} 
+infodir, info documentation, ${DATAROOTDIR}/info +localedir, locale-dependent data, ${DATAROOTDIR}/locale +mandir, man documentation, ${DATAROOTDIR}/man +docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE} +htmldir, html documentation, ${DOCDIR} +dvidir, dvi documentation, ${DOCDIR} +pdfdir, pdf documentation, ${DOCDIR} +psdir, ps documentation, ${DOCDIR} +'''.split('\n') if x] + +def detect(conf): + def get_param(varname, default): + return getattr(Options.options, varname, '') or default + + env = conf.env + env['EXEC_PREFIX'] = get_param('EXEC_PREFIX', env['PREFIX']) + env['PACKAGE'] = Utils.g_module.APPNAME + + complete = False + iter = 0 + while not complete and iter < len(_options) + 1: + iter += 1 + complete = True + for name, help, default in _options: + name = name.upper() + if not env[name]: + try: + env[name] = Utils.subst_vars(get_param(name, default), env) + except TypeError: + complete = False + if not complete: + lst = [name for name, _, _ in _options if not env[name.upper()]] + raise Utils.WafError('Variable substitution failure %r' % lst) + +def set_options(opt): + + inst_dir = opt.add_option_group('Installation directories', +'By default, "waf install" will put the files in\ + "/usr/local/bin", "/usr/local/lib" etc. 
An installation prefix other\ + than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"') + + for k in ('--prefix', '--destdir'): + option = opt.parser.get_option(k) + if option: + opt.parser.remove_option(k) + inst_dir.add_option(option) + + inst_dir.add_option('--exec-prefix', + help = 'installation prefix [Default: ${PREFIX}]', + default = '', + dest = 'EXEC_PREFIX') + + dirs_options = opt.add_option_group('Pre-defined installation directories', '') + + for name, help, default in _options: + option_name = '--' + name + str_default = default + str_help = '%s [Default: %s]' % (help, str_default) + dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper()) + diff --git a/buildtools/wafadmin/Tools/gob2.py b/buildtools/wafadmin/Tools/gob2.py new file mode 100644 index 0000000000..00aaa32acd --- /dev/null +++ b/buildtools/wafadmin/Tools/gob2.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Ali Sabil, 2007 + +import TaskGen + +TaskGen.declare_chain( + name = 'gob2', + rule = '${GOB2} -o ${TGT[0].bld_dir(env)} ${GOB2FLAGS} ${SRC}', + ext_in = '.gob', + ext_out = '.c' +) + +def detect(conf): + gob2 = conf.find_program('gob2', var='GOB2', mandatory=True) + conf.env['GOB2'] = gob2 + conf.env['GOB2FLAGS'] = '' + diff --git a/buildtools/wafadmin/Tools/gxx.py b/buildtools/wafadmin/Tools/gxx.py new file mode 100644 index 0000000000..8f4a0bfef0 --- /dev/null +++ b/buildtools/wafadmin/Tools/gxx.py @@ -0,0 +1,133 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006 (ita) +# Ralf Habacker, 2006 (rh) +# Yinon Ehrlich, 2009 + +import os, sys +import Configure, Options, Utils +import ccroot, ar +from Configure import conftest + +@conftest +def find_gxx(conf): + cxx = conf.find_program(['g++', 'c++'], var='CXX', mandatory=True) + cxx = conf.cmd_to_list(cxx) + ccroot.get_cc_version(conf, cxx, gcc=True) + conf.env.CXX_NAME = 'gcc' + conf.env.CXX = cxx + +@conftest +def gxx_common_flags(conf): + v = conf.env + + # 
CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS + v['CXXFLAGS_DEBUG'] = ['-g'] + v['CXXFLAGS_RELEASE'] = ['-O2'] + + v['CXX_SRC_F'] = '' + v['CXX_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD + v['CPPPATH_ST'] = '-I%s' # template for adding include paths + + # linker + if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX'] + v['CXXLNK_SRC_F'] = '' + v['CXXLNK_TGT_F'] = ['-o', ''] # shell hack for -MD + + v['LIB_ST'] = '-l%s' # template for adding libs + v['LIBPATH_ST'] = '-L%s' # template for adding libpaths + v['STATICLIB_ST'] = '-l%s' + v['STATICLIBPATH_ST'] = '-L%s' + v['RPATH_ST'] = '-Wl,-rpath,%s' + v['CXXDEFINES_ST'] = '-D%s' + + v['SONAME_ST'] = '-Wl,-h,%s' + v['SHLIB_MARKER'] = '-Wl,-Bdynamic' + v['STATICLIB_MARKER'] = '-Wl,-Bstatic' + v['FULLSTATIC_MARKER'] = '-static' + + # program + v['program_PATTERN'] = '%s' + + # shared library + v['shlib_CXXFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC aleady defines the __PIC__ macro + v['shlib_LINKFLAGS'] = ['-shared'] + v['shlib_PATTERN'] = 'lib%s.so' + + # static lib + v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic'] + v['staticlib_PATTERN'] = 'lib%s.a' + + # osx stuff + v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup'] + v['CCFLAGS_MACBUNDLE'] = ['-fPIC'] + v['macbundle_PATTERN'] = '%s.bundle' + +@conftest +def gxx_modifier_win32(conf): + v = conf.env + v['program_PATTERN'] = '%s.exe' + + v['shlib_PATTERN'] = '%s.dll' + v['implib_PATTERN'] = 'lib%s.dll.a' + v['IMPLIB_ST'] = '-Wl,--out-implib,%s' + + dest_arch = v['DEST_CPU'] + v['shlib_CXXFLAGS'] = [] + + v.append_value('shlib_CXXFLAGS', '-DDLL_EXPORT') # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea + + # Auto-import is enabled by default even without this option, + # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages + # that the linker emits otherwise. 
+ v.append_value('LINKFLAGS', '-Wl,--enable-auto-import') + +@conftest +def gxx_modifier_cygwin(conf): + gxx_modifier_win32(conf) + v = conf.env + v['shlib_PATTERN'] = 'cyg%s.dll' + v.append_value('shlib_LINKFLAGS', '-Wl,--enable-auto-image-base') + +@conftest +def gxx_modifier_darwin(conf): + v = conf.env + v['shlib_CXXFLAGS'] = ['-fPIC', '-compatibility_version', '1', '-current_version', '1'] + v['shlib_LINKFLAGS'] = ['-dynamiclib'] + v['shlib_PATTERN'] = 'lib%s.dylib' + + v['staticlib_LINKFLAGS'] = [] + + v['SHLIB_MARKER'] = '' + v['STATICLIB_MARKER'] = '' + v['SONAME_ST'] = '' + +@conftest +def gxx_modifier_aix(conf): + v = conf.env + v['program_LINKFLAGS'] = ['-Wl,-brtl'] + + v['shlib_LINKFLAGS'] = ['-shared', '-Wl,-brtl,-bexpfull'] + + v['SHLIB_MARKER'] = '' + +@conftest +def gxx_modifier_platform(conf): + # * set configurations specific for a platform. + # * the destination platform is detected automatically by looking at the macros the compiler predefines, + # and if it's not recognised, it fallbacks to sys.platform. 
+ dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform() + gxx_modifier_func = globals().get('gxx_modifier_' + dest_os) + if gxx_modifier_func: + gxx_modifier_func(conf) + +def detect(conf): + conf.find_gxx() + conf.find_cpp() + conf.find_ar() + conf.gxx_common_flags() + conf.gxx_modifier_platform() + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() + diff --git a/buildtools/wafadmin/Tools/icc.py b/buildtools/wafadmin/Tools/icc.py new file mode 100644 index 0000000000..9c9a92602c --- /dev/null +++ b/buildtools/wafadmin/Tools/icc.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Stian Selnes, 2008 +# Thomas Nagy 2009 + +import os, sys +import Configure, Options, Utils +import ccroot, ar, gcc +from Configure import conftest + +@conftest +def find_icc(conf): + if sys.platform == 'cygwin': + conf.fatal('The Intel compiler does not work on Cygwin') + + v = conf.env + cc = None + if v['CC']: cc = v['CC'] + elif 'CC' in conf.environ: cc = conf.environ['CC'] + if not cc: cc = conf.find_program('icc', var='CC') + if not cc: cc = conf.find_program('ICL', var='CC') + if not cc: conf.fatal('Intel C Compiler (icc) was not found') + cc = conf.cmd_to_list(cc) + + ccroot.get_cc_version(conf, cc, icc=True) + v['CC'] = cc + v['CC_NAME'] = 'icc' + +detect = ''' +find_icc +find_ar +gcc_common_flags +gcc_modifier_platform +cc_load_tools +cc_add_flags +link_add_flags +''' diff --git a/buildtools/wafadmin/Tools/icpc.py b/buildtools/wafadmin/Tools/icpc.py new file mode 100644 index 0000000000..7d79c57e30 --- /dev/null +++ b/buildtools/wafadmin/Tools/icpc.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy 2009 + +import os, sys +import Configure, Options, Utils +import ccroot, ar, gxx +from Configure import conftest + +@conftest +def find_icpc(conf): + if sys.platform == 'cygwin': + conf.fatal('The Intel compiler does not work on Cygwin') + + v = conf.env + cxx = None + if v['CXX']: cxx = v['CXX'] + elif 'CXX' in 
conf.environ: cxx = conf.environ['CXX'] + if not cxx: cxx = conf.find_program('icpc', var='CXX') + if not cxx: conf.fatal('Intel C++ Compiler (icpc) was not found') + cxx = conf.cmd_to_list(cxx) + + ccroot.get_cc_version(conf, cxx, icc=True) + v['CXX'] = cxx + v['CXX_NAME'] = 'icc' + +detect = ''' +find_icpc +find_ar +gxx_common_flags +gxx_modifier_platform +cxx_load_tools +cxx_add_flags +link_add_flags +''' diff --git a/buildtools/wafadmin/Tools/intltool.py b/buildtools/wafadmin/Tools/intltool.py new file mode 100644 index 0000000000..deb8f4a634 --- /dev/null +++ b/buildtools/wafadmin/Tools/intltool.py @@ -0,0 +1,139 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006 (ita) + +"intltool support" + +import os, re +import Configure, TaskGen, Task, Utils, Runner, Options, Build, config_c +from TaskGen import feature, before, taskgen +from Logs import error + +""" +Usage: + +bld(features='intltool_in', source='a.po b.po', podir='po', cache='.intlcache', flags='') + +""" + +class intltool_in_taskgen(TaskGen.task_gen): + """deprecated""" + def __init__(self, *k, **kw): + TaskGen.task_gen.__init__(self, *k, **kw) + +@before('apply_core') +@feature('intltool_in') +def iapply_intltool_in_f(self): + try: self.meths.remove('apply_core') + except ValueError: pass + + for i in self.to_list(self.source): + node = self.path.find_resource(i) + + podir = getattr(self, 'podir', 'po') + podirnode = self.path.find_dir(podir) + if not podirnode: + error("could not find the podir %r" % podir) + continue + + cache = getattr(self, 'intlcache', '.intlcache') + self.env['INTLCACHE'] = os.path.join(self.path.bldpath(self.env), podir, cache) + self.env['INTLPODIR'] = podirnode.srcpath(self.env) + self.env['INTLFLAGS'] = getattr(self, 'flags', ['-q', '-u', '-c']) + + task = self.create_task('intltool', node, node.change_ext('')) + task.install_path = self.install_path + +class intltool_po_taskgen(TaskGen.task_gen): + """deprecated""" + def __init__(self, *k, **kw): + 
TaskGen.task_gen.__init__(self, *k, **kw) + + +@feature('intltool_po') +def apply_intltool_po(self): + try: self.meths.remove('apply_core') + except ValueError: pass + + self.default_install_path = '${LOCALEDIR}' + appname = getattr(self, 'appname', 'set_your_app_name') + podir = getattr(self, 'podir', '') + + def install_translation(task): + out = task.outputs[0] + filename = out.name + (langname, ext) = os.path.splitext(filename) + inst_file = langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo' + self.bld.install_as(os.path.join(self.install_path, inst_file), out, self.env, self.chmod) + + linguas = self.path.find_resource(os.path.join(podir, 'LINGUAS')) + if linguas: + # scan LINGUAS file for locales to process + file = open(linguas.abspath()) + langs = [] + for line in file.readlines(): + # ignore lines containing comments + if not line.startswith('#'): + langs += line.split() + file.close() + re_linguas = re.compile('[-a-zA-Z_@.]+') + for lang in langs: + # Make sure that we only process lines which contain locales + if re_linguas.match(lang): + node = self.path.find_resource(os.path.join(podir, re_linguas.match(lang).group() + '.po')) + task = self.create_task('po') + task.set_inputs(node) + task.set_outputs(node.change_ext('.mo')) + if self.bld.is_install: task.install = install_translation + else: + Utils.pprint('RED', "Error no LINGUAS file found in po directory") + +Task.simple_task_type('po', '${POCOM} -o ${TGT} ${SRC}', color='BLUE', shell=False) +Task.simple_task_type('intltool', + '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}', + color='BLUE', after="cc_link cxx_link", shell=False) + +def detect(conf): + pocom = conf.find_program('msgfmt') + if not pocom: + # if msgfmt should not be mandatory, catch the thrown exception in your wscript + conf.fatal('The program msgfmt (gettext) is mandatory!') + conf.env['POCOM'] = pocom + + # NOTE: it is possible to set INTLTOOL in the environment, but it must not have spaces in it + + 
intltool = conf.find_program('intltool-merge', var='INTLTOOL') + if not intltool: + # if intltool-merge should not be mandatory, catch the thrown exception in your wscript + if Options.platform == 'win32': + perl = conf.find_program('perl', var='PERL') + if not perl: + conf.fatal('The program perl (required by intltool) could not be found') + + intltooldir = Configure.find_file('intltool-merge', os.environ['PATH'].split(os.pathsep)) + if not intltooldir: + conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!') + + conf.env['INTLTOOL'] = Utils.to_list(conf.env['PERL']) + [intltooldir + os.sep + 'intltool-merge'] + conf.check_message('intltool', '', True, ' '.join(conf.env['INTLTOOL'])) + else: + conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!') + + def getstr(varname): + return getattr(Options.options, varname, '') + + prefix = conf.env['PREFIX'] + datadir = getstr('datadir') + if not datadir: datadir = os.path.join(prefix,'share') + + conf.define('LOCALEDIR', os.path.join(datadir, 'locale')) + conf.define('DATADIR', datadir) + + if conf.env['CC'] or conf.env['CXX']: + # Define to 1 if <locale.h> is present + conf.check(header_name='locale.h') + +def set_options(opt): + opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]') + opt.add_option('--datadir', type='string', default='', dest='datadir', help='read-only application data') + diff --git a/buildtools/wafadmin/Tools/javaw.py b/buildtools/wafadmin/Tools/javaw.py new file mode 100644 index 0000000000..301ebc426b --- /dev/null +++ b/buildtools/wafadmin/Tools/javaw.py @@ -0,0 +1,255 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006-2008 (ita) + +""" +Java support + +Javac is one of the few compilers that behaves very badly: +* it outputs files where it wants to (-d is only for the package root) +* it recompiles files silently behind your back +* it outputs an undefined amount of files 
(inner classes) + +Fortunately, the convention makes it possible to use the build dir without +too many problems for the moment + +Inner classes must be located and cleaned when a problem arise, +for the moment waf does not track the production of inner classes. + +Adding all the files to a task and executing it if any of the input files +change is only annoying for the compilation times + +Compilation can be run using Jython[1] rather than regular Python. Instead of +running one of the following commands: + ./waf configure + python waf configure +You would have to run: + java -jar /path/to/jython.jar waf configure + +[1] http://www.jython.org/ +""" + +import os, re +from Configure import conf +import TaskGen, Task, Utils, Options, Build +from TaskGen import feature, before, taskgen + +class_check_source = ''' +public class Test { + public static void main(String[] argv) { + Class lib; + if (argv.length < 1) { + System.err.println("Missing argument"); + System.exit(77); + } + try { + lib = Class.forName(argv[0]); + } catch (ClassNotFoundException e) { + System.err.println("ClassNotFoundException"); + System.exit(1); + } + lib = null; + System.exit(0); + } +} +''' + +@feature('jar') +@before('apply_core') +def jar_files(self): + basedir = getattr(self, 'basedir', '.') + destfile = getattr(self, 'destfile', 'test.jar') + jaropts = getattr(self, 'jaropts', []) + jarcreate = getattr(self, 'jarcreate', 'cf') + + dir = self.path.find_dir(basedir) + if not dir: raise + + jaropts.append('-C') + jaropts.append(dir.abspath(self.env)) + jaropts.append('.') + + out = self.path.find_or_declare(destfile) + + tsk = self.create_task('jar_create') + tsk.set_outputs(out) + tsk.inputs = [x for x in dir.find_iter(src=0, bld=1) if x.id != out.id] + tsk.env['JAROPTS'] = jaropts + tsk.env['JARCREATE'] = jarcreate + +@feature('javac') +@before('apply_core') +def apply_java(self): + Utils.def_attrs(self, jarname='', jaropts='', classpath='', + sourcepath='.', srcdir='.', 
source_re='**/*.java', + jar_mf_attributes={}, jar_mf_classpath=[]) + + if getattr(self, 'source_root', None): + # old stuff + self.srcdir = self.source_root + + + nodes_lst = [] + + if not self.classpath: + if not self.env['CLASSPATH']: + self.env['CLASSPATH'] = '..' + os.pathsep + '.' + else: + self.env['CLASSPATH'] = self.classpath + + srcdir_node = self.path.find_dir(self.srcdir) + if not srcdir_node: + raise Utils.WafError('could not find srcdir %r' % self.srcdir) + + src_nodes = [x for x in srcdir_node.ant_glob(self.source_re, flat=False)] + bld_nodes = [x.change_ext('.class') for x in src_nodes] + + self.env['OUTDIR'] = [srcdir_node.bldpath(self.env)] + + tsk = self.create_task('javac') + tsk.set_inputs(src_nodes) + tsk.set_outputs(bld_nodes) + + if getattr(self, 'compat', None): + tsk.env.append_value('JAVACFLAGS', ['-source', self.compat]) + + if hasattr(self, 'sourcepath'): + fold = [self.path.find_dir(x) for x in self.to_list(self.sourcepath)] + names = os.pathsep.join([x.srcpath() for x in fold]) + else: + names = srcdir_node.srcpath() + + if names: + tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names]) + + if self.jarname: + jtsk = self.create_task('jar_create', bld_nodes, self.path.find_or_declare(self.jarname)) + jtsk.set_run_after(tsk) + + if not self.env.JAROPTS: + if self.jaropts: + self.env.JAROPTS = self.jaropts + else: + dirs = '.' 
+ self.env.JAROPTS = ['-C', ''.join(self.env['OUTDIR']), dirs] + +Task.simple_task_type('jar_create', '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}', color='GREEN', shell=False) +cls = Task.simple_task_type('javac', '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}', shell=False) +cls.color = 'BLUE' +def post_run_javac(self): + """this is for cleaning the folder + javac creates single files for inner classes + but it is not possible to know which inner classes in advance""" + + par = {} + for x in self.inputs: + par[x.parent.id] = x.parent + + inner = {} + for k in par.values(): + path = k.abspath(self.env) + lst = os.listdir(path) + + for u in lst: + if u.find('$') >= 0: + inner_class_node = k.find_or_declare(u) + inner[inner_class_node.id] = inner_class_node + + to_add = set(inner.keys()) - set([x.id for x in self.outputs]) + for x in to_add: + self.outputs.append(inner[x]) + + self.cached = True # disable the cache here - inner classes are a problem + return Task.Task.post_run(self) +cls.post_run = post_run_javac + +def detect(conf): + # If JAVA_PATH is set, we prepend it to the path list + java_path = conf.environ['PATH'].split(os.pathsep) + v = conf.env + + if 'JAVA_HOME' in conf.environ: + java_path = [os.path.join(conf.environ['JAVA_HOME'], 'bin')] + java_path + conf.env['JAVA_HOME'] = [conf.environ['JAVA_HOME']] + + for x in 'javac java jar'.split(): + conf.find_program(x, var=x.upper(), path_list=java_path) + conf.env[x.upper()] = conf.cmd_to_list(conf.env[x.upper()]) + v['JAVA_EXT'] = ['.java'] + + if 'CLASSPATH' in conf.environ: + v['CLASSPATH'] = conf.environ['CLASSPATH'] + + if not v['JAR']: conf.fatal('jar is required for making java packages') + if not v['JAVAC']: conf.fatal('javac is required for compiling java classes') + v['JARCREATE'] = 'cf' # can use cvf + +@conf +def check_java_class(self, classname, with_classpath=None): + """Check if the specified java class is installed""" + + import shutil + + javatestdir = '.waf-javatest' + + 
classpath = javatestdir + if self.env['CLASSPATH']: + classpath += os.pathsep + self.env['CLASSPATH'] + if isinstance(with_classpath, str): + classpath += os.pathsep + with_classpath + + shutil.rmtree(javatestdir, True) + os.mkdir(javatestdir) + + java_file = open(os.path.join(javatestdir, 'Test.java'), 'w') + java_file.write(class_check_source) + java_file.close() + + # Compile the source + Utils.exec_command(self.env['JAVAC'] + [os.path.join(javatestdir, 'Test.java')], shell=False) + + # Try to run the app + cmd = self.env['JAVA'] + ['-cp', classpath, 'Test', classname] + self.log.write("%s\n" % str(cmd)) + found = Utils.exec_command(cmd, shell=False, log=self.log) + + self.check_message('Java class %s' % classname, "", not found) + + shutil.rmtree(javatestdir, True) + + return found + +@conf +def check_jni_headers(conf): + """ + Check for jni headers and libraries + + On success the environment variable xxx_JAVA is added for uselib + """ + + if not conf.env.CC_NAME and not conf.env.CXX_NAME: + conf.fatal('load a compiler first (gcc, g++, ..)') + + if not conf.env.JAVA_HOME: + conf.fatal('set JAVA_HOME in the system environment') + + # jni requires the jvm + javaHome = conf.env['JAVA_HOME'][0] + + b = Build.BuildContext() + b.load_dirs(conf.srcdir, conf.blddir) + dir = b.root.find_dir(conf.env.JAVA_HOME[0] + '/include') + f = dir.ant_glob('**/(jni|jni_md).h', flat=False) + incDirs = [x.parent.abspath() for x in f] + + dir = b.root.find_dir(conf.env.JAVA_HOME[0]) + f = dir.ant_glob('**/*jvm.(so|dll)', flat=False) + libDirs = [x.parent.abspath() for x in f] or [javaHome] + + for i, d in enumerate(libDirs): + if conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm', + libpath=d, includes=incDirs, uselib_store='JAVA', uselib='JAVA'): + break + else: + conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs) + diff --git a/buildtools/wafadmin/Tools/kde4.py b/buildtools/wafadmin/Tools/kde4.py new file mode 100644 index 0000000000..f480929da6 
--- /dev/null +++ b/buildtools/wafadmin/Tools/kde4.py @@ -0,0 +1,74 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006 (ita) + +import os, sys, re +import Options, TaskGen, Task, Utils +from TaskGen import taskgen, feature, after + +class msgfmt_taskgen(TaskGen.task_gen): + def __init__(self, *k, **kw): + TaskGen.task_gen.__init__(self, *k, **kw) + +@feature('msgfmt') +def init_msgfmt(self): + #langs = '' # for example "foo/fr foo/br" + self.default_install_path = '${KDE4_LOCALE_INSTALL_DIR}' + +@feature('msgfmt') +@after('init_msgfmt') +def apply_msgfmt(self): + for lang in self.to_list(self.langs): + node = self.path.find_resource(lang+'.po') + task = self.create_task('msgfmt', node, node.change_ext('.mo')) + + if not self.bld.is_install: continue + langname = lang.split('/') + langname = langname[-1] + task.install_path = self.install_path + os.sep + langname + os.sep + 'LC_MESSAGES' + task.filename = getattr(self, 'appname', 'set_your_appname') + '.mo' + task.chmod = self.chmod + +def detect(conf): + kdeconfig = conf.find_program('kde4-config') + if not kdeconfig: + conf.fatal('we need kde4-config') + prefix = Utils.cmd_output('%s --prefix' % kdeconfig, silent=True).strip() + file = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix + try: os.stat(file) + except OSError: + file = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix + try: os.stat(file) + except OSError: conf.fatal('could not open %s' % file) + + try: + txt = Utils.readf(file) + except (OSError, IOError): + conf.fatal('could not read %s' % file) + + txt = txt.replace('\\\n', '\n') + fu = re.compile('#(.*)\n') + txt = fu.sub('', txt) + + setregexp = re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)') + found = setregexp.findall(txt) + + for (_, key, val) in found: + #print key, val + conf.env[key] = val + + # well well, i could just write an interpreter for cmake files + conf.env['LIB_KDECORE']='kdecore' + conf.env['LIB_KDEUI'] ='kdeui' + 
conf.env['LIB_KIO'] ='kio' + conf.env['LIB_KHTML'] ='khtml' + conf.env['LIB_KPARTS'] ='kparts' + + conf.env['LIBPATH_KDECORE'] = conf.env['KDE4_LIB_INSTALL_DIR'] + conf.env['CPPPATH_KDECORE'] = conf.env['KDE4_INCLUDE_INSTALL_DIR'] + conf.env.append_value('CPPPATH_KDECORE', conf.env['KDE4_INCLUDE_INSTALL_DIR']+"/KDE") + + conf.env['MSGFMT'] = conf.find_program('msgfmt') + +Task.simple_task_type('msgfmt', '${MSGFMT} ${SRC} -o ${TGT}', color='BLUE', shell=False) + diff --git a/buildtools/wafadmin/Tools/libtool.py b/buildtools/wafadmin/Tools/libtool.py new file mode 100644 index 0000000000..47fa906fcc --- /dev/null +++ b/buildtools/wafadmin/Tools/libtool.py @@ -0,0 +1,330 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Matthias Jahn, 2008, jahn matthias ath freenet punto de +# Thomas Nagy, 2008 (ita) + +import sys, re, os, optparse + +import TaskGen, Task, Utils, preproc +from Logs import error, debug, warn +from TaskGen import taskgen, after, before, feature + +REVISION="0.1.3" + +""" +if you want to use the code here, you must use something like this: +obj = obj.create(...) 
+obj.features.append("libtool") +obj.vnum = "1.2.3" # optional, but versioned libraries are common +""" + +# fake libtool files +fakelibtool_vardeps = ['CXX', 'PREFIX'] +def fakelibtool_build(task): + # Writes a .la file, used by libtool + env = task.env + dest = open(task.outputs[0].abspath(env), 'w') + sname = task.inputs[0].name + fu = dest.write + fu("# Generated by ltmain.sh - GNU libtool 1.5.18 - (pwn3d by BKsys II code name WAF)\n") + if env['vnum']: + nums = env['vnum'].split('.') + libname = task.inputs[0].name + name3 = libname+'.'+env['vnum'] + name2 = libname+'.'+nums[0] + name1 = libname + fu("dlname='%s'\n" % name2) + strn = " ".join([name3, name2, name1]) + fu("library_names='%s'\n" % (strn) ) + else: + fu("dlname='%s'\n" % sname) + fu("library_names='%s %s %s'\n" % (sname, sname, sname) ) + fu("old_library=''\n") + vars = ' '.join(env['libtoolvars']+env['LINKFLAGS']) + fu("dependency_libs='%s'\n" % vars) + fu("current=0\n") + fu("age=0\nrevision=0\ninstalled=yes\nshouldnotlink=no\n") + fu("dlopen=''\ndlpreopen=''\n") + fu("libdir='%s/lib'\n" % env['PREFIX']) + dest.close() + return 0 + +def read_la_file(path): + sp = re.compile(r'^([^=]+)=\'(.*)\'$') + dc={} + file = open(path, "r") + for line in file.readlines(): + try: + #print sp.split(line.strip()) + _, left, right, _ = sp.split(line.strip()) + dc[left]=right + except ValueError: + pass + file.close() + return dc + +@feature("libtool") +@after('apply_link') +def apply_link_libtool(self): + if self.type != 'program': + linktask = self.link_task + self.latask = self.create_task('fakelibtool', linktask.outputs, linktask.outputs[0].change_ext('.la')) + + if self.bld.is_install: + self.bld.install_files('${PREFIX}/lib', linktask.outputs[0], self.env) + +@feature("libtool") +@before('apply_core') +def apply_libtool(self): + self.env['vnum']=self.vnum + + paths=[] + libs=[] + libtool_files=[] + libtool_vars=[] + + for l in self.env['LINKFLAGS']: + if l[:2]=='-L': + paths.append(l[2:]) + elif 
l[:2]=='-l': + libs.append(l[2:]) + + for l in libs: + for p in paths: + dict = read_la_file(p+'/lib'+l+'.la') + linkflags2 = dict.get('dependency_libs', '') + for v in linkflags2.split(): + if v.endswith('.la'): + libtool_files.append(v) + libtool_vars.append(v) + continue + self.env.append_unique('LINKFLAGS', v) + break + + self.env['libtoolvars']=libtool_vars + + while libtool_files: + file = libtool_files.pop() + dict = read_la_file(file) + for v in dict['dependency_libs'].split(): + if v[-3:] == '.la': + libtool_files.append(v) + continue + self.env.append_unique('LINKFLAGS', v) + +Task.task_type_from_func('fakelibtool', vars=fakelibtool_vardeps, func=fakelibtool_build, color='BLUE', after="cc_link cxx_link static_link") + +class libtool_la_file: + def __init__ (self, la_filename): + self.__la_filename = la_filename + #remove path and .la suffix + self.linkname = str(os.path.split(la_filename)[-1])[:-3] + if self.linkname.startswith("lib"): + self.linkname = self.linkname[3:] + # The name that we can dlopen(3). + self.dlname = None + # Names of this library + self.library_names = None + # The name of the static archive. + self.old_library = None + # Libraries that this one depends upon. + self.dependency_libs = None + # Version information for libIlmImf. + self.current = None + self.age = None + self.revision = None + # Is this an already installed library? + self.installed = None + # Should we warn about portability when linking against -modules? + self.shouldnotlink = None + # Files to dlopen/dlpreopen + self.dlopen = None + self.dlpreopen = None + # Directory that this library needs to be installed in: + self.libdir = '/usr/lib' + if not self.__parse(): + raise ValueError("file %s not found!!" 
%(la_filename)) + + def __parse(self): + "Retrieve the variables from a file" + if not os.path.isfile(self.__la_filename): return 0 + la_file=open(self.__la_filename, 'r') + for line in la_file: + ln = line.strip() + if not ln: continue + if ln[0]=='#': continue + (key, value) = str(ln).split('=', 1) + key = key.strip() + value = value.strip() + if value == "no": value = False + elif value == "yes": value = True + else: + try: value = int(value) + except ValueError: value = value.strip("'") + setattr(self, key, value) + la_file.close() + return 1 + + def get_libs(self): + """return linkflags for this lib""" + libs = [] + if self.dependency_libs: + libs = str(self.dependency_libs).strip().split() + if libs == None: + libs = [] + # add la lib and libdir + libs.insert(0, "-l%s" % self.linkname.strip()) + libs.insert(0, "-L%s" % self.libdir.strip()) + return libs + + def __str__(self): + return '''\ +dlname = "%(dlname)s" +library_names = "%(library_names)s" +old_library = "%(old_library)s" +dependency_libs = "%(dependency_libs)s" +version = %(current)s.%(age)s.%(revision)s +installed = "%(installed)s" +shouldnotlink = "%(shouldnotlink)s" +dlopen = "%(dlopen)s" +dlpreopen = "%(dlpreopen)s" +libdir = "%(libdir)s"''' % self.__dict__ + +class libtool_config: + def __init__ (self, la_filename): + self.__libtool_la_file = libtool_la_file(la_filename) + tmp = self.__libtool_la_file + self.__version = [int(tmp.current), int(tmp.age), int(tmp.revision)] + self.__sub_la_files = [] + self.__sub_la_files.append(la_filename) + self.__libs = None + + def __cmp__(self, other): + """make it compareable with X.Y.Z versions (Y and Z are optional)""" + if not other: + return 1 + othervers = [int(s) for s in str(other).split(".")] + selfvers = self.__version + return cmp(selfvers, othervers) + + def __str__(self): + return "\n".join([ + str(self.__libtool_la_file), + ' '.join(self.__libtool_la_file.get_libs()), + '* New getlibs:', + ' '.join(self.get_libs()) + ]) + + def 
__get_la_libs(self, la_filename): + return libtool_la_file(la_filename).get_libs() + + def get_libs(self): + """return the complete uniqe linkflags that do not + contain .la files anymore""" + libs_list = list(self.__libtool_la_file.get_libs()) + libs_map = {} + while len(libs_list) > 0: + entry = libs_list.pop(0) + if entry: + if str(entry).endswith(".la"): + ## prevents duplicate .la checks + if entry not in self.__sub_la_files: + self.__sub_la_files.append(entry) + libs_list.extend(self.__get_la_libs(entry)) + else: + libs_map[entry]=1 + self.__libs = libs_map.keys() + return self.__libs + + def get_libs_only_L(self): + if not self.__libs: self.get_libs() + libs = self.__libs + libs = [s for s in libs if str(s).startswith('-L')] + return libs + + def get_libs_only_l(self): + if not self.__libs: self.get_libs() + libs = self.__libs + libs = [s for s in libs if str(s).startswith('-l')] + return libs + + def get_libs_only_other(self): + if not self.__libs: self.get_libs() + libs = self.__libs + libs = [s for s in libs if not(str(s).startswith('-L')or str(s).startswith('-l'))] + return libs + +def useCmdLine(): + """parse cmdline args and control build""" + usage = '''Usage: %prog [options] PathToFile.la +example: %prog --atleast-version=2.0.0 /usr/lib/libIlmImf.la +nor: %prog --libs /usr/lib/libamarok.la''' + parser = optparse.OptionParser(usage) + a = parser.add_option + a("--version", dest = "versionNumber", + action = "store_true", default = False, + help = "output version of libtool-config" + ) + a("--debug", dest = "debug", + action = "store_true", default = False, + help = "enable debug" + ) + a("--libs", dest = "libs", + action = "store_true", default = False, + help = "output all linker flags" + ) + a("--libs-only-l", dest = "libs_only_l", + action = "store_true", default = False, + help = "output -l flags" + ) + a("--libs-only-L", dest = "libs_only_L", + action = "store_true", default = False, + help = "output -L flags" + ) + a("--libs-only-other", dest = 
"libs_only_other", + action = "store_true", default = False, + help = "output other libs (e.g. -pthread)" + ) + a("--atleast-version", dest = "atleast_version", + default=None, + help = "return 0 if the module is at least version ATLEAST_VERSION" + ) + a("--exact-version", dest = "exact_version", + default=None, + help = "return 0 if the module is exactly version EXACT_VERSION" + ) + a("--max-version", dest = "max_version", + default=None, + help = "return 0 if the module is at no newer than version MAX_VERSION" + ) + + (options, args) = parser.parse_args() + if len(args) != 1 and not options.versionNumber: + parser.error("incorrect number of arguments") + if options.versionNumber: + print("libtool-config version %s" % REVISION) + return 0 + ltf = libtool_config(args[0]) + if options.debug: + print(ltf) + if options.atleast_version: + if ltf >= options.atleast_version: return 0 + sys.exit(1) + if options.exact_version: + if ltf == options.exact_version: return 0 + sys.exit(1) + if options.max_version: + if ltf <= options.max_version: return 0 + sys.exit(1) + + def p(x): + print(" ".join(x)) + if options.libs: p(ltf.get_libs()) + elif options.libs_only_l: p(ltf.get_libs_only_l()) + elif options.libs_only_L: p(ltf.get_libs_only_L()) + elif options.libs_only_other: p(ltf.get_libs_only_other()) + return 0 + +if __name__ == '__main__': + useCmdLine() + diff --git a/buildtools/wafadmin/Tools/lua.py b/buildtools/wafadmin/Tools/lua.py new file mode 100644 index 0000000000..5b181e1310 --- /dev/null +++ b/buildtools/wafadmin/Tools/lua.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Sebastian Schlingmann, 2008 +# Thomas Nagy, 2008 (ita) + +import TaskGen +from TaskGen import taskgen, feature +from Constants import * + +TaskGen.declare_chain( + name = 'luac', + rule = '${LUAC} -s -o ${TGT} ${SRC}', + ext_in = '.lua', + ext_out = '.luac', + reentrant = False, + install = 'LUADIR', # env variable +) + +@feature('lua') +def init_lua(self): + self.default_chmod = 
O755 + +def detect(conf): + conf.find_program('luac', var='LUAC', mandatory = True) + diff --git a/buildtools/wafadmin/Tools/misc.py b/buildtools/wafadmin/Tools/misc.py new file mode 100644 index 0000000000..9903ee4bd3 --- /dev/null +++ b/buildtools/wafadmin/Tools/misc.py @@ -0,0 +1,430 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006 (ita) + +""" +Custom objects: + - execute a function everytime + - copy a file somewhere else +""" + +import shutil, re, os +import TaskGen, Node, Task, Utils, Build, Constants +from TaskGen import feature, taskgen, after, before +from Logs import debug + +def copy_func(tsk): + "Make a file copy. This might be used to make other kinds of file processing (even calling a compiler is possible)" + env = tsk.env + infile = tsk.inputs[0].abspath(env) + outfile = tsk.outputs[0].abspath(env) + try: + shutil.copy2(infile, outfile) + except (OSError, IOError): + return 1 + else: + if tsk.chmod: os.chmod(outfile, tsk.chmod) + return 0 + +def action_process_file_func(tsk): + "Ask the function attached to the task to process it" + if not tsk.fun: raise Utils.WafError('task must have a function attached to it for copy_func to work!') + return tsk.fun(tsk) + +class cmd_taskgen(TaskGen.task_gen): + def __init__(self, *k, **kw): + TaskGen.task_gen.__init__(self, *k, **kw) + +@feature('cmd') +def apply_cmd(self): + "call a command everytime" + if not self.fun: raise Utils.WafError('cmdobj needs a function!') + tsk = Task.TaskBase() + tsk.fun = self.fun + tsk.env = self.env + self.tasks.append(tsk) + tsk.install_path = self.install_path + +class copy_taskgen(TaskGen.task_gen): + "By default, make a file copy, if fun is provided, fun will make the copy (or call a compiler, etc)" + def __init__(self, *k, **kw): + TaskGen.task_gen.__init__(self, *k, **kw) + +@feature('copy') +@before('apply_core') +def apply_copy(self): + Utils.def_attrs(self, fun=copy_func) + self.default_install_path = 0 + + lst = self.to_list(self.source) + 
def subst_func(tsk):
	"""Substitute ``@VAR@`` markers in a .in file.

	Reads the task input, turns every ``@NAME@`` occurrence into a
	``%(NAME)s`` placeholder, resolves the values from ``tsk.dict`` (or,
	when that dict is empty, from the task environment, trying the name
	then its upper-case form), writes the result to the task output and
	applies ``tsk.chmod`` when set.
	"""

	# raw string so the '\w' escape reaches the regex engine untouched
	m4_re = re.compile(r'@(\w+)@', re.M)

	env = tsk.env
	infile = tsk.inputs[0].abspath(env)
	outfile = tsk.outputs[0].abspath(env)

	code = Utils.readf(infile)

	# replace all % by %% to prevent errors by % signs in the input file while string formatting
	code = code.replace('%', '%%')

	s = m4_re.sub(r'%(\1)s', code)

	di = tsk.dict or {}
	if not di:
		names = m4_re.findall(code)
		for i in names:
			di[i] = env.get_flat(i) or env.get_flat(i.upper())

	# close the handle even if the formatting raises (e.g. a missing key);
	# the original leaked the descriptor on error and shadowed builtin 'file'
	outf = open(outfile, 'w')
	try:
		outf.write(s % di)
	finally:
		outf.close()
	if tsk.chmod: os.chmod(outfile, tsk.chmod)
class cmd_arg(object):
	"""command-output arguments for representing files or folders"""
	def __init__(self, name, template='%s'):
		self.name = name
		self.template = template
		self.node = None

class input_file(cmd_arg):
	def find_node(self, base_path):
		assert isinstance(base_path, Node.Node)
		self.node = base_path.find_resource(self.name)
		if self.node is None:
			# two placeholders are required: the original had a single %s
			# for a two-element tuple, raising TypeError instead of WafError
			raise Utils.WafError("Input file %s not found in %s" % (self.name, base_path))

	def get_path(self, env, absolute):
		if absolute:
			return self.template % self.node.abspath(env)
		else:
			return self.template % self.node.srcpath(env)

class output_file(cmd_arg):
	def find_node(self, base_path):
		assert isinstance(base_path, Node.Node)
		self.node = base_path.find_or_declare(self.name)
		if self.node is None:
			# same format-string fix as input_file.find_node
			raise Utils.WafError("Output file %s not found in %s" % (self.name, base_path))

	def get_path(self, env, absolute):
		if absolute:
			return self.template % self.node.abspath(env)
		else:
			return self.template % self.node.bldpath(env)

class cmd_dir_arg(cmd_arg):
	def find_node(self, base_path):
		assert isinstance(base_path, Node.Node)
		self.node = base_path.find_dir(self.name)
		if self.node is None:
			# same format-string fix as input_file.find_node
			raise Utils.WafError("Directory %s not found in %s" % (self.name, base_path))

class input_dir(cmd_dir_arg):
	def get_path(self, dummy_env, dummy_absolute):
		return self.template % self.node.abspath()

class output_dir(cmd_dir_arg):
	def get_path(self, env, dummy_absolute):
		return self.template % self.node.abspath(env)
+ return self.template % self.node.abspath(env) + + +class command_output(Task.Task): + color = "BLUE" + def __init__(self, env, command, command_node, command_args, stdin, stdout, cwd, os_env, stderr): + Task.Task.__init__(self, env, normal=1) + assert isinstance(command, (str, Node.Node)) + self.command = command + self.command_args = command_args + self.stdin = stdin + self.stdout = stdout + self.cwd = cwd + self.os_env = os_env + self.stderr = stderr + + if command_node is not None: self.dep_nodes = [command_node] + self.dep_vars = [] # additional environment variables to look + + def run(self): + task = self + #assert len(task.inputs) > 0 + + def input_path(node, template): + if task.cwd is None: + return template % node.bldpath(task.env) + else: + return template % node.abspath() + def output_path(node, template): + fun = node.abspath + if task.cwd is None: fun = node.bldpath + return template % fun(task.env) + + if isinstance(task.command, Node.Node): + argv = [input_path(task.command, '%s')] + else: + argv = [task.command] + + for arg in task.command_args: + if isinstance(arg, str): + argv.append(arg) + else: + assert isinstance(arg, cmd_arg) + argv.append(arg.get_path(task.env, (task.cwd is not None))) + + if task.stdin: + stdin = open(input_path(task.stdin, '%s')) + else: + stdin = None + + if task.stdout: + stdout = open(output_path(task.stdout, '%s'), "w") + else: + stdout = None + + if task.stderr: + stderr = open(output_path(task.stderr, '%s'), "w") + else: + stderr = None + + if task.cwd is None: + cwd = ('None (actually %r)' % os.getcwd()) + else: + cwd = repr(task.cwd) + debug("command-output: cwd=%s, stdin=%r, stdout=%r, argv=%r" % + (cwd, stdin, stdout, argv)) + + if task.os_env is None: + os_env = os.environ + else: + os_env = task.os_env + command = Utils.pproc.Popen(argv, stdin=stdin, stdout=stdout, stderr=stderr, cwd=task.cwd, env=os_env) + return command.wait() + +class cmd_output_taskgen(TaskGen.task_gen): + def __init__(self, *k, **kw): + 
TaskGen.task_gen.__init__(self, *k, **kw) + +@feature('command-output') +def init_cmd_output(self): + Utils.def_attrs(self, + stdin = None, + stdout = None, + stderr = None, + # the command to execute + command = None, + + # whether it is an external command; otherwise it is assumed + # to be an executable binary or script that lives in the + # source or build tree. + command_is_external = False, + + # extra parameters (argv) to pass to the command (excluding + # the command itself) + argv = [], + + # dependencies to other objects -> this is probably not what you want (ita) + # values must be 'task_gen' instances (not names!) + dependencies = [], + + # dependencies on env variable contents + dep_vars = [], + + # input files that are implicit, i.e. they are not + # stdin, nor are they mentioned explicitly in argv + hidden_inputs = [], + + # output files that are implicit, i.e. they are not + # stdout, nor are they mentioned explicitly in argv + hidden_outputs = [], + + # change the subprocess to this cwd (must use obj.input_dir() or output_dir() here) + cwd = None, + + # OS environment variables to pass to the subprocess + # if None, use the default environment variables unchanged + os_env = None) + +@feature('command-output') +@after('init_cmd_output') +def apply_cmd_output(self): + if self.command is None: + raise Utils.WafError("command-output missing command") + if self.command_is_external: + cmd = self.command + cmd_node = None + else: + cmd_node = self.path.find_resource(self.command) + assert cmd_node is not None, ('''Could not find command '%s' in source tree. 
+Hint: if this is an external command, +use command_is_external=True''') % (self.command,) + cmd = cmd_node + + if self.cwd is None: + cwd = None + else: + assert isinstance(cwd, CmdDirArg) + self.cwd.find_node(self.path) + + args = [] + inputs = [] + outputs = [] + + for arg in self.argv: + if isinstance(arg, cmd_arg): + arg.find_node(self.path) + if isinstance(arg, input_file): + inputs.append(arg.node) + if isinstance(arg, output_file): + outputs.append(arg.node) + + if self.stdout is None: + stdout = None + else: + assert isinstance(self.stdout, str) + stdout = self.path.find_or_declare(self.stdout) + if stdout is None: + raise Utils.WafError("File %s not found" % (self.stdout,)) + outputs.append(stdout) + + if self.stderr is None: + stderr = None + else: + assert isinstance(self.stderr, str) + stderr = self.path.find_or_declare(self.stderr) + if stderr is None: + raise Utils.WafError("File %s not found" % (self.stderr,)) + outputs.append(stderr) + + if self.stdin is None: + stdin = None + else: + assert isinstance(self.stdin, str) + stdin = self.path.find_resource(self.stdin) + if stdin is None: + raise Utils.WafError("File %s not found" % (self.stdin,)) + inputs.append(stdin) + + for hidden_input in self.to_list(self.hidden_inputs): + node = self.path.find_resource(hidden_input) + if node is None: + raise Utils.WafError("File %s not found in dir %s" % (hidden_input, self.path)) + inputs.append(node) + + for hidden_output in self.to_list(self.hidden_outputs): + node = self.path.find_or_declare(hidden_output) + if node is None: + raise Utils.WafError("File %s not found in dir %s" % (hidden_output, self.path)) + outputs.append(node) + + if not (inputs or getattr(self, 'no_inputs', None)): + raise Utils.WafError('command-output objects must have at least one input file or give self.no_inputs') + if not (outputs or getattr(self, 'no_outputs', None)): + raise Utils.WafError('command-output objects must have at least one output file or give self.no_outputs') + + 
task = command_output(self.env, cmd, cmd_node, self.argv, stdin, stdout, cwd, self.os_env, stderr) + Utils.copy_attrs(self, task, 'before after ext_in ext_out', only_if_set=True) + self.tasks.append(task) + + task.inputs = inputs + task.outputs = outputs + task.dep_vars = self.to_list(self.dep_vars) + + for dep in self.dependencies: + assert dep is not self + dep.post() + for dep_task in dep.tasks: + task.set_run_after(dep_task) + + if not task.inputs: + # the case for svnversion, always run, and update the output nodes + task.runnable_status = type(Task.TaskBase.run)(runnable_status, task, task.__class__) # always run + task.post_run = type(Task.TaskBase.run)(post_run, task, task.__class__) + + # TODO the case with no outputs? + +def post_run(self): + for x in self.outputs: + h = Utils.h_file(x.abspath(self.env)) + self.generator.bld.node_sigs[self.env.variant()][x.id] = h + +def runnable_status(self): + return Constants.RUN_ME + +Task.task_type_from_func('copy', vars=[], func=action_process_file_func) +TaskGen.task_gen.classes['command-output'] = cmd_output_taskgen + diff --git a/buildtools/wafadmin/Tools/msvc.py b/buildtools/wafadmin/Tools/msvc.py new file mode 100644 index 0000000000..4fde8b1468 --- /dev/null +++ b/buildtools/wafadmin/Tools/msvc.py @@ -0,0 +1,797 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Carlos Rafael Giani, 2006 (dv) +# Tamas Pal, 2007 (folti) +# Nicolas Mercier, 2009 +# Microsoft Visual C++/Intel C++ compiler support - beta, needs more testing + +# usage: +# +# conf.env['MSVC_VERSIONS'] = ['msvc 9.0', 'msvc 8.0', 'wsdk 7.0', 'intel 11', 'PocketPC 9.0', 'Smartphone 8.0'] +# conf.env['MSVC_TARGETS'] = ['x64'] +# conf.check_tool('msvc') +# OR conf.check_tool('msvc', funs='no_autodetect') +# conf.check_lib_msvc('gdi32') +# conf.check_libs_msvc('kernel32 user32', mandatory=true) +# ... 
+# obj.uselib = 'KERNEL32 USER32 GDI32' +# +# platforms and targets will be tested in the order they appear; +# the first good configuration will be used +# supported platforms : +# ia64, x64, x86, x86_amd64, x86_ia64 + +# compilers supported : +# msvc => Visual Studio, versions 7.1 (2003), 8,0 (2005), 9.0 (2008) +# wsdk => Windows SDK, versions 6.0, 6.1, 7.0 +# icl => Intel compiler, versions 9,10,11 +# Smartphone => Compiler/SDK for Smartphone devices (armv4/v4i) +# PocketPC => Compiler/SDK for PocketPC devices (armv4/v4i) + + +import os, sys, re, string, optparse +import Utils, TaskGen, Runner, Configure, Task, Options +from Logs import debug, info, warn, error +from TaskGen import after, before, feature + +from Configure import conftest, conf +import ccroot, cc, cxx, ar, winres +from libtool import read_la_file + +try: + import _winreg +except: + import winreg as _winreg + +pproc = Utils.pproc + +# importlibs provided by MSVC/Platform SDK. Do NOT search them.... +g_msvc_systemlibs = """ +aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet +cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs +credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d +ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp +faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid +gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop +kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi +mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree +msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm +netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp +odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32 +osptk parser pdh penter pgobootrun pgort powrprof psapi 
ptrustm ptrustmd ptrustu +ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm +rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32 +shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32 +traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg +version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm +wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp +""".split() + + +all_msvc_platforms = [ ('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'), ('x86_amd64', 'amd64'), ('x86_ia64', 'ia64') ] +all_wince_platforms = [ ('armv4', 'arm'), ('armv4i', 'arm'), ('mipsii', 'mips'), ('mipsii_fp', 'mips'), ('mipsiv', 'mips'), ('mipsiv_fp', 'mips'), ('sh4', 'sh'), ('x86', 'cex86') ] +all_icl_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')] + +def setup_msvc(conf, versions): + platforms = Utils.to_list(conf.env['MSVC_TARGETS']) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms] + desired_versions = conf.env['MSVC_VERSIONS'] or [v for v,_ in versions][::-1] + versiondict = dict(versions) + + for version in desired_versions: + try: + targets = dict(versiondict [version]) + for target in platforms: + try: + arch,(p1,p2,p3) = targets[target] + compiler,revision = version.split() + return compiler,revision,p1,p2,p3 + except KeyError: continue + except KeyError: continue + conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)') + +@conf +def get_msvc_version(conf, compiler, version, target, vcvars): + debug('msvc: get_msvc_version: %r %r %r', compiler, version, target) + batfile = os.path.join(conf.blddir, 'waf-print-msvc.bat') + f = open(batfile, 'w') + f.write("""@echo off +set INCLUDE= +set LIB= +call "%s" %s +echo PATH=%%PATH%% +echo INCLUDE=%%INCLUDE%% +echo LIB=%%LIB%% +""" % (vcvars,target)) + f.close() + sout = 
Utils.cmd_output(['cmd', '/E:on', '/V:on', '/C', batfile]) + lines = sout.splitlines() + + for x in ('Setting environment', 'Setting SDK environment', 'Intel(R) C++ Compiler'): + if lines[0].find(x) != -1: + break + else: + debug('msvc: get_msvc_version: %r %r %r -> not found', compiler, version, target) + conf.fatal('msvc: Impossible to find a valid architecture for building (in get_msvc_version)') + + for line in lines[1:]: + if line.startswith('PATH='): + path = line[5:] + MSVC_PATH = path.split(';') + elif line.startswith('INCLUDE='): + MSVC_INCDIR = [i for i in line[8:].split(';') if i] + elif line.startswith('LIB='): + MSVC_LIBDIR = [i for i in line[4:].split(';') if i] + + # Check if the compiler is usable at all. + # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run. + env = {} + env.update(os.environ) + env.update(PATH = path) + compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler) + cxx = conf.find_program(compiler_name, path_list=MSVC_PATH) + # delete CL if exists. because it could contain parameters wich can change cl's behaviour rather catastrophically. 
+ if env.has_key('CL'): + del(env['CL']) + + try: + p = pproc.Popen([cxx, '/help'], env=env, stdout=pproc.PIPE, stderr=pproc.PIPE) + out, err = p.communicate() + if p.returncode != 0: + raise Exception('return code: %r: %r' % (p.returncode, err)) + except Exception, e: + debug('msvc: get_msvc_version: %r %r %r -> failure', compiler, version, target) + debug(str(e)) + conf.fatal('msvc: cannot run the compiler (in get_msvc_version)') + else: + debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target) + + return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR) + +@conf +def gather_wsdk_versions(conf, versions): + version_pattern = re.compile('^v..?.?\...?.?') + try: + all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows') + except WindowsError: + try: + all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows') + except WindowsError: + return + index = 0 + while 1: + try: + version = _winreg.EnumKey(all_versions, index) + except WindowsError: + break + index = index + 1 + if not version_pattern.match(version): + continue + try: + msvc_version = _winreg.OpenKey(all_versions, version) + path,type = _winreg.QueryValueEx(msvc_version,'InstallationFolder') + except WindowsError: + continue + if os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')): + targets = [] + for target,arch in all_msvc_platforms: + try: + targets.append((target, (arch, conf.get_msvc_version('wsdk', version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd'))))) + except Configure.ConfigurationError: + pass + versions.append(('wsdk ' + version[1:], targets)) + +@conf +def gather_msvc_versions(conf, versions): + # checks SmartPhones SDKs + try: + ce_sdk = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs') + except WindowsError: + try: + ce_sdk = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE 
Tools\\SDKs') + except WindowsError: + ce_sdk = '' + if ce_sdk: + supported_wince_platforms = [] + ce_index = 0 + while 1: + try: + sdk_device = _winreg.EnumKey(ce_sdk, ce_index) + except WindowsError: + break + ce_index = ce_index + 1 + sdk = _winreg.OpenKey(ce_sdk, sdk_device) + path,type = _winreg.QueryValueEx(sdk, 'SDKRootDir') + path=str(path) + path,device = os.path.split(path) + if not device: + path,device = os.path.split(path) + for arch,compiler in all_wince_platforms: + platforms = [] + if os.path.isdir(os.path.join(path, device, 'Lib', arch)): + platforms.append((arch, compiler, os.path.join(path, device, 'Include', arch), os.path.join(path, device, 'Lib', arch))) + if platforms: + supported_wince_platforms.append((device, platforms)) + # checks MSVC + version_pattern = re.compile('^..?\...?') + for vcver,vcvar in [('VCExpress','exp'), ('VisualStudio','')]: + try: + all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\'+vcver) + except WindowsError: + try: + all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\'+vcver) + except WindowsError: + continue + index = 0 + while 1: + try: + version = _winreg.EnumKey(all_versions, index) + except WindowsError: + break + index = index + 1 + if not version_pattern.match(version): + continue + try: + msvc_version = _winreg.OpenKey(all_versions, version + "\\Setup\\VS") + path,type = _winreg.QueryValueEx(msvc_version, 'ProductDir') + path=str(path) + targets = [] + if ce_sdk: + for device,platforms in supported_wince_platforms: + cetargets = [] + for platform,compiler,include,lib in platforms: + winCEpath = os.path.join(path, 'VC', 'ce') + if os.path.isdir(winCEpath): + common_bindirs,_1,_2 = conf.get_msvc_version('msvc', version, 'x86', os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat')) + if os.path.isdir(os.path.join(winCEpath, 'lib', platform)): + bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 
'x86_'+compiler)] + common_bindirs + incdirs = [include, os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include')] + libdirs = [lib, os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform)] + cetargets.append((platform, (platform, (bindirs,incdirs,libdirs)))) + versions.append((device+' '+version, cetargets)) + if os.path.isfile(os.path.join(path, 'VC', 'vcvarsall.bat')): + for target,realtarget in all_msvc_platforms[::-1]: + try: + targets.append((target, (realtarget, conf.get_msvc_version('msvc', version, target, os.path.join(path, 'VC', 'vcvarsall.bat'))))) + except: + pass + elif os.path.isfile(os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat')): + try: + targets.append(('x86', ('x86', conf.get_msvc_version('msvc', version, 'x86', os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat'))))) + except Configure.ConfigurationError: + pass + versions.append(('msvc '+version, targets)) + + except WindowsError: + continue + +@conf +def gather_icl_versions(conf, versions): + version_pattern = re.compile('^...?.?\....?.?') + try: + all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++') + except WindowsError: + try: + all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\C++') + except WindowsError: + return + index = 0 + while 1: + try: + version = _winreg.EnumKey(all_versions, index) + except WindowsError: + break + index = index + 1 + if not version_pattern.match(version): + continue + targets = [] + for target,arch in all_icl_platforms: + try: + icl_version = _winreg.OpenKey(all_versions, version+'\\'+target) + path,type = _winreg.QueryValueEx(icl_version,'ProductDir') + if os.path.isfile(os.path.join(path, 'bin', 'iclvars.bat')): + try: + targets.append((target, (arch, conf.get_msvc_version('intel', version, target, os.path.join(path, 'bin', 'iclvars.bat'))))) + except Configure.ConfigurationError: + pass + except 
@conf
def find_lt_names_msvc(self, libname, is_static=False):
	"""
	Win32/MSVC specific code to glean out information from libtool la files.
	this function is not attached to the task_gen class

	Returns a (libdir, library-name, is-static) triple, or
	(None, None, None) when no .la file describing *libname* is found
	on any path of env['LIBPATH'].
	"""
	lt_names=[
		'lib%s.la' % libname,
		'%s.la' % libname,
	]

	for path in self.env['LIBPATH']:
		for la in lt_names:
			laf=os.path.join(path,la)
			dll=None
			if os.path.exists(laf):
				ltdict=read_la_file(laf)
				lt_libdir=None
				if ltdict.get('libdir', ''):
					lt_libdir = ltdict['libdir']
				if not is_static and ltdict.get('library_names', ''):
					dllnames=ltdict['library_names'].split()
					dll=dllnames[0].lower()
					dll=re.sub(r'\.dll$', '', dll)
					return (lt_libdir, dll, False)
				elif ltdict.get('old_library', ''):
					olib=ltdict['old_library']
					if os.path.exists(os.path.join(path,olib)):
						return (path, olib, True)
					# truthiness test: lt_libdir may still be None here and
					# the original 'lt_libdir != ""' let None reach os.path.join
					elif lt_libdir and os.path.exists(os.path.join(lt_libdir,olib)):
						return (lt_libdir, olib, True)
					else:
						return (None, olib, True)
				else:
					raise Utils.WafError('invalid libtool object file: %s' % laf)
	return (None, None, None)
self.find_lt_names_msvc(lib, is_static) + + if lt_path != None and lt_libname != None: + if lt_static == True: + # file existance check has been made by find_lt_names + return os.path.join(lt_path,lt_libname) + + if lt_path != None: + _libpaths=[lt_path] + self.env['LIBPATH'] + else: + _libpaths=self.env['LIBPATH'] + + static_libs=[ + 'lib%ss.lib' % lib, + 'lib%s.lib' % lib, + '%ss.lib' % lib, + '%s.lib' %lib, + ] + + dynamic_libs=[ + 'lib%s.dll.lib' % lib, + 'lib%s.dll.a' % lib, + '%s.dll.lib' % lib, + '%s.dll.a' % lib, + 'lib%s_d.lib' % lib, + '%s_d.lib' % lib, + '%s.lib' %lib, + ] + + libnames=static_libs + if not is_static: + libnames=dynamic_libs + static_libs + + for path in _libpaths: + for libn in libnames: + if os.path.exists(os.path.join(path, libn)): + debug('msvc: lib found: %s', os.path.join(path,libn)) + return re.sub('\.lib$', '',libn) + + #if no lib can be found, just return the libname as msvc expects it + if mandatory: + self.fatal("The library %r could not be found" % libname) + return re.sub('\.lib$', '', libname) + +@conf +def check_lib_msvc(self, libname, is_static=False, uselib_store=None, mandatory=False): + "This is the api to use" + libn = self.libname_msvc(libname, is_static, mandatory) + + if not uselib_store: + uselib_store = libname.upper() + + # Note: ideally we should be able to place the lib in the right env var, either STATICLIB or LIB, + # but we don't distinguish static libs from shared libs. 
+ # This is ok since msvc doesn't have any special linker flag to select static libs (no env['STATICLIB_MARKER']) + if False and is_static: # disabled + self.env['STATICLIB_' + uselib_store] = [libn] + else: + self.env['LIB_' + uselib_store] = [libn] + +@conf +def check_libs_msvc(self, libnames, is_static=False, mandatory=False): + for libname in Utils.to_list(libnames): + self.check_lib_msvc(libname, is_static, mandatory=mandatory) + +@conftest +def no_autodetect(conf): + conf.eval_rules(detect.replace('autodetect', '')) + + +detect = ''' +autodetect +find_msvc +msvc_common_flags +cc_load_tools +cxx_load_tools +cc_add_flags +cxx_add_flags +link_add_flags +''' + +@conftest +def autodetect(conf): + v = conf.env + compiler, version, path, includes, libdirs = detect_msvc(conf) + v['PATH'] = path + v['CPPPATH'] = includes + v['LIBPATH'] = libdirs + v['MSVC_COMPILER'] = compiler + +def _get_prog_names(conf, compiler): + if compiler=='intel': + compiler_name = 'ICL' + linker_name = 'XILINK' + lib_name = 'XILIB' + else: + # assumes CL.exe + compiler_name = 'CL' + linker_name = 'LINK' + lib_name = 'LIB' + return compiler_name, linker_name, lib_name + +@conftest +def find_msvc(conf): + # due to path format limitations, limit operation only to native Win32. Yeah it sucks. + if sys.platform != 'win32': + conf.fatal('MSVC module only works under native Win32 Python! 
cygwin is not supported yet') + + v = conf.env + + compiler, version, path, includes, libdirs = detect_msvc(conf) + + compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler) + has_msvc_manifest = (compiler == 'msvc' and float(version) >= 8) or (compiler == 'wsdk' and float(version) >= 6) or (compiler == 'intel' and float(version) >= 11) + + # compiler + cxx = None + if v.CXX: cxx = v.CXX + elif 'CXX' in conf.environ: cxx = conf.environ['CXX'] + if not cxx: cxx = conf.find_program(compiler_name, var='CXX', path_list=path, mandatory=True) + cxx = conf.cmd_to_list(cxx) + + # before setting anything, check if the compiler is really msvc + env = dict(conf.environ) + env.update(PATH = ';'.join(path)) + if not Utils.cmd_output([cxx, '/nologo', '/?'], silent=True, env=env): + conf.fatal('the msvc compiler could not be identified') + + link = v.LINK_CXX + if not link: + link = conf.find_program(linker_name, path_list=path, mandatory=True) + ar = v.AR + if not ar: + ar = conf.find_program(lib_name, path_list=path, mandatory=True) + + # manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later + mt = v.MT + if has_msvc_manifest: + mt = conf.find_program('MT', path_list=path, mandatory=True) + + # no more possibility of failure means the data state will be consistent + # we may store the data safely now + + v.MSVC_MANIFEST = has_msvc_manifest + v.PATH = path + v.CPPPATH = includes + v.LIBPATH = libdirs + + # c/c++ compiler + v.CC = v.CXX = cxx + v.CC_NAME = v.CXX_NAME = 'msvc' + + v.LINK = v.LINK_CXX = link + if not v.LINK_CC: + v.LINK_CC = v.LINK_CXX + + v.AR = ar + v.MT = mt + v.MTFLAGS = v.ARFLAGS = ['/NOLOGO'] + + + conf.check_tool('winres') + + if not conf.env.WINRC: + warn('Resource compiler not found. 
Compiling resource file is disabled') + + # environment flags + try: v.prepend_value('CPPPATH', conf.environ['INCLUDE']) + except KeyError: pass + try: v.prepend_value('LIBPATH', conf.environ['LIB']) + except KeyError: pass + +@conftest +def msvc_common_flags(conf): + v = conf.env + + v['CPPFLAGS'] = ['/W3', '/nologo'] + + v['CCDEFINES_ST'] = '/D%s' + v['CXXDEFINES_ST'] = '/D%s' + + # TODO just use _WIN32, which defined by the compiler itself! + v['CCDEFINES'] = ['WIN32'] # avoid using this, any compiler predefines the _WIN32 marcro anyway + v['CXXDEFINES'] = ['WIN32'] # avoid using this, any compiler predefines the _WIN32 marcro anyway + + v['_CCINCFLAGS'] = [] + v['_CCDEFFLAGS'] = [] + v['_CXXINCFLAGS'] = [] + v['_CXXDEFFLAGS'] = [] + + v['CC_SRC_F'] = '' + v['CC_TGT_F'] = ['/c', '/Fo'] + v['CXX_SRC_F'] = '' + v['CXX_TGT_F'] = ['/c', '/Fo'] + + v['CPPPATH_ST'] = '/I%s' # template for adding include paths + + v['AR_TGT_F'] = v['CCLNK_TGT_F'] = v['CXXLNK_TGT_F'] = '/OUT:' + + # Subsystem specific flags + v['CPPFLAGS_CONSOLE'] = ['/SUBSYSTEM:CONSOLE'] + v['CPPFLAGS_NATIVE'] = ['/SUBSYSTEM:NATIVE'] + v['CPPFLAGS_POSIX'] = ['/SUBSYSTEM:POSIX'] + v['CPPFLAGS_WINDOWS'] = ['/SUBSYSTEM:WINDOWS'] + v['CPPFLAGS_WINDOWSCE'] = ['/SUBSYSTEM:WINDOWSCE'] + + # CRT specific flags + v['CPPFLAGS_CRT_MULTITHREADED'] = ['/MT'] + v['CPPFLAGS_CRT_MULTITHREADED_DLL'] = ['/MD'] + + # TODO these are defined by the compiler itself! + v['CPPDEFINES_CRT_MULTITHREADED'] = ['_MT'] # this is defined by the compiler itself! + v['CPPDEFINES_CRT_MULTITHREADED_DLL'] = ['_MT', '_DLL'] # these are defined by the compiler itself! + + v['CPPFLAGS_CRT_MULTITHREADED_DBG'] = ['/MTd'] + v['CPPFLAGS_CRT_MULTITHREADED_DLL_DBG'] = ['/MDd'] + + # TODO these are defined by the compiler itself! + v['CPPDEFINES_CRT_MULTITHREADED_DBG'] = ['_DEBUG', '_MT'] # these are defined by the compiler itself! + v['CPPDEFINES_CRT_MULTITHREADED_DLL_DBG'] = ['_DEBUG', '_MT', '_DLL'] # these are defined by the compiler itself! 
+ + # compiler debug levels + v['CCFLAGS'] = ['/TC'] + v['CCFLAGS_OPTIMIZED'] = ['/O2', '/DNDEBUG'] + v['CCFLAGS_RELEASE'] = ['/O2', '/DNDEBUG'] + v['CCFLAGS_DEBUG'] = ['/Od', '/RTC1', '/ZI'] + v['CCFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/ZI'] + + v['CXXFLAGS'] = ['/TP', '/EHsc'] + v['CXXFLAGS_OPTIMIZED'] = ['/O2', '/DNDEBUG'] + v['CXXFLAGS_RELEASE'] = ['/O2', '/DNDEBUG'] + + v['CXXFLAGS_DEBUG'] = ['/Od', '/RTC1', '/ZI'] + v['CXXFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/ZI'] + + # linker + v['LIB'] = [] + + v['LIB_ST'] = '%s.lib' # template for adding libs + v['LIBPATH_ST'] = '/LIBPATH:%s' # template for adding libpaths + v['STATICLIB_ST'] = 'lib%s.lib' # Note: to be able to distinguish between a static lib and a dll import lib, it's a good pratice to name the static lib 'lib%s.lib' and the dll import lib '%s.lib' + v['STATICLIBPATH_ST'] = '/LIBPATH:%s' + + v['LINKFLAGS'] = ['/NOLOGO'] + if v['MSVC_MANIFEST']: + v.append_value('LINKFLAGS', '/MANIFEST') + v['LINKFLAGS_DEBUG'] = ['/DEBUG'] + v['LINKFLAGS_ULTRADEBUG'] = ['/DEBUG'] + + # shared library + v['shlib_CCFLAGS'] = [''] + v['shlib_CXXFLAGS'] = [''] + v['shlib_LINKFLAGS']= ['/DLL'] + v['shlib_PATTERN'] = '%s.dll' + v['implib_PATTERN'] = '%s.lib' + v['IMPLIB_ST'] = '/IMPLIB:%s' + + # static library + v['staticlib_LINKFLAGS'] = [''] + v['staticlib_PATTERN'] = 'lib%s.lib' # Note: to be able to distinguish between a static lib and a dll import lib, it's a good pratice to name the static lib 'lib%s.lib' and the dll import lib '%s.lib' + + # program + v['program_PATTERN'] = '%s.exe' + + +####################################################################################################### +##### conf above, build below + +@after('apply_link') +@feature('cc', 'cxx') +def apply_flags_msvc(self): + if self.env.CC_NAME != 'msvc' or not self.link_task: + return + + subsystem = getattr(self, 'subsystem', '') + if subsystem: + subsystem = '/subsystem:%s' % subsystem + flags = 'cstaticlib' in self.features and 'ARFLAGS' or 
@feature('cprogram', 'cshlib', 'cstaticlib')
@after('apply_lib_vars')
@before('apply_obj_vars')
def apply_obj_vars_msvc(self):
	"""Translate LIBPATH/LIB/STATICLIB env variables into msvc-style
	linker flags (/LIBPATH:dir, name.lib, libname.lib), replacing the
	generic apply_obj_vars method which emits unix-style -L/-l flags."""
	if self.env['CC_NAME'] != 'msvc':
		return

	try:
		# drop the generic method so -L/-l flags are not added as well
		self.meths.remove('apply_obj_vars')
	except ValueError:
		pass

	# list of library paths already processed, shared via the task generator
	libpaths = getattr(self, 'libpaths', [])
	if not libpaths: self.libpaths = libpaths

	env = self.env
	app = env.append_unique

	cpppath_st = env['CPPPATH_ST']
	lib_st = env['LIB_ST']
	staticlib_st = env['STATICLIB_ST']
	libpath_st = env['LIBPATH_ST']
	staticlibpath_st = env['STATICLIBPATH_ST']

	# shared library search paths
	for i in env['LIBPATH']:
		app('LINKFLAGS', libpath_st % i)
		if not libpaths.count(i):
			libpaths.append(i)

	# NOTE(review): this second pass iterates LIBPATH again (not a separate
	# STATICLIBPATH variable); with both templates set to '/LIBPATH:%s' and
	# append_unique used, it presumably adds nothing new - confirm whether
	# STATICLIBPATH was intended before changing
	for i in env['LIBPATH']:
		app('LINKFLAGS', staticlibpath_st % i)
		if not libpaths.count(i):
			libpaths.append(i)

	# i doubt that anyone will make a fully static binary anyway
	if not env['FULLSTATIC']:
		if env['STATICLIB'] or env['LIB']:
			app('LINKFLAGS', env['SHLIB_MARKER']) # TODO does SHLIB_MARKER work?

	# static libraries, e.g. libfoo.lib
	for i in env['STATICLIB']:
		app('LINKFLAGS', staticlib_st % i)

	# import/shared libraries, e.g. foo.lib
	for i in env['LIB']:
		app('LINKFLAGS', lib_st % i)
def exec_mf(self):
	"""Run the manifest tool (mt.exe) to embed the generated .manifest
	file into the task's binary.

	Called from exec_command_msvc after a successful link when
	apply_manifest set do_manifest on the task.  Returns the exit status
	of the manifest tool, or 0 when there is nothing to embed."""
	env = self.env
	mtool = env['MT']
	if not mtool:
		# no manifest tool configured - nothing to do
		return 0

	self.do_manifest = False

	outfile = self.outputs[0].bldpath(env)

	# locate the .manifest file among the task outputs
	manifest = None
	for out_node in self.outputs:
		if out_node.name.endswith('.manifest'):
			manifest = out_node.bldpath(env)
			break
	if manifest is None:
		# Should never get here.  If we do, it means the manifest file was
		# never added to the outputs list, thus we don't have a manifest file
		# to embed, so we just return.
		return 0

	# embedding mode. Different for EXE's and DLL's.
	# see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
	mode = ''
	if 'cprogram' in self.generator.features:
		mode = '1'
	elif 'cshlib' in self.generator.features:
		mode = '2'

	debug('msvc: embedding manifest')
	#flags = ' '.join(env['MTFLAGS'] or [])

	# build the command as an argument list to avoid shell quoting issues
	lst = []
	lst.extend([env['MT']])
	lst.extend(Utils.to_list(env['MTFLAGS']))
	lst.extend(Utils.to_list("-manifest"))
	lst.extend(Utils.to_list(manifest))
	lst.extend(Utils.to_list("-outputresource:%s;%s" % (outfile, mode)))

	#cmd='%s %s -manifest "%s" -outputresource:"%s";#%s' % (mtool, flags,
	#	manifest, outfile, mode)
	lst = [lst]
	return self.exec_command(*lst)
@before('apply_link')
def apply_nasm_vars(self):
	"""Collect per-task-generator nasm settings into the build environment.

	self.nasm_flags entries are appended to NASM_FLAGS; each directory in
	self.includes is resolved and appended to NASM_INCLUDES as -I options
	for both its source and build paths.

	Raises Utils.WafError when an include directory cannot be found."""

	# flags
	if hasattr(self, 'nasm_flags'):
		for flag in self.to_list(self.nasm_flags):
			self.env.append_value('NASM_FLAGS', flag)

	# includes - well, if we suppose it works with c processing
	if hasattr(self, 'includes'):
		for inc in self.to_list(self.includes):
			node = self.path.find_dir(inc)
			if not node:
				# keep a space before the directory name so the message is readable
				raise Utils.WafError('cannot find the dir ' + inc)
			self.env.append_value('NASM_INCLUDES', '-I%s' % node.srcpath(self.env))
			self.env.append_value('NASM_INCLUDES', '-I%s' % node.bldpath(self.env))
open_re = re.compile('^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M)
foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M)
def filter_comments(txt):
	"""Return *txt* with OCaml (* ... *) comments removed.

	Tracks the comment nesting depth in a one-element list so the inner
	substitution callback can update it; string and character literals
	are matched as single tokens and therefore never open or close a
	comment."""
	depth = [0]
	def _strip(match):
		if match.group(1):
			# '(*' - enter a (possibly nested) comment
			depth[0] += 1
		elif match.group(2):
			# '*)' - leave the current comment level
			depth[0] -= 1
		elif not depth[0]:
			# ordinary code outside any comment is kept as-is
			return match.group(0)
		return ''
	return foo.sub(_strip, txt)
@feature('ocaml')
@after('init_ml')
def init_envs_ml(self):
	"""Create the per-variant environment copies: native_env for ocamlopt
	output (types 'native', 'all', 'c_object') and bytecode_env for
	ocamlc output (types 'bytecode', 'all')."""

	self.islibrary = getattr(self, 'islibrary', False)

	global native_lst, bytecode_lst
	self.native_env = None
	if self.type in native_lst:
		self.native_env = self.env.copy()
		# -a makes the ocaml tools produce a library archive
		if self.islibrary: self.native_env['OCALINKFLAGS'] = '-a'

	self.bytecode_env = None
	if self.type in bytecode_lst:
		self.bytecode_env = self.env.copy()
		if self.islibrary: self.bytecode_env['OCALINKFLAGS'] = '-a'

	if self.type == 'c_object':
		# emit a .o object usable by the C linker instead of an executable
		self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')
@extension(EXT_ML)
def ml_hook(self, node):
	"""Extension hook for .ml sources: create an 'ocamlx' task (native,
	.cmx output) and/or an 'ocaml' task (bytecode, .cmo output) depending
	on which environments init_envs_ml prepared."""
	if self.native_env:
		task = self.create_task('ocamlx', node, node.change_ext('.cmx'), env=self.native_env)
		task.obj = self
		task.incpaths = self.bld_incpaths_lst
		self.native_tasks.append(task)

	if self.bytecode_env:
		task = self.create_task('ocaml', node, node.change_ext('.cmo'), env=self.bytecode_env)
		task.obj = self
		# flag read by compile_may_start/link_may_start to pick the task list
		task.bytecode = 1
		task.incpaths = self.bld_incpaths_lst
		self.bytecode_tasks.append(task)
def compile_may_start(self):
	"""runnable_status replacement for the ocaml compile tasks.

	On first call, compute intra-project module dependencies - possible
	only once generated sources exist - and order this task after the
	tasks producing the modules it depends on, then force the signature
	to be recomputed before deferring to the default runnable_status."""
	if not getattr(self, 'flag_deps', ''):
		self.flag_deps = 1

		# the evil part is that we can only compute the dependencies after the
		# source files can be read (this means actually producing the source files)
		if getattr(self, 'bytecode', ''): alltasks = self.obj.bytecode_tasks
		else: alltasks = self.obj.native_tasks

		self.signature() # ensure that files are scanned - unfortunately
		tree = self.generator.bld
		env = self.env
		for node in self.inputs:
			# dependencies recorded by the scanner for this task
			lst = tree.node_deps[self.unique_id()]
			for depnode in lst:
				for t in alltasks:
					if t == self: continue
					if depnode in t.inputs:
						self.set_run_after(t)

		# TODO necessary to get the signature right - for now
		delattr(self, 'cache_sig')
		self.signature()

	return Task.Task.runnable_status(self)
def detect(conf):
	"""Configure the ocaml tools.

	Locates ocamlopt and ocamlc (fatal error if either is missing) plus
	ocamllex/ocamlyacc, and derives the standard library location from
	'ocamlc -where' for OCAMLLIB, LIBPATH_OCAML and CPPPATH_OCAML."""
	opt = conf.find_program('ocamlopt', var='OCAMLOPT')
	occ = conf.find_program('ocamlc', var='OCAMLC')
	if (not opt) or (not occ):
		conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH')

	v = conf.env
	v['OCAMLC'] = occ
	v['OCAMLOPT'] = opt
	v['OCAMLLEX'] = conf.find_program('ocamllex', var='OCAMLLEX')
	v['OCAMLYACC'] = conf.find_program('ocamlyacc', var='OCAMLYACC')
	v['OCAMLFLAGS'] = ''
	# run 'ocamlc -where' once instead of spawning the process three times
	where = Utils.cmd_output(occ + ' -where').strip() + os.sep
	v['OCAMLLIB'] = where
	v['LIBPATH_OCAML'] = where
	v['CPPPATH_OCAML'] = where
	v['LIB_OCAML'] = 'camlrun'
def bundle_name_for_output(out):
	"""Return the .app bundle name for an output node's file name.

	The last extension, if present, is replaced by '.app'; a name with no
	extension simply gets '.app' appended."""
	base, dot, _ext = out.name.rpartition('.')
	if dot:
		return base + '.app'
	return out.name + '.app'
def plist_build(task):
	"""Write the task's prepared Info.plist content (task.mac_plist) to
	its output node.

	Returns 0 so the Task framework records success."""
	env = task.env
	f = open(task.outputs[0].abspath(env), "w")
	try:
		f.write(task.mac_plist)
	finally:
		# close the handle even if the write fails (avoids a leak on error)
		f.close()

	return 0
@conf
def check_perl_version(conf, minver=None):
	"""
	Checks if perl is installed.

	If installed the variable PERL will be set in environment.

	Perl binary can be overridden by --with-perl-binary config variable

	Raises a fatal configuration error when the version cannot be
	determined or is older than *minver* (a tuple of ints).
	"""

	if getattr(Options.options, 'perlbinary', None):
		conf.env.PERL = Options.options.perlbinary
	else:
		conf.find_program('perl', var='PERL', mandatory=True)

	try:
		# ask perl itself for its version string, e.g. '5.10.1'
		version = Utils.cmd_output([conf.env.PERL, '-e', 'printf "%vd",$^V'])
	except Exception:
		# narrowed from bare 'except:' which also swallowed KeyboardInterrupt/SystemExit
		conf.fatal('could not determine the perl version')

	conf.env.PERL_VERSION = version
	cver = ''
	if minver:
		try:
			ver = tuple(map(int, version.split('.')))
		except Exception:
			conf.fatal('unsupported perl version %r' % version)
		if ver < minver:
			conf.fatal('perl is too old')

		cver = '.'.join(map(str,minver))
	conf.check_message('perl', cver, True, version)
@conf
def check_perl_ext_devel(conf):
	"""
	Check for configuration needed to build perl extensions.

	Sets different xxx_PERLEXT variables in the environment.

	Also sets the ARCHDIR_PERL variable useful as installation path,
	which can be overridden by --with-perl-archdir
	"""
	if not conf.env.PERL:
		conf.fatal('perl detection is required first')

	def read_out(cmd):
		# run a one-liner under 'perl -MConfig' and split its output into a list
		return Utils.to_list(Utils.cmd_output([conf.env.PERL, '-MConfig', '-e', cmd]))

	conf.env.LINKFLAGS_PERLEXT = read_out('print $Config{lddlflags}')
	conf.env.CPPPATH_PERLEXT = read_out('print "$Config{archlib}/CORE"')
	conf.env.CCFLAGS_PERLEXT = read_out('print "$Config{ccflags} $Config{cccdlflags}"')
	conf.env.XSUBPP = read_out('print "$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}"')
	conf.env.EXTUTILS_TYPEMAP = read_out('print "$Config{privlib}/ExtUtils/typemap"')
	# extension modules are named after perl's dynamic-library extension, e.g. '%s.so'
	conf.env.perlext_PATTERN = '%s.' + read_out('print $Config{dlext}')[0]

	if getattr(Options.options, 'perlarchdir', None):
		conf.env.ARCHDIR_PERL = Options.options.perlarchdir
	else:
		conf.env.ARCHDIR_PERL = read_out('print $Config{sitearch}')[0]
+""" +# TODO: more varargs, pragma once +# TODO: dumb file scanner tracking all includes + +import re, sys, os, string +import Logs, Build, Utils +from Logs import debug, error +import traceback + +class PreprocError(Utils.WafError): + pass + +POPFILE = '-' + + +recursion_limit = 5000 +"do not loop too much on header inclusion" + +go_absolute = 0 +"set to 1 to track headers on files in /usr/include - else absolute paths are ignored" + +standard_includes = ['/usr/include'] +if sys.platform == "win32": + standard_includes = [] + +use_trigraphs = 0 +'apply the trigraph rules first' + +strict_quotes = 0 +"Keep <> for system includes (do not search for those includes)" + +g_optrans = { +'not':'!', +'and':'&&', +'bitand':'&', +'and_eq':'&=', +'or':'||', +'bitor':'|', +'or_eq':'|=', +'xor':'^', +'xor_eq':'^=', +'compl':'~', +} +"these ops are for c++, to reset, set an empty dict" + +# ignore #warning and #error +re_lines = re.compile(\ + '^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$', + re.IGNORECASE | re.MULTILINE) + +re_mac = re.compile("^[a-zA-Z_]\w*") +re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]') +re_pragma_once = re.compile('^\s*once\s*', re.IGNORECASE) +re_nl = re.compile('\\\\\r*\n', re.MULTILINE) +re_cpp = re.compile( + r"""(/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)|//[^\n]*|("(?:\\.|[^"\\])*"|'(?:\\.|[^'\\])*'|.[^/"'\\]*)""", + re.MULTILINE) +trig_def = [('??'+a, b) for a, b in zip("=-/!'()<>", r'#~\|^[]{}')] +chr_esc = {'0':0, 'a':7, 'b':8, 't':9, 'n':10, 'f':11, 'v':12, 'r':13, '\\':92, "'":39} + +NUM = 'i' +OP = 'O' +IDENT = 'T' +STR = 's' +CHAR = 'c' + +tok_types = [NUM, STR, IDENT, OP] +exp_types = [ + r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""", + 
def reduce_nums(val_1, val_2, val_op):
	"""Apply the binary operator *val_op* to *val_1* and *val_2* and try
	to return an integer result (used when evaluating #if expressions).

	Operands may arrive as preprocessor token strings; they are coerced
	to int first.  Unknown operators yield 0."""
	#print val_1, val_2, val_op

	# now perform the operation, make certain a and b are numeric
	try: a = 0 + val_1
	except TypeError: a = int(val_1)
	try: b = 0 + val_2
	except TypeError: b = int(val_2)

	d = val_op
	if d == '%': c = a%b
	elif d=='+': c = a+b
	elif d=='-': c = a-b
	elif d=='*': c = a*b
	# '//' keeps integer division: plain '/' would produce a float on Python 3
	# (identical result for ints on Python 2)
	elif d=='/': c = a//b
	elif d=='^': c = a^b
	elif d=='|': c = a|b
	elif d=='||': c = int(a or b)
	elif d=='&': c = a&b
	elif d=='&&': c = int(a and b)
	elif d=='==': c = int(a == b)
	elif d=='!=': c = int(a != b)
	elif d=='<=': c = int(a <= b)
	elif d=='<': c = int(a < b)
	elif d=='>': c = int(a > b)
	elif d=='>=': c = int(a >= b)
	elif d=='<<': c = a<<b
	elif d=='>>': c = a>>b
	else: c = 0
	return c
lst[0] + if p == OP: + if v == '(': + count_par = 1 + i = 1 + while i < len(lst): + (p, v) = lst[i] + + if p == OP: + if v == ')': + count_par -= 1 + if count_par == 0: + break + elif v == '(': + count_par += 1 + i += 1 + else: + raise PreprocError("rparen expected %r" % lst) + + (num, _) = get_term(lst[1:i]) + return (num, lst[i+1:]) + + elif v == '+': + return get_num(lst[1:]) + elif v == '-': + num, lst = get_num(lst[1:]) + return (reduce_nums('-1', num, '*'), lst) + elif v == '!': + num, lst = get_num(lst[1:]) + return (int(not int(num)), lst) + elif v == '~': + return (~ int(num), lst) + else: + raise PreprocError("invalid op token %r for get_num" % lst) + elif p == NUM: + return v, lst[1:] + elif p == IDENT: + # all macros should have been replaced, remaining identifiers eval to 0 + return 0, lst[1:] + else: + raise PreprocError("invalid token %r for get_num" % lst) + +def get_term(lst): + if not lst: raise PreprocError("empty list for get_term") + num, lst = get_num(lst) + if not lst: + return (num, []) + (p, v) = lst[0] + if p == OP: + if v == '&&' and not num: + return (num, []) + elif v == '||' and num: + return (num, []) + elif v == ',': + # skip + return get_term(lst[1:]) + elif v == '?': + count_par = 0 + i = 1 + while i < len(lst): + (p, v) = lst[i] + + if p == OP: + if v == ')': + count_par -= 1 + elif v == '(': + count_par += 1 + elif v == ':': + if count_par == 0: + break + i += 1 + else: + raise PreprocError("rparen expected %r" % lst) + + if int(num): + return get_term(lst[1:i]) + else: + return get_term(lst[i+1:]) + + else: + num2, lst = get_num(lst[1:]) + + if not lst: + # no more tokens to process + num2 = reduce_nums(num, num2, v) + return get_term([(NUM, num2)] + lst) + + # operator precedence + p2, v2 = lst[0] + if p2 != OP: + raise PreprocError("op expected %r" % lst) + + if prec[v2] >= prec[v]: + num2 = reduce_nums(num, num2, v) + return get_term([(NUM, num2)] + lst) + else: + num3, lst = get_num(lst[1:]) + num3 = reduce_nums(num2, num3, 
v2) + return get_term([(NUM, num), (p, v), (NUM, num3)] + lst) + + + raise PreprocError("cannot reduce %r" % lst) + +def reduce_eval(lst): + """take a list of tokens and output true or false (#if/#elif conditions)""" + num, lst = get_term(lst) + return (NUM, num) + +def stringize(lst): + """use for converting a list of tokens to a string""" + lst = [str(v2) for (p2, v2) in lst] + return "".join(lst) + +def paste_tokens(t1, t2): + """ + here is what we can paste: + a ## b -> ab + > ## = -> >= + a ## 2 -> a2 + """ + p1 = None + if t1[0] == OP and t2[0] == OP: + p1 = OP + elif t1[0] == IDENT and (t2[0] == IDENT or t2[0] == NUM): + p1 = IDENT + elif t1[0] == NUM and t2[0] == NUM: + p1 = NUM + if not p1: + raise PreprocError('tokens do not make a valid paste %r and %r' % (t1, t2)) + return (p1, t1[1] + t2[1]) + +def reduce_tokens(lst, defs, ban=[]): + """replace the tokens in lst, using the macros provided in defs, and a list of macros that cannot be re-applied""" + i = 0 + + while i < len(lst): + (p, v) = lst[i] + + if p == IDENT and v == "defined": + del lst[i] + if i < len(lst): + (p2, v2) = lst[i] + if p2 == IDENT: + if v2 in defs: + lst[i] = (NUM, 1) + else: + lst[i] = (NUM, 0) + elif p2 == OP and v2 == '(': + del lst[i] + (p2, v2) = lst[i] + del lst[i] # remove the ident, and change the ) for the value + if v2 in defs: + lst[i] = (NUM, 1) + else: + lst[i] = (NUM, 0) + else: + raise PreprocError("invalid define expression %r" % lst) + + elif p == IDENT and v in defs: + + if isinstance(defs[v], str): + a, b = extract_macro(defs[v]) + defs[v] = b + macro_def = defs[v] + to_add = macro_def[1] + + if isinstance(macro_def[0], list): + # macro without arguments + del lst[i] + for x in xrange(len(to_add)): + lst.insert(i, to_add[x]) + i += 1 + else: + # collect the arguments for the funcall + + args = [] + del lst[i] + + if i >= len(lst): + raise PreprocError("expected '(' after %r (got nothing)" % v) + + (p2, v2) = lst[i] + if p2 != OP or v2 != '(': + raise 
PreprocError("expected '(' after %r" % v) + + del lst[i] + + one_param = [] + count_paren = 0 + while i < len(lst): + p2, v2 = lst[i] + + del lst[i] + if p2 == OP and count_paren == 0: + if v2 == '(': + one_param.append((p2, v2)) + count_paren += 1 + elif v2 == ')': + if one_param: args.append(one_param) + break + elif v2 == ',': + if not one_param: raise PreprocError("empty param in funcall %s" % p) + args.append(one_param) + one_param = [] + else: + one_param.append((p2, v2)) + else: + one_param.append((p2, v2)) + if v2 == '(': count_paren += 1 + elif v2 == ')': count_paren -= 1 + else: + raise PreprocError('malformed macro') + + # substitute the arguments within the define expression + accu = [] + arg_table = macro_def[0] + j = 0 + while j < len(to_add): + (p2, v2) = to_add[j] + + if p2 == OP and v2 == '#': + # stringize is for arguments only + if j+1 < len(to_add) and to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table: + toks = args[arg_table[to_add[j+1][1]]] + accu.append((STR, stringize(toks))) + j += 1 + else: + accu.append((p2, v2)) + elif p2 == OP and v2 == '##': + # token pasting, how can man invent such a complicated system? 
+ if accu and j+1 < len(to_add): + # we have at least two tokens + + t1 = accu[-1] + + if to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table: + toks = args[arg_table[to_add[j+1][1]]] + + if toks: + accu[-1] = paste_tokens(t1, toks[0]) #(IDENT, accu[-1][1] + toks[0][1]) + accu.extend(toks[1:]) + else: + # error, case "a##" + accu.append((p2, v2)) + accu.extend(toks) + elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__': + # TODO not sure + # first collect the tokens + va_toks = [] + st = len(macro_def[0]) + pt = len(args) + for x in args[pt-st+1:]: + va_toks.extend(x) + va_toks.append((OP, ',')) + if va_toks: va_toks.pop() # extra comma + if len(accu)>1: + (p3, v3) = accu[-1] + (p4, v4) = accu[-2] + if v3 == '##': + # remove the token paste + accu.pop() + if v4 == ',' and pt < st: + # remove the comma + accu.pop() + accu += va_toks + else: + accu[-1] = paste_tokens(t1, to_add[j+1]) + + j += 1 + else: + # invalid paste, case "##a" or "b##" + accu.append((p2, v2)) + + elif p2 == IDENT and v2 in arg_table: + toks = args[arg_table[v2]] + reduce_tokens(toks, defs, ban+[v]) + accu.extend(toks) + else: + accu.append((p2, v2)) + + j += 1 + + + reduce_tokens(accu, defs, ban+[v]) + + for x in xrange(len(accu)-1, -1, -1): + lst.insert(i, accu[x]) + + i += 1 + + +def eval_macro(lst, adefs): + """reduce the tokens from the list lst, and try to return a 0/1 result""" + reduce_tokens(lst, adefs, []) + if not lst: raise PreprocError("missing tokens to evaluate") + (p, v) = reduce_eval(lst) + return int(v) != 0 + +def extract_macro(txt): + """process a macro definition from "#define f(x, y) x * y" into a function or a simple macro without arguments""" + t = tokenize(txt) + if re_fun.search(txt): + p, name = t[0] + + p, v = t[1] + if p != OP: raise PreprocError("expected open parenthesis") + + i = 1 + pindex = 0 + params = {} + prev = '(' + + while 1: + i += 1 + p, v = t[i] + + if prev == '(': + if p == IDENT: + params[v] = pindex + pindex += 1 + prev = p + elif p 
== OP and v == ')': + break + else: + raise PreprocError("unexpected token (3)") + elif prev == IDENT: + if p == OP and v == ',': + prev = v + elif p == OP and v == ')': + break + else: + raise PreprocError("comma or ... expected") + elif prev == ',': + if p == IDENT: + params[v] = pindex + pindex += 1 + prev = p + elif p == OP and v == '...': + raise PreprocError("not implemented (1)") + else: + raise PreprocError("comma or ... expected (2)") + elif prev == '...': + raise PreprocError("not implemented (2)") + else: + raise PreprocError("unexpected else") + + #~ print (name, [params, t[i+1:]]) + return (name, [params, t[i+1:]]) + else: + (p, v) = t[0] + return (v, [[], t[1:]]) + +re_include = re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")') +def extract_include(txt, defs): + """process a line in the form "#include foo" to return a string representing the file""" + m = re_include.search(txt) + if m: + if m.group('a'): return '<', m.group('a') + if m.group('b'): return '"', m.group('b') + + # perform preprocessing and look at the result, it must match an include + toks = tokenize(txt) + reduce_tokens(toks, defs, ['waf_include']) + + if not toks: + raise PreprocError("could not parse include %s" % txt) + + if len(toks) == 1: + if toks[0][0] == STR: + return '"', toks[0][1] + else: + if toks[0][1] == '<' and toks[-1][1] == '>': + return stringize(toks).lstrip('<').rstrip('>') + + raise PreprocError("could not parse include %s." 
% txt) + +def parse_char(txt): + if not txt: raise PreprocError("attempted to parse a null char") + if txt[0] != '\\': + return ord(txt) + c = txt[1] + if c == 'x': + if len(txt) == 4 and txt[3] in string.hexdigits: return int(txt[2:], 16) + return int(txt[2:], 16) + elif c.isdigit(): + if c == '0' and len(txt)==2: return 0 + for i in 3, 2, 1: + if len(txt) > i and txt[1:1+i].isdigit(): + return (1+i, int(txt[1:1+i], 8)) + else: + try: return chr_esc[c] + except KeyError: raise PreprocError("could not parse char literal '%s'" % txt) + +@Utils.run_once +def tokenize(s): + """convert a string into a list of tokens (shlex.split does not apply to c/c++/d)""" + ret = [] + for match in re_clexer.finditer(s): + m = match.group + for name in tok_types: + v = m(name) + if v: + if name == IDENT: + try: v = g_optrans[v]; name = OP + except KeyError: + # c++ specific + if v.lower() == "true": + v = 1 + name = NUM + elif v.lower() == "false": + v = 0 + name = NUM + elif name == NUM: + if m('oct'): v = int(v, 8) + elif m('hex'): v = int(m('hex'), 16) + elif m('n0'): v = m('n0') + else: + v = m('char') + if v: v = parse_char(v) + else: v = m('n2') or m('n4') + elif name == OP: + if v == '%:': v = '#' + elif v == '%:%:': v = '##' + elif name == STR: + # remove the quotes around the string + v = v[1:-1] + ret.append((name, v)) + break + return ret + +@Utils.run_once +def define_name(line): + return re_mac.match(line).group(0) + +class c_parser(object): + def __init__(self, nodepaths=None, defines=None): + #self.lines = txt.split('\n') + self.lines = [] + + if defines is None: + self.defs = {} + else: + self.defs = dict(defines) # make a copy + self.state = [] + + self.env = None # needed for the variant when searching for files + + self.count_files = 0 + self.currentnode_stack = [] + + self.nodepaths = nodepaths or [] + + self.nodes = [] + self.names = [] + + # file added + self.curfile = '' + self.ban_includes = set([]) + + def cached_find_resource(self, node, filename): + try: + 
nd = node.bld.cache_nd + except: + nd = node.bld.cache_nd = {} + + tup = (node.id, filename) + try: + return nd[tup] + except KeyError: + ret = node.find_resource(filename) + nd[tup] = ret + return ret + + def tryfind(self, filename): + self.curfile = filename + + # for msvc it should be a for loop on the whole stack + found = self.cached_find_resource(self.currentnode_stack[-1], filename) + + for n in self.nodepaths: + if found: + break + found = self.cached_find_resource(n, filename) + + if found: + self.nodes.append(found) + if filename[-4:] != '.moc': + self.addlines(found) + else: + if not filename in self.names: + self.names.append(filename) + return found + + def addlines(self, node): + + self.currentnode_stack.append(node.parent) + filepath = node.abspath(self.env) + + self.count_files += 1 + if self.count_files > recursion_limit: raise PreprocError("recursion limit exceeded") + pc = self.parse_cache + debug('preproc: reading file %r', filepath) + try: + lns = pc[filepath] + except KeyError: + pass + else: + self.lines.extend(lns) + return + + try: + lines = filter_comments(filepath) + lines.append((POPFILE, '')) + lines.reverse() + pc[filepath] = lines # cache the lines filtered + self.lines.extend(lines) + except IOError: + raise PreprocError("could not read the file %s" % filepath) + except Exception: + if Logs.verbose > 0: + error("parsing %s failed" % filepath) + traceback.print_exc() + + def start(self, node, env): + debug('preproc: scanning %s (in %s)', node.name, node.parent.name) + + self.env = env + variant = node.variant(env) + bld = node.__class__.bld + try: + self.parse_cache = bld.parse_cache + except AttributeError: + bld.parse_cache = {} + self.parse_cache = bld.parse_cache + + self.addlines(node) + if env['DEFLINES']: + lst = [('define', x) for x in env['DEFLINES']] + lst.reverse() + self.lines.extend(lst) + + while self.lines: + (kind, line) = self.lines.pop() + if kind == POPFILE: + self.currentnode_stack.pop() + continue + try: + 
self.process_line(kind, line) + except Exception, e: + if Logs.verbose: + debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack()) + + def process_line(self, token, line): + """ + WARNING: a new state must be added for if* because the endif + """ + ve = Logs.verbose + if ve: debug('preproc: line is %s - %s state is %s', token, line, self.state) + state = self.state + + # make certain we define the state if we are about to enter in an if block + if token in ['ifdef', 'ifndef', 'if']: + state.append(undefined) + elif token == 'endif': + state.pop() + + # skip lines when in a dead 'if' branch, wait for the endif + if not token in ['else', 'elif', 'endif']: + if skipped in self.state or ignored in self.state: + return + + if token == 'if': + ret = eval_macro(tokenize(line), self.defs) + if ret: state[-1] = accepted + else: state[-1] = ignored + elif token == 'ifdef': + m = re_mac.match(line) + if m and m.group(0) in self.defs: state[-1] = accepted + else: state[-1] = ignored + elif token == 'ifndef': + m = re_mac.match(line) + if m and m.group(0) in self.defs: state[-1] = ignored + else: state[-1] = accepted + elif token == 'include' or token == 'import': + (kind, inc) = extract_include(line, self.defs) + if inc in self.ban_includes: return + if token == 'import': self.ban_includes.add(inc) + if ve: debug('preproc: include found %s (%s) ', inc, kind) + if kind == '"' or not strict_quotes: + self.tryfind(inc) + elif token == 'elif': + if state[-1] == accepted: + state[-1] = skipped + elif state[-1] == ignored: + if eval_macro(tokenize(line), self.defs): + state[-1] = accepted + elif token == 'else': + if state[-1] == accepted: state[-1] = skipped + elif state[-1] == ignored: state[-1] = accepted + elif token == 'define': + try: + self.defs[define_name(line)] = line + except: + raise PreprocError("invalid define line %s" % line) + elif token == 'undef': + m = re_mac.match(line) + if m and m.group(0) in self.defs: + self.defs.__delitem__(m.group(0)) + 
#print "undef %s" % name + elif token == 'pragma': + if re_pragma_once.match(line.lower()): + self.ban_includes.add(self.curfile) + +def get_deps(node, env, nodepaths=[]): + """ + Get the dependencies using a c/c++ preprocessor, this is required for finding dependencies of the kind + #include some_macro() + """ + + gruik = c_parser(nodepaths) + gruik.start(node, env) + return (gruik.nodes, gruik.names) + +#################### dumb dependency scanner + +re_inc = re.compile(\ + '^[ \t]*(#|%:)[ \t]*(include)[ \t]*(.*)\r*$', + re.IGNORECASE | re.MULTILINE) + +def lines_includes(filename): + code = Utils.readf(filename) + if use_trigraphs: + for (a, b) in trig_def: code = code.split(a).join(b) + code = re_nl.sub('', code) + code = re_cpp.sub(repl, code) + return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)] + +def get_deps_simple(node, env, nodepaths=[], defines={}): + """ + Get the dependencies by just looking recursively at the #include statements + """ + + nodes = [] + names = [] + + def find_deps(node): + lst = lines_includes(node.abspath(env)) + + for (_, line) in lst: + (t, filename) = extract_include(line, defines) + if filename in names: + continue + + if filename.endswith('.moc'): + names.append(filename) + + found = None + for n in nodepaths: + if found: + break + found = n.find_resource(filename) + + if not found: + if not filename in names: + names.append(filename) + elif not found in nodes: + nodes.append(found) + find_deps(node) + + find_deps(node) + return (nodes, names) + + diff --git a/buildtools/wafadmin/Tools/python.py b/buildtools/wafadmin/Tools/python.py new file mode 100644 index 0000000000..4f73081635 --- /dev/null +++ b/buildtools/wafadmin/Tools/python.py @@ -0,0 +1,413 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2007 (ita) +# Gustavo Carneiro (gjc), 2007 + +"Python support" + +import os, sys +import TaskGen, Utils, Utils, Runner, Options, Build +from Logs import debug, warn, info +from TaskGen import extension, 
taskgen, before, after, feature +from Configure import conf + +EXT_PY = ['.py'] +FRAG_2 = ''' +#include "Python.h" +#ifdef __cplusplus +extern "C" { +#endif + void Py_Initialize(void); + void Py_Finalize(void); +#ifdef __cplusplus +} +#endif +int main() +{ + Py_Initialize(); + Py_Finalize(); + return 0; +} +''' + +@feature('pyext') +@before('apply_incpaths', 'apply_lib_vars', 'apply_type_vars', 'apply_bundle') +@after('vars_target_cshlib') +def init_pyext(self): + self.default_install_path = '${PYTHONARCHDIR}' + self.uselib = self.to_list(getattr(self, 'uselib', '')) + if not 'PYEXT' in self.uselib: + self.uselib.append('PYEXT') + self.env['MACBUNDLE'] = True + +@before('apply_link', 'apply_lib_vars', 'apply_type_vars') +@after('apply_bundle') +@feature('pyext') +def pyext_shlib_ext(self): + # override shlib_PATTERN set by the osx module + self.env['shlib_PATTERN'] = self.env['pyext_PATTERN'] + +@before('apply_incpaths', 'apply_lib_vars', 'apply_type_vars') +@feature('pyembed') +def init_pyembed(self): + self.uselib = self.to_list(getattr(self, 'uselib', '')) + if not 'PYEMBED' in self.uselib: + self.uselib.append('PYEMBED') + +@extension(EXT_PY) +def process_py(self, node): + if not (self.bld.is_install and self.install_path): + return + def inst_py(ctx): + install_pyfile(self, node) + self.bld.add_post_fun(inst_py) + +def install_pyfile(self, node): + path = self.bld.get_install_path(self.install_path + os.sep + node.name, self.env) + + self.bld.install_files(self.install_path, [node], self.env, self.chmod, postpone=False) + if self.bld.is_install < 0: + info("* removing byte compiled python files") + for x in 'co': + try: + os.remove(path + x) + except OSError: + pass + + if self.bld.is_install > 0: + if self.env['PYC'] or self.env['PYO']: + info("* byte compiling %r" % path) + + if self.env['PYC']: + program = (""" +import sys, py_compile +for pyfile in sys.argv[1:]: + py_compile.compile(pyfile, pyfile + 'c') +""") + argv = [self.env['PYTHON'], '-c', program, 
path] + ret = Utils.pproc.Popen(argv).wait() + if ret: + raise Utils.WafError('bytecode compilation failed %r' % path) + + if self.env['PYO']: + program = (""" +import sys, py_compile +for pyfile in sys.argv[1:]: + py_compile.compile(pyfile, pyfile + 'o') +""") + argv = [self.env['PYTHON'], self.env['PYFLAGS_OPT'], '-c', program, path] + ret = Utils.pproc.Popen(argv).wait() + if ret: + raise Utils.WafError('bytecode compilation failed %r' % path) + +# COMPAT +class py_taskgen(TaskGen.task_gen): + def __init__(self, *k, **kw): + TaskGen.task_gen.__init__(self, *k, **kw) + +@before('apply_core') +@after('vars_target_cprogram', 'vars_target_cshlib') +@feature('py') +def init_py(self): + self.default_install_path = '${PYTHONDIR}' + +def _get_python_variables(python_exe, variables, imports=['import sys']): + """Run a python interpreter and print some variables""" + program = list(imports) + program.append('') + for v in variables: + program.append("print(repr(%s))" % v) + os_env = dict(os.environ) + try: + del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool + except KeyError: + pass + proc = Utils.pproc.Popen([python_exe, "-c", '\n'.join(program)], stdout=Utils.pproc.PIPE, env=os_env) + output = proc.communicate()[0].split("\n") # do not touch, python3 + if proc.returncode: + if Options.options.verbose: + warn("Python program to extract python configuration variables failed:\n%s" + % '\n'.join(["line %03i: %s" % (lineno+1, line) for lineno, line in enumerate(program)])) + raise RuntimeError + return_values = [] + for s in output: + s = s.strip() + if not s: + continue + if s == 'None': + return_values.append(None) + elif s[0] == "'" and s[-1] == "'": + return_values.append(s[1:-1]) + elif s[0].isdigit(): + return_values.append(int(s)) + else: break + return return_values + +@conf +def check_python_headers(conf, mandatory=True): + """Check for headers and libraries necessary to extend or embed python. 
+ + On success the environment variables xxx_PYEXT and xxx_PYEMBED are added for uselib + + PYEXT: for compiling python extensions + PYEMBED: for embedding a python interpreter""" + + if not conf.env['CC_NAME'] and not conf.env['CXX_NAME']: + conf.fatal('load a compiler first (gcc, g++, ..)') + + if not conf.env['PYTHON_VERSION']: + conf.check_python_version() + + env = conf.env + python = env['PYTHON'] + if not python: + conf.fatal('could not find the python executable') + + ## On Mac OSX we need to use mac bundles for python plugins + if Options.platform == 'darwin': + conf.check_tool('osx') + + try: + # Get some python configuration variables using distutils + v = 'prefix SO SYSLIBS LDFLAGS SHLIBS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET'.split() + (python_prefix, python_SO, python_SYSLIBS, python_LDFLAGS, python_SHLIBS, + python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED, + python_MACOSX_DEPLOYMENT_TARGET) = \ + _get_python_variables(python, ["get_config_var('%s')" % x for x in v], + ['from distutils.sysconfig import get_config_var']) + except RuntimeError: + conf.fatal("Python development headers not found (-v for details).") + + conf.log.write("""Configuration returned from %r: +python_prefix = %r +python_SO = %r +python_SYSLIBS = %r +python_LDFLAGS = %r +python_SHLIBS = %r +python_LIBDIR = %r +python_LIBPL = %r +INCLUDEPY = %r +Py_ENABLE_SHARED = %r +MACOSX_DEPLOYMENT_TARGET = %r +""" % (python, python_prefix, python_SO, python_SYSLIBS, python_LDFLAGS, python_SHLIBS, + python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED, python_MACOSX_DEPLOYMENT_TARGET)) + + if python_MACOSX_DEPLOYMENT_TARGET: + conf.env['MACOSX_DEPLOYMENT_TARGET'] = python_MACOSX_DEPLOYMENT_TARGET + conf.environ['MACOSX_DEPLOYMENT_TARGET'] = python_MACOSX_DEPLOYMENT_TARGET + + env['pyext_PATTERN'] = '%s'+python_SO + + # Check for python libraries for embedding + if python_SYSLIBS is not None: + for lib in python_SYSLIBS.split(): + if lib.startswith('-l'): 
+ lib = lib[2:] # strip '-l' + env.append_value('LIB_PYEMBED', lib) + + if python_SHLIBS is not None: + for lib in python_SHLIBS.split(): + if lib.startswith('-l'): + env.append_value('LIB_PYEMBED', lib[2:]) # strip '-l' + else: + env.append_value('LINKFLAGS_PYEMBED', lib) + + if Options.platform != 'darwin' and python_LDFLAGS: + env.append_value('LINKFLAGS_PYEMBED', python_LDFLAGS.split()) + + result = False + name = 'python' + env['PYTHON_VERSION'] + + if python_LIBDIR is not None: + path = [python_LIBDIR] + conf.log.write("\n\n# Trying LIBDIR: %r\n" % path) + result = conf.check(lib=name, uselib='PYEMBED', libpath=path) + + if not result and python_LIBPL is not None: + conf.log.write("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n") + path = [python_LIBPL] + result = conf.check(lib=name, uselib='PYEMBED', libpath=path) + + if not result: + conf.log.write("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n") + path = [os.path.join(python_prefix, "libs")] + name = 'python' + env['PYTHON_VERSION'].replace('.', '') + result = conf.check(lib=name, uselib='PYEMBED', libpath=path) + + if result: + env['LIBPATH_PYEMBED'] = path + env.append_value('LIB_PYEMBED', name) + else: + conf.log.write("\n\n### LIB NOT FOUND\n") + + # under certain conditions, python extensions must link to + # python libraries, not just python embedding programs. + if (sys.platform == 'win32' or sys.platform.startswith('os2') + or sys.platform == 'darwin' or Py_ENABLE_SHARED): + env['LIBPATH_PYEXT'] = env['LIBPATH_PYEMBED'] + env['LIB_PYEXT'] = env['LIB_PYEMBED'] + + # We check that pythonX.Y-config exists, and if it exists we + # use it to get only the includes, else fall back to distutils. 
+ python_config = conf.find_program( + 'python%s-config' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])), + var='PYTHON_CONFIG') + if not python_config: + python_config = conf.find_program( + 'python-config-%s' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])), + var='PYTHON_CONFIG') + + includes = [] + if python_config: + for incstr in Utils.cmd_output("%s %s --includes" % (python, python_config)).strip().split(): + # strip the -I or /I + if (incstr.startswith('-I') + or incstr.startswith('/I')): + incstr = incstr[2:] + # append include path, unless already given + if incstr not in includes: + includes.append(incstr) + conf.log.write("Include path for Python extensions " + "(found via python-config --includes): %r\n" % (includes,)) + env['CPPPATH_PYEXT'] = includes + env['CPPPATH_PYEMBED'] = includes + else: + conf.log.write("Include path for Python extensions " + "(found via distutils module): %r\n" % (INCLUDEPY,)) + env['CPPPATH_PYEXT'] = [INCLUDEPY] + env['CPPPATH_PYEMBED'] = [INCLUDEPY] + + # Code using the Python API needs to be compiled with -fno-strict-aliasing + if env['CC_NAME'] == 'gcc': + env.append_value('CCFLAGS_PYEMBED', '-fno-strict-aliasing') + env.append_value('CCFLAGS_PYEXT', '-fno-strict-aliasing') + if env['CXX_NAME'] == 'gcc': + env.append_value('CXXFLAGS_PYEMBED', '-fno-strict-aliasing') + env.append_value('CXXFLAGS_PYEXT', '-fno-strict-aliasing') + + # See if it compiles + conf.check(define_name='HAVE_PYTHON_H', + uselib='PYEMBED', fragment=FRAG_2, + errmsg='Could not find the python development headers', mandatory=mandatory) + +@conf +def check_python_version(conf, minver=None): + """ + Check if the python interpreter is found matching a given minimum version. + minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver. + + If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR' + (eg. 
'2.4') of the actual python version found, and PYTHONDIR is + defined, pointing to the site-packages directory appropriate for + this python version, where modules/packages/extensions should be + installed. + """ + assert minver is None or isinstance(minver, tuple) + python = conf.env['PYTHON'] + if not python: + conf.fatal('could not find the python executable') + + # Get python version string + cmd = [python, "-c", "import sys\nfor x in sys.version_info: print(str(x))"] + debug('python: Running python command %r' % cmd) + proc = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE) + lines = proc.communicate()[0].split() + assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines) + pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4])) + + # compare python version with the minimum required + result = (minver is None) or (pyver_tuple >= minver) + + if result: + # define useful environment variables + pyver = '.'.join([str(x) for x in pyver_tuple[:2]]) + conf.env['PYTHON_VERSION'] = pyver + + if 'PYTHONDIR' in conf.environ: + pydir = conf.environ['PYTHONDIR'] + else: + if sys.platform == 'win32': + (python_LIBDEST, pydir) = \ + _get_python_variables(python, + ["get_config_var('LIBDEST')", + "get_python_lib(standard_lib=0, prefix=%r)" % conf.env['PREFIX']], + ['from distutils.sysconfig import get_config_var, get_python_lib']) + else: + python_LIBDEST = None + (pydir,) = \ + _get_python_variables(python, + ["get_python_lib(standard_lib=0, prefix=%r)" % conf.env['PREFIX']], + ['from distutils.sysconfig import get_config_var, get_python_lib']) + if python_LIBDEST is None: + if conf.env['LIBDIR']: + python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver) + else: + python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver) + + if 'PYTHONARCHDIR' in conf.environ: + pyarchdir = conf.environ['PYTHONARCHDIR'] + else: + (pyarchdir,) = _get_python_variables(python, + ["get_python_lib(plat_specific=1, 
standard_lib=0, prefix=%r)" % conf.env['PREFIX']], + ['from distutils.sysconfig import get_config_var, get_python_lib']) + if not pyarchdir: + pyarchdir = pydir + + if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist + conf.define('PYTHONDIR', pydir) + conf.define('PYTHONARCHDIR', pyarchdir) + + conf.env['PYTHONDIR'] = pydir + + # Feedback + pyver_full = '.'.join(map(str, pyver_tuple[:3])) + if minver is None: + conf.check_message_custom('Python version', '', pyver_full) + else: + minver_str = '.'.join(map(str, minver)) + conf.check_message('Python version', ">= %s" % minver_str, result, option=pyver_full) + + if not result: + conf.fatal('The python version is too old (%r)' % pyver_full) + +@conf +def check_python_module(conf, module_name): + """ + Check if the selected python interpreter can import the given python module. + """ + result = not Utils.pproc.Popen([conf.env['PYTHON'], "-c", "import %s" % module_name], + stderr=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE).wait() + conf.check_message('Python module', module_name, result) + if not result: + conf.fatal('Could not find the python module %r' % module_name) + +def detect(conf): + + if not conf.env.PYTHON: + conf.env.PYTHON = sys.executable + + python = conf.find_program('python', var='PYTHON') + if not python: + conf.fatal('Could not find the path of the python executable') + + v = conf.env + + v['PYCMD'] = '"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"' + v['PYFLAGS'] = '' + v['PYFLAGS_OPT'] = '-O' + + v['PYC'] = getattr(Options.options, 'pyc', 1) + v['PYO'] = getattr(Options.options, 'pyo', 1) + +def set_options(opt): + opt.add_option('--nopyc', + action='store_false', + default=1, + help = 'Do not install bytecode compiled .pyc files (configuration) [Default:install]', + dest = 'pyc') + opt.add_option('--nopyo', + action='store_false', + default=1, + help='Do not install optimised compiled .pyo files (configuration) [Default:install]', + dest='pyo') + 
diff --git a/buildtools/wafadmin/Tools/qt4.py b/buildtools/wafadmin/Tools/qt4.py new file mode 100644 index 0000000000..84d121a844 --- /dev/null +++ b/buildtools/wafadmin/Tools/qt4.py @@ -0,0 +1,505 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006 (ita) + +""" +Qt4 support + +If QT4_ROOT is given (absolute path), the configuration will look in it first + +This module also demonstrates how to add tasks dynamically (when the build has started) +""" + +try: + from xml.sax import make_parser + from xml.sax.handler import ContentHandler +except ImportError: + has_xml = False + ContentHandler = object +else: + has_xml = True + +import os, sys +import ccroot, cxx +import TaskGen, Task, Utils, Runner, Options, Node, Configure +from TaskGen import taskgen, feature, after, extension +from Logs import error +from Constants import * + +MOC_H = ['.h', '.hpp', '.hxx', '.hh'] +EXT_RCC = ['.qrc'] +EXT_UI = ['.ui'] +EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C'] + +class qxx_task(Task.Task): + "A cpp task that may create a moc task dynamically" + + before = ['cxx_link', 'static_link'] + + def __init__(self, *k, **kw): + Task.Task.__init__(self, *k, **kw) + self.moc_done = 0 + + def scan(self): + (nodes, names) = ccroot.scan(self) + # for some reasons (variants) the moc node may end in the list of node deps + for x in nodes: + if x.name.endswith('.moc'): + nodes.remove(x) + names.append(x.relpath_gen(self.inputs[0].parent)) + return (nodes, names) + + def runnable_status(self): + if self.moc_done: + # if there is a moc task, delay the computation of the file signature + for t in self.run_after: + if not t.hasrun: + return ASK_LATER + # the moc file enters in the dependency calculation + # so we need to recompute the signature when the moc file is present + self.signature() + return Task.Task.runnable_status(self) + else: + # yes, really, there are people who generate cxx files + for t in self.run_after: + if not t.hasrun: + return ASK_LATER + self.add_moc_tasks() + return 
ASK_LATER + + def add_moc_tasks(self): + + node = self.inputs[0] + tree = node.__class__.bld + + try: + # compute the signature once to know if there is a moc file to create + self.signature() + except KeyError: + # the moc file may be referenced somewhere else + pass + else: + # remove the signature, it must be recomputed with the moc task + delattr(self, 'cache_sig') + + moctasks=[] + mocfiles=[] + variant = node.variant(self.env) + try: + tmp_lst = tree.raw_deps[self.unique_id()] + tree.raw_deps[self.unique_id()] = [] + except KeyError: + tmp_lst = [] + for d in tmp_lst: + if not d.endswith('.moc'): continue + # paranoid check + if d in mocfiles: + error("paranoia owns") + continue + + # process that base.moc only once + mocfiles.append(d) + + # find the extension (performed only when the .cpp has changes) + base2 = d[:-4] + for path in [node.parent] + self.generator.env['INC_PATHS']: + tree.rescan(path) + vals = getattr(Options.options, 'qt_header_ext', '') or MOC_H + for ex in vals: + h_node = path.find_resource(base2 + ex) + if h_node: + break + else: + continue + break + else: + raise Utils.WafError("no header found for %s which is a moc file" % str(d)) + + m_node = h_node.change_ext('.moc') + tree.node_deps[(self.inputs[0].parent.id, self.env.variant(), m_node.name)] = h_node + + # create the task + task = Task.TaskBase.classes['moc'](self.env, normal=0) + task.set_inputs(h_node) + task.set_outputs(m_node) + + generator = tree.generator + generator.outstanding.insert(0, task) + generator.total += 1 + + moctasks.append(task) + + # remove raw deps except the moc files to save space (optimization) + tmp_lst = tree.raw_deps[self.unique_id()] = mocfiles + + # look at the file inputs, it is set right above + lst = tree.node_deps.get(self.unique_id(), ()) + for d in lst: + name = d.name + if name.endswith('.moc'): + task = Task.TaskBase.classes['moc'](self.env, normal=0) + task.set_inputs(tree.node_deps[(self.inputs[0].parent.id, self.env.variant(), name)]) # 1st 
element in a tuple + task.set_outputs(d) + + generator = tree.generator + generator.outstanding.insert(0, task) + generator.total += 1 + + moctasks.append(task) + + # simple scheduler dependency: run the moc task before others + self.run_after = moctasks + self.moc_done = 1 + + run = Task.TaskBase.classes['cxx'].__dict__['run'] + +def translation_update(task): + outs = [a.abspath(task.env) for a in task.outputs] + outs = " ".join(outs) + lupdate = task.env['QT_LUPDATE'] + + for x in task.inputs: + file = x.abspath(task.env) + cmd = "%s %s -ts %s" % (lupdate, file, outs) + Utils.pprint('BLUE', cmd) + task.generator.bld.exec_command(cmd) + +class XMLHandler(ContentHandler): + def __init__(self): + self.buf = [] + self.files = [] + def startElement(self, name, attrs): + if name == 'file': + self.buf = [] + def endElement(self, name): + if name == 'file': + self.files.append(''.join(self.buf)) + def characters(self, cars): + self.buf.append(cars) + +def scan(self): + "add the dependency on the files referenced in the qrc" + node = self.inputs[0] + parser = make_parser() + curHandler = XMLHandler() + parser.setContentHandler(curHandler) + fi = open(self.inputs[0].abspath(self.env)) + parser.parse(fi) + fi.close() + + nodes = [] + names = [] + root = self.inputs[0].parent + for x in curHandler.files: + nd = root.find_resource(x) + if nd: nodes.append(nd) + else: names.append(x) + + return (nodes, names) + +@extension(EXT_RCC) +def create_rcc_task(self, node): + "hook for rcc files" + rcnode = node.change_ext('_rc.cpp') + rcctask = self.create_task('rcc', node, rcnode) + cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o')) + self.compiled_tasks.append(cpptask) + return cpptask + +@extension(EXT_UI) +def create_uic_task(self, node): + "hook for uic tasks" + uictask = self.create_task('ui4', node) + uictask.outputs = [self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])] + return uictask + +class qt4_taskgen(cxx.cxx_taskgen): + def __init__(self, 
*k, **kw): + cxx.cxx_taskgen.__init__(self, *k, **kw) + self.features.append('qt4') + +@extension('.ts') +def add_lang(self, node): + """add all the .ts file into self.lang""" + self.lang = self.to_list(getattr(self, 'lang', [])) + [node] + +@feature('qt4') +@after('apply_link') +def apply_qt4(self): + if getattr(self, 'lang', None): + update = getattr(self, 'update', None) + lst=[] + trans=[] + for l in self.to_list(self.lang): + + if not isinstance(l, Node.Node): + l = self.path.find_resource(l+'.ts') + + t = self.create_task('ts2qm', l, l.change_ext('.qm')) + lst.append(t.outputs[0]) + + if update: + trans.append(t.inputs[0]) + + trans_qt4 = getattr(Options.options, 'trans_qt4', False) + if update and trans_qt4: + # we need the cpp files given, except the rcc task we create after + # FIXME may be broken + u = Task.TaskCmd(translation_update, self.env, 2) + u.inputs = [a.inputs[0] for a in self.compiled_tasks] + u.outputs = trans + + if getattr(self, 'langname', None): + t = Task.TaskBase.classes['qm2rcc'](self.env) + t.set_inputs(lst) + t.set_outputs(self.path.find_or_declare(self.langname+'.qrc')) + t.path = self.path + k = create_rcc_task(self, t.outputs[0]) + self.link_task.inputs.append(k.outputs[0]) + + self.env.append_value('MOC_FLAGS', self.env._CXXDEFFLAGS) + self.env.append_value('MOC_FLAGS', self.env._CXXINCFLAGS) + +@extension(EXT_QT4) +def cxx_hook(self, node): + # create the compilation task: cpp or cc + try: obj_ext = self.obj_ext + except AttributeError: obj_ext = '_%d.o' % self.idx + + task = self.create_task('qxx', node, node.change_ext(obj_ext)) + self.compiled_tasks.append(task) + return task + +def process_qm2rcc(task): + outfile = task.outputs[0].abspath(task.env) + f = open(outfile, 'w') + f.write('<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n') + for k in task.inputs: + f.write(' <file>') + #f.write(k.name) + f.write(k.path_to_parent(task.path)) + f.write('</file>\n') + f.write('</qresource>\n</RCC>') + f.close() + +b = 
Task.simple_task_type +b('moc', '${QT_MOC} ${MOC_FLAGS} ${SRC} ${MOC_ST} ${TGT}', color='BLUE', vars=['QT_MOC', 'MOC_FLAGS'], shell=False) +cls = b('rcc', '${QT_RCC} -name ${SRC[0].name} ${SRC[0].abspath(env)} ${RCC_ST} -o ${TGT}', color='BLUE', before='cxx moc qxx_task', after="qm2rcc", shell=False) +cls.scan = scan +b('ui4', '${QT_UIC} ${SRC} -o ${TGT}', color='BLUE', before='cxx moc qxx_task', shell=False) +b('ts2qm', '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}', color='BLUE', before='qm2rcc', shell=False) + +Task.task_type_from_func('qm2rcc', vars=[], func=process_qm2rcc, color='BLUE', before='rcc', after='ts2qm') + +def detect_qt4(conf): + env = conf.env + opt = Options.options + + qtdir = getattr(opt, 'qtdir', '') + qtbin = getattr(opt, 'qtbin', '') + qtlibs = getattr(opt, 'qtlibs', '') + useframework = getattr(opt, 'use_qt4_osxframework', True) + + paths = [] + + # the path to qmake has been given explicitely + if qtbin: + paths = [qtbin] + + # the qt directory has been given - we deduce the qt binary path + if not qtdir: + qtdir = conf.environ.get('QT4_ROOT', '') + qtbin = os.path.join(qtdir, 'bin') + paths = [qtbin] + + # no qtdir, look in the path and in /usr/local/Trolltech + if not qtdir: + paths = os.environ.get('PATH', '').split(os.pathsep) + paths.append('/usr/share/qt4/bin/') + try: + lst = os.listdir('/usr/local/Trolltech/') + except OSError: + pass + else: + if lst: + lst.sort() + lst.reverse() + + # keep the highest version + qtdir = '/usr/local/Trolltech/%s/' % lst[0] + qtbin = os.path.join(qtdir, 'bin') + paths.append(qtbin) + + # at the end, try to find qmake in the paths given + # keep the one with the highest version + cand = None + prev_ver = ['4', '0', '0'] + for qmk in ['qmake-qt4', 'qmake4', 'qmake']: + qmake = conf.find_program(qmk, path_list=paths) + if qmake: + try: + version = Utils.cmd_output([qmake, '-query', 'QT_VERSION']).strip() + except ValueError: + pass + else: + if version: + new_ver = version.split('.') + if 
new_ver > prev_ver: + cand = qmake + prev_ver = new_ver + if cand: + qmake = cand + else: + conf.fatal('could not find qmake for qt4') + + conf.env.QMAKE = qmake + qtincludes = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_HEADERS']).strip() + qtdir = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_PREFIX']).strip() + os.sep + qtbin = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_BINS']).strip() + os.sep + + if not qtlibs: + try: + qtlibs = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_LIBS']).strip() + os.sep + except ValueError: + qtlibs = os.path.join(qtdir, 'lib') + + def find_bin(lst, var): + for f in lst: + ret = conf.find_program(f, path_list=paths) + if ret: + env[var]=ret + break + + vars = "QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtWebKit Qt3Support".split() + + find_bin(['uic-qt3', 'uic3'], 'QT_UIC3') + find_bin(['uic-qt4', 'uic'], 'QT_UIC') + if not env['QT_UIC']: + conf.fatal('cannot find the uic compiler for qt4') + + try: + version = Utils.cmd_output(env['QT_UIC'] + " -version 2>&1").strip() + except ValueError: + conf.fatal('your uic compiler is for qt3, add uic for qt4 to your path') + + version = version.replace('Qt User Interface Compiler ','') + version = version.replace('User Interface Compiler for Qt', '') + if version.find(" 3.") != -1: + conf.check_message('uic version', '(too old)', 0, option='(%s)'%version) + sys.exit(1) + conf.check_message('uic version', '', 1, option='(%s)'%version) + + find_bin(['moc-qt4', 'moc'], 'QT_MOC') + find_bin(['rcc'], 'QT_RCC') + find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE') + find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE') + + env['UIC3_ST']= '%s -o %s' + env['UIC_ST'] = '%s -o %s' + env['MOC_ST'] = '-o' + env['ui_PATTERN'] = 'ui_%s.h' + env['QT_LRELEASE_FLAGS'] = ['-silent'] + + vars_debug = [a+'_debug' for a in vars] + + try: + conf.find_program('pkg-config', var='pkgconfig', path_list=paths, mandatory=True) + + except Configure.ConfigurationError: + + for lib in 
vars_debug+vars: + uselib = lib.upper() + + d = (lib.find('_debug') > 0) and 'd' or '' + + # original author seems to prefer static to shared libraries + for (pat, kind) in ((conf.env.staticlib_PATTERN, 'STATIC'), (conf.env.shlib_PATTERN, '')): + + conf.check_message_1('Checking for %s %s' % (lib, kind)) + + for ext in ['', '4']: + path = os.path.join(qtlibs, pat % (lib + d + ext)) + if os.path.exists(path): + env.append_unique(kind + 'LIB_' + uselib, lib + d + ext) + conf.check_message_2('ok ' + path, 'GREEN') + break + path = os.path.join(qtbin, pat % (lib + d + ext)) + if os.path.exists(path): + env.append_unique(kind + 'LIB_' + uselib, lib + d + ext) + conf.check_message_2('ok ' + path, 'GREEN') + break + else: + conf.check_message_2('not found', 'YELLOW') + continue + break + + env.append_unique('LIBPATH_' + uselib, qtlibs) + env.append_unique('CPPPATH_' + uselib, qtincludes) + env.append_unique('CPPPATH_' + uselib, qtincludes + os.sep + lib) + else: + for i in vars_debug+vars: + try: + conf.check_cfg(package=i, args='--cflags --libs --silence-errors', path=conf.env.pkgconfig) + except ValueError: + pass + + # the libpaths are set nicely, unfortunately they make really long command-lines + # remove the qtcore ones from qtgui, etc + def process_lib(vars_, coreval): + for d in vars_: + var = d.upper() + if var == 'QTCORE': continue + + value = env['LIBPATH_'+var] + if value: + core = env[coreval] + accu = [] + for lib in value: + if lib in core: continue + accu.append(lib) + env['LIBPATH_'+var] = accu + + process_lib(vars, 'LIBPATH_QTCORE') + process_lib(vars_debug, 'LIBPATH_QTCORE_DEBUG') + + # rpath if wanted + want_rpath = getattr(Options.options, 'want_rpath', 1) + if want_rpath: + def process_rpath(vars_, coreval): + for d in vars_: + var = d.upper() + value = env['LIBPATH_'+var] + if value: + core = env[coreval] + accu = [] + for lib in value: + if var != 'QTCORE': + if lib in core: + continue + accu.append('-Wl,--rpath='+lib) + env['RPATH_'+var] = accu + 
process_rpath(vars, 'LIBPATH_QTCORE') + process_rpath(vars_debug, 'LIBPATH_QTCORE_DEBUG') + + env['QTLOCALE'] = str(env['PREFIX'])+'/share/locale' + +def detect(conf): + detect_qt4(conf) + +def set_options(opt): + opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]') + + opt.add_option('--header-ext', + type='string', + default='', + help='header extension for moc files', + dest='qt_header_ext') + + for i in 'qtdir qtbin qtlibs'.split(): + opt.add_option('--'+i, type='string', default='', dest=i) + + if sys.platform == "darwin": + opt.add_option('--no-qt4-framework', action="store_false", help='do not use the framework version of Qt4 in OS X', dest='use_qt4_osxframework',default=True) + + opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False) + diff --git a/buildtools/wafadmin/Tools/ruby.py b/buildtools/wafadmin/Tools/ruby.py new file mode 100644 index 0000000000..d3b75695ae --- /dev/null +++ b/buildtools/wafadmin/Tools/ruby.py @@ -0,0 +1,120 @@ +#!/usr/bin/env python +# encoding: utf-8 +# daniel.svensson at purplescout.se 2008 + +import os +import Task, Options, Utils +from TaskGen import before, feature, after +from Configure import conf + +@feature('rubyext') +@before('apply_incpaths', 'apply_type_vars', 'apply_lib_vars', 'apply_bundle') +@after('default_cc', 'vars_target_cshlib') +def init_rubyext(self): + self.default_install_path = '${ARCHDIR_RUBY}' + self.uselib = self.to_list(getattr(self, 'uselib', '')) + if not 'RUBY' in self.uselib: + self.uselib.append('RUBY') + if not 'RUBYEXT' in self.uselib: + self.uselib.append('RUBYEXT') + +@feature('rubyext') +@before('apply_link') +def apply_ruby_so_name(self): + self.env['shlib_PATTERN'] = self.env['rubyext_PATTERN'] + +@conf +def check_ruby_version(conf, minver=()): + """ + Checks if ruby is installed. + If installed the variable RUBY will be set in environment. 
+ Ruby binary can be overridden by --with-ruby-binary config variable + """ + + if Options.options.rubybinary: + conf.env.RUBY = Options.options.rubybinary + else: + conf.find_program("ruby", var="RUBY", mandatory=True) + + ruby = conf.env.RUBY + + try: + version = Utils.cmd_output([ruby, '-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip() + except: + conf.fatal('could not determine ruby version') + conf.env.RUBY_VERSION = version + + try: + ver = tuple(map(int, version.split("."))) + except: + conf.fatal('unsupported ruby version %r' % version) + + cver = '' + if minver: + if ver < minver: + conf.fatal('ruby is too old') + cver = ".".join([str(x) for x in minver]) + + conf.check_message('ruby', cver, True, version) + +@conf +def check_ruby_ext_devel(conf): + if not conf.env.RUBY: + conf.fatal('ruby detection is required first') + + if not conf.env.CC_NAME and not conf.env.CXX_NAME: + conf.fatal('load a c/c++ compiler first') + + version = tuple(map(int, conf.env.RUBY_VERSION.split("."))) + + def read_out(cmd): + return Utils.to_list(Utils.cmd_output([conf.env.RUBY, '-rrbconfig', '-e', cmd])) + + def read_config(key): + return read_out('puts Config::CONFIG[%r]' % key) + + ruby = conf.env['RUBY'] + archdir = read_config('archdir') + cpppath = archdir + if version >= (1, 9, 0): + ruby_hdrdir = read_config('rubyhdrdir') + cpppath += ruby_hdrdir + cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])] + + conf.check(header_name='ruby.h', includes=cpppath, mandatory=True, errmsg='could not find ruby header file') + + conf.env.LIBPATH_RUBYEXT = read_config('libdir') + conf.env.LIBPATH_RUBYEXT += archdir + conf.env.CPPPATH_RUBYEXT = cpppath + conf.env.CCFLAGS_RUBYEXT = read_config("CCDLFLAGS") + conf.env.rubyext_PATTERN = '%s.' + read_config('DLEXT')[0] + + # ok this is really stupid, but the command and flags are combined. + # so we try to find the first argument... 
+ flags = read_config('LDSHARED') + while flags and flags[0][0] != '-': + flags = flags[1:] + + # we also want to strip out the deprecated ppc flags + if len(flags) > 1 and flags[1] == "ppc": + flags = flags[2:] + + conf.env.LINKFLAGS_RUBYEXT = flags + conf.env.LINKFLAGS_RUBYEXT += read_config("LIBS") + conf.env.LINKFLAGS_RUBYEXT += read_config("LIBRUBYARG_SHARED") + + if Options.options.rubyarchdir: + conf.env.ARCHDIR_RUBY = Options.options.rubyarchdir + else: + conf.env.ARCHDIR_RUBY = read_config('sitearchdir')[0] + + if Options.options.rubylibdir: + conf.env.LIBDIR_RUBY = Options.options.rubylibdir + else: + conf.env.LIBDIR_RUBY = read_config('sitelibdir')[0] + +def set_options(opt): + opt.add_option('--with-ruby-archdir', type='string', dest='rubyarchdir', help='Specify directory where to install arch specific files') + opt.add_option('--with-ruby-libdir', type='string', dest='rubylibdir', help='Specify alternate ruby library path') + opt.add_option('--with-ruby-binary', type='string', dest='rubybinary', help='Specify alternate ruby binary') + diff --git a/buildtools/wafadmin/Tools/suncc.py b/buildtools/wafadmin/Tools/suncc.py new file mode 100644 index 0000000000..b1a2aad403 --- /dev/null +++ b/buildtools/wafadmin/Tools/suncc.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006 (ita) +# Ralf Habacker, 2006 (rh) + +import os, optparse +import Utils, Options, Configure +import ccroot, ar +from Configure import conftest + +@conftest +def find_scc(conf): + v = conf.env + cc = None + if v['CC']: cc = v['CC'] + elif 'CC' in conf.environ: cc = conf.environ['CC'] + #if not cc: cc = conf.find_program('gcc', var='CC') + if not cc: cc = conf.find_program('cc', var='CC') + if not cc: conf.fatal('suncc was not found') + cc = conf.cmd_to_list(cc) + + try: + if not Utils.cmd_output(cc + ['-flags']): + conf.fatal('suncc %r was not found' % cc) + except ValueError: + conf.fatal('suncc -flags could not be executed') + + v['CC'] = cc + v['CC_NAME'] 
= 'sun' + +@conftest +def scc_common_flags(conf): + v = conf.env + + # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS + + v['CC_SRC_F'] = '' + v['CC_TGT_F'] = ['-c', '-o', ''] + v['CPPPATH_ST'] = '-I%s' # template for adding include paths + + # linker + if not v['LINK_CC']: v['LINK_CC'] = v['CC'] + v['CCLNK_SRC_F'] = '' + v['CCLNK_TGT_F'] = ['-o', ''] # solaris hack, separate the -o from the target + + v['LIB_ST'] = '-l%s' # template for adding libs + v['LIBPATH_ST'] = '-L%s' # template for adding libpaths + v['STATICLIB_ST'] = '-l%s' + v['STATICLIBPATH_ST'] = '-L%s' + v['CCDEFINES_ST'] = '-D%s' + + v['SONAME_ST'] = '-Wl,-h -Wl,%s' + v['SHLIB_MARKER'] = '-Bdynamic' + v['STATICLIB_MARKER'] = '-Bstatic' + + # program + v['program_PATTERN'] = '%s' + + # shared library + v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC'] + v['shlib_LINKFLAGS'] = ['-G'] + v['shlib_PATTERN'] = 'lib%s.so' + + # static lib + v['staticlib_LINKFLAGS'] = ['-Bstatic'] + v['staticlib_PATTERN'] = 'lib%s.a' + +detect = ''' +find_scc +find_cpp +find_ar +scc_common_flags +cc_load_tools +cc_add_flags +link_add_flags +''' diff --git a/buildtools/wafadmin/Tools/suncxx.py b/buildtools/wafadmin/Tools/suncxx.py new file mode 100644 index 0000000000..8754b6cc2b --- /dev/null +++ b/buildtools/wafadmin/Tools/suncxx.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006 (ita) +# Ralf Habacker, 2006 (rh) + +import os, optparse +import Utils, Options, Configure +import ccroot, ar +from Configure import conftest + +@conftest +def find_sxx(conf): + v = conf.env + cc = None + if v['CXX']: cc = v['CXX'] + elif 'CXX' in conf.environ: cc = conf.environ['CXX'] + if not cc: cc = conf.find_program('c++', var='CXX') + if not cc: conf.fatal('sunc++ was not found') + cc = conf.cmd_to_list(cc) + + try: + if not Utils.cmd_output(cc + ['-flags']): + conf.fatal('sunc++ %r was not found' % cc) + except ValueError: + conf.fatal('sunc++ -flags could not be executed') + + v['CXX'] = cc + v['CXX_NAME'] = 'sun' + 
+@conftest +def sxx_common_flags(conf): + v = conf.env + + # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS + + v['CXX_SRC_F'] = '' + v['CXX_TGT_F'] = ['-c', '-o', ''] + v['CPPPATH_ST'] = '-I%s' # template for adding include paths + + # linker + if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX'] + v['CXXLNK_SRC_F'] = '' + v['CXXLNK_TGT_F'] = ['-o', ''] # solaris hack, separate the -o from the target + + v['LIB_ST'] = '-l%s' # template for adding libs + v['LIBPATH_ST'] = '-L%s' # template for adding libpaths + v['STATICLIB_ST'] = '-l%s' + v['STATICLIBPATH_ST'] = '-L%s' + v['CXXDEFINES_ST'] = '-D%s' + + v['SONAME_ST'] = '-Wl,-h -Wl,%s' + v['SHLIB_MARKER'] = '-Bdynamic' + v['STATICLIB_MARKER'] = '-Bstatic' + + # program + v['program_PATTERN'] = '%s' + + # shared library + v['shlib_CXXFLAGS'] = ['-Kpic', '-DPIC'] + v['shlib_LINKFLAGS'] = ['-G'] + v['shlib_PATTERN'] = 'lib%s.so' + + # static lib + v['staticlib_LINKFLAGS'] = ['-Bstatic'] + v['staticlib_PATTERN'] = 'lib%s.a' + +detect = ''' +find_sxx +find_cpp +find_ar +sxx_common_flags +cxx_load_tools +cxx_add_flags +link_add_flags +''' diff --git a/buildtools/wafadmin/Tools/tex.py b/buildtools/wafadmin/Tools/tex.py new file mode 100644 index 0000000000..2dd748b232 --- /dev/null +++ b/buildtools/wafadmin/Tools/tex.py @@ -0,0 +1,251 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006 (ita) + +"TeX/LaTeX/PDFLaTeX support" + +import os, re +import Utils, TaskGen, Task, Runner, Build +from TaskGen import feature, before +from Logs import error, warn, debug + +re_tex = re.compile(r'\\(?P<type>include|input|import|bringin|lstinputlisting){(?P<file>[^{}]*)}', re.M) +def scan(self): + node = self.inputs[0] + env = self.env + + nodes = [] + names = [] + if not node: return (nodes, names) + + code = Utils.readf(node.abspath(env)) + + curdirnode = self.curdirnode + abs = curdirnode.abspath() + for match in re_tex.finditer(code): + path = match.group('file') + if path: + for k in ['', '.tex', '.ltx']: + # add another loop 
for the tex include paths? + debug('tex: trying %s%s' % (path, k)) + try: + os.stat(abs+os.sep+path+k) + except OSError: + continue + found = path+k + node = curdirnode.find_resource(found) + if node: + nodes.append(node) + else: + debug('tex: could not find %s' % path) + names.append(path) + + debug("tex: found the following : %s and names %s" % (nodes, names)) + return (nodes, names) + +latex_fun, _ = Task.compile_fun('latex', '${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False) +pdflatex_fun, _ = Task.compile_fun('pdflatex', '${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False) +bibtex_fun, _ = Task.compile_fun('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}', shell=False) +makeindex_fun, _ = Task.compile_fun('bibtex', '${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}', shell=False) + +g_bibtex_re = re.compile('bibdata', re.M) +def tex_build(task, command='LATEX'): + env = task.env + bld = task.generator.bld + + if not env['PROMPT_LATEX']: + env.append_value('LATEXFLAGS', '-interaction=batchmode') + env.append_value('PDFLATEXFLAGS', '-interaction=batchmode') + + fun = latex_fun + if command == 'PDFLATEX': + fun = pdflatex_fun + + node = task.inputs[0] + reldir = node.bld_dir(env) + + #lst = [] + #for c in Utils.split_path(reldir): + # if c: lst.append('..') + #srcfile = os.path.join(*(lst + [node.srcpath(env)])) + #sr2 = os.path.join(*(lst + [node.parent.srcpath(env)])) + srcfile = node.abspath(env) + sr2 = node.parent.abspath() + os.pathsep + node.parent.abspath(env) + os.pathsep + + aux_node = node.change_ext('.aux') + idx_node = node.change_ext('.idx') + + nm = aux_node.name + docuname = nm[ : len(nm) - 4 ] # 4 is the size of ".aux" + + # important, set the cwd for everybody + task.cwd = task.inputs[0].parent.abspath(task.env) + + + warn('first pass on %s' % command) + + task.env.env = {'TEXINPUTS': sr2} + task.env.SRCFILE = srcfile + ret = fun(task) + if ret: + return ret + + # look in the .aux file if there is a bibfile to process + try: + ct = 
Utils.readf(aux_node.abspath(env)) + except (OSError, IOError): + error('error bibtex scan') + else: + fo = g_bibtex_re.findall(ct) + + # there is a .aux file to process + if fo: + warn('calling bibtex') + + task.env.env = {'BIBINPUTS': sr2, 'BSTINPUTS': sr2} + task.env.SRCFILE = docuname + ret = bibtex_fun(task) + if ret: + error('error when calling bibtex %s' % docuname) + return ret + + # look on the filesystem if there is a .idx file to process + try: + idx_path = idx_node.abspath(env) + os.stat(idx_path) + except OSError: + error('error file.idx scan') + else: + warn('calling makeindex') + + task.env.SRCFILE = idx_node.name + task.env.env = {} + ret = makeindex_fun(task) + if ret: + error('error when calling makeindex %s' % idx_path) + return ret + + + hash = '' + i = 0 + while i < 10: + # prevent against infinite loops - one never knows + i += 1 + + # watch the contents of file.aux + prev_hash = hash + try: + hash = Utils.h_file(aux_node.abspath(env)) + except KeyError: + error('could not read aux.h -> %s' % aux_node.abspath(env)) + pass + + # debug + #print "hash is, ", hash, " ", old_hash + + # stop if file.aux does not change anymore + if hash and hash == prev_hash: + break + + # run the command + warn('calling %s' % command) + + task.env.env = {'TEXINPUTS': sr2 + os.pathsep} + task.env.SRCFILE = srcfile + ret = fun(task) + if ret: + error('error when calling %s %s' % (command, latex_compile_cmd)) + return ret + + return None # ok + +latex_vardeps = ['LATEX', 'LATEXFLAGS'] +def latex_build(task): + return tex_build(task, 'LATEX') + +pdflatex_vardeps = ['PDFLATEX', 'PDFLATEXFLAGS'] +def pdflatex_build(task): + return tex_build(task, 'PDFLATEX') + +class tex_taskgen(TaskGen.task_gen): + def __init__(self, *k, **kw): + TaskGen.task_gen.__init__(self, *k, **kw) + +@feature('tex') +@before('apply_core') +def apply_tex(self): + if not getattr(self, 'type', None) in ['latex', 'pdflatex']: + self.type = 'pdflatex' + + tree = self.bld + outs = 
Utils.to_list(getattr(self, 'outs', [])) + + # prompt for incomplete files (else the batchmode is used) + self.env['PROMPT_LATEX'] = getattr(self, 'prompt', 1) + + deps_lst = [] + + if getattr(self, 'deps', None): + deps = self.to_list(self.deps) + for filename in deps: + n = self.path.find_resource(filename) + if not n in deps_lst: deps_lst.append(n) + + self.source = self.to_list(self.source) + for filename in self.source: + base, ext = os.path.splitext(filename) + + node = self.path.find_resource(filename) + if not node: raise Utils.WafError('cannot find %s' % filename) + + if self.type == 'latex': + task = self.create_task('latex', node, node.change_ext('.dvi')) + elif self.type == 'pdflatex': + task = self.create_task('pdflatex', node, node.change_ext('.pdf')) + + task.env = self.env + task.curdirnode = self.path + + # add the manual dependencies + if deps_lst: + variant = node.variant(self.env) + try: + lst = tree.node_deps[task.unique_id()] + for n in deps_lst: + if not n in lst: + lst.append(n) + except KeyError: + tree.node_deps[task.unique_id()] = deps_lst + + if self.type == 'latex': + if 'ps' in outs: + tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps')) + tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + os.pathsep + self.path.abspath() + os.pathsep + self.path.abspath(self.env)} + if 'pdf' in outs: + tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf')) + tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + os.pathsep + self.path.abspath() + os.pathsep + self.path.abspath(self.env)} + elif self.type == 'pdflatex': + if 'ps' in outs: + self.create_task('pdf2ps', task.outputs, node.change_ext('.ps')) + self.source = [] + +def detect(conf): + v = conf.env + for p in 'tex latex pdflatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split(): + conf.find_program(p, var=p.upper()) + v[p.upper()+'FLAGS'] = '' + v['DVIPSFLAGS'] = '-Ppdf' + +b = Task.simple_task_type +b('tex', '${TEX} ${TEXFLAGS} ${SRC}', color='BLUE', 
shell=False) # not used anywhere +b('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRC}', color='BLUE', shell=False) # not used anywhere +b('dvips', '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False) +b('dvipdf', '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False) +b('pdf2ps', '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}', color='BLUE', after="dvipdf pdflatex", shell=False) + +b = Task.task_type_from_func +cls = b('latex', latex_build, vars=latex_vardeps) +cls.scan = scan +cls = b('pdflatex', pdflatex_build, vars=pdflatex_vardeps) +cls.scan = scan + diff --git a/buildtools/wafadmin/Tools/unittestw.py b/buildtools/wafadmin/Tools/unittestw.py new file mode 100644 index 0000000000..0e30a510fb --- /dev/null +++ b/buildtools/wafadmin/Tools/unittestw.py @@ -0,0 +1,310 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Carlos Rafael Giani, 2006 + +""" +Unit tests run in the shutdown() method, and for c/c++ programs + +One should NOT have to give parameters to programs to execute + +In the shutdown method, add the following code: + + >>> def shutdown(): + ... ut = UnitTest.unit_test() + ... ut.run() + ... ut.print_results() + + +Each object to use as a unit test must be a program and must have X{obj.unit_test=1} +""" +import os, sys +import Build, TaskGen, Utils, Options, Logs, Task +from TaskGen import before, after, feature +from Constants import * + +class unit_test(object): + "Unit test representation" + def __init__(self): + self.returncode_ok = 0 # Unit test returncode considered OK. All returncodes differing from this one + # will cause the unit test to be marked as "FAILED". + + # The following variables are filled with data by run(). + + # print_results() uses these for printing the unit test summary, + # but if there is need for direct access to the results, + # they can be retrieved here, after calling run(). 
+ + self.num_tests_ok = 0 # Number of successful unit tests + self.num_tests_failed = 0 # Number of failed unit tests + self.num_tests_err = 0 # Tests that have not even run + self.total_num_tests = 0 # Total amount of unit tests + self.max_label_length = 0 # Maximum label length (pretty-print the output) + + self.unit_tests = Utils.ordered_dict() # Unit test dictionary. Key: the label (unit test filename relative + # to the build dir), value: unit test filename with absolute path + self.unit_test_results = {} # Dictionary containing the unit test results. + # Key: the label, value: result (true = success false = failure) + self.unit_test_erroneous = {} # Dictionary indicating erroneous unit tests. + # Key: the label, value: true = unit test has an error false = unit test is ok + self.change_to_testfile_dir = False #True if the test file needs to be executed from the same dir + self.want_to_see_test_output = False #True to see the stdout from the testfile (for example check suites) + self.want_to_see_test_error = False #True to see the stderr from the testfile (for example check suites) + self.run_if_waf_does = 'check' #build was the old default + + def run(self): + "Run the unit tests and gather results (note: no output here)" + + self.num_tests_ok = 0 + self.num_tests_failed = 0 + self.num_tests_err = 0 + self.total_num_tests = 0 + self.max_label_length = 0 + + self.unit_tests = Utils.ordered_dict() + self.unit_test_results = {} + self.unit_test_erroneous = {} + + ld_library_path = [] + + # If waf is not building, don't run anything + if not Options.commands[self.run_if_waf_does]: return + + # Get the paths for the shared libraries, and obtain the unit tests to execute + for obj in Build.bld.all_task_gen: + try: + link_task = obj.link_task + except AttributeError: + pass + else: + lib_path = link_task.outputs[0].parent.abspath(obj.env) + if lib_path not in ld_library_path: + ld_library_path.append(lib_path) + + unit_test = getattr(obj, 'unit_test', '') + if 
unit_test and 'cprogram' in obj.features: + try: + output = obj.path + filename = os.path.join(output.abspath(obj.env), obj.target) + srcdir = output.abspath() + label = os.path.join(output.bldpath(obj.env), obj.target) + self.max_label_length = max(self.max_label_length, len(label)) + self.unit_tests[label] = (filename, srcdir) + except KeyError: + pass + self.total_num_tests = len(self.unit_tests) + # Now run the unit tests + Utils.pprint('GREEN', 'Running the unit tests') + count = 0 + result = 1 + + for label in self.unit_tests.allkeys: + file_and_src = self.unit_tests[label] + filename = file_and_src[0] + srcdir = file_and_src[1] + count += 1 + line = Build.bld.progress_line(count, self.total_num_tests, Logs.colors.GREEN, Logs.colors.NORMAL) + if Options.options.progress_bar and line: + sys.stderr.write(line) + sys.stderr.flush() + try: + kwargs = {} + kwargs['env'] = os.environ.copy() + if self.change_to_testfile_dir: + kwargs['cwd'] = srcdir + if not self.want_to_see_test_output: + kwargs['stdout'] = Utils.pproc.PIPE # PIPE for ignoring output + if not self.want_to_see_test_error: + kwargs['stderr'] = Utils.pproc.PIPE # PIPE for ignoring output + if ld_library_path: + v = kwargs['env'] + def add_path(dct, path, var): + dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')]) + if sys.platform == 'win32': + add_path(v, ld_library_path, 'PATH') + elif sys.platform == 'darwin': + add_path(v, ld_library_path, 'DYLD_LIBRARY_PATH') + add_path(v, ld_library_path, 'LD_LIBRARY_PATH') + else: + add_path(v, ld_library_path, 'LD_LIBRARY_PATH') + + pp = Utils.pproc.Popen(filename, **kwargs) + (out, err) = pp.communicate() # uh, and the output is ignored?? 
- fortunately this is going to disappear + + result = int(pp.returncode == self.returncode_ok) + + if result: + self.num_tests_ok += 1 + else: + self.num_tests_failed += 1 + + self.unit_test_results[label] = result + self.unit_test_erroneous[label] = 0 + except OSError: + self.unit_test_erroneous[label] = 1 + self.num_tests_err += 1 + except KeyboardInterrupt: + pass + if Options.options.progress_bar: sys.stdout.write(Logs.colors.cursor_on) + + def print_results(self): + "Pretty-prints a summary of all unit tests, along with some statistics" + + # If waf is not building, don't output anything + if not Options.commands[self.run_if_waf_does]: return + + p = Utils.pprint + # Early quit if no tests were performed + if self.total_num_tests == 0: + p('YELLOW', 'No unit tests present') + return + + for label in self.unit_tests.allkeys: + filename = self.unit_tests[label] + err = 0 + result = 0 + + try: err = self.unit_test_erroneous[label] + except KeyError: pass + + try: result = self.unit_test_results[label] + except KeyError: pass + + n = self.max_label_length - len(label) + if err: n += 4 + elif result: n += 7 + else: n += 3 + + line = '%s %s' % (label, '.' 
* n) + + if err: p('RED', '%sERROR' % line) + elif result: p('GREEN', '%sOK' % line) + else: p('YELLOW', '%sFAILED' % line) + + percentage_ok = float(self.num_tests_ok) / float(self.total_num_tests) * 100.0 + percentage_failed = float(self.num_tests_failed) / float(self.total_num_tests) * 100.0 + percentage_erroneous = float(self.num_tests_err) / float(self.total_num_tests) * 100.0 + + p('NORMAL', ''' +Successful tests: %i (%.1f%%) +Failed tests: %i (%.1f%%) +Erroneous tests: %i (%.1f%%) + +Total number of tests: %i +''' % (self.num_tests_ok, percentage_ok, self.num_tests_failed, percentage_failed, + self.num_tests_err, percentage_erroneous, self.total_num_tests)) + p('GREEN', 'Unit tests finished') + + +############################################################################################ + +""" +New unit test system + +The targets with feature 'test' are executed after they are built +bld(features='cprogram cc test', ...) + +To display the results: +import UnitTest +bld.add_post_fun(UnitTest.summary) +""" + +import threading +testlock = threading.Lock() + +def set_options(opt): + opt.add_option('--alltests', action='store_true', default=True, help='Exec all unit tests', dest='all_tests') + +@feature('test') +@after('apply_link', 'vars_target_cprogram') +def make_test(self): + if not 'cprogram' in self.features: + Logs.error('test cannot be executed %s' % self) + return + + self.default_install_path = None + self.create_task('utest', self.link_task.outputs) + +def exec_test(self): + + status = 0 + + variant = self.env.variant() + + filename = self.inputs[0].abspath(self.env) + self.ut_exec = getattr(self, 'ut_exec', [filename]) + if getattr(self.generator, 'ut_fun', None): + self.generator.ut_fun(self) + + try: + fu = getattr(self.generator.bld, 'all_test_paths') + except AttributeError: + fu = os.environ.copy() + self.generator.bld.all_test_paths = fu + + lst = [] + for obj in self.generator.bld.all_task_gen: + link_task = getattr(obj, 'link_task', None) + 
if link_task and link_task.env.variant() == variant: + lst.append(link_task.outputs[0].parent.abspath(obj.env)) + + def add_path(dct, path, var): + dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')]) + + if sys.platform == 'win32': + add_path(fu, lst, 'PATH') + elif sys.platform == 'darwin': + add_path(fu, lst, 'DYLD_LIBRARY_PATH') + add_path(fu, lst, 'LD_LIBRARY_PATH') + else: + add_path(fu, lst, 'LD_LIBRARY_PATH') + + + cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath(self.env) + proc = Utils.pproc.Popen(self.ut_exec, cwd=cwd, env=fu, stderr=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE) + (stdout, stderr) = proc.communicate() + + tup = (filename, proc.returncode, stdout, stderr) + self.generator.utest_result = tup + + testlock.acquire() + try: + bld = self.generator.bld + Logs.debug("ut: %r", tup) + try: + bld.utest_results.append(tup) + except AttributeError: + bld.utest_results = [tup] + finally: + testlock.release() + +cls = Task.task_type_from_func('utest', func=exec_test, color='PINK', ext_in='.bin') + +old = cls.runnable_status +def test_status(self): + ret = old(self) + if ret == SKIP_ME and getattr(Options.options, 'all_tests', False): + return RUN_ME + return ret + +cls.runnable_status = test_status +cls.quiet = 1 + +def summary(bld): + lst = getattr(bld, 'utest_results', []) + if lst: + Utils.pprint('CYAN', 'execution summary') + + total = len(lst) + tfail = len([x for x in lst if x[1]]) + + Utils.pprint('CYAN', ' tests that pass %d/%d' % (total-tfail, total)) + for (f, code, out, err) in lst: + if not code: + Utils.pprint('CYAN', ' %s' % f) + + Utils.pprint('CYAN', ' tests that fail %d/%d' % (tfail, total)) + for (f, code, out, err) in lst: + if code: + Utils.pprint('CYAN', ' %s' % f) + + diff --git a/buildtools/wafadmin/Tools/vala.py b/buildtools/wafadmin/Tools/vala.py new file mode 100644 index 0000000000..753ee8d94e --- /dev/null +++ b/buildtools/wafadmin/Tools/vala.py @@ -0,0 +1,308 @@ 
# ---------------------------------------------------------------------------
# file: buildtools/wafadmin/Tools/vala.py
# ---------------------------------------------------------------------------
#!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007

"Vala support: compile .vala/.gs sources to C with valac before the cc/cxx tasks run"

import os.path, shutil
import Task, Runner, Utils, Logs, Build, Node, Options
from TaskGen import extension, after, before

# source extensions handled by the vala compiler
EXT_VALA = ['.vala', '.gs']

class valac_task(Task.Task):
	"""Runs valac once over all the .vala/.gs inputs of a task generator,
	producing the generated .c files (and .h/.vapi/.gir/.gidl/.deps
	depending on the valac version and the target features)."""

	vars = ("VALAC", "VALAC_VERSION", "VALAFLAGS")
	before = ("cc", "cxx")

	def run(self):
		"""Build and execute the valac command line; returns the exit status."""
		env = self.env
		inputs = [a.srcpath(env) for a in self.inputs]
		valac = env['VALAC']
		vala_flags = env.get_flat('VALAFLAGS')
		top_src = self.generator.bld.srcnode.abspath()
		top_bld = self.generator.bld.srcnode.abspath(env)

		# --quiet only exists on valac newer than 0.1.6
		if env['VALAC_VERSION'] > (0, 1, 6):
			cmd = [valac, '-C', '--quiet', vala_flags]
		else:
			cmd = [valac, '-C', vala_flags]

		if self.threading:
			cmd.append('--thread')

		if self.profile:
			cmd.append('--profile=%s' % self.profile)

		if self.target_glib:
			cmd.append('--target-glib=%s' % self.target_glib)

		features = self.generator.features

		if 'cshlib' in features or 'cstaticlib' in features:
			output_dir = self.outputs[0].bld_dir(env)
			cmd.append('--library ' + self.target)
			if env['VALAC_VERSION'] >= (0, 7, 0):
				for x in self.outputs:
					if x.name.endswith('.h'):
						cmd.append('--header ' + x.bldpath(self.env))
			cmd.append('--basedir ' + top_src)
			cmd.append('-d ' + top_bld)
			if env['VALAC_VERSION'] > (0, 7, 2) and hasattr(self, 'gir'):
				cmd.append('--gir=%s.gir' % self.gir)
		else:
			output_dir = self.outputs[0].bld_dir(env)
			cmd.append('-d %s' % output_dir)

		for vapi_dir in self.vapi_dirs:
			cmd.append('--vapidir=%s' % vapi_dir)

		for package in self.packages:
			cmd.append('--pkg %s' % package)

		for package in self.packages_private:
			cmd.append('--pkg %s' % package)

		cmd.append(" ".join(inputs))
		result = self.generator.bld.exec_command(" ".join(cmd))

		if not 'cprogram' in features:
			# generate the .deps file listing the required packages
			if self.packages:
				filename = os.path.join(self.generator.path.abspath(env), "%s.deps" % self.target)
				deps = open(filename, 'w')
				for package in self.packages:
					deps.write(package + '\n')
				deps.close()

			# handle vala 0.1.6 which doesn't honor --directory for the generated .vapi
			self._fix_output("../%s.vapi" % self.target)
			# handle vala >= 0.1.7 which has a weird definition for --directory
			self._fix_output("%s.vapi" % self.target)
			# handle vala >= 0.2.0 which doesn't honor --directory for the generated .gidl
			self._fix_output("%s.gidl" % self.target)
			# handle vala >= 0.3.6 which doesn't honor --directory for the generated .gir
			self._fix_output("%s.gir" % self.target)
			if hasattr(self, 'gir'):
				self._fix_output("%s.gir" % self.gir)

		first = None
		for node in self.outputs:
			if not first:
				first = node
			else:
				if first.parent.id != node.parent.id:
					# issue #483: old valac dropped outputs next to the first one
					if env['VALAC_VERSION'] < (0, 7, 0):
						shutil.move(first.parent.abspath(self.env) + os.sep + node.name, node.abspath(self.env))
		return result

	def install(self):
		"""Install the generated headers, .vapi/.deps and .gir files for library targets."""
		bld = self.generator.bld
		features = self.generator.features

		if self.attr("install_path") and ("cshlib" in features or "cstaticlib" in features):
			headers_list = [o for o in self.outputs if o.suffix() == ".h"]
			vapi_list = [o for o in self.outputs if (o.suffix() in (".vapi", ".deps"))]
			gir_list = [o for o in self.outputs if o.suffix() == ".gir"]

			for header in headers_list:
				top_src = self.generator.bld.srcnode
				package = self.env['PACKAGE']
				try:
					api_version = Utils.g_module.API_VERSION
				except AttributeError:
					# derive the api version from VERSION: 0.x -> "0.x", y.* -> "y.0"
					version = Utils.g_module.VERSION.split(".")
					if version[0] == "0":
						api_version = "0." + version[1]
					else:
						api_version = version[0] + ".0"
				install_path = '${INCLUDEDIR}/%s-%s/%s' % (package, api_version, header.relpath_gen(top_src))
				bld.install_as(install_path, header, self.env)
			bld.install_files('${DATAROOTDIR}/vala/vapi', vapi_list, self.env)
			bld.install_files('${DATAROOTDIR}/gir-1.0', gir_list, self.env)

	def _fix_output(self, output):
		"""Best-effort move of a valac output that was written outside the
		expected directory (see the version-specific notes in run())."""
		top_bld = self.generator.bld.srcnode.abspath(self.env)
		try:
			src = os.path.join(top_bld, output)
			dst = self.generator.path.abspath(self.env)
			shutil.move(src, dst)
		except EnvironmentError:
			# the file may legitimately not exist for this valac version;
			# narrowed from a bare except so ^C and SystemExit still propagate
			pass

@extension(EXT_VALA)
def vala_file(self, node):
	"""TaskGen hook: register *node* with the (single, shared) valac task of
	this task generator, creating and configuring that task on first use."""
	valatask = getattr(self, "valatask", None)
	# there is only one vala task and it compiles all vala files .. :-/
	if not valatask:
		valatask = self.create_task('valac')
		self.valatask = valatask
		self.includes = Utils.to_list(getattr(self, 'includes', []))
		self.uselib = self.to_list(self.uselib)
		valatask.packages = []
		valatask.packages_private = Utils.to_list(getattr(self, 'packages_private', []))
		valatask.vapi_dirs = []
		valatask.target = self.target
		valatask.threading = False
		valatask.install_path = self.install_path
		valatask.profile = getattr(self, 'profile', 'gobject')
		valatask.target_glib = None #Deprecated

		packages = Utils.to_list(getattr(self, 'packages', []))
		vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
		includes = []

		if hasattr(self, 'uselib_local'):
			# walk the local dependencies transitively, collecting their
			# vapi packages / directories and ordering this task after them
			local_packages = Utils.to_list(self.uselib_local)
			seen = []
			while len(local_packages) > 0:
				package = local_packages.pop()
				if package in seen:
					continue
				seen.append(package)

				# check if the package exists
				package_obj = self.name_to_obj(package)
				if not package_obj:
					raise Utils.WafError("object '%s' was not found in uselib_local (required by '%s')" % (package, self.name))

				package_name = package_obj.target
				package_node = package_obj.path
				package_dir = package_node.relpath_gen(self.path)

				for task in package_obj.tasks:
					for output in task.outputs:
						if output.name == package_name + ".vapi":
							valatask.set_run_after(task)
							if package_name not in packages:
								packages.append(package_name)
							if package_dir not in vapi_dirs:
								vapi_dirs.append(package_dir)
							if package_dir not in includes:
								includes.append(package_dir)

				if hasattr(package_obj, 'uselib_local'):
					lst = self.to_list(package_obj.uselib_local)
					lst.reverse()
					local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages

		valatask.packages = packages
		for vapi_dir in vapi_dirs:
			try:
				valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath())
				valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath(self.env))
			except AttributeError:
				Logs.warn("Unable to locate Vala API directory: '%s'" % vapi_dir)

		self.includes.append(node.bld.srcnode.abspath())
		self.includes.append(node.bld.srcnode.abspath(self.env))
		for include in includes:
			try:
				self.includes.append(self.path.find_dir(include).abspath())
				self.includes.append(self.path.find_dir(include).abspath(self.env))
			except AttributeError:
				Logs.warn("Unable to locate include directory: '%s'" % include)

		if valatask.profile == 'gobject':
			if hasattr(self, 'target_glib'):
				Logs.warn('target_glib on vala tasks is deprecated --vala-target-glib=MAJOR.MINOR from the vala tool options')

			if getattr(Options.options, 'vala_target_glib', None):
				valatask.target_glib = Options.options.vala_target_glib

			if not 'GOBJECT' in self.uselib:
				self.uselib.append('GOBJECT')

		if hasattr(self, 'threading'):
			if valatask.profile == 'gobject':
				valatask.threading = self.threading
				if not 'GTHREAD' in self.uselib:
					self.uselib.append('GTHREAD')
			else:
				#Vala doesn't have threading support for dova nor posix
				Logs.warn("Profile %s does not have threading support" % valatask.profile)

		if hasattr(self, 'gir'):
			valatask.gir = self.gir

	env = valatask.env

	output_nodes = []

	c_node = node.change_ext('.c')
	output_nodes.append(c_node)
	self.allnodes.append(c_node)

	# which extra outputs valac produces depends on its version
	if env['VALAC_VERSION'] < (0, 7, 0):
		output_nodes.append(node.change_ext('.h'))
	else:
		if not 'cprogram' in self.features:
			output_nodes.append(self.path.find_or_declare('%s.h' % self.target))

	if not 'cprogram' in self.features:
		output_nodes.append(self.path.find_or_declare('%s.vapi' % self.target))
		if env['VALAC_VERSION'] > (0, 7, 2):
			if hasattr(self, 'gir'):
				output_nodes.append(self.path.find_or_declare('%s.gir' % self.gir))
		elif env['VALAC_VERSION'] > (0, 3, 5):
			output_nodes.append(self.path.find_or_declare('%s.gir' % self.target))
		elif env['VALAC_VERSION'] > (0, 1, 7):
			output_nodes.append(self.path.find_or_declare('%s.gidl' % self.target))
		if valatask.packages:
			output_nodes.append(self.path.find_or_declare('%s.deps' % self.target))

	valatask.inputs.append(node)
	valatask.outputs.extend(output_nodes)

def detect(conf):
	"""Configure-time check: locate valac, check its version (>= 0.1.6) and
	the gobject/gthread pkg-config flags; fatal if valac is too old."""
	min_version = (0, 1, 6)
	min_version_str = "%d.%d.%d" % min_version

	valac = conf.find_program('valac', var='VALAC', mandatory=True)

	if not conf.env["HAVE_GOBJECT"]:
		pkg_args = {'package': 'gobject-2.0',
			'uselib_store': 'GOBJECT',
			'args': '--cflags --libs'}
		if getattr(Options.options, 'vala_target_glib', None):
			pkg_args['atleast_version'] = Options.options.vala_target_glib
		conf.check_cfg(**pkg_args)

	if not conf.env["HAVE_GTHREAD"]:
		pkg_args = {'package': 'gthread-2.0',
			'uselib_store': 'GTHREAD',
			'args': '--cflags --libs'}
		if getattr(Options.options, 'vala_target_glib', None):
			pkg_args['atleast_version'] = Options.options.vala_target_glib
		conf.check_cfg(**pkg_args)

	try:
		# "valac --version" prints e.g. "Vala 0.7.8"; keep the first 3 fields
		output = Utils.cmd_output(valac + " --version", silent=True)
		version = output.split(' ', 1)[-1].strip().split(".")[0:3]
		version = [int(x) for x in version]
		valac_version = tuple(version)
	except Exception:
		valac_version = (0, 0, 0)

	conf.check_message('program version',
			'valac >= ' + min_version_str,
			valac_version >= min_version,
			"%d.%d.%d" % valac_version)

	conf.check_tool('gnu_dirs')

	if valac_version < min_version:
		conf.fatal("valac version too old to be used with this tool")
		return

	conf.env['VALAC_VERSION'] = valac_version
	conf.env['VALAFLAGS'] = ''

def set_options(opt):
	"""Add the --vala-target-glib command-line option."""
	valaopts = opt.add_option_group('Vala Compiler Options')
	valaopts.add_option('--vala-target-glib', default=None,
		dest='vala_target_glib', metavar='MAJOR.MINOR',
		help='Target version of glib for Vala GObject code generation')

# ---------------------------------------------------------------------------
# file: buildtools/wafadmin/Tools/winres.py
# ---------------------------------------------------------------------------
#!/usr/bin/env python
# encoding: utf-8
# Brant Young, 2007

"This hook is called when the class cpp/cc task generator encounters a '.rc' file: X{.rc -> [.res|.rc.o]}"

import os, sys, re
import TaskGen, Task
from Utils import quote_whitespace
from TaskGen import extension

EXT_WINRC = ['.rc']

winrc_str = '${WINRC} ${_CPPDEFFLAGS} ${_CCDEFFLAGS} ${WINRCFLAGS} ${_CPPINCFLAGS} ${_CCINCFLAGS} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'

@extension(EXT_WINRC)
def rc_file(self, node):
	"""TaskGen hook: compile a .rc resource file to .rc.o (windres) or .res (msvc RC)."""
	obj_ext = '.rc.o'
	# '/fo' is the msvc RC output flag; msvc produces .res files
	if self.env['WINRC_TGT_F'] == '/fo': obj_ext = '.res'

	rctask = self.create_task('winrc', node, node.change_ext(obj_ext))
	self.compiled_tasks.append(rctask)

# create our action, for use with rc file
Task.simple_task_type('winrc', winrc_str, color='BLUE', before='cc cxx', shell=False)

def detect(conf):
	"""Configure-time check: find windres (gcc toolchains) or RC (msvc) and
	set the matching output/input flag templates; fatal if none is found."""
	v = conf.env

	winrc = v['WINRC']
	v['WINRC_TGT_F'] = '-o'
	v['WINRC_SRC_F'] = '-i'
	# find rc.exe
	if not winrc:
		if v['CC_NAME'] in ['gcc', 'cc', 'g++', 'c++']:
			winrc = conf.find_program('windres', var='WINRC', path_list = v['PATH'])
		elif v['CC_NAME'] == 'msvc':
			winrc = conf.find_program('RC', var='WINRC', path_list = v['PATH'])
			v['WINRC_TGT_F'] = '/fo'
			v['WINRC_SRC_F'] = ''
	if not winrc:
		conf.fatal('winrc was not found!')

	v['WINRCFLAGS'] = ''

# ---------------------------------------------------------------------------
# file: buildtools/wafadmin/Tools/xlc.py
# ---------------------------------------------------------------------------
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2008 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
# Michael Kuhn, 2009

"IBM XL C compiler support (AIX)"

import os, sys
import Configure, Options, Utils
import ccroot, ar
from Configure import conftest

@conftest
def find_xlc(conf):
	"""Find the xlc C compiler (preferring the re-entrant xlc_r)."""
	cc = conf.find_program(['xlc_r', 'xlc'], var='CC', mandatory=True)
	cc = conf.cmd_to_list(cc)
	conf.env.CC_NAME = 'xlc'
	conf.env.CC = cc

@conftest
def find_cpp(conf):
	"""Record the C preprocessor from env/os environment, if any."""
	v = conf.env
	cpp = None
	if v['CPP']: cpp = v['CPP']
	elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
	#if not cpp: cpp = v['CC']
	v['CPP'] = cpp

@conftest
def xlc_common_flags(conf):
	"""Populate the env with the xlc flag templates used by ccroot."""
	v = conf.env

	# CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
	v['CCFLAGS_DEBUG'] = ['-g']
	v['CCFLAGS_RELEASE'] = ['-O2']

	v['CC_SRC_F'] = ''
	v['CC_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
	v['CPPPATH_ST'] = '-I%s' # template for adding include paths

	# linker
	if not v['LINK_CC']: v['LINK_CC'] = v['CC']
	v['CCLNK_SRC_F'] = ''
	v['CCLNK_TGT_F'] = ['-o', ''] # shell hack for -MD

	v['LIB_ST'] = '-l%s' # template for adding libs
	v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
	v['STATICLIB_ST'] = '-l%s'
	v['STATICLIBPATH_ST'] = '-L%s'
	v['RPATH_ST'] = '-Wl,-rpath,%s'
	v['CCDEFINES_ST'] = '-D%s'

	v['SONAME_ST'] = ''
	v['SHLIB_MARKER'] = ''
	v['STATICLIB_MARKER'] = ''
	v['FULLSTATIC_MARKER'] = '-static'

	# program
	v['program_LINKFLAGS'] = ['-Wl,-brtl']
	v['program_PATTERN'] = '%s'

	# shared library
	v['shlib_CCFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC already defines the __PIC__ macro
	v['shlib_LINKFLAGS'] = ['-G', '-Wl,-brtl,-bexpfull']
	v['shlib_PATTERN'] = 'lib%s.so'

	# static lib
	v['staticlib_LINKFLAGS'] = ''
	v['staticlib_PATTERN'] = 'lib%s.a'

def detect(conf):
	"""Configure the xlc toolchain: compiler, preprocessor, ar and common flags."""
	conf.find_xlc()
	conf.find_cpp()
	conf.find_ar()
	conf.xlc_common_flags()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.link_add_flags()

# ---------------------------------------------------------------------------
# file: buildtools/wafadmin/Tools/xlcxx.py
# ---------------------------------------------------------------------------
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
# Michael Kuhn, 2009

"IBM XL C++ compiler support (AIX); mirrors xlc.py for C++"

import os, sys
import Configure, Options, Utils
import ccroot, ar
from Configure import conftest

@conftest
def find_xlcxx(conf):
	"""Find the xlc++ C++ compiler (preferring the re-entrant xlc++_r)."""
	cxx = conf.find_program(['xlc++_r', 'xlc++'], var='CXX', mandatory=True)
	cxx = conf.cmd_to_list(cxx)
	conf.env.CXX_NAME = 'xlc++'
	conf.env.CXX = cxx

@conftest
def find_cpp(conf):
	"""Record the C preprocessor from env/os environment, if any."""
	v = conf.env
	cpp = None
	if v['CPP']: cpp = v['CPP']
	elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
	#if not cpp: cpp = v['CXX']
	v['CPP'] = cpp

@conftest
def xlcxx_common_flags(conf):
	"""Populate the env with the xlc++ flag templates used by ccroot."""
	v = conf.env

	# CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
	v['CXXFLAGS_DEBUG'] = ['-g']
	v['CXXFLAGS_RELEASE'] = ['-O2']

	v['CXX_SRC_F'] = ''
	v['CXX_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
	v['CPPPATH_ST'] = '-I%s' # template for adding include paths

	# linker
	if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
	v['CXXLNK_SRC_F'] = ''
	v['CXXLNK_TGT_F'] = ['-o', ''] # shell hack for -MD

	v['LIB_ST'] = '-l%s' # template for adding libs
	v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
	v['STATICLIB_ST'] = '-l%s'
	v['STATICLIBPATH_ST'] = '-L%s'
	v['RPATH_ST'] = '-Wl,-rpath,%s'
	v['CXXDEFINES_ST'] = '-D%s'

	v['SONAME_ST'] = ''
	v['SHLIB_MARKER'] = ''
	v['STATICLIB_MARKER'] = ''
	v['FULLSTATIC_MARKER'] = '-static'

	# program
	v['program_LINKFLAGS'] = ['-Wl,-brtl']
	v['program_PATTERN'] = '%s'

	# shared library
	v['shlib_CXXFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC already defines the __PIC__ macro
	v['shlib_LINKFLAGS'] = ['-G', '-Wl,-brtl,-bexpfull']
	v['shlib_PATTERN'] = 'lib%s.so'

	# static lib
	v['staticlib_LINKFLAGS'] = ''
	v['staticlib_PATTERN'] = 'lib%s.a'

def detect(conf):
	"""Configure the xlc++ toolchain: compiler, preprocessor, ar and common flags."""
	conf.find_xlcxx()
	conf.find_cpp()
	conf.find_ar()
	conf.xlcxx_common_flags()
	conf.cxx_load_tools()
	conf.cxx_add_flags()
	conf.link_add_flags()