author     Andrew Tridgell <tridge@samba.org>    2010-02-26 20:11:52 +1100
committer  Andrew Tridgell <tridge@samba.org>    2010-04-06 20:26:38 +1000
commit     cada19f58b3f721a1d7a02afed038e32a3ca8945 (patch)
tree       20a77c5a7f4103de514e830f1515638651f4aab5 /buildtools
parent     f15a81c1c02426a5afe59a364d00438f06a10501 (diff)
build: cope with empty source lists for libs
Diffstat (limited to 'buildtools')
-rwxr-xr-x  buildtools/mktowscript/mktowscript.pl  |   7
-rw-r--r--  buildtools/wafsamba/wafsamba.py        | 852
2 files changed, 858 insertions(+), 1 deletion(-)
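In short: mktowscript.pl used to abort when a target listed no SOURCE files, and the new wafsamba.py now has to accept the resulting empty source lists. A minimal, self-contained sketch of that empty-target bookkeeping (the names declare_library and empty_targets are illustrative only, not the real wafsamba API; the real logic is the EMPTY_TARGETS cache handled by LOCAL_CACHE_SET and ADD_DEPENDENCIES in the wafsamba.py hunk below):

# illustrative sketch only: remember source-less targets, then silently
# drop dependencies on them when other targets are declared
empty_targets = {}

def declare_library(name, sources, deps):
    if not sources:                      # cope with an empty source list
        empty_targets[name] = True       # remember the empty library ...
        return None
    kept = [d for d in deps if d not in empty_targets]   # ... and strip deps on it
    return (name, sources, kept)

declare_library('LIBFOO', [], ['LIBBAR'])                          # recorded as empty, not built
print(declare_library('app', ['main.c'], ['LIBFOO', 'LIBBAR']))    # -> ('app', ['main.c'], ['LIBBAR'])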
diff --git a/buildtools/mktowscript/mktowscript.pl b/buildtools/mktowscript/mktowscript.pl
index 68fb7c69d3..5b3926065a 100755
--- a/buildtools/mktowscript/mktowscript.pl
+++ b/buildtools/mktowscript/mktowscript.pl
@@ -263,6 +263,7 @@ sub process_results($)
 		printf "\nbld.SAMBA_%s('%s'", $sec->{TYPE}, $s;
 		my $trailer="";
 		my $got_src = 0;
+		my $got_private_deps = 0;
 		foreach my $k (keys %{$sec}) {
 			#print "key=$k\n";
@@ -293,6 +294,7 @@ sub process_results($)
 			}
 			if ($k eq "PRIVATE_DEPENDENCIES") {
 				$trailer .= sprintf(",\n\tdeps='%s'", strlist($sec->{$k}));
+				$got_private_deps = 1;
 				next;
 			}
 			if ($k eq "PUBLIC_DEPENDENCIES") {
@@ -422,7 +424,10 @@ sub process_results($)
 			}
 			die("Unknown keyword $k in $s\n");
 		}
-		die("No source list in $s\n") unless $got_src;
+		die("No source list in $s\n") unless $got_src or $got_private_deps;
+		if (! $got_src) {
+			printf(",''\n\t");
+		}
 		printf("%s\n\t)\n\n", $trailer);
 	}
 }
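Traced through the printf calls above, a generated build rule for a target that has PRIVATE_DEPENDENCIES but no SOURCE list now comes out roughly like this instead of aborting (target and dependency names are hypothetical, and exact whitespace and comma placement differ slightly):

bld.SAMBA_LIBRARY('LIBFOO','',
	deps='LIBBAR'
	)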
diff --git a/buildtools/wafsamba/wafsamba.py b/buildtools/wafsamba/wafsamba.py
new file mode 100644
index 0000000000..54c1235ef3
--- /dev/null
+++ b/buildtools/wafsamba/wafsamba.py
@@ -0,0 +1,852 @@
+# a waf tool to add autoconf-like macros to the configure section
+# and for SAMBA_ macros for building libraries, binaries etc
+
+import Build, os, Logs, sys, Configure, Options, string, Task, Utils, optparse
+from Configure import conf
+from Logs import debug
+from TaskGen import extension
+
+LIB_PATH="shared"
+
+######################################################
+# this is used as a decorator to make functions only
+# run once. Based on the idea from
+# http://stackoverflow.com/questions/815110/is-there-a-decorator-to-simply-cache-function-return-values
+runonce_ret = {}
+def runonce(function):
+    def wrapper(*args):
+        if args in runonce_ret:
+            return runonce_ret[args]
+        else:
+            ret = function(*args)
+            runonce_ret[args] = ret
+            return ret
+    return wrapper
+
+
+####################################################
+# some autoconf like helpers, to make the transition
+# to waf a bit easier for those used to autoconf
+# m4 files
+@runonce
+@conf
+def DEFINE(conf, d, v):
+    conf.define(d, v, quote=False)
+    conf.env.append_value('CCDEFINES', d + '=' + str(v))
+
+@runonce
+def CHECK_HEADER(conf, h):
+    if conf.check(header_name=h):
+        conf.env.hlist.append(h)
+
+@conf
+def CHECK_HEADERS(conf, list):
+    for hdr in list.split():
+        CHECK_HEADER(conf, hdr)
+
+@conf
+def CHECK_TYPES(conf, list):
+    for t in list.split():
+        conf.check(type_name=t, header_name=conf.env.hlist)
+
+@conf
+def CHECK_TYPE_IN(conf, t, hdr):
+    if conf.check(header_name=hdr):
+        conf.check(type_name=t, header_name=hdr)
+
+@conf
+def CHECK_TYPE(conf, t, alternate):
+    if not conf.check(type_name=t, header_name=conf.env.hlist):
+        conf.DEFINE(t, alternate)
+
+@conf
+def CHECK_VARIABLE(conf, v):
+    hdrs=''
+    for h in conf.env.hlist:
+        hdrs += '#include <%s>\n' % h
+    if conf.check(fragment=
+                  '%s\nint main(void) {void *_x; _x=(void *)&%s; return 0;}\n' % (hdrs, v),
+                  execute=0,
+                  msg="Checking for variable %s" % v):
+        conf.DEFINE('HAVE_%s' % v.upper(), 1)
+
+@runonce
+def CHECK_FUNC(conf, f):
+    conf.check(function_name=f, header_name=conf.env.hlist)
+
+
+@conf
+def CHECK_FUNCS(conf, list):
+    for f in list.split():
+        CHECK_FUNC(conf, f)
+
+
+#################################################
+# return True if a configuration option was found
+@conf
+def CONFIG_SET(conf, option):
+    return (option in conf.env) and (conf.env[option] != ())
+Build.BuildContext.CONFIG_SET = CONFIG_SET
+
+
+###########################################################
+# check that the functions in 'list' are available in 'library'
+# if they are, then make that library available as a dependency
+#
+# if the library is not available and mandatory==True, then
+# raise an error.
+#
+# If the library is not available and mandatory==False, then
+# add the library to the list of dependencies to remove from
+# build rules
+@conf
+def CHECK_FUNCS_IN(conf, list, library, mandatory=False):
+    if not conf.check(lib=library, uselib_store=library):
+        conf.ASSERT(not mandatory,
+                    "Mandatory library '%s' not found for functions '%s'" % (library, list))
+        # if it isn't a mandatory library, then remove it from dependency lists
+        LOCAL_CACHE_SET(conf, 'EMPTY_TARGETS', library.upper(), True)
+        return
+    for f in list.split():
+        conf.check(function_name=f, lib=library, header_name=conf.env.hlist)
+    conf.env['LIB_' + library.upper()] = library
+    LOCAL_CACHE_SET(conf, 'TARGET_TYPE', library, 'SYSLIB')
+
+
+#################################################
+# write out config.h in the right directory
+@conf
+def SAMBA_CONFIG_H(conf, path=None):
+    if os.path.normpath(conf.curdir) != os.path.normpath(os.environ.get('PWD')):
+        return
+    if path is None:
+        conf.write_config_header('config.h', top=True)
+    else:
+        conf.write_config_header(path)
+
+
+##############################################################
+# setup a configurable path
+@conf
+def CONFIG_PATH(conf, name, default):
+    if not name in conf.env:
+        conf.env[name] = conf.env['PREFIX'] + default
+    conf.define(name, conf.env[name], quote=True)
+
+##############################################################
+# add some CFLAGS to the command line
+@conf
+def ADD_CFLAGS(conf, flags):
+    if not 'EXTRA_CFLAGS' in conf.env:
+        conf.env['EXTRA_CFLAGS'] = []
+    conf.env['EXTRA_CFLAGS'].extend(flags.split())
+
+
+##############################################################
+# work out the current flags. local flags are added first
+def CURRENT_CFLAGS(bld, cflags):
+    if not 'EXTRA_CFLAGS' in bld.env:
+        list = []
+    else:
+        list = bld.env['EXTRA_CFLAGS'];
+    ret = cflags.split()
+    ret.extend(list)
+    return ret
+
+
+################################################################
+# magic rpath handling
+#
+# we want a different rpath when installing and when building
+# Note that this should really check if rpath is available on this platform
+# and it should also honor an --enable-rpath option
+def set_rpath(bld):
+    if Options.is_install:
+        if bld.env['RPATH_ON_INSTALL']:
+            bld.env['RPATH'] = ['-Wl,-rpath=%s/lib' % bld.env.PREFIX]
+        else:
+            bld.env['RPATH'] = []
+    else:
+        rpath = os.path.normpath('%s/%s' % (bld.env['BUILD_DIRECTORY'], LIB_PATH))
+        bld.env.append_value('RPATH', '-Wl,-rpath=%s' % rpath)
+Build.BuildContext.set_rpath = set_rpath
+
+
+#############################################################
+# return a named build cache dictionary, used to store
+# state inside the following functions
+@conf
+def LOCAL_CACHE(ctx, name):
+    if name in ctx.env:
+        return ctx.env[name]
+    ctx.env[name] = {}
+    return ctx.env[name]
+
+
+#############################################################
+# set a value in a local cache
+@conf
+def LOCAL_CACHE_SET(ctx, cachename, key, value):
+    cache = LOCAL_CACHE(ctx, cachename)
+    cache[key] = value
+
+#############################################################
+# set the build type of a target in the TARGET_TYPE cache
+# return False if the target has already been declared
+def SET_TARGET_TYPE(ctx, target, value):
+    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
+    if target in cache:
+        ASSERT(ctx, cache[target] == value,
+               "Target '%s' re-defined as %s - was %s" % (target, value, cache[target]))
+        debug("task_gen: Skipping duplicate target %s (curdir=%s)" % (target, ctx.curdir))
+        return False
+    assumed = LOCAL_CACHE(ctx, 'ASSUMED_TARGET')
+    if target in assumed:
+        #if assumed[target] != value:
+        #    print "Target '%s' was assumed of type '%s' but is '%s'" % (target, assumed[target], value)
+        ASSERT(ctx, assumed[target] == value,
+               "Target '%s' was assumed of type '%s' but is '%s'" % (target, assumed[target], value))
+    predeclared = LOCAL_CACHE(ctx, 'PREDECLARED_TARGET')
+    if target in predeclared:
+        ASSERT(ctx, predeclared[target] == value,
+               "Target '%s' was predeclared of type '%s' but is '%s'" % (target, predeclared[target], value))
+    LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value)
+    debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.curdir))
+    return True
+
+
+#############################################################
+# a build assert call
+@conf
+def ASSERT(ctx, expression, msg):
+    if not expression:
+        sys.stderr.write("ERROR: %s\n" % msg)
+        raise AssertionError
+Build.BuildContext.ASSERT = ASSERT
+
+################################################################
+# create a list of files by prepending each with a subdir name
+def SUBDIR(bld, subdir, list):
+    ret = ''
+    for l in list.split():
+        ret = ret + subdir + '/' + l + ' '
+    return ret
+Build.BuildContext.SUBDIR = SUBDIR
+
+#################################################################
+# create the samba build environment
+@conf
+def SAMBA_BUILD_ENV(conf):
+    libpath="%s/%s" % (conf.blddir, LIB_PATH)
+    conf.env['BUILD_DIRECTORY'] = conf.blddir
+    if not os.path.exists(libpath):
+        os.mkdir(libpath)
+
+##############################################
+# remove .. elements from a path list
+def NORMPATH(bld, ilist):
+    return " ".join([os.path.normpath(p) for p in ilist.split(" ")])
+Build.BuildContext.NORMPATH = NORMPATH
+
+################################################################
+# add an init_function to the list for a subsystem
+def ADD_INIT_FUNCTION(bld, subsystem, init_function):
+    if init_function is None:
+        return
+    bld.ASSERT(subsystem is not None, "You must specify a subsystem for init_function '%s'" % init_function)
+    cache = LOCAL_CACHE(bld, 'INIT_FUNCTIONS')
+    if not subsystem in cache:
+        cache[subsystem] = ''
+    cache[subsystem] += '%s,' % init_function
+Build.BuildContext.ADD_INIT_FUNCTION = ADD_INIT_FUNCTION
+
+#######################################################
+# d1 += d2
+def dict_concat(d1, d2):
+    for t in d2:
+        if t not in d1:
+            d1[t] = d2[t]
+
+################################################################
+# recursively build the dependency list for a target
+def FULL_DEPENDENCIES(bld, cache, target, chain, path):
+    if not target in cache:
+        return {}
+    deps = cache[target].copy()
+    for t in cache[target]:
+        bld.ASSERT(t not in chain, "Circular dependency for %s: %s->%s" % (t, path, t));
+        c2 = chain.copy()
+        c2[t] = True
+        dict_concat(deps, FULL_DEPENDENCIES(bld, cache, t, c2, "%s->%s" % (path, t)))
+    return deps
+
+############################################################
+# check our build dependencies for circular dependencies
+def CHECK_TARGET_DEPENDENCY(bld, target):
+    cache = LOCAL_CACHE(bld, 'LIB_DEPS')
+    return FULL_DEPENDENCIES(bld, cache, target, { target:True }, target)
+
+############################################################
+# check that all dependencies have been declared
+def CHECK_DEPENDENCIES(bld):
+    cache = LOCAL_CACHE(bld, 'LIB_DEPS')
+    target_cache = LOCAL_CACHE(bld, 'TARGET_TYPE')
+    debug('deps: Checking dependencies')
+    for t in cache:
+        deps = CHECK_TARGET_DEPENDENCY(bld, t)
+        for d in deps:
+            #if not d in target_cache:
+            #    print "Dependency '%s' of target '%s' not declared" % (d, t)
+            ASSERT(bld, d in target_cache,
+                   "Dependency '%s' of target '%s' not declared" % (d, t))
+    debug("deps: Dependencies checked for %u targets" % len(target_cache))
+Build.BuildContext.CHECK_DEPENDENCIES = CHECK_DEPENDENCIES
+
+
+############################################################
+# pre-declare a target as being of a particular type
+def PREDECLARE(bld, target, type):
+    cache = LOCAL_CACHE(bld, 'PREDECLARED_TARGET')
+    target_cache = LOCAL_CACHE(bld, 'TARGET_TYPE')
+    ASSERT(bld, not target in target_cache, "Target '%s' is already declared" % target)
+    ASSERT(bld, not target in cache, "Target '%s' already predeclared" % target)
+    cache[target] = type
+Build.BuildContext.PREDECLARE = PREDECLARE
+
+
+
+################################################################
+# add 'deps' to the dependency list for 'name', dropping any entry
+# that would introduce a circular dependency.
+# returns a tuple of strings: (sysdeps, localdeps, add_objects)
+def ADD_DEPENDENCIES(bld, name, deps):
+    debug('deps: Calculating dependencies for %s' % name)
+    lib_deps = LOCAL_CACHE(bld, 'LIB_DEPS')
+    if not name in lib_deps:
+        lib_deps[name] = {}
+    list = deps.split()
+    list2 = []
+    for d in list:
+        lib_deps[name][d] = True;
+        try:
+            CHECK_TARGET_DEPENDENCY(bld, name)
+            list2.append(d)
+        except AssertionError:
+            debug("deps: Removing dependency %s from target %s" % (d, name))
+            del(lib_deps[name][d])
+
+    # extract out the system dependencies
+    sysdeps = []
+    localdeps = []
+    add_objects = []
+    cache = LOCAL_CACHE(bld, 'EMPTY_TARGETS')
+    target_cache = LOCAL_CACHE(bld, 'TARGET_TYPE')
+    predeclare = LOCAL_CACHE(bld, 'PREDECLARED_TARGET')
+    for d in list2:
+        recurse = False
+        # strip out any dependencies on empty libraries
+        if d in cache:
+            debug("deps: Removing empty dependency '%s' from '%s'" % (d, name))
+            continue
+        type = None
+
+        if d in target_cache:
+            type = target_cache[d]
+        elif d in predeclare:
+            type = predeclare[d]
+        else:
+            type = 'SUBSYSTEM'
+            LOCAL_CACHE_SET(bld, 'ASSUMED_TARGET', d, type)
+
+        if type == 'SYSLIB':
+            sysdeps.append(d)
+        elif type == 'LIBRARY':
+            localdeps.append(d)
+        elif type == 'SUBSYSTEM':
+            add_objects.append(d)
+            recurse = True
+        elif type == 'MODULE':
+            add_objects.append(d)
+            recurse = True
+        elif type == 'PYTHON':
+            pass
+        elif type == 'ASN1':
+            pass
+        elif type == 'BINARY':
+            pass
+        else:
+            ASSERT(bld, False, "Unknown target type '%s' for dependency %s" % (
+                type, d))
+
+        # for some types we have to build the list recursively
+        if recurse and (d in lib_deps):
+            rec_deps = ' '.join(lib_deps[d].keys())
+            (rec_sysdeps, rec_localdeps, rec_add_objects) = ADD_DEPENDENCIES(bld, d, rec_deps)
+            sysdeps.extend(rec_sysdeps.split())
+            localdeps.extend(rec_localdeps.split())
+            add_objects.extend(rec_add_objects.split())
+
+    debug('deps: Dependencies for %s: sysdeps: %u localdeps: %u add_objects=%u' % (
+        name, len(sysdeps), len(localdeps), len(add_objects)))
+    return (' '.join(sysdeps), ' '.join(localdeps), ' '.join(add_objects))
+
+
+#################################################################
+# return an include list for a set of library dependencies
+def SAMBA_LIBRARY_INCLUDE_LIST(bld, deps):
+    ret = bld.curdir + ' '
+    cache = LOCAL_CACHE(bld, 'INCLUDE_LIST')
+    for l in deps.split():
+        if l in cache:
+            ret = ret + cache[l] + ' '
+    return ret
+Build.BuildContext.SAMBA_LIBRARY_INCLUDE_LIST = SAMBA_LIBRARY_INCLUDE_LIST
+
+#################################################################
+# define a Samba library
+def SAMBA_LIBRARY(bld, libname, source_list,
+                  deps='',
+                  public_deps='',
+                  include_list='.',
+                  public_headers=None,
+                  vnum=None,
+                  cflags='',
+                  autoproto=None):
+    if not SET_TARGET_TYPE(bld, libname, 'LIBRARY'):
+        return
+
+    # remember empty libraries, so we can strip the dependencies
+    if (source_list == '') or (source_list == []):
+        LOCAL_CACHE_SET(bld, 'EMPTY_TARGETS', libname, True)
+        return
+
+    (sysdeps, localdeps, add_objects) = ADD_DEPENDENCIES(bld, libname, deps)
+
+    ilist = bld.SAMBA_LIBRARY_INCLUDE_LIST(deps) + bld.SUBDIR(bld.curdir, include_list)
+    ilist = bld.NORMPATH(ilist)
+    bld.SET_BUILD_GROUP('main')
+    bld(
+        features = 'cc cshlib',
+        source = source_list,
+        target=libname,
+        uselib_local = localdeps,
+        uselib = sysdeps,
+        add_objects = add_objects,
+        ccflags = CURRENT_CFLAGS(bld, cflags),
+        includes='. ' + bld.env['BUILD_DIRECTORY'] + '/default ' + ilist,
+        vnum=vnum)
+
+    # put a link to the library in bin/shared
+    soext=""
+    if vnum is not None:
+        soext = '.' + vnum.split('.')[0]
+    bld.SET_BUILD_GROUP('final')
+    bld(
+        source = 'lib%s.so' % libname,
+        rule = 'ln -sf ../${SRC}%s %s/lib%s.so%s' %
+               (soext, LIB_PATH, libname, soext),
+        shell = True,
+        after = 'cc_link',
+        )
+    LOCAL_CACHE_SET(bld, 'INCLUDE_LIST', libname, ilist)
+
+Build.BuildContext.SAMBA_LIBRARY = SAMBA_LIBRARY
+
+
+#################################################################
+# define a Samba binary
+def SAMBA_BINARY(bld, binname, source_list,
+                 deps='',
+                 include_list='',
+                 public_headers=None,
+                 modules=None,
+                 installdir=None,
+                 ldflags=None,
+                 cflags='',
+                 autoproto=None,
+                 use_hostcc=None,
+                 compiler=None,
+                 group='main',
+                 manpages=None):
+    ilist = '. ' + bld.env['BUILD_DIRECTORY'] + '/default ' + bld.SAMBA_LIBRARY_INCLUDE_LIST(deps) + ' ' + include_list
+    ilist = bld.NORMPATH(ilist)
+
+    if not SET_TARGET_TYPE(bld, binname, 'BINARY'):
+        return
+
+    (sysdeps, localdeps, add_objects) = ADD_DEPENDENCIES(bld, binname, deps)
+
+    cache = LOCAL_CACHE(bld, 'INIT_FUNCTIONS')
+    if modules is not None:
+        for m in modules.split():
+            bld.ASSERT(m in cache,
+                       "No init_function defined for module '%s' in binary '%s'" % (m, binname))
+            cflags += ' -DSTATIC_%s_MODULES="%s"' % (m, cache[m])
+
+    bld.SET_BUILD_GROUP(group)
+    bld(
+        features = 'cc cprogram',
+        source = source_list,
+        target = binname,
+        uselib_local = localdeps,
+        uselib = sysdeps,
+        includes = ilist,
+        ccflags = CURRENT_CFLAGS(bld, cflags),
+        add_objects = add_objects,
+        top=True)
+    # put a link to the binary in bin/
+    if not Options.is_install:
+        bld(
+            source = binname,
+            rule = 'rm -f %s && cp ${SRC} .' % (binname),
+            shell = True,
+            after = 'cc_link'
+            )
+Build.BuildContext.SAMBA_BINARY = SAMBA_BINARY
+
+
+#################################################################
+# define a Samba python module
+def SAMBA_PYTHON(bld, name, source_list,
+                 deps='',
+                 public_deps='',
+                 realname=''):
+
+    if not SET_TARGET_TYPE(bld, name, 'PYTHON'):
+        return
+
+    (sysdeps, localdeps, add_objects) = ADD_DEPENDENCIES(bld, name, deps)
+
+    return
+Build.BuildContext.SAMBA_PYTHON = SAMBA_PYTHON
+
+
+################################################################################
+# an asn1 task which calls out to asn1_compile_wrapper.sh to do the work
+Task.simple_task_type('asn1',
+                      '''
+# shell script to convert ASN1 to C. This could be separated out if we want to
+set -e
+compiler=${TGT[0].compiler}
+destdir=${TGT[0].destdir}
+wrapper=${TGT[0].asn1wrapper}
+srcfile=${SRC[0].abspath(env)}
+asn1name=${TGT[0].asn1name}
+options="${TGT[0].asn1options}"
+
+# run the wrapper
+$wrapper . $destdir $compiler $srcfile $asn1name ${options} --one-code-file
+
+# that generated 3 files:
+# ${asn1name}.hx
+# asn1_${asn1name}.x
+# ${asn1name}_files
+
+
+hxfile=$destdir/$asn1name.hx
+xfile=$destdir/asn1_$asn1name.x
+listfile=$destdir/"$asn1name"_files
+
+cfile=${TGT[0].abspath(env)}
+hfile=${TGT[1].abspath(env)}
+
+cp $hxfile $hfile
+echo '#include "config.h"' > $cfile
+cat $xfile >> $cfile
+rm -f $listfile
+
+''',
+                      color='BLUE',
+                      ext_out='.c',
+                      shell = True)
+
+@extension('.asn1')
+def process_asn1(self, node):
+
+    asn1name = string.replace(node.file(), '.', '_')
+    c_node = NEW_NODE(node, 'asn1_%s.c' % asn1name)
+    h_node = NEW_NODE(node, '%s.h' % asn1name)
+
+    c_node.destdir = "default/source4/heimdal/" + self.asn1directory
+    c_node.asn1options = self.asn1options
+    c_node.asn1name = asn1name
+    c_node.asn1wrapper = "../heimdal_build/asn1_compile_wrapper.sh"
+    c_node.compiler = "default/source4/heimdal_build/asn1_compile"
+
+    self.create_task('asn1', node, [c_node, h_node])
+    self.allnodes.append(c_node)
+
+
+#################################################################
+# define a Samba ASN1 target
+def SAMBA_ASN1(bld, name, source,
+               options='',
+               directory=''):
+    if not SET_TARGET_TYPE(bld, name, 'ASN1'):
+        return
+    bld.SET_BUILD_GROUP('build_source')
+    bld(
+        features = 'cc',
+        source = source,
+        target = name,
+        asn1options = options,
+        asn1directory = directory
+        )
+Build.BuildContext.SAMBA_ASN1 = SAMBA_ASN1
+
+
+
+################################################################################
+# an et task which calls out to compile_et to do the work
+Task.simple_task_type('et',
+                      '../heimdal_build/et_compile_wrapper.sh . ${TGT[0].bld_dir(env)} default/source4/heimdal_build/compile_et ${SRC[0].abspath(env)} ${TGT[0].bldpath(env)}',
+                      color='BLUE', ext_out='.c',
+                      shell = False)
+
+@extension('.et')
+def process_et(self, node):
+    c_node = node.change_ext('.c')
+    h_node = node.change_ext('.h')
+    self.create_task('et', node, [c_node, h_node])
+    self.allnodes.append(c_node)
+
+
+#################################################################
+# define a Samba ET target
+def SAMBA_ERRTABLE(bld, name, source,
+                   options='',
+                   directory=''):
+    if not SET_TARGET_TYPE(bld, name, 'ET'):
+        return
+    bld.SET_BUILD_GROUP('build_source')
+    bld(
+        features = 'cc',
+        source = source,
+        target = name
+        )
+Build.BuildContext.SAMBA_ERRTABLE = SAMBA_ERRTABLE
+
+##########################################################
+# create a node with a new name, based on an existing node
+def NEW_NODE(node, name):
+    ret = node.parent.find_or_declare([name])
+    ASSERT(node, ret is not None, "Unable to find new target with name '%s' from '%s'" % (
+           name, node.name))
+    return ret
+
+################################################################################
+# an idl task which calls out to pidl to do the work
+Task.simple_task_type('idl', '../../pidl/pidl --header --ndr-parser --client --python --server --outputdir=${TGT[0].outputdir} -- ${SRC}', color='BLUE', ext_out='.c')
+
+@extension('.idl')
+def process_idl(self, node):
+    bname = node.file_base()
+    c_node = NEW_NODE(node, 'ndr_%s.c' % bname)
+    h1_node = NEW_NODE(node, '%s.h' % bname)
+    h2_node = NEW_NODE(node, 'ndr_%s.h' % bname)
+    s_node = NEW_NODE(node, 'ndr_%s_s.c' % bname)
+    cli_node = NEW_NODE(node, 'ndr_%s_c.c' % bname)
+    cli_h_node = NEW_NODE(node, 'ndr_%s_c.h' % bname)
+    py_node = NEW_NODE(node, 'py_%s.c' % bname)
+
+
+    dname = os.path.dirname(node.bld_dir(self.env)) + "/gen_ndr"
+    c_node.outputdir = dname
+
+    self.create_task('idl', node, [c_node, h1_node, h2_node, s_node, cli_node, cli_h_node, py_node])
+
+    # reinject the c node to the list of nodes to process
+    self.allnodes.append(c_node)
+
+
+#################################################################
+# define a PIDL target
+def SAMBA_PIDL(bld, directory, source):
+    name = os.path.basename(string.replace(source, '.idl', ''))
+    name = "%s/ndr_%s.o" % (directory, name)
+
+    if not SET_TARGET_TYPE(bld, name, 'PIDL'):
+        return
+
+    bld.SET_BUILD_GROUP('build_source')
+    bld(
+        features = 'cc',
+        source = source,
+        target = name
+        )
+Build.BuildContext.SAMBA_PIDL = SAMBA_PIDL
+
+
+
+#################################################################
+# define a set of Samba PIDL targets
+def SAMBA_PIDL_LIST(bld, directory, source_list):
+    for p in source_list.split():
+        bld.SAMBA_PIDL(directory, p)
+Build.BuildContext.SAMBA_PIDL_LIST = SAMBA_PIDL_LIST
+
+
+################################################################
+# build a C prototype file automatically
+def AUTOPROTO(bld, header, source_list):
+    if header is not None:
+        bld.SET_BUILD_GROUP('prototypes')
+        bld(
+            source = source_list,
+            target = header,
+            rule = '../script/mkproto.pl --srcdir=.. --builddir=. --public=/dev/null --private=${TGT} ${SRC}'
+            )
+Build.BuildContext.AUTOPROTO = AUTOPROTO
+
+
+#################################################################
+# define a Samba module.
+def SAMBA_MODULE(bld, modname, source_list,
+                 deps='',
+                 include_list='.',
+                 subsystem=None,
+                 init_function=None,
+                 autoproto=None,
+                 aliases=None,
+                 cflags='',
+                 output_type=None):
+
+    if not SET_TARGET_TYPE(bld, modname, 'MODULE'):
+        return
+
+    # remember empty modules, so we can strip the dependencies
+    if (source_list == '') or (source_list == []):
+        LOCAL_CACHE_SET(bld, 'EMPTY_TARGETS', modname, True)
+        return
+
+    (sysdeps, localdeps, add_objects) = ADD_DEPENDENCIES(bld, modname, deps)
+
+    ilist = bld.SAMBA_LIBRARY_INCLUDE_LIST(deps) + bld.SUBDIR(bld.curdir, include_list)
+    ilist = bld.NORMPATH(ilist)
+    bld.SET_BUILD_GROUP('main')
+    bld(
+        features = 'cc',
+        source = source_list,
+        target=modname,
+        ccflags = CURRENT_CFLAGS(bld, cflags),
+        includes='. ' + bld.env['BUILD_DIRECTORY'] + '/default ' + ilist)
+Build.BuildContext.SAMBA_MODULE = SAMBA_MODULE
+
+
+#################################################################
+# define a Samba subsystem
+def SAMBA_SUBSYSTEM(bld, modname, source_list,
+                    deps='',
+                    public_deps='',
+                    include_list='.',
+                    public_headers=None,
+                    autoproto=None,
+                    cflags='',
+                    group='main',
+                    config_option=None,
+                    init_function_sentinal=None):
+
+    if not SET_TARGET_TYPE(bld, modname, 'SUBSYSTEM'):
+        return
+
+    # if the caller specifies a config_option, then we create a blank
+    # subsystem if that configuration option was found at configure time
+    if (config_option is not None) and bld.CONFIG_SET(config_option):
+        source_list = ''
+
+    # remember empty subsystems, so we can strip the dependencies
+    if (source_list == '') or (source_list == []):
+        LOCAL_CACHE_SET(bld, 'EMPTY_TARGETS', modname, True)
+        return
+
+    (sysdeps, localdeps, add_objects) = ADD_DEPENDENCIES(bld, modname, deps)
+
+    ilist = bld.SAMBA_LIBRARY_INCLUDE_LIST(deps) + bld.SUBDIR(bld.curdir, include_list)
+    ilist = bld.NORMPATH(ilist)
+    bld.SET_BUILD_GROUP(group)
+    bld(
+        features = 'cc',
+        source = source_list,
+        target=modname,
+        ccflags = CURRENT_CFLAGS(bld, cflags),
+        includes='. ' + bld.env['BUILD_DIRECTORY'] + '/default ' + ilist)
+Build.BuildContext.SAMBA_SUBSYSTEM = SAMBA_SUBSYSTEM
+
+
+###############################################################
+# add a new set of build rules from a subdirectory
+# a local SUBDIR_LIST cache ensures we don't end up
+# with duplicate rules
+def BUILD_SUBDIR(bld, dir):
+    path = os.path.normpath(bld.curdir + '/' + dir)
+    cache = LOCAL_CACHE(bld, 'SUBDIR_LIST')
+    if path in cache: return
+    cache[path] = True
+    debug("build: Processing subdirectory %s" % dir)
+    bld.add_subdirs(dir)
+
+Build.BuildContext.BUILD_SUBDIR = BUILD_SUBDIR
+
+
+############################################################
+# this overrides the 'waf -v' debug output to be in a nice
+# unix like format instead of a python list.
+# Thanks to ita on #waf for this
+def exec_command(self, cmd, **kw):
+    import Utils, Logs
+    _cmd = cmd
+    if isinstance(cmd, list):
+        _cmd = ' '.join(cmd)
+    debug('runner: %s' % _cmd)
+    if self.log:
+        self.log.write('%s\n' % cmd)
+        kw['log'] = self.log
+    try:
+        if not kw.get('cwd', None):
+            kw['cwd'] = self.cwd
+    except AttributeError:
+        self.cwd = kw['cwd'] = self.bldnode.abspath()
+    return Utils.exec_command(cmd, **kw)
+Build.BuildContext.exec_command = exec_command
+
+
+##########################################################
+# add a new top level command to waf
+def ADD_COMMAND(opt, name, function):
+    Utils.g_module.__dict__[name] = function
+    opt.name = function
+Options.Handler.ADD_COMMAND = ADD_COMMAND
+
+###########################################################
+# setup build groups used to ensure that the different build
+# phases happen consecutively
+@runonce
+def SETUP_BUILD_GROUPS(bld):
+    bld.env['USING_BUILD_GROUPS'] = True
+    bld.add_group('setup')
+    bld.add_group('build_compilers')
+    bld.add_group('build_source')
+    bld.add_group('prototypes')
+    bld.add_group('main')
+    bld.add_group('final')
+Build.BuildContext.SETUP_BUILD_GROUPS = SETUP_BUILD_GROUPS
+
+
+###########################################################
+# set the current build group
+def SET_BUILD_GROUP(bld, group):
+    if not 'USING_BUILD_GROUPS' in bld.env:
+        return
+    bld.set_group(group)
+Build.BuildContext.SET_BUILD_GROUP = SET_BUILD_GROUP
+
+
+#import TaskGen, Task
+#
+#old_post_run = Task.Task.post_run
+#def new_post_run(self):
+#    self.cached = True
+#    return old_post_run(self)
+#
+#for y in ['cc', 'cxx']:
+#    TaskGen.classes[y].post_run = new_post_run
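
For context on how the helpers above are meant to be driven, here is a hedged sketch of a wscript configure/build pair using this API. The target names, headers, library and paths are hypothetical, and the usual waf/Samba scaffolding (srcdir, blddir, the wafsamba tool being loaded) is assumed:

# hypothetical wscript using the wafsamba helpers above

def configure(conf):
    conf.env.hlist = []                          # header list consumed by the CHECK_* helpers
    conf.CHECK_HEADERS('stdio.h stdlib.h unistd.h')
    conf.CHECK_FUNCS('memset strchr')
    conf.CHECK_FUNCS_IN('gzread', 'z', mandatory=False)   # system library check; 'z' can then be listed in deps
    conf.CONFIG_PATH('CONFIGDIR', '/etc')
    conf.SAMBA_BUILD_ENV()
    conf.SAMBA_CONFIG_H()

def build(bld):
    bld.SETUP_BUILD_GROUPS()
    bld.set_rpath()
    bld.SAMBA_SUBSYSTEM('UTIL', 'util.c', deps='z')
    bld.SAMBA_LIBRARY('basic', 'lib1.c lib2.c', deps='UTIL', vnum='0.0.1')
    bld.SAMBA_LIBRARY('emptylib', '', deps='UTIL')         # empty source list: recorded, not built
    bld.SAMBA_BINARY('testprog', 'main.c', deps='basic emptylib')
    bld.CHECK_DEPENDENCIES()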