summaryrefslogtreecommitdiff
path: root/buildtools
diff options
context:
space:
mode:
Diffstat (limited to 'buildtools')
-rwxr-xr-xbuildtools/compare_generated.sh44
-rwxr-xr-xbuildtools/waf-svnbin90898 -> 90557 bytes
-rw-r--r--buildtools/wafsamba/README15
-rw-r--r--buildtools/wafsamba/gccdeps.py108
-rw-r--r--buildtools/wafsamba/samba_asn1.py16
-rw-r--r--buildtools/wafsamba/samba_autoconf.py8
-rw-r--r--buildtools/wafsamba/samba_autoproto.py3
-rw-r--r--buildtools/wafsamba/samba_deps.py823
-rw-r--r--buildtools/wafsamba/samba_errtable.py26
-rw-r--r--buildtools/wafsamba/samba_patterns.py21
-rw-r--r--buildtools/wafsamba/samba_pidl.py13
-rw-r--r--buildtools/wafsamba/samba_python.py37
-rw-r--r--buildtools/wafsamba/samba_utils.py50
-rw-r--r--buildtools/wafsamba/wafsamba.py174
14 files changed, 973 insertions, 365 deletions
diff --git a/buildtools/compare_generated.sh b/buildtools/compare_generated.sh
index 98504a868a..ae20fef37d 100755
--- a/buildtools/compare_generated.sh
+++ b/buildtools/compare_generated.sh
@@ -2,13 +2,49 @@
# compare the generated files from a waf
+old_build=$HOME/samba_old
gen_files=$(cd bin/default && find . -type f -name '*.[ch]')
+2>&1
+
+strip_file()
+{
+ in_file=$1
+ out_file=$2
+ cat $in_file |
+ grep -v 'The following definitions come from' |
+ grep -v 'Automatically generated at' |
+ grep -v 'Generated from' |
+ sed 's|/home/tnagy/samba/source4||g' |
+ sed 's|/home/tnagy/samba/|../|g' |
+ sed 's|bin/default/source4/||g' |
+ sed 's|bin/default/|../|g' |
+ sed 's/define _____/define ___/g' |
+ sed 's/define __*/define _/g' |
+ sed 's/define _DEFAULT_/define _/g' |
+ sed 's/define _SOURCE4_/define ___/g' |
+ sed 's/define ___/define _/g' |
+ sed 's/ifndef ___/ifndef _/g' |
+ sed 's|endif /* ____|endif /* __|g' |
+ sed s/__DEFAULT_SOURCE4/__/ |
+ sed s/__DEFAULT_SOURCE4/__/ |
+ sed s/__DEFAULT/____/ > $out_file
+}
+
+compare_file()
+{
+ f=$1
+ bname=$(basename $f)
+ t1=/tmp/$bname.old.$$
+ t2=/tmp/$bname.new.$$
+ strip_file $old_build/$f $t1
+ strip_file bin/default/$f $t2
+ diff -u -b $t1 $t2 2>&1
+ rm -f $t1 $t2
+}
+
for f in $gen_files; do
- echo
- echo "==================================================="
- echo "Comparing generated file $f"
- diff -u -b $HOME/samba_old/$f bin/default/$f
+ compare_file $f
done
diff --git a/buildtools/waf-svn b/buildtools/waf-svn
index 740082d654..0643c28f72 100755
--- a/buildtools/waf-svn
+++ b/buildtools/waf-svn
Binary files differ
diff --git a/buildtools/wafsamba/README b/buildtools/wafsamba/README
index d813e9ecea..028955c943 100644
--- a/buildtools/wafsamba/README
+++ b/buildtools/wafsamba/README
@@ -2,3 +2,18 @@ This is a set of waf 'tools' to help make building the Samba
components easier, by having common functions in one place. This gives
us a more consistent build, and ensures that our project rules are
obeyed
+
+
+TODO:
+ - fix deps for --target
+ - cache project rules calculation
+ - make pidl rules depend on full pidl sources
+ - make script rules depend on the scripts
+ - add waf test
+ - s3 build
+ - merged build
+ - etags
+ - rest of old make targets
+ - better Makefile waf wrapper
+ -
+
diff --git a/buildtools/wafsamba/gccdeps.py b/buildtools/wafsamba/gccdeps.py
new file mode 100644
index 0000000000..bd03da4f8b
--- /dev/null
+++ b/buildtools/wafsamba/gccdeps.py
@@ -0,0 +1,108 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2008-2010 (ita)
+
+"""
+Execute the tasks with gcc -MD, read the dependencies from the .d file
+and prepare the dependency calculation for the next run
+"""
+
+import os, re, threading
+import Task, Logs, Utils, preproc
+
+lock = threading.Lock()
+
+
+def detect(conf):
+ conf.env.append_unique('CCFLAGS', '-MD')
+
+def scan(self):
+ "the scanner does not do anything initially"
+ nodes = self.generator.bld.node_deps.get(self.unique_id(), [])
+ names = []
+ return (nodes, names)
+
+re_src = re.compile("^(\.\.)[\\/](.*)$")
+
+def post_run(self):
+ # The following code is executed by threads, it is not safe, so a lock is needed...
+
+ if getattr(self, 'cached', None):
+ return Task.Task.post_run(self)
+
+ name = self.outputs[0].abspath(self.env)
+ name = name[:-2] + '.d'
+ txt = Utils.readf(name)
+ #os.unlink(name)
+
+ txt = txt.replace('\\\n', '')
+
+ lst = txt.strip().split(':')
+ val = ":".join(lst[1:])
+ val = val.split()
+
+ nodes = []
+ bld = self.generator.bld
+
+ f = re.compile("^("+self.env.variant()+"|\.\.)[\\/](.*)$")
+ for x in val:
+ if os.path.isabs(x):
+
+ if not preproc.go_absolute:
+ continue
+
+ lock.acquire()
+ try:
+ node = bld.root.find_resource(x)
+ finally:
+ lock.release()
+ else:
+ g = re.search(re_src, x)
+ if g:
+ x = g.group(2)
+ lock.acquire()
+ try:
+ node = bld.bldnode.parent.find_resource(x)
+ finally:
+ lock.release()
+ else:
+ g = re.search(f, x)
+ if g:
+ x = g.group(2)
+ lock.acquire()
+ try:
+ node = bld.srcnode.find_resource(x)
+ finally:
+ lock.release()
+
+ if id(node) == id(self.inputs[0]):
+ # ignore the source file, it is already in the dependencies
+ # this way, successful config tests may be retrieved from the cache
+ continue
+
+ if not node:
+ raise ValueError('could not find %r for %r' % (x, self))
+ else:
+ nodes.append(node)
+
+ Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))
+
+ bld.node_deps[self.unique_id()] = nodes
+ bld.raw_deps[self.unique_id()] = []
+
+ try:
+ del self.cache_sig
+ except:
+ pass
+
+ Task.Task.post_run(self)
+
+for name in 'cc cxx'.split():
+ try:
+ cls = Task.TaskBase.classes[name]
+ except KeyError:
+ pass
+ else:
+ cls.post_run = post_run
+ cls.scan = scan
+
diff --git a/buildtools/wafsamba/samba_asn1.py b/buildtools/wafsamba/samba_asn1.py
index 27dc43931f..6b099062c7 100644
--- a/buildtools/wafsamba/samba_asn1.py
+++ b/buildtools/wafsamba/samba_asn1.py
@@ -1,7 +1,7 @@
# samba ASN1 rules
-from TaskGen import taskgen, before
-import Build, os, string, Utils
+from TaskGen import before
+import Build, os
from samba_utils import *
from samba_autoconf import *
@@ -48,11 +48,16 @@ def SAMBA_ASN1(bld, name, source,
# SRC[0].abspath(env) gives the absolute path to the source directory for the first
# source file. Note that in the case of a option_file, we have more than
# one source file
+ # SRC[1].abspath(env) gives the path of asn1_compile. This makes the asn1 output
+ # correctly depend on the compiler binary
cd_rule = 'cd ${TGT[0].parent.abspath(env)}'
- asn1_rule = cd_rule + ' && ${ASN1COMPILER} ${OPTION_FILE} ${ASN1OPTIONS} --one-code-file ${SRC[0].abspath(env)} ${ASN1NAME}'
+ asn1_rule = cd_rule + ' && ${SRC[1].abspath(env)} ${OPTION_FILE} ${ASN1OPTIONS} --one-code-file ${SRC[0].abspath(env)} ${ASN1NAME}'
+
+ source = TO_LIST(source)
+ source.append('asn1_compile')
if option_file is not None:
- source = [ source, option_file ]
+ source.append(option_file)
t = bld(rule=asn1_rule,
features = 'asn1',
@@ -65,7 +70,6 @@ def SAMBA_ASN1(bld, name, source,
t.env.ASN1NAME = asn1name
t.env.ASN1OPTIONS = options
- t.env.ASN1COMPILER = os.path.join(os.environ.get('PWD'), 'bin/asn1_compile')
if option_file is not None:
t.env.OPTION_FILE = "--option-file=%s" % os.path.normpath(os.path.join(bld.curdir, option_file))
@@ -99,7 +103,7 @@ def SAMBA_ASN1(bld, name, source,
t = bld(features = 'cc',
source = cfile,
target = name,
- ccflags = CURRENT_CFLAGS(bld, name, ''),
+ samba_cflags = CURRENT_CFLAGS(bld, name, ''),
depends_on = '',
samba_deps = TO_LIST('HEIMDAL_ROKEN'),
samba_includes = includes,
diff --git a/buildtools/wafsamba/samba_autoconf.py b/buildtools/wafsamba/samba_autoconf.py
index f7519858cc..b3b9c09d33 100644
--- a/buildtools/wafsamba/samba_autoconf.py
+++ b/buildtools/wafsamba/samba_autoconf.py
@@ -1,10 +1,8 @@
# a waf tool to add autoconf-like macros to the configure section
-import Build, os, Logs, sys, Configure, Options
-import string, Task, Utils, optparse
+import Build, os
+import string
from Configure import conf
-from Logs import debug
-from TaskGen import extension
from samba_utils import *
####################################################
@@ -208,7 +206,7 @@ def CHECK_CODE(conf, code, define,
execute=execute,
define_name = define,
mandatory = mandatory,
- ccflags=TO_LIST(cflags),
+ samba_cflags=TO_LIST(cflags),
includes=includes,
msg=msg):
conf.DEFINE(define, 1)
diff --git a/buildtools/wafsamba/samba_autoproto.py b/buildtools/wafsamba/samba_autoproto.py
index 71274ead75..a6a5e28c49 100644
--- a/buildtools/wafsamba/samba_autoproto.py
+++ b/buildtools/wafsamba/samba_autoproto.py
@@ -1,7 +1,6 @@
# waf build tool for building automatic prototypes from C source
-from TaskGen import taskgen, before
-import Build, os, string, Utils
+import Build
from samba_utils import *
# rule for heimdal prototype generation
diff --git a/buildtools/wafsamba/samba_deps.py b/buildtools/wafsamba/samba_deps.py
index 41b786d6bf..d6b68f7c64 100644
--- a/buildtools/wafsamba/samba_deps.py
+++ b/buildtools/wafsamba/samba_deps.py
@@ -1,7 +1,6 @@
-# Samba automatic dependency handling
+# Samba automatic dependency handling and project rules
-from TaskGen import taskgen, before
-import Build, os, string, Utils, re
+import Build, os, re, Environment
from samba_utils import *
from samba_autoconf import *
@@ -30,156 +29,66 @@ def EXPAND_ALIAS(bld, target):
Build.BuildContext.EXPAND_ALIAS = EXPAND_ALIAS
-def expand_dependencies(bld, dep, chain, path):
- '''expand a dependency recursively
- return a triple of (uselib, uselib_local, add_objects)
- '''
-
- dep = EXPAND_ALIAS(bld, dep)
-
- t = bld.name_to_obj(dep, bld.env)
-
- # check for a cached list
- if t is not None:
- expanded = getattr(t, 'expanded_dependencies', None)
- if expanded is not None:
- return expanded
-
- target_dict = LOCAL_CACHE(bld, 'TARGET_TYPE')
-
- uselib_local = []
- uselib = []
- add_objects = []
-
- recurse = False
-
- bld.ASSERT(dep in target_dict, "Dependency %s not found in %s" % (dep, path))
- type = target_dict[dep]
- if type == 'SYSLIB':
- uselib.append(dep)
- elif type == 'LIBRARY':
- uselib_local.append(dep)
- recurse = True
- elif type == 'SUBSYSTEM':
- add_objects.append(dep)
- recurse = True
- elif type == 'MODULE':
- add_objects.append(dep)
- recurse = True
- elif type == 'PYTHON':
- add_objects.append(dep)
- recurse = True
- elif type == 'ASN1':
- add_objects.append(dep)
- recurse = True
- elif type == 'BINARY':
- pass
- elif type == 'EMPTY':
- pass
- elif type == 'DISABLED':
- debug('deps: Ignoring dependency on disabled target %s: %s' % (dep, path))
- else:
- bld.ASSERT(False, "Unknown target type %s for %s" % (type, dep))
-
- # for some types we have to build the list recursively
- if recurse:
- bld.ASSERT(t is not None, "Unable to find target %s" % dep)
- rec_deps = getattr(t, 'samba_deps', None)
- bld.ASSERT(rec_deps is not None, "Unable to find dependencies of target %s" % dep)
- for d2 in rec_deps:
- try:
- bld.ASSERT(d2 not in chain, "Circular dependency for %s: %s->%s" % (dep, path, d2))
- except:
- print "Removing dependency %s from target %s" % (d2, dep)
- rec_deps.remove(d2)
- continue
- c2 = chain.copy()
- c2[d2] = True
- (rec_uselib, rec_uselib_local,
- rec_add_objects) = expand_dependencies(bld, d2, c2, "%s->%s" % (path, d2))
- uselib.extend(rec_uselib)
- uselib_local.extend(rec_uselib_local)
- add_objects.extend(rec_add_objects)
-
- if t is not None:
- t.expanded_dependencies = (uselib, uselib_local, add_objects)
-
- return (uselib, uselib_local, add_objects)
-
-
-def expand_deplist(self):
- '''return an expanded list of dependencies from the samba_deps attribute'''
-
- if not getattr(self, 'samba_deps', None):
- return ([], [], [])
-
- bld = self.bld
- deps = self.samba_deps
-
- uselib_local = []
- uselib = []
- add_objects = []
-
- for d in deps:
- (u, ul, ao) = expand_dependencies(bld, d, { self.name:True }, self.name)
- uselib.extend(u)
- uselib_local.extend(ul)
- add_objects.extend(ao)
+def expand_subsystem_deps(bld):
+ '''expand the reverse dependencies resulting from subsystem
+ attributes of modules'''
+ subsystems = LOCAL_CACHE(bld, 'INIT_FUNCTIONS')
+ aliases = LOCAL_CACHE(bld, 'TARGET_ALIAS')
+ targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+
+ for s in subsystems:
+ if s in aliases:
+ s = aliases[s]
+ bld.ASSERT(s in targets, "Subsystem target %s not declared" % s)
+ type = targets[s]
+ if type == 'DISABLED' or type == 'EMPTY':
+ continue
- return (uselib, uselib_local, add_objects)
+ t = bld.name_to_obj(s, bld.env)
+ bld.ASSERT(t is not None, "Subsystem target %s not found" % s)
+ for d in subsystems[s]:
+ type = targets[d['TARGET']]
+ if type != 'DISABLED' and type != 'EMPTY':
+ t.samba_deps_extended.append(d['TARGET'])
+ t2 = bld.name_to_obj(d['TARGET'], bld.env)
+ t2.samba_includes_extended.extend(t.samba_includes_extended)
+ t2.samba_deps_extended.extend(t.samba_deps_extended)
+ t.samba_deps_extended = unique_list(t.samba_deps_extended)
-@feature('cc', 'cshlib', 'cprogram')
-@before('apply_lib_vars', 'apply_verif', 'apply_objdeps', 'apply_obj_vars', 'apply_incpaths', 'build_includes')
-@after('default_cc')
def build_dependencies(self):
'''This builds the dependency list for a target. It runs after all the targets are declared
The reason this is not just done in the SAMBA_*() rules is that we have no way of knowing
- the full dependency list for a target until we have all of the targets declared. So what we do is
- add a samba_deps attribute on the task generator when we declare it, then
- this rule runs after all the task generators are declared and maps the samba_deps attribute
- to a set of uselib, uselib_local and add_objects dependencies
+ the full dependency list for a target until we have all of the targets declared.
'''
- if getattr(self, 'build_dependencies_done', False):
- return
- self.build_dependencies_done = True
-
- if getattr(self, 'samba_deps', None) is None:
- return
-
- target_dict = LOCAL_CACHE(self.bld, 'TARGET_TYPE')
-
# we only should add extra library and object deps on libraries and binaries
- type = target_dict[self.name]
- if type != 'LIBRARY' and type != 'BINARY':
+ if not self.samba_type in ['LIBRARY', 'BINARY', 'PYTHON']:
return
- (uselib, uselib_local, add_objects) = expand_deplist(self)
+ # we need to link against:
- if 'GLOBAL_DEPENDENCIES' in self.bld.env:
- add_objects.extend(self.bld.env.GLOBAL_DEPENDENCIES)
+ # 1) any direct system libs
+ # 2) any indirect system libs that come from subsystem dependencies
+ # 3) any direct local libs
+ # 4) any indirect local libs that come from subsystem dependencies
+ # 5) any direct objects
+ # 6) any indirect objects that come from subsystem dependencies
- self.uselib = unique_list(uselib)
- self.uselib_local = unique_list(uselib_local)
- self.add_objects = unique_list(add_objects)
+ self.uselib = list(self.final_syslibs)
+ self.uselib_local = list(self.final_libs)
+ self.add_objects = list(self.final_objects)
- debug('deps: dependency counts for %s: uselib=%u uselib_local=%u add_objects=%u' % (
- self.name, len(uselib), len(uselib_local), len(add_objects)))
+ debug('deps: computed dependencies for target %s: uselib=%s uselib_local=%s add_objects=%s',
+ self.sname, self.uselib, self.uselib_local, self.add_objects)
-@feature('cc', 'cshlib', 'cprogram')
-@before('apply_lib_vars', 'apply_verif', 'apply_objdeps', 'apply_obj_vars', 'apply_incpaths', 'add_init_functions')
-@after('build_dependencies')
def build_includes(self):
'''This builds the right set of includes for a target.
- This is closely related to building the set of dependencies, and
- calls into the same expand_dependencies() function to do the work.
-
One tricky part of this is that the includes= attribute for a
target needs to use paths which are relative to that targets
declaration directory (which we can get at via t.path).
@@ -191,82 +100,68 @@ def build_includes(self):
attribute
'''
- if not getattr(self, 'build_dependencies_done', False):
- build_dependencies(self)
- if getattr(self, 'build_includes_done', False):
- return
- self.build_includes_done = True
-
if getattr(self, 'samba_includes', None) is None:
return
bld = self.bld
- (uselib, uselib_local, add_objects) = expand_deplist(self)
-
- # get the list of all dependencies
- all_deps = []
-# all_deps.extend(uselib)
- all_deps.extend(uselib_local)
- all_deps.extend(add_objects)
- all_deps = unique_list(all_deps)
+ inc_deps = self.includes_objects
includes = []
- # build a list of includes
+ # maybe add local includes
if getattr(self, 'local_include', True) == True and getattr(self, 'local_include_first', True):
includes.append('.')
- includes.extend(TO_LIST(self.samba_includes))
+ includes.extend(self.samba_includes_extended)
if 'EXTRA_INCLUDES' in bld.env:
includes.extend(bld.env['EXTRA_INCLUDES'])
includes.append('#')
- mypath = self.path.abspath(bld.env)
+ inc_set = set()
+ inc_abs = []
- for d in all_deps:
+ for d in inc_deps:
t = bld.name_to_obj(d, bld.env)
- bld.ASSERT(t is not None, "Unable to find dependency %s for %s" % (d, self.name))
- t.samba_used = True
- samba_includes = getattr(t, 'samba_includes', None)
- inclist = TO_LIST(samba_includes)
+ bld.ASSERT(t is not None, "Unable to find dependency %s for %s" % (d, self.sname))
+ inclist = getattr(t, 'samba_includes_extended', [])
if getattr(t, 'local_include', True) == True:
inclist.append('.')
if inclist == []:
continue
- tpath = t.path.abspath(bld.env)
- relpath = os.path.relpath(tpath, mypath)
+ tpath = t.samba_abspath
for inc in inclist:
- includes.append(os.path.normpath(os.path.join(relpath, inc)))
+ npath = tpath + '/' + inc
+ if not npath in inc_set:
+ inc_abs.append(npath)
+ inc_set.add(npath)
+
+ mypath = self.path.abspath(bld.env)
+ for inc in inc_abs:
+ relpath = os_path_relpath(inc, mypath)
+ includes.append(relpath)
if getattr(self, 'local_include', True) == True and not getattr(self, 'local_include_first', True):
includes.append('.')
self.includes = unique_list(includes)
- debug('deps: Target %s has includes=%s all_deps=%s' % (self.name, self.includes, all_deps))
+ debug('deps: includes for target %s: includes=%s',
+ self.sname, self.includes)
+
-@feature('cc', 'cshlib', 'cprogram')
-@before('apply_lib_vars', 'apply_verif', 'apply_objdeps', 'apply_obj_vars', 'apply_incpaths')
-@after('build_includes')
def add_init_functions(self):
'''This builds the right set of init functions'''
- if not getattr(self, 'build_includes_done', False):
- build_includes(self)
- if getattr(self, 'add_init_functions_done', False):
- return
- self.add_init_functions_done = True
-
bld = self.bld
subsystems = LOCAL_CACHE(bld, 'INIT_FUNCTIONS')
modules = []
- if self.name in subsystems:
- modules.append(self.name)
+ if self.sname in subsystems:
+ modules.append(self.sname)
m = getattr(self, 'samba_modules', None)
if m is not None:
@@ -279,112 +174,530 @@ def add_init_functions(self):
if modules == []:
return
- cflags = getattr(self, 'ccflags', [])
+ sentinal = getattr(self, 'init_function_sentinal', 'NULL')
+
+ cflags = getattr(self, 'samba_cflags', [])[:]
for m in modules:
- if not m in subsystems:
- print "subsystems: %s" % subsystems
bld.ASSERT(m in subsystems,
- "No init_function defined for module '%s' in target '%s'" % (m, self.name))
- cflags.append('-DSTATIC_%s_MODULES="%s"' % (m, ','.join(subsystems[m])))
+ "No init_function defined for module '%s' in target '%s'" % (m, self.sname))
+ init_fn_list = []
+ for d in subsystems[m]:
+ init_fn_list.append(d['INIT_FUNCTION'])
+ cflags.append('-DSTATIC_%s_MODULES=%s' % (m, ','.join(init_fn_list) + ',' + sentinal))
self.ccflags = cflags
-def check_orpaned_targets(bld):
+
+def check_duplicate_sources(bld, tgt_list):
+ '''see if we are compiling the same source file into multiple
+ subsystem targets for the same library or binary'''
+
+ debug('deps: checking for duplicate sources')
+
+ targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+
+ for t in tgt_list:
+ if not targets[t.sname] in [ 'LIBRARY', 'BINARY', 'PYTHON' ]:
+ continue
+
+ sources = []
+ for obj in t.add_objects:
+ t2 = t.bld.name_to_obj(obj, bld.env)
+ obj_sources = getattr(t2, 'source', '')
+ if obj_sources == '': continue
+ tpath = os_path_relpath(t2.path.abspath(bld.env), t.env['BUILD_DIRECTORY'] + '/default')
+ obj_sources = bld.SUBDIR(tpath, obj_sources)
+ sources.append( { 'dep':obj, 'src':set(TO_LIST(obj_sources)) } )
+ #debug('deps: dependency expansion for target %s add_object %s: %s',
+ # t.sname, obj, obj_sources)
+ for s in sources:
+ for s2 in sources:
+ if s['dep'] == s2['dep']: continue
+ common = s['src'].intersection(s2['src'])
+ if common:
+ bld.ASSERT(False,
+ "Target %s has duplicate source files in %s and %s : %s" % (t.sname,
+ s['dep'], s2['dep'],
+ common))
+
+def check_orpaned_targets(bld, tgt_list):
'''check if any build targets are orphaned'''
target_dict = LOCAL_CACHE(bld, 'TARGET_TYPE')
- # make sure all the earlier functions have run
- for t in bld.all_task_gen:
- if not t.name in target_dict:
+ debug('deps: checking for orphaned targets')
+
+ for t in tgt_list:
+ if getattr(t, 'samba_used', False) == True:
continue
- if not getattr(t, 'add_init_functions_done', False):
- add_init_functions(t)
+ type = target_dict[t.sname]
+ if not type in ['BINARY', 'LIBRARY', 'MODULE', 'ET', 'PYTHON']:
+ if re.search('^PIDL_', t.sname) is None:
+ print "Target %s of type %s is unused by any other target" % (t.sname, type)
+
+
+def show_final_deps(bld, tgt_list):
+ '''show the final dependencies for all targets'''
+
+ targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
- for t in bld.all_task_gen:
- if not t.name in target_dict:
+ for t in tgt_list:
+ if not targets[t.sname] in ['LIBRARY', 'BINARY', 'PYTHON']:
continue
- if getattr(t, 'samba_used', False) == True:
+ debug('deps: final dependencies for target %s: uselib=%s uselib_local=%s add_objects=%s',
+ t.sname, t.uselib, t.uselib_local, t.add_objects)
+
+
+def add_samba_attributes(bld, tgt_list):
+ '''ensure a target has the required samba attributes'''
+
+ targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+
+ for t in tgt_list:
+ if t.name != '':
+ t.sname = t.name
+ else:
+ t.sname = t.target
+ t.samba_type = targets[t.sname]
+ t.samba_abspath = t.path.abspath(bld.env)
+ t.samba_deps_extended = t.samba_deps[:]
+ t.samba_includes_extended = TO_LIST(t.samba_includes)[:]
+ t.ccflags = getattr(t, 'samba_cflags', '')
+
+def build_direct_deps(bld, tgt_list):
+ '''build the direct_objects and direct_libs sets for each target'''
+
+ targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+ global_deps = bld.env.GLOBAL_DEPENDENCIES
+
+ for t in tgt_list:
+ t.direct_objects = set()
+ t.direct_libs = set()
+ t.direct_syslibs = set()
+ deps = t.samba_deps_extended
+ deps.extend(global_deps)
+ for d in deps:
+ d = EXPAND_ALIAS(bld, d)
+ if not d in targets:
+ print "Unknown dependency %s in %s" % (d, t.sname)
+ raise ValueError("Unknown dependency %s in %s" % (d, t.sname))
+ if targets[d] in [ 'EMPTY', 'DISABLED' ]:
+ continue
+ if targets[d] == 'SYSLIB':
+ t.direct_syslibs.add(d)
+ continue
+ t2 = bld.name_to_obj(d, bld.env)
+ if t2 is None:
+ print "no task %s type %s" % (d, targets[d])
+ if t2.samba_type in [ 'LIBRARY', 'MODULE' ]:
+ t.direct_libs.add(d)
+ elif t2.samba_type in [ 'SUBSYSTEM', 'ASN1', 'PYTHON' ]:
+ t.direct_objects.add(d)
+ debug('deps: built direct dependencies')
+
+
+
+def indirect_libs(bld, t, chain):
+ '''recursively calculate the indirect library dependencies for a target
+
+ An indirect library is a library that results from a dependency on
+ a subsystem
+ '''
+
+ ret = getattr(t, 'indirect_libs', None)
+ if ret is not None:
+ return ret
+
+ ret = set()
+ for obj in t.direct_objects:
+ if obj in chain:
continue
- type = target_dict[t.name]
- if type != 'BINARY' and type != 'LIBRARY' and type != 'MODULE':
- if re.search('^PIDL_', t.name) is None:
- print "Target %s of type %s is unused by any other target" % (t.name, type)
+ chain.add(obj)
+ t2 = bld.name_to_obj(obj, bld.env)
+ r2 = indirect_libs(bld, t2, chain)
+ chain.remove(obj)
+ ret = ret.union(t2.direct_libs)
+ ret = ret.union(r2)
+
+ for obj in t.indirect_objects:
+ if obj in chain:
+ continue
+ chain.add(obj)
+ t2 = bld.name_to_obj(obj, bld.env)
+ r2 = indirect_libs(bld, t2, chain)
+ chain.remove(obj)
+ ret = ret.union(t2.direct_libs)
+ ret = ret.union(r2)
+ t.indirect_libs = ret
-def CHECK_ORPANED_TARGETS(bld):
- bld.add_pre_fun(check_orpaned_targets)
-Build.BuildContext.CHECK_ORPANED_TARGETS = CHECK_ORPANED_TARGETS
+ return ret
-@feature('dfkj*')
-def samba_post_process(self):
- '''samba specific post processing of task'''
- if getattr(self, 'meths', None) is None:
- return
- count = getattr(self, 'moved_to_end', 0)
- if count < 10:
- # there has got to be a better way!!
- self.moved_to_end = count + 1
- self.meths.append('samba_post_process')
- return
+def indirect_syslibs(bld, t, chain):
+ '''recursively calculate the indirect system library dependencies for a target
- samba_post = getattr(self, 'samba_post', None)
- if samba_post is None:
- return
- (tgt, cmd) = samba_post
- self.env.TARGET_DIRECTORY = self.path.abspath(self.env)
- #print "cmd=%s tgt=%s" % (cmd, tgt)
- cmd = Utils.subst_vars(cmd, self.env)
- tgt = Utils.subst_vars(tgt, self.env)
- if os.path.isfile(tgt):
- debug('deps: post processing for %s: %s' % (self.name, cmd))
- ret = os.system(cmd)
- self.bld.ASSERT(ret == 0, "Post processing for %s failed (%d): %s" % (self.name, ret, cmd))
-
-
-##############################
-# handle the creation of links for libraries and binaries
-# note that we use a relative symlink path to allow the whole tree
-# to me moved/copied elsewhere without breaking the links
-t = Task.simple_task_type('symlink_lib', 'ln -sf ../${SRC} ${LINK_TARGET}', color='PINK',
- ext_in='.bin')
-t.quiet = True
-
-@feature('symlink_lib')
-@after('apply_link')
-def symlink_lib(self):
- tsk = self.create_task('symlink_lib', self.link_task.outputs[0])
-
- # calculat the link target and put it in the environment
- soext=""
- vnum = getattr(self, 'vnum', None)
- if vnum is not None:
- soext = '.' + vnum.split('.')[0]
-
- libname = self.target
- tsk.env.LINK_TARGET = '%s/lib%s.so%s' % (LIB_PATH, libname, soext)
- debug('task_gen: LINK_TARGET for %s is %s', self.name, tsk.env.LINK_TARGET)
-
-
-# for binaries we need to copy the executable to avoid the rpath changing
-# in the local bin/ directory on install
-t = Task.simple_task_type('copy_bin', 'rm -f ${BIN_TARGET} && cp ${SRC} ${BIN_TARGET}', color='PINK',
- ext_in='.bin', shell=True)
-t.quiet = True
-
-@feature('copy_bin')
-@after('apply_link')
-def copy_bin(self):
- if Options.is_install:
- # we don't want to copy the install binary, as
- # that has the install rpath, not the build rpath
- # The rpath of the binaries in bin/default/foo/blah is different
- # during the install phase, as distros insist on not using rpath in installed binaries
+ An indirect syslib results from a subsystem dependency
+ '''
+
+ ret = getattr(t, 'indirect_syslibs', None)
+ if ret is not None:
+ return ret
+ ret = set()
+ for obj in t.direct_objects:
+ if obj in chain:
+ continue
+ chain.add(obj)
+ t2 = bld.name_to_obj(obj, bld.env)
+ r2 = indirect_syslibs(bld, t2, chain)
+ chain.remove(obj)
+ ret = ret.union(t2.direct_syslibs)
+ ret = ret.union(r2)
+
+ t.indirect_syslibs = ret
+ return ret
+
+
+def indirect_objects(bld, t, chain):
+ '''recursively calculate the indirect object dependencies for a target
+
+ indirect objects are the set of objects from expanding the
+ subsystem dependencies
+ '''
+
+ ret = getattr(t, 'indirect_objects', None)
+ if ret is not None: return ret
+
+ ret = set()
+ for lib in t.direct_objects:
+ if lib in chain:
+ continue
+ chain.add(lib)
+ t2 = bld.name_to_obj(lib, bld.env)
+ r2 = indirect_objects(bld, t2, chain)
+ chain.remove(lib)
+ ret = ret.union(t2.direct_objects)
+ ret = ret.union(r2)
+
+ t.indirect_objects = ret
+ return ret
+
+
+def expanded_targets(bld, t, chain):
+ '''recursively calculate the expanded targets for a target
+
+ expanded objects are the set of objects, libraries and syslibs
+ from expanding the subsystem dependencies, library dependencies
+ and syslib dependencies
+ '''
+
+ ret = getattr(t, 'expanded_targets', None)
+ if ret is not None: return ret
+
+ ret = t.direct_objects.copy()
+ ret = ret.union(t.direct_libs)
+ ret = ret.union(t.direct_syslibs)
+
+ direct = ret.copy()
+
+ for d in direct:
+ if d in chain: continue
+ chain.add(d)
+ t2 = bld.name_to_obj(d, bld.env)
+ if t2 is None: continue
+ r2 = expanded_targets(bld, t2, chain)
+ chain.remove(d)
+ ret = ret.union(r2)
+
+ if t.sname in ret:
+ ret.remove(t.sname)
+
+ t.expanded_targets = ret
+ return ret
+
+
+def expanded_targets2(bld, t, chain):
+ '''recursively calculate the expanded targets for a target
+
+ expanded objects are the set of objects from expanding the
+ subsystem dependencies and library dependencies
+ '''
+
+ ret = getattr(t, 'expanded_targets2', None)
+ if ret is not None: return ret
+
+ ret = t.final_objects.copy()
+
+ for attr in [ 'final_objects', 'final_libs' ]:
+ f = getattr(t, attr, set())
+ for d in f.copy():
+ if d in chain:
+ continue
+ chain.add(d)
+ t2 = bld.name_to_obj(d, bld.env)
+ if t2 is None: continue
+ r2 = expanded_targets2(bld, t2, chain)
+ chain.remove(d)
+ ret = ret.union(r2)
+
+ if t.sname in ret:
+ ret.remove(t.sname)
+
+ t.expanded_targets2 = ret
+ return ret
+
+
+def includes_objects(bld, t, chain):
+ '''recursively calculate the includes object dependencies for a target
+
+ includes dependencies come from either library or object dependencies
+ '''
+ ret = getattr(t, 'includes_objects', None)
+ if ret is not None:
+ return ret
+
+ ret = t.direct_objects.copy()
+ ret = ret.union(t.direct_libs)
+
+ for obj in t.direct_objects:
+ if obj in chain:
+ continue
+ chain.add(obj)
+ t2 = bld.name_to_obj(obj, bld.env)
+ r2 = includes_objects(bld, t2, chain)
+ chain.remove(obj)
+ ret = ret.union(t2.direct_objects)
+ ret = ret.union(r2)
+
+ for lib in t.direct_libs:
+ if lib in chain:
+ continue
+ chain.add(lib)
+ t2 = bld.name_to_obj(lib, bld.env)
+ r2 = includes_objects(bld, t2, chain)
+ chain.remove(lib)
+ ret = ret.union(t2.direct_objects)
+ ret = ret.union(r2)
+
+ t.includes_objects = ret
+ return ret
+
+
+def build_indirect_deps(bld, tgt_list):
+ '''build the indirect_objects and indirect_libs sets for each target'''
+ for t in tgt_list:
+ indirect_objects(bld, t, set())
+ indirect_libs(bld, t, set())
+ indirect_syslibs(bld, t, set())
+ includes_objects(bld, t, set())
+ expanded_targets(bld, t, set())
+ debug('deps: built indirect dependencies')
+
+
+def re_expand2(bld, tgt_list):
+ for t in tgt_list:
+ t.expanded_targets2 = None
+ for type in ['BINARY','LIBRARY','PYTHON']:
+ for t in tgt_list:
+ if t.samba_type == type:
+ expanded_targets2(bld, t, set())
+ for t in tgt_list:
+ expanded_targets2(bld, t, set())
+
+
+def calculate_final_deps(bld, tgt_list):
+ '''calculate the final library and object dependencies'''
+ for t in tgt_list:
+ # start with the maximum possible list
+ t.final_syslibs = t.direct_syslibs.union(t.indirect_syslibs)
+ t.final_libs = t.direct_libs.union(t.indirect_libs)
+ t.final_objects = t.direct_objects.union(t.indirect_objects)
+
+ for t in tgt_list:
+ # don't depend on ourselves
+ if t.sname in t.final_libs:
+ t.final_libs.remove(t.sname)
+ if t.sname in t.final_objects:
+ t.final_objects.remove(t.sname)
+
+ re_expand2(bld, tgt_list)
+
+ loops = {}
+
+ # find any library loops
+ for t in tgt_list:
+ if t.samba_type in ['LIBRARY', 'PYTHON']:
+ for l in t.final_libs.copy():
+ t2 = bld.name_to_obj(l, bld.env)
+ if t.sname in t2.final_libs:
+ debug('deps: removing library loop %s<->%s', t.sname, l)
+ t2.final_libs.remove(t.sname)
+ loops[t2.sname] = t.sname;
+
+ re_expand2(bld, tgt_list)
+
+ for type in ['BINARY']:
+ while True:
+ changed = False
+ for t in tgt_list:
+ if t.samba_type != type: continue
+ # if we will indirectly link to a target then we don't need it
+ new = t.final_objects.copy()
+ for l in t.final_libs:
+ t2 = bld.name_to_obj(l, bld.env)
+ dup = new.intersection(t2.expanded_targets2)
+ if dup:
+ debug('deps: removing dups from %s: %s also in %s %s',
+ t.sname, dup, t2.samba_type, l)
+ new = new.difference(dup)
+ changed = True
+ if changed:
+ t.final_objects = new
+ break
+ if not changed:
+ break
+ debug('deps: removed duplicate dependencies')
+
+
+######################################################################
+# this provides a way to save our dependency calculations between runs
+savedeps_version = 1
+savedeps_inputs = ['samba_deps', 'samba_includes', 'local_include', 'local_include_first', 'samba_cflags']
+savedeps_outputs = ['uselib', 'uselib_local', 'add_objects', 'includes', 'ccflags']
+savedeps_caches = ['GLOBAL_DEPENDENCIES', 'TARGET_ALIAS', 'TARGET_TYPE', 'INIT_FUNCTIONS']
+
+def save_samba_deps(bld, tgt_list):
+ '''save the dependency calculations between builds, to make
+ further builds faster'''
+ denv = Environment.Environment()
+
+ denv.version = savedeps_version
+ denv.savedeps_inputs = savedeps_inputs
+ denv.savedeps_outputs = savedeps_outputs
+ denv.input = {}
+ denv.output = {}
+ denv.caches = {}
+
+ for c in savedeps_caches:
+ denv.caches[c] = LOCAL_CACHE(bld, c)
+
+ for t in tgt_list:
+ # save all the input attributes for each target
+ tdeps = {}
+ for attr in savedeps_inputs:
+ v = getattr(t, attr, None)
+ if v is not None:
+ tdeps[attr] = v
+ if tdeps != {}:
+ denv.input[t.sname] = tdeps
+
+ # save all the output attributes for each target
+ tdeps = {}
+ for attr in savedeps_outputs:
+ v = getattr(t, attr, None)
+ if v is not None:
+ tdeps[attr] = v
+ if tdeps != {}:
+ denv.output[t.sname] = tdeps
+
+ depsfile = os.path.join(bld.bdir, "sambadeps")
+ denv.store(depsfile)
+
+
+def load_samba_deps(bld, tgt_list):
+ '''load a previous set of build dependencies if possible'''
+ depsfile = os.path.join(bld.bdir, "sambadeps")
+ denv = Environment.Environment()
+ try:
+ debug('deps: checking saved dependencies')
+ denv.load(depsfile)
+ if (denv.version != savedeps_version or
+ denv.savedeps_inputs != savedeps_inputs or
+ denv.savedeps_outputs != savedeps_outputs):
+ return False
+ except:
+ return False
+
+ # check if caches are the same
+ for c in savedeps_caches:
+ if c not in denv.caches or denv.caches[c] != LOCAL_CACHE(bld, c):
+ return False
+
+ # check inputs are the same
+ for t in tgt_list:
+ tdeps = {}
+ for attr in savedeps_inputs:
+ v = getattr(t, attr, None)
+ if v is not None:
+ tdeps[attr] = v
+ if t.sname in denv.input:
+ olddeps = denv.input[t.sname]
+ else:
+ olddeps = {}
+ if tdeps != olddeps:
+ #print '%s: \ntdeps=%s \nodeps=%s' % (t.sname, tdeps, olddeps)
+ return False
+
+ # put outputs in place
+ for t in tgt_list:
+ if not t.sname in denv.output: continue
+ tdeps = denv.output[t.sname]
+ for a in tdeps:
+ setattr(t, a, tdeps[a])
+
+ debug('deps: loaded saved dependencies')
+ return True
+
+
+def check_project_rules(bld):
+ '''check the project rules - ensuring the targets are sane'''
+
+ targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+
+ # build a list of task generators we are interested in
+ tgt_list = []
+ for tgt in targets:
+ type = targets[tgt]
+ if not type in ['SUBSYSTEM', 'MODULE', 'BINARY', 'LIBRARY', 'ASN1', 'PYTHON']:
+ continue
+ t = bld.name_to_obj(tgt, bld.env)
+ tgt_list.append(t)
+
+ add_samba_attributes(bld, tgt_list)
+
+ if load_samba_deps(bld, tgt_list):
return
- tsk = self.create_task('copy_bin', self.link_task.outputs[0])
- tsk.env.BIN_TARGET = self.target
- debug('task_gen: BIN_TARGET for %s is %s', self.name, tsk.env.BIN_TARGET)
+ debug('deps: project rules checking started')
+
+ expand_subsystem_deps(bld)
+ build_direct_deps(bld, tgt_list)
+ build_indirect_deps(bld, tgt_list)
+ calculate_final_deps(bld, tgt_list)
+
+ # run the various attribute generators
+ for f in [ build_dependencies, build_includes, add_init_functions ]:
+ debug('deps: project rules checking %s', f)
+ for t in tgt_list: f(t)
+
+ debug('deps: project rules stage1 completed')
+
+ #check_orpaned_targets(bld, tgt_list)
+ #check_duplicate_sources(bld, tgt_list)
+ show_final_deps(bld, tgt_list)
+
+ debug('deps: project rules checking completed - %u targets checked',
+ len(tgt_list))
+
+ save_samba_deps(bld, tgt_list)
+
+
+def CHECK_PROJECT_RULES(bld):
+ '''enable checking of project targets for sanity'''
+ if bld.env.added_project_rules:
+ return
+ bld.env.added_project_rules = True
+ bld.add_pre_fun(check_project_rules)
+Build.BuildContext.CHECK_PROJECT_RULES = CHECK_PROJECT_RULES
diff --git a/buildtools/wafsamba/samba_errtable.py b/buildtools/wafsamba/samba_errtable.py
new file mode 100644
index 0000000000..d324a3541b
--- /dev/null
+++ b/buildtools/wafsamba/samba_errtable.py
@@ -0,0 +1,26 @@
+# waf build tool for building .et files with compile_et
+import Build, os
+from samba_utils import *
+
+def SAMBA_ERRTABLE(bld, name, source):
+ '''Build a heimdal errtable from a .et file'''
+
+ bname = source[0:-3]; # strip off the .et suffix
+
+ if not SET_TARGET_TYPE(bld, name, 'ET'):
+ return
+
+ bld.SET_BUILD_GROUP('build_source')
+
+ out_files = []
+ out_files.append('%s.c' % bname)
+ out_files.append('%s.h' % bname)
+
+ t = bld(rule='${SRC[0].abspath(env)} . ${TGT[0].parent.abspath(env)} default/source4/heimdal_build/compile_et ${SRC[2].abspath(env)} ${TGT[0].bldpath(env)}',
+ ext_out = '.c',
+ before = 'cc',
+ shell = True,
+ source = ['et_compile_wrapper.sh', 'compile_et', source],
+ target = out_files,
+ name = name)
+Build.BuildContext.SAMBA_ERRTABLE = SAMBA_ERRTABLE
diff --git a/buildtools/wafsamba/samba_patterns.py b/buildtools/wafsamba/samba_patterns.py
index ef89db69a6..237020ba29 100644
--- a/buildtools/wafsamba/samba_patterns.py
+++ b/buildtools/wafsamba/samba_patterns.py
@@ -1,28 +1,9 @@
# a waf tool to add extension based build patterns for Samba
-import os, sys, Options
-import string, Task, Utils, optparse
-from Configure import conf
-from Logs import debug
+import Task
from TaskGen import extension
from samba_utils import *
-################################################################################
-# a et task which calls out to compile_et to do the work
-Task.simple_task_type('et',
- '../heimdal_build/et_compile_wrapper.sh . ${TGT[0].bld_dir(env)} default/source4/heimdal_build/compile_et ${SRC[0].abspath(env)} ${TGT[0].bldpath(env)}',
- color='BLUE', ext_out='.c',
- shell = False)
-
-@extension('.et')
-def process_et(self, node):
- c_node = node.change_ext('.c')
- h_node = node.change_ext('.h')
- self.create_task('et', node, [c_node, h_node])
- self.allnodes.append(c_node)
-
-
-
def SAMBA_MKVERSION(bld, target):
'''generate the version.h header for Samba'''
bld.SET_BUILD_GROUP('setup')
diff --git a/buildtools/wafsamba/samba_pidl.py b/buildtools/wafsamba/samba_pidl.py
index d37e7f1d23..e76e029a12 100644
--- a/buildtools/wafsamba/samba_pidl.py
+++ b/buildtools/wafsamba/samba_pidl.py
@@ -1,7 +1,7 @@
# waf build tool for building IDL files with pidl
-from TaskGen import taskgen, before
-import Build, os, string, Utils
+from TaskGen import before
+import Build, os
from samba_utils import *
def SAMBA_PIDL(bld, pname, source, options='', output_dir='.'):
@@ -41,7 +41,11 @@ def SAMBA_PIDL(bld, pname, source, options='', output_dir='.'):
# remember this one for the tables generation
table_header_idx = len(out_files) - 1
- pidl = bld.srcnode.find_resource('pidl/pidl').relpath_gen(bld.path)
+ # depend on the full pidl sources
+ source = TO_LIST(source)
+ pidl_src = [x.relpath_gen(bld.path) for x in
+ bld.srcnode.ant_glob('pidl/**/*', flat=False)]
+ source.extend(pidl_src)
# the cd .. is needed because pidl currently is sensitive to the directory it is run in
t = bld(rule='cd .. && ${PIDL} ${OPTIONS} --outputdir ${OUTPUTDIR} -- ${SRC[0].abspath(env)}',
@@ -54,8 +58,7 @@ def SAMBA_PIDL(bld, pname, source, options='', output_dir='.'):
t.env.PIDL = "../pidl/pidl"
t.env.OPTIONS = TO_LIST(options)
- t.env.OUTPUTDIR = 'bin/' + bld.BUILD_PATH(output_dir)
-
+ t.env.OUTPUTDIR = bld.bldnode.name + '/' + bld.path.find_dir(output_dir).bldpath(t.env)
if table_header_idx is not None:
pidl_headers = LOCAL_CACHE(bld, 'PIDL_HEADERS')
diff --git a/buildtools/wafsamba/samba_python.py b/buildtools/wafsamba/samba_python.py
index 0845a7e61c..4073dcf9bb 100644
--- a/buildtools/wafsamba/samba_python.py
+++ b/buildtools/wafsamba/samba_python.py
@@ -1,7 +1,6 @@
# waf build tool for building IDL files with pidl
-from TaskGen import taskgen, before
-import Build, os, string, Utils
+import Build
from samba_utils import *
from samba_autoconf import *
@@ -18,6 +17,25 @@ def SAMBA_PYTHON(bld, name,
enabled=True):
'''build a python extension for Samba'''
+ # when we support static python modules we'll need to gather
+ # the list from all the SAMBA_PYTHON() targets
+ if init_function_sentinal is not None:
+ cflags += '-DSTATIC_LIBPYTHON_MODULES="%s"' % init_function_sentinal
+
+ if realname is None:
+ # a SAMBA_PYTHON target without a realname is just a
+ # subsystem with needs_python=True
+ return bld.SAMBA_SUBSYSTEM(name,
+ source=source,
+ deps=deps,
+ public_deps=public_deps,
+ cflags=cflags,
+ includes=includes,
+ init_function_sentinal=init_function_sentinal,
+ local_include=local_include,
+ needs_python=True,
+ enabled=enabled)
+
if not enabled:
SET_TARGET_TYPE(bld, name, 'DISABLED')
return
@@ -27,18 +45,19 @@ def SAMBA_PYTHON(bld, name,
deps += ' ' + public_deps
- # when we support static python modules we'll need to gather
- # the list from all the SAMBA_PYTHON() targets
- if init_function_sentinal is not None:
- cflags += '-DSTATIC_LIBPYTHON_MODULES="%s"' % init_function_sentinal
+ if realname is None:
+ link_name = 'python/%s.so' % name
+ else:
+ link_name = 'python/%s' % realname
t = bld(
- features = 'cc cshlib pyext',
+ features = 'cc cshlib pyext symlink_lib',
source = source,
target = name,
- ccflags = CURRENT_CFLAGS(bld, name, cflags),
+ samba_cflags = CURRENT_CFLAGS(bld, name, cflags),
samba_includes = includes,
local_include = local_include,
- samba_deps = TO_LIST(deps)
+ samba_deps = TO_LIST(deps),
+ link_name = link_name
)
Build.BuildContext.SAMBA_PYTHON = SAMBA_PYTHON
diff --git a/buildtools/wafsamba/samba_utils.py b/buildtools/wafsamba/samba_utils.py
index 5db9f2542a..d01edcf3b4 100644
--- a/buildtools/wafsamba/samba_utils.py
+++ b/buildtools/wafsamba/samba_utils.py
@@ -1,11 +1,10 @@
# a waf tool to add autoconf-like macros to the configure section
# and for SAMBA_ macros for building libraries, binaries etc
-import Build, os, Logs, sys, Configure, Options, string, Task, Utils, optparse
+import Build, os, sys, Options, Utils
from TaskGen import feature, before
from Configure import conf
from Logs import debug
-from TaskGen import extension
import shlex
# TODO: make this a --option
@@ -102,7 +101,7 @@ Build.BuildContext.ASSERT = ASSERT
def SUBDIR(bld, subdir, list):
ret = ''
for l in TO_LIST(list):
- ret = ret + subdir + '/' + l + ' '
+ ret = ret + os.path.normpath(os.path.join(subdir, l)) + ' '
return ret
Build.BuildContext.SUBDIR = SUBDIR
@@ -189,16 +188,22 @@ def ENABLE_MAGIC_ORDERING(bld):
Build.BuildContext.ENABLE_MAGIC_ORDERING = ENABLE_MAGIC_ORDERING
-def BUILD_PATH(bld, relpath):
- '''return a relative build path, given a relative path
- for example, if called in the source4/librpc directory, with the path
- gen_ndr/tables.c, then it will return default/source4/gen_ndr/tables.c
- '''
+os_path_relpath = getattr(os.path, 'relpath', None)
+if os_path_relpath is None:
+ # Python < 2.6 does not have os.path.relpath, provide a replacement
+ # (imported from Python2.6.5~rc2)
+ def os_path_relpath(path, start):
+ """Return a relative version of a path"""
+ start_list = os.path.abspath(start).split("/")
+ path_list = os.path.abspath(path).split("/")
- ret = os.path.normpath(os.path.join(os.path.relpath(bld.curdir, bld.env.TOPDIR), relpath))
- ret = 'default/%s' % ret
- return ret
-Build.BuildContext.BUILD_PATH = BUILD_PATH
+ # Work out how much of the filepath is shared by start and path.
+ i = len(os.path.commonprefix([start_list, path_list]))
+
+ rel_list = ['..'] * (len(start_list)-i) + path_list[i:]
+ if not rel_list:
+ return start
+ return os.path.join(*rel_list)
# this is a useful way of debugging some of the rules in waf
@@ -276,3 +281,24 @@ Build.BuildContext.ENFORCE_GROUP_ORDERING = ENFORCE_GROUP_ORDERING
# raise Utils.WafError('object %r was not found in uselib_local (required by add_objects %r)' % (x, self.name))
# y.post()
# self.env.append_unique('INC_PATHS', y.env.INC_PATHS)
+
+
+def recursive_dirlist(dir, relbase):
+ '''recursive directory list'''
+ ret = []
+ for f in os.listdir(dir):
+ f2 = dir + '/' + f
+ if os.path.isdir(f2):
+ ret.extend(recursive_dirlist(f2, relbase))
+ else:
+ ret.append(os_path_relpath(f2, relbase))
+ return ret
+
+
+def mkdir_p(dir):
+ '''like mkdir -p'''
+ if os.path.isdir(dir):
+ return
+ mkdir_p(os.path.dirname(dir))
+ os.mkdir(dir)
+
diff --git a/buildtools/wafsamba/wafsamba.py b/buildtools/wafsamba/wafsamba.py
index e90bd35ef4..5b63c1eef9 100644
--- a/buildtools/wafsamba/wafsamba.py
+++ b/buildtools/wafsamba/wafsamba.py
@@ -1,16 +1,16 @@
# a waf tool to add autoconf-like macros to the configure section
# and for SAMBA_ macros for building libraries, binaries etc
-import Build, os, Logs, sys, Configure, Options, string, Task, Utils, optparse
+import Build, os, Options, Task, Utils
from Configure import conf
from Logs import debug
-from TaskGen import extension
# bring in the other samba modules
from samba_utils import *
from samba_autoconf import *
from samba_patterns import *
from samba_pidl import *
+from samba_errtable import *
from samba_asn1 import *
from samba_autoproto import *
from samba_python import *
@@ -19,25 +19,26 @@ from samba_deps import *
LIB_PATH="shared"
+
#################################################################
# create the samba build environment
@conf
def SAMBA_BUILD_ENV(conf):
- libpath="%s/%s" % (conf.blddir, LIB_PATH)
conf.env['BUILD_DIRECTORY'] = conf.blddir
- if not os.path.exists(libpath):
- os.mkdir(libpath)
+ mkdir_p(os.path.join(conf.blddir, LIB_PATH))
+ mkdir_p(os.path.join(conf.blddir, 'python/samba/dcerpc'))
+
################################################################
# add an init_function to the list for a subsystem
-def ADD_INIT_FUNCTION(bld, subsystem, init_function):
+def ADD_INIT_FUNCTION(bld, subsystem, target, init_function):
if init_function is None:
return
bld.ASSERT(subsystem is not None, "You must specify a subsystem for init_function '%s'" % init_function)
cache = LOCAL_CACHE(bld, 'INIT_FUNCTIONS')
if not subsystem in cache:
cache[subsystem] = []
- cache[subsystem].append(init_function)
+ cache[subsystem].append( { 'TARGET':target, 'INIT_FUNCTION':init_function } )
Build.BuildContext.ADD_INIT_FUNCTION = ADD_INIT_FUNCTION
@@ -50,7 +51,7 @@ def SAMBA_LIBRARY(bld, libname, source,
public_headers=None,
vnum=None,
cflags='',
- output_type=None,
+ external_library=False,
realname=None,
autoproto=None,
group='main',
@@ -73,7 +74,7 @@ def SAMBA_LIBRARY(bld, libname, source,
features = 'cc cshlib symlink_lib',
source = source,
target = libname,
- ccflags = CURRENT_CFLAGS(bld, libname, cflags),
+ samba_cflags = CURRENT_CFLAGS(bld, libname, cflags),
depends_on = depends_on,
samba_deps = TO_LIST(deps),
samba_includes = includes,
@@ -98,10 +99,10 @@ def SAMBA_BINARY(bld, binname, source,
autoproto=None,
use_hostcc=None,
compiler=None,
- group='main',
+ group='binaries',
manpages=None,
local_include=True,
- subsystem=None,
+ subsystem_name=None,
needs_python=False):
if not SET_TARGET_TYPE(bld, binname, 'BINARY'):
@@ -116,39 +117,27 @@ def SAMBA_BINARY(bld, binname, source,
features = features,
source = source,
target = binname,
- ccflags = CURRENT_CFLAGS(bld, binname, cflags),
+ samba_cflags = CURRENT_CFLAGS(bld, binname, cflags),
samba_deps = TO_LIST(deps),
samba_includes = includes,
local_include = local_include,
samba_modules = modules,
top = True,
- samba_subsystem= subsystem
+ samba_subsystem= subsystem_name
)
+ # setup the subsystem_name as an alias for the real
+ # binary name, so it can be found when expanding
+ # subsystem dependencies
+ if subsystem_name is not None:
+ bld.TARGET_ALIAS(subsystem_name, binname)
+
if autoproto is not None:
bld.SAMBA_AUTOPROTO(autoproto, source)
Build.BuildContext.SAMBA_BINARY = SAMBA_BINARY
#################################################################
-# define a Samba ET target
-def SAMBA_ERRTABLE(bld, name, source,
- options='',
- directory=''):
-# print "Skipping ERRTABLE rule for %s with source=%s" % (name, source)
-# return
- if not SET_TARGET_TYPE(bld, name, 'ET'):
- return
- bld.SET_BUILD_GROUP('build_source')
- bld(
- features = 'cc',
- source = source,
- target = name,
- includes = '# #source4/heimdal_build #source4 #lib/replace'
- )
-Build.BuildContext.SAMBA_ERRTABLE = SAMBA_ERRTABLE
-
-#################################################################
# define a Samba module.
def SAMBA_MODULE(bld, modname, source,
deps='',
@@ -159,12 +148,12 @@ def SAMBA_MODULE(bld, modname, source,
autoproto_extra_source='',
aliases=None,
cflags='',
- output_type=None,
+ internal_module=True,
local_include=True,
enabled=True):
- if output_type == 'MERGED_OBJ':
- # treat merged object modules as subsystems for now
+ if internal_module:
+ # treat internal modules as subsystems for now
SAMBA_SUBSYSTEM(bld, modname, source,
deps=deps,
includes=includes,
@@ -177,7 +166,8 @@ def SAMBA_MODULE(bld, modname, source,
# add it to the init_function list
# TODO: we should also create an implicit dependency
# between the subsystem target and this target
- bld.ADD_INIT_FUNCTION(subsystem, init_function)
+ if enabled:
+ bld.ADD_INIT_FUNCTION(subsystem, modname, init_function)
return
if not enabled:
@@ -193,7 +183,7 @@ def SAMBA_MODULE(bld, modname, source,
return
- bld.ADD_INIT_FUNCTION(subsystem, init_function)
+ bld.ADD_INIT_FUNCTION(subsystem, modname, init_function)
if subsystem is not None:
deps += ' ' + subsystem
@@ -203,7 +193,7 @@ def SAMBA_MODULE(bld, modname, source,
features = 'cc',
source = source,
target = modname,
- ccflags = CURRENT_CFLAGS(bld, modname, cflags),
+ samba_cflags = CURRENT_CFLAGS(bld, modname, cflags),
samba_includes = includes,
local_include = local_include,
samba_deps = TO_LIST(deps)
@@ -234,18 +224,14 @@ def SAMBA_SUBSYSTEM(bld, modname, source,
depends_on='',
local_include=True,
local_include_first=True,
- enabled=True):
+ subsystem_name=None,
+ enabled=True,
+ needs_python=False):
if not enabled:
SET_TARGET_TYPE(bld, modname, 'DISABLED')
return
- # if the caller specifies a config_option, then we create a blank
- # subsystem if that configuration option was found at configure time
- if (config_option is not None) and bld.CONFIG_SET(config_option):
- SET_TARGET_TYPE(bld, modname, 'EMPTY')
- return
-
# remember empty subsystems, so we can strip the dependencies
if (source == '') or (source == []):
SET_TARGET_TYPE(bld, modname, 'EMPTY')
@@ -258,16 +244,21 @@ def SAMBA_SUBSYSTEM(bld, modname, source,
bld.SET_BUILD_GROUP(group)
+ features = 'cc'
+ if needs_python:
+ features += ' pyext'
+
t = bld(
- features = 'cc',
+ features = features,
source = source,
target = modname,
- ccflags = CURRENT_CFLAGS(bld, modname, cflags),
+ samba_cflags = CURRENT_CFLAGS(bld, modname, cflags),
depends_on = depends_on,
samba_deps = TO_LIST(deps),
samba_includes = includes,
local_include = local_include,
- local_include_first = local_include_first
+ local_include_first = local_include_first,
+ samba_subsystem= subsystem_name
)
if heimdal_autoproto is not None:
@@ -334,6 +325,7 @@ def SETUP_BUILD_GROUPS(bld):
bld.add_group('build_source')
bld.add_group('prototypes')
bld.add_group('main')
+ bld.add_group('binaries')
bld.add_group('final')
Build.BuildContext.SETUP_BUILD_GROUPS = SETUP_BUILD_GROUPS
@@ -360,3 +352,91 @@ def h_file(filename):
@conf
def ENABLE_TIMESTAMP_DEPENDENCIES(conf):
Utils.h_file = h_file
+
+
+##############################
+# handle the creation of links for libraries and binaries
+# note that we use a relative symlink path to allow the whole tree
+# to be moved/copied elsewhere without breaking the links
+t = Task.simple_task_type('symlink_lib', 'ln -sf ${LINK_SOURCE} ${LINK_TARGET}',
+ color='PINK', ext_in='.bin')
+t.quiet = True
+
+@feature('symlink_lib')
+@after('apply_link')
+def symlink_lib(self):
+ tsk = self.create_task('symlink_lib', self.link_task.outputs[0])
+
+    # calculate the link target and put it in the environment
+ soext=""
+ vnum = getattr(self, 'vnum', None)
+ if vnum is not None:
+ soext = '.' + vnum.split('.')[0]
+
+ link_target = getattr(self, 'link_name', '')
+ if link_target == '':
+ link_target = '%s/lib%s.so%s' % (LIB_PATH, self.sname, soext)
+
+
+ link_source = os_path_relpath(self.link_task.outputs[0].abspath(self.env),
+ os.path.join(self.env.BUILD_DIRECTORY, link_target))
+
+ tsk.env.LINK_TARGET = link_target
+ tsk.env.LINK_SOURCE = link_source[3:]
+ debug('task_gen: LINK for %s is %s -> %s',
+ self.name, tsk.env.LINK_SOURCE, tsk.env.LINK_TARGET)
+
+# for binaries we need to copy the executable to avoid the rpath changing
+# in the local bin/ directory on install
+t = Task.simple_task_type('copy_bin', 'rm -f ${BIN_TARGET} && cp ${SRC} ${BIN_TARGET}', color='PINK',
+ ext_in='.bin', shell=True)
+t.quiet = True
+
+@feature('copy_bin')
+@after('apply_link')
+def copy_bin(self):
+ if Options.is_install:
+ # we don't want to copy the install binary, as
+ # that has the install rpath, not the build rpath
+ # The rpath of the binaries in bin/default/foo/blah is different
+ # during the install phase, as distros insist on not using rpath in installed binaries
+ return
+ tsk = self.create_task('copy_bin', self.link_task.outputs[0])
+
+ tsk.env.BIN_TARGET = self.target
+ debug('task_gen: BIN_TARGET for %s is %s', self.name, tsk.env.BIN_TARGET)
+
+
+
+
+t = Task.simple_task_type('copy_script', 'ln -sf ${SRC[0].abspath(env)} ${LINK_TARGET}',
+ color='PINK', ext_in='.bin', shell=True)
+t.quiet = True
+
+@feature('copy_script')
+@before('apply_link')
+def copy_script(self):
+ tsk = self.create_task('copy_script', self.allnodes[0])
+ tsk.env.TARGET = self.target
+
+def SAMBA_SCRIPT(bld, name, pattern, installdir, installname=None):
+ '''used to copy scripts from the source tree into the build directory
+ for use by selftest'''
+
+ source = bld.path.ant_glob(pattern)
+
+ bld.SET_BUILD_GROUP('build_source')
+ for s in TO_LIST(source):
+ iname = s
+ if installname != None:
+ iname = installname
+ target = os.path.join(installdir, iname)
+ tgtdir = os.path.dirname(os.path.join(bld.srcnode.abspath(bld.env), '..', target))
+ mkdir_p(tgtdir)
+ t = bld(features='copy_script',
+ source=s,
+ target = target,
+ always=True)
+ t.env.LINK_TARGET = target
+
+Build.BuildContext.SAMBA_SCRIPT = SAMBA_SCRIPT