# a waf tool to add autoconf-like macros to the configure section
# and for SAMBA_ macros for building libraries, binaries etc
import Build, os, sys, Options, Utils, Task
from TaskGen import feature, before
from Configure import conf
from Logs import debug
import shlex
# TODO: make this a --option
LIB_PATH="shared"
##########################################################
# create a node with a new name, based on an existing node
def NEW_NODE(node, name):
    ret = node.parent.find_or_declare([name])
    ASSERT(node, ret is not None, "Unable to find new target with name '%s' from '%s'" % (
        name, node.name))
    return ret
#############################################################
# set the type of a build target in the TARGET_TYPE cache
# return False if it is already set
@conf
def SET_TARGET_TYPE(ctx, target, value):
    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    if target in cache:
        ASSERT(ctx, cache[target] == value,
               "Target '%s' re-defined as %s - was %s" % (target, value, cache[target]))
        debug("task_gen: Skipping duplicate target %s (curdir=%s)" % (target, ctx.curdir))
        return False
    LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value)
    debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.curdir))
    return True
def GET_TARGET_TYPE(ctx, target):
    '''get target type from cache'''
    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    if target not in cache:
        return None
    return cache[target]
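# Illustrative usage sketch (the target name and type string below are
# hypothetical; ctx is a configure/build context):
#   SET_TARGET_TYPE(ctx, 'mylib', 'LIBRARY')   # -> True on first definition
#   SET_TARGET_TYPE(ctx, 'mylib', 'LIBRARY')   # -> False, duplicate is skipped
#   GET_TARGET_TYPE(ctx, 'mylib')              # -> 'LIBRARY'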
######################################################
# this is used as a decorator to make functions only
# run once. Based on the idea from
# http://stackoverflow.com/questions/815110/is-there-a-decorator-to-simply-cache-function-return-values
runonce_ret = {}
def runonce(function):
    def wrapper(*args):
        # key the cache on (function, args) so that different decorated
        # functions called with the same arguments do not share results
        key = (function, args)
        if key in runonce_ret:
            return runonce_ret[key]
        ret = function(*args)
        runonce_ret[key] = ret
        return ret
    return wrapper
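# Illustrative usage sketch (CHECK_SOMETHING is a hypothetical function):
#   @runonce
#   def CHECK_SOMETHING(conf):
#       ...   # an expensive configure check
# repeated calls with the same arguments return the cached result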
def install_rpath(bld):
    '''the rpath value for installation'''
    bld.env['RPATH'] = []
    bld.env['RPATH_ST'] = []
    if bld.env.RPATH_ON_INSTALL:
        return ['-Wl,-rpath=%s/lib' % bld.env.PREFIX]
    return []
def build_rpath(bld):
    '''the rpath value for build'''
    rpath = os.path.normpath('%s/%s' % (bld.env['BUILD_DIRECTORY'], LIB_PATH))
    bld.env['RPATH'] = []
    bld.env['RPATH_ST'] = []
    if bld.env.RPATH_ON_BUILD:
        return ['-Wl,-rpath=%s' % rpath]
    os.environ['LD_LIBRARY_PATH'] = rpath
    return []
#############################################################
# return a named build cache dictionary, used to store
# state between the functions in this module
@conf
def LOCAL_CACHE(ctx, name):
    if name in ctx.env:
        return ctx.env[name]
    ctx.env[name] = {}
    return ctx.env[name]
#############################################################
# set a value in a local cache
@conf
def LOCAL_CACHE_SET(ctx, cachename, key, value):
    cache = LOCAL_CACHE(ctx, cachename)
    cache[key] = value
#############################################################
# a build assert call
@conf
def ASSERT(ctx, expression, msg):
    if not expression:
        sys.stderr.write("ERROR: %s\n" % msg)
        raise AssertionError
Build.BuildContext.ASSERT = ASSERT
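# Illustrative usage sketch of the cache helpers (the cache and key names
# below are hypothetical):
#   LOCAL_CACHE_SET(ctx, 'MY_CACHE', 'key', 'value')
#   LOCAL_CACHE(ctx, 'MY_CACHE')['key']   # -> 'value'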
################################################################
# create a space-separated list of files, pre-pending each with
# a subdir name
def SUBDIR(bld, subdir, list):
    ret = ''
    for l in TO_LIST(list):
        ret = ret + os.path.normpath(os.path.join(subdir, l)) + ' '
    return ret
Build.BuildContext.SUBDIR = SUBDIR
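# Illustrative usage sketch (the subdir and file names are hypothetical):
#   bld.SUBDIR('lib/util', 'util.c util_file.c')
#   # -> 'lib/util/util.c lib/util/util_file.c '   (note the trailing space)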
#######################################################
# copy the entries of d2 into d1, without overwriting any
# keys that already exist in d1 (roughly d1 += d2)
def dict_concat(d1, d2):
    for t in d2:
        if t not in d1:
            d1[t] = d2[t]
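# Example behaviour (illustrative):
#   d1 = {'a': 1}
#   dict_concat(d1, {'a': 2, 'b': 3})
#   # d1 is now {'a': 1, 'b': 3} - existing keys are not overwritten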
############################################################
# this overrides the 'waf -v' debug output to be in a nice
# unix like format instead of a python list.
# Thanks to ita on #waf for this
def exec_command(self, cmd, **kw):
    import Utils, Logs
    _cmd = cmd
    if isinstance(cmd, list):
        _cmd = ' '.join(cmd)
    debug('runner: %s' % _cmd)
    if self.log:
        self.log.write('%s\n' % cmd)
        kw['log'] = self.log
    try:
        if not kw.get('cwd', None):
            kw['cwd'] = self.cwd
    except AttributeError:
        self.cwd = kw['cwd'] = self.bldnode.abspath()
    return Utils.exec_command(cmd, **kw)
Build.BuildContext.exec_command = exec_command
##########################################################
# add a new top level command to waf
def ADD_COMMAND(opt, name, function):
    Utils.g_module.__dict__[name] = function
    opt.name = function
Options.Handler.ADD_COMMAND = ADD_COMMAND
@feature('cc', 'cshlib', 'cprogram')
@before('apply_core', 'exec_rule')
def process_depends_on(self):
    '''The new depends_on attribute for build rules
       allows us to specify a dependency on the output of
       a source generation rule'''
    if getattr(self, 'depends_on', None):
        lst = self.to_list(self.depends_on)
        for x in lst:
            y = self.bld.name_to_obj(x, self.env)
            self.bld.ASSERT(y is not None, "Failed to find dependency %s of %s" % (x, self.name))
            y.post()
            if getattr(y, 'more_includes', None):
                self.includes += " " + y.more_includes
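# Illustrative wscript usage sketch (the rule, file and target names below
# are hypothetical):
#   bld(rule='./gen_header.sh > ${TGT}', target='gen.h', name='GEN_H')
#   bld(features='cc cprogram', source='main.c', target='prog',
#       depends_on='GEN_H')
# the GEN_H generator is posted before 'prog' is compiled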
#@feature('cprogram', 'cc', 'cshlib')
#@before('apply_core')
#def process_generated_dependencies(self):
#    '''Ensure that any dependent source generation happens
#       before any task that requires the output'''
#    if getattr(self, 'depends_on', None):
#        lst = self.to_list(self.depends_on)
#        for x in lst:
#            y = self.bld.name_to_obj(x, self.env)
#            y.post()
#import TaskGen, Task
#
#old_post_run = Task.Task.post_run
#def new_post_run(self):
#    self.cached = True
#    return old_post_run(self)
#
#for y in ['cc', 'cxx']:
#    TaskGen.classes[y].post_run = new_post_run
def ENABLE_MAGIC_ORDERING(bld):
    '''enable automatic build order constraint calculation
       see page 35 of the waf book'''
    print "NOT Enabling magic ordering"
    #bld.use_the_magic()
Build.BuildContext.ENABLE_MAGIC_ORDERING = ENABLE_MAGIC_ORDERING
os_path_relpath = getattr(os.path, 'relpath', None)
if os_path_relpath is None:
    # Python < 2.6 does not have os.path.relpath, provide a replacement
    # (imported from Python2.6.5~rc2)
    def os_path_relpath(path, start):
        """Return a relative version of a path"""
        start_list = os.path.abspath(start).split("/")
        path_list = os.path.abspath(path).split("/")
        # Work out how much of the filepath is shared by start and path.
        i = len(os.path.commonprefix([start_list, path_list]))
        rel_list = ['..'] * (len(start_list)-i) + path_list[i:]
        if not rel_list:
            return start
        return os.path.join(*rel_list)
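# Example behaviour (illustrative):
#   os_path_relpath('/a/b/c/d', '/a/b')   # -> 'c/d'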
# this is a useful way of debugging some of the rules in waf
from TaskGen import feature, after
@feature('dbg')
@after('apply_core', 'apply_obj_vars_cc')
def dbg(self):
    if self.target == 'HEIMDAL_HEIM_ASN1':
        print "@@@@@@@@@@@@@@2", self.includes, self.env._CCINCFLAGS
def unique_list(seq):
    '''return a uniquified list in the same order as the existing list'''
    seen = {}
    result = []
    for item in seq:
        if item in seen:
            continue
        seen[item] = True
        result.append(item)
    return result
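# Example behaviour (illustrative):
#   unique_list(['a', 'b', 'a', 'c'])   # -> ['a', 'b', 'c']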
def TO_LIST(str):
    '''Split a string into a list, preserving quoted substrings and
       passing existing lists through unchanged'''
    if str is None:
        return []
    if isinstance(str, list):
        return str
    lst = str.split()
    # if the string contains quotes, fall back to the slower
    # shlex based split so that quoted substrings stay intact
    for e in lst:
        if e[0] == '"':
            return shlex.split(str)
    return lst
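# Example behaviour (illustrative):
#   TO_LIST('a b c')        # -> ['a', 'b', 'c']
#   TO_LIST('"a b" c')      # -> ['a b', 'c']   (quoted strings preserved)
#   TO_LIST(['a', 'b'])     # -> ['a', 'b']     (lists passed through)
#   TO_LIST(None)           # -> []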
@conf
def SUBST_ENV_VAR(ctx, varname):
    '''expand any embedded build variables in the named environment variable'''
    return Utils.subst_vars(ctx.env[varname], ctx.env)
Build.BuildContext.SUBST_ENV_VAR = SUBST_ENV_VAR
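# Illustrative usage sketch (the variable name and value are hypothetical):
#   bld.env['MYDIR'] = '${PREFIX}/share/mydir'
#   bld.SUBST_ENV_VAR('MYDIR')   # -> e.g. '/usr/local/share/mydir'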
def ENFORCE_GROUP_ORDERING(bld):
    '''enforce group ordering for the project. This
       makes the group ordering apply only when you specify
       a target with --target'''
    if Options.options.compile_targets:
        @feature('*')
        def force_previous_groups(self):
            my_id = id(self)
            bld = self.bld
            stop = None
            # find the group that contains this task generator
            for g in bld.task_manager.groups:
                for t in g.tasks_gen:
                    if id(t) == my_id:
                        stop = id(g)
                        break
            if stop is None:
                return
            # post every task generator in the groups before it
            for g in bld.task_manager.groups:
                if id(g) == stop:
                    break
                for t in g.tasks_gen:
                    t.post()
Build.BuildContext.ENFORCE_GROUP_ORDERING = ENFORCE_GROUP_ORDERING
# @feature('cc')
# @before('apply_lib_vars')
# def process_objects(self):
#     if getattr(self, 'add_objects', None):
#         lst = self.to_list(self.add_objects)
#         for x in lst:
#             y = self.name_to_obj(x)
#             if not y:
#                 raise Utils.WafError('object %r was not found in uselib_local (required by add_objects %r)' % (x, self.name))
#             y.post()
#             self.env.append_unique('INC_PATHS', y.env.INC_PATHS)
def recursive_dirlist(dir, relbase):
    '''recursive directory list'''
    ret = []
    for f in os.listdir(dir):
        f2 = dir + '/' + f
        if os.path.isdir(f2):
            ret.extend(recursive_dirlist(f2, relbase))
        else:
            ret.append(os_path_relpath(f2, relbase))
    return ret
def mkdir_p(dir):
    '''like mkdir -p'''
    if not dir:
        # guard against infinite recursion on relative paths, where
        # os.path.dirname() eventually returns an empty string
        return
    if os.path.isdir(dir):
        return
    mkdir_p(os.path.dirname(dir))
    os.mkdir(dir)
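# Illustrative usage sketch (the path is hypothetical):
#   mkdir_p('/tmp/some/nested/dir')   # creates any missing parent directories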
def SUBST_VARS_RECURSIVE(string, env):
    '''recursively expand variables'''
    if string is None:
        return string
    limit = 100
    while (string.find('${') != -1 and limit > 0):
        string = Utils.subst_vars(string, env)
        limit -= 1
    return string
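# Illustrative behaviour sketch (the variable names and values are hypothetical):
#   env.DATADIR = '${PREFIX}/share'
#   SUBST_VARS_RECURSIVE('${DATADIR}/samba', env)
#   # first pass -> '${PREFIX}/share/samba', next pass -> '<prefix>/share/samba'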
def RUN_COMMAND(cmd,
                env=None,
                shell=False):
    '''run an external command, returning the exit code, or the negated
       signal number if the command was killed by a signal'''
    if env:
        cmd = SUBST_VARS_RECURSIVE(cmd, env)
    status = os.system(cmd)
    if os.WIFEXITED(status):
        return os.WEXITSTATUS(status)
    if os.WIFSIGNALED(status):
        return - os.WTERMSIG(status)
    print "Unknown exit reason %d for command: %s" % (status, cmd)
    return -1
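# Illustrative usage sketch (the command and ${PYTHON} variable are hypothetical):
#   ret = RUN_COMMAND('${PYTHON} ./selftest.py', env=bld.env)
#   # ret is the exit status, or the negated signal number if killed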
# make sure we have md5. some systems don't have it
try:
    from hashlib import md5
except:
    try:
        import md5
    except:
        # no md5 module at all - fall back to python's hash() so that
        # waf can still compute (weaker) signatures
        import Constants
        Constants.SIG_NIL = hash('abcd')
        class replace_md5(object):
            def __init__(self):
                self.val = None
            def update(self, val):
                self.val = hash((self.val, val))
            def digest(self):
                return str(self.val)
            def hexdigest(self):
                return self.digest().encode('hex')
        def replace_h_file(filename):
            f = open(filename, 'rb')
            m = replace_md5()
            # note: 'filename' is reused as the read buffer; the loop
            # ends when read() returns an empty string
            while (filename):
                filename = f.read(100000)
                m.update(filename)
            f.close()
            return m.digest()
        Utils.md5 = replace_md5
        Task.md5 = replace_md5
        Utils.h_file = replace_h_file