# a waf tool to add autoconf-like macros to the configure section
# and for SAMBA_ macros for building libraries, binaries etc

import os, sys, re, fnmatch, shlex
from optparse import SUPPRESS_HELP
import Build, Options, Utils, Task, Logs, Configure
from TaskGen import feature, before, after
from Configure import conf, ConfigurationContext
from Logs import debug

# TODO: make this a --option
LIB_PATH="shared"


# sigh, python octal constants are a mess
MODE_644 = int('644', 8)
MODE_755 = int('755', 8)

@conf
def SET_TARGET_TYPE(ctx, target, value):
    '''set the target type of a target'''
    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    if target in cache and cache[target] != 'EMPTY':
        Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (target, ctx.curdir, value, cache[target]))
        sys.exit(1)
    LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value)
    debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.curdir))
    return True


def GET_TARGET_TYPE(ctx, target):
    '''get target type from cache'''
    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    if not target in cache:
        return None
    return cache[target]


def ADD_LD_LIBRARY_PATH(path):
    '''add something to LD_LIBRARY_PATH'''
    if 'LD_LIBRARY_PATH' in os.environ:
        oldpath = os.environ['LD_LIBRARY_PATH']
    else:
        oldpath = ''
    newpath = oldpath.split(':')
    if not path in newpath:
        newpath.append(path)
        os.environ['LD_LIBRARY_PATH'] = ':'.join(newpath)


def needs_private_lib(bld, target):
    '''return True if a target links to a private library'''
    for lib in getattr(target, "final_libs", []):
        t = bld.get_tgen_by_name(lib)
        if t and getattr(t, 'private_library', False):
            return True
    return False


def install_rpath(target):
    '''the rpath value for installation'''
    bld = target.bld
    bld.env['RPATH'] = []
    ret = set()
    if bld.env.RPATH_ON_INSTALL:
        ret.add(bld.EXPAND_VARIABLES(bld.env.LIBDIR))
    if bld.env.RPATH_ON_INSTALL_PRIVATE and needs_private_lib(bld, target):
        ret.add(bld.EXPAND_VARIABLES(bld.env.PRIVATELIBDIR))
    return list(ret)


def build_rpath(bld):
    '''the rpath value for build'''
    rpaths = [os.path.normpath('%s/%s' % (bld.env.BUILD_DIRECTORY, d)) for d in ("shared", "shared/private")]
    bld.env['RPATH'] = []
    if bld.env.RPATH_ON_BUILD:
        return rpaths
    for rpath in rpaths:
        ADD_LD_LIBRARY_PATH(rpath)
    return []


@conf
def LOCAL_CACHE(ctx, name):
    '''return a named build cache dictionary, used to store
       state inside other functions'''
    if name in ctx.env:
        return ctx.env[name]
    ctx.env[name] = {}
    return ctx.env[name]


@conf
def LOCAL_CACHE_SET(ctx, cachename, key, value):
    '''set a value in a local cache'''
    cache = LOCAL_CACHE(ctx, cachename)
    cache[key] = value


@conf
def ASSERT(ctx, expression, msg):
    '''a build assert call'''
    if not expression:
        raise Utils.WafError("ERROR: %s\n" % msg)
Build.BuildContext.ASSERT = ASSERT


def SUBDIR(bld, subdir, list):
    '''return a space-separated string of files, each prefixed with the subdir name'''
    ret = ''
    for l in TO_LIST(list):
        ret = ret + os.path.normpath(os.path.join(subdir, l)) + ' '
    return ret
Build.BuildContext.SUBDIR = SUBDIR
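# Illustrative usage (hypothetical file names), e.g. from a wscript_build:
#   bld.SUBDIR('lib/util', 'blocking.c signals.c')
#   -> 'lib/util/blocking.c lib/util/signals.c '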


def dict_concat(d1, d2):
    '''merge entries of d2 into d1, without overwriting existing keys'''
    for t in d2:
        if t not in d1:
            d1[t] = d2[t]

def ADD_COMMAND(opt, name, function):
    '''add a new top level command to waf'''
    Utils.g_module.__dict__[name] = function
    opt.name = function
Options.Handler.ADD_COMMAND = ADD_COMMAND


@feature('c', 'cc', 'cshlib', 'cprogram')
@before('apply_core','exec_rule')
def process_depends_on(self):
    '''The new depends_on attribute for build rules
       allows us to specify a dependency on output from
       a source generation rule'''
    if getattr(self, 'depends_on', None):
        lst = self.to_list(self.depends_on)
        for x in lst:
            y = self.bld.get_tgen_by_name(x)
            self.bld.ASSERT(y is not None, "Failed to find dependency %s of %s" % (x, self.name))
            y.post()
            if getattr(y, 'more_includes', None):
                self.includes += " " + y.more_includes


os_path_relpath = getattr(os.path, 'relpath', None)
if os_path_relpath is None:
    # Python < 2.6 does not have os.path.relpath, provide a replacement
    # (imported from Python2.6.5~rc2)
    def os_path_relpath(path, start):
        """Return a relative version of a path"""
        start_list = os.path.abspath(start).split("/")
        path_list = os.path.abspath(path).split("/")

        # Work out how much of the filepath is shared by start and path.
        i = len(os.path.commonprefix([start_list, path_list]))

        rel_list = ['..'] * (len(start_list)-i) + path_list[i:]
        if not rel_list:
            return start
        return os.path.join(*rel_list)


def unique_list(seq):
    '''return a uniquified list in the same order as the existing list'''
    seen = {}
    result = []
    for item in seq:
        if item in seen: continue
        seen[item] = True
        result.append(item)
    return result


def TO_LIST(str, delimiter=None):
    '''split a string into a list, preserving quoted substrings; existing lists are returned as a copy'''
    if str is None:
        return []
    if isinstance(str, list):
        # we need to return a new independent list...
        return list(str)
    if len(str) == 0:
        return []
    lst = str.split(delimiter)
    # the string may have had quotes in it, now we
    # check if we did have quotes, and use the slower shlex
    # if we need to
    for e in lst:
        if e[0] == '"':
            return shlex.split(str)
    return lst
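# Illustrative behaviour:
#   TO_LIST('a "b c" d')  -> ['a', 'b c', 'd']   (quoted items kept together)
#   TO_LIST(['x', 'y'])   -> a new ['x', 'y']    (lists are copied, not shared)
#   TO_LIST(None)         -> []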


def subst_vars_error(string, env):
    '''substitute vars, throw an error if a variable is not defined'''
    lst = re.split(r'(\$\{\w+\})', string)
    out = []
    for v in lst:
        if re.match(r'\$\{\w+\}', v):
            vname = v[2:-1]
            if not vname in env:
                raise KeyError("Failed to find variable %s in %s" % (vname, string))
            v = env[vname]
        out.append(v)
    return ''.join(out)
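# Illustrative behaviour (assuming env['PREFIX'] is set to '/usr/local'):
#   subst_vars_error('${PREFIX}/bin', env) -> '/usr/local/bin'
#   an undefined variable such as '${NOSUCHVAR}' raises KeyError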


@conf
def SUBST_ENV_VAR(ctx, varname):
    '''Substitute an environment variable for any embedded variables'''
    return subst_vars_error(ctx.env[varname], ctx.env)
Build.BuildContext.SUBST_ENV_VAR = SUBST_ENV_VAR


def ENFORCE_GROUP_ORDERING(bld):
    '''enforce group ordering for the project. This
       makes the group ordering apply only when you specify
       a target with --target'''
    if Options.options.compile_targets:
        @feature('*')
        @before('exec_rule', 'apply_core', 'collect')
        def force_previous_groups(self):
            if getattr(self.bld, 'enforced_group_ordering', False):
                return
            self.bld.enforced_group_ordering = True

            def group_name(g):
                tm = self.bld.task_manager
                return [x for x in tm.groups_names if id(tm.groups_names[x]) == id(g)][0]

            my_id = id(self)
            bld = self.bld
            stop = None
            for g in bld.task_manager.groups:
                for t in g.tasks_gen:
                    if id(t) == my_id:
                        stop = id(g)
                        debug('group: Forcing up to group %s for target %s',
                              group_name(g), self.name or self.target)
                        break
                if stop is not None:
                    break
            if stop is None:
                return

            for i in xrange(len(bld.task_manager.groups)):
                g = bld.task_manager.groups[i]
                bld.task_manager.current_group = i
                if id(g) == stop:
                    break
                debug('group: Forcing group %s', group_name(g))
                for t in g.tasks_gen:
                    if not getattr(t, 'forced_groups', False):
                        debug('group: Posting %s', t.name or t.target)
                        t.forced_groups = True
                        t.post()
Build.BuildContext.ENFORCE_GROUP_ORDERING = ENFORCE_GROUP_ORDERING


def recursive_dirlist(dir, relbase, pattern=None):
    '''recursive directory list'''
    ret = []
    for f in os.listdir(dir):
        f2 = dir + '/' + f
        if os.path.isdir(f2):
            ret.extend(recursive_dirlist(f2, relbase, pattern=pattern))
        else:
            if pattern and not fnmatch.fnmatch(f, pattern):
                continue
            ret.append(os_path_relpath(f2, relbase))
    return ret


def mkdir_p(dir):
    '''like mkdir -p'''
    if not dir:
        return
    if dir.endswith("/"):
        mkdir_p(dir[:-1])
        return
    if os.path.isdir(dir):
        return
    mkdir_p(os.path.dirname(dir))
    os.mkdir(dir)


def SUBST_VARS_RECURSIVE(string, env):
    '''recursively expand variables'''
    if string is None:
        return string
    limit=100
    while (string.find('${') != -1 and limit > 0):
        string = subst_vars_error(string, env)
        limit -= 1
    return string


@conf
def EXPAND_VARIABLES(ctx, varstr, vars=None):
    '''expand variables from a user supplied dictionary

    This is most useful when you pass vars=locals() to expand
    all your local variables in strings
    '''

    if isinstance(varstr, list):
        ret = []
        for s in varstr:
            ret.append(EXPAND_VARIABLES(ctx, s, vars=vars))
        return ret

    if not isinstance(varstr, str):
        return varstr

    import Environment
    env = Environment.Environment()
    ret = varstr
    # substitute on the user supplied dict if available
    if vars is not None:
        for v in vars.keys():
            env[v] = vars[v]
        ret = SUBST_VARS_RECURSIVE(ret, env)

    # if anything left, subst on the environment as well
    if ret.find('${') != -1:
        ret = SUBST_VARS_RECURSIVE(ret, ctx.env)
    # make sure there is nothing left. Also check for the common
    # typo of $( instead of ${
    if ret.find('${') != -1 or ret.find('$(') != -1:
        Logs.error('Failed to substitute all variables in varstr=%s' % ret)
        sys.exit(1)
    return ret
Build.BuildContext.EXPAND_VARIABLES = EXPAND_VARIABLES
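# Illustrative usage (hypothetical variable names):
#   mydir = 'foo'
#   bld.EXPAND_VARIABLES('${mydir}/wscript', vars=locals()) -> 'foo/wscript'
# anything not found in vars falls back to the build environment, e.g.
#   bld.EXPAND_VARIABLES('${LIBDIR}')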


def RUN_COMMAND(cmd,
                env=None,
                shell=False):
    '''run an external command, return its exit code or (negated) signal number'''
    if env:
        cmd = SUBST_VARS_RECURSIVE(cmd, env)

    status = os.system(cmd)
    if os.WIFEXITED(status):
        return os.WEXITSTATUS(status)
    if os.WIFSIGNALED(status):
        return -os.WTERMSIG(status)
    Logs.error("Unknown exit reason %d for command: %s" % (status, cmd))
    return -1


def RUN_PYTHON_TESTS(testfiles, pythonpath=None, extra_env=None):
    env = LOAD_ENVIRONMENT()
    if pythonpath is None:
        pythonpath = os.path.join(Utils.g_module.blddir, 'python')
    result = 0
    for interp in env.python_interpreters:
        for testfile in testfiles:
            cmd = "PYTHONPATH=%s %s %s" % (pythonpath, interp, testfile)
            if extra_env:
                for key, value in extra_env.items():
                    cmd = "%s=%s %s" % (key, value, cmd)
            print('Running Python test with %s: %s' % (interp, testfile))
            ret = RUN_COMMAND(cmd)
            if ret:
                print('Python test failed: %s' % cmd)
                result = ret
    return result


# make sure we have md5. some systems don't have it
try:
    from hashlib import md5
    # Even if hashlib.md5 exists, it may be unusable.
    # Try to use MD5 function. In FIPS mode this will cause an exception
    # and we'll get to the replacement code
    foo = md5('abcd')
except:
    try:
        import md5
        # repeat the same check here, mere success of import is not enough.
        # Try to use MD5 function. In FIPS mode this will cause an exception
        foo = md5.md5('abcd')
    except:
        import Constants
        Constants.SIG_NIL = hash('abcd')
        class replace_md5(object):
            def __init__(self):
                self.val = None
            def update(self, val):
                self.val = hash((self.val, val))
            def digest(self):
                return str(self.val)
            def hexdigest(self):
                return self.digest().encode('hex')
        def replace_h_file(filename):
            f = open(filename, 'rb')
            m = replace_md5()
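            # read in 100k chunks; 'filename' is reused as the chunk buffer,
            # so the loop ends on the empty read at EOF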
            while (filename):
                filename = f.read(100000)
                m.update(filename)
            f.close()
            return m.digest()
        Utils.md5 = replace_md5
        Task.md5 = replace_md5
        Utils.h_file = replace_h_file


def LOAD_ENVIRONMENT():
    '''load the configuration environment, allowing access to env vars
       from new commands'''
    import Environment
    env = Environment.Environment()
    try:
        env.load('.lock-wscript')
        env.load(env.blddir + '/c4che/default.cache.py')
    except:
        pass
    return env


def IS_NEWER(bld, file1, file2):
    '''return True if file1 is newer than file2'''
    t1 = os.stat(os.path.join(bld.curdir, file1)).st_mtime
    t2 = os.stat(os.path.join(bld.curdir, file2)).st_mtime
    return t1 > t2
Build.BuildContext.IS_NEWER = IS_NEWER


@conf
def RECURSE(ctx, directory):
    '''recurse into a directory, relative to the curdir or top level'''
    try:
        visited_dirs = ctx.visited_dirs
    except:
        visited_dirs = ctx.visited_dirs = set()
    d = os.path.join(ctx.curdir, directory)
    if os.path.exists(d):
        abspath = os.path.abspath(d)
    else:
        abspath = os.path.abspath(os.path.join(Utils.g_module.srcdir, directory))
    ctxclass = ctx.__class__.__name__
    key = ctxclass + ':' + abspath
    if key in visited_dirs:
        # already done it
        return
    visited_dirs.add(key)
    relpath = os_path_relpath(abspath, ctx.curdir)
    if ctxclass == 'Handler':
        return ctx.sub_options(relpath)
    if ctxclass == 'ConfigurationContext':
        return ctx.sub_config(relpath)
    if ctxclass == 'BuildContext':
        return ctx.add_subdirs(relpath)
    Logs.error('Unknown RECURSE context class %s' % ctxclass)
    raise Utils.WafError('Unknown RECURSE context class %s' % ctxclass)
Options.Handler.RECURSE = RECURSE
Build.BuildContext.RECURSE = RECURSE


def CHECK_MAKEFLAGS(bld):
    '''check for the MAKEFLAGS environment variable; if we are being
    called from a Makefile, try to honor a few make command line flags'''
    if not 'WAF_MAKE' in os.environ:
        return
    makeflags = os.environ.get('MAKEFLAGS')
    if makeflags is None:
        return
    jobs_set = False
    # we need to use shlex.split to cope with the escaping of spaces
    # in makeflags
    for opt in shlex.split(makeflags):
        # options can come either as -x or as x
        if opt[0:2] == 'V=':
            Options.options.verbose = Logs.verbose = int(opt[2:])
            if Logs.verbose > 0:
                Logs.zones = ['runner']
            if Logs.verbose > 2:
                Logs.zones = ['*']
        elif opt[0].isupper() and opt.find('=') != -1:
            # this allows us to set waf options on the make command line
            # for example, if you do "make FOO=blah", then we set the
            # option 'FOO' in Options.options, to blah. If you look in wafsamba/wscript
            # you will see that the command line accessible options have their dest=
            # set to uppercase, to allow for passing of options from make in this way
            # this is also how "make test TESTS=testpattern" works, and
            # "make VERBOSE=1" as well as things like "make SYMBOLCHECK=1"
            loc = opt.find('=')
            setattr(Options.options, opt[0:loc], opt[loc+1:])
        elif opt[0] != '-':
            for v in opt:
                if v == 'j':
                    jobs_set = True
                elif v == 'k':
                    Options.options.keep = True
        elif opt == '-j':
            jobs_set = True
        elif opt == '-k':
            Options.options.keep = True
    if not jobs_set:
        # default to one job
        Options.options.jobs = 1

Build.BuildContext.CHECK_MAKEFLAGS = CHECK_MAKEFLAGS
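# Illustrative effect (assuming waf is driven from make with WAF_MAKE set):
#   make -j -k V=1 TESTS=mytest
# sets Options.options.keep = True, verbose level 1, Options.options.TESTS
# to 'mytest', and leaves the job count alone because -j was given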

option_groups = {}

def option_group(opt, name):
    '''find or create an option group'''
    global option_groups
    if name in option_groups:
        return option_groups[name]
    gr = opt.add_option_group(name)
    option_groups[name] = gr
    return gr
Options.Handler.option_group = option_group


def save_file(filename, contents, create_dir=False):
    '''save data to a file'''
    if create_dir:
        mkdir_p(os.path.dirname(filename))
    try:
        f = open(filename, 'w')
        f.write(contents)
        f.close()
    except:
        return False
    return True


def load_file(filename):
    '''return contents of a file'''
    try:
        f = open(filename, 'r')
        r = f.read()
        f.close()
    except:
        return None
    return r


def reconfigure(ctx):
    '''rerun configure if necessary'''
    import Configure, samba_wildcard, Scripting
    if not os.path.exists(".lock-wscript"):
        raise Utils.WafError('configure has not been run')
    bld = samba_wildcard.fake_build_environment()
    Configure.autoconfig = True
    Scripting.check_configured(bld)


def map_shlib_extension(ctx, name, python=False):
    '''map a filename with a shared library extension of .so to the real shlib name'''
    if name is None:
        return None
    if name[-1:].isdigit():
        # some libraries have specified versions in the wscript rule
        return name
    (root1, ext1) = os.path.splitext(name)
    if python:
        return ctx.env.pyext_PATTERN % root1
    else:
        (root2, ext2) = os.path.splitext(ctx.env.shlib_PATTERN)
    return root1+ext2
Build.BuildContext.map_shlib_extension = map_shlib_extension

def apply_pattern(filename, pattern):
    '''apply a filename pattern to a filename that may have a directory component'''
    dirname = os.path.dirname(filename)
    if not dirname:
        return pattern % filename
    basename = os.path.basename(filename)
    return os.path.join(dirname, pattern % basename)
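# Illustrative behaviour (assuming a 'lib%s.so' pattern):
#   apply_pattern('foo', 'lib%s.so')        -> 'libfoo.so'
#   apply_pattern('subdir/foo', 'lib%s.so') -> 'subdir/libfoo.so'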

def make_libname(ctx, name, nolibprefix=False, version=None, python=False):
    """make a library filename
         Options:
              nolibprefix: don't include the lib prefix
              version    : add a version number
              python     : if we should use python module name conventions"""

    if python:
        libname = apply_pattern(name, ctx.env.pyext_PATTERN)
    else:
        libname = apply_pattern(name, ctx.env.shlib_PATTERN)
    if nolibprefix and libname[0:3] == 'lib':
        libname = libname[3:]
    if version:
        if version[0] == '.':
            version = version[1:]
        (root, ext) = os.path.splitext(libname)
        if ext == ".dylib":
            # special case - on .dylib the version goes before the extension
            libname = "%s.%s%s" % (root, version, ext)
        else:
            libname = "%s%s.%s" % (root, ext, version)
    return libname
Build.BuildContext.make_libname = make_libname
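# Illustrative results, assuming shlib_PATTERN is 'lib%s.so' and
# pyext_PATTERN is '%s.so':
#   bld.make_libname('talloc')                    -> 'libtalloc.so'
#   bld.make_libname('talloc', version='2')       -> 'libtalloc.so.2'
#   bld.make_libname('talloc', nolibprefix=True)  -> 'talloc.so'
#   bld.make_libname('pytalloc', python=True)     -> 'pytalloc.so'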


def get_tgt_list(bld):
    '''return a list of build objects for samba'''

    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')

    # build a list of task generators we are interested in
    tgt_list = []
    for tgt in targets:
        type = targets[tgt]
        if not type in ['SUBSYSTEM', 'MODULE', 'BINARY', 'LIBRARY', 'ASN1', 'PYTHON']:
            continue
        t = bld.get_tgen_by_name(tgt)
        if t is None:
            Logs.error("Target %s of type %s has no task generator" % (tgt, type))
            sys.exit(1)
        tgt_list.append(t)
    return tgt_list

from Constants import WSCRIPT_FILE
def PROCESS_SEPARATE_RULE(self, rule):
    '''cause waf to process an additional script based on 'rule'.
       You should have a file named wscript_<stage>_<rule> in the current
       directory, where stage is either 'configure' or 'build'
    '''
    stage = ''
    if isinstance(self, Configure.ConfigurationContext):
        stage = 'configure'
    elif isinstance(self, Build.BuildContext):
        stage = 'build'
    file_path = os.path.join(self.curdir, WSCRIPT_FILE+'_'+stage+'_'+rule)
    txt = load_file(file_path)
    if txt:
        dc = {'ctx': self}
        if getattr(self.__class__, 'pre_recurse', None):
            dc = self.pre_recurse(txt, file_path, self.curdir)
        exec(compile(txt, file_path, 'exec'), dc)
        if getattr(self.__class__, 'post_recurse', None):
            dc = self.post_recurse(txt, file_path, self.curdir)

Build.BuildContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
ConfigurationContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE

def AD_DC_BUILD_IS_ENABLED(self):
    if self.CONFIG_SET('AD_DC_BUILD_IS_ENABLED'):
        return True
    return False

Build.BuildContext.AD_DC_BUILD_IS_ENABLED = AD_DC_BUILD_IS_ENABLED

@feature('cprogram', 'cshlib', 'cstaticlib')
@after('apply_lib_vars')
@before('apply_obj_vars')
def samba_before_apply_obj_vars(self):
    """before apply_obj_vars for uselib, this removes the standard paths"""

    def is_standard_libpath(env, path):
        for _path in env.STANDARD_LIBPATH:
            if _path == os.path.normpath(path):
                return True
        return False

    v = self.env

    for i in v['RPATH']:
        if is_standard_libpath(v, i):
            v['RPATH'].remove(i)

    for i in v['LIBPATH']:
        if is_standard_libpath(v, i):
            v['LIBPATH'].remove(i)

def samba_add_onoff_option(opt, option, help=(), dest=None, default=True,
                           with_name="with", without_name="without"):
    if default is None:
        default_str = "auto"
    elif default is True:
        default_str = "yes"
    elif default is False:
        default_str = "no"
    else:
        default_str = str(default)

    if help == ():
        help = ("Build with %s support (default=%s)" % (option, default_str))
    if dest is None:
        dest = "with_%s" % option.replace('-', '_')

    with_val = "--%s-%s" % (with_name, option)
    without_val = "--%s-%s" % (without_name, option)

    #FIXME: This is broken and will always default to "default" no matter if
    # --with or --without is chosen.
    opt.add_option(with_val, help=help, action="store_true", dest=dest,
                   default=default)
    opt.add_option(without_val, help=SUPPRESS_HELP, action="store_false",
                   dest=dest)
Options.Handler.samba_add_onoff_option = samba_add_onoff_option
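# Illustrative usage from an options() handler (hypothetical option name):
#   opt.samba_add_onoff_option('ads', default=True)
# adds --with-ads / --without-ads, storing the result in
# Options.options.with_ads (see the FIXME above about the default handling)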