summaryrefslogtreecommitdiff
path: root/third_party/waf/waflib/extras
diff options
context:
space:
mode:
Diffstat (limited to 'third_party/waf/waflib/extras')
-rw-r--r--third_party/waf/waflib/extras/add_objects.py6
-rw-r--r--third_party/waf/waflib/extras/batched_cc.py37
-rw-r--r--third_party/waf/waflib/extras/biber.py62
-rw-r--r--third_party/waf/waflib/extras/bjam.py132
-rw-r--r--third_party/waf/waflib/extras/blender.py112
-rw-r--r--third_party/waf/waflib/extras/boo.py85
-rw-r--r--third_party/waf/waflib/extras/boost.py456
-rw-r--r--third_party/waf/waflib/extras/build_file_tracker.py23
-rw-r--r--third_party/waf/waflib/extras/build_logs.py10
-rw-r--r--third_party/waf/waflib/extras/buildcopy.py86
-rw-r--r--third_party/waf/waflib/extras/c_bgxlc.py5
-rw-r--r--third_party/waf/waflib/extras/c_dumbpreproc.py8
-rw-r--r--third_party/waf/waflib/extras/c_emscripten.py27
-rw-r--r--third_party/waf/waflib/extras/c_nec.py73
-rw-r--r--third_party/waf/waflib/extras/cabal.py156
-rw-r--r--third_party/waf/waflib/extras/cfg_altoptions.py5
-rw-r--r--third_party/waf/waflib/extras/cfg_cross_gnu.py176
-rw-r--r--third_party/waf/waflib/extras/clang_compilation_database.py43
-rw-r--r--third_party/waf/waflib/extras/codelite.py59
-rw-r--r--third_party/waf/waflib/extras/color_gcc.py7
-rw-r--r--third_party/waf/waflib/extras/color_rvct.py5
-rw-r--r--third_party/waf/waflib/extras/compat15.py17
-rw-r--r--third_party/waf/waflib/extras/cppcheck.py81
-rw-r--r--third_party/waf/waflib/extras/cpplint.py25
-rw-r--r--third_party/waf/waflib/extras/cross_gnu.py231
-rw-r--r--third_party/waf/waflib/extras/cython.py15
-rw-r--r--third_party/waf/waflib/extras/dcc.py8
-rw-r--r--third_party/waf/waflib/extras/distnet.py20
-rw-r--r--third_party/waf/waflib/extras/doxygen.py15
-rw-r--r--third_party/waf/waflib/extras/dpapi.py9
-rw-r--r--third_party/waf/waflib/extras/eclipse.py385
-rw-r--r--third_party/waf/waflib/extras/erlang.py114
-rw-r--r--third_party/waf/waflib/extras/fast_partial.py522
-rw-r--r--third_party/waf/waflib/extras/fc_bgxlf.py36
-rw-r--r--third_party/waf/waflib/extras/fc_cray.py55
-rw-r--r--third_party/waf/waflib/extras/fc_nag.py65
-rw-r--r--third_party/waf/waflib/extras/fc_nec.py64
-rw-r--r--third_party/waf/waflib/extras/fc_open64.py62
-rw-r--r--third_party/waf/waflib/extras/fc_pgfortran.py72
-rw-r--r--third_party/waf/waflib/extras/fc_solstudio.py66
-rw-r--r--third_party/waf/waflib/extras/fc_xlf.py67
-rw-r--r--third_party/waf/waflib/extras/file_to_object.py7
-rw-r--r--third_party/waf/waflib/extras/fluid.py34
-rw-r--r--third_party/waf/waflib/extras/freeimage.py5
-rw-r--r--third_party/waf/waflib/extras/fsb.py5
-rw-r--r--third_party/waf/waflib/extras/fsc.py68
-rw-r--r--third_party/waf/waflib/extras/gccdeps.py11
-rw-r--r--third_party/waf/waflib/extras/go.py255
-rw-r--r--third_party/waf/waflib/extras/gob2.py5
-rw-r--r--third_party/waf/waflib/extras/halide.py8
-rwxr-xr-xthird_party/waf/waflib/extras/javatest.py122
-rw-r--r--third_party/waf/waflib/extras/kde4.py97
-rw-r--r--third_party/waf/waflib/extras/local_rpath.py5
-rw-r--r--third_party/waf/waflib/extras/make.py23
-rw-r--r--third_party/waf/waflib/extras/md5_tstamp.py67
-rw-r--r--third_party/waf/waflib/extras/mem_reducer.py110
-rw-r--r--third_party/waf/waflib/extras/midl.py73
-rw-r--r--third_party/waf/waflib/extras/misc.py410
-rw-r--r--third_party/waf/waflib/extras/msvcdeps.py436
-rw-r--r--third_party/waf/waflib/extras/msvs.py99
-rw-r--r--third_party/waf/waflib/extras/netcache_client.py35
-rw-r--r--third_party/waf/waflib/extras/nobuild.py23
-rw-r--r--third_party/waf/waflib/extras/objcopy.py11
-rw-r--r--third_party/waf/waflib/extras/ocaml.py352
-rw-r--r--third_party/waf/waflib/extras/package.py7
-rw-r--r--third_party/waf/waflib/extras/parallel_debug.py114
-rw-r--r--third_party/waf/waflib/extras/pch.py6
-rw-r--r--third_party/waf/waflib/extras/pep8.py6
-rw-r--r--third_party/waf/waflib/extras/pgicc.py79
-rw-r--r--third_party/waf/waflib/extras/pgicxx.py24
-rwxr-xr-xthird_party/waf/waflib/extras/prefork.py401
-rw-r--r--third_party/waf/waflib/extras/preforkjava.py236
-rw-r--r--third_party/waf/waflib/extras/preforkunix.py317
-rw-r--r--third_party/waf/waflib/extras/print_commands.py84
-rw-r--r--third_party/waf/waflib/extras/proc.py6
-rw-r--r--third_party/waf/waflib/extras/protoc.py183
-rw-r--r--third_party/waf/waflib/extras/pyqt5.py245
-rw-r--r--third_party/waf/waflib/extras/pytest.py229
-rw-r--r--third_party/waf/waflib/extras/qnxnto.py76
-rw-r--r--third_party/waf/waflib/extras/qt4.py699
-rw-r--r--third_party/waf/waflib/extras/relocation.py6
-rw-r--r--third_party/waf/waflib/extras/remote.py7
-rw-r--r--third_party/waf/waflib/extras/resx.py39
-rw-r--r--third_party/waf/waflib/extras/review.py14
-rw-r--r--third_party/waf/waflib/extras/rst.py45
-rw-r--r--third_party/waf/waflib/extras/run_do_script.py143
-rw-r--r--third_party/waf/waflib/extras/run_m_script.py92
-rw-r--r--third_party/waf/waflib/extras/run_py_script.py108
-rw-r--r--third_party/waf/waflib/extras/run_r_script.py90
-rw-r--r--third_party/waf/waflib/extras/sas.py75
-rw-r--r--third_party/waf/waflib/extras/satellite_assembly.py61
-rw-r--r--third_party/waf/waflib/extras/scala.py132
-rw-r--r--third_party/waf/waflib/extras/slow_qt4.py100
-rw-r--r--third_party/waf/waflib/extras/smart_continue.py80
-rw-r--r--third_party/waf/waflib/extras/softlink_libs.py80
-rw-r--r--third_party/waf/waflib/extras/stale.py18
-rw-r--r--third_party/waf/waflib/extras/stracedeps.py19
-rw-r--r--third_party/waf/waflib/extras/swig.py83
-rw-r--r--third_party/waf/waflib/extras/syms.py70
-rw-r--r--third_party/waf/waflib/extras/sync_exec.py8
-rw-r--r--third_party/waf/waflib/extras/ticgt.py304
-rw-r--r--third_party/waf/waflib/extras/unc.py110
-rw-r--r--third_party/waf/waflib/extras/unity.py129
-rw-r--r--third_party/waf/waflib/extras/use_config.py33
-rw-r--r--third_party/waf/waflib/extras/valadoc.py144
-rw-r--r--third_party/waf/waflib/extras/why.py15
-rw-r--r--third_party/waf/waflib/extras/win32_opts.py29
-rw-r--r--third_party/waf/waflib/extras/wix.py91
-rw-r--r--third_party/waf/waflib/extras/xcode6.py731
109 files changed, 8374 insertions, 2957 deletions
diff --git a/third_party/waf/waflib/extras/add_objects.py b/third_party/waf/waflib/extras/add_objects.py
deleted file mode 100644
index 5606fd661e8..00000000000
--- a/third_party/waf/waflib/extras/add_objects.py
+++ /dev/null
@@ -1,6 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2011 (ita)
-
-from waflib import Logs
-Logs.warn('This tool has been merged to the main library, remove the references to "add_objects"')
diff --git a/third_party/waf/waflib/extras/batched_cc.py b/third_party/waf/waflib/extras/batched_cc.py
index 4e48e780801..935df6cf162 100644
--- a/third_party/waf/waflib/extras/batched_cc.py
+++ b/third_party/waf/waflib/extras/batched_cc.py
@@ -1,23 +1,28 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2015 (ita)
"""
-Build as batches.
-
Instead of compiling object files one by one, c/c++ compilers are often able to compile at once:
cc -c ../file1.c ../file2.c ../file3.c
Files are output on the directory where the compiler is called, and dependencies are more difficult
to track (do not run the command on all source files if only one file changes)
-
As such, we do as if the files were compiled one by one, but no command is actually run:
replace each cc/cpp Task by a TaskSlave. A new task called TaskMaster collects the
signatures from each slave and finds out the command-line to run.
-Just import this module in the configuration (no other change required).
-This is provided as an example, for performance unity builds are recommended (fewer tasks and
-fewer jobs to execute). See waflib/extras/unity.py.
+Just import this module to start using it:
+def build(bld):
+ bld.load('batched_cc')
+
+Note that this is provided as an example, unity builds are recommended
+for best performance results (fewer tasks and fewer jobs to execute).
+See waflib/extras/unity.py.
"""
from waflib import Task, Utils
@@ -26,24 +31,21 @@ from waflib.Tools import c, cxx
MAX_BATCH = 50
-c_str = '${CC} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED}'
+c_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}'
c_fun, _ = Task.compile_fun_noshell(c_str)
-cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED}'
+cxx_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}'
cxx_fun, _ = Task.compile_fun_noshell(cxx_str)
count = 70000
-class batch_task(Task.Task):
+class batch(Task.Task):
color = 'PINK'
after = ['c', 'cxx']
before = ['cprogram', 'cshlib', 'cstlib', 'cxxprogram', 'cxxshlib', 'cxxstlib']
def uid(self):
- m = Utils.md5()
- m.update(Task.Task.uid(self))
- m.update(str(self.generator.idx).encode())
- return m.digest()
+ return Utils.h_list([Task.Task.uid(self), self.generator.idx, self.generator.path.abspath(), self.generator.target])
def __str__(self):
return 'Batch compilation for %d slaves' % len(self.slaves)
@@ -74,6 +76,13 @@ class batch_task(Task.Task):
return Task.SKIP_ME
+ def get_cwd(self):
+ return self.slaves[0].outputs[0].parent
+
+ def batch_incpaths(self):
+ st = self.env.CPPPATH_ST
+ return [st % node.abspath() for node in self.generator.includes_nodes]
+
def run(self):
self.outputs = []
@@ -85,7 +94,6 @@ class batch_task(Task.Task):
srclst.append(t.inputs[0].abspath())
self.env.SRCLST = srclst
- self.cwd = slaves[0].outputs[0].parent.abspath()
if self.slaves[0].__class__.__name__ == 'c':
ret = c_fun(self)
@@ -166,3 +174,4 @@ for x in ('c', 'cxx'):
setattr(t, 'run', run)
setattr(t, 'old_post_run', t.post_run)
setattr(t, 'post_run', post_run)
+
diff --git a/third_party/waf/waflib/extras/biber.py b/third_party/waf/waflib/extras/biber.py
new file mode 100644
index 00000000000..b4bb72267e8
--- /dev/null
+++ b/third_party/waf/waflib/extras/biber.py
@@ -0,0 +1,62 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+"""
+Latex processing using "biber"
+"""
+
+import os
+from waflib import Task, Logs
+
+from waflib.Tools import tex as texmodule
+
+class tex(texmodule.tex):
+ biber_fun, _ = Task.compile_fun('${BIBER} ${BIBERFLAGS} ${SRCFILE}',shell=False)
+ biber_fun.__doc__ = """
+ Execute the program **biber**
+ """
+
+ def bibfile(self):
+ return None
+
+ def bibunits(self):
+ self.env.env = {}
+ self.env.env.update(os.environ)
+ self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
+ self.env.SRCFILE = self.aux_nodes[0].name[:-4]
+
+ if not self.env['PROMPT_LATEX']:
+ self.env.append_unique('BIBERFLAGS', '--quiet')
+
+ path = self.aux_nodes[0].abspath()[:-4] + '.bcf'
+ if os.path.isfile(path):
+ Logs.warn('calling biber')
+ self.check_status('error when calling biber, check %s.blg for errors' % (self.env.SRCFILE), self.biber_fun())
+ else:
+ super(tex, self).bibfile()
+ super(tex, self).bibunits()
+
+class latex(tex):
+ texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
+class pdflatex(tex):
+ texfun, vars = Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
+class xelatex(tex):
+ texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)
+
+def configure(self):
+ """
+ Almost the same as in tex.py, but try to detect 'biber'
+ """
+ v = self.env
+ for p in ' biber tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
+ try:
+ self.find_program(p, var=p.upper())
+ except self.errors.ConfigurationError:
+ pass
+ v['DVIPSFLAGS'] = '-Ppdf'
+
diff --git a/third_party/waf/waflib/extras/bjam.py b/third_party/waf/waflib/extras/bjam.py
new file mode 100644
index 00000000000..b8effa543b7
--- /dev/null
+++ b/third_party/waf/waflib/extras/bjam.py
@@ -0,0 +1,132 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# per rosengren 2011
+
+from os import sep, readlink
+from waflib import Logs
+from waflib.TaskGen import feature, after_method
+from waflib.Task import Task, always_run
+
+def options(opt):
+ grp = opt.add_option_group('Bjam Options')
+ grp.add_option('--bjam_src', default=None, help='You can find it in <boost root>/tools/jam/src')
+ grp.add_option('--bjam_uname', default='linuxx86_64', help='bjam is built in <src>/bin.<uname>/bjam')
+ grp.add_option('--bjam_config', default=None)
+ grp.add_option('--bjam_toolset', default=None)
+
+def configure(cnf):
+ if not cnf.env.BJAM_SRC:
+ cnf.env.BJAM_SRC = cnf.options.bjam_src
+ if not cnf.env.BJAM_UNAME:
+ cnf.env.BJAM_UNAME = cnf.options.bjam_uname
+ try:
+ cnf.find_program('bjam', path_list=[
+ cnf.env.BJAM_SRC + sep + 'bin.' + cnf.env.BJAM_UNAME
+ ])
+ except Exception:
+ cnf.env.BJAM = None
+ if not cnf.env.BJAM_CONFIG:
+ cnf.env.BJAM_CONFIG = cnf.options.bjam_config
+ if not cnf.env.BJAM_TOOLSET:
+ cnf.env.BJAM_TOOLSET = cnf.options.bjam_toolset
+
+@feature('bjam')
+@after_method('process_rule')
+def process_bjam(self):
+ if not self.bld.env.BJAM:
+ self.create_task('bjam_creator')
+ self.create_task('bjam_build')
+ self.create_task('bjam_installer')
+ if getattr(self, 'always', False):
+ always_run(bjam_creator)
+ always_run(bjam_build)
+ always_run(bjam_installer)
+
+class bjam_creator(Task):
+ ext_out = 'bjam_exe'
+ vars=['BJAM_SRC', 'BJAM_UNAME']
+ def run(self):
+ env = self.env
+ gen = self.generator
+ bjam = gen.bld.root.find_dir(env.BJAM_SRC)
+ if not bjam:
+ Logs.error('Can not find bjam source')
+ return -1
+ bjam_exe_relpath = 'bin.' + env.BJAM_UNAME + '/bjam'
+ bjam_exe = bjam.find_resource(bjam_exe_relpath)
+ if bjam_exe:
+ env.BJAM = bjam_exe.srcpath()
+ return 0
+ bjam_cmd = ['./build.sh']
+ Logs.debug('runner: ' + bjam.srcpath() + '> ' + str(bjam_cmd))
+ result = self.exec_command(bjam_cmd, cwd=bjam.srcpath())
+ if not result == 0:
+ Logs.error('bjam failed')
+ return -1
+ bjam_exe = bjam.find_resource(bjam_exe_relpath)
+ if bjam_exe:
+ env.BJAM = bjam_exe.srcpath()
+ return 0
+ Logs.error('bjam failed')
+ return -1
+
+class bjam_build(Task):
+ ext_in = 'bjam_exe'
+ ext_out = 'install'
+ vars = ['BJAM_TOOLSET']
+ def run(self):
+ env = self.env
+ gen = self.generator
+ path = gen.path
+ bld = gen.bld
+ if hasattr(gen, 'root'):
+ build_root = path.find_node(gen.root)
+ else:
+ build_root = path
+ jam = bld.srcnode.find_resource(env.BJAM_CONFIG)
+ if jam:
+ Logs.debug('bjam: Using jam configuration from ' + jam.srcpath())
+ jam_rel = jam.relpath_gen(build_root)
+ else:
+ Logs.warn('No build configuration in build_config/user-config.jam. Using default')
+ jam_rel = None
+ bjam_exe = bld.srcnode.find_node(env.BJAM)
+ if not bjam_exe:
+ Logs.error('env.BJAM is not set')
+ return -1
+ bjam_exe_rel = bjam_exe.relpath_gen(build_root)
+ cmd = ([bjam_exe_rel] +
+ (['--user-config=' + jam_rel] if jam_rel else []) +
+ ['--stagedir=' + path.get_bld().path_from(build_root)] +
+ ['--debug-configuration'] +
+ ['--with-' + lib for lib in self.generator.target] +
+ (['toolset=' + env.BJAM_TOOLSET] if env.BJAM_TOOLSET else []) +
+ ['link=' + 'shared'] +
+ ['variant=' + 'release']
+ )
+ Logs.debug('runner: ' + build_root.srcpath() + '> ' + str(cmd))
+ ret = self.exec_command(cmd, cwd=build_root.srcpath())
+ if ret != 0:
+ return ret
+ self.set_outputs(path.get_bld().ant_glob('lib/*') + path.get_bld().ant_glob('bin/*'))
+ return 0
+
+class bjam_installer(Task):
+ ext_in = 'install'
+ def run(self):
+ gen = self.generator
+ path = gen.path
+ for idir, pat in (('${LIBDIR}', 'lib/*'), ('${BINDIR}', 'bin/*')):
+ files = []
+ for n in path.get_bld().ant_glob(pat):
+ try:
+ t = readlink(n.srcpath())
+ gen.bld.symlink_as(sep.join([idir, n.name]), t, postpone=False)
+ except OSError:
+ files.append(n)
+ gen.bld.install_files(idir, files, postpone=False)
+ return 0
+
diff --git a/third_party/waf/waflib/extras/blender.py b/third_party/waf/waflib/extras/blender.py
new file mode 100644
index 00000000000..2556415a0c8
--- /dev/null
+++ b/third_party/waf/waflib/extras/blender.py
@@ -0,0 +1,112 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Michal Proszek, 2014 (poxip)
+
+"""
+Detect the version of Blender, path
+and install the extension:
+
+ def options(opt):
+ opt.load('blender')
+ def configure(cnf):
+ cnf.load('blender')
+ def build(bld):
+ bld(name='io_mesh_raw',
+ feature='blender',
+ files=['file1.py', 'file2.py']
+ )
+If name variable is empty, files are installed in scripts/addons, otherwise scripts/addons/name
+Use ./waf configure --system to set the installation directory to system path
+"""
+import os
+import re
+from getpass import getuser
+
+from waflib import Utils
+from waflib.TaskGen import feature
+from waflib.Configure import conf
+
+def options(opt):
+ opt.add_option(
+ '-s', '--system',
+ dest='directory_system',
+ default=False,
+ action='store_true',
+ help='determines installation directory (default: user)'
+ )
+
+@conf
+def find_blender(ctx):
+ '''Return version number of blender, if not exist return None'''
+ blender = ctx.find_program('blender')
+ output = ctx.cmd_and_log(blender + ['--version'])
+ m = re.search(r'Blender\s*((\d+(\.|))*)', output)
+ if not m:
+ ctx.fatal('Could not retrieve blender version')
+
+ try:
+ blender_version = m.group(1)
+ except IndexError:
+ ctx.fatal('Could not retrieve blender version')
+
+ ctx.env['BLENDER_VERSION'] = blender_version
+ return blender
+
+@conf
+def configure_paths(ctx):
+ """Setup blender paths"""
+ # Get the username
+ user = getuser()
+ _platform = Utils.unversioned_sys_platform()
+ config_path = {'user': '', 'system': ''}
+ if _platform.startswith('linux'):
+ config_path['user'] = '/home/%s/.config/blender/' % user
+ config_path['system'] = '/usr/share/blender/'
+ elif _platform == 'darwin':
+ # MAC OS X
+ config_path['user'] = \
+ '/Users/%s/Library/Application Support/Blender/' % user
+ config_path['system'] = '/Library/Application Support/Blender/'
+ elif Utils.is_win32:
+ # Windows
+ appdata_path = ctx.getenv('APPDATA').replace('\\', '/')
+ homedrive = ctx.getenv('HOMEDRIVE').replace('\\', '/')
+
+ config_path['user'] = '%s/Blender Foundation/Blender/' % appdata_path
+ config_path['system'] = \
+ '%sAll Users/AppData/Roaming/Blender Foundation/Blender/' % homedrive
+ else:
+ ctx.fatal(
+ 'Unsupported platform. '
+ 'Available platforms: Linux, OSX, MS-Windows.'
+ )
+
+ blender_version = ctx.env['BLENDER_VERSION']
+
+ config_path['user'] += blender_version + '/'
+ config_path['system'] += blender_version + '/'
+
+ ctx.env['BLENDER_CONFIG_DIR'] = os.path.abspath(config_path['user'])
+ if ctx.options.directory_system:
+ ctx.env['BLENDER_CONFIG_DIR'] = config_path['system']
+
+ ctx.env['BLENDER_ADDONS_DIR'] = os.path.join(
+ ctx.env['BLENDER_CONFIG_DIR'], 'scripts/addons'
+ )
+ Utils.check_dir(ctx.env['BLENDER_ADDONS_DIR'])
+
+def configure(ctx):
+ ctx.find_blender()
+ ctx.configure_paths()
+
+@feature('blender_list')
+def blender(self):
+ # Two ways to install a blender extension: as a module or just .py files
+ dest_dir = os.path.join(self.env.BLENDER_ADDONS_DIR, self.get_name())
+ Utils.check_dir(dest_dir)
+ self.add_install_files(install_to=dest_dir, install_from=getattr(self, 'files', '.'))
+
diff --git a/third_party/waf/waflib/extras/boo.py b/third_party/waf/waflib/extras/boo.py
new file mode 100644
index 00000000000..93bff04c538
--- /dev/null
+++ b/third_party/waf/waflib/extras/boo.py
@@ -0,0 +1,85 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# Yannick LM 2011
+
+"""
+Support for the boo programming language, for example::
+
+ bld(features = "boo", # necessary feature
+ source = "src.boo", # list of boo files
+ gen = "world.dll", # target
+ type = "library", # library/exe ("-target:xyz" flag)
+ name = "world" # necessary if the target is referenced by 'use'
+ )
+"""
+
+from waflib import Task
+from waflib.Configure import conf
+from waflib.TaskGen import feature, after_method, before_method, extension
+
+@extension('.boo')
+def boo_hook(self, node):
+ # Nothing here yet ...
+ # TODO filter the non-boo source files in 'apply_booc' and remove this method
+ pass
+
+@feature('boo')
+@before_method('process_source')
+def apply_booc(self):
+ """Create a booc task """
+ src_nodes = self.to_nodes(self.source)
+ out_node = self.path.find_or_declare(self.gen)
+
+ self.boo_task = self.create_task('booc', src_nodes, [out_node])
+
+ # Set variables used by the 'booc' task
+ self.boo_task.env.OUT = '-o:%s' % out_node.abspath()
+
+ # type is "exe" by default
+ type = getattr(self, "type", "exe")
+ self.boo_task.env.BOO_TARGET_TYPE = "-target:%s" % type
+
+@feature('boo')
+@after_method('apply_boo')
+def use_boo(self):
+ """"
+ boo applications honor the **use** keyword::
+ """
+ dep_names = self.to_list(getattr(self, 'use', []))
+ for dep_name in dep_names:
+ dep_task_gen = self.bld.get_tgen_by_name(dep_name)
+ if not dep_task_gen:
+ continue
+ dep_task_gen.post()
+ dep_task = getattr(dep_task_gen, 'boo_task', None)
+ if not dep_task:
+ # Try a cs task:
+ dep_task = getattr(dep_task_gen, 'cs_task', None)
+ if not dep_task:
+ # Try a link task:
+ dep_task = getattr(dep_task, 'link_task', None)
+ if not dep_task:
+ # Abort ...
+ continue
+ self.boo_task.set_run_after(dep_task) # order
+ self.boo_task.dep_nodes.extend(dep_task.outputs) # dependency
+ self.boo_task.env.append_value('BOO_FLAGS', '-reference:%s' % dep_task.outputs[0].abspath())
+
+class booc(Task.Task):
+ """Compiles .boo files """
+ color = 'YELLOW'
+ run_str = '${BOOC} ${BOO_FLAGS} ${BOO_TARGET_TYPE} ${OUT} ${SRC}'
+
+@conf
+def check_booc(self):
+ self.find_program('booc', 'BOOC')
+ self.env.BOO_FLAGS = ['-nologo']
+
+def configure(self):
+ """Check that booc is available """
+ self.check_booc()
+
diff --git a/third_party/waf/waflib/extras/boost.py b/third_party/waf/waflib/extras/boost.py
new file mode 100644
index 00000000000..2fe99a40674
--- /dev/null
+++ b/third_party/waf/waflib/extras/boost.py
@@ -0,0 +1,456 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+#
+# partially based on boost.py written by Gernot Vormayr
+# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
+# modified by Bjoern Michaelsen, 2008
+# modified by Luca Fossati, 2008
+# rewritten for waf 1.5.1, Thomas Nagy, 2008
+# rewritten for waf 1.6.2, Sylvain Rouquette, 2011
+
+'''
+
+This is an extra tool, not bundled with the default waf binary.
+To add the boost tool to the waf file:
+$ ./waf-light --tools=compat15,boost
+ or, if you have waf >= 1.6.2
+$ ./waf update --files=boost
+
+When using this tool, the wscript will look like:
+
+ def options(opt):
+ opt.load('compiler_cxx boost')
+
+ def configure(conf):
+ conf.load('compiler_cxx boost')
+ conf.check_boost(lib='system filesystem')
+
+ def build(bld):
+ bld(source='main.cpp', target='app', use='BOOST')
+
+Options are generated, in order to specify the location of boost includes/libraries.
+The `check_boost` configuration function allows to specify the used boost libraries.
+It can also provide default arguments to the --boost-mt command-line arguments.
+Everything will be packaged together in a BOOST component that you can use.
+
+When using MSVC, a lot of compilation flags need to match your BOOST build configuration:
+ - you may have to add /EHsc to your CXXFLAGS or define boost::throw_exception if BOOST_NO_EXCEPTIONS is defined.
+ Errors: C4530
+ - boost libraries will try to be smart and use the (pretty but often not useful) auto-linking feature of MSVC
+ So before calling `conf.check_boost` you might want to disabling by adding
+ conf.env.DEFINES_BOOST += ['BOOST_ALL_NO_LIB']
+ Errors:
+ - boost might also be compiled with /MT, which links the runtime statically.
+ If you have problems with redefined symbols,
+ self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
+ self.env['CXXFLAGS_%s' % var] += ['/MD', '/EHsc']
+Passing `--boost-linkage_autodetect` might help ensuring having a correct linkage in some basic cases.
+
+'''
+
+import sys
+import re
+from waflib import Utils, Logs, Errors
+from waflib.Configure import conf
+from waflib.TaskGen import feature, after_method
+
+BOOST_LIBS = ['/usr/lib/x86_64-linux-gnu', '/usr/lib/i386-linux-gnu',
+ '/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
+BOOST_INCLUDES = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
+BOOST_VERSION_FILE = 'boost/version.hpp'
+BOOST_VERSION_CODE = '''
+#include <iostream>
+#include <boost/version.hpp>
+int main() { std::cout << BOOST_LIB_VERSION << ":" << BOOST_VERSION << std::endl; }
+'''
+
+BOOST_ERROR_CODE = '''
+#include <boost/system/error_code.hpp>
+int main() { boost::system::error_code c; }
+'''
+
+BOOST_THREAD_CODE = '''
+#include <boost/thread.hpp>
+int main() { boost::thread t; }
+'''
+
+BOOST_LOG_CODE = '''
+#include <boost/log/trivial.hpp>
+#include <boost/log/utility/setup/console.hpp>
+#include <boost/log/utility/setup/common_attributes.hpp>
+int main() {
+ using namespace boost::log;
+ add_common_attributes();
+ add_console_log(std::clog, keywords::format = "%Message%");
+ BOOST_LOG_TRIVIAL(debug) << "log is working" << std::endl;
+}
+'''
+
+# toolsets from {boost_dir}/tools/build/v2/tools/common.jam
+PLATFORM = Utils.unversioned_sys_platform()
+detect_intel = lambda env: (PLATFORM == 'win32') and 'iw' or 'il'
+detect_clang = lambda env: (PLATFORM == 'darwin') and 'clang-darwin' or 'clang'
+detect_mingw = lambda env: (re.search('MinGW', env.CXX[0])) and 'mgw' or 'gcc'
+BOOST_TOOLSETS = {
+ 'borland': 'bcb',
+ 'clang': detect_clang,
+ 'como': 'como',
+ 'cw': 'cw',
+ 'darwin': 'xgcc',
+ 'edg': 'edg',
+ 'g++': detect_mingw,
+ 'gcc': detect_mingw,
+ 'icpc': detect_intel,
+ 'intel': detect_intel,
+ 'kcc': 'kcc',
+ 'kylix': 'bck',
+ 'mipspro': 'mp',
+ 'mingw': 'mgw',
+ 'msvc': 'vc',
+ 'qcc': 'qcc',
+ 'sun': 'sw',
+ 'sunc++': 'sw',
+ 'tru64cxx': 'tru',
+ 'vacpp': 'xlc'
+}
+
+
+def options(opt):
+ opt = opt.add_option_group('Boost Options')
+ opt.add_option('--boost-includes', type='string',
+ default='', dest='boost_includes',
+ help='''path to the directory where the boost includes are,
+ e.g., /path/to/boost_1_55_0/stage/include''')
+ opt.add_option('--boost-libs', type='string',
+ default='', dest='boost_libs',
+ help='''path to the directory where the boost libs are,
+ e.g., path/to/boost_1_55_0/stage/lib''')
+ opt.add_option('--boost-mt', action='store_true',
+ default=False, dest='boost_mt',
+ help='select multi-threaded libraries')
+ opt.add_option('--boost-abi', type='string', default='', dest='boost_abi',
+ help='''select libraries with tags (gd for debug, static is automatically added),
+ see doc Boost, Getting Started, chapter 6.1''')
+ opt.add_option('--boost-linkage_autodetect', action="store_true", dest='boost_linkage_autodetect',
+ help="auto-detect boost linkage options (don't get used to it / might break other stuff)")
+ opt.add_option('--boost-toolset', type='string',
+ default='', dest='boost_toolset',
+ help='force a toolset e.g. msvc, vc90, \
+ gcc, mingw, mgw45 (default: auto)')
+ py_version = '%d%d' % (sys.version_info[0], sys.version_info[1])
+ opt.add_option('--boost-python', type='string',
+ default=py_version, dest='boost_python',
+ help='select the lib python with this version \
+ (default: %s)' % py_version)
+
+
+@conf
+def __boost_get_version_file(self, d):
+ if not d:
+ return None
+ dnode = self.root.find_dir(d)
+ if dnode:
+ return dnode.find_node(BOOST_VERSION_FILE)
+ return None
+
+@conf
+def boost_get_version(self, d):
+ """silently retrieve the boost version number"""
+ node = self.__boost_get_version_file(d)
+ if node:
+ try:
+ txt = node.read()
+ except EnvironmentError:
+ Logs.error("Could not read the file %r", node.abspath())
+ else:
+ re_but1 = re.compile('^#define\\s+BOOST_LIB_VERSION\\s+"(.+)"', re.M)
+ m1 = re_but1.search(txt)
+ re_but2 = re.compile('^#define\\s+BOOST_VERSION\\s+(\\d+)', re.M)
+ m2 = re_but2.search(txt)
+ if m1 and m2:
+ return (m1.group(1), m2.group(1))
+ return self.check_cxx(fragment=BOOST_VERSION_CODE, includes=[d], execute=True, define_ret=True).split(":")
+
+@conf
+def boost_get_includes(self, *k, **kw):
+ includes = k and k[0] or kw.get('includes')
+ if includes and self.__boost_get_version_file(includes):
+ return includes
+ for d in self.environ.get('INCLUDE', '').split(';') + BOOST_INCLUDES:
+ if self.__boost_get_version_file(d):
+ return d
+ if includes:
+ self.end_msg('headers not found in %s' % includes)
+ self.fatal('The configuration failed')
+ else:
+ self.end_msg('headers not found, please provide a --boost-includes argument (see help)')
+ self.fatal('The configuration failed')
+
+
+@conf
+def boost_get_toolset(self, cc):
+ toolset = cc
+ if not cc:
+ build_platform = Utils.unversioned_sys_platform()
+ if build_platform in BOOST_TOOLSETS:
+ cc = build_platform
+ else:
+ cc = self.env.CXX_NAME
+ if cc in BOOST_TOOLSETS:
+ toolset = BOOST_TOOLSETS[cc]
+ return isinstance(toolset, str) and toolset or toolset(self.env)
+
+
+@conf
+def __boost_get_libs_path(self, *k, **kw):
+ ''' return the lib path and all the files in it '''
+ if 'files' in kw:
+ return self.root.find_dir('.'), Utils.to_list(kw['files'])
+ libs = k and k[0] or kw.get('libs')
+ if libs:
+ path = self.root.find_dir(libs)
+ files = path.ant_glob('*boost_*')
+ if not libs or not files:
+ for d in self.environ.get('LIB', '').split(';') + BOOST_LIBS:
+ if not d:
+ continue
+ path = self.root.find_dir(d)
+ if path:
+ files = path.ant_glob('*boost_*')
+ if files:
+ break
+ path = self.root.find_dir(d + '64')
+ if path:
+ files = path.ant_glob('*boost_*')
+ if files:
+ break
+ if not path:
+ if libs:
+ self.end_msg('libs not found in %s' % libs)
+ self.fatal('The configuration failed')
+ else:
+ self.end_msg('libs not found, please provide a --boost-libs argument (see help)')
+ self.fatal('The configuration failed')
+
+ self.to_log('Found the boost path in %r with the libraries:' % path)
+ for x in files:
+ self.to_log(' %r' % x)
+ return path, files
+
+@conf
+def boost_get_libs(self, *k, **kw):
+ '''
+ return the lib path and the required libs
+ according to the parameters
+ '''
+ path, files = self.__boost_get_libs_path(**kw)
+ files = sorted(files, key=lambda f: (len(f.name), f.name), reverse=True)
+ toolset = self.boost_get_toolset(kw.get('toolset', ''))
+ toolset_pat = '(-%s[0-9]{0,3})' % toolset
+ version = '-%s' % self.env.BOOST_VERSION
+
+ def find_lib(re_lib, files):
+ for file in files:
+ if re_lib.search(file.name):
+ self.to_log('Found boost lib %s' % file)
+ return file
+ return None
+
+ def format_lib_name(name):
+ if name.startswith('lib') and self.env.CC_NAME != 'msvc':
+ name = name[3:]
+ return name[:name.rfind('.')]
+
+ def match_libs(lib_names, is_static):
+ libs = []
+ lib_names = Utils.to_list(lib_names)
+ if not lib_names:
+ return libs
+ t = []
+ if kw.get('mt', False):
+ t.append('-mt')
+ if kw.get('abi'):
+ t.append('%s%s' % (is_static and '-s' or '-', kw['abi']))
+ elif is_static:
+ t.append('-s')
+ tags_pat = t and ''.join(t) or ''
+ ext = is_static and self.env.cxxstlib_PATTERN or self.env.cxxshlib_PATTERN
+ ext = ext.partition('%s')[2] # remove '%s' or 'lib%s' from PATTERN
+
+ for lib in lib_names:
+ if lib == 'python':
+ # for instance, with python='27',
+ # accepts '-py27', '-py2', '27', '-2.7' and '2'
+ # but will reject '-py3', '-py26', '26' and '3'
+ tags = '({0})?((-py{2})|(-py{1}(?=[^0-9]))|({2})|(-{1}.{3})|({1}(?=[^0-9]))|(?=[^0-9])(?!-py))'.format(tags_pat, kw['python'][0], kw['python'], kw['python'][1])
+ else:
+ tags = tags_pat
+ # Trying libraries, from most strict match to least one
+ for pattern in ['boost_%s%s%s%s%s$' % (lib, toolset_pat, tags, version, ext),
+ 'boost_%s%s%s%s$' % (lib, tags, version, ext),
+ # Give up trying to find the right version
+ 'boost_%s%s%s%s$' % (lib, toolset_pat, tags, ext),
+ 'boost_%s%s%s$' % (lib, tags, ext),
+ 'boost_%s%s$' % (lib, ext),
+ 'boost_%s' % lib]:
+ self.to_log('Trying pattern %s' % pattern)
+ file = find_lib(re.compile(pattern), files)
+ if file:
+ libs.append(format_lib_name(file.name))
+ break
+ else:
+ self.end_msg('lib %s not found in %s' % (lib, path.abspath()))
+ self.fatal('The configuration failed')
+ return libs
+
+ return path.abspath(), match_libs(kw.get('lib'), False), match_libs(kw.get('stlib'), True)
+
+
+@conf
+def check_boost(self, *k, **kw):
+ """
+ Initialize boost libraries to be used.
+
+ Keywords: you can pass the same parameters as with the command line (without "--boost-").
+ Note that the command line has the priority, and should preferably be used.
+ """
+ if not self.env['CXX']:
+ self.fatal('load a c++ compiler first, conf.load("compiler_cxx")')
+
+ params = {
+ 'lib': k and k[0] or kw.get('lib'),
+ 'stlib': kw.get('stlib')
+ }
+ for key, value in self.options.__dict__.items():
+ if not key.startswith('boost_'):
+ continue
+ key = key[len('boost_'):]
+ params[key] = value and value or kw.get(key, '')
+
+ var = kw.get('uselib_store', 'BOOST')
+
+ self.start_msg('Checking boost includes')
+ self.env['INCLUDES_%s' % var] = inc = self.boost_get_includes(**params)
+ versions = self.boost_get_version(inc)
+ self.env.BOOST_VERSION = versions[0]
+ self.env.BOOST_VERSION_NUMBER = int(versions[1])
+ self.end_msg("%d.%d.%d" % (int(versions[1]) / 100000,
+ int(versions[1]) / 100 % 1000,
+ int(versions[1]) % 100))
+ if Logs.verbose:
+ Logs.pprint('CYAN', ' path : %s' % self.env['INCLUDES_%s' % var])
+
+ if not params['lib'] and not params['stlib']:
+ return
+ if 'static' in kw or 'static' in params:
+ Logs.warn('boost: static parameter is deprecated, use stlib instead.')
+ self.start_msg('Checking boost libs')
+ path, libs, stlibs = self.boost_get_libs(**params)
+ self.env['LIBPATH_%s' % var] = [path]
+ self.env['STLIBPATH_%s' % var] = [path]
+ self.env['LIB_%s' % var] = libs
+ self.env['STLIB_%s' % var] = stlibs
+ self.end_msg('ok')
+ if Logs.verbose:
+ Logs.pprint('CYAN', ' path : %s' % path)
+ Logs.pprint('CYAN', ' shared libs : %s' % libs)
+ Logs.pprint('CYAN', ' static libs : %s' % stlibs)
+
+
+ def try_link():
+ if (params['lib'] and 'system' in params['lib']) or \
+ params['stlib'] and 'system' in params['stlib']:
+ self.check_cxx(fragment=BOOST_ERROR_CODE, use=var, execute=False)
+ if (params['lib'] and 'thread' in params['lib']) or \
+ params['stlib'] and 'thread' in params['stlib']:
+ self.check_cxx(fragment=BOOST_THREAD_CODE, use=var, execute=False)
+
+ def is_log_mt():
+ '''Check if found boost_log library is multithread-safe'''
+ for lib in libs:
+ if lib.startswith('boost_log'):
+ lib_log = lib
+ break
+ return '-mt' in lib_log
+
+ if params['lib'] and 'log' in params['lib']:
+ self.env['DEFINES_%s' % var] += ['BOOST_LOG_DYN_LINK']
+ if not is_log_mt():
+ self.env['DEFINES_%s' % var] += ['BOOST_LOG_NO_THREADS']
+ self.check_cxx(fragment=BOOST_LOG_CODE, use=var, execute=False)
+ if params['stlib'] and 'log' in params['stlib']:
+ # Static linking is assumed by default
+ if not is_log_mt():
+ self.env['DEFINES_%s' % var] += ['BOOST_LOG_NO_THREADS']
+ self.check_cxx(fragment=BOOST_LOG_CODE, use=var, execute=False)
+
+ if params.get('linkage_autodetect', False):
+ self.start_msg("Attempting to detect boost linkage flags")
+ toolset = self.boost_get_toolset(kw.get('toolset', ''))
+ if toolset in ('vc',):
+ # disable auto-linking feature, causing error LNK1181
+ # because the code wants to be linked against
+ self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
+
+ # if no dlls are present, we guess the .lib files are not stubs
+ has_dlls = False
+ for x in Utils.listdir(path):
+ if x.endswith(self.env.cxxshlib_PATTERN % ''):
+ has_dlls = True
+ break
+ if not has_dlls:
+ self.env['STLIBPATH_%s' % var] = [path]
+ self.env['STLIB_%s' % var] = libs
+ del self.env['LIB_%s' % var]
+ del self.env['LIBPATH_%s' % var]
+
+ # we attempt to play with some known-to-work CXXFLAGS combinations
+ for cxxflags in (['/MD', '/EHsc'], []):
+ self.env.stash()
+ self.env["CXXFLAGS_%s" % var] += cxxflags
+ try:
+ try_link()
+ except Errors.ConfigurationError as e:
+ self.env.revert()
+ exc = e
+ else:
+ self.end_msg("ok: winning cxxflags combination: %s" % (self.env["CXXFLAGS_%s" % var]))
+ exc = None
+ self.env.commit()
+ break
+
+ if exc is not None:
+ self.end_msg("Could not auto-detect boost linking flags combination, you may report it to boost.py author", ex=exc)
+ self.fatal('The configuration failed')
+ else:
+ self.end_msg("Boost linkage flags auto-detection not implemented (needed ?) for this toolchain")
+ self.fatal('The configuration failed')
+ else:
+ self.start_msg('Checking for boost linkage')
+ try:
+ try_link()
+ except Errors.ConfigurationError as e:
+ self.end_msg("Could not link against boost libraries using supplied options")
+ self.fatal('The configuration failed')
+ self.end_msg('ok')
+
+
+@feature('cxx')
+@after_method('apply_link')
+def install_boost(self):
+ if install_boost.done or not Utils.is_win32 or not self.bld.cmd.startswith('install'):
+ return
+ install_boost.done = True
+ inst_to = getattr(self, 'install_path', '${BINDIR}')
+ for lib in self.env.LIB_BOOST:
+ try:
+ file = self.bld.find_file(self.env.cxxshlib_PATTERN % lib, self.env.LIBPATH_BOOST)
+ self.add_install_files(install_to=inst_to, install_from=self.bld.root.find_node(file))
+ except:
+ continue
+install_boost.done = False
+
diff --git a/third_party/waf/waflib/extras/build_file_tracker.py b/third_party/waf/waflib/extras/build_file_tracker.py
index a00f7b2ad80..28b00e538a3 100644
--- a/third_party/waf/waflib/extras/build_file_tracker.py
+++ b/third_party/waf/waflib/extras/build_file_tracker.py
@@ -1,5 +1,9 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
# Thomas Nagy, 2015
"""
@@ -8,24 +12,21 @@ want to use this to force partial rebuilds, see playground/track_output_files/ f
Note that there is a variety of ways to implement this, one may want use timestamps on source files too for example,
or one may want to hash the files in the source directory only under certain conditions (md5_tstamp tool)
-or to hash the file in the build directory with its timestamp (similar to 'update_outputs')
+or to hash the file in the build directory with its timestamp
"""
import os
from waflib import Node, Utils
def get_bld_sig(self):
+ if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
+ return Utils.h_file(self.abspath())
+
try:
- return self.cache_sig
+ # add the creation time to the signature
+ return self.sig + str(os.stat(self.abspath()).st_mtime)
except AttributeError:
- pass
-
- if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
- self.sig = Utils.h_file(self.abspath())
- self.cache_sig = ret = self.sig
- else:
- # add the
- self.cache_sig = ret = self.sig + str(os.stat(self.abspath()).st_mtime)
- return ret
+ return None
Node.Node.get_bld_sig = get_bld_sig
+
diff --git a/third_party/waf/waflib/extras/build_logs.py b/third_party/waf/waflib/extras/build_logs.py
index 2fb8d346874..87a4e2c3b4a 100644
--- a/third_party/waf/waflib/extras/build_logs.py
+++ b/third_party/waf/waflib/extras/build_logs.py
@@ -1,3 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2013 (ita)
@@ -17,7 +21,7 @@ try:
up = os.path.dirname(Context.g_module.__file__)
except AttributeError:
up = '.'
-LOGFILE = os.path.join(up, 'logs', '%s.log' % time.strftime('%Y_%m_%d_%H_%M'))
+LOGFILE = os.path.join(up, 'logs', time.strftime('%Y_%m_%d_%H_%M.log'))
wlock = threading.Lock()
class log_to_file(object):
@@ -96,7 +100,7 @@ def exit_cleanup():
fileobj.close()
filename = sys.stdout.filename
- Logs.info('Output logged to %r' % filename)
+ Logs.info('Output logged to %r', filename)
# then copy the log file to "latest.log" if possible
up = os.path.dirname(os.path.abspath(filename))
@@ -104,7 +108,7 @@ def exit_cleanup():
shutil.copy(filename, os.path.join(up, 'latest.log'))
except OSError:
# this may fail on windows due to processes spawned
- #
pass
atexit.register(exit_cleanup)
+
diff --git a/third_party/waf/waflib/extras/buildcopy.py b/third_party/waf/waflib/extras/buildcopy.py
new file mode 100644
index 00000000000..10625d1e26f
--- /dev/null
+++ b/third_party/waf/waflib/extras/buildcopy.py
@@ -0,0 +1,86 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# Calle Rosenquist, 2017 (xbreak)
+"""
+Create task that copies source files to the associated build node.
+This is useful to e.g. construct a complete Python package so it can be unit tested
+without installation.
+
+Source files to be copied can be specified either in `buildcopy_source` attribute, or
+`source` attribute. If both are specified `buildcopy_source` has priority.
+
+Examples::
+
+ def build(bld):
+ bld(name = 'bar',
+ features = 'py buildcopy',
+ source = bld.path.ant_glob('src/bar/*.py'))
+
+ bld(name = 'py baz',
+ features = 'buildcopy',
+ buildcopy_source = bld.path.ant_glob('src/bar/*.py') + ['src/bar/resource.txt'])
+
+"""
+import os, shutil
+from waflib import Errors, Task, TaskGen, Utils, Node
+
+@TaskGen.before_method('process_source')
+@TaskGen.feature('buildcopy')
+def make_buildcopy(self):
+ """
+ Creates the buildcopy task.
+ """
+ def to_src_nodes(lst):
+ """Find file nodes only in src, TaskGen.to_nodes will not work for this since it gives
+ preference to nodes in build.
+ """
+ if isinstance(lst, Node.Node):
+ if not lst.is_src():
+ raise Errors.WafError('buildcopy: node %s is not in src'%lst)
+ if not os.path.isfile(lst.abspath()):
+ raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%lst)
+ return lst
+
+ if isinstance(lst, str):
+ lst = [x for x in Utils.split_path(lst) if x and x != '.']
+
+ node = self.bld.path.get_src().search_node(lst)
+ if node:
+ if not os.path.isfile(node.abspath()):
+ raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node)
+ return node
+
+ node = self.bld.path.get_src().find_node(lst)
+ if node:
+ if not os.path.isfile(node.abspath()):
+ raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node)
+ return node
+ raise Errors.WafError('buildcopy: File not found in src: %s'%os.path.join(*lst))
+
+ nodes = [ to_src_nodes(n) for n in getattr(self, 'buildcopy_source', getattr(self, 'source', [])) ]
+ node_pairs = [(n, n.get_bld()) for n in nodes]
+ self.create_task('buildcopy', [n[0] for n in node_pairs], [n[1] for n in node_pairs], node_pairs=node_pairs)
+
+
+class buildcopy(Task.Task):
+ """
+ Copy for each pair `n` in `node_pairs`: n[0] -> n[1].
+
+ Attribute `node_pairs` should contain a list of tuples describing source and target:
+
+ node_pairs = [(in, out), ...]
+
+ """
+ color = 'PINK'
+
+ def keyword(self):
+ return 'Copying'
+
+ def run(self):
+ for f,t in self.node_pairs:
+ t.parent.mkdir()
+ shutil.copy2(f.abspath(), t.abspath())
diff --git a/third_party/waf/waflib/extras/c_bgxlc.py b/third_party/waf/waflib/extras/c_bgxlc.py
index 7633f566b07..04a3c79cff0 100644
--- a/third_party/waf/waflib/extras/c_bgxlc.py
+++ b/third_party/waf/waflib/extras/c_bgxlc.py
@@ -1,5 +1,9 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
# harald at klimachs.de
"""
@@ -29,3 +33,4 @@ def configure(conf):
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
+
diff --git a/third_party/waf/waflib/extras/c_dumbpreproc.py b/third_party/waf/waflib/extras/c_dumbpreproc.py
index 9407527aca4..407bcf5a893 100644
--- a/third_party/waf/waflib/extras/c_dumbpreproc.py
+++ b/third_party/waf/waflib/extras/c_dumbpreproc.py
@@ -1,3 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
@@ -33,7 +37,8 @@ re_inc = re.compile(
def lines_includes(node):
code = node.read()
if c_preproc.use_trigraphs:
- for (a, b) in c_preproc.trig_def: code = code.split(a).join(b)
+ for (a, b) in c_preproc.trig_def:
+ code = code.split(a).join(b)
code = c_preproc.re_nl.sub('', code)
code = c_preproc.re_cpp.sub(c_preproc.repl, code)
return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]
@@ -68,3 +73,4 @@ class dumb_parser(parser):
self.tryfind(y)
c_preproc.c_parser = dumb_parser
+
diff --git a/third_party/waf/waflib/extras/c_emscripten.py b/third_party/waf/waflib/extras/c_emscripten.py
index 6e7fbbe1142..b23d770b43b 100644
--- a/third_party/waf/waflib/extras/c_emscripten.py
+++ b/third_party/waf/waflib/extras/c_emscripten.py
@@ -1,3 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# -*- coding: utf-8 vi:ts=4:noexpandtab
@@ -32,7 +36,7 @@ def get_emscripten_version(conf, cc):
conf.fatal('Could not determine emscripten version %r: %s' % (cmd, e))
if not isinstance(out, str):
- out = out.decode(sys.stdout.encoding or 'iso8859-1')
+ out = out.decode(sys.stdout.encoding or 'latin-1')
k = {}
out = out.splitlines()
@@ -76,21 +80,12 @@ def configure(conf):
conf.env.ARFLAGS = ['rcs']
conf.env.cshlib_PATTERN = '%s.js'
conf.env.cxxshlib_PATTERN = '%s.js'
- conf.env.cstlib_PATTERN = '%s.bc'
- conf.env.cxxstlib_PATTERN = '%s.bc'
+ conf.env.cstlib_PATTERN = '%s.a'
+ conf.env.cxxstlib_PATTERN = '%s.a'
conf.env.cprogram_PATTERN = '%s.html'
conf.env.cxxprogram_PATTERN = '%s.html'
+ conf.env.CXX_TGT_F = ['-c', '-o', '']
+ conf.env.CC_TGT_F = ['-c', '-o', '']
+ conf.env.CXXLNK_TGT_F = ['-o', '']
+ conf.env.CCLNK_TGT_F = ['-o', '']
conf.env.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
-
-@feature('c', 'cxx', 'acm', 'includes')
-@after_method('propagate_uselib_vars', 'process_source', 'apply_incpaths')
-def apply_incpaths_emscripten(self):
- """
- Emscripten doesn't like absolute include paths
- """
- # TODO: in waf 1.9 we can switch back to bldnode as the default since path_from handles cross-drive paths
- if self.env.CC_NAME != 'emscripten' or self.env.CC_NAME != 'emscripten':
- return
- lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
- self.includes_nodes = lst
- self.env['INCPATHS'] = [x.path_from(self.bld.bldnode) for x in lst]
diff --git a/third_party/waf/waflib/extras/c_nec.py b/third_party/waf/waflib/extras/c_nec.py
index 87e0c055f26..1ca665c1b0d 100644
--- a/third_party/waf/waflib/extras/c_nec.py
+++ b/third_party/waf/waflib/extras/c_nec.py
@@ -1,5 +1,9 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
# harald at klimachs.de
"""
@@ -24,43 +28,46 @@ def find_sxc(conf):
@conf
def get_sxc_version(conf, fc):
- version_re = re.compile(r"C\+\+/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
- cmd = fc + ['-V']
- p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None)
- out, err = p.communicate()
+ version_re = re.compile(r"C\+\+/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
+ cmd = fc + ['-V']
+ p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None)
+ out, err = p.communicate()
- if out: match = version_re(out)
- else: match = version_re(err)
- if not match:
- conf.fatal('Could not determine the NEC C compiler version.')
- k = match.groupdict()
- conf.env['C_VERSION'] = (k['major'], k['minor'])
+ if out:
+ match = version_re(out)
+ else:
+ match = version_re(err)
+ if not match:
+ conf.fatal('Could not determine the NEC C compiler version.')
+ k = match.groupdict()
+ conf.env['C_VERSION'] = (k['major'], k['minor'])
@conf
def sxc_common_flags(conf):
- v=conf.env
- v['CC_SRC_F']=[]
- v['CC_TGT_F']=['-c','-o']
- if not v['LINK_CC']:v['LINK_CC']=v['CC']
- v['CCLNK_SRC_F']=[]
- v['CCLNK_TGT_F']=['-o']
- v['CPPPATH_ST']='-I%s'
- v['DEFINES_ST']='-D%s'
- v['LIB_ST']='-l%s'
- v['LIBPATH_ST']='-L%s'
- v['STLIB_ST']='-l%s'
- v['STLIBPATH_ST']='-L%s'
- v['RPATH_ST']=''
- v['SONAME_ST']=[]
- v['SHLIB_MARKER']=[]
- v['STLIB_MARKER']=[]
- v['LINKFLAGS_cprogram']=['']
- v['cprogram_PATTERN']='%s'
- v['CFLAGS_cshlib']=['-fPIC']
- v['LINKFLAGS_cshlib']=['']
- v['cshlib_PATTERN']='lib%s.so'
- v['LINKFLAGS_cstlib']=[]
- v['cstlib_PATTERN']='lib%s.a'
+ v=conf.env
+ v['CC_SRC_F']=[]
+ v['CC_TGT_F']=['-c','-o']
+ if not v['LINK_CC']:
+ v['LINK_CC']=v['CC']
+ v['CCLNK_SRC_F']=[]
+ v['CCLNK_TGT_F']=['-o']
+ v['CPPPATH_ST']='-I%s'
+ v['DEFINES_ST']='-D%s'
+ v['LIB_ST']='-l%s'
+ v['LIBPATH_ST']='-L%s'
+ v['STLIB_ST']='-l%s'
+ v['STLIBPATH_ST']='-L%s'
+ v['RPATH_ST']=''
+ v['SONAME_ST']=[]
+ v['SHLIB_MARKER']=[]
+ v['STLIB_MARKER']=[]
+ v['LINKFLAGS_cprogram']=['']
+ v['cprogram_PATTERN']='%s'
+ v['CFLAGS_cshlib']=['-fPIC']
+ v['LINKFLAGS_cshlib']=['']
+ v['cshlib_PATTERN']='lib%s.so'
+ v['LINKFLAGS_cstlib']=[]
+ v['cstlib_PATTERN']='lib%s.a'
def configure(conf):
conf.find_sxc()
diff --git a/third_party/waf/waflib/extras/cabal.py b/third_party/waf/waflib/extras/cabal.py
new file mode 100644
index 00000000000..a26f6342e9e
--- /dev/null
+++ b/third_party/waf/waflib/extras/cabal.py
@@ -0,0 +1,156 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Anton Feldmann, 2012
+# "Base for cabal"
+
+from waflib import Task, Utils
+from waflib.TaskGen import extension
+from waflib.Utils import threading
+from shutil import rmtree
+
+lock = threading.Lock()
+registering = False
+
+def configure(self):
+ self.find_program('cabal', var='CABAL')
+ self.find_program('ghc-pkg', var='GHCPKG')
+ pkgconfd = self.bldnode.abspath() + '/package.conf.d'
+ self.env.PREFIX = self.bldnode.abspath() + '/dist'
+ self.env.PKGCONFD = pkgconfd
+ if self.root.find_node(pkgconfd + '/package.cache'):
+ self.msg('Using existing package database', pkgconfd, color='CYAN')
+ else:
+ pkgdir = self.root.find_dir(pkgconfd)
+ if pkgdir:
+ self.msg('Deleting corrupt package database', pkgdir.abspath(), color ='RED')
+ rmtree(pkgdir.abspath())
+ pkgdir = None
+
+ self.cmd_and_log(self.env.GHCPKG + ['init', pkgconfd])
+ self.msg('Created package database', pkgconfd, color = 'YELLOW' if pkgdir else 'GREEN')
+
+@extension('.cabal')
+def process_cabal(self, node):
+ out_dir_node = self.bld.root.find_dir(self.bld.out_dir)
+ package_node = node.change_ext('.package')
+ package_node = out_dir_node.find_or_declare(package_node.name)
+ build_node = node.parent.get_bld()
+ build_path = build_node.abspath()
+ config_node = build_node.find_or_declare('setup-config')
+ inplace_node = build_node.find_or_declare('package.conf.inplace')
+
+ config_task = self.create_task('cabal_configure', node)
+ config_task.cwd = node.parent.abspath()
+ config_task.depends_on = getattr(self, 'depends_on', '')
+ config_task.build_path = build_path
+ config_task.set_outputs(config_node)
+
+ build_task = self.create_task('cabal_build', config_node)
+ build_task.cwd = node.parent.abspath()
+ build_task.build_path = build_path
+ build_task.set_outputs(inplace_node)
+
+ copy_task = self.create_task('cabal_copy', inplace_node)
+ copy_task.cwd = node.parent.abspath()
+ copy_task.depends_on = getattr(self, 'depends_on', '')
+ copy_task.build_path = build_path
+
+ last_task = copy_task
+ task_list = [config_task, build_task, copy_task]
+
+ if (getattr(self, 'register', False)):
+ register_task = self.create_task('cabal_register', inplace_node)
+ register_task.cwd = node.parent.abspath()
+ register_task.set_run_after(copy_task)
+ register_task.build_path = build_path
+
+ pkgreg_task = self.create_task('ghcpkg_register', inplace_node)
+ pkgreg_task.cwd = node.parent.abspath()
+ pkgreg_task.set_run_after(register_task)
+ pkgreg_task.build_path = build_path
+
+ last_task = pkgreg_task
+ task_list += [register_task, pkgreg_task]
+
+ touch_task = self.create_task('cabal_touch', inplace_node)
+ touch_task.set_run_after(last_task)
+ touch_task.set_outputs(package_node)
+ touch_task.build_path = build_path
+
+ task_list += [touch_task]
+
+ return task_list
+
+def get_all_src_deps(node):
+ hs_deps = node.ant_glob('**/*.hs')
+ hsc_deps = node.ant_glob('**/*.hsc')
+ lhs_deps = node.ant_glob('**/*.lhs')
+ c_deps = node.ant_glob('**/*.c')
+ cpp_deps = node.ant_glob('**/*.cpp')
+ proto_deps = node.ant_glob('**/*.proto')
+ return sum([hs_deps, hsc_deps, lhs_deps, c_deps, cpp_deps, proto_deps], [])
+
+class Cabal(Task.Task):
+ def scan(self):
+ return (get_all_src_deps(self.generator.path), ())
+
+class cabal_configure(Cabal):
+ run_str = '${CABAL} configure -v0 --prefix=${PREFIX} --global --user --package-db=${PKGCONFD} --builddir=${tsk.build_path}'
+ shell = True
+
+ def scan(self):
+ out_node = self.generator.bld.root.find_dir(self.generator.bld.out_dir)
+ deps = [out_node.find_or_declare(dep).change_ext('.package') for dep in Utils.to_list(self.depends_on)]
+ return (deps, ())
+
+class cabal_build(Cabal):
+ run_str = '${CABAL} build -v1 --builddir=${tsk.build_path}/'
+ shell = True
+
+class cabal_copy(Cabal):
+ run_str = '${CABAL} copy -v0 --builddir=${tsk.build_path}'
+ shell = True
+
+class cabal_register(Cabal):
+ run_str = '${CABAL} register -v0 --gen-pkg-config=${tsk.build_path}/pkg.config --builddir=${tsk.build_path}'
+ shell = True
+
+class ghcpkg_register(Cabal):
+ run_str = '${GHCPKG} update -v0 --global --user --package-conf=${PKGCONFD} ${tsk.build_path}/pkg.config'
+ shell = True
+
+ def runnable_status(self):
+ global lock, registering
+
+ val = False
+ lock.acquire()
+ val = registering
+ lock.release()
+
+ if val:
+ return Task.ASK_LATER
+
+ ret = Task.Task.runnable_status(self)
+ if ret == Task.RUN_ME:
+ lock.acquire()
+ registering = True
+ lock.release()
+
+ return ret
+
+ def post_run(self):
+ global lock, registering
+
+ lock.acquire()
+ registering = False
+ lock.release()
+
+ return Task.Task.post_run(self)
+
+class cabal_touch(Cabal):
+ run_str = 'touch ${TGT}'
+
diff --git a/third_party/waf/waflib/extras/cfg_altoptions.py b/third_party/waf/waflib/extras/cfg_altoptions.py
index 4a82a70dede..d42c5ccfa86 100644
--- a/third_party/waf/waflib/extras/cfg_altoptions.py
+++ b/third_party/waf/waflib/extras/cfg_altoptions.py
@@ -1,3 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Tool to extend c_config.check_cfg()
@@ -107,3 +111,4 @@ def check_cfg(conf, *k, **kw):
conf.end_msg('%s += %s' % (k, v))
return True
+
diff --git a/third_party/waf/waflib/extras/cfg_cross_gnu.py b/third_party/waf/waflib/extras/cfg_cross_gnu.py
deleted file mode 100644
index 0fb2efb080d..00000000000
--- a/third_party/waf/waflib/extras/cfg_cross_gnu.py
+++ /dev/null
@@ -1,176 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 vi:ts=4:noexpandtab
-# Tool to provide dedicated variables for cross-compilation
-
-__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
-__copyright__ = "Jérôme Carretero, 2014"
-
-"""
-
-This tool allows to use environment variables to define cross-compilation things,
-mostly used when you use build variants.
-
-The variables are obtained from the environment in 3 ways:
-
-1. By defining CHOST, they can be derived as ${CHOST}-${TOOL}
-2. By defining HOST_x
-3. By defining ${CHOST//-/_}_x
-
-Usage:
-
-- In your build script::
-
- def configure(cfg):
- ...
- conf.load('c_cross_gnu')
- for variant in x_variants:
- conf.xcheck_host()
- conf.xcheck_host_var('POUET')
- ...
-
- ...
-
-- Then::
-
- CHOST=arm-hardfloat-linux-gnueabi waf configure
-
- env arm-hardfloat-linux-gnueabi-CC="clang -..." waf configure
-
- CFLAGS=... CHOST=arm-hardfloat-linux-gnueabi HOST_CFLAGS=-g waf configure
-
- HOST_CC="clang -..." waf configure
-
-"""
-
-import os
-from waflib import Utils, Configure
-
-try:
- from shlex import quote
-except ImportError:
- from pipes import quote
-
-def get_chost_stuff(conf):
- """
- Get the CHOST environment variable contents
- """
- chost = None
- chost_envar = None
- if conf.env.CHOST:
- chost = conf.env.CHOST[0]
- chost_envar = chost.replace('-', '_')
- return chost, chost_envar
-
-
-@Configure.conf
-def xcheck_envar(conf, name, wafname=None, cross=False):
- wafname = wafname or name
- envar = os.environ.get(name, None)
-
- if envar is None:
- return
-
- value = Utils.to_list(envar) if envar != '' else [envar]
-
- conf.env[wafname] = value
- if cross:
- pretty = 'cross-compilation %s' % wafname
- else:
- pretty = wafname
- conf.msg('Will use %s' % pretty,
- " ".join(quote(x) for x in value))
-
-@Configure.conf
-def xcheck_host_prog(conf, name, tool, wafname=None):
- wafname = wafname or name
-
- chost, chost_envar = get_chost_stuff(conf)
-
- specific = None
- if chost:
- specific = os.environ.get('%s_%s' % (chost_envar, name), None)
-
- if specific:
- value = Utils.to_list(specific)
- conf.env[wafname] += value
- conf.msg('Will use cross-compilation %s from %s_%s' \
- % (name, chost_envar, name),
- " ".join(quote(x) for x in value))
- return
- else:
- envar = os.environ.get('HOST_%s' % name, None)
- if envar is not None:
- value = Utils.to_list(envar)
- conf.env[wafname] = value
- conf.msg('Will use cross-compilation %s from HOST_%s' \
- % (name, name),
- " ".join(quote(x) for x in value))
- return
-
- if conf.env[wafname]:
- return
-
- value = None
- if chost:
- value = '%s-%s' % (chost, tool)
-
- if value:
- conf.env[wafname] = value
- conf.msg('Will use cross-compilation %s from CHOST' \
- % wafname, value)
-
-@Configure.conf
-def xcheck_host_envar(conf, name, wafname=None):
- wafname = wafname or name
-
- chost, chost_envar = get_chost_stuff(conf)
-
- specific = None
- if chost:
- specific = os.environ.get('%s_%s' % (chost_envar, name), None)
-
- if specific:
- value = Utils.to_list(specific)
- conf.env[wafname] += value
- conf.msg('Will use cross-compilation %s from %s_%s' \
- % (name, chost_envar, name),
- " ".join(quote(x) for x in value))
- return
-
-
- envar = os.environ.get('HOST_%s' % name, None)
- if envar is None:
- return
-
- value = Utils.to_list(envar) if envar != '' else [envar]
-
- conf.env[wafname] = value
- conf.msg('Will use cross-compilation %s from HOST_%s' \
- % (name, name),
- " ".join(quote(x) for x in value))
-
-
-@Configure.conf
-def xcheck_host(conf):
- conf.xcheck_envar('CHOST', cross=True)
- conf.xcheck_host_prog('CC', 'gcc')
- conf.xcheck_host_prog('CXX', 'g++')
- conf.xcheck_host_prog('LINK_CC', 'gcc')
- conf.xcheck_host_prog('LINK_CXX', 'g++')
- conf.xcheck_host_prog('AR', 'ar')
- conf.xcheck_host_prog('AS', 'as')
- conf.xcheck_host_prog('LD', 'ld')
- conf.xcheck_host_envar('CFLAGS')
- conf.xcheck_host_envar('CXXFLAGS')
- conf.xcheck_host_envar('LDFLAGS', 'LINKFLAGS')
- conf.xcheck_host_envar('LIB')
- conf.xcheck_host_envar('PKG_CONFIG_LIBDIR')
- conf.xcheck_host_envar('PKG_CONFIG_PATH')
-
- if not conf.env.env:
- conf.env.env = {}
- conf.env.env.update(os.environ)
- if conf.env.PKG_CONFIG_LIBDIR:
- conf.env.env['PKG_CONFIG_LIBDIR'] = conf.env.PKG_CONFIG_LIBDIR[0]
- if conf.env.PKG_CONFIG_PATH:
- conf.env.env['PKG_CONFIG_PATH'] = conf.env.PKG_CONFIG_PATH[0]
diff --git a/third_party/waf/waflib/extras/clang_compilation_database.py b/third_party/waf/waflib/extras/clang_compilation_database.py
index e7230d4c7f4..756180f8bb6 100644
--- a/third_party/waf/waflib/extras/clang_compilation_database.py
+++ b/third_party/waf/waflib/extras/clang_compilation_database.py
@@ -1,3 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Christoph Koke, 2013
@@ -15,15 +19,16 @@ Usage:
"""
import sys, os, json, shlex, pipes
-from waflib import Logs, TaskGen
-from waflib.Tools import c, cxx
+from waflib import Logs, TaskGen, Task
+
+Task.Task.keep_last_cmd = True
if sys.hexversion >= 0x3030000:
quote = shlex.quote
else:
quote = pipes.quote
-@TaskGen.feature('*')
+@TaskGen.feature('c', 'cxx')
@TaskGen.after_method('process_use')
def collect_compilation_db_tasks(self):
"Add a compilation database entry for compiled tasks"
@@ -33,19 +38,20 @@ def collect_compilation_db_tasks(self):
clang_db = self.bld.clang_compilation_database_tasks = []
self.bld.add_post_fun(write_compilation_database)
+ tup = tuple(y for y in [Task.classes.get(x) for x in ('c', 'cxx')] if y)
for task in getattr(self, 'compiled_tasks', []):
- if isinstance(task, (c.c, cxx.cxx)):
+ if isinstance(task, tup):
clang_db.append(task)
def write_compilation_database(ctx):
"Write the clang compilation database as JSON"
database_file = ctx.bldnode.make_node('compile_commands.json')
- Logs.info("Build commands will be stored in %s" % database_file.path_from(ctx.path))
+ Logs.info('Build commands will be stored in %s', database_file.path_from(ctx.path))
try:
root = json.load(database_file)
except IOError:
root = []
- clang_db = dict((x["file"], x) for x in root)
+ clang_db = dict((x['file'], x) for x in root)
for task in getattr(ctx, 'clang_compilation_database_tasks', []):
try:
cmd = task.last_cmd
@@ -63,3 +69,28 @@ def write_compilation_database(ctx):
clang_db[filename] = entry
root = list(clang_db.values())
database_file.write(json.dumps(root, indent=2))
+
+# Override the runnable_status function to do a dummy/dry run when the file doesn't need to be compiled.
+# This will make sure compile_commands.json is always fully up to date.
+# Previously you could end up with a partial compile_commands.json if the build failed.
+for x in ('c', 'cxx'):
+ if x not in Task.classes:
+ continue
+
+ t = Task.classes[x]
+
+ def runnable_status(self):
+ def exec_command(cmd, **kw):
+ pass
+
+ run_status = self.old_runnable_status()
+ if run_status == Task.SKIP_ME:
+ setattr(self, 'old_exec_command', getattr(self, 'exec_command', None))
+ setattr(self, 'exec_command', exec_command)
+ self.run()
+ setattr(self, 'exec_command', getattr(self, 'old_exec_command', None))
+ return run_status
+
+ setattr(t, 'old_runnable_status', getattr(t, 'runnable_status', None))
+ setattr(t, 'runnable_status', runnable_status)
+
diff --git a/third_party/waf/waflib/extras/codelite.py b/third_party/waf/waflib/extras/codelite.py
index c12ae4b9796..df5949a763f 100644
--- a/third_party/waf/waflib/extras/codelite.py
+++ b/third_party/waf/waflib/extras/codelite.py
@@ -1,5 +1,9 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
# CodeLite Project
# Christian Klein (chrikle@berlios.de)
# Created: Jan 2012
@@ -97,7 +101,7 @@ PROJECT_TEMPLATE = r'''<?xml version="1.0" encoding="utf-8"?>
${if (project.get_key(x)=="sourcefile")}
<File Name="${x.abspath()}"/>
${endif}
- ${endfor}
+ ${endfor}
</VirtualDirectory>
<VirtualDirectory Name="include">
${for x in project.source}
@@ -139,7 +143,7 @@ PROJECT_TEMPLATE = r'''<?xml version="1.0" encoding="utf-8"?>
$b = project.build_properties[0]}
<RebuildCommand>${xml:project.get_rebuild_command(project.build_properties[0])}</RebuildCommand>
<CleanCommand>${xml:project.get_clean_command(project.build_properties[0])}</CleanCommand>
- <BuildCommand>${xml:project.get_build_command(project.build_properties[0])}</BuildCommand>
+ <BuildCommand>${xml:project.get_build_command(project.build_properties[0])}</BuildCommand>
<Target Name="Install">${xml:project.get_install_command(project.build_properties[0])}</Target>
<Target Name="Build and Install">${xml:project.get_build_and_install_command(project.build_properties[0])}</Target>
<Target Name="Build All">${xml:project.get_build_all_command(project.build_properties[0])}</Target>
@@ -244,7 +248,8 @@ def compile_template(line):
extr = []
def repl(match):
g = match.group
- if g('dollar'): return "$"
+ if g('dollar'):
+ return "$"
elif g('backslash'):
return "\\"
elif g('subst'):
@@ -269,14 +274,14 @@ def compile_template(line):
app("lst.append(%r)" % params[x])
f = extr[x]
- if f.startswith('if') or f.startswith('for'):
+ if f.startswith(('if', 'for')):
app(f + ':')
indent += 1
elif f.startswith('py:'):
app(f[3:])
- elif f.startswith('endif') or f.startswith('endfor'):
+ elif f.startswith(('endif', 'endfor')):
indent -= 1
- elif f.startswith('else') or f.startswith('elif'):
+ elif f.startswith(('else', 'elif')):
indent -= 1
app(f + ':')
indent += 1
@@ -302,8 +307,8 @@ def rm_blank_lines(txt):
BOM = '\xef\xbb\xbf'
try:
- BOM = bytes(BOM, 'iso8859-1') # python 3
-except NameError:
+ BOM = bytes(BOM, 'latin-1') # python 3
+except (TypeError, NameError):
pass
def stealth_write(self, data, flags='wb'):
@@ -315,7 +320,7 @@ def stealth_write(self, data, flags='wb'):
data = data.decode(sys.getfilesystemencoding(), 'replace')
data = data.encode('utf-8')
- if self.name.endswith('.project') or self.name.endswith('.project'):
+ if self.name.endswith('.project'):
data = BOM + data
try:
@@ -325,7 +330,7 @@ def stealth_write(self, data, flags='wb'):
except (IOError, ValueError):
self.write(data, flags=flags)
else:
- Logs.debug('codelite: skipping %s' % self.abspath())
+ Logs.debug('codelite: skipping %r', self)
Node.Node.stealth_write = stealth_write
re_quote = re.compile("[^a-zA-Z0-9-]")
@@ -470,7 +475,7 @@ class vsnode_project(vsnode):
return lst
def write(self):
- Logs.debug('codelite: creating %r' % self.path)
+ Logs.debug('codelite: creating %r', self.path)
#print "self.name:",self.name
# first write the project file
@@ -491,7 +496,7 @@ class vsnode_project(vsnode):
required for writing the source files
"""
name = node.name
- if name.endswith('.cpp') or name.endswith('.c'):
+ if name.endswith(('.cpp', '.c')):
return 'sourcefile'
return 'headerfile'
@@ -527,21 +532,21 @@ class vsnode_project(vsnode):
def get_rebuild_command(self, props):
return "%s clean build %s" % self.get_build_params(props)
-
+
def get_install_command(self, props):
return "%s install %s" % self.get_build_params(props)
def get_build_and_install_command(self, props):
return "%s build install %s" % self.get_build_params(props)
-
+
def get_build_and_install_all_command(self, props):
return "%s build install" % self.get_build_params(props)[0]
-
+
def get_clean_all_command(self, props):
return "%s clean" % self.get_build_params(props)[0]
-
+
def get_build_all_command(self, props):
return "%s build" % self.get_build_params(props)[0]
-
+
def get_rebuild_all_command(self, props):
return "%s clean build" % self.get_build_params(props)[0]
@@ -588,10 +593,10 @@ class vsnode_project_view(vsnode_alias):
vsnode_alias.__init__(self, ctx, node, name)
self.tg = self.ctx() # fake one, cannot remove
self.exclude_files = Node.exclude_regs + '''
-waf-1.8.*
-waf3-1.8.*/**
-.waf-1.8.*
-.waf3-1.8.*/**
+waf-2*
+waf3-2*/**
+.waf-2*
+.waf3-2*/**
**/*.sdf
**/*.suo
**/*.ncb
@@ -666,7 +671,7 @@ class vsnode_target(vsnode_project):
tsk = self.tg.link_task
except AttributeError:
pass
- else:
+ else:
x.output_file = tsk.outputs[0].abspath()
x.preprocessor_definitions = ';'.join(tsk.env.DEFINES)
x.includes_search_path = ';'.join(self.tg.env.INCPATHS)
@@ -738,9 +743,7 @@ class codelite_generator(BuildContext):
return getattr(x, 'path', None) and x.path.abspath() or x.name
self.all_projects.sort(key=sortfun)
-
def write_files(self):
-
"""
Write the project and solution files from the data collected
so far. It is unlikely that you will want to change this
@@ -751,7 +754,7 @@ class codelite_generator(BuildContext):
# and finally write the solution file
node = self.get_solution_node()
node.parent.mkdir()
- Logs.warn('Creating %r' % node)
+ Logs.warn('Creating %r', node)
#a = dir(self.root)
#for b in a:
# print b
@@ -810,7 +813,7 @@ class codelite_generator(BuildContext):
p = self.vsnode_target(self, tg)
p.collect_source() # delegate this processing
- p.collect_properties()
+ p.collect_properties()
self.all_projects.append(p)
def add_aliases(self):
@@ -874,7 +877,3 @@ class codelite_generator(BuildContext):
p.iter_path = p.tg.path
make_parents(p)
-
-
-def options(ctx):
- pass
diff --git a/third_party/waf/waflib/extras/color_gcc.py b/third_party/waf/waflib/extras/color_gcc.py
index b3587e8db44..73b001768c5 100644
--- a/third_party/waf/waflib/extras/color_gcc.py
+++ b/third_party/waf/waflib/extras/color_gcc.py
@@ -1,3 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
@@ -18,7 +22,7 @@ class ColorGCCFormatter(Logs.formatter):
while frame:
func = frame.f_code.co_name
if func == 'exec_command':
- cmd = frame.f_locals['cmd']
+ cmd = frame.f_locals.get('cmd')
if isinstance(cmd, list) and ('gcc' in cmd[0] or 'g++' in cmd[0]):
lines = []
for line in rec.msg.splitlines():
@@ -36,3 +40,4 @@ class ColorGCCFormatter(Logs.formatter):
def options(opt):
Logs.log.handlers[0].setFormatter(ColorGCCFormatter(Logs.colors))
+
diff --git a/third_party/waf/waflib/extras/color_rvct.py b/third_party/waf/waflib/extras/color_rvct.py
index 837fca2edf1..8138b39d63a 100644
--- a/third_party/waf/waflib/extras/color_rvct.py
+++ b/third_party/waf/waflib/extras/color_rvct.py
@@ -1,3 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
@@ -48,3 +52,4 @@ class RcvtFormatter(Logs.formatter):
def options(opt):
Logs.log.handlers[0].setFormatter(RcvtFormatter(Logs.colors))
+
diff --git a/third_party/waf/waflib/extras/compat15.py b/third_party/waf/waflib/extras/compat15.py
index 69722ffa0c9..ec6a16493a3 100644
--- a/third_party/waf/waflib/extras/compat15.py
+++ b/third_party/waf/waflib/extras/compat15.py
@@ -71,7 +71,7 @@ def cmd_output(cmd, **kw):
try:
p = Utils.subprocess.Popen(cmd, **kw)
output = p.communicate()[0]
- except OSError ,e:
+ except OSError as e:
raise ValueError(str(e))
if p.returncode:
@@ -126,7 +126,7 @@ Options.OptionsContext.tool_options = Context.Context.load
Options.Handler = Options.OptionsContext
Task.simple_task_type = Task.task_type_from_func = Task.task_factory
-Task.TaskBase.classes = Task.classes
+Task.Task.classes = Task.classes
def setitem(self, key, value):
if key.startswith('CCFLAGS'):
@@ -320,10 +320,12 @@ def apply_objdeps(self):
lst = y.to_list(y.add_objects)
lst.reverse()
for u in lst:
- if u in seen: continue
+ if u in seen:
+ continue
added = 1
names = [u]+names
- if added: continue # list of names modified, loop
+ if added:
+ continue # list of names modified, loop
# safe to process the current object
y.post()
@@ -345,8 +347,10 @@ def add_obj_file(self, file):
"""Small example on how to link object files as if they were source
obj = bld.create_obj('cc')
obj.add_obj_file('foo.o')"""
- if not hasattr(self, 'obj_files'): self.obj_files = []
- if not 'process_obj_files' in self.meths: self.meths.append('process_obj_files')
+ if not hasattr(self, 'obj_files'):
+ self.obj_files = []
+ if not 'process_obj_files' in self.meths:
+ self.meths.append('process_obj_files')
self.obj_files.append(file)
@@ -403,3 +407,4 @@ def before(*k):
k = [repl.get(key, key) for key in k]
return TaskGen.before_method(*k)
TaskGen.before = before
+
diff --git a/third_party/waf/waflib/extras/cppcheck.py b/third_party/waf/waflib/extras/cppcheck.py
index 3bbeabf200a..4a4e59da5f4 100644
--- a/third_party/waf/waflib/extras/cppcheck.py
+++ b/third_party/waf/waflib/extras/cppcheck.py
@@ -1,4 +1,8 @@
#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
# -*- encoding: utf-8 -*-
# Michel Mooij, michel.mooij7@gmail.com
@@ -43,6 +47,11 @@ building the task.
The result of the source code analysis will be stored both as xml and html
files in the build location for the task. Should any error be detected by
cppcheck the build will be aborted and a link to the html report will be shown.
+By default, one index.html file is created for each task generator. A global
+index.html file can be obtained by setting the following variable
+in the configuration section:
+
+    conf.env.CPPCHECK_SINGLE_HTML = True
When needed source code checking by cppcheck can be disabled per task, per
detected error or warning for a particular task. It can be also be disabled for
@@ -85,7 +94,7 @@ script that comes shipped with the cppcheck tool.
import sys
import xml.etree.ElementTree as ElementTree
-from waflib import Task, TaskGen, Logs, Context
+from waflib import Task, TaskGen, Logs, Context, Options
PYGMENTS_EXC_MSG= '''
The required module 'pygments' could not be found. Please install it using your
@@ -134,6 +143,9 @@ def options(opt):
default='20', action='store',
help='maximum preprocessor (--max-configs) define iterations (default=20)')
+ opt.add_option('--cppcheck-jobs', dest='cppcheck_jobs',
+ default='1', action='store',
+ help='number of jobs (-j) to do the checking work (default=1)')
def configure(conf):
if conf.options.cppcheck_skip:
@@ -143,20 +155,27 @@ def configure(conf):
conf.env.CPPCHECK_MAX_CONFIGS = conf.options.cppcheck_max_configs
conf.env.CPPCHECK_BIN_ENABLE = conf.options.cppcheck_bin_enable
conf.env.CPPCHECK_LIB_ENABLE = conf.options.cppcheck_lib_enable
+ conf.env.CPPCHECK_JOBS = conf.options.cppcheck_jobs
+ if conf.options.cppcheck_jobs != '1' and ('unusedFunction' in conf.options.cppcheck_bin_enable or 'unusedFunction' in conf.options.cppcheck_lib_enable or 'all' in conf.options.cppcheck_bin_enable or 'all' in conf.options.cppcheck_lib_enable):
+ Logs.warn('cppcheck: unusedFunction cannot be used with multiple threads, cppcheck will disable it automatically')
conf.find_program('cppcheck', var='CPPCHECK')
+ # set to True to get a single index.html file
+ conf.env.CPPCHECK_SINGLE_HTML = False
@TaskGen.feature('c')
@TaskGen.feature('cxx')
def cppcheck_execute(self):
- if len(self.env.CPPCHECK_SKIP) or self.bld.options.cppcheck_skip:
+ if hasattr(self.bld, 'conf'):
+ return
+ if len(self.env.CPPCHECK_SKIP) or Options.options.cppcheck_skip:
return
if getattr(self, 'cppcheck_skip', False):
return
task = self.create_task('cppcheck')
task.cmd = _tgen_create_cmd(self)
task.fatal = []
- if not self.bld.options.cppcheck_err_resume:
+ if not Options.options.cppcheck_err_resume:
task.fatal.append('error')
@@ -167,10 +186,12 @@ def _tgen_create_cmd(self):
max_configs = self.env.CPPCHECK_MAX_CONFIGS
bin_enable = self.env.CPPCHECK_BIN_ENABLE
lib_enable = self.env.CPPCHECK_LIB_ENABLE
+ jobs = self.env.CPPCHECK_JOBS
cmd = self.env.CPPCHECK
args = ['--inconclusive','--report-progress','--verbose','--xml','--xml-version=2']
args.append('--max-configs=%s' % max_configs)
+ args.append('-j %s' % jobs)
if 'cxx' in features:
args.append('--language=c++')
@@ -179,7 +200,7 @@ def _tgen_create_cmd(self):
args.append('--language=c')
args.append('--std=%s' % std_c)
- if self.bld.options.cppcheck_check_config:
+ if Options.options.cppcheck_check_config:
args.append('--check-config')
if set(['cprogram','cxxprogram']) & set(features):
@@ -215,8 +236,11 @@ class cppcheck(Task.Task):
root = ElementTree.fromstring(s)
cmd = ElementTree.SubElement(root.find('cppcheck'), 'cmd')
cmd.text = str(self.cmd)
- body = ElementTree.tostring(root)
- node = self.generator.path.get_bld().find_or_declare('cppcheck.xml')
+ body = ElementTree.tostring(root).decode('us-ascii')
+ body_html_name = 'cppcheck-%s.xml' % self.generator.get_name()
+ if self.env.CPPCHECK_SINGLE_HTML:
+ body_html_name = 'cppcheck.xml'
+ node = self.generator.path.get_bld().find_or_declare(body_html_name)
node.write(header + body)
def _get_defects(self, xml_string):
@@ -244,10 +268,10 @@ class cppcheck(Task.Task):
def _create_html_files(self, defects):
sources = {}
- defects = [defect for defect in defects if defect.has_key('file')]
+ defects = [defect for defect in defects if 'file' in defect]
for defect in defects:
name = defect['file']
- if not sources.has_key(name):
+ if not name in sources:
sources[name] = [defect]
else:
sources[name].append(defect)
@@ -255,10 +279,13 @@ class cppcheck(Task.Task):
files = {}
css_style_defs = None
bpath = self.generator.path.get_bld().abspath()
- names = sources.keys()
+ names = list(sources.keys())
for i in range(0,len(names)):
name = names[i]
- htmlfile = 'cppcheck/%i.html' % (i)
+ if self.env.CPPCHECK_SINGLE_HTML:
+ htmlfile = 'cppcheck/%i.html' % (i)
+ else:
+ htmlfile = 'cppcheck/%s%i.html' % (self.generator.get_name(),i)
errors = sources[name]
files[name] = { 'htmlfile': '%s/%s' % (bpath, htmlfile), 'errors': errors }
css_style_defs = self._create_html_file(name, htmlfile, errors)
@@ -279,19 +306,25 @@ class cppcheck(Task.Task):
if div.get('id') == 'header':
h1 = div.find('h1')
h1.text = 'cppcheck report - %s' % name
+ if div.get('id') == 'menu':
+ indexlink = div.find('a')
+ if self.env.CPPCHECK_SINGLE_HTML:
+ indexlink.attrib['href'] = 'index.html'
+ else:
+ indexlink.attrib['href'] = 'index-%s.html' % name
if div.get('id') == 'content':
content = div
srcnode = self.generator.bld.root.find_node(sourcefile)
- hl_lines = [e['line'] for e in errors if e.has_key('line')]
+ hl_lines = [e['line'] for e in errors if 'line' in e]
formatter = CppcheckHtmlFormatter(linenos=True, style='colorful', hl_lines=hl_lines, lineanchors='line')
- formatter.errors = [e for e in errors if e.has_key('line')]
+ formatter.errors = [e for e in errors if 'line' in e]
css_style_defs = formatter.get_style_defs('.highlight')
lexer = pygments.lexers.guess_lexer_for_filename(sourcefile, "")
s = pygments.highlight(srcnode.read(), lexer, formatter)
table = ElementTree.fromstring(s)
content.append(table)
- s = ElementTree.tostring(root, method='html')
+ s = ElementTree.tostring(root, method='html').decode('us-ascii')
s = CCPCHECK_HTML_TYPE + s
node = self.generator.path.get_bld().find_or_declare(htmlfile)
node.write(s)
@@ -315,10 +348,19 @@ class cppcheck(Task.Task):
if div.get('id') == 'content':
content = div
self._create_html_table(content, files)
+ if div.get('id') == 'menu':
+ indexlink = div.find('a')
+ if self.env.CPPCHECK_SINGLE_HTML:
+ indexlink.attrib['href'] = 'index.html'
+ else:
+ indexlink.attrib['href'] = 'index-%s.html' % name
- s = ElementTree.tostring(root, method='html')
+ s = ElementTree.tostring(root, method='html').decode('us-ascii')
s = CCPCHECK_HTML_TYPE + s
- node = self.generator.path.get_bld().find_or_declare('cppcheck/index.html')
+ index_html_name = 'cppcheck/index-%s.html' % name
+ if self.env.CPPCHECK_SINGLE_HTML:
+ index_html_name = 'cppcheck/index.html'
+ node = self.generator.path.get_bld().find_or_declare(index_html_name)
node.write(s)
return node
@@ -330,9 +372,9 @@ class cppcheck(Task.Task):
row = ElementTree.fromstring(s)
table.append(row)
- errors = sorted(val['errors'], key=lambda e: int(e['line']) if e.has_key('line') else sys.maxint)
+ errors = sorted(val['errors'], key=lambda e: int(e['line']) if 'line' in e else sys.maxint)
for e in errors:
- if not e.has_key('line'):
+ if not 'line' in e:
s = '<tr><td></td><td>%s</td><td>%s</td><td>%s</td></tr>\n' % (e['id'], e['severity'], e['msg'])
else:
attr = ''
@@ -382,7 +424,7 @@ class CppcheckHtmlFormatter(pygments.formatters.HtmlFormatter):
for error in self.errors:
if int(error['line']) == line_no:
t = t.replace('\n', CPPCHECK_HTML_ERROR % error['msg'])
- line_no = line_no + 1
+ line_no += 1
yield i, t
@@ -413,7 +455,7 @@ CPPCHECK_HTML_FILE = """
<div>cppcheck - a tool for static C/C++ code analysis</div>
<div>
Internet: <a href="http://cppcheck.sourceforge.net">http://cppcheck.sourceforge.net</a><br/>
- Forum: <a href="http://apps.sourceforge.net/phpbb/cppcheck/">http://apps.sourceforge.net/phpbb/cppcheck/</a><br/>
+ Forum: <a href="http://apps.sourceforge.net/phpbb/cppcheck/">http://apps.sourceforge.net/phpbb/cppcheck/</a><br/>
IRC: #cppcheck at irc.freenode.net
</div>
&nbsp;
@@ -544,3 +586,4 @@ th, td {
}
"""
+
diff --git a/third_party/waf/waflib/extras/cpplint.py b/third_party/waf/waflib/extras/cpplint.py
index e574ab115d6..eba979b5328 100644
--- a/third_party/waf/waflib/extras/cpplint.py
+++ b/third_party/waf/waflib/extras/cpplint.py
@@ -1,5 +1,9 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
#
# written by Sylvain Rouquette, 2014
@@ -35,6 +39,7 @@ When using this tool, the wscript will look like:
bld(features='cpplint', source=bld.path.ant_glob('**/*.hpp'))
'''
+from __future__ import absolute_import
import sys, re
import logging
import threading
@@ -69,6 +74,9 @@ def options(opt):
help='specify the log level (default: 1)')
opt.add_option('--cpplint-break', default=5, type='int', dest='CPPLINT_BREAK',
help='break the build if error >= level (default: 5)')
+ opt.add_option('--cpplint-root', type='string',
+ default=None, dest='CPPLINT_ROOT',
+ help='root directory used to derive header guard')
opt.add_option('--cpplint-skip', action='store_true',
default=False, dest='CPPLINT_SKIP',
help='skip cpplint during build')
@@ -87,7 +95,7 @@ def configure(conf):
conf.end_msg('not found, skipping it.')
-class cpplint_formatter(Logs.formatter):
+class cpplint_formatter(Logs.formatter, object):
def __init__(self, fmt):
logging.Formatter.__init__(self, CPPLINT_FORMAT)
self.fmt = fmt
@@ -101,7 +109,7 @@ class cpplint_formatter(Logs.formatter):
return super(cpplint_formatter, self).format(rec)
-class cpplint_handler(Logs.log_handler):
+class cpplint_handler(Logs.log_handler, object):
def __init__(self, stream=sys.stderr, **kw):
super(cpplint_handler, self).__init__(stream, **kw)
self.stream = stream
@@ -181,9 +189,10 @@ class cpplint(Task.Task):
global critical_errors
with cpplint_wrapper(get_cpplint_logger(self.env.CPPLINT_OUTPUT), self.env.CPPLINT_BREAK, self.env.CPPLINT_OUTPUT):
if self.env.CPPLINT_OUTPUT != 'waf':
- cpplint_tool._cpplint_state.output_format = self.env.CPPLINT_OUTPUT
- cpplint_tool._cpplint_state.SetFilters(self.env.CPPLINT_FILTERS)
+ cpplint_tool._SetOutputFormat(self.env.CPPLINT_OUTPUT)
+ cpplint_tool._SetFilters(self.env.CPPLINT_FILTERS)
cpplint_tool._line_length = self.env.CPPLINT_LINE_LENGTH
+ cpplint_tool._root = self.env.CPPLINT_ROOT
cpplint_tool.ProcessFile(self.inputs[0].abspath(), self.env.CPPLINT_LEVEL)
return critical_errors
@@ -194,16 +203,16 @@ def cpplint_includes(self, node):
@TaskGen.feature('cpplint')
@TaskGen.before_method('process_source')
def post_cpplint(self):
- if self.env.CPPLINT_SKIP:
- return
-
if not self.env.CPPLINT_INITIALIZED:
for key, value in Options.options.__dict__.items():
if not key.startswith('CPPLINT_') or self.env[key]:
- continue
+ continue
self.env[key] = value
self.env.CPPLINT_INITIALIZED = True
+ if self.env.CPPLINT_SKIP:
+ return
+
if not self.env.CPPLINT_OUTPUT in CPPLINT_RE:
return
diff --git a/third_party/waf/waflib/extras/cross_gnu.py b/third_party/waf/waflib/extras/cross_gnu.py
new file mode 100644
index 00000000000..b6a4036f58e
--- /dev/null
+++ b/third_party/waf/waflib/extras/cross_gnu.py
@@ -0,0 +1,231 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/python
+# -*- coding: utf-8 vi:ts=4:noexpandtab
+# Tool to provide dedicated variables for cross-compilation
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2014"
+
+"""
+This tool allows the use of environment variables to define cross-compilation
+variables intended for build variants.
+
+The variables are obtained from the environment in 3 ways:
+
+1. By defining CHOST, they can be derived as ${CHOST}-${TOOL}
+2. By defining HOST_x
+3. By defining ${CHOST//-/_}_x
+
+else one can set ``cfg.env.CHOST`` in ``wscript`` before loading ``cross_gnu``.
+
+Usage:
+
+- In your build script::
+
+ def configure(cfg):
+ ...
+ for variant in x_variants:
+ setenv(variant)
+ conf.load('cross_gnu')
+ conf.xcheck_host_var('POUET')
+ ...
+
+
+- Then::
+
+ CHOST=arm-hardfloat-linux-gnueabi waf configure
+ env arm-hardfloat-linux-gnueabi-CC="clang -..." waf configure
+ CFLAGS=... CHOST=arm-hardfloat-linux-gnueabi HOST_CFLAGS=-g waf configure
+ HOST_CC="clang -..." waf configure
+
+This example ``wscript`` compiles to Microchip PIC (xc16-gcc-xyz must be in PATH):
+
+.. code:: python
+
+ from waflib import Configure
+
+ #from https://gist.github.com/rpuntaie/2bddfb5d7b77db26415ee14371289971
+ import waf_variants
+
+ variants='pc fw/variant1 fw/variant2'.split()
+
+ top = "."
+ out = "../build"
+
+ PIC = '33FJ128GP804' #dsPICxxx
+
+ @Configure.conf
+ def gcc_modifier_xc16(cfg):
+ v = cfg.env
+ v.cprogram_PATTERN = '%s.elf'
+ v.LINKFLAGS_cprogram = ','.join(['-Wl','','','--defsym=__MPLAB_BUILD=0','','--script=p'+PIC+'.gld',
+ '--stack=16','--check-sections','--data-init','--pack-data','--handles','--isr','--no-gc-sections',
+ '--fill-upper=0','--stackguard=16','--no-force-link','--smart-io']) #,'--report-mem'])
+ v.CFLAGS_cprogram=['-mcpu='+PIC,'-omf=elf','-mlarge-code','-msmart-io=1',
+ '-msfr-warn=off','-mno-override-inline','-finline','-Winline']
+
+ def configure(cfg):
+ if 'fw' in cfg.variant: #firmware
+ cfg.env.DEST_OS = 'xc16' #cfg.env.CHOST = 'xc16' #works too
+ cfg.load('c cross_gnu') #cfg.env.CHOST becomes ['xc16']
+ ...
+ else: #configure for pc SW
+ ...
+
+ def build(bld):
+ if 'fw' in bld.variant: #firmware
+ bld.program(source='maintst.c', target='maintst');
+ bld(source='maintst.elf', target='maintst.hex', rule="xc16-bin2hex ${SRC} -a -omf=elf")
+ else: #build for pc SW
+ ...
+
+"""
+
+import os
+from waflib import Utils, Configure
+from waflib.Tools import ccroot, gcc
+
+try:
+ from shlex import quote
+except ImportError:
+ from pipes import quote
+
+def get_chost_stuff(conf):
+ """
+ Get the CHOST environment variable contents
+ """
+ chost = None
+ chost_envar = None
+ if conf.env.CHOST:
+ chost = conf.env.CHOST[0]
+ chost_envar = chost.replace('-', '_')
+ return chost, chost_envar
+
+
+@Configure.conf
+def xcheck_var(conf, name, wafname=None, cross=False):
+ wafname = wafname or name
+
+ if wafname in conf.env:
+ value = conf.env[wafname]
+ if isinstance(value, str):
+ value = [value]
+ else:
+ envar = os.environ.get(name)
+ if not envar:
+ return
+ value = Utils.to_list(envar) if envar != '' else [envar]
+
+ conf.env[wafname] = value
+ if cross:
+ pretty = 'cross-compilation %s' % wafname
+ else:
+ pretty = wafname
+ conf.msg('Will use %s' % pretty, " ".join(quote(x) for x in value))
+
+@Configure.conf
+def xcheck_host_prog(conf, name, tool, wafname=None):
+ wafname = wafname or name
+
+ chost, chost_envar = get_chost_stuff(conf)
+
+ specific = None
+ if chost:
+ specific = os.environ.get('%s_%s' % (chost_envar, name))
+
+ if specific:
+ value = Utils.to_list(specific)
+ conf.env[wafname] += value
+ conf.msg('Will use cross-compilation %s from %s_%s' % (name, chost_envar, name),
+ " ".join(quote(x) for x in value))
+ return
+ else:
+ envar = os.environ.get('HOST_%s' % name)
+ if envar is not None:
+ value = Utils.to_list(envar)
+ conf.env[wafname] = value
+ conf.msg('Will use cross-compilation %s from HOST_%s' % (name, name),
+ " ".join(quote(x) for x in value))
+ return
+
+ if conf.env[wafname]:
+ return
+
+ value = None
+ if chost:
+ value = '%s-%s' % (chost, tool)
+
+ if value:
+ conf.env[wafname] = value
+ conf.msg('Will use cross-compilation %s from CHOST' % wafname, value)
+
+@Configure.conf
+def xcheck_host_envar(conf, name, wafname=None):
+ wafname = wafname or name
+
+ chost, chost_envar = get_chost_stuff(conf)
+
+ specific = None
+ if chost:
+ specific = os.environ.get('%s_%s' % (chost_envar, name))
+
+ if specific:
+ value = Utils.to_list(specific)
+ conf.env[wafname] += value
+ conf.msg('Will use cross-compilation %s from %s_%s' \
+ % (name, chost_envar, name),
+ " ".join(quote(x) for x in value))
+ return
+
+
+ envar = os.environ.get('HOST_%s' % name)
+ if envar is None:
+ return
+
+ value = Utils.to_list(envar) if envar != '' else [envar]
+
+ conf.env[wafname] = value
+ conf.msg('Will use cross-compilation %s from HOST_%s' % (name, name),
+ " ".join(quote(x) for x in value))
+
+
+@Configure.conf
+def xcheck_host(conf):
+ conf.xcheck_var('CHOST', cross=True)
+ conf.env.CHOST = conf.env.CHOST or [conf.env.DEST_OS]
+ conf.env.DEST_OS = conf.env.CHOST[0].replace('-','_')
+ conf.xcheck_host_prog('CC', 'gcc')
+ conf.xcheck_host_prog('CXX', 'g++')
+ conf.xcheck_host_prog('LINK_CC', 'gcc')
+ conf.xcheck_host_prog('LINK_CXX', 'g++')
+ conf.xcheck_host_prog('AR', 'ar')
+ conf.xcheck_host_prog('AS', 'as')
+ conf.xcheck_host_prog('LD', 'ld')
+ conf.xcheck_host_envar('CFLAGS')
+ conf.xcheck_host_envar('CXXFLAGS')
+ conf.xcheck_host_envar('LDFLAGS', 'LINKFLAGS')
+ conf.xcheck_host_envar('LIB')
+ conf.xcheck_host_envar('PKG_CONFIG_LIBDIR')
+ conf.xcheck_host_envar('PKG_CONFIG_PATH')
+
+ if not conf.env.env:
+ conf.env.env = {}
+ conf.env.env.update(os.environ)
+ if conf.env.PKG_CONFIG_LIBDIR:
+ conf.env.env['PKG_CONFIG_LIBDIR'] = conf.env.PKG_CONFIG_LIBDIR[0]
+ if conf.env.PKG_CONFIG_PATH:
+ conf.env.env['PKG_CONFIG_PATH'] = conf.env.PKG_CONFIG_PATH[0]
+
+def configure(conf):
+ """
+ Configuration example for gcc, it will not work for g++/clang/clang++
+ """
+ conf.xcheck_host()
+ conf.gcc_common_flags()
+ conf.gcc_modifier_platform()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
diff --git a/third_party/waf/waflib/extras/cython.py b/third_party/waf/waflib/extras/cython.py
index 26d1c6f147c..20e94d051d8 100644
--- a/third_party/waf/waflib/extras/cython.py
+++ b/third_party/waf/waflib/extras/cython.py
@@ -1,8 +1,12 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
# Thomas Nagy, 2010-2015
-import os, re
+import re
from waflib import Task, Logs
from waflib.TaskGen import extension
@@ -68,9 +72,9 @@ class cython(Task.Task):
def post_run(self):
for x in self.outputs:
if x.name.endswith('.h'):
- if not os.path.exists(x.abspath()):
+ if not x.exists():
if Logs.verbose:
- Logs.warn('Expected %r' % x.abspath())
+ Logs.warn('Expected %r', x.abspath())
x.write('')
return Task.Task.post_run(self)
@@ -92,7 +96,7 @@ class cython(Task.Task):
else:
mods.append(m.group(2))
- Logs.debug("cython: mods %r" % mods)
+ Logs.debug('cython: mods %r', mods)
incs = getattr(self.generator, 'cython_includes', [])
incs = [self.generator.path.find_dir(x) for x in incs]
incs.append(node.parent)
@@ -113,7 +117,7 @@ class cython(Task.Task):
if implicit:
found.append(implicit)
- Logs.debug("cython: found %r" % found)
+ Logs.debug('cython: found %r', found)
# Now the .h created - store them in bld.raw_deps for later use
has_api = False
@@ -143,3 +147,4 @@ def configure(ctx):
ctx.find_program('cython', var='CYTHON')
if ctx.options.cython_flags:
ctx.env.CYTHONFLAGS = ctx.options.cython_flags
+
diff --git a/third_party/waf/waflib/extras/dcc.py b/third_party/waf/waflib/extras/dcc.py
index 8fd209686bb..d1b85bc56e6 100644
--- a/third_party/waf/waflib/extras/dcc.py
+++ b/third_party/waf/waflib/extras/dcc.py
@@ -1,3 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Jérôme Carretero, 2011 (zougloub)
@@ -33,7 +37,8 @@ def dcc_common_flags(conf):
v['CC_TGT_F'] = ['-c', '-o']
# linker
- if not v['LINK_CC']: v['LINK_CC'] = v['CC']
+ if not v['LINK_CC']:
+ v['LINK_CC'] = v['CC']
v['CCLNK_SRC_F'] = []
v['CCLNK_TGT_F'] = ['-o']
v['CPPPATH_ST'] = '-I%s'
@@ -68,3 +73,4 @@ def options(opt):
Add the ``--with-diab-bindir`` command-line options.
"""
opt.add_option('--with-diab-bindir', type='string', dest='diabbindir', help = 'Specify alternate diab bin folder', default="")
+
diff --git a/third_party/waf/waflib/extras/distnet.py b/third_party/waf/waflib/extras/distnet.py
index ac8c34491e1..a8dd2d2eb42 100644
--- a/third_party/waf/waflib/extras/distnet.py
+++ b/third_party/waf/waflib/extras/distnet.py
@@ -1,5 +1,9 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
"""
waf-powered distributed network builds, with a network cache.
@@ -107,10 +111,10 @@ class package(Context.Context):
tarinfo.name = os.path.split(x)[1]
else:
tarinfo.name = endname + x # todo, if tuple, then..
- Logs.debug("adding %r to %s" % (tarinfo.name, filename))
+ Logs.debug('distnet: adding %r to %s', tarinfo.name, filename)
with open(x, 'rb') as f:
tar.addfile(tarinfo, f)
- Logs.info('Created %s' % filename)
+ Logs.info('Created %s', filename)
class publish(Context.Context):
fun = 'publish'
@@ -223,7 +227,7 @@ class package_reader(Context.Context):
try:
response = urlopen(req, timeout=TIMEOUT)
except URLError as e:
- Logs.warn('The package server is down! %r' % e)
+ Logs.warn('The package server is down! %r', e)
self.constraints = self.local_resolve(text)
else:
ret = response.read()
@@ -243,11 +247,11 @@ class package_reader(Context.Context):
reasons = c.why()
if len(reasons) == 1:
- Logs.error('%s but no matching package could be found in this repository' % reasons[0])
+ Logs.error('%s but no matching package could be found in this repository', reasons[0])
else:
- Logs.error('Conflicts on package %r:' % c.pkgname)
+ Logs.error('Conflicts on package %r:', c.pkgname)
for r in reasons:
- Logs.error(' %s' % r)
+ Logs.error(' %s', r)
if errors:
self.fatal('The package requirements cannot be satisfied!')
@@ -255,7 +259,6 @@ class package_reader(Context.Context):
try:
return self.cache_constraints[(pkgname, pkgver)]
except KeyError:
- #Logs.error("no key %r" % (pkgname, pkgver))
text = Utils.readf(os.path.join(get_distnet_cache(), pkgname, pkgver, requires))
ret = parse_constraints(text)
self.cache_constraints[(pkgname, pkgver)] = ret
@@ -329,7 +332,7 @@ class package_reader(Context.Context):
def solution_to_constraints(self, versions, constraints):
solution = []
- for p in versions.keys():
+ for p in versions:
c = constraint()
solution.append(c)
@@ -429,3 +432,4 @@ def configure(conf):
def build(bld):
load_tools(bld, bld.variant)
+
diff --git a/third_party/waf/waflib/extras/doxygen.py b/third_party/waf/waflib/extras/doxygen.py
index 9e17595f89a..f6e951b8f32 100644
--- a/third_party/waf/waflib/extras/doxygen.py
+++ b/third_party/waf/waflib/extras/doxygen.py
@@ -1,4 +1,8 @@
#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
# encoding: UTF-8
# Thomas Nagy 2008-2010 (ita)
@@ -119,9 +123,10 @@ class doxygen(Task.Task):
def scan(self):
exclude_patterns = self.pars.get('EXCLUDE_PATTERNS','').split()
+ exclude_patterns = [pattern.replace('*/', '**/') for pattern in exclude_patterns]
file_patterns = self.pars.get('FILE_PATTERNS','').split()
if not file_patterns:
- file_patterns = DOXY_FILE_PATTERNS
+ file_patterns = DOXY_FILE_PATTERNS.split()
if self.pars.get('RECURSIVE') == 'YES':
file_patterns = ["**/%s" % pattern for pattern in file_patterns]
nodes = []
@@ -148,7 +153,7 @@ class doxygen(Task.Task):
def post_run(self):
nodes = self.output_dir.ant_glob('**/*', quiet=True)
for x in nodes:
- x.sig = Utils.h_file(x.abspath())
+ self.generator.bld.node_sigs[x] = self.uid()
self.add_install()
return Task.Task.post_run(self)
@@ -157,8 +162,8 @@ class doxygen(Task.Task):
self.outputs += nodes
if getattr(self.generator, 'install_path', None):
if not getattr(self.generator, 'doxy_tar', None):
- self.generator.bld.install_files(self.generator.install_path,
- self.outputs,
+ self.generator.add_install_files(install_to=self.generator.install_path,
+ install_from=self.outputs,
postpone=False,
cwd=self.output_dir,
relative_trick=True)
@@ -211,7 +216,7 @@ def process_doxy(self):
else:
tsk.env['TAROPTS'] = ['cf']
if getattr(self, 'install_path', None):
- self.bld.install_files(self.install_path, tsk.outputs)
+ self.add_install_files(install_to=self.install_path, install_from=tsk.outputs)
def configure(conf):
'''
diff --git a/third_party/waf/waflib/extras/dpapi.py b/third_party/waf/waflib/extras/dpapi.py
index 4df64bfa4e5..0e226b1d776 100644
--- a/third_party/waf/waflib/extras/dpapi.py
+++ b/third_party/waf/waflib/extras/dpapi.py
@@ -1,5 +1,9 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
# Matt Clarkson, 2012
'''
@@ -35,7 +39,7 @@ def get_data(blob_out):
pbData = blob_out.pbData
buffer = c_buffer(cbData)
memcpy(buffer, pbData, cbData)
- LocalFree(pbData);
+ LocalFree(pbData)
return buffer.raw
@conf
@@ -56,7 +60,7 @@ def dpapi_encrypt_data(self, input_bytes, entropy = extra_entropy):
blob_entropy = DATA_BLOB(len(entropy), buffer_entropy)
blob_out = DATA_BLOB()
- if CryptProtectData(byref(blob_in), 'python_data', byref(blob_entropy),
+ if CryptProtectData(byref(blob_in), 'python_data', byref(blob_entropy),
None, None, CRYPTPROTECT_UI_FORBIDDEN, byref(blob_out)):
return get_data(blob_out)
else:
@@ -84,3 +88,4 @@ def dpapi_decrypt_data(self, encrypted_bytes, entropy = extra_entropy):
return get_data(blob_out)
else:
self.fatal('Failed to decrypt data')
+
diff --git a/third_party/waf/waflib/extras/eclipse.py b/third_party/waf/waflib/extras/eclipse.py
new file mode 100644
index 00000000000..9209a2546b6
--- /dev/null
+++ b/third_party/waf/waflib/extras/eclipse.py
@@ -0,0 +1,385 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# Eclipse CDT 5.0 generator for Waf
+# Richard Quirk 2009-1011 (New BSD License)
+# Thomas Nagy 2011 (ported to Waf 1.6)
+
+"""
+Usage:
+
+def options(opt):
+ opt.load('eclipse')
+
+$ waf configure eclipse
+"""
+
+import sys, os
+from waflib import Utils, Logs, Context, Build, TaskGen, Scripting, Errors, Node
+from xml.dom.minidom import Document
+
+STANDARD_INCLUDES = [ '/usr/local/include', '/usr/include' ]
+
+oe_cdt = 'org.eclipse.cdt'
+cdt_mk = oe_cdt + '.make.core'
+cdt_core = oe_cdt + '.core'
+cdt_bld = oe_cdt + '.build.core'
+
+class eclipse(Build.BuildContext):
+ cmd = 'eclipse'
+ fun = Scripting.default_cmd
+
+ def execute(self):
+ """
+ Entry point
+ """
+ self.restore()
+ if not self.all_envs:
+ self.load_envs()
+ self.recurse([self.run_dir])
+
+ appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath()))
+ self.create_cproject(appname, pythonpath=self.env['ECLIPSE_PYTHON_PATH'])
+
+ def create_cproject(self, appname, workspace_includes=[], pythonpath=[]):
+ """
+ Create the Eclipse CDT .project and .cproject files
+ @param appname The name that will appear in the Project Explorer
+ @param build The BuildContext object to extract includes from
+ @param workspace_includes Optional project includes to prevent
+ "Unresolved Inclusion" errors in the Eclipse editor
+ @param pythonpath Optional project specific python paths
+ """
+ hasc = hasjava = haspython = False
+ source_dirs = []
+ cpppath = self.env['CPPPATH']
+ javasrcpath = []
+ includes = STANDARD_INCLUDES
+ if sys.platform != 'win32':
+ cc = self.env.CC or self.env.CXX
+ if cc:
+ cmd = cc + ['-xc++', '-E', '-Wp,-v', '-']
+ try:
+ gccout = self.cmd_and_log(cmd, output=Context.STDERR, quiet=Context.BOTH, input='\n'.encode()).splitlines()
+ except Errors.WafError:
+ pass
+ else:
+ includes = []
+ for ipath in gccout:
+ if ipath.startswith(' /'):
+ includes.append(ipath[1:])
+ cpppath += includes
+ Logs.warn('Generating Eclipse CDT project files')
+
+ for g in self.groups:
+ for tg in g:
+ if not isinstance(tg, TaskGen.task_gen):
+ continue
+
+ # Add local Python modules paths to configuration so object resolving will work in IDE
+ if 'py' in tg.features:
+ pypath = tg.path.relpath()
+ py_installfrom = getattr(tg, 'install_from', None)
+ if py_installfrom:
+ pypath += os.sep + py_installfrom
+ pythonpath.append(pypath)
+ haspython = True
+
+
+ # Add Java source directories so object resolving works in IDE
+ if 'java' in tg.features:
+ java_src = tg.path.relpath()
+ java_srcdir = getattr(tg, 'srcdir', None)
+ if java_srcdir:
+ if isinstance(java_srcdir, Node.Node):
+ java_srcdir = [java_srcdir]
+ for x in Utils.to_list(java_srcdir):
+ if isinstance(x, Node.Node):
+ x = x.name
+ if java_src == '.':
+ this_src = x
+ else:
+ this_src = java_src + os.sep + x
+ javasrcpath.append(this_src)
+ else:
+ javasrcpath.append(java_src)
+ hasjava = True
+
+ tg.post()
+ if not getattr(tg, 'link_task', None):
+ continue
+
+ features = Utils.to_list(getattr(tg, 'features', ''))
+
+ is_cc = 'c' in features or 'cxx' in features
+
+ incnodes = tg.to_incnodes(tg.to_list(getattr(tg, 'includes', [])) + tg.env['INCLUDES'])
+ for p in incnodes:
+ path = p.path_from(self.srcnode)
+
+ if (path.startswith("/")):
+ cpppath.append(path)
+ else:
+ workspace_includes.append(path)
+
+ if is_cc and path not in source_dirs:
+ source_dirs.append(path)
+
+ hasc = True
+
+ project = self.impl_create_project(sys.executable, appname, hasc, hasjava, haspython)
+ self.srcnode.make_node('.project').write(project.toprettyxml())
+
+ if hasc:
+ waf = os.path.abspath(sys.argv[0])
+ project = self.impl_create_cproject(sys.executable, waf, appname, workspace_includes, cpppath, source_dirs)
+ self.srcnode.make_node('.cproject').write(project.toprettyxml())
+
+ if haspython:
+ project = self.impl_create_pydevproject(sys.path, pythonpath)
+ self.srcnode.make_node('.pydevproject').write(project.toprettyxml())
+
+ if hasjava:
+ project = self.impl_create_javaproject(javasrcpath)
+ self.srcnode.make_node('.classpath').write(project.toprettyxml())
+
+ def impl_create_project(self, executable, appname, hasc, hasjava, haspython):
+ doc = Document()
+ projectDescription = doc.createElement('projectDescription')
+ self.add(doc, projectDescription, 'name', appname)
+ self.add(doc, projectDescription, 'comment')
+ self.add(doc, projectDescription, 'projects')
+ buildSpec = self.add(doc, projectDescription, 'buildSpec')
+ buildCommand = self.add(doc, buildSpec, 'buildCommand')
+ self.add(doc, buildCommand, 'name', oe_cdt + '.managedbuilder.core.genmakebuilder')
+ self.add(doc, buildCommand, 'triggers', 'clean,full,incremental,')
+ arguments = self.add(doc, buildCommand, 'arguments')
+ # the default make-style targets are overwritten by the .cproject values
+ dictionaries = {
+ cdt_mk + '.contents': cdt_mk + '.activeConfigSettings',
+ cdt_mk + '.enableAutoBuild': 'false',
+ cdt_mk + '.enableCleanBuild': 'true',
+ cdt_mk + '.enableFullBuild': 'true',
+ }
+ for k, v in dictionaries.items():
+ self.addDictionary(doc, arguments, k, v)
+
+ natures = self.add(doc, projectDescription, 'natures')
+
+ if hasc:
+ nature_list = """
+ core.ccnature
+ managedbuilder.core.ScannerConfigNature
+ managedbuilder.core.managedBuildNature
+ core.cnature
+ """.split()
+ for n in nature_list:
+ self.add(doc, natures, 'nature', oe_cdt + '.' + n)
+
+ if haspython:
+ self.add(doc, natures, 'nature', 'org.python.pydev.pythonNature')
+ if hasjava:
+ self.add(doc, natures, 'nature', 'org.eclipse.jdt.core.javanature')
+
+ doc.appendChild(projectDescription)
+ return doc
+
+ def impl_create_cproject(self, executable, waf, appname, workspace_includes, cpppath, source_dirs=[]):
+ doc = Document()
+ doc.appendChild(doc.createProcessingInstruction('fileVersion', '4.0.0'))
+ cconf_id = cdt_core + '.default.config.1'
+ cproject = doc.createElement('cproject')
+ storageModule = self.add(doc, cproject, 'storageModule',
+ {'moduleId': cdt_core + '.settings'})
+ cconf = self.add(doc, storageModule, 'cconfiguration', {'id':cconf_id})
+
+ storageModule = self.add(doc, cconf, 'storageModule',
+ {'buildSystemId': oe_cdt + '.managedbuilder.core.configurationDataProvider',
+ 'id': cconf_id,
+ 'moduleId': cdt_core + '.settings',
+ 'name': 'Default'})
+
+ self.add(doc, storageModule, 'externalSettings')
+
+ extensions = self.add(doc, storageModule, 'extensions')
+ extension_list = """
+ VCErrorParser
+ MakeErrorParser
+ GCCErrorParser
+ GASErrorParser
+ GLDErrorParser
+ """.split()
+ self.add(doc, extensions, 'extension', {'id': cdt_core + '.ELF', 'point':cdt_core + '.BinaryParser'})
+ for e in extension_list:
+ self.add(doc, extensions, 'extension', {'id': cdt_core + '.' + e, 'point':cdt_core + '.ErrorParser'})
+
+ storageModule = self.add(doc, cconf, 'storageModule',
+ {'moduleId': 'cdtBuildSystem', 'version': '4.0.0'})
+ config = self.add(doc, storageModule, 'configuration',
+ {'artifactName': appname,
+ 'id': cconf_id,
+ 'name': 'Default',
+ 'parent': cdt_bld + '.prefbase.cfg'})
+ folderInfo = self.add(doc, config, 'folderInfo',
+ {'id': cconf_id+'.', 'name': '/', 'resourcePath': ''})
+
+ toolChain = self.add(doc, folderInfo, 'toolChain',
+ {'id': cdt_bld + '.prefbase.toolchain.1',
+ 'name': 'No ToolChain',
+ 'resourceTypeBasedDiscovery': 'false',
+ 'superClass': cdt_bld + '.prefbase.toolchain'})
+
+ self.add(doc, toolChain, 'targetPlatform', {'binaryParser': 'org.eclipse.cdt.core.ELF', 'id': cdt_bld + '.prefbase.toolchain.1', 'name': ''})
+
+ waf_build = '"%s" %s'%(waf, eclipse.fun)
+ waf_clean = '"%s" clean'%(waf)
+ self.add(doc, toolChain, 'builder',
+ {'autoBuildTarget': waf_build,
+ 'command': executable,
+ 'enableAutoBuild': 'false',
+ 'cleanBuildTarget': waf_clean,
+ 'enableIncrementalBuild': 'true',
+ 'id': cdt_bld + '.settings.default.builder.1',
+ 'incrementalBuildTarget': waf_build,
+ 'managedBuildOn': 'false',
+ 'name': 'Gnu Make Builder',
+ 'superClass': cdt_bld + '.settings.default.builder'})
+
+ tool_index = 1;
+ for tool_name in ("Assembly", "GNU C++", "GNU C"):
+ tool = self.add(doc, toolChain, 'tool',
+ {'id': cdt_bld + '.settings.holder.' + str(tool_index),
+ 'name': tool_name,
+ 'superClass': cdt_bld + '.settings.holder'})
+ if cpppath or workspace_includes:
+ incpaths = cdt_bld + '.settings.holder.incpaths'
+ option = self.add(doc, tool, 'option',
+ {'id': incpaths + '.' + str(tool_index),
+ 'name': 'Include Paths',
+ 'superClass': incpaths,
+ 'valueType': 'includePath'})
+ for i in workspace_includes:
+ self.add(doc, option, 'listOptionValue',
+ {'builtIn': 'false',
+ 'value': '"${workspace_loc:/%s/%s}"'%(appname, i)})
+ for i in cpppath:
+ self.add(doc, option, 'listOptionValue',
+ {'builtIn': 'false',
+ 'value': '"%s"'%(i)})
+ if tool_name == "GNU C++" or tool_name == "GNU C":
+ self.add(doc,tool,'inputType',{ 'id':'org.eclipse.cdt.build.core.settings.holder.inType.' + str(tool_index), \
+ 'languageId':'org.eclipse.cdt.core.gcc' if tool_name == "GNU C" else 'org.eclipse.cdt.core.g++','languageName':tool_name, \
+ 'sourceContentType':'org.eclipse.cdt.core.cSource,org.eclipse.cdt.core.cHeader', \
+ 'superClass':'org.eclipse.cdt.build.core.settings.holder.inType' })
+ tool_index += 1
+
+ if source_dirs:
+ sourceEntries = self.add(doc, config, 'sourceEntries')
+ for i in source_dirs:
+ self.add(doc, sourceEntries, 'entry',
+ {'excluding': i,
+ 'flags': 'VALUE_WORKSPACE_PATH|RESOLVED',
+ 'kind': 'sourcePath',
+ 'name': ''})
+ self.add(doc, sourceEntries, 'entry',
+ {
+ 'flags': 'VALUE_WORKSPACE_PATH|RESOLVED',
+ 'kind': 'sourcePath',
+ 'name': i})
+
+ storageModule = self.add(doc, cconf, 'storageModule',
+ {'moduleId': cdt_mk + '.buildtargets'})
+ buildTargets = self.add(doc, storageModule, 'buildTargets')
+ def addTargetWrap(name, runAll):
+ return self.addTarget(doc, buildTargets, executable, name,
+ '"%s" %s'%(waf, name), runAll)
+ addTargetWrap('configure', True)
+ addTargetWrap('dist', False)
+ addTargetWrap('install', False)
+ addTargetWrap('check', False)
+
+ storageModule = self.add(doc, cproject, 'storageModule',
+ {'moduleId': 'cdtBuildSystem',
+ 'version': '4.0.0'})
+
+ self.add(doc, storageModule, 'project', {'id': '%s.null.1'%appname, 'name': appname})
+
+ doc.appendChild(cproject)
+ return doc
+
+ def impl_create_pydevproject(self, system_path, user_path):
+ # create a pydevproject file
+ doc = Document()
+ doc.appendChild(doc.createProcessingInstruction('eclipse-pydev', 'version="1.0"'))
+ pydevproject = doc.createElement('pydev_project')
+ prop = self.add(doc, pydevproject,
+ 'pydev_property',
+ 'python %d.%d'%(sys.version_info[0], sys.version_info[1]))
+ prop.setAttribute('name', 'org.python.pydev.PYTHON_PROJECT_VERSION')
+ prop = self.add(doc, pydevproject, 'pydev_property', 'Default')
+ prop.setAttribute('name', 'org.python.pydev.PYTHON_PROJECT_INTERPRETER')
+ # add waf's paths
+ wafadmin = [p for p in system_path if p.find('wafadmin') != -1]
+ if wafadmin:
+ prop = self.add(doc, pydevproject, 'pydev_pathproperty',
+ {'name':'org.python.pydev.PROJECT_EXTERNAL_SOURCE_PATH'})
+ for i in wafadmin:
+ self.add(doc, prop, 'path', i)
+ if user_path:
+ prop = self.add(doc, pydevproject, 'pydev_pathproperty',
+ {'name':'org.python.pydev.PROJECT_SOURCE_PATH'})
+ for i in user_path:
+ self.add(doc, prop, 'path', '/${PROJECT_DIR_NAME}/'+i)
+
+ doc.appendChild(pydevproject)
+ return doc
+
+ def impl_create_javaproject(self, javasrcpath):
+ # create a .classpath file for java usage
+ doc = Document()
+ javaproject = doc.createElement('classpath')
+ if javasrcpath:
+ for i in javasrcpath:
+ self.add(doc, javaproject, 'classpathentry',
+ {'kind': 'src', 'path': i})
+
+ self.add(doc, javaproject, 'classpathentry', {'kind': 'con', 'path': 'org.eclipse.jdt.launching.JRE_CONTAINER'})
+ self.add(doc, javaproject, 'classpathentry', {'kind': 'output', 'path': self.bldnode.name })
+ doc.appendChild(javaproject)
+ return doc
+
+ def addDictionary(self, doc, parent, k, v):
+ dictionary = self.add(doc, parent, 'dictionary')
+ self.add(doc, dictionary, 'key', k)
+ self.add(doc, dictionary, 'value', v)
+ return dictionary
+
+ def addTarget(self, doc, buildTargets, executable, name, buildTarget, runAllBuilders=True):
+ target = self.add(doc, buildTargets, 'target',
+ {'name': name,
+ 'path': '',
+ 'targetID': oe_cdt + '.build.MakeTargetBuilder'})
+ self.add(doc, target, 'buildCommand', executable)
+ self.add(doc, target, 'buildArguments', None)
+ self.add(doc, target, 'buildTarget', buildTarget)
+ self.add(doc, target, 'stopOnError', 'true')
+ self.add(doc, target, 'useDefaultCommand', 'false')
+ self.add(doc, target, 'runAllBuilders', str(runAllBuilders).lower())
+
+ def add(self, doc, parent, tag, value = None):
+ el = doc.createElement(tag)
+ if (value):
+ if type(value) == type(str()):
+ el.appendChild(doc.createTextNode(value))
+ elif type(value) == type(dict()):
+ self.setAttributes(el, value)
+ parent.appendChild(el)
+ return el
+
+ def setAttributes(self, node, attrs):
+ for k, v in attrs.items():
+ node.setAttribute(k, v)
+
diff --git a/third_party/waf/waflib/extras/erlang.py b/third_party/waf/waflib/extras/erlang.py
new file mode 100644
index 00000000000..b2aa5a16579
--- /dev/null
+++ b/third_party/waf/waflib/extras/erlang.py
@@ -0,0 +1,114 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010 (ita)
+# Przemyslaw Rzepecki, 2016
+
+"""
+Erlang support
+"""
+
+import re
+from waflib import Task, TaskGen
+from waflib.TaskGen import feature, after_method, before_method
+# to load the method "to_incnodes" below
+from waflib.Tools import ccroot
+
+# Those flags are required by the Erlang VM to execute/evaluate code in
+# non-interactive mode. It is used in this tool to create Erlang modules
+# documentation and run unit tests. The user can pass additional arguments to the
+# 'erl' command with ERL_FLAGS environment variable.
+EXEC_NON_INTERACTIVE = ['-noshell', '-noinput', '-eval']
+
+def configure(conf):
+ conf.find_program('erlc', var='ERLC')
+ conf.find_program('erl', var='ERL')
+ conf.add_os_flags('ERLC_FLAGS')
+ conf.add_os_flags('ERL_FLAGS')
+ conf.env.ERLC_DEF_PATTERN = '-D%s'
+ conf.env.ERLC_INC_PATTERN = '-I%s'
+
+@TaskGen.extension('.erl')
+def process_erl_node(self, node):
+ tsk = self.create_task('erl', node, node.change_ext('.beam'))
+ tsk.erlc_incnodes = [tsk.outputs[0].parent] + self.to_incnodes(self.includes)
+ tsk.env.append_value('ERLC_INCPATHS', [x.abspath() for x in tsk.erlc_incnodes])
+ tsk.env.append_value('ERLC_DEFINES', self.to_list(getattr(self, 'defines', [])))
+ tsk.env.append_value('ERLC_FLAGS', self.to_list(getattr(self, 'flags', [])))
+ tsk.cwd = tsk.outputs[0].parent
+
+class erl(Task.Task):
+ color = 'GREEN'
+ run_str = '${ERLC} ${ERL_FLAGS} ${ERLC_INC_PATTERN:ERLC_INCPATHS} ${ERLC_DEF_PATTERN:ERLC_DEFINES} ${SRC}'
+
+ def scan(task):
+ node = task.inputs[0]
+
+ deps = []
+ scanned = set([])
+ nodes_to_scan = [node]
+
+ for n in nodes_to_scan:
+ if n.abspath() in scanned:
+ continue
+
+ for i in re.findall('-include\("(.*)"\)\.', n.read()):
+ for d in task.erlc_incnodes:
+ r = d.find_node(i)
+ if r:
+ deps.append(r)
+ nodes_to_scan.append(r)
+ break
+ scanned.add(n.abspath())
+
+ return (deps, [])
+
+@TaskGen.extension('.beam')
+def process(self, node):
+ pass
+
+
+class erl_test(Task.Task):
+ color = 'BLUE'
+ run_str = '${ERL} ${ERL_FLAGS} ${ERL_TEST_FLAGS}'
+
+@feature('eunit')
+@after_method('process_source')
+def add_erl_test_run(self):
+ test_modules = [t.outputs[0] for t in self.tasks]
+ test_task = self.create_task('erl_test')
+ test_task.set_inputs(self.source + test_modules)
+ test_task.cwd = test_modules[0].parent
+
+ test_task.env.append_value('ERL_FLAGS', self.to_list(getattr(self, 'flags', [])))
+
+ test_list = ", ".join([m.change_ext("").path_from(test_task.cwd)+":test()" for m in test_modules])
+ test_flag = 'halt(case lists:all(fun(Elem) -> Elem == ok end, [%s]) of true -> 0; false -> 1 end).' % test_list
+ test_task.env.append_value('ERL_TEST_FLAGS', EXEC_NON_INTERACTIVE)
+ test_task.env.append_value('ERL_TEST_FLAGS', test_flag)
+
+
+class edoc(Task.Task):
+ color = 'BLUE'
+ run_str = "${ERL} ${ERL_FLAGS} ${ERL_DOC_FLAGS}"
+ def keyword(self):
+ return 'Generating edoc'
+
+@feature('edoc')
+@before_method('process_source')
+def add_edoc_task(self):
+ # do not process source, it would create double erl->beam task
+ self.meths.remove('process_source')
+ e = self.path.find_resource(self.source)
+ t = e.change_ext('.html')
+ png = t.parent.make_node('erlang.png')
+ css = t.parent.make_node('stylesheet.css')
+ tsk = self.create_task('edoc', e, [t, png, css])
+ tsk.cwd = tsk.outputs[0].parent
+ tsk.env.append_value('ERL_DOC_FLAGS', EXEC_NON_INTERACTIVE)
+ tsk.env.append_value('ERL_DOC_FLAGS', 'edoc:files(["%s"]), halt(0).' % tsk.inputs[0].abspath())
+ # TODO the above can break if a file path contains '"'
+
diff --git a/third_party/waf/waflib/extras/fast_partial.py b/third_party/waf/waflib/extras/fast_partial.py
new file mode 100644
index 00000000000..bd9b2b833dc
--- /dev/null
+++ b/third_party/waf/waflib/extras/fast_partial.py
@@ -0,0 +1,522 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2017-2018 (ita)
+
+"""
+A system for fast partial rebuilds
+
+Creating a large amount of task objects up front can take some time.
+By making a few assumptions, it is possible to avoid posting creating
+task objects for targets that are already up-to-date.
+
+On a silly benchmark the gain observed for 1M tasks can be 5m->10s
+for a single file change.
+
+Usage::
+
+ def options(opt):
+ opt.load('fast_partial')
+
+Assuptions:
+* Mostly for C/C++/Fortran targets with link tasks (object-only targets are not handled)
+* For full project builds: no --targets and no pruning from subfolders
+* The installation phase is ignored
+* `use=` dependencies are specified up front even across build groups
+* Task generator source files are not obtained from globs
+
+Implementation details:
+* The first layer obtains file timestamps to recalculate file hashes only
+ when necessary (similar to md5_tstamp); the timestamps are then stored
+ in a dedicated pickle file
+* A second layer associates each task generator to a file set to help
+ detecting changes. Task generators are to create their tasks only when
+ the related files have been modified. A specific db file is created
+ to store such data (5m -> 1m10)
+* A third layer binds build context proxies onto task generators, replacing
+ the default context. While loading data for the full build uses more memory
+ (4GB -> 9GB), partial builds are then much faster (1m10 -> 13s)
+* A fourth layer enables a 2-level cache on file signatures to
+ reduce the size of the main pickle file (13s -> 10s)
+"""
+
+import os
+from waflib import Build, Context, Errors, Logs, Task, TaskGen, Utils
+from waflib.TaskGen import feature, after_method, taskgen_method
+import waflib.Node
+
+DONE = 0
+DIRTY = 1
+NEEDED = 2
+
+SKIPPABLE = ['cshlib', 'cxxshlib', 'cstlib', 'cxxstlib', 'cprogram', 'cxxprogram']
+
+TSTAMP_DB = '.wafpickle_tstamp_db_file'
+
+SAVED_ATTRS = 'root node_sigs task_sigs imp_sigs raw_deps node_deps'.split()
+
+class bld_proxy(object):
+ def __init__(self, bld):
+ object.__setattr__(self, 'bld', bld)
+
+ object.__setattr__(self, 'node_class', type('Nod3', (waflib.Node.Node,), {}))
+ self.node_class.__module__ = 'waflib.Node'
+ self.node_class.ctx = self
+
+ object.__setattr__(self, 'root', self.node_class('', None))
+ for x in SAVED_ATTRS:
+ if x != 'root':
+ object.__setattr__(self, x, {})
+
+ self.fix_nodes()
+
+ def __setattr__(self, name, value):
+ bld = object.__getattribute__(self, 'bld')
+ setattr(bld, name, value)
+
+ def __delattr__(self, name):
+ bld = object.__getattribute__(self, 'bld')
+ delattr(bld, name)
+
+ def __getattribute__(self, name):
+ try:
+ return object.__getattribute__(self, name)
+ except AttributeError:
+ bld = object.__getattribute__(self, 'bld')
+ return getattr(bld, name)
+
+ def __call__(self, *k, **kw):
+ return self.bld(*k, **kw)
+
+ def fix_nodes(self):
+ for x in ('srcnode', 'path', 'bldnode'):
+ node = self.root.find_dir(getattr(self.bld, x).abspath())
+ object.__setattr__(self, x, node)
+
+ def set_key(self, store_key):
+ object.__setattr__(self, 'store_key', store_key)
+
+ def fix_tg_path(self, *tgs):
+ # changing Node objects on task generators is possible
+ # yet, all Node objects must belong to the same parent
+ for tg in tgs:
+ tg.path = self.root.make_node(tg.path.abspath())
+
+ def restore(self):
+ dbfn = os.path.join(self.variant_dir, Context.DBFILE + self.store_key)
+ Logs.debug('rev_use: reading %s', dbfn)
+ try:
+ data = Utils.readf(dbfn, 'rb')
+ except (EnvironmentError, EOFError):
+ # handle missing file/empty file
+ Logs.debug('rev_use: Could not load the build cache %s (missing)', dbfn)
+ else:
+ try:
+ waflib.Node.pickle_lock.acquire()
+ waflib.Node.Nod3 = self.node_class
+ try:
+ data = Build.cPickle.loads(data)
+ except Exception as e:
+ Logs.debug('rev_use: Could not pickle the build cache %s: %r', dbfn, e)
+ else:
+ for x in SAVED_ATTRS:
+ object.__setattr__(self, x, data.get(x, {}))
+ finally:
+ waflib.Node.pickle_lock.release()
+ self.fix_nodes()
+
+ def store(self):
+ data = {}
+ for x in Build.SAVED_ATTRS:
+ data[x] = getattr(self, x)
+ db = os.path.join(self.variant_dir, Context.DBFILE + self.store_key)
+
+ try:
+ waflib.Node.pickle_lock.acquire()
+ waflib.Node.Nod3 = self.node_class
+ x = Build.cPickle.dumps(data, Build.PROTOCOL)
+ finally:
+ waflib.Node.pickle_lock.release()
+
+ Logs.debug('rev_use: storing %s', db)
+ Utils.writef(db + '.tmp', x, m='wb')
+ try:
+ st = os.stat(db)
+ os.remove(db)
+ if not Utils.is_win32:
+ os.chown(db + '.tmp', st.st_uid, st.st_gid)
+ except (AttributeError, OSError):
+ pass
+ os.rename(db + '.tmp', db)
+
+class bld(Build.BuildContext):
+ def __init__(self, **kw):
+ super(bld, self).__init__(**kw)
+ self.hashes_md5_tstamp = {}
+
+ def __call__(self, *k, **kw):
+ # this is one way of doing it, one could use a task generator method too
+ bld = kw['bld'] = bld_proxy(self)
+ ret = TaskGen.task_gen(*k, **kw)
+ self.task_gen_cache_names = {}
+ self.add_to_group(ret, group=kw.get('group'))
+ ret.bld = bld
+ bld.set_key(ret.path.abspath().replace(os.sep, '') + str(ret.idx))
+ return ret
+
+ def is_dirty(self):
+ return True
+
+ def store_tstamps(self):
+ # Called after a build is finished
+ # For each task generator, record all files involved in task objects
+ # optimization: done only if there was something built
+ do_store = False
+ try:
+ f_deps = self.f_deps
+ except AttributeError:
+ f_deps = self.f_deps = {}
+ self.f_tstamps = {}
+
+ allfiles = set()
+ for g in self.groups:
+ for tg in g:
+ try:
+ staleness = tg.staleness
+ except AttributeError:
+ staleness = DIRTY
+
+ if staleness != DIRTY:
+ # DONE case: there was nothing built
+ # NEEDED case: the tg was brought in because of 'use' propagation
+ # but nothing really changed for them, there may be incomplete
+ # tasks (object files) and in this case it is best to let the next build
+ # figure out if an input/output file changed
+ continue
+
+ do_cache = False
+ for tsk in tg.tasks:
+ if tsk.hasrun == Task.SUCCESS:
+ do_cache = True
+ pass
+ elif tsk.hasrun == Task.SKIPPED:
+ pass
+ else:
+ # one failed task, clear the cache for this tg
+ try:
+ del f_deps[(tg.path.abspath(), tg.idx)]
+ except KeyError:
+ pass
+ else:
+ # just store the new state because there is a change
+ do_store = True
+
+ # skip the rest because there is no valid cache possible
+ break
+ else:
+ if not do_cache:
+ # all skipped, but is there anything in cache?
+ try:
+ f_deps[(tg.path.abspath(), tg.idx)]
+ except KeyError:
+ # probably cleared because a wscript file changed
+ # store it
+ do_cache = True
+
+ if do_cache:
+
+ # there was a rebuild, store the data structure too
+ tg.bld.store()
+
+ # all tasks skipped but no cache
+ # or a successful task build
+ do_store = True
+ st = set()
+ for tsk in tg.tasks:
+ st.update(tsk.inputs)
+ st.update(self.node_deps.get(tsk.uid(), []))
+
+ # TODO do last/when loading the tgs?
+ lst = []
+ for k in ('wscript', 'wscript_build'):
+ n = tg.path.find_node(k)
+ if n:
+ n.get_bld_sig()
+ lst.append(n.abspath())
+
+ lst.extend(sorted(x.abspath() for x in st))
+ allfiles.update(lst)
+ f_deps[(tg.path.abspath(), tg.idx)] = lst
+
+ for x in allfiles:
+ # f_tstamps has everything, while md5_tstamp can be relatively empty on partial builds
+ self.f_tstamps[x] = self.hashes_md5_tstamp[x][0]
+
+ if do_store:
+ dbfn = os.path.join(self.variant_dir, TSTAMP_DB)
+ Logs.debug('rev_use: storing %s', dbfn)
+ dbfn_tmp = dbfn + '.tmp'
+ x = Build.cPickle.dumps([self.f_tstamps, f_deps], Build.PROTOCOL)
+ Utils.writef(dbfn_tmp, x, m='wb')
+ os.rename(dbfn_tmp, dbfn)
+ Logs.debug('rev_use: stored %s', dbfn)
+
+ def store(self):
+ self.store_tstamps()
+ if self.producer.dirty:
+ Build.BuildContext.store(self)
+
+ def compute_needed_tgs(self):
+ # assume the 'use' keys are not modified during the build phase
+
+ dbfn = os.path.join(self.variant_dir, TSTAMP_DB)
+ Logs.debug('rev_use: Loading %s', dbfn)
+ try:
+ data = Utils.readf(dbfn, 'rb')
+ except (EnvironmentError, EOFError):
+ Logs.debug('rev_use: Could not load the build cache %s (missing)', dbfn)
+ self.f_deps = {}
+ self.f_tstamps = {}
+ else:
+ try:
+ self.f_tstamps, self.f_deps = Build.cPickle.loads(data)
+ except Exception as e:
+ Logs.debug('rev_use: Could not pickle the build cache %s: %r', dbfn, e)
+ self.f_deps = {}
+ self.f_tstamps = {}
+ else:
+ Logs.debug('rev_use: Loaded %s', dbfn)
+
+
+ # 1. obtain task generators that contain rebuilds
+ # 2. obtain the 'use' graph and its dual
+ stales = set()
+ reverse_use_map = Utils.defaultdict(list)
+ use_map = Utils.defaultdict(list)
+
+ for g in self.groups:
+ for tg in g:
+ if tg.is_stale():
+ stales.add(tg)
+
+ try:
+ lst = tg.use = Utils.to_list(tg.use)
+ except AttributeError:
+ pass
+ else:
+ for x in lst:
+ try:
+ xtg = self.get_tgen_by_name(x)
+ except Errors.WafError:
+ pass
+ else:
+ use_map[tg].append(xtg)
+ reverse_use_map[xtg].append(tg)
+
+ Logs.debug('rev_use: found %r stale tgs', len(stales))
+
+ # 3. dfs to post downstream tg as stale
+ visited = set()
+ def mark_down(tg):
+ if tg in visited:
+ return
+ visited.add(tg)
+ Logs.debug('rev_use: marking down %r as stale', tg.name)
+ tg.staleness = DIRTY
+ for x in reverse_use_map[tg]:
+ mark_down(x)
+ for tg in stales:
+ mark_down(tg)
+
+ # 4. dfs to find ancestors tg to mark as needed
+ self.needed_tgs = needed_tgs = set()
+ def mark_needed(tg):
+ if tg in needed_tgs:
+ return
+ needed_tgs.add(tg)
+ if tg.staleness == DONE:
+ Logs.debug('rev_use: marking up %r as needed', tg.name)
+ tg.staleness = NEEDED
+ for x in use_map[tg]:
+ mark_needed(x)
+ for xx in visited:
+ mark_needed(xx)
+
+ # so we have the whole tg trees to post in the set "needed"
+ # load their build trees
+ for tg in needed_tgs:
+ tg.bld.restore()
+ tg.bld.fix_tg_path(tg)
+
+ # the stale ones should be fully build, while the needed ones
+ # may skip a few tasks, see create_compiled_task and apply_link_after below
+ Logs.debug('rev_use: amount of needed task gens: %r', len(needed_tgs))
+
+ def post_group(self):
+ # assumption: we can ignore the folder/subfolders cuts
+ def tgpost(tg):
+ try:
+ f = tg.post
+ except AttributeError:
+ pass
+ else:
+ f()
+
+ if not self.targets or self.targets == '*':
+ for tg in self.groups[self.current_group]:
+ # this can cut quite a lot of tg objects
+ if tg in self.needed_tgs:
+ tgpost(tg)
+ else:
+ # default implementation
+ return Build.BuildContext.post_group()
+
+ def get_build_iterator(self):
+ if not self.targets or self.targets == '*':
+ self.compute_needed_tgs()
+ return Build.BuildContext.get_build_iterator(self)
+
+@taskgen_method
+def is_stale(self):
+ # assume no globs
+ self.staleness = DIRTY
+
+ # 1. the case of always stale targets
+ if getattr(self, 'always_stale', False):
+ return True
+
+ # 2. check if the db file exists
+ db = os.path.join(self.bld.variant_dir, Context.DBFILE)
+ try:
+ dbstat = os.stat(db).st_mtime
+ except OSError:
+ Logs.debug('rev_use: must post %r because this is a clean build')
+ return True
+
+ # 3. check if the configuration changed
+ if os.stat(self.bld.bldnode.find_node('c4che/build.config.py').abspath()).st_mtime > dbstat:
+ Logs.debug('rev_use: must post %r because the configuration has changed', self.name)
+ return True
+
+ # 3.a any tstamp data?
+ try:
+ f_deps = self.bld.f_deps
+ except AttributeError:
+ Logs.debug('rev_use: must post %r because there is no f_deps', self.name)
+ return True
+
+ # 4. check if this is the first build (no cache)
+ try:
+ lst = f_deps[(self.path.abspath(), self.idx)]
+ except KeyError:
+ Logs.debug('rev_use: must post %r because there it has no cached data', self.name)
+ return True
+
+ try:
+ cache = self.bld.cache_tstamp_rev_use
+ except AttributeError:
+ cache = self.bld.cache_tstamp_rev_use = {}
+
+ # 5. check the timestamp of each dependency files listed is unchanged
+ f_tstamps = self.bld.f_tstamps
+ for x in lst:
+ try:
+ old_ts = f_tstamps[x]
+ except KeyError:
+ Logs.debug('rev_use: must post %r because %r is not in cache', self.name, x)
+ return True
+
+ try:
+ try:
+ ts = cache[x]
+ except KeyError:
+ ts = cache[x] = os.stat(x).st_mtime
+ except OSError:
+ del f_deps[(self.path.abspath(), self.idx)]
+ Logs.debug('rev_use: must post %r because %r does not exist anymore', self.name, x)
+ return True
+ else:
+ if ts != old_ts:
+ Logs.debug('rev_use: must post %r because the timestamp on %r changed %r %r', self.name, x, old_ts, ts)
+ return True
+
+ self.staleness = DONE
+ return False
+
+@taskgen_method
+def create_compiled_task(self, name, node):
+ # skip the creation of object files
+ # assumption: object-only targets are not skippable
+ if self.staleness == NEEDED:
+ # only libraries/programs can skip object files
+ for x in SKIPPABLE:
+ if x in self.features:
+ return None
+
+ out = '%s.%d.o' % (node.name, self.idx)
+ task = self.create_task(name, node, node.parent.find_or_declare(out))
+ try:
+ self.compiled_tasks.append(task)
+ except AttributeError:
+ self.compiled_tasks = [task]
+ return task
+
+@feature(*SKIPPABLE)
+@after_method('apply_link')
+def apply_link_after(self):
+ # cprogram/cxxprogram might be unnecessary
+ if self.staleness != NEEDED:
+ return
+ for tsk in self.tasks:
+ tsk.hasrun = Task.SKIPPED
+
+def path_from(self, node):
+ # handle nodes of distinct types
+ if node.ctx is not self.ctx:
+ node = self.ctx.root.make_node(node.abspath())
+ return self.default_path_from(node)
+waflib.Node.Node.default_path_from = waflib.Node.Node.path_from
+waflib.Node.Node.path_from = path_from
+
+def h_file(self):
+ # similar to md5_tstamp.py, but with 2-layer cache
+ # global_cache for the build context common for all task generators
+ # local_cache for the build context proxy (one by task generator)
+ #
+ # the global cache is not persistent
+ # the local cache is persistent and meant for partial builds
+ #
+ # assume all calls are made from a single thread
+ #
+ filename = self.abspath()
+ st = os.stat(filename)
+
+ global_cache = self.ctx.bld.hashes_md5_tstamp
+ local_cache = self.ctx.hashes_md5_tstamp
+
+ if filename in global_cache:
+ # value already calculated in this build
+ cval = global_cache[filename]
+
+ # the value in global cache is assumed to be calculated once
+ # reverifying it could cause task generators
+ # to get distinct tstamp values, thus missing rebuilds
+ local_cache[filename] = cval
+ return cval[1]
+
+ if filename in local_cache:
+ cval = local_cache[filename]
+ if cval[0] == st.st_mtime:
+ # correct value from a previous build
+ # put it in the global cache
+ global_cache[filename] = cval
+ return cval[1]
+
+ ret = Utils.h_file(filename)
+ local_cache[filename] = global_cache[filename] = (st.st_mtime, ret)
+ return ret
+waflib.Node.Node.h_file = h_file
+
diff --git a/third_party/waf/waflib/extras/fc_bgxlf.py b/third_party/waf/waflib/extras/fc_bgxlf.py
new file mode 100644
index 00000000000..c58e7ac0181
--- /dev/null
+++ b/third_party/waf/waflib/extras/fc_bgxlf.py
@@ -0,0 +1,36 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+from waflib.Tools import fc, fc_config, fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].insert(0, 'fc_bgxlf')
+
+@conf
+def find_bgxlf(conf):
+ fc = conf.find_program(['bgxlf2003_r','bgxlf2003'], var='FC')
+ conf.get_xlf_version(fc)
+ conf.env.FC_NAME = 'BGXLF'
+
+@conf
+def bg_flags(self):
+ self.env.SONAME_ST = ''
+ self.env.FCSHLIB_MARKER = ''
+ self.env.FCSTLIB_MARKER = ''
+ self.env.FCFLAGS_fcshlib = ['-fPIC']
+ self.env.LINKFLAGS_fcshlib = ['-G', '-Wl,-bexpfull']
+
+def configure(conf):
+ conf.find_bgxlf()
+ conf.find_ar()
+ conf.fc_flags()
+ conf.fc_add_flags()
+ conf.xlf_flags()
+ conf.bg_flags()
+
diff --git a/third_party/waf/waflib/extras/fc_cray.py b/third_party/waf/waflib/extras/fc_cray.py
new file mode 100644
index 00000000000..0ae9f7ec91a
--- /dev/null
+++ b/third_party/waf/waflib/extras/fc_cray.py
@@ -0,0 +1,55 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib.Tools import fc, fc_config, fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].append('fc_cray')
+
+@conf
+def find_crayftn(conf):
+ """Find the Cray fortran compiler (will look in the environment variable 'FC')"""
+ fc = conf.find_program(['crayftn'], var='FC')
+ conf.get_crayftn_version(fc)
+ conf.env.FC_NAME = 'CRAY'
+ conf.env.FC_MOD_CAPITALIZATION = 'UPPER.mod'
+
+@conf
+def crayftn_flags(conf):
+ v = conf.env
+	v['_FCMODOUTFLAGS']  = ['-em', '-J.'] # enable module files and put them in the current directory
+ v['FCFLAGS_DEBUG'] = ['-m1'] # more verbose compiler warnings
+ v['FCFLAGS_fcshlib'] = ['-h pic']
+ v['LINKFLAGS_fcshlib'] = ['-h shared']
+
+ v['FCSTLIB_MARKER'] = '-h static'
+ v['FCSHLIB_MARKER'] = '-h dynamic'
+
+@conf
+def get_crayftn_version(conf, fc):
+ version_re = re.compile(r"Cray Fortran\s*:\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
+ cmd = fc + ['-V']
+ out,err = fc_config.getoutput(conf, cmd, stdin=False)
+ if out:
+ match = version_re(out)
+ else:
+ match = version_re(err)
+ if not match:
+ conf.fatal('Could not determine the Cray Fortran compiler version.')
+ k = match.groupdict()
+ conf.env['FC_VERSION'] = (k['major'], k['minor'])
+
+def configure(conf):
+ conf.find_crayftn()
+ conf.find_ar()
+ conf.fc_flags()
+ conf.fc_add_flags()
+ conf.crayftn_flags()
+
diff --git a/third_party/waf/waflib/extras/fc_nag.py b/third_party/waf/waflib/extras/fc_nag.py
new file mode 100644
index 00000000000..1779e1988f5
--- /dev/null
+++ b/third_party/waf/waflib/extras/fc_nag.py
@@ -0,0 +1,65 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib import Utils
+from waflib.Tools import fc,fc_config,fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].insert(0, 'fc_nag')
+
+@conf
+def find_nag(conf):
+ """Find the NAG Fortran Compiler (will look in the environment variable 'FC')"""
+
+ fc = conf.find_program(['nagfor'], var='FC')
+ conf.get_nag_version(fc)
+ conf.env.FC_NAME = 'NAG'
+ conf.env.FC_MOD_CAPITALIZATION = 'lower'
+
+@conf
+def nag_flags(conf):
+ v = conf.env
+ v.FCFLAGS_DEBUG = ['-C=all']
+ v.FCLNK_TGT_F = ['-o', '']
+ v.FC_TGT_F = ['-c', '-o', '']
+
+@conf
+def nag_modifier_platform(conf):
+ dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
+ nag_modifier_func = getattr(conf, 'nag_modifier_' + dest_os, None)
+ if nag_modifier_func:
+ nag_modifier_func()
+
+@conf
+def get_nag_version(conf, fc):
+ """Get the NAG compiler version"""
+
+ version_re = re.compile(r"^NAG Fortran Compiler *Release *(?P<major>\d*)\.(?P<minor>\d*)", re.M).search
+ cmd = fc + ['-V']
+
+ out, err = fc_config.getoutput(conf,cmd,stdin=False)
+ if out:
+ match = version_re(out)
+ if not match:
+ match = version_re(err)
+ else: match = version_re(err)
+ if not match:
+ conf.fatal('Could not determine the NAG version.')
+ k = match.groupdict()
+ conf.env['FC_VERSION'] = (k['major'], k['minor'])
+
+def configure(conf):
+ conf.find_nag()
+ conf.find_ar()
+ conf.fc_flags()
+ conf.fc_add_flags()
+ conf.nag_flags()
+ conf.nag_modifier_platform()
+
diff --git a/third_party/waf/waflib/extras/fc_nec.py b/third_party/waf/waflib/extras/fc_nec.py
new file mode 100644
index 00000000000..2d0ca71ab39
--- /dev/null
+++ b/third_party/waf/waflib/extras/fc_nec.py
@@ -0,0 +1,64 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib.Tools import fc, fc_config, fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].append('fc_nec')
+
+@conf
+def find_sxfc(conf):
+ """Find the NEC fortran compiler (will look in the environment variable 'FC')"""
+ fc = conf.find_program(['sxf90','sxf03'], var='FC')
+ conf.get_sxfc_version(fc)
+ conf.env.FC_NAME = 'NEC'
+ conf.env.FC_MOD_CAPITALIZATION = 'lower'
+
+@conf
+def sxfc_flags(conf):
+ v = conf.env
+	v['_FCMODOUTFLAGS'] = [] # enable module files and put them in the current directory
+ v['FCFLAGS_DEBUG'] = [] # more verbose compiler warnings
+ v['FCFLAGS_fcshlib'] = []
+ v['LINKFLAGS_fcshlib'] = []
+
+ v['FCSTLIB_MARKER'] = ''
+ v['FCSHLIB_MARKER'] = ''
+
+@conf
+def get_sxfc_version(conf, fc):
+ version_re = re.compile(r"FORTRAN90/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
+ cmd = fc + ['-V']
+ out,err = fc_config.getoutput(conf, cmd, stdin=False)
+ if out:
+ match = version_re(out)
+ else:
+ match = version_re(err)
+ if not match:
+ version_re=re.compile(r"NEC Fortran 2003 Compiler for\s*(?P<major>\S*)\s*\(c\)\s*(?P<minor>\d*)",re.I).search
+ if out:
+ match = version_re(out)
+ else:
+ match = version_re(err)
+ if not match:
+ conf.fatal('Could not determine the NEC Fortran compiler version.')
+ k = match.groupdict()
+ conf.env['FC_VERSION'] = (k['major'], k['minor'])
+
+def configure(conf):
+ conf.find_sxfc()
+ conf.find_program('sxar',var='AR')
+ conf.add_os_flags('ARFLAGS')
+ if not conf.env.ARFLAGS:
+ conf.env.ARFLAGS=['rcs']
+
+ conf.fc_flags()
+ conf.fc_add_flags()
+ conf.sxfc_flags()
diff --git a/third_party/waf/waflib/extras/fc_open64.py b/third_party/waf/waflib/extras/fc_open64.py
new file mode 100644
index 00000000000..6001b2083a6
--- /dev/null
+++ b/third_party/waf/waflib/extras/fc_open64.py
@@ -0,0 +1,62 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib import Utils
+from waflib.Tools import fc,fc_config,fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].insert(0, 'fc_open64')
+
+@conf
+def find_openf95(conf):
+ """Find the Open64 Fortran Compiler (will look in the environment variable 'FC')"""
+
+ fc = conf.find_program(['openf95', 'openf90'], var='FC')
+ conf.get_open64_version(fc)
+ conf.env.FC_NAME = 'OPEN64'
+ conf.env.FC_MOD_CAPITALIZATION = 'UPPER.mod'
+
+@conf
+def openf95_flags(conf):
+ v = conf.env
+ v['FCFLAGS_DEBUG'] = ['-fullwarn']
+
+@conf
+def openf95_modifier_platform(conf):
+ dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
+ openf95_modifier_func = getattr(conf, 'openf95_modifier_' + dest_os, None)
+ if openf95_modifier_func:
+ openf95_modifier_func()
+
+@conf
+def get_open64_version(conf, fc):
+ """Get the Open64 compiler version"""
+
+ version_re = re.compile(r"Open64 Compiler Suite: *Version *(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
+ cmd = fc + ['-version']
+
+ out, err = fc_config.getoutput(conf,cmd,stdin=False)
+ if out:
+ match = version_re(out)
+ else:
+ match = version_re(err)
+ if not match:
+ conf.fatal('Could not determine the Open64 version.')
+ k = match.groupdict()
+ conf.env['FC_VERSION'] = (k['major'], k['minor'])
+
+def configure(conf):
+ conf.find_openf95()
+ conf.find_ar()
+ conf.fc_flags()
+ conf.fc_add_flags()
+ conf.openf95_flags()
+ conf.openf95_modifier_platform()
+
diff --git a/third_party/waf/waflib/extras/fc_pgfortran.py b/third_party/waf/waflib/extras/fc_pgfortran.py
new file mode 100644
index 00000000000..bc62279a306
--- /dev/null
+++ b/third_party/waf/waflib/extras/fc_pgfortran.py
@@ -0,0 +1,72 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib.Tools import fc, fc_config, fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].append('fc_pgfortran')
+
+@conf
+def find_pgfortran(conf):
+ """Find the PGI fortran compiler (will look in the environment variable 'FC')"""
+ fc = conf.find_program(['pgfortran', 'pgf95', 'pgf90'], var='FC')
+ conf.get_pgfortran_version(fc)
+ conf.env.FC_NAME = 'PGFC'
+
+@conf
+def pgfortran_flags(conf):
+ v = conf.env
+ v['FCFLAGS_fcshlib'] = ['-shared']
+ v['FCFLAGS_DEBUG'] = ['-Minform=inform', '-Mstandard'] # why not
+ v['FCSTLIB_MARKER'] = '-Bstatic'
+ v['FCSHLIB_MARKER'] = '-Bdynamic'
+ v['SONAME_ST'] = '-soname %s'
+
+@conf
+def get_pgfortran_version(conf,fc):
+ version_re = re.compile(r"The Portland Group", re.I).search
+ cmd = fc + ['-V']
+ out,err = fc_config.getoutput(conf, cmd, stdin=False)
+ if out:
+ match = version_re(out)
+ else:
+ match = version_re(err)
+ if not match:
+ conf.fatal('Could not verify PGI signature')
+ cmd = fc + ['-help=variable']
+ out,err = fc_config.getoutput(conf, cmd, stdin=False)
+ if out.find('COMPVER')<0:
+ conf.fatal('Could not determine the compiler type')
+ k = {}
+ prevk = ''
+ out = out.splitlines()
+ for line in out:
+ lst = line.partition('=')
+ if lst[1] == '=':
+ key = lst[0].rstrip()
+ if key == '':
+ key = prevk
+ val = lst[2].rstrip()
+ k[key] = val
+ else:
+ prevk = line.partition(' ')[0]
+ def isD(var):
+ return var in k
+ def isT(var):
+ return var in k and k[var]!='0'
+ conf.env['FC_VERSION'] = (k['COMPVER'].split('.'))
+
+def configure(conf):
+ conf.find_pgfortran()
+ conf.find_ar()
+ conf.fc_flags()
+ conf.fc_add_flags()
+ conf.pgfortran_flags()
+
diff --git a/third_party/waf/waflib/extras/fc_solstudio.py b/third_party/waf/waflib/extras/fc_solstudio.py
new file mode 100644
index 00000000000..8880d31c58e
--- /dev/null
+++ b/third_party/waf/waflib/extras/fc_solstudio.py
@@ -0,0 +1,66 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib import Utils
+from waflib.Tools import fc,fc_config,fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['linux'].append('fc_solstudio')
+
+@conf
+def find_solstudio(conf):
+ """Find the Solaris Studio compiler (will look in the environment variable 'FC')"""
+
+ fc = conf.find_program(['sunf95', 'f95', 'sunf90', 'f90'], var='FC')
+ conf.get_solstudio_version(fc)
+ conf.env.FC_NAME = 'SOL'
+
+@conf
+def solstudio_flags(conf):
+ v = conf.env
+ v['FCFLAGS_fcshlib'] = ['-Kpic']
+ v['FCFLAGS_DEBUG'] = ['-w3']
+ v['LINKFLAGS_fcshlib'] = ['-G']
+ v['FCSTLIB_MARKER'] = '-Bstatic'
+ v['FCSHLIB_MARKER'] = '-Bdynamic'
+ v['SONAME_ST'] = '-h %s'
+
+@conf
+def solstudio_modifier_platform(conf):
+ dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
+ solstudio_modifier_func = getattr(conf, 'solstudio_modifier_' + dest_os, None)
+ if solstudio_modifier_func:
+ solstudio_modifier_func()
+
+@conf
+def get_solstudio_version(conf, fc):
+ """Get the compiler version"""
+
+ version_re = re.compile(r"Sun Fortran 95 *(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
+ cmd = fc + ['-V']
+
+ out, err = fc_config.getoutput(conf,cmd,stdin=False)
+ if out:
+ match = version_re(out)
+ else:
+ match = version_re(err)
+ if not match:
+ conf.fatal('Could not determine the Sun Studio Fortran version.')
+ k = match.groupdict()
+ conf.env['FC_VERSION'] = (k['major'], k['minor'])
+
+def configure(conf):
+ conf.find_solstudio()
+ conf.find_ar()
+ conf.fc_flags()
+ conf.fc_add_flags()
+ conf.solstudio_flags()
+ conf.solstudio_modifier_platform()
+
diff --git a/third_party/waf/waflib/extras/fc_xlf.py b/third_party/waf/waflib/extras/fc_xlf.py
new file mode 100644
index 00000000000..fe8ea4539ce
--- /dev/null
+++ b/third_party/waf/waflib/extras/fc_xlf.py
@@ -0,0 +1,67 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+import re
+from waflib import Utils,Errors
+from waflib.Tools import fc,fc_config,fc_scan
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_fc import fc_compiler
+fc_compiler['aix'].insert(0, 'fc_xlf')
+
+@conf
+def find_xlf(conf):
+ """Find the xlf program (will look in the environment variable 'FC')"""
+
+ fc = conf.find_program(['xlf2003_r', 'xlf2003', 'xlf95_r', 'xlf95', 'xlf90_r', 'xlf90', 'xlf_r', 'xlf'], var='FC')
+ conf.get_xlf_version(fc)
+ conf.env.FC_NAME='XLF'
+
+@conf
+def xlf_flags(conf):
+ v = conf.env
+ v['FCDEFINES_ST'] = '-WF,-D%s'
+ v['FCFLAGS_fcshlib'] = ['-qpic=small']
+ v['FCFLAGS_DEBUG'] = ['-qhalt=w']
+ v['LINKFLAGS_fcshlib'] = ['-Wl,-shared']
+
+@conf
+def xlf_modifier_platform(conf):
+ dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
+ xlf_modifier_func = getattr(conf, 'xlf_modifier_' + dest_os, None)
+ if xlf_modifier_func:
+ xlf_modifier_func()
+
+@conf
+def get_xlf_version(conf, fc):
+ """Get the compiler version"""
+
+ cmd = fc + ['-qversion']
+ try:
+ out, err = conf.cmd_and_log(cmd, output=0)
+ except Errors.WafError:
+ conf.fatal('Could not find xlf %r' % cmd)
+
+ for v in (r"IBM XL Fortran.* V(?P<major>\d*)\.(?P<minor>\d*)",):
+ version_re = re.compile(v, re.I).search
+ match = version_re(out or err)
+ if match:
+ k = match.groupdict()
+ conf.env['FC_VERSION'] = (k['major'], k['minor'])
+ break
+ else:
+ conf.fatal('Could not determine the XLF version.')
+
+def configure(conf):
+ conf.find_xlf()
+ conf.find_ar()
+ conf.fc_flags()
+ conf.fc_add_flags()
+ conf.xlf_flags()
+ conf.xlf_modifier_platform()
+
diff --git a/third_party/waf/waflib/extras/file_to_object.py b/third_party/waf/waflib/extras/file_to_object.py
index c2e8809b3ec..a295998e9e6 100644
--- a/third_party/waf/waflib/extras/file_to_object.py
+++ b/third_party/waf/waflib/extras/file_to_object.py
@@ -1,3 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Tool to embed file into objects
@@ -39,7 +43,7 @@ def filename_c_escape(x):
class file_to_object_s(Task.Task):
color = 'CYAN'
- dep_vars = ('DEST_CPU', 'DEST_BINFMT')
+ vars = ['DEST_CPU', 'DEST_BINFMT']
def run(self):
name = []
@@ -134,3 +138,4 @@ def tg_file_to_object(self):
def configure(conf):
conf.load('gas')
conf.env.F2O_METHOD = ["c"]
+
diff --git a/third_party/waf/waflib/extras/fluid.py b/third_party/waf/waflib/extras/fluid.py
new file mode 100644
index 00000000000..caa8afdfbc0
--- /dev/null
+++ b/third_party/waf/waflib/extras/fluid.py
@@ -0,0 +1,34 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/python
+# encoding: utf-8
+# Grygoriy Fuchedzhy 2009
+
+"""
+Compile fluid files (fltk graphic library). Use the 'fluid' feature in conjunction with the 'cxx' feature.
+"""
+
+from waflib import Task
+from waflib.TaskGen import extension
+
+class fluid(Task.Task):
+ color = 'BLUE'
+ ext_out = ['.h']
+ run_str = '${FLUID} -c -o ${TGT[0].abspath()} -h ${TGT[1].abspath()} ${SRC}'
+
+@extension('.fl')
+def process_fluid(self, node):
+ """add the .fl to the source list; the cxx file generated will be compiled when possible"""
+ cpp = node.change_ext('.cpp')
+ hpp = node.change_ext('.hpp')
+ self.create_task('fluid', node, [cpp, hpp])
+
+ if 'cxx' in self.features:
+ self.source.append(cpp)
+
+def configure(conf):
+ conf.find_program('fluid', var='FLUID')
+ conf.check_cfg(path='fltk-config', package='', args='--cxxflags --ldflags', uselib_store='FLTK', mandatory=True)
+
diff --git a/third_party/waf/waflib/extras/freeimage.py b/third_party/waf/waflib/extras/freeimage.py
index 8933abe2edc..59d557a1cf7 100644
--- a/third_party/waf/waflib/extras/freeimage.py
+++ b/third_party/waf/waflib/extras/freeimage.py
@@ -1,3 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
#
@@ -71,3 +75,4 @@ def configure(conf):
if platform == 'win32' and not conf.options.fi_path:
return
conf.check_freeimage(conf.options.fi_path, conf.options.fip)
+
diff --git a/third_party/waf/waflib/extras/fsb.py b/third_party/waf/waflib/extras/fsb.py
index ba475d815d3..d31e33a4597 100644
--- a/third_party/waf/waflib/extras/fsb.py
+++ b/third_party/waf/waflib/extras/fsb.py
@@ -1,5 +1,9 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
# Thomas Nagy, 2011 (ita)
"""
@@ -28,3 +32,4 @@ class FSBContext(Build.BuildContext):
def compile(self):
pass
+
diff --git a/third_party/waf/waflib/extras/fsc.py b/third_party/waf/waflib/extras/fsc.py
new file mode 100644
index 00000000000..6325de4a40b
--- /dev/null
+++ b/third_party/waf/waflib/extras/fsc.py
@@ -0,0 +1,68 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+"""
+Experimental F# stuff
+
+FSC="mono /path/to/fsc.exe" waf configure build
+"""
+
+from waflib import Utils, Task
+from waflib.TaskGen import before_method, after_method, feature
+from waflib.Tools import ccroot, cs
+
+ccroot.USELIB_VARS['fsc'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES'])
+
+@feature('fs')
+@before_method('process_source')
+def apply_fsc(self):
+ cs_nodes = []
+ no_nodes = []
+ for x in self.to_nodes(self.source):
+ if x.name.endswith('.fs'):
+ cs_nodes.append(x)
+ else:
+ no_nodes.append(x)
+ self.source = no_nodes
+
+ bintype = getattr(self, 'type', self.gen.endswith('.dll') and 'library' or 'exe')
+ self.cs_task = tsk = self.create_task('fsc', cs_nodes, self.path.find_or_declare(self.gen))
+ tsk.env.CSTYPE = '/target:%s' % bintype
+ tsk.env.OUT = '/out:%s' % tsk.outputs[0].abspath()
+
+ inst_to = getattr(self, 'install_path', bintype=='exe' and '${BINDIR}' or '${LIBDIR}')
+ if inst_to:
+ # note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically
+ mod = getattr(self, 'chmod', bintype=='exe' and Utils.O755 or Utils.O644)
+ self.install_task = self.add_install_files(install_to=inst_to, install_from=self.cs_task.outputs[:], chmod=mod)
+
+feature('fs')(cs.use_cs)
+after_method('apply_fsc')(cs.use_cs)
+
+feature('fs')(cs.debug_cs)
+after_method('apply_fsc', 'use_cs')(cs.debug_cs)
+
+class fsc(Task.Task):
+ """
+ Compile F# files
+ """
+ color = 'YELLOW'
+ run_str = '${FSC} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'
+
+def configure(conf):
+ """
+ Find a F# compiler, set the variable FSC for the compiler and FS_NAME (mono or fsc)
+ """
+ conf.find_program(['fsc.exe', 'fsharpc'], var='FSC')
+ conf.env.ASS_ST = '/r:%s'
+ conf.env.RES_ST = '/resource:%s'
+
+ conf.env.FS_NAME = 'fsc'
+ if str(conf.env.FSC).lower().find('fsharpc') > -1:
+ conf.env.FS_NAME = 'mono'
+
diff --git a/third_party/waf/waflib/extras/gccdeps.py b/third_party/waf/waflib/extras/gccdeps.py
index 26b8bdba938..81f60d5aaca 100644
--- a/third_party/waf/waflib/extras/gccdeps.py
+++ b/third_party/waf/waflib/extras/gccdeps.py
@@ -11,9 +11,12 @@ Execute the tasks with gcc -MD, read the dependencies from the .d file
and prepare the dependency calculation for the next run.
This affects the cxx class, so make sure to load Qt5 after this tool.
-Usage:
+Usage::
+
+ def options(opt):
+ opt.load('compiler_cxx')
def configure(conf):
- conf.load('gccdeps')
+ conf.load('compiler_cxx gccdeps')
"""
import os, re, threading
@@ -209,3 +212,7 @@ def configure(conf):
else:
conf.env.append_value('CXXFLAGS', gccdeps_flags)
conf.env.append_unique('ENABLE_GCCDEPS', 'cxx')
+
+def options(opt):
+ raise ValueError('Do not load gccdeps options')
+
diff --git a/third_party/waf/waflib/extras/go.py b/third_party/waf/waflib/extras/go.py
deleted file mode 100644
index 2ba54b8116a..00000000000
--- a/third_party/waf/waflib/extras/go.py
+++ /dev/null
@@ -1,255 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Tom Wambold tom5760 gmail.com 2009
-# Thomas Nagy 2010
-
-"""
-Go as a language may look nice, but its toolchain is one of the worse a developer
-has ever seen. It keeps changing though, and I would like to believe that it will get
-better eventually, but the crude reality is that this tool and the examples are
-getting broken every few months.
-
-If you have been lured into trying to use Go, you should stick to their Makefiles.
-"""
-
-import os, platform
-
-from waflib import Utils, Task, TaskGen
-from waflib.TaskGen import feature, extension, after_method, before_method
-from waflib.Tools.ccroot import link_task, stlink_task, propagate_uselib_vars, process_use
-
-class go(Task.Task):
- run_str = '${GOC} ${GOCFLAGS} ${CPPPATH_ST:INCPATHS} -o ${TGT} ${SRC}'
-
-class gopackage(stlink_task):
- run_str = '${GOP} grc ${TGT} ${SRC}'
-
-class goprogram(link_task):
- run_str = '${GOL} ${GOLFLAGS} -o ${TGT} ${SRC}'
- inst_to = '${BINDIR}'
- chmod = Utils.O755
-
-class cgopackage(stlink_task):
- color = 'YELLOW'
- inst_to = '${LIBDIR}'
- ext_in = ['.go']
- ext_out = ['.a']
-
- def run(self):
- src_dir = self.generator.bld.path
- source = self.inputs
- target = self.outputs[0].change_ext('')
-
- #print ("--> %s" % self.outputs)
- #print ('++> %s' % self.outputs[1])
- bld_dir = self.outputs[1]
- bld_dir.mkdir()
- obj_dir = bld_dir.make_node('_obj')
- obj_dir.mkdir()
-
- bld_srcs = []
- for s in source:
- # FIXME: it seems gomake/cgo stumbles on filenames like a/b/c.go
- # -> for the time being replace '/' with '_'...
- #b = bld_dir.make_node(s.path_from(src_dir))
- b = bld_dir.make_node(s.path_from(src_dir).replace(os.sep,'_'))
- b.parent.mkdir()
- #print ('++> %s' % (s.path_from(src_dir),))
- try:
- try:os.remove(b.abspath())
- except Exception:pass
- os.symlink(s.abspath(), b.abspath())
- except Exception:
- # if no support for symlinks, copy the file from src
- b.write(s.read())
- bld_srcs.append(b)
- #print("--|> [%s]" % b.abspath())
- b.sig = Utils.h_file(b.abspath())
- pass
- #self.set_inputs(bld_srcs)
- #self.generator.bld.raw_deps[self.uid()] = [self.signature()] + bld_srcs
- makefile_node = bld_dir.make_node("Makefile")
- makefile_tmpl = '''\
-# Copyright 2009 The Go Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file. ---
-
-include $(GOROOT)/src/Make.inc
-
-TARG=%(target)s
-
-GCIMPORTS= %(gcimports)s
-
-CGOFILES=\\
-\t%(source)s
-
-CGO_CFLAGS= %(cgo_cflags)s
-
-CGO_LDFLAGS= %(cgo_ldflags)s
-
-include $(GOROOT)/src/Make.pkg
-
-%%: install %%.go
- $(GC) $*.go
- $(LD) -o $@ $*.$O
-
-''' % {
-'gcimports': ' '.join(l for l in self.env['GOCFLAGS']),
-'cgo_cflags' : ' '.join(l for l in self.env['GOCFLAGS']),
-'cgo_ldflags': ' '.join(l for l in self.env['GOLFLAGS']),
-'target': target.path_from(obj_dir),
-'source': ' '.join([b.path_from(bld_dir) for b in bld_srcs])
-}
- makefile_node.write(makefile_tmpl)
- #print ("::makefile: %s"%makefile_node.abspath())
- cmd = Utils.subst_vars('gomake ${GOMAKE_FLAGS}', self.env).strip()
- o = self.outputs[0].change_ext('.gomake.log')
- fout_node = bld_dir.find_or_declare(o.name)
- fout = open(fout_node.abspath(), 'w')
- rc = self.generator.bld.exec_command(
- cmd,
- stdout=fout,
- stderr=fout,
- cwd=bld_dir.abspath(),
- )
- if rc != 0:
- import waflib.Logs as msg
- msg.error('** error running [%s] (cgo-%s)' % (cmd, target))
- msg.error(fout_node.read())
- return rc
- self.generator.bld.read_stlib(
- target,
- paths=[obj_dir.abspath(),],
- )
- tgt = self.outputs[0]
- if tgt.parent != obj_dir:
- install_dir = os.path.join('${LIBDIR}',
- tgt.parent.path_from(obj_dir))
- else:
- install_dir = '${LIBDIR}'
- #print('===> %s (%s)' % (tgt.abspath(), install_dir))
- self.generator.bld.install_files(
- install_dir,
- tgt.abspath(),
- relative_trick=False,
- postpone=False,
- )
- return rc
-
-@extension('.go')
-def compile_go(self, node):
- #print('*'*80, self.name)
- if not ('cgopackage' in self.features):
- return self.create_compiled_task('go', node)
- #print ('compile_go-cgo...')
- #bld_dir = node.parent.get_bld()
- #obj_dir = bld_dir.make_node('_obj')
- return self.create_task('cgopackage', node, node.change_ext('.a'))
-
-@feature('gopackage', 'goprogram', 'cgopackage')
-@before_method('process_source')
-def go_compiler_is_foobar(self):
- if self.env.GONAME == 'gcc':
- return
- self.source = self.to_nodes(self.source)
- src = []
- go = []
- for node in self.source:
- if node.name.endswith('.go'):
- go.append(node)
- else:
- src.append(node)
- self.source = src
- if not ('cgopackage' in self.features):
- #print('--> [%s]... (%s)' % (go[0], getattr(self, 'target', 'N/A')))
- tsk = self.create_compiled_task('go', go[0])
- tsk.inputs.extend(go[1:])
- else:
- #print ('+++ [%s] +++' % self.target)
- bld_dir = self.path.get_bld().make_node('cgopackage--%s' % self.target.replace(os.sep,'_'))
- obj_dir = bld_dir.make_node('_obj')
- target = obj_dir.make_node(self.target+'.a')
- tsk = self.create_task('cgopackage', go, [target, bld_dir])
- self.link_task = tsk
-
-@feature('gopackage', 'goprogram', 'cgopackage')
-@after_method('process_source', 'apply_incpaths',)
-def go_local_libs(self):
- names = self.to_list(getattr(self, 'use', []))
- #print ('== go-local-libs == [%s] == use: %s' % (self.name, names))
- for name in names:
- tg = self.bld.get_tgen_by_name(name)
- if not tg:
- raise Utils.WafError('no target of name %r necessary for %r in go uselib local' % (name, self))
- tg.post()
- #print ("-- tg[%s]: %s" % (self.name,name))
- lnk_task = getattr(tg, 'link_task', None)
- if lnk_task:
- for tsk in self.tasks:
- if isinstance(tsk, (go, gopackage, cgopackage)):
- tsk.set_run_after(lnk_task)
- tsk.dep_nodes.extend(lnk_task.outputs)
- path = lnk_task.outputs[0].parent.abspath()
- if isinstance(lnk_task, (go, gopackage)):
- # handle hierarchical packages
- path = lnk_task.generator.path.get_bld().abspath()
- elif isinstance(lnk_task, (cgopackage,)):
- # handle hierarchical cgopackages
- cgo_obj_dir = lnk_task.outputs[1].find_or_declare('_obj')
- path = cgo_obj_dir.abspath()
- # recursively add parent GOCFLAGS...
- self.env.append_unique('GOCFLAGS',
- getattr(lnk_task.env, 'GOCFLAGS',[]))
- # ditto for GOLFLAGS...
- self.env.append_unique('GOLFLAGS',
- getattr(lnk_task.env, 'GOLFLAGS',[]))
- self.env.append_unique('GOCFLAGS', ['-I%s' % path])
- self.env.append_unique('GOLFLAGS', ['-L%s' % path])
- for n in getattr(tg, 'includes_nodes', []):
- self.env.append_unique('GOCFLAGS', ['-I%s' % n.abspath()])
- pass
- pass
-
-def configure(conf):
-
- def set_def(var, val):
- if not conf.env[var]:
- conf.env[var] = val
-
- goarch = os.getenv('GOARCH')
- if goarch == '386':
- set_def('GO_PLATFORM', 'i386')
- elif goarch == 'amd64':
- set_def('GO_PLATFORM', 'x86_64')
- elif goarch == 'arm':
- set_def('GO_PLATFORM', 'arm')
- else:
- set_def('GO_PLATFORM', platform.machine())
-
- if conf.env.GO_PLATFORM == 'x86_64':
- set_def('GO_COMPILER', '6g')
- set_def('GO_LINKER', '6l')
- elif conf.env.GO_PLATFORM in ('i386', 'i486', 'i586', 'i686'):
- set_def('GO_COMPILER', '8g')
- set_def('GO_LINKER', '8l')
- elif conf.env.GO_PLATFORM == 'arm':
- set_def('GO_COMPILER', '5g')
- set_def('GO_LINKER', '5l')
- set_def('GO_EXTENSION', '.5')
-
- if not (conf.env.GO_COMPILER or conf.env.GO_LINKER):
- raise conf.fatal('Unsupported platform ' + platform.machine())
-
- set_def('GO_PACK', 'gopack')
- set_def('gopackage_PATTERN', '%s.a')
- set_def('CPPPATH_ST', '-I%s')
-
- set_def('GOMAKE_FLAGS', ['--quiet'])
- conf.find_program(conf.env.GO_COMPILER, var='GOC')
- conf.find_program(conf.env.GO_LINKER, var='GOL')
- conf.find_program(conf.env.GO_PACK, var='GOP')
-
- conf.find_program('cgo', var='CGO')
-
-TaskGen.feature('go')(process_use)
-TaskGen.feature('go')(propagate_uselib_vars)
diff --git a/third_party/waf/waflib/extras/gob2.py b/third_party/waf/waflib/extras/gob2.py
index 637f2934dc7..629ccdca8ed 100644
--- a/third_party/waf/waflib/extras/gob2.py
+++ b/third_party/waf/waflib/extras/gob2.py
@@ -1,3 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007
@@ -14,3 +18,4 @@ TaskGen.declare_chain(
def configure(conf):
conf.find_program('gob2', var='GOB2')
conf.env['GOB2FLAGS'] = ''
+
diff --git a/third_party/waf/waflib/extras/halide.py b/third_party/waf/waflib/extras/halide.py
index acec8eca8b4..df7509a6d11 100644
--- a/third_party/waf/waflib/extras/halide.py
+++ b/third_party/waf/waflib/extras/halide.py
@@ -1,3 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Halide code generation tool
@@ -89,7 +93,8 @@ def halide(self):
# Return a node with a new extension, in an appropriate folder
name = src.name
xpos = src.name.rfind('.')
- if xpos == -1: xpos = len(src.name)
+ if xpos == -1:
+ xpos = len(src.name)
newname = name[:xpos] + ext
if src.is_child_of(bld.bldnode):
node = src.get_src().parent.find_or_declare(newname)
@@ -147,3 +152,4 @@ def options(opt):
opt.add_option('--halide-root',
help="path to Halide include and lib files",
)
+
diff --git a/third_party/waf/waflib/extras/javatest.py b/third_party/waf/waflib/extras/javatest.py
new file mode 100755
index 00000000000..0c315745353
--- /dev/null
+++ b/third_party/waf/waflib/extras/javatest.py
@@ -0,0 +1,122 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# Federico Pellegrin, 2017 (fedepell)
+
+"""
+Provides Java Unit test support using :py:class:`waflib.Tools.waf_unit_test.utest`
+task via the **javatest** feature.
+
+This gives the possibility to run unit test and have them integrated into the
+standard waf unit test environment. It has been tested with TestNG and JUnit
+but should be easily expandable to other frameworks given the flexibility of
+ut_str provided by the standard waf unit test environment.
+
+Example usage:
+
+def options(opt):
+ opt.load('java waf_unit_test javatest')
+
+def configure(conf):
+ conf.load('java javatest')
+
+def build(bld):
+
+ [ ... mainprog is built here ... ]
+
+ bld(features = 'javac javatest',
+ srcdir = 'test/',
+ outdir = 'test',
+ sourcepath = ['test'],
+ classpath = [ 'src' ],
+ basedir = 'test',
+ use = ['JAVATEST', 'mainprog'], # mainprog is the program being tested in src/
+ ut_str = 'java -cp ${CLASSPATH} ${JTRUNNER} ${SRC}',
+ jtest_source = bld.path.ant_glob('test/*.xml'),
+ )
+
+
+At command line the CLASSPATH where to find the testing environment and the
+test runner (default TestNG) that will then be seen in the environment as
+CLASSPATH_JAVATEST (then used for use) and JTRUNNER and can be used for
+dependencies and ut_str generation.
+
+Example configure for TestNG:
+ waf configure --jtpath=/tmp/testng-6.12.jar:/tmp/jcommander-1.71.jar --jtrunner=org.testng.TestNG
+ or as default runner is TestNG:
+ waf configure --jtpath=/tmp/testng-6.12.jar:/tmp/jcommander-1.71.jar
+
+Example configure for JUnit:
+ waf configure --jtpath=/tmp/junit.jar --jtrunner=org.junit.runner.JUnitCore
+
+The runner class presence on the system is checked for at configuration stage.
+
+"""
+
+import os
+from waflib import Task, TaskGen, Options
+
+@TaskGen.feature('javatest')
+@TaskGen.after_method('apply_java', 'use_javac_files', 'set_classpath')
+def make_javatest(self):
+ """
+ Creates a ``utest`` task with a populated environment for Java Unit test execution
+
+ """
+ tsk = self.create_task('utest')
+ tsk.set_run_after(self.javac_task)
+
+ # Put test input files as waf_unit_test relies on that for some prints and log generation
+ # If jtest_source is there, this is specially useful for passing XML for TestNG
+ # that contain test specification, use that as inputs, otherwise test sources
+ if getattr(self, 'jtest_source', None):
+ tsk.inputs = self.to_nodes(self.jtest_source)
+ else:
+ if self.javac_task.srcdir[0].exists():
+ tsk.inputs = self.javac_task.srcdir[0].ant_glob('**/*.java', remove=False)
+
+ if getattr(self, 'ut_str', None):
+ self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
+ tsk.vars = lst + tsk.vars
+
+ if getattr(self, 'ut_cwd', None):
+ if isinstance(self.ut_cwd, str):
+ # we want a Node instance
+ if os.path.isabs(self.ut_cwd):
+ self.ut_cwd = self.bld.root.make_node(self.ut_cwd)
+ else:
+ self.ut_cwd = self.path.make_node(self.ut_cwd)
+ else:
+ self.ut_cwd = self.bld.bldnode
+
+ # Get parent CLASSPATH and add output dir of test, we run from wscript dir
+ # We have to change it from list to the standard java -cp format (: separated)
+ tsk.env.CLASSPATH = ':'.join(self.env.CLASSPATH) + ':' + self.outdir.abspath()
+
+ if not self.ut_cwd.exists():
+ self.ut_cwd.mkdir()
+
+ if not hasattr(self, 'ut_env'):
+ self.ut_env = dict(os.environ)
+
+def configure(ctx):
+ cp = ctx.env.CLASSPATH or '.'
+ if getattr(Options.options, 'jtpath', None):
+ ctx.env.CLASSPATH_JAVATEST = getattr(Options.options, 'jtpath').split(':')
+ cp += ':' + getattr(Options.options, 'jtpath')
+
+ if getattr(Options.options, 'jtrunner', None):
+ ctx.env.JTRUNNER = getattr(Options.options, 'jtrunner')
+
+ if ctx.check_java_class(ctx.env.JTRUNNER, with_classpath=cp):
+ ctx.fatal('Could not run test class %r' % ctx.env.JTRUNNER)
+
+def options(opt):
+ opt.add_option('--jtpath', action='store', default='', dest='jtpath',
+ help='Path to jar(s) needed for javatest execution, colon separated, if not in the system CLASSPATH')
+ opt.add_option('--jtrunner', action='store', default='org.testng.TestNG', dest='jtrunner',
+ help='Class to run javatest test [default: org.testng.TestNG]')
+
diff --git a/third_party/waf/waflib/extras/kde4.py b/third_party/waf/waflib/extras/kde4.py
new file mode 100644
index 00000000000..13ac82f4049
--- /dev/null
+++ b/third_party/waf/waflib/extras/kde4.py
@@ -0,0 +1,97 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2010 (ita)
+
+"""
+Support for the KDE4 libraries and msgfmt
+"""
+
+import os, re
+from waflib import Task, Utils
+from waflib.TaskGen import feature
+
+@feature('msgfmt')
+def apply_msgfmt(self):
+ """
+ Process all languages to create .mo files and to install them::
+
+ def build(bld):
+ bld(features='msgfmt', langs='es de fr', appname='myapp', install_path='${KDE4_LOCALE_INSTALL_DIR}')
+ """
+ for lang in self.to_list(self.langs):
+ node = self.path.find_resource(lang+'.po')
+ task = self.create_task('msgfmt', node, node.change_ext('.mo'))
+
+ langname = lang.split('/')
+ langname = langname[-1]
+
+ inst = getattr(self, 'install_path', '${KDE4_LOCALE_INSTALL_DIR}')
+
+ self.add_install_as(
+ inst_to = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + getattr(self, 'appname', 'set_your_appname') + '.mo',
+ inst_from = task.outputs[0],
+ chmod = getattr(self, 'chmod', Utils.O644))
+
+class msgfmt(Task.Task):
+ """
+ Transform .po files into .mo files
+ """
+ color = 'BLUE'
+ run_str = '${MSGFMT} ${SRC} -o ${TGT}'
+
+def configure(self):
+ """
+ Detect kde4-config and set various variables for the *use* system::
+
+ def options(opt):
+ opt.load('compiler_cxx kde4')
+ def configure(conf):
+ conf.load('compiler_cxx kde4')
+ def build(bld):
+ bld.program(source='main.c', target='app', use='KDECORE KIO KHTML')
+ """
+ kdeconfig = self.find_program('kde4-config')
+ prefix = self.cmd_and_log(kdeconfig + ['--prefix']).strip()
+ fname = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
+ try:
+ os.stat(fname)
+ except OSError:
+ fname = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
+ try:
+ os.stat(fname)
+ except OSError:
+ self.fatal('could not open %s' % fname)
+
+ try:
+ txt = Utils.readf(fname)
+ except EnvironmentError:
+ self.fatal('could not read %s' % fname)
+
+ txt = txt.replace('\\\n', '\n')
+ fu = re.compile('#(.*)\n')
+ txt = fu.sub('', txt)
+
+ setregexp = re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
+ found = setregexp.findall(txt)
+
+ for (_, key, val) in found:
+ #print key, val
+ self.env[key] = val
+
+ # well well, i could just write an interpreter for cmake files
+ self.env['LIB_KDECORE']= ['kdecore']
+ self.env['LIB_KDEUI'] = ['kdeui']
+ self.env['LIB_KIO'] = ['kio']
+ self.env['LIB_KHTML'] = ['khtml']
+ self.env['LIB_KPARTS'] = ['kparts']
+
+ self.env['LIBPATH_KDECORE'] = [os.path.join(self.env.KDE4_LIB_INSTALL_DIR, 'kde4', 'devel'), self.env.KDE4_LIB_INSTALL_DIR]
+ self.env['INCLUDES_KDECORE'] = [self.env['KDE4_INCLUDE_INSTALL_DIR']]
+ self.env.append_value('INCLUDES_KDECORE', [self.env['KDE4_INCLUDE_INSTALL_DIR']+ os.sep + 'KDE'])
+
+ self.find_program('msgfmt', var='MSGFMT')
+
diff --git a/third_party/waf/waflib/extras/local_rpath.py b/third_party/waf/waflib/extras/local_rpath.py
index 8942e97708f..6020b45f7a9 100644
--- a/third_party/waf/waflib/extras/local_rpath.py
+++ b/third_party/waf/waflib/extras/local_rpath.py
@@ -1,5 +1,9 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
# Thomas Nagy, 2011 (ita)
from waflib.TaskGen import after_method, feature
@@ -16,3 +20,4 @@ def add_rpath_stuff(self):
continue
self.env.append_value('RPATH', tg.link_task.outputs[0].parent.abspath())
all.extend(self.to_list(getattr(tg, 'use', [])))
+
diff --git a/third_party/waf/waflib/extras/make.py b/third_party/waf/waflib/extras/make.py
index 8b99c4dd0e0..7b75d5511ba 100644
--- a/third_party/waf/waflib/extras/make.py
+++ b/third_party/waf/waflib/extras/make.py
@@ -1,3 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011 (ita)
@@ -48,7 +52,7 @@ class MakeContext(BuildContext):
for pat in self.files.split(','):
matcher = self.get_matcher(pat)
for tg in g:
- if isinstance(tg, Task.TaskBase):
+ if isinstance(tg, Task.Task):
lst = [tg]
else:
lst = tg.tasks
@@ -56,7 +60,7 @@ class MakeContext(BuildContext):
all_tasks.append(tsk)
do_exec = False
- for node in getattr(tsk, 'inputs', []):
+ for node in tsk.inputs:
try:
uses[node].append(tsk)
except:
@@ -66,7 +70,7 @@ class MakeContext(BuildContext):
do_exec = True
break
- for node in getattr(tsk, 'outputs', []):
+ for node in tsk.outputs:
try:
provides[node].append(tsk)
except:
@@ -86,14 +90,14 @@ class MakeContext(BuildContext):
result = all_tasks
else:
# this is like a big filter...
- result = set([])
- seen = set([])
+ result = set()
+ seen = set()
cur = set(tasks)
while cur:
result |= cur
- tosee = set([])
+ tosee = set()
for tsk in cur:
- for node in getattr(tsk, 'inputs', []):
+ for node in tsk.inputs:
if node in seen:
continue
seen.add(node)
@@ -129,9 +133,9 @@ class MakeContext(BuildContext):
pattern = re.compile(pat)
def match(node, output):
- if output == True and not out:
+ if output and not out:
return False
- if output == False and not inn:
+ if not output and not inn:
return False
if anode:
@@ -139,3 +143,4 @@ class MakeContext(BuildContext):
else:
return pattern.match(node.abspath())
return match
+
diff --git a/third_party/waf/waflib/extras/md5_tstamp.py b/third_party/waf/waflib/extras/md5_tstamp.py
deleted file mode 100644
index 63b71d8d27a..00000000000
--- a/third_party/waf/waflib/extras/md5_tstamp.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-"""
-This module assumes that only one build context is running at a given time, which
-is not the case if you want to execute configuration tests in parallel.
-
-Store some values on the buildcontext mapping file paths to
-stat values and md5 values (timestamp + md5)
-this way the md5 hashes are computed only when timestamp change (can be faster)
-There is usually little or no gain from enabling this, but it can be used to enable
-the second level cache with timestamps (WAFCACHE)
-
-You may have to run distclean or to remove the build directory before enabling/disabling
-this hashing scheme
-"""
-
-import os, stat
-from waflib import Utils, Build, Context
-
-STRONGEST = True
-
-try:
- Build.BuildContext.store_real
-except AttributeError:
-
- Context.DBFILE += '_md5tstamp'
-
- Build.hashes_md5_tstamp = {}
- Build.SAVED_ATTRS.append('hashes_md5_tstamp')
- def store(self):
- # save the hash cache as part of the default pickle file
- self.hashes_md5_tstamp = Build.hashes_md5_tstamp
- self.store_real()
- Build.BuildContext.store_real = Build.BuildContext.store
- Build.BuildContext.store = store
-
- def restore(self):
- # we need a module variable for h_file below
- self.restore_real()
- try:
- Build.hashes_md5_tstamp = self.hashes_md5_tstamp or {}
- except AttributeError:
- Build.hashes_md5_tstamp = {}
- Build.BuildContext.restore_real = Build.BuildContext.restore
- Build.BuildContext.restore = restore
-
- def h_file(filename):
- st = os.stat(filename)
- if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')
-
- if filename in Build.hashes_md5_tstamp:
- if Build.hashes_md5_tstamp[filename][0] == str(st.st_mtime):
- return Build.hashes_md5_tstamp[filename][1]
- if STRONGEST:
- ret = Utils.h_file_no_md5(filename)
- Build.hashes_md5_tstamp[filename] = (str(st.st_mtime), ret)
- return ret
- else:
- m = Utils.md5()
- m.update(str(st.st_mtime))
- m.update(str(st.st_size))
- m.update(filename)
- Build.hashes_md5_tstamp[filename] = (str(st.st_mtime), m.digest())
- return m.digest()
- Utils.h_file_no_md5 = Utils.h_file
- Utils.h_file = h_file
diff --git a/third_party/waf/waflib/extras/mem_reducer.py b/third_party/waf/waflib/extras/mem_reducer.py
deleted file mode 100644
index e97c8d7272c..00000000000
--- a/third_party/waf/waflib/extras/mem_reducer.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#! /usr/bin/env python
-# encoding: UTF-8
-
-"""
-This tool can help to reduce the memory usage in very large builds featuring many tasks with after/before attributes.
-It may also improve the overall build time by decreasing the amount of iterations over tasks.
-
-Usage:
-def options(opt):
- opt.load('mem_reducer')
-"""
-
-import itertools
-from waflib import Utils, Task, Runner
-
-class SetOfTasks(object):
- """Wraps a set and a task which has a list of other sets.
- The interface is meant to mimic the interface of set. Add missing functions as needed.
- """
- def __init__(self, owner):
- self._set = owner.run_after
- self._owner = owner
-
- def __iter__(self):
- for g in self._owner.run_after_groups:
- #print len(g)
- for task in g:
- yield task
- for task in self._set:
- yield task
-
- def add(self, obj):
- self._set.add(obj)
-
- def update(self, obj):
- self._set.update(obj)
-
-def set_precedence_constraints(tasks):
- cstr_groups = Utils.defaultdict(list)
- for x in tasks:
- x.run_after = SetOfTasks(x)
- x.run_after_groups = []
- x.waiting_sets = []
-
- h = x.hash_constraints()
- cstr_groups[h].append(x)
-
- # create sets which can be reused for all tasks
- for k in cstr_groups.keys():
- cstr_groups[k] = set(cstr_groups[k])
-
- # this list should be short
- for key1, key2 in itertools.combinations(cstr_groups.keys(), 2):
- group1 = cstr_groups[key1]
- group2 = cstr_groups[key2]
- # get the first entry of the set
- t1 = next(iter(group1))
- t2 = next(iter(group2))
-
- # add the constraints based on the comparisons
- if Task.is_before(t1, t2):
- for x in group2:
- x.run_after_groups.append(group1)
- for k in group1:
- k.waiting_sets.append(group1)
- elif Task.is_before(t2, t1):
- for x in group1:
- x.run_after_groups.append(group2)
- for k in group2:
- k.waiting_sets.append(group2)
-
-Task.set_precedence_constraints = set_precedence_constraints
-
-def get_out(self):
- tsk = self.out.get()
- if not self.stop:
- self.add_more_tasks(tsk)
- self.count -= 1
- self.dirty = True
-
- # shrinking sets
- try:
- ws = tsk.waiting_sets
- except AttributeError:
- pass
- else:
- for k in ws:
- try:
- k.remove(tsk)
- except KeyError:
- pass
-
- return tsk
-Runner.Parallel.get_out = get_out
-
-def skip(self, tsk):
- tsk.hasrun = Task.SKIPPED
-
- # shrinking sets
- try:
- ws = tsk.waiting_sets
- except AttributeError:
- pass
- else:
- for k in ws:
- try:
- k.remove(tsk)
- except KeyError:
- pass
-Runner.Parallel.skip = skip
diff --git a/third_party/waf/waflib/extras/midl.py b/third_party/waf/waflib/extras/midl.py
new file mode 100644
index 00000000000..c81e0e317ad
--- /dev/null
+++ b/third_party/waf/waflib/extras/midl.py
@@ -0,0 +1,73 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# Issue 1185 ultrix gmail com
+
+"""
+Microsoft Interface Definition Language support. Given ComObject.idl, this tool
+will generate ComObject.tlb ComObject_i.h ComObject_i.c ComObject_p.c and dlldata.c
+
+To declare targets using midl::
+
+ def configure(conf):
+ conf.load('msvc')
+ conf.load('midl')
+
+ def build(bld):
+ bld(
+ features='c cshlib',
+ # Note: ComObject_i.c is generated from ComObject.idl
+ source = 'main.c ComObject.idl ComObject_i.c',
+ target = 'ComObject.dll')
+"""
+
+from waflib import Task, Utils
+from waflib.TaskGen import feature, before_method
+import os
+
+def configure(conf):
+ conf.find_program(['midl'], var='MIDL')
+
+ conf.env.MIDLFLAGS = [
+ '/nologo',
+ '/D',
+ '_DEBUG',
+ '/W1',
+ '/char',
+ 'signed',
+ '/Oicf',
+ ]
+
+@feature('c', 'cxx')
+@before_method('process_source')
+def idl_file(self):
+ # Do this before process_source so that the generated header can be resolved
+ # when scanning source dependencies.
+ idl_nodes = []
+ src_nodes = []
+ for node in Utils.to_list(self.source):
+ if str(node).endswith('.idl'):
+ idl_nodes.append(node)
+ else:
+ src_nodes.append(node)
+
+ for node in self.to_nodes(idl_nodes):
+ t = node.change_ext('.tlb')
+ h = node.change_ext('_i.h')
+ c = node.change_ext('_i.c')
+ p = node.change_ext('_p.c')
+ d = node.parent.find_or_declare('dlldata.c')
+ self.create_task('midl', node, [t, h, c, p, d])
+
+ self.source = src_nodes
+
+class midl(Task.Task):
+ """
+ Compile idl files
+ """
+ color = 'YELLOW'
+ run_str = '${MIDL} ${MIDLFLAGS} ${CPPPATH_ST:INCLUDES} /tlb ${TGT[0].bldpath()} /header ${TGT[1].bldpath()} /iid ${TGT[2].bldpath()} /proxy ${TGT[3].bldpath()} /dlldata ${TGT[4].bldpath()} ${SRC}'
+ before = ['winrc']
+
diff --git a/third_party/waf/waflib/extras/misc.py b/third_party/waf/waflib/extras/misc.py
deleted file mode 100644
index 802323ddcc7..00000000000
--- a/third_party/waf/waflib/extras/misc.py
+++ /dev/null
@@ -1,410 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
-
-"""
-This tool is totally deprecated
-
-Try using:
- .pc.in files for .pc files
- the feature intltool_in - see demos/intltool
- make-like rules
-"""
-
-import shutil, re, os
-from waflib import Node, Task, Utils, Errors
-from waflib.TaskGen import feature, after_method, before_method
-from waflib.Logs import debug
-
-def copy_attrs(orig, dest, names, only_if_set=False):
- """
- copy class attributes from an object to another
- """
- for a in Utils.to_list(names):
- u = getattr(orig, a, ())
- if u or not only_if_set:
- setattr(dest, a, u)
-
-def copy_func(tsk):
- "Make a file copy. This might be used to make other kinds of file processing (even calling a compiler is possible)"
- infile = tsk.inputs[0].abspath()
- outfile = tsk.outputs[0].abspath()
- try:
- shutil.copy2(infile, outfile)
- except EnvironmentError:
- return 1
- else:
- if tsk.chmod: os.chmod(outfile, tsk.chmod)
- return 0
-
-def action_process_file_func(tsk):
- "Ask the function attached to the task to process it"
- if not tsk.fun: raise Errors.WafError('task must have a function attached to it for copy_func to work!')
- return tsk.fun(tsk)
-
-@feature('cmd')
-def apply_cmd(self):
- "call a command everytime"
- if not self.fun: raise Errors.WafError('cmdobj needs a function!')
- tsk = Task.TaskBase()
- tsk.fun = self.fun
- tsk.env = self.env
- self.tasks.append(tsk)
- tsk.install_path = self.install_path
-
-@feature('copy')
-@before_method('process_source')
-def apply_copy(self):
- Utils.def_attrs(self, fun=copy_func)
- self.default_install_path = 0
-
- lst = self.to_list(self.source)
- self.meths.remove('process_source')
-
- for filename in lst:
- node = self.path.find_resource(filename)
- if not node: raise Errors.WafError('cannot find input file %s for processing' % filename)
-
- target = self.target
- if not target or len(lst)>1: target = node.name
-
- # TODO the file path may be incorrect
- newnode = self.path.find_or_declare(target)
-
- tsk = self.create_task('copy', node, newnode)
- tsk.fun = self.fun
- tsk.chmod = getattr(self, 'chmod', Utils.O644)
-
- if not tsk.env:
- tsk.debug()
- raise Errors.WafError('task without an environment')
-
-def subst_func(tsk):
- "Substitutes variables in a .in file"
-
- m4_re = re.compile('@(\w+)@', re.M)
-
- code = tsk.inputs[0].read() #Utils.readf(infile)
-
- # replace all % by %% to prevent errors by % signs in the input file while string formatting
- code = code.replace('%', '%%')
-
- s = m4_re.sub(r'%(\1)s', code)
-
- env = tsk.env
- di = getattr(tsk, 'dict', {}) or getattr(tsk.generator, 'dict', {})
- if not di:
- names = m4_re.findall(code)
- for i in names:
- di[i] = env.get_flat(i) or env.get_flat(i.upper())
-
- tsk.outputs[0].write(s % di)
-
-@feature('subst')
-@before_method('process_source')
-def apply_subst(self):
- Utils.def_attrs(self, fun=subst_func)
- lst = self.to_list(self.source)
- self.meths.remove('process_source')
-
- self.dict = getattr(self, 'dict', {})
-
- for filename in lst:
- node = self.path.find_resource(filename)
- if not node: raise Errors.WafError('cannot find input file %s for processing' % filename)
-
- if self.target:
- newnode = self.path.find_or_declare(self.target)
- else:
- newnode = node.change_ext('')
-
- try:
- self.dict = self.dict.get_merged_dict()
- except AttributeError:
- pass
-
- if self.dict and not self.env['DICT_HASH']:
- self.env = self.env.derive()
- keys = list(self.dict.keys())
- keys.sort()
- lst = [self.dict[x] for x in keys]
- self.env['DICT_HASH'] = str(Utils.h_list(lst))
-
- tsk = self.create_task('copy', node, newnode)
- tsk.fun = self.fun
- tsk.dict = self.dict
- tsk.dep_vars = ['DICT_HASH']
- tsk.chmod = getattr(self, 'chmod', Utils.O644)
-
- if not tsk.env:
- tsk.debug()
- raise Errors.WafError('task without an environment')
-
-####################
-## command-output ####
-####################
-
-class cmd_arg(object):
- """command-output arguments for representing files or folders"""
- def __init__(self, name, template='%s'):
- self.name = name
- self.template = template
- self.node = None
-
-class input_file(cmd_arg):
- def find_node(self, base_path):
- assert isinstance(base_path, Node.Node)
- self.node = base_path.find_resource(self.name)
- if self.node is None:
- raise Errors.WafError("Input file %s not found in " % (self.name, base_path))
-
- def get_path(self, env, absolute):
- if absolute:
- return self.template % self.node.abspath()
- else:
- return self.template % self.node.srcpath()
-
-class output_file(cmd_arg):
- def find_node(self, base_path):
- assert isinstance(base_path, Node.Node)
- self.node = base_path.find_or_declare(self.name)
- if self.node is None:
- raise Errors.WafError("Output file %s not found in " % (self.name, base_path))
-
- def get_path(self, env, absolute):
- if absolute:
- return self.template % self.node.abspath()
- else:
- return self.template % self.node.bldpath()
-
-class cmd_dir_arg(cmd_arg):
- def find_node(self, base_path):
- assert isinstance(base_path, Node.Node)
- self.node = base_path.find_dir(self.name)
- if self.node is None:
- raise Errors.WafError("Directory %s not found in " % (self.name, base_path))
-
-class input_dir(cmd_dir_arg):
- def get_path(self, dummy_env, dummy_absolute):
- return self.template % self.node.abspath()
-
-class output_dir(cmd_dir_arg):
- def get_path(self, env, dummy_absolute):
- return self.template % self.node.abspath()
-
-
-class command_output(Task.Task):
- color = "BLUE"
- def __init__(self, env, command, command_node, command_args, stdin, stdout, cwd, os_env, stderr):
- Task.Task.__init__(self, env=env)
- assert isinstance(command, (str, Node.Node))
- self.command = command
- self.command_args = command_args
- self.stdin = stdin
- self.stdout = stdout
- self.cwd = cwd
- self.os_env = os_env
- self.stderr = stderr
-
- if command_node is not None: self.dep_nodes = [command_node]
- self.dep_vars = [] # additional environment variables to look
-
- def run(self):
- task = self
- #assert len(task.inputs) > 0
-
- def input_path(node, template):
- if task.cwd is None:
- return template % node.bldpath()
- else:
- return template % node.abspath()
- def output_path(node, template):
- fun = node.abspath
- if task.cwd is None: fun = node.bldpath
- return template % fun()
-
- if isinstance(task.command, Node.Node):
- argv = [input_path(task.command, '%s')]
- else:
- argv = [task.command]
-
- for arg in task.command_args:
- if isinstance(arg, str):
- argv.append(arg)
- else:
- assert isinstance(arg, cmd_arg)
- argv.append(arg.get_path(task.env, (task.cwd is not None)))
-
- if task.stdin:
- stdin = open(input_path(task.stdin, '%s'))
- else:
- stdin = None
-
- if task.stdout:
- stdout = open(output_path(task.stdout, '%s'), "w")
- else:
- stdout = None
-
- if task.stderr:
- stderr = open(output_path(task.stderr, '%s'), "w")
- else:
- stderr = None
-
- if task.cwd is None:
- cwd = ('None (actually %r)' % os.getcwd())
- else:
- cwd = repr(task.cwd)
- debug("command-output: cwd=%s, stdin=%r, stdout=%r, argv=%r" %
- (cwd, stdin, stdout, argv))
-
- if task.os_env is None:
- os_env = os.environ
- else:
- os_env = task.os_env
- command = Utils.subprocess.Popen(argv, stdin=stdin, stdout=stdout, stderr=stderr, cwd=task.cwd, env=os_env)
- return command.wait()
-
-@feature('command-output')
-def init_cmd_output(self):
- Utils.def_attrs(self,
- stdin = None,
- stdout = None,
- stderr = None,
- # the command to execute
- command = None,
-
- # whether it is an external command; otherwise it is assumed
- # to be an executable binary or script that lives in the
- # source or build tree.
- command_is_external = False,
-
- # extra parameters (argv) to pass to the command (excluding
- # the command itself)
- argv = [],
-
- # dependencies to other objects -> this is probably not what you want (ita)
- # values must be 'task_gen' instances (not names!)
- dependencies = [],
-
- # dependencies on env variable contents
- dep_vars = [],
-
- # input files that are implicit, i.e. they are not
- # stdin, nor are they mentioned explicitly in argv
- hidden_inputs = [],
-
- # output files that are implicit, i.e. they are not
- # stdout, nor are they mentioned explicitly in argv
- hidden_outputs = [],
-
- # change the subprocess to this cwd (must use obj.input_dir() or output_dir() here)
- cwd = None,
-
- # OS environment variables to pass to the subprocess
- # if None, use the default environment variables unchanged
- os_env = None)
-
-@feature('command-output')
-@after_method('init_cmd_output')
-def apply_cmd_output(self):
- if self.command is None:
- raise Errors.WafError("command-output missing command")
- if self.command_is_external:
- cmd = self.command
- cmd_node = None
- else:
- cmd_node = self.path.find_resource(self.command)
- assert cmd_node is not None, ('''Could not find command '%s' in source tree.
-Hint: if this is an external command,
-use command_is_external=True''') % (self.command,)
- cmd = cmd_node
-
- if self.cwd is None:
- cwd = None
-
- inputs = []
- outputs = []
-
- for arg in self.argv:
- if isinstance(arg, cmd_arg):
- arg.find_node(self.path)
- if isinstance(arg, input_file):
- inputs.append(arg.node)
- if isinstance(arg, output_file):
- outputs.append(arg.node)
-
- if self.stdout is None:
- stdout = None
- else:
- assert isinstance(self.stdout, str)
- stdout = self.path.find_or_declare(self.stdout)
- if stdout is None:
- raise Errors.WafError("File %s not found" % (self.stdout,))
- outputs.append(stdout)
-
- if self.stderr is None:
- stderr = None
- else:
- assert isinstance(self.stderr, str)
- stderr = self.path.find_or_declare(self.stderr)
- if stderr is None:
- raise Errors.WafError("File %s not found" % (self.stderr,))
- outputs.append(stderr)
-
- if self.stdin is None:
- stdin = None
- else:
- assert isinstance(self.stdin, str)
- stdin = self.path.find_resource(self.stdin)
- if stdin is None:
- raise Errors.WafError("File %s not found" % (self.stdin,))
- inputs.append(stdin)
-
- for hidden_input in self.to_list(self.hidden_inputs):
- node = self.path.find_resource(hidden_input)
- if node is None:
- raise Errors.WafError("File %s not found in dir %s" % (hidden_input, self.path))
- inputs.append(node)
-
- for hidden_output in self.to_list(self.hidden_outputs):
- node = self.path.find_or_declare(hidden_output)
- if node is None:
- raise Errors.WafError("File %s not found in dir %s" % (hidden_output, self.path))
- outputs.append(node)
-
- if not (inputs or getattr(self, 'no_inputs', None)):
- raise Errors.WafError('command-output objects must have at least one input file or give self.no_inputs')
- if not (outputs or getattr(self, 'no_outputs', None)):
- raise Errors.WafError('command-output objects must have at least one output file or give self.no_outputs')
-
- cwd = self.bld.variant_dir
- task = command_output(self.env, cmd, cmd_node, self.argv, stdin, stdout, cwd, self.os_env, stderr)
- task.generator = self
- copy_attrs(self, task, 'before after ext_in ext_out', only_if_set=True)
- self.tasks.append(task)
-
- task.inputs = inputs
- task.outputs = outputs
- task.dep_vars = self.to_list(self.dep_vars)
-
- for dep in self.dependencies:
- assert dep is not self
- dep.post()
- for dep_task in dep.tasks:
- task.set_run_after(dep_task)
-
- if not task.inputs:
- # the case for svnversion, always run, and update the output nodes
- task.runnable_status = type(Task.TaskBase.run)(runnable_status, task, task.__class__) # always run
- task.post_run = type(Task.TaskBase.run)(post_run, task, task.__class__)
-
- # TODO the case with no outputs?
-
-def post_run(self):
- for x in self.outputs:
- x.sig = Utils.h_file(x.abspath())
-
-def runnable_status(self):
- return self.RUN_ME
-
-Task.task_factory('copy', vars=[], func=action_process_file_func)
diff --git a/third_party/waf/waflib/extras/msvcdeps.py b/third_party/waf/waflib/extras/msvcdeps.py
index 98b06776d01..a6ea52af76b 100644
--- a/third_party/waf/waflib/extras/msvcdeps.py
+++ b/third_party/waf/waflib/extras/msvcdeps.py
@@ -1,3 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Copyright Garmin International or its subsidiaries, 2012-2013
@@ -14,12 +18,18 @@ this tool as compared to c_preproc.
The technique of gutting scan() and pushing the dependency calculation
down to post_run() is cribbed from gccdeps.py.
+
+This affects the cxx class, so make sure to load Qt5 after this tool.
+
+Usage::
+
+ def options(opt):
+ opt.load('compiler_cxx')
+ def configure(conf):
+ conf.load('compiler_cxx msvcdeps')
'''
-import os
-import sys
-import tempfile
-import threading
+import os, sys, tempfile, threading
from waflib import Context, Errors, Logs, Task, Utils
from waflib.Tools import c_preproc, c, cxx, msvc
@@ -37,226 +47,214 @@ supported_compilers = ['msvc']
@feature('c', 'cxx')
@before_method('process_source')
def apply_msvcdeps_flags(taskgen):
- if taskgen.env.CC_NAME not in supported_compilers:
- return
+ if taskgen.env.CC_NAME not in supported_compilers:
+ return
- for flag in ('CFLAGS', 'CXXFLAGS'):
- if taskgen.env.get_flat(flag).find(PREPROCESSOR_FLAG) < 0:
- taskgen.env.append_value(flag, PREPROCESSOR_FLAG)
+ for flag in ('CFLAGS', 'CXXFLAGS'):
+ if taskgen.env.get_flat(flag).find(PREPROCESSOR_FLAG) < 0:
+ taskgen.env.append_value(flag, PREPROCESSOR_FLAG)
- # Figure out what casing conventions the user's shell used when
- # launching Waf
- (drive, _) = os.path.splitdrive(taskgen.bld.srcnode.abspath())
- taskgen.msvcdeps_drive_lowercase = drive == drive.lower()
+ # Figure out what casing conventions the user's shell used when
+ # launching Waf
+ (drive, _) = os.path.splitdrive(taskgen.bld.srcnode.abspath())
+ taskgen.msvcdeps_drive_lowercase = drive == drive.lower()
def path_to_node(base_node, path, cached_nodes):
- # Take the base node and the path and return a node
- # Results are cached because searching the node tree is expensive
- # The following code is executed by threads, it is not safe, so a lock is needed...
- if getattr(path, '__hash__'):
- node_lookup_key = (base_node, path)
- else:
- # Not hashable, assume it is a list and join into a string
- node_lookup_key = (base_node, os.path.sep.join(path))
- try:
- lock.acquire()
- node = cached_nodes[node_lookup_key]
- except KeyError:
- node = base_node.find_resource(path)
- cached_nodes[node_lookup_key] = node
- finally:
- lock.release()
- return node
+ # Take the base node and the path and return a node
+ # Results are cached because searching the node tree is expensive
+ # The following code is executed by threads, it is not safe, so a lock is needed...
+ if getattr(path, '__hash__'):
+ node_lookup_key = (base_node, path)
+ else:
+ # Not hashable, assume it is a list and join into a string
+ node_lookup_key = (base_node, os.path.sep.join(path))
+ try:
+ lock.acquire()
+ node = cached_nodes[node_lookup_key]
+ except KeyError:
+ node = base_node.find_resource(path)
+ cached_nodes[node_lookup_key] = node
+ finally:
+ lock.release()
+ return node
+
+def post_run(self):
+ if self.env.CC_NAME not in supported_compilers:
+ return super(self.derived_msvcdeps, self).post_run()
+
+ # TODO this is unlikely to work with netcache
+ if getattr(self, 'cached', None):
+ return Task.Task.post_run(self)
+
+ bld = self.generator.bld
+ unresolved_names = []
+ resolved_nodes = []
+
+ lowercase = self.generator.msvcdeps_drive_lowercase
+ correct_case_path = bld.path.abspath()
+ correct_case_path_len = len(correct_case_path)
+ correct_case_path_norm = os.path.normcase(correct_case_path)
+
+ # Dynamically bind to the cache
+ try:
+ cached_nodes = bld.cached_nodes
+ except AttributeError:
+ cached_nodes = bld.cached_nodes = {}
+
+ for path in self.msvcdeps_paths:
+ node = None
+ if os.path.isabs(path):
+ # Force drive letter to match conventions of main source tree
+ drive, tail = os.path.splitdrive(path)
+
+ if os.path.normcase(path[:correct_case_path_len]) == correct_case_path_norm:
+ # Path is in the sandbox, force it to be correct. MSVC sometimes returns a lowercase path.
+ path = correct_case_path + path[correct_case_path_len:]
+ else:
+ # Check the drive letter
+ if lowercase and (drive != drive.lower()):
+ path = drive.lower() + tail
+ elif (not lowercase) and (drive != drive.upper()):
+ path = drive.upper() + tail
+ node = path_to_node(bld.root, path, cached_nodes)
+ else:
+ base_node = bld.bldnode
+ # when calling find_resource, make sure the path does not begin by '..'
+ path = [k for k in Utils.split_path(path) if k and k != '.']
+ while path[0] == '..':
+ path = path[1:]
+ base_node = base_node.parent
+
+ node = path_to_node(base_node, path, cached_nodes)
+
+ if not node:
+ raise ValueError('could not find %r for %r' % (path, self))
+ else:
+ if not c_preproc.go_absolute:
+ if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)):
+ # System library
+ Logs.debug('msvcdeps: Ignoring system include %r', node)
+ continue
+
+ if id(node) == id(self.inputs[0]):
+ # Self-dependency
+ continue
+
+ resolved_nodes.append(node)
+
+ bld.node_deps[self.uid()] = resolved_nodes
+ bld.raw_deps[self.uid()] = unresolved_names
+
+ try:
+ del self.cache_sig
+ except AttributeError:
+ pass
+
+ Task.Task.post_run(self)
+
+def scan(self):
+ if self.env.CC_NAME not in supported_compilers:
+ return super(self.derived_msvcdeps, self).scan()
+
+ resolved_nodes = self.generator.bld.node_deps.get(self.uid(), [])
+ unresolved_names = []
+ return (resolved_nodes, unresolved_names)
+
+def sig_implicit_deps(self):
+ if self.env.CC_NAME not in supported_compilers:
+ return super(self.derived_msvcdeps, self).sig_implicit_deps()
+
+ try:
+ return Task.Task.sig_implicit_deps(self)
+ except Errors.WafError:
+ return Utils.SIG_NIL
+
+def exec_command(self, cmd, **kw):
+ if self.env.CC_NAME not in supported_compilers:
+ return super(self.derived_msvcdeps, self).exec_command(cmd, **kw)
+
+ if not 'cwd' in kw:
+ kw['cwd'] = self.get_cwd()
+
+ if self.env.PATH:
+ env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ)
+ env['PATH'] = self.env.PATH if isinstance(self.env.PATH, str) else os.pathsep.join(self.env.PATH)
+
+ # The Visual Studio IDE adds an environment variable that causes
+ # the MS compiler to send its textual output directly to the
+ # debugging window rather than normal stdout/stderr.
+ #
+ # This is unrecoverably bad for this tool because it will cause
+ # all the dependency scanning to see an empty stdout stream and
+ # assume that the file being compiled uses no headers.
+ #
+ # See http://blogs.msdn.com/b/freik/archive/2006/04/05/569025.aspx
+ #
+ # Attempting to repair the situation by deleting the offending
+ # envvar at this point in tool execution will not be good enough--
+ # its presence poisons the 'waf configure' step earlier. We just
+ # want to put a sanity check here in order to help developers
+ # quickly diagnose the issue if an otherwise-good Waf tree
+ # is then executed inside the MSVS IDE.
+ assert 'VS_UNICODE_OUTPUT' not in kw['env']
+
+ cmd, args = self.split_argfile(cmd)
+ try:
+ (fd, tmp) = tempfile.mkstemp()
+ os.write(fd, '\r\n'.join(args).encode())
+ os.close(fd)
+
+ self.msvcdeps_paths = []
+ kw['env'] = kw.get('env', os.environ.copy())
+ kw['cwd'] = kw.get('cwd', os.getcwd())
+ kw['quiet'] = Context.STDOUT
+ kw['output'] = Context.STDOUT
+
+ out = []
+ if Logs.verbose:
+ Logs.debug('argfile: @%r -> %r', tmp, args)
+ try:
+ raw_out = self.generator.bld.cmd_and_log(cmd + ['@' + tmp], **kw)
+ ret = 0
+ except Errors.WafError as e:
+ raw_out = e.stdout
+ ret = e.returncode
+
+ for line in raw_out.splitlines():
+ if line.startswith(INCLUDE_PATTERN):
+ inc_path = line[len(INCLUDE_PATTERN):].strip()
+ Logs.debug('msvcdeps: Regex matched %s', inc_path)
+ self.msvcdeps_paths.append(inc_path)
+ else:
+ out.append(line)
+
+ # Pipe through the remaining stdout content (not related to /showIncludes)
+ if self.generator.bld.logger:
+ self.generator.bld.logger.debug('out: %s' % os.linesep.join(out))
+ else:
+ sys.stdout.write(os.linesep.join(out) + os.linesep)
+
+ return ret
+ finally:
+ try:
+ os.remove(tmp)
+ except OSError:
+ # anti-virus and indexers can keep files open -_-
+ pass
+
-'''
-Register a task subclass that has hooks for running our custom
-dependency calculations rather than the C/C++ stock c_preproc
-method.
-'''
def wrap_compiled_task(classname):
- derived_class = type(classname, (Task.classes[classname],), {})
-
- def post_run(self):
- if self.env.CC_NAME not in supported_compilers:
- return super(derived_class, self).post_run()
-
- if getattr(self, 'cached', None):
- return Task.Task.post_run(self)
-
- bld = self.generator.bld
- unresolved_names = []
- resolved_nodes = []
-
- lowercase = self.generator.msvcdeps_drive_lowercase
- correct_case_path = bld.path.abspath()
- correct_case_path_len = len(correct_case_path)
- correct_case_path_norm = os.path.normcase(correct_case_path)
-
- # Dynamically bind to the cache
- try:
- cached_nodes = bld.cached_nodes
- except AttributeError:
- cached_nodes = bld.cached_nodes = {}
-
- for path in self.msvcdeps_paths:
- node = None
- if os.path.isabs(path):
- # Force drive letter to match conventions of main source tree
- drive, tail = os.path.splitdrive(path)
-
- if os.path.normcase(path[:correct_case_path_len]) == correct_case_path_norm:
- # Path is in the sandbox, force it to be correct. MSVC sometimes returns a lowercase path.
- path = correct_case_path + path[correct_case_path_len:]
- else:
- # Check the drive letter
- if lowercase and (drive != drive.lower()):
- path = drive.lower() + tail
- elif (not lowercase) and (drive != drive.upper()):
- path = drive.upper() + tail
- node = path_to_node(bld.root, path, cached_nodes)
- else:
- base_node = bld.bldnode
- # when calling find_resource, make sure the path does not begin by '..'
- path = [k for k in Utils.split_path(path) if k and k != '.']
- while path[0] == '..':
- path = path[1:]
- base_node = base_node.parent
-
- node = path_to_node(base_node, path, cached_nodes)
-
- if not node:
- raise ValueError('could not find %r for %r' % (path, self))
- else:
- if not c_preproc.go_absolute:
- if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)):
- # System library
- Logs.debug('msvcdeps: Ignoring system include %r' % node)
- continue
-
- if id(node) == id(self.inputs[0]):
- # Self-dependency
- continue
-
- resolved_nodes.append(node)
-
- bld.node_deps[self.uid()] = resolved_nodes
- bld.raw_deps[self.uid()] = unresolved_names
-
- try:
- del self.cache_sig
- except:
- pass
-
- Task.Task.post_run(self)
-
- def scan(self):
- if self.env.CC_NAME not in supported_compilers:
- return super(derived_class, self).scan()
-
- resolved_nodes = self.generator.bld.node_deps.get(self.uid(), [])
- unresolved_names = []
- return (resolved_nodes, unresolved_names)
-
- def sig_implicit_deps(self):
- if self.env.CC_NAME not in supported_compilers:
- return super(derived_class, self).sig_implicit_deps()
-
- try:
- return Task.Task.sig_implicit_deps(self)
- except Errors.WafError:
- return Utils.SIG_NIL
-
- def exec_response_command(self, cmd, **kw):
- # exec_response_command() is only called from inside msvc.py anyway
- assert self.env.CC_NAME in supported_compilers
-
- # Only bother adding '/showIncludes' to compile tasks
- if isinstance(self, (c.c, cxx.cxx)):
- try:
- # The Visual Studio IDE adds an environment variable that causes
- # the MS compiler to send its textual output directly to the
- # debugging window rather than normal stdout/stderr.
- #
- # This is unrecoverably bad for this tool because it will cause
- # all the dependency scanning to see an empty stdout stream and
- # assume that the file being compiled uses no headers.
- #
- # See http://blogs.msdn.com/b/freik/archive/2006/04/05/569025.aspx
- #
- # Attempting to repair the situation by deleting the offending
- # envvar at this point in tool execution will not be good enough--
- # its presence poisons the 'waf configure' step earlier. We just
- # want to put a sanity check here in order to help developers
- # quickly diagnose the issue if an otherwise-good Waf tree
- # is then executed inside the MSVS IDE.
- assert 'VS_UNICODE_OUTPUT' not in kw['env']
-
- tmp = None
-
- # This block duplicated from Waflib's msvc.py
- if sys.platform.startswith('win') and isinstance(cmd, list) and len(' '.join(cmd)) >= 8192:
- program = cmd[0]
- cmd = [self.quote_response_command(x) for x in cmd]
- (fd, tmp) = tempfile.mkstemp()
- os.write(fd, '\r\n'.join(i.replace('\\', '\\\\') for i in cmd[1:]).encode())
- os.close(fd)
- cmd = [program, '@' + tmp]
- # ... end duplication
-
- self.msvcdeps_paths = []
-
- kw['env'] = kw.get('env', os.environ.copy())
- kw['cwd'] = kw.get('cwd', os.getcwd())
- kw['quiet'] = Context.STDOUT
- kw['output'] = Context.STDOUT
-
- out = []
-
- try:
- raw_out = self.generator.bld.cmd_and_log(cmd, **kw)
- ret = 0
- except Errors.WafError as e:
- raw_out = e.stdout
- ret = e.returncode
-
- for line in raw_out.splitlines():
- if line.startswith(INCLUDE_PATTERN):
- inc_path = line[len(INCLUDE_PATTERN):].strip()
- Logs.debug('msvcdeps: Regex matched %s' % inc_path)
- self.msvcdeps_paths.append(inc_path)
- else:
- out.append(line)
-
- # Pipe through the remaining stdout content (not related to /showIncludes)
- if self.generator.bld.logger:
- self.generator.bld.logger.debug('out: %s' % os.linesep.join(out))
- else:
- sys.stdout.write(os.linesep.join(out) + os.linesep)
-
- finally:
- if tmp:
- try:
- os.remove(tmp)
- except OSError:
- pass
-
- return ret
- else:
- # Use base class's version of this method for linker tasks
- return super(derived_class, self).exec_response_command(cmd, **kw)
-
- def can_retrieve_cache(self):
- # msvcdeps and netcaching are incompatible, so disable the cache
- if self.env.CC_NAME not in supported_compilers:
- return super(derived_class, self).can_retrieve_cache()
- self.nocache = True # Disable sending the file to the cache
- return False
-
- derived_class.post_run = post_run
- derived_class.scan = scan
- derived_class.sig_implicit_deps = sig_implicit_deps
- derived_class.exec_response_command = exec_response_command
- derived_class.can_retrieve_cache = can_retrieve_cache
+ derived_class = type(classname, (Task.classes[classname],), {})
+ derived_class.derived_msvcdeps = derived_class
+ derived_class.post_run = post_run
+ derived_class.scan = scan
+ derived_class.sig_implicit_deps = sig_implicit_deps
+ derived_class.exec_command = exec_command
for k in ('c', 'cxx'):
- wrap_compiled_task(k)
+ if k in Task.classes:
+ wrap_compiled_task(k)
+
+def options(opt):
+ raise ValueError('Do not load msvcdeps options')
+
diff --git a/third_party/waf/waflib/extras/msvs.py b/third_party/waf/waflib/extras/msvs.py
index 5f76c269d1c..b12d9c068e5 100644
--- a/third_party/waf/waflib/extras/msvs.py
+++ b/third_party/waf/waflib/extras/msvs.py
@@ -1,5 +1,9 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
# Avalanche Studios 2009-2011
# Thomas Nagy 2011
@@ -41,34 +45,42 @@ It can be a good idea to add the sync_exec tool too.
To generate solution files:
$ waf configure msvs
-To customize the outputs, provide subclasses in your wscript files:
-
-from waflib.extras import msvs
-class vsnode_target(msvs.vsnode_target):
- def get_build_command(self, props):
- # likely to be required
- return "waf.bat build"
- def collect_source(self):
- # likely to be required
- ...
-class msvs_bar(msvs.msvs_generator):
- def init(self):
- msvs.msvs_generator.init(self)
- self.vsnode_target = vsnode_target
+To customize the outputs, provide subclasses in your wscript files::
+
+ from waflib.extras import msvs
+ class vsnode_target(msvs.vsnode_target):
+ def get_build_command(self, props):
+ # likely to be required
+ return "waf.bat build"
+ def collect_source(self):
+ # likely to be required
+ ...
+ class msvs_bar(msvs.msvs_generator):
+ def init(self):
+ msvs.msvs_generator.init(self)
+ self.vsnode_target = vsnode_target
The msvs class re-uses the same build() function for reading the targets (task generators),
-you may therefore specify msvs settings on the context object:
+you may therefore specify msvs settings on the context object::
-def build(bld):
- bld.solution_name = 'foo.sln'
- bld.waf_command = 'waf.bat'
- bld.projects_dir = bld.srcnode.make_node('.depproj')
- bld.projects_dir.mkdir()
+ def build(bld):
+ bld.solution_name = 'foo.sln'
+ bld.waf_command = 'waf.bat'
+ bld.projects_dir = bld.srcnode.make_node('.depproj')
+ bld.projects_dir.mkdir()
For visual studio 2008, the command is called 'msvs2008', and the classes
such as vsnode_target are wrapped by a decorator class 'wrap_2008' to
provide special functionality.
+To customize platform toolsets, pass additional parameters, for example::
+
+ class msvs_2013(msvs.msvs_generator):
+ cmd = 'msvs2013'
+ numver = '13.00'
+ vsver = '2013'
+ platform_toolset_ver = 'v120'
+
ASSUMPTIONS:
* a project can be either a directory or a target, vcxproj files are written only for targets that have source files
* each project is a vcxproj file, therefore the project uuid needs only to be a hash of the absolute path
@@ -105,7 +117,7 @@ PROJECT_TEMPLATE = r'''<?xml version="1.0" encoding="UTF-8"?>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'" Label="Configuration">
<ConfigurationType>Makefile</ConfigurationType>
<OutDir>${b.outdir}</OutDir>
- <PlatformToolset>v110</PlatformToolset>
+ <PlatformToolset>${project.platform_toolset_ver}</PlatformToolset>
</PropertyGroup>
${endfor}
@@ -293,7 +305,8 @@ def compile_template(line):
extr = []
def repl(match):
g = match.group
- if g('dollar'): return "$"
+ if g('dollar'):
+ return "$"
elif g('backslash'):
return "\\"
elif g('subst'):
@@ -318,14 +331,14 @@ def compile_template(line):
app("lst.append(%r)" % params[x])
f = extr[x]
- if f.startswith('if') or f.startswith('for'):
+ if f.startswith(('if', 'for')):
app(f + ':')
indent += 1
elif f.startswith('py:'):
app(f[3:])
- elif f.startswith('endif') or f.startswith('endfor'):
+ elif f.startswith(('endif', 'endfor')):
indent -= 1
- elif f.startswith('else') or f.startswith('elif'):
+ elif f.startswith(('else', 'elif')):
indent -= 1
app(f + ':')
indent += 1
@@ -351,7 +364,7 @@ def rm_blank_lines(txt):
BOM = '\xef\xbb\xbf'
try:
- BOM = bytes(BOM, 'iso8859-1') # python 3
+ BOM = bytes(BOM, 'latin-1') # python 3
except TypeError:
pass
@@ -364,7 +377,7 @@ def stealth_write(self, data, flags='wb'):
data = data.decode(sys.getfilesystemencoding(), 'replace')
data = data.encode('utf-8')
- if self.name.endswith('.vcproj') or self.name.endswith('.vcxproj'):
+ if self.name.endswith(('.vcproj', '.vcxproj')):
data = BOM + data
try:
@@ -374,7 +387,7 @@ def stealth_write(self, data, flags='wb'):
except (IOError, ValueError):
self.write(data, flags=flags)
else:
- Logs.debug('msvs: skipping %s' % self.win32path())
+ Logs.debug('msvs: skipping %s', self.win32path())
Node.Node.stealth_write = stealth_write
re_win32 = re.compile(r'^([/\\]cygdrive)?[/\\]([a-z])([^a-z0-9_-].*)', re.I)
@@ -509,6 +522,7 @@ class vsnode_project(vsnode):
self.path = node
self.uuid = make_uuid(node.win32path())
self.name = node.name
+ self.platform_toolset_ver = getattr(ctx, 'platform_toolset_ver', None)
self.title = self.path.win32path()
self.source = [] # list of node objects
self.build_properties = [] # list of properties (nmake commands, output dir, etc)
@@ -528,7 +542,7 @@ class vsnode_project(vsnode):
return lst
def write(self):
- Logs.debug('msvs: creating %r' % self.path)
+ Logs.debug('msvs: creating %r', self.path)
# first write the project file
template1 = compile_template(PROJECT_TEMPLATE)
@@ -548,7 +562,7 @@ class vsnode_project(vsnode):
required for writing the source files
"""
name = node.name
- if name.endswith('.cpp') or name.endswith('.c'):
+ if name.endswith(('.cpp', '.c')):
return 'ClCompile'
return 'ClInclude'
@@ -628,10 +642,10 @@ class vsnode_project_view(vsnode_alias):
vsnode_alias.__init__(self, ctx, node, name)
self.tg = self.ctx() # fake one, cannot remove
self.exclude_files = Node.exclude_regs + '''
-waf-1.8.*
-waf3-1.8.*/**
-.waf-1.8.*
-.waf3-1.8.*/**
+waf-2*
+waf3-2*/**
+.waf-2*
+.waf3-2*/**
**/*.sdf
**/*.suo
**/*.ncb
@@ -715,6 +729,9 @@ class msvs_generator(BuildContext):
'''generates a visual studio 2010 solution'''
cmd = 'msvs'
fun = 'build'
+ numver = '11.00' # Visual Studio Version Number
+ vsver = '2010' # Visual Studio Version Year
+ platform_toolset_ver = 'v110' # Platform Toolset Version Number
def init(self):
"""
@@ -744,8 +761,9 @@ class msvs_generator(BuildContext):
if not getattr(self, 'vsnode_project_view', None):
self.vsnode_project_view = vsnode_project_view
- self.numver = '11.00'
- self.vsver = '2010'
+ self.numver = self.__class__.numver
+ self.vsver = self.__class__.vsver
+ self.platform_toolset_ver = self.__class__.platform_toolset_ver
def execute(self):
"""
@@ -789,7 +807,7 @@ class msvs_generator(BuildContext):
# and finally write the solution file
node = self.get_solution_node()
node.parent.mkdir()
- Logs.warn('Creating %r' % node)
+ Logs.warn('Creating %r', node)
template1 = compile_template(SOLUTION_TEMPLATE)
sln_str = template1(self)
sln_str = rm_blank_lines(sln_str)
@@ -968,7 +986,7 @@ def wrap_2008(cls):
return ''
def write(self):
- Logs.debug('msvs: creating %r' % self.path)
+ Logs.debug('msvs: creating %r', self.path)
template1 = compile_template(self.project_template)
proj_str = template1(self)
proj_str = rm_blank_lines(proj_str)
@@ -980,6 +998,8 @@ class msvs_2008_generator(msvs_generator):
'''generates a visual studio 2008 solution'''
cmd = 'msvs2008'
fun = msvs_generator.fun
+ numver = '10.00'
+ vsver = '2008'
def init(self):
if not getattr(self, 'project_extension', None):
@@ -997,8 +1017,6 @@ class msvs_2008_generator(msvs_generator):
self.vsnode_project_view = wrap_2008(vsnode_project_view)
msvs_generator.init(self)
- self.numver = '10.00'
- self.vsver = '2008'
def options(ctx):
"""
@@ -1031,3 +1049,4 @@ def options(ctx):
else:
old(ctx)
BuildContext.execute = override_build_state
+
diff --git a/third_party/waf/waflib/extras/netcache_client.py b/third_party/waf/waflib/extras/netcache_client.py
index 63859b6a207..ffd40d87f42 100644
--- a/third_party/waf/waflib/extras/netcache_client.py
+++ b/third_party/waf/waflib/extras/netcache_client.py
@@ -1,5 +1,9 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
# Thomas Nagy, 2011-2015 (ita)
"""
@@ -40,7 +44,7 @@ all_sigs_in_cache = (0.0, [])
def put_data(conn, data):
if sys.hexversion > 0x3000000:
- data = data.encode('iso8859-1')
+ data = data.encode('latin-1')
cnt = 0
while cnt < len(data):
sent = conn.send(data[cnt:])
@@ -107,8 +111,8 @@ def read_header(conn):
buf.append(data)
cnt += len(data)
if sys.hexversion > 0x3000000:
- ret = ''.encode('iso8859-1').join(buf)
- ret = ret.decode('iso8859-1')
+ ret = ''.encode('latin-1').join(buf)
+ ret = ret.decode('latin-1')
else:
ret = ''.join(buf)
return ret
@@ -140,13 +144,13 @@ def check_cache(conn, ssig):
cnt += len(data)
if sys.hexversion > 0x3000000:
- ret = ''.encode('iso8859-1').join(buf)
- ret = ret.decode('iso8859-1')
+ ret = ''.encode('latin-1').join(buf)
+ ret = ret.decode('latin-1')
else:
ret = ''.join(buf)
all_sigs_in_cache = (time.time(), ret.splitlines())
- Logs.debug('netcache: server cache has %r entries' % len(all_sigs_in_cache[1]))
+ Logs.debug('netcache: server cache has %r entries', len(all_sigs_in_cache[1]))
if not ssig in all_sigs_in_cache[1]:
raise ValueError('no file %s in cache' % ssig)
@@ -215,26 +219,25 @@ def can_retrieve_cache(self):
recv_file(conn, ssig, cnt, p)
cnt += 1
except MissingFile as e:
- Logs.debug('netcache: file is not in the cache %r' % e)
+ Logs.debug('netcache: file is not in the cache %r', e)
err = True
-
except Exception as e:
- Logs.debug('netcache: could not get the files %r' % e)
+ Logs.debug('netcache: could not get the files %r', self.outputs)
+ if Logs.verbose > 1:
+ Logs.debug('netcache: exception %r', e)
err = True
# broken connection? remove this one
close_connection(conn)
conn = None
+ else:
+ Logs.debug('netcache: obtained %r from cache', self.outputs)
+
finally:
release_connection(conn)
if err:
return False
- for node in self.outputs:
- node.sig = sig
- #if self.generator.bld.progress_bar < 1:
- # self.generator.bld.to_log('restoring from cache %r\n' % node.abspath())
-
self.cached = True
return True
@@ -263,8 +266,9 @@ def put_files_cache(self):
if not conn:
conn = get_connection(push=True)
sock_send(conn, ssig, cnt, node.abspath())
+ Logs.debug('netcache: sent %r', node)
except Exception as e:
- Logs.debug("netcache: could not push the files %r" % e)
+ Logs.debug('netcache: could not push the files %r', e)
# broken connection? remove this one
close_connection(conn)
@@ -387,3 +391,4 @@ def build(bld):
push_addr = None
setup_netcache(bld, push_addr, pull_addr)
+
diff --git a/third_party/waf/waflib/extras/nobuild.py b/third_party/waf/waflib/extras/nobuild.py
deleted file mode 100644
index c628af834dd..00000000000
--- a/third_party/waf/waflib/extras/nobuild.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2015 (ita)
-
-"""
-Override the build commands to write empty files.
-This is useful for profiling and evaluating the Python overhead.
-
-To use::
-
- def build(bld):
- ...
- bld.load('nobuild')
-
-"""
-
-from waflib import Task
-def build(bld):
- def run(self):
- for x in self.outputs:
- x.write('')
- for (name, cls) in Task.classes.items():
- cls.run = run
diff --git a/third_party/waf/waflib/extras/objcopy.py b/third_party/waf/waflib/extras/objcopy.py
index 939c2c1291b..baa54c0bd54 100644
--- a/third_party/waf/waflib/extras/objcopy.py
+++ b/third_party/waf/waflib/extras/objcopy.py
@@ -1,9 +1,13 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/python
# Grygoriy Fuchedzhy 2010
"""
Support for converting linked targets to ihex, srec or binary files using
-objcopy. Use the 'objcopy' feature in conjuction with the 'cc' or 'cxx'
+objcopy. Use the 'objcopy' feature in conjunction with the 'cc' or 'cxx'
feature. The 'objcopy' feature uses the following attributes:
objcopy_bfdname Target object format name (eg. ihex, srec, binary).
@@ -43,9 +47,8 @@ def map_objcopy(self):
pass
if self.objcopy_install_path:
- self.bld.install_files(self.objcopy_install_path,
- task.outputs[0],
- env=task.env.derive())
+ self.add_install_files(install_to=self.objcopy_install_path, install_from=task.outputs[0])
def configure(ctx):
ctx.find_program('objcopy', var='OBJCOPY', mandatory=True)
+
diff --git a/third_party/waf/waflib/extras/ocaml.py b/third_party/waf/waflib/extras/ocaml.py
new file mode 100644
index 00000000000..56f851e72cf
--- /dev/null
+++ b/third_party/waf/waflib/extras/ocaml.py
@@ -0,0 +1,352 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2010 (ita)
+
+"ocaml support"
+
+import os, re
+from waflib import Utils, Task
+from waflib.Logs import error
+from waflib.TaskGen import feature, before_method, after_method, extension
+
+EXT_MLL = ['.mll']
+EXT_MLY = ['.mly']
+EXT_MLI = ['.mli']
+EXT_MLC = ['.c']
+EXT_ML = ['.ml']
+
+open_re = re.compile('^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M)
+foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M)
+def filter_comments(txt):
+ meh = [0]
+ def repl(m):
+ if m.group(1):
+ meh[0] += 1
+ elif m.group(2):
+ meh[0] -= 1
+ elif not meh[0]:
+ return m.group()
+ return ''
+ return foo.sub(repl, txt)
+
+def scan(self):
+ node = self.inputs[0]
+ code = filter_comments(node.read())
+
+ global open_re
+ names = []
+ import_iterator = open_re.finditer(code)
+ if import_iterator:
+ for import_match in import_iterator:
+ names.append(import_match.group(1))
+ found_lst = []
+ raw_lst = []
+ for name in names:
+ nd = None
+ for x in self.incpaths:
+ nd = x.find_resource(name.lower()+'.ml')
+ if not nd:
+ nd = x.find_resource(name+'.ml')
+ if nd:
+ found_lst.append(nd)
+ break
+ else:
+ raw_lst.append(name)
+
+ return (found_lst, raw_lst)
+
+native_lst=['native', 'all', 'c_object']
+bytecode_lst=['bytecode', 'all']
+
+@feature('ocaml')
+def init_ml(self):
+ Utils.def_attrs(self,
+ type = 'all',
+ incpaths_lst = [],
+ bld_incpaths_lst = [],
+ mlltasks = [],
+ mlytasks = [],
+ mlitasks = [],
+ native_tasks = [],
+ bytecode_tasks = [],
+ linktasks = [],
+ bytecode_env = None,
+ native_env = None,
+ compiled_tasks = [],
+ includes = '',
+ uselib = '',
+ are_deps_set = 0)
+
+@feature('ocaml')
+@after_method('init_ml')
+def init_envs_ml(self):
+
+ self.islibrary = getattr(self, 'islibrary', False)
+
+ global native_lst, bytecode_lst
+ self.native_env = None
+ if self.type in native_lst:
+ self.native_env = self.env.derive()
+ if self.islibrary:
+ self.native_env['OCALINKFLAGS'] = '-a'
+
+ self.bytecode_env = None
+ if self.type in bytecode_lst:
+ self.bytecode_env = self.env.derive()
+ if self.islibrary:
+ self.bytecode_env['OCALINKFLAGS'] = '-a'
+
+ if self.type == 'c_object':
+ self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')
+
+@feature('ocaml')
+@before_method('apply_vars_ml')
+@after_method('init_envs_ml')
+def apply_incpaths_ml(self):
+ inc_lst = self.includes.split()
+ lst = self.incpaths_lst
+ for dir in inc_lst:
+ node = self.path.find_dir(dir)
+ if not node:
+ error("node not found: " + str(dir))
+ continue
+ if not node in lst:
+ lst.append(node)
+ self.bld_incpaths_lst.append(node)
+ # now the nodes are added to self.incpaths_lst
+
+@feature('ocaml')
+@before_method('process_source')
+def apply_vars_ml(self):
+ for i in self.incpaths_lst:
+ if self.bytecode_env:
+ app = self.bytecode_env.append_value
+ app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])
+
+ if self.native_env:
+ app = self.native_env.append_value
+ app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])
+
+ varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT']
+ for name in self.uselib.split():
+ for vname in varnames:
+ cnt = self.env[vname+'_'+name]
+ if cnt:
+ if self.bytecode_env:
+ self.bytecode_env.append_value(vname, cnt)
+ if self.native_env:
+ self.native_env.append_value(vname, cnt)
+
+@feature('ocaml')
+@after_method('process_source')
+def apply_link_ml(self):
+
+ if self.bytecode_env:
+ ext = self.islibrary and '.cma' or '.run'
+
+ linktask = self.create_task('ocalink')
+ linktask.bytecode = 1
+ linktask.set_outputs(self.path.find_or_declare(self.target + ext))
+ linktask.env = self.bytecode_env
+ self.linktasks.append(linktask)
+
+ if self.native_env:
+ if self.type == 'c_object':
+ ext = '.o'
+ elif self.islibrary:
+ ext = '.cmxa'
+ else:
+ ext = ''
+
+ linktask = self.create_task('ocalinkx')
+ linktask.set_outputs(self.path.find_or_declare(self.target + ext))
+ linktask.env = self.native_env
+ self.linktasks.append(linktask)
+
+ # we produce a .o file to be used by gcc
+ self.compiled_tasks.append(linktask)
+
+@extension(*EXT_MLL)
+def mll_hook(self, node):
+ mll_task = self.create_task('ocamllex', node, node.change_ext('.ml'))
+ mll_task.env = self.native_env.derive()
+ self.mlltasks.append(mll_task)
+
+ self.source.append(mll_task.outputs[0])
+
+@extension(*EXT_MLY)
+def mly_hook(self, node):
+ mly_task = self.create_task('ocamlyacc', node, [node.change_ext('.ml'), node.change_ext('.mli')])
+ mly_task.env = self.native_env.derive()
+ self.mlytasks.append(mly_task)
+ self.source.append(mly_task.outputs[0])
+
+ task = self.create_task('ocamlcmi', mly_task.outputs[1], mly_task.outputs[1].change_ext('.cmi'))
+ task.env = self.native_env.derive()
+
+@extension(*EXT_MLI)
+def mli_hook(self, node):
+ task = self.create_task('ocamlcmi', node, node.change_ext('.cmi'))
+ task.env = self.native_env.derive()
+ self.mlitasks.append(task)
+
+@extension(*EXT_MLC)
+def mlc_hook(self, node):
+ task = self.create_task('ocamlcc', node, node.change_ext('.o'))
+ task.env = self.native_env.derive()
+ self.compiled_tasks.append(task)
+
+@extension(*EXT_ML)
+def ml_hook(self, node):
+ if self.native_env:
+ task = self.create_task('ocamlx', node, node.change_ext('.cmx'))
+ task.env = self.native_env.derive()
+ task.incpaths = self.bld_incpaths_lst
+ self.native_tasks.append(task)
+
+ if self.bytecode_env:
+ task = self.create_task('ocaml', node, node.change_ext('.cmo'))
+ task.env = self.bytecode_env.derive()
+ task.bytecode = 1
+ task.incpaths = self.bld_incpaths_lst
+ self.bytecode_tasks.append(task)
+
+def compile_may_start(self):
+
+ if not getattr(self, 'flag_deps', ''):
+ self.flag_deps = 1
+
+ # the evil part is that we can only compute the dependencies after the
+ # source files can be read (this means actually producing the source files)
+ if getattr(self, 'bytecode', ''):
+ alltasks = self.generator.bytecode_tasks
+ else:
+ alltasks = self.generator.native_tasks
+
+ self.signature() # ensure that files are scanned - unfortunately
+ tree = self.generator.bld
+ for node in self.inputs:
+ lst = tree.node_deps[self.uid()]
+ for depnode in lst:
+ for t in alltasks:
+ if t == self:
+ continue
+ if depnode in t.inputs:
+ self.set_run_after(t)
+
+ # TODO necessary to get the signature right - for now
+ delattr(self, 'cache_sig')
+ self.signature()
+
+ return Task.Task.runnable_status(self)
+
+class ocamlx(Task.Task):
+ """native caml compilation"""
+ color = 'GREEN'
+ run_str = '${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
+ scan = scan
+ runnable_status = compile_may_start
+
+class ocaml(Task.Task):
+ """bytecode caml compilation"""
+ color = 'GREEN'
+ run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
+ scan = scan
+ runnable_status = compile_may_start
+
+class ocamlcmi(Task.Task):
+ """interface generator (the .i files?)"""
+ color = 'BLUE'
+ run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLINCLUDES} -o ${TGT} -c ${SRC}'
+ before = ['ocamlcc', 'ocaml', 'ocamlcc']
+
+class ocamlcc(Task.Task):
+ """ocaml to c interfaces"""
+ color = 'GREEN'
+ run_str = 'cd ${TGT[0].bld_dir()} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${OCAMLINCLUDES} -c ${SRC[0].abspath()}'
+
+class ocamllex(Task.Task):
+ """lexical generator"""
+ color = 'BLUE'
+ run_str = '${OCAMLLEX} ${SRC} -o ${TGT}'
+ before = ['ocamlcmi', 'ocaml', 'ocamlcc']
+
+class ocamlyacc(Task.Task):
+ """parser generator"""
+ color = 'BLUE'
+ run_str = '${OCAMLYACC} -b ${tsk.base()} ${SRC}'
+ before = ['ocamlcmi', 'ocaml', 'ocamlcc']
+
+ def base(self):
+ node = self.outputs[0]
+ s = os.path.splitext(node.name)[0]
+ return node.bld_dir() + os.sep + s
+
+def link_may_start(self):
+
+ if getattr(self, 'bytecode', 0):
+ alltasks = self.generator.bytecode_tasks
+ else:
+ alltasks = self.generator.native_tasks
+
+ for x in alltasks:
+ if not x.hasrun:
+ return Task.ASK_LATER
+
+ if not getattr(self, 'order', ''):
+
+ # now reorder the inputs given the task dependencies
+ # this part is difficult, we do not have a total order on the tasks
+ # if the dependencies are wrong, this may not stop
+ seen = []
+ pendant = []+alltasks
+ while pendant:
+ task = pendant.pop(0)
+ if task in seen:
+ continue
+ for x in task.run_after:
+ if not x in seen:
+ pendant.append(task)
+ break
+ else:
+ seen.append(task)
+ self.inputs = [x.outputs[0] for x in seen]
+ self.order = 1
+ return Task.Task.runnable_status(self)
+
+class ocalink(Task.Task):
+ """bytecode caml link"""
+ color = 'YELLOW'
+ run_str = '${OCAMLC} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS} ${SRC}'
+ runnable_status = link_may_start
+ after = ['ocaml', 'ocamlcc']
+
+class ocalinkx(Task.Task):
+ """native caml link"""
+ color = 'YELLOW'
+ run_str = '${OCAMLOPT} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS_OPT} ${SRC}'
+ runnable_status = link_may_start
+ after = ['ocamlx', 'ocamlcc']
+
+def configure(conf):
+ opt = conf.find_program('ocamlopt', var='OCAMLOPT', mandatory=False)
+ occ = conf.find_program('ocamlc', var='OCAMLC', mandatory=False)
+ if (not opt) or (not occ):
+ conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH')
+
+ v = conf.env
+ v['OCAMLC'] = occ
+ v['OCAMLOPT'] = opt
+ v['OCAMLLEX'] = conf.find_program('ocamllex', var='OCAMLLEX', mandatory=False)
+ v['OCAMLYACC'] = conf.find_program('ocamlyacc', var='OCAMLYACC', mandatory=False)
+ v['OCAMLFLAGS'] = ''
+ where = conf.cmd_and_log(conf.env.OCAMLC + ['-where']).strip()+os.sep
+ v['OCAMLLIB'] = where
+ v['LIBPATH_OCAML'] = where
+ v['INCLUDES_OCAML'] = where
+ v['LIB_OCAML'] = 'camlrun'
+
diff --git a/third_party/waf/waflib/extras/package.py b/third_party/waf/waflib/extras/package.py
index 387a3cdc3b4..669aa170bd5 100644
--- a/third_party/waf/waflib/extras/package.py
+++ b/third_party/waf/waflib/extras/package.py
@@ -1,5 +1,9 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
# Thomas Nagy, 2011
"""
@@ -64,7 +68,7 @@ def download_archive(self, src, dst):
else:
tmp = self.root.make_node(dst)
tmp.write(web.read())
- Logs.warn('Downloaded %s from %s' % (tmp.abspath(), url))
+ Logs.warn('Downloaded %s from %s', tmp.abspath(), url)
break
else:
self.fatal('Could not get the package %s' % src)
@@ -73,3 +77,4 @@ def download_archive(self, src, dst):
def load_packages(self):
self.get_package_cache_dir()
# read the dependencies, get the archives, ..
+
diff --git a/third_party/waf/waflib/extras/parallel_debug.py b/third_party/waf/waflib/extras/parallel_debug.py
index 94191250549..d365024e205 100644
--- a/third_party/waf/waflib/extras/parallel_debug.py
+++ b/third_party/waf/waflib/extras/parallel_debug.py
@@ -1,5 +1,9 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
# Thomas Nagy, 2007-2010 (ita)
"""
@@ -12,13 +16,12 @@ a file named pdebug.svg in the source directory::
...
"""
-import time, sys, re
-try: from Queue import Queue
-except: from queue import Queue
-from waflib import Runner, Options, Utils, Task, Logs, Errors
-
-#import random
-#random.seed(100)
+import re, sys, threading, time, traceback
+try:
+ from Queue import Queue
+except:
+ from queue import Queue
+from waflib import Runner, Options, Task, Logs, Errors
SVG_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
@@ -38,7 +41,7 @@ svg.addEventListener('mouseover', function(e) {
if (x) {
g.setAttribute('class', g.getAttribute('class') + ' over');
x.setAttribute('class', x.getAttribute('class') + ' over');
- showInfo(e, g.id);
+ showInfo(e, g.id, e.target.attributes.tooltip.value);
}
}, false);
@@ -52,11 +55,12 @@ svg.addEventListener('mouseout', function(e) {
}
}, false);
-function showInfo(evt, txt) {
+function showInfo(evt, txt, details) {
+${if project.tooltip}
tooltip = document.getElementById('tooltip');
var t = document.getElementById('tooltiptext');
- t.firstChild.data = txt;
+ t.firstChild.data = txt + " " + details;
var x = evt.clientX + 9;
if (x > 250) { x -= t.getComputedTextLength() + 16; }
@@ -66,6 +70,7 @@ function showInfo(evt, txt) {
var r = document.getElementById('tooltiprect');
r.setAttribute('width', t.getComputedTextLength() + 6);
+${endif}
}
function hideInfo(evt) {
@@ -77,8 +82,7 @@ function hideInfo(evt) {
<!-- inkscape requires a big rectangle or it will not export the pictures properly -->
<rect
x='${project.x}' y='${project.y}' width='${project.width}' height='${project.height}'
- style="font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;"
- />
+ style="font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;"></rect>
${if project.title}
<text x="${project.title_x}" y="${project.title_y}"
@@ -89,7 +93,7 @@ ${endif}
${for cls in project.groups}
<g id='${cls.classname}'>
${for rect in cls.rects}
- <rect x='${rect.x}' y='${rect.y}' width='${rect.width}' height='${rect.height}' style="font-size:10;fill:${rect.color};fill-rule:evenodd;stroke:#000000;stroke-width:0.4;" />
+ <rect x='${rect.x}' y='${rect.y}' width='${rect.width}' height='${rect.height}' tooltip='${rect.name}' style="font-size:10;fill:${rect.color};fill-rule:evenodd;stroke:#000000;stroke-width:0.4;" />
${endfor}
</g>
${endfor}
@@ -103,10 +107,12 @@ ${for info in project.infos}
</g>
${endfor}
+${if project.tooltip}
<g transform="translate(0,0)" visibility="hidden" id="tooltip">
<rect id="tooltiprect" y="-15" x="-3" width="1" height="20" style="stroke:black;fill:#edefc2;stroke-width:1"/>
- <text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;" />
+ <text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;"> </text>
</g>
+${endif}
</svg>
"""
@@ -125,7 +131,8 @@ def compile_template(line):
extr = []
def repl(match):
g = match.group
- if g('dollar'): return "$"
+ if g('dollar'):
+ return "$"
elif g('backslash'):
return "\\"
elif g('subst'):
@@ -150,14 +157,14 @@ def compile_template(line):
app("lst.append(%r)" % params[x])
f = extr[x]
- if f.startswith('if') or f.startswith('for'):
+ if f.startswith(('if', 'for')):
app(f + ':')
indent += 1
elif f.startswith('py:'):
app(f[3:])
- elif f.startswith('endif') or f.startswith('endfor'):
+ elif f.startswith(('endif', 'endfor')):
indent -= 1
- elif f.startswith('else') or f.startswith('elif'):
+ elif f.startswith(('else', 'elif')):
indent -= 1
app(f + ':')
indent += 1
@@ -207,31 +214,23 @@ def map_to_color(name):
return color2code['RED']
def process(self):
- m = self.master
- if m.stop:
- m.out.put(self)
- return
-
- self.master.set_running(1, id(Utils.threading.currentThread()), self)
-
- # remove the task signature immediately before it is executed
- # in case of failure the task will be executed again
+ m = self.generator.bld.producer
try:
+ # TODO another place for this?
del self.generator.bld.task_sigs[self.uid()]
- except:
+ except KeyError:
pass
+ self.generator.bld.producer.set_running(1, self)
+
try:
- self.generator.bld.returned_tasks.append(self)
- self.log_display(self.generator.bld)
ret = self.run()
except Exception:
- self.err_msg = Utils.ex_stack()
+ self.err_msg = traceback.format_exc()
self.hasrun = Task.EXCEPTION
# TODO cleanup
m.error_handler(self)
- m.out.put(self)
return
if ret:
@@ -243,17 +242,17 @@ def process(self):
except Errors.WafError:
pass
except Exception:
- self.err_msg = Utils.ex_stack()
+ self.err_msg = traceback.format_exc()
self.hasrun = Task.EXCEPTION
else:
self.hasrun = Task.SUCCESS
if self.hasrun != Task.SUCCESS:
m.error_handler(self)
- self.master.set_running(-1, id(Utils.threading.currentThread()), self)
- m.out.put(self)
-Task.TaskBase.process_back = Task.TaskBase.process
-Task.TaskBase.process = process
+ self.generator.bld.producer.set_running(-1, self)
+
+Task.Task.process_back = Task.Task.process
+Task.Task.process = process
old_start = Runner.Parallel.start
def do_start(self):
@@ -268,8 +267,26 @@ def do_start(self):
make_picture(self)
Runner.Parallel.start = do_start
-def set_running(self, by, i, tsk):
- self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by) )
+lock_running = threading.Lock()
+def set_running(self, by, tsk):
+ with lock_running:
+ try:
+ cache = self.lock_cache
+ except AttributeError:
+ cache = self.lock_cache = {}
+
+ i = 0
+ if by > 0:
+ vals = cache.values()
+ for i in range(self.numjobs):
+ if i not in vals:
+ cache[tsk] = i
+ break
+ else:
+ i = cache[tsk]
+ del cache[tsk]
+
+ self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by, ",".join(map(str, tsk.outputs))) )
Runner.Parallel.set_running = set_running
def name2class(name):
@@ -309,7 +326,7 @@ def make_picture(producer):
acc = []
for x in tmp:
thread_count += x[6]
- acc.append("%d %d %f %r %d %d %d" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count))
+ acc.append("%d %d %f %r %d %d %d %s" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count, x[7]))
data_node = producer.bld.path.make_node('pdebug.dat')
data_node.write('\n'.join(acc))
@@ -350,7 +367,7 @@ def make_picture(producer):
end = line[2]
#print id, thread_id, begin, end
#acc.append( ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) )
- acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3]) )
+ acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3], line[7]) )
break
if Options.options.dmaxtime < 0.1:
@@ -377,16 +394,18 @@ def make_picture(producer):
model.width = gwidth + 4
model.height = gheight + 4
+ model.tooltip = not Options.options.dnotooltip
+
model.title = Options.options.dtitle
model.title_x = gwidth / 2
model.title_y = gheight + - 5
groups = {}
- for (x, y, w, h, clsname) in acc:
+ for (x, y, w, h, clsname, name) in acc:
try:
- groups[clsname].append((x, y, w, h))
+ groups[clsname].append((x, y, w, h, name))
except:
- groups[clsname] = [(x, y, w, h)]
+ groups[clsname] = [(x, y, w, h, name)]
# groups of rectangles (else js highlighting is slow)
model.groups = []
@@ -395,13 +414,14 @@ def make_picture(producer):
model.groups.append(g)
g.classname = name2class(cls)
g.rects = []
- for (x, y, w, h) in groups[cls]:
+ for (x, y, w, h, name) in groups[cls]:
r = tobject()
g.rects.append(r)
r.x = 2 + x * ratio
r.y = 2 + y
r.width = w * ratio
r.height = h
+ r.name = name
r.color = map_to_color(cls)
cnt = THREAD_AMOUNT
@@ -430,7 +450,7 @@ def make_picture(producer):
node = producer.bld.path.make_node('pdebug.svg')
node.write(txt)
- Logs.warn('Created the diagram %r' % node.abspath())
+ Logs.warn('Created the diagram %r', node)
def options(opt):
opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv),
@@ -439,3 +459,5 @@ def options(opt):
opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime')
opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband')
opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')
+ opt.add_option('--dnotooltip', action='store_true', help='disable tooltips', default=False, dest='dnotooltip')
+
diff --git a/third_party/waf/waflib/extras/pch.py b/third_party/waf/waflib/extras/pch.py
index 8b107ac59da..98b23f47fe4 100644
--- a/third_party/waf/waflib/extras/pch.py
+++ b/third_party/waf/waflib/extras/pch.py
@@ -1,5 +1,9 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
# Alexander Afanasyev (UCLA), 2014
"""
@@ -129,7 +133,7 @@ def add_pch(self):
x.env.append_value('CXXFLAGS', self.env['CXXPCH_F'] + [pch.target])
class gchx(Task.Task):
- run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${CXXPCH_FLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXXPCH_F:SRC} ${CXX_SRC_F}${SRC[0].abspath()} ${CXX_TGT_F}${TGT[0].abspath()}'
+ run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CXXPCH_FLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXXPCH_F:SRC} ${CXX_SRC_F}${SRC[0].abspath()} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
scan = c_preproc.scan
color = 'BLUE'
ext_out=['.h']
diff --git a/third_party/waf/waflib/extras/pep8.py b/third_party/waf/waflib/extras/pep8.py
index 3709d9be6fc..278662f8051 100644
--- a/third_party/waf/waflib/extras/pep8.py
+++ b/third_party/waf/waflib/extras/pep8.py
@@ -1,5 +1,9 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
#
# written by Sylvain Rouquette, 2011
@@ -9,7 +13,7 @@ $ easy_install pep8
or
$ pip install pep8
-To add the boost tool to the waf file:
+To add the pep8 tool to the waf file:
$ ./waf-light --tools=compat15,pep8
or, if you have waf >= 1.6.2
$ ./waf update --files=pep8
diff --git a/third_party/waf/waflib/extras/pgicc.py b/third_party/waf/waflib/extras/pgicc.py
new file mode 100644
index 00000000000..ba50e172bc0
--- /dev/null
+++ b/third_party/waf/waflib/extras/pgicc.py
@@ -0,0 +1,79 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Antoine Dechaume 2011
+
+"""
+Detect the PGI C compiler
+"""
+
+import sys, re
+from waflib import Errors
+from waflib.Configure import conf
+from waflib.Tools.compiler_c import c_compiler
+c_compiler['linux'].append('pgicc')
+
+@conf
+def find_pgi_compiler(conf, var, name):
+ """
+ Find the program name, and execute it to ensure it really is itself.
+ """
+ if sys.platform == 'cygwin':
+ conf.fatal('The PGI compiler does not work on Cygwin')
+
+ v = conf.env
+ cc = None
+ if v[var]:
+ cc = v[var]
+ elif var in conf.environ:
+ cc = conf.environ[var]
+ if not cc:
+ cc = conf.find_program(name, var=var)
+ if not cc:
+ conf.fatal('PGI Compiler (%s) was not found' % name)
+
+ v[var + '_VERSION'] = conf.get_pgi_version(cc)
+ v[var] = cc
+ v[var + '_NAME'] = 'pgi'
+
+@conf
+def get_pgi_version(conf, cc):
+ """Find the version of a pgi compiler."""
+ version_re = re.compile(r"The Portland Group", re.I).search
+ cmd = cc + ['-V', '-E'] # Issue 1078, prevent wrappers from linking
+
+ try:
+ out, err = conf.cmd_and_log(cmd, output=0)
+ except Errors.WafError:
+ conf.fatal('Could not find pgi compiler %r' % cmd)
+
+ if out:
+ match = version_re(out)
+ else:
+ match = version_re(err)
+
+ if not match:
+ conf.fatal('Could not verify PGI signature')
+
+ cmd = cc + ['-help=variable']
+ try:
+ out, err = conf.cmd_and_log(cmd, output=0)
+ except Errors.WafError:
+ conf.fatal('Could not find pgi compiler %r' % cmd)
+
+ version = re.findall('^COMPVER\s*=(.*)', out, re.M)
+ if len(version) != 1:
+ conf.fatal('Could not determine the compiler version')
+ return version[0]
+
+def configure(conf):
+ conf.find_pgi_compiler('CC', 'pgcc')
+ conf.find_ar()
+ conf.gcc_common_flags()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
+
diff --git a/third_party/waf/waflib/extras/pgicxx.py b/third_party/waf/waflib/extras/pgicxx.py
new file mode 100644
index 00000000000..7d077d74789
--- /dev/null
+++ b/third_party/waf/waflib/extras/pgicxx.py
@@ -0,0 +1,24 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Antoine Dechaume 2011
+
+"""
+Detect the PGI C++ compiler
+"""
+
+from waflib.Tools.compiler_cxx import cxx_compiler
+cxx_compiler['linux'].append('pgicxx')
+
+from waflib.extras import pgicc
+
+def configure(conf):
+ conf.find_pgi_compiler('CXX', 'pgCC')
+ conf.find_ar()
+ conf.gxx_common_flags()
+ conf.cxx_load_tools()
+ conf.cxx_add_flags()
+ conf.link_add_flags()
diff --git a/third_party/waf/waflib/extras/prefork.py b/third_party/waf/waflib/extras/prefork.py
deleted file mode 100755
index b912c5b1b7c..00000000000
--- a/third_party/waf/waflib/extras/prefork.py
+++ /dev/null
@@ -1,401 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2015 (ita)
-
-"""
-Execute commands through pre-forked servers. This tool creates as many servers as build threads.
-On a benchmark executed on Linux Kubuntu 14, 8 virtual cores and SSD drive::
-
- ./genbench.py /tmp/build 200 100 15 5
- waf clean build -j24
- # no prefork: 2m7.179s
- # prefork: 0m55.400s
-
-To use::
-
- def options(opt):
- # optional, will spawn 40 servers early
- opt.load('prefork')
-
- def build(bld):
- bld.load('prefork')
- ...
- more code
-
-The servers and the build process are using a shared nonce to prevent undesirable external connections.
-"""
-
-import os, re, socket, threading, sys, subprocess, time, atexit, traceback, random, signal
-try:
- import SocketServer
-except ImportError:
- import socketserver as SocketServer
-try:
- from queue import Queue
-except ImportError:
- from Queue import Queue
-try:
- import cPickle
-except ImportError:
- import pickle as cPickle
-
-SHARED_KEY = None
-HEADER_SIZE = 64
-
-REQ = 'REQ'
-RES = 'RES'
-BYE = 'BYE'
-
-def make_header(params, cookie=''):
- header = ','.join(params)
- header = header.ljust(HEADER_SIZE - len(cookie))
- assert(len(header) == HEADER_SIZE - len(cookie))
- header = header + cookie
- if sys.hexversion > 0x3000000:
- header = header.encode('iso8859-1')
- return header
-
-def safe_compare(x, y):
- sum = 0
- for (a, b) in zip(x, y):
- sum |= ord(a) ^ ord(b)
- return sum == 0
-
-re_valid_query = re.compile('^[a-zA-Z0-9_, ]+$')
-class req(SocketServer.StreamRequestHandler):
- def handle(self):
- try:
- while self.process_command():
- pass
- except KeyboardInterrupt:
- return
- except Exception as e:
- print(e)
-
- def send_response(self, ret, out, err, exc):
- if out or err or exc:
- data = (out, err, exc)
- data = cPickle.dumps(data, -1)
- else:
- data = ''
-
- params = [RES, str(ret), str(len(data))]
-
- # no need for the cookie in the response
- self.wfile.write(make_header(params))
- if data:
- self.wfile.write(data)
- self.wfile.flush()
-
- def process_command(self):
- query = self.rfile.read(HEADER_SIZE)
- if not query:
- return None
- #print(len(query))
- assert(len(query) == HEADER_SIZE)
- if sys.hexversion > 0x3000000:
- query = query.decode('iso8859-1')
-
- # magic cookie
- key = query[-20:]
- if not safe_compare(key, SHARED_KEY):
- print('%r %r' % (key, SHARED_KEY))
- self.send_response(-1, '', '', 'Invalid key given!')
- return 'meh'
-
- query = query[:-20]
- #print "%r" % query
- if not re_valid_query.match(query):
- self.send_response(-1, '', '', 'Invalid query %r' % query)
- raise ValueError('Invalid query %r' % query)
-
- query = query.strip().split(',')
-
- if query[0] == REQ:
- self.run_command(query[1:])
- elif query[0] == BYE:
- raise ValueError('Exit')
- else:
- raise ValueError('Invalid query %r' % query)
- return 'ok'
-
- def run_command(self, query):
-
- size = int(query[0])
- data = self.rfile.read(size)
- assert(len(data) == size)
- kw = cPickle.loads(data)
-
- # run command
- ret = out = err = exc = None
- cmd = kw['cmd']
- del kw['cmd']
- #print(cmd)
-
- try:
- if kw['stdout'] or kw['stderr']:
- p = subprocess.Popen(cmd, **kw)
- (out, err) = p.communicate()
- ret = p.returncode
- else:
- ret = subprocess.Popen(cmd, **kw).wait()
- except KeyboardInterrupt:
- raise
- except Exception as e:
- ret = -1
- exc = str(e) + traceback.format_exc()
-
- self.send_response(ret, out, err, exc)
-
-def create_server(conn, cls):
- # child processes do not need the key, so we remove it from the OS environment
- global SHARED_KEY
- SHARED_KEY = os.environ['SHARED_KEY']
- os.environ['SHARED_KEY'] = ''
-
- ppid = int(os.environ['PREFORKPID'])
- def reap():
- if os.sep != '/':
- os.waitpid(ppid, 0)
- else:
- while 1:
- try:
- os.kill(ppid, 0)
- except OSError:
- break
- else:
- time.sleep(1)
- os.kill(os.getpid(), signal.SIGKILL)
- t = threading.Thread(target=reap)
- t.setDaemon(True)
- t.start()
-
- server = SocketServer.TCPServer(conn, req)
- print(server.server_address[1])
- sys.stdout.flush()
- #server.timeout = 6000 # seconds
- server.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
- try:
- server.serve_forever(poll_interval=0.001)
- except KeyboardInterrupt:
- pass
-
-if __name__ == '__main__':
- conn = ("127.0.0.1", 0)
- #print("listening - %r %r\n" % conn)
- create_server(conn, req)
-else:
-
- from waflib import Logs, Utils, Runner, Errors, Options
-
- def init_task_pool(self):
- # lazy creation, and set a common pool for all task consumers
- pool = self.pool = []
- for i in range(self.numjobs):
- consumer = Runner.get_pool()
- pool.append(consumer)
- consumer.idx = i
- self.ready = Queue(0)
- def setq(consumer):
- consumer.ready = self.ready
- try:
- threading.current_thread().idx = consumer.idx
- except Exception as e:
- print(e)
- for x in pool:
- x.ready.put(setq)
- return pool
- Runner.Parallel.init_task_pool = init_task_pool
-
- def make_server(bld, idx):
- cmd = [sys.executable, os.path.abspath(__file__)]
- proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
- return proc
-
- def make_conn(bld, srv):
- port = srv.port
- conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
- conn.connect(('127.0.0.1', port))
- return conn
-
-
- SERVERS = []
- CONNS = []
- def close_all():
- global SERVERS, CONNS
- while CONNS:
- conn = CONNS.pop()
- try:
- conn.close()
- except:
- pass
- while SERVERS:
- srv = SERVERS.pop()
- try:
- srv.kill()
- except:
- pass
- atexit.register(close_all)
-
- def put_data(conn, data):
- cnt = 0
- while cnt < len(data):
- sent = conn.send(data[cnt:])
- if sent == 0:
- raise RuntimeError('connection ended')
- cnt += sent
-
- def read_data(conn, siz):
- cnt = 0
- buf = []
- while cnt < siz:
- data = conn.recv(min(siz - cnt, 1024))
- if not data:
- raise RuntimeError('connection ended %r %r' % (cnt, siz))
- buf.append(data)
- cnt += len(data)
- if sys.hexversion > 0x3000000:
- ret = ''.encode('iso8859-1').join(buf)
- else:
- ret = ''.join(buf)
- return ret
-
- def exec_command(self, cmd, **kw):
- if 'stdout' in kw:
- if kw['stdout'] not in (None, subprocess.PIPE):
- return self.exec_command_old(cmd, **kw)
- elif 'stderr' in kw:
- if kw['stderr'] not in (None, subprocess.PIPE):
- return self.exec_command_old(cmd, **kw)
-
- kw['shell'] = isinstance(cmd, str)
- Logs.debug('runner: %r' % cmd)
- Logs.debug('runner_env: kw=%s' % kw)
-
- if self.logger:
- self.logger.info(cmd)
-
- if 'stdout' not in kw:
- kw['stdout'] = subprocess.PIPE
- if 'stderr' not in kw:
- kw['stderr'] = subprocess.PIPE
-
- if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
- raise Errors.WafError("Program %s not found!" % cmd[0])
-
- idx = threading.current_thread().idx
- kw['cmd'] = cmd
-
- # serialization..
- #print("sub %r %r" % (idx, cmd))
- #print("write to %r %r" % (idx, cmd))
-
- data = cPickle.dumps(kw, -1)
- params = [REQ, str(len(data))]
- header = make_header(params, self.SHARED_KEY)
-
- conn = CONNS[idx]
-
- put_data(conn, header + data)
- #put_data(conn, data)
-
- #print("running %r %r" % (idx, cmd))
- #print("read from %r %r" % (idx, cmd))
-
- data = read_data(conn, HEADER_SIZE)
- if sys.hexversion > 0x3000000:
- data = data.decode('iso8859-1')
-
- #print("received %r" % data)
- lst = data.split(',')
- ret = int(lst[1])
- dlen = int(lst[2])
-
- out = err = None
- if dlen:
- data = read_data(conn, dlen)
- (out, err, exc) = cPickle.loads(data)
- if exc:
- raise Errors.WafError('Execution failure: %s' % exc)
-
- if out:
- if not isinstance(out, str):
- out = out.decode(sys.stdout.encoding or 'iso8859-1')
- if self.logger:
- self.logger.debug('out: %s' % out)
- else:
- Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
- if err:
- if not isinstance(err, str):
- err = err.decode(sys.stdout.encoding or 'iso8859-1')
- if self.logger:
- self.logger.error('err: %s' % err)
- else:
- Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
-
- return ret
-
- def init_key(ctx):
- try:
- key = ctx.SHARED_KEY = os.environ['SHARED_KEY']
- except KeyError:
- key = "".join([chr(random.SystemRandom().randint(40, 126)) for x in range(20)])
- os.environ['SHARED_KEY'] = ctx.SHARED_KEY = key
-
- os.environ['PREFORKPID'] = str(os.getpid())
- return key
-
- def init_servers(ctx, maxval):
- while len(SERVERS) < maxval:
- i = len(SERVERS)
- srv = make_server(ctx, i)
- SERVERS.append(srv)
- while len(CONNS) < maxval:
- i = len(CONNS)
- srv = SERVERS[i]
-
- # postpone the connection
- srv.port = int(srv.stdout.readline())
-
- conn = None
- for x in range(30):
- try:
- conn = make_conn(ctx, srv)
- break
- except socket.error:
- time.sleep(0.01)
- if not conn:
- raise ValueError('Could not start the server!')
- if srv.poll() is not None:
- Logs.warn('Looks like it it not our server process - concurrent builds are unsupported at this stage')
- raise ValueError('Could not start the server')
- CONNS.append(conn)
-
- def init_smp(self):
- if not getattr(Options.options, 'smp', getattr(self, 'smp', None)):
- return
- if Utils.unversioned_sys_platform() in ('freebsd',):
- pid = os.getpid()
- cmd = ['cpuset', '-l', '0', '-p', str(pid)]
- elif Utils.unversioned_sys_platform() in ('linux',):
- pid = os.getpid()
- cmd = ['taskset', '-pc', '0', str(pid)]
- if cmd:
- self.cmd_and_log(cmd, quiet=0)
-
- def options(opt):
- init_key(opt)
- init_servers(opt, 40)
- opt.add_option('--pin-process', action='store_true', dest='smp', default=False)
-
- def build(bld):
- if bld.cmd == 'clean':
- return
-
- init_key(bld)
- init_servers(bld, bld.jobs)
- init_smp(bld)
-
- bld.__class__.exec_command_old = bld.__class__.exec_command
- bld.__class__.exec_command = exec_command
diff --git a/third_party/waf/waflib/extras/preforkjava.py b/third_party/waf/waflib/extras/preforkjava.py
deleted file mode 100644
index e93461b4da9..00000000000
--- a/third_party/waf/waflib/extras/preforkjava.py
+++ /dev/null
@@ -1,236 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2015 (ita)
-
-# TODO: have the child process terminate if the parent is killed abruptly
-
-import os, socket, threading, sys, subprocess, time, atexit, random
-try:
- from queue import Queue
-except ImportError:
- from Queue import Queue
-
-import json as pickle
-
-SHARED_KEY = None
-HEADER_SIZE = 64
-
-REQ = 'REQ'
-RES = 'RES'
-BYE = 'BYE'
-
-def make_header(params, cookie=''):
- header = ','.join(params)
- header = header.ljust(HEADER_SIZE - len(cookie))
- assert(len(header) == HEADER_SIZE - len(cookie))
- header = header + cookie
- if sys.hexversion > 0x3000000:
- header = header.encode('iso8859-1')
- return header
-
-if 1:
- from waflib import Logs, Utils, Runner, Errors, Options
-
- def init_task_pool(self):
- # lazy creation, and set a common pool for all task consumers
- pool = self.pool = []
- for i in range(self.numjobs):
- consumer = Runner.get_pool()
- pool.append(consumer)
- consumer.idx = i
- self.ready = Queue(0)
- def setq(consumer):
- consumer.ready = self.ready
- try:
- threading.current_thread().idx = consumer.idx
- except Exception as e:
- print(e)
- for x in pool:
- x.ready.put(setq)
- return pool
- Runner.Parallel.init_task_pool = init_task_pool
-
- def make_server(bld, idx):
- top = getattr(bld, 'preforkjava_top', os.path.dirname(os.path.abspath('__file__')))
- cp = getattr(bld, 'preforkjava_cp', os.path.join(top, 'minimal-json-0.9.3-SNAPSHOT.jar') + os.pathsep + top)
-
- for x in cp.split(os.pathsep):
- if x and not os.path.exists(x):
- Logs.warn('Invalid classpath: %r' % cp)
- Logs.warn('Set for example bld.preforkjava_cp to /path/to/minimal-json:/path/to/Prefork.class/')
-
- cwd = getattr(bld, 'preforkjava_cwd', top)
- port = getattr(bld, 'preforkjava_port', 51200)
- cmd = getattr(bld, 'preforkjava_cmd', 'java -cp %s%s Prefork %d' % (cp, os.pathsep, port))
- proc = subprocess.Popen(cmd.split(), shell=False, cwd=cwd)
- proc.port = port
- return proc
-
- def make_conn(bld, srv):
- #port = PORT + idx
- port = srv.port
- conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
- conn.connect(('127.0.0.1', port))
- return conn
-
- SERVERS = []
- CONNS = []
- def close_all():
- global SERVERS
- while SERVERS:
- srv = SERVERS.pop()
- #pid = srv.pid
- try:
- srv.kill()
- except Exception:
- pass
- atexit.register(close_all)
-
- def put_data(conn, data):
- cnt = 0
- while cnt < len(data):
- sent = conn.send(data[cnt:])
- if sent == 0:
- raise RuntimeError('connection ended')
- cnt += sent
-
- def read_data(conn, siz):
- cnt = 0
- buf = []
- while cnt < siz:
- data = conn.recv(min(siz - cnt, 1024))
- if not data:
- raise RuntimeError('connection ended %r %r' % (cnt, siz))
- buf.append(data)
- cnt += len(data)
- if sys.hexversion > 0x3000000:
- ret = ''.encode('iso8859-1').join(buf)
- else:
- ret = ''.join(buf)
- return ret
-
- def exec_command(self, cmd, **kw):
- if 'stdout' in kw:
- if kw['stdout'] not in (None, subprocess.PIPE):
- return self.exec_command_old(cmd, **kw)
- elif 'stderr' in kw:
- if kw['stderr'] not in (None, subprocess.PIPE):
- return self.exec_command_old(cmd, **kw)
-
- kw['shell'] = isinstance(cmd, str)
- Logs.debug('runner: %r' % cmd)
- Logs.debug('runner_env: kw=%s' % kw)
-
- if self.logger:
- self.logger.info(cmd)
-
- if 'stdout' not in kw:
- kw['stdout'] = subprocess.PIPE
- if 'stderr' not in kw:
- kw['stderr'] = subprocess.PIPE
-
- if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
- raise Errors.WafError("Program %s not found!" % cmd[0])
-
- idx = threading.current_thread().idx
- kw['cmd'] = cmd
-
- data = pickle.dumps(kw)
- params = [REQ, str(len(data))]
- header = make_header(params, self.SHARED_KEY)
-
- conn = CONNS[idx]
-
- if sys.hexversion > 0x3000000:
- data = data.encode('iso8859-1')
- put_data(conn, header + data)
-
- data = read_data(conn, HEADER_SIZE)
- if sys.hexversion > 0x3000000:
- data = data.decode('iso8859-1')
-
- #print("received %r" % data)
- lst = data.split(',')
- ret = int(lst[1])
- dlen = int(lst[2])
-
- out = err = None
- if dlen:
- data = read_data(conn, dlen)
- (out, err, exc) = pickle.loads(data)
- if exc:
- raise Errors.WafError('Execution failure: %s' % exc)
-
- if out:
- if not isinstance(out, str):
- out = out.decode(sys.stdout.encoding or 'iso8859-1')
- if self.logger:
- self.logger.debug('out: %s' % out)
- else:
- Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
- if err:
- if not isinstance(err, str):
- err = err.decode(sys.stdout.encoding or 'iso8859-1')
- if self.logger:
- self.logger.error('err: %s' % err)
- else:
- Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
-
- return ret
-
- def init_key(ctx):
- try:
- key = ctx.SHARED_KEY = os.environ['SHARED_KEY']
- except KeyError:
- key = "".join([chr(random.SystemRandom().randint(40, 126)) for x in range(20)])
- os.environ['SHARED_KEY'] = ctx.SHARED_KEY = key
- os.environ['PREFORKPID'] = str(os.getpid())
- return key
-
- def init_servers(ctx, maxval):
- while len(SERVERS) < 1:
- i = len(SERVERS)
- srv = make_server(ctx, i)
- SERVERS.append(srv)
- while len(CONNS) < maxval:
- i = len(CONNS)
- srv = SERVERS[0]
- conn = None
- for x in range(30):
- try:
- conn = make_conn(ctx, srv)
- break
- except socket.error:
- time.sleep(0.01)
- if not conn:
- raise ValueError('Could not start the server!')
- CONNS.append(conn)
-
- def init_smp(self):
- if not getattr(Options.options, 'smp', getattr(self, 'smp', None)):
- return
- if Utils.unversioned_sys_platform() in ('freebsd',):
- pid = os.getpid()
- cmd = ['cpuset', '-l', '0', '-p', str(pid)]
- elif Utils.unversioned_sys_platform() in ('linux',):
- pid = os.getpid()
- cmd = ['taskset', '-pc', '0', str(pid)]
- if cmd:
- self.cmd_and_log(cmd, quiet=0)
-
- def options(opt):
- opt.add_option('--pin-process', action='store_true', dest='smp', default=False)
- init_key(opt)
- init_servers(opt, 40)
-
- def build(bld):
- if bld.cmd == 'clean':
- return
-
- init_key(bld)
- init_servers(bld, bld.jobs)
- init_smp(bld)
-
- bld.__class__.exec_command_old = bld.__class__.exec_command
- bld.__class__.exec_command = exec_command
diff --git a/third_party/waf/waflib/extras/preforkunix.py b/third_party/waf/waflib/extras/preforkunix.py
deleted file mode 100644
index ec9aeeb10e7..00000000000
--- a/third_party/waf/waflib/extras/preforkunix.py
+++ /dev/null
@@ -1,317 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2015 (ita)
-
-"""
-A version of prefork.py that uses unix sockets. The advantage is that it does not expose
-connections to the outside. Yet performance it only works on unix-like systems
-and performance can be slightly worse.
-
-To use::
-
- def options(opt):
- # recommended, fork new processes before using more memory
- opt.load('preforkunix')
-
- def build(bld):
- bld.load('preforkunix')
- ...
- more code
-"""
-
-import os, re, socket, threading, sys, subprocess, atexit, traceback, signal, time
-try:
- from queue import Queue
-except ImportError:
- from Queue import Queue
-try:
- import cPickle
-except ImportError:
- import pickle as cPickle
-
-HEADER_SIZE = 20
-
-REQ = 'REQ'
-RES = 'RES'
-BYE = 'BYE'
-
-def make_header(params, cookie=''):
- header = ','.join(params)
- header = header.ljust(HEADER_SIZE - len(cookie))
- assert(len(header) == HEADER_SIZE - len(cookie))
- header = header + cookie
- if sys.hexversion > 0x3000000:
- header = header.encode('iso8859-1')
- return header
-
-re_valid_query = re.compile('^[a-zA-Z0-9_, ]+$')
-if 1:
- def send_response(conn, ret, out, err, exc):
- if out or err or exc:
- data = (out, err, exc)
- data = cPickle.dumps(data, -1)
- else:
- data = ''
-
- params = [RES, str(ret), str(len(data))]
-
- # no need for the cookie in the response
- conn.send(make_header(params))
- if data:
- conn.send(data)
-
- def process_command(conn):
- query = conn.recv(HEADER_SIZE)
- if not query:
- return None
- #print(len(query))
- assert(len(query) == HEADER_SIZE)
- if sys.hexversion > 0x3000000:
- query = query.decode('iso8859-1')
-
- #print "%r" % query
- if not re_valid_query.match(query):
- send_response(conn, -1, '', '', 'Invalid query %r' % query)
- raise ValueError('Invalid query %r' % query)
-
- query = query.strip().split(',')
-
- if query[0] == REQ:
- run_command(conn, query[1:])
- elif query[0] == BYE:
- raise ValueError('Exit')
- else:
- raise ValueError('Invalid query %r' % query)
- return 'ok'
-
- def run_command(conn, query):
-
- size = int(query[0])
- data = conn.recv(size)
- assert(len(data) == size)
- kw = cPickle.loads(data)
-
- # run command
- ret = out = err = exc = None
- cmd = kw['cmd']
- del kw['cmd']
- #print(cmd)
-
- try:
- if kw['stdout'] or kw['stderr']:
- p = subprocess.Popen(cmd, **kw)
- (out, err) = p.communicate()
- ret = p.returncode
- else:
- ret = subprocess.Popen(cmd, **kw).wait()
- except KeyboardInterrupt:
- raise
- except Exception as e:
- ret = -1
- exc = str(e) + traceback.format_exc()
-
- send_response(conn, ret, out, err, exc)
-
-if 1:
-
- from waflib import Logs, Utils, Runner, Errors, Options
-
- def init_task_pool(self):
- # lazy creation, and set a common pool for all task consumers
- pool = self.pool = []
- for i in range(self.numjobs):
- consumer = Runner.get_pool()
- pool.append(consumer)
- consumer.idx = i
- self.ready = Queue(0)
- def setq(consumer):
- consumer.ready = self.ready
- try:
- threading.current_thread().idx = consumer.idx
- except Exception as e:
- print(e)
- for x in pool:
- x.ready.put(setq)
- return pool
- Runner.Parallel.init_task_pool = init_task_pool
-
- def make_conn(bld):
- child_socket, parent_socket = socket.socketpair(socket.AF_UNIX)
- ppid = os.getpid()
- pid = os.fork()
- if pid == 0:
- parent_socket.close()
-
- # if the parent crashes, try to exit cleanly
- def reap():
- while 1:
- try:
- os.kill(ppid, 0)
- except OSError:
- break
- else:
- time.sleep(1)
- os.kill(os.getpid(), signal.SIGKILL)
- t = threading.Thread(target=reap)
- t.setDaemon(True)
- t.start()
-
- # write to child_socket only
- try:
- while process_command(child_socket):
- pass
- except KeyboardInterrupt:
- sys.exit(2)
- else:
- child_socket.close()
- return (pid, parent_socket)
-
- SERVERS = []
- CONNS = []
- def close_all():
- global SERVERS, CONS
- while CONNS:
- conn = CONNS.pop()
- try:
- conn.close()
- except:
- pass
- while SERVERS:
- pid = SERVERS.pop()
- try:
- os.kill(pid, 9)
- except:
- pass
- atexit.register(close_all)
-
- def put_data(conn, data):
- cnt = 0
- while cnt < len(data):
- sent = conn.send(data[cnt:])
- if sent == 0:
- raise RuntimeError('connection ended')
- cnt += sent
-
- def read_data(conn, siz):
- cnt = 0
- buf = []
- while cnt < siz:
- data = conn.recv(min(siz - cnt, 1024))
- if not data:
- raise RuntimeError('connection ended %r %r' % (cnt, siz))
- buf.append(data)
- cnt += len(data)
- if sys.hexversion > 0x3000000:
- ret = ''.encode('iso8859-1').join(buf)
- else:
- ret = ''.join(buf)
- return ret
-
- def exec_command(self, cmd, **kw):
- if 'stdout' in kw:
- if kw['stdout'] not in (None, subprocess.PIPE):
- return self.exec_command_old(cmd, **kw)
- elif 'stderr' in kw:
- if kw['stderr'] not in (None, subprocess.PIPE):
- return self.exec_command_old(cmd, **kw)
-
- kw['shell'] = isinstance(cmd, str)
- Logs.debug('runner: %r' % cmd)
- Logs.debug('runner_env: kw=%s' % kw)
-
- if self.logger:
- self.logger.info(cmd)
-
- if 'stdout' not in kw:
- kw['stdout'] = subprocess.PIPE
- if 'stderr' not in kw:
- kw['stderr'] = subprocess.PIPE
-
- if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
- raise Errors.WafError("Program %s not found!" % cmd[0])
-
- idx = threading.current_thread().idx
- kw['cmd'] = cmd
-
- # serialization..
- #print("sub %r %r" % (idx, cmd))
- #print("write to %r %r" % (idx, cmd))
-
- data = cPickle.dumps(kw, -1)
- params = [REQ, str(len(data))]
- header = make_header(params)
-
- conn = CONNS[idx]
-
- put_data(conn, header + data)
-
- #print("running %r %r" % (idx, cmd))
- #print("read from %r %r" % (idx, cmd))
-
- data = read_data(conn, HEADER_SIZE)
- if sys.hexversion > 0x3000000:
- data = data.decode('iso8859-1')
-
- #print("received %r" % data)
- lst = data.split(',')
- ret = int(lst[1])
- dlen = int(lst[2])
-
- out = err = None
- if dlen:
- data = read_data(conn, dlen)
- (out, err, exc) = cPickle.loads(data)
- if exc:
- raise Errors.WafError('Execution failure: %s' % exc)
-
- if out:
- if not isinstance(out, str):
- out = out.decode(sys.stdout.encoding or 'iso8859-1')
- if self.logger:
- self.logger.debug('out: %s' % out)
- else:
- Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
- if err:
- if not isinstance(err, str):
- err = err.decode(sys.stdout.encoding or 'iso8859-1')
- if self.logger:
- self.logger.error('err: %s' % err)
- else:
- Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
-
- return ret
-
- def init_smp(self):
- if not getattr(Options.options, 'smp', getattr(self, 'smp', None)):
- return
- if Utils.unversioned_sys_platform() in ('freebsd',):
- pid = os.getpid()
- cmd = ['cpuset', '-l', '0', '-p', str(pid)]
- elif Utils.unversioned_sys_platform() in ('linux',):
- pid = os.getpid()
- cmd = ['taskset', '-pc', '0', str(pid)]
- if cmd:
- self.cmd_and_log(cmd, quiet=0)
-
- def options(opt):
- # memory consumption might be at the lowest point while processing options
- opt.add_option('--pin-process', action='store_true', dest='smp', default=False)
- if Utils.is_win32 or os.sep != '/':
- return
- while len(CONNS) < 30:
- (pid, conn) = make_conn(opt)
- SERVERS.append(pid)
- CONNS.append(conn)
-
- def build(bld):
- if Utils.is_win32 or os.sep != '/':
- return
- if bld.cmd == 'clean':
- return
- while len(CONNS) < bld.jobs:
- (pid, conn) = make_conn(bld)
- SERVERS.append(pid)
- CONNS.append(conn)
- init_smp(bld)
- bld.__class__.exec_command_old = bld.__class__.exec_command
- bld.__class__.exec_command = exec_command
diff --git a/third_party/waf/waflib/extras/print_commands.py b/third_party/waf/waflib/extras/print_commands.py
deleted file mode 100644
index ada0ee53f8c..00000000000
--- a/third_party/waf/waflib/extras/print_commands.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
-
-#! /usr/bin/env python
-
-"""
-Illustrate how to override a class method to do something
-
-In this case, print the commands being executed as strings
-(the commands are usually lists, so this can be misleading)
-"""
-
-import sys
-from waflib import Context, Utils, Errors, Logs
-
-def exec_command(self, cmd, **kw):
- subprocess = Utils.subprocess
- kw['shell'] = isinstance(cmd, str)
-
- if isinstance(cmd, str):
- kw['shell'] = True
- txt = cmd
- else:
- txt = ' '.join(repr(x) if ' ' in x else x for x in cmd)
-
- Logs.debug('runner: %s', txt)
- Logs.debug('runner_env: kw=%s', kw)
-
- if self.logger:
- self.logger.info(cmd)
-
- if 'stdout' not in kw:
- kw['stdout'] = subprocess.PIPE
- if 'stderr' not in kw:
- kw['stderr'] = subprocess.PIPE
-
- if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
- raise Errors.WafError("Program %s not found!" % cmd[0])
-
- wargs = {}
- if 'timeout' in kw:
- if kw['timeout'] is not None:
- wargs['timeout'] = kw['timeout']
- del kw['timeout']
- if 'input' in kw:
- if kw['input']:
- wargs['input'] = kw['input']
- kw['stdin'] = Utils.subprocess.PIPE
- del kw['input']
-
- if 'cwd' in kw:
- if not isinstance(kw['cwd'], str):
- kw['cwd'] = kw['cwd'].abspath()
-
- try:
- if kw['stdout'] or kw['stderr']:
- p = subprocess.Popen(cmd, **kw)
- (out, err) = p.communicate(**wargs)
- ret = p.returncode
- else:
- out, err = (None, None)
- ret = subprocess.Popen(cmd, **kw).wait(**wargs)
- except Exception ,e:
- raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
-
- if out:
- if not isinstance(out, str):
- out = out.decode(sys.stdout.encoding or 'iso8859-1')
- if self.logger:
- self.logger.debug('out: %s' % out)
- else:
- Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
- if err:
- if not isinstance(err, str):
- err = err.decode(sys.stdout.encoding or 'iso8859-1')
- if self.logger:
- self.logger.error('err: %s' % err)
- else:
- Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
-
- return ret
-
-Context.Context.exec_command = exec_command
diff --git a/third_party/waf/waflib/extras/proc.py b/third_party/waf/waflib/extras/proc.py
index fec4c4ccd63..2d7e5cedb1f 100644
--- a/third_party/waf/waflib/extras/proc.py
+++ b/third_party/waf/waflib/extras/proc.py
@@ -1,4 +1,8 @@
#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
# per rosengren 2011
from os import environ, path
@@ -25,7 +29,6 @@ def proc(tsk):
gen = tsk.generator
inc_nodes = gen.to_incnodes(Utils.to_list(getattr(gen,'includes',[])) + env['INCLUDES'])
- # FIXME the if-else construct will not work in python 2
cmd = (
[env.PROC] +
['SQLCHECK=SEMANTICS'] +
@@ -52,3 +55,4 @@ TaskGen.declare_chain(
ext_in = '.pc',
ext_out = '.c',
)
+
diff --git a/third_party/waf/waflib/extras/protoc.py b/third_party/waf/waflib/extras/protoc.py
index 97d1ef3c340..cb16e858182 100644
--- a/third_party/waf/waflib/extras/protoc.py
+++ b/third_party/waf/waflib/extras/protoc.py
@@ -1,16 +1,21 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Philipp Bender, 2012
# Matt Clarkson, 2012
-import re
+import re, os
from waflib.Task import Task
from waflib.TaskGen import extension
+from waflib import Errors, Context
"""
A simple tool to integrate protocol buffers into your build system.
-Example::
+Example for C++:
def configure(conf):
conf.load('compiler_cxx cxx protoc')
@@ -19,9 +24,55 @@ Example::
bld(
features = 'cxx cxxprogram'
source = 'main.cpp file1.proto proto/file2.proto',
- include = '. proto',
+ includes = '. proto',
target = 'executable')
+Example for Python:
+
+ def configure(conf):
+ conf.load('python protoc')
+
+ def build(bld):
+ bld(
+ features = 'py'
+ source = 'main.py file1.proto proto/file2.proto',
+ protoc_includes = 'proto')
+
+Example for both Python and C++ at same time:
+
+ def configure(conf):
+ conf.load('cxx python protoc')
+
+ def build(bld):
+ bld(
+ features = 'cxx py'
+ source = 'file1.proto proto/file2.proto',
+ protoc_includes = 'proto') # or includes
+
+
+Example for Java:
+
+ def options(opt):
+ opt.load('java')
+
+ def configure(conf):
+ conf.load('python java protoc')
+ # Here you have to point to your protobuf-java JAR and have it in classpath
+ conf.env.CLASSPATH_PROTOBUF = ['protobuf-java-2.5.0.jar']
+
+ def build(bld):
+ bld(
+ features = 'javac protoc',
+ name = 'pbjava',
+ srcdir = 'inc/ src', # directories used by javac
+ source = ['inc/message_inc.proto', 'inc/message.proto'],
+ # source is used by protoc for .proto files
+ use = 'PROTOBUF',
+ protoc_includes = ['inc']) # for protoc to search dependencies
+
+
+
+
Notes when using this tool:
- protoc command line parsing is tricky.
@@ -35,10 +86,9 @@ Notes when using this tool:
"""
class protoc(Task):
- # protoc expects the input proto file to be an absolute path.
- run_str = '${PROTOC} ${PROTOC_FLAGS} ${PROTOC_ST:INCPATHS} ${SRC[0].abspath()}'
+ run_str = '${PROTOC} ${PROTOC_FL:PROTOC_FLAGS} ${PROTOC_ST:INCPATHS} ${PROTOC_ST:PROTOC_INCPATHS} ${SRC[0].bldpath()}'
color = 'BLUE'
- ext_out = ['.h', 'pb.cc']
+ ext_out = ['.h', 'pb.cc', '.py', '.java']
def scan(self):
"""
Scan .proto dependencies
@@ -48,8 +98,17 @@ class protoc(Task):
nodes = []
names = []
seen = []
+ search_nodes = []
+
+ if not node:
+ return (nodes, names)
- if not node: return (nodes, names)
+ if 'cxx' in self.generator.features:
+ search_nodes = self.generator.includes_nodes
+
+ if 'py' in self.generator.features or 'javac' in self.generator.features:
+ for incpath in getattr(self.generator, 'protoc_includes', []):
+ search_nodes.append(self.generator.bld.path.find_node(incpath))
def parse_node(node):
if node in seen:
@@ -60,8 +119,8 @@ class protoc(Task):
m = re.search(r'^import\s+"(.*)";.*(//)?.*', line)
if m:
dep = m.groups()[0]
- for incpath in self.env.INCPATHS:
- found = incpath.find_resource(dep)
+ for incnode in search_nodes:
+ found = incnode.find_resource(dep)
if found:
nodes.append(found)
parse_node(found)
@@ -73,20 +132,108 @@ class protoc(Task):
@extension('.proto')
def process_protoc(self, node):
- cpp_node = node.change_ext('.pb.cc')
- hpp_node = node.change_ext('.pb.h')
- self.create_task('protoc', node, [cpp_node, hpp_node])
- self.source.append(cpp_node)
-
- if 'cxx' in self.features and not self.env.PROTOC_FLAGS:
- #self.env.PROTOC_FLAGS = '--cpp_out=%s' % node.parent.get_bld().abspath() # <- this does not work
- self.env.PROTOC_FLAGS = '--cpp_out=%s' % node.parent.get_bld().bldpath()
+ incdirs = []
+ out_nodes = []
+ protoc_flags = []
+
+ # ensure PROTOC_FLAGS is a list; a copy is used below anyway
+ self.env.PROTOC_FLAGS = self.to_list(self.env.PROTOC_FLAGS)
+
+ if 'cxx' in self.features:
+ cpp_node = node.change_ext('.pb.cc')
+ hpp_node = node.change_ext('.pb.h')
+ self.source.append(cpp_node)
+ out_nodes.append(cpp_node)
+ out_nodes.append(hpp_node)
+ protoc_flags.append('--cpp_out=%s' % node.parent.get_bld().bldpath())
+
+ if 'py' in self.features:
+ py_node = node.change_ext('_pb2.py')
+ self.source.append(py_node)
+ out_nodes.append(py_node)
+ protoc_flags.append('--python_out=%s' % node.parent.get_bld().bldpath())
+
+ if 'javac' in self.features:
+ pkgname, javapkg, javacn, nodename = None, None, None, None
+ messages = []
+
+ # .java file name is done with some rules depending on .proto file content:
+ # -) package is either derived from option java_package if present
+ # or from package directive
+ # -) file name is either derived from option java_outer_classname if present
+ # or the .proto file is converted to camelcase. If a message
+ # is named the same then the behaviour depends on protoc version
+ #
+ # See also: https://developers.google.com/protocol-buffers/docs/reference/java-generated#invocation
+
+ code = node.read().splitlines()
+ for line in code:
+ m = re.search(r'^package\s+(.*);', line)
+ if m:
+ pkgname = m.groups()[0]
+ m = re.search(r'^option\s+(\S*)\s*=\s*"(\S*)";', line)
+ if m:
+ optname = m.groups()[0]
+ if optname == 'java_package':
+ javapkg = m.groups()[1]
+ elif optname == 'java_outer_classname':
+ javacn = m.groups()[1]
+ if self.env.PROTOC_MAJOR > '2':
+ m = re.search(r'^message\s+(\w*)\s*{*', line)
+ if m:
+ messages.append(m.groups()[0])
+
+ if javapkg:
+ nodename = javapkg
+ elif pkgname:
+ nodename = pkgname
+ else:
+ raise Errors.WafError('Cannot derive java name from protoc file')
+
+ nodename = nodename.replace('.',os.sep) + os.sep
+ if javacn:
+ nodename += javacn + '.java'
+ else:
+ if self.env.PROTOC_MAJOR > '2' and node.abspath()[node.abspath().rfind(os.sep)+1:node.abspath().rfind('.')].title() in messages:
+ nodename += node.abspath()[node.abspath().rfind(os.sep)+1:node.abspath().rfind('.')].title() + 'OuterClass.java'
+ else:
+ nodename += node.abspath()[node.abspath().rfind(os.sep)+1:node.abspath().rfind('.')].title() + '.java'
+
+ java_node = node.parent.find_or_declare(nodename)
+ out_nodes.append(java_node)
+ protoc_flags.append('--java_out=%s' % node.parent.get_bld().bldpath())
+
+ # Make javac get also pick java code generated in build
+ if not node.parent.get_bld() in self.javac_task.srcdir:
+ self.javac_task.srcdir.append(node.parent.get_bld())
+
+ if not out_nodes:
+ raise Errors.WafError('Feature %r not supported by protoc extra' % self.features)
+
+ tsk = self.create_task('protoc', node, out_nodes)
+ tsk.env.append_value('PROTOC_FLAGS', protoc_flags)
+
+ if 'javac' in self.features:
+ self.javac_task.set_run_after(tsk)
+
+ # Instruct protoc where to search for .proto included files.
+ # For C++ standard include files dirs are used,
+ # but this doesn't apply to Python for example
+ for incpath in getattr(self, 'protoc_includes', []):
+ incdirs.append(self.bld.path.find_node(incpath).bldpath())
+ tsk.env.PROTOC_INCPATHS = incdirs
use = getattr(self, 'use', '')
if not 'PROTOBUF' in use:
self.use = self.to_list(use) + ['PROTOBUF']
def configure(conf):
- conf.check_cfg(package="protobuf", uselib_store="PROTOBUF", args=['--cflags', '--libs'])
+ conf.check_cfg(package='protobuf', uselib_store='PROTOBUF', args=['--cflags', '--libs'])
conf.find_program('protoc', var='PROTOC')
+ conf.start_msg('Checking for protoc version')
+ protocver = conf.cmd_and_log(conf.env.PROTOC + ['--version'], output=Context.BOTH)
+ protocver = ''.join(protocver).strip()[protocver[0].rfind(' ')+1:]
+ conf.end_msg(protocver)
+ conf.env.PROTOC_MAJOR = protocver[:protocver.find('.')]
conf.env.PROTOC_ST = '-I%s'
+ conf.env.PROTOC_FL = '%s'
diff --git a/third_party/waf/waflib/extras/pyqt5.py b/third_party/waf/waflib/extras/pyqt5.py
new file mode 100644
index 00000000000..0190f2527d7
--- /dev/null
+++ b/third_party/waf/waflib/extras/pyqt5.py
@@ -0,0 +1,245 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Federico Pellegrin, 2016-2018 (fedepell) adapted for Python
+
+"""
+This tool helps with finding Python Qt5 tools and libraries,
+and provides translation from QT5 files to Python code.
+
+The following snippet illustrates the tool usage::
+
+ def options(opt):
+ opt.load('py pyqt5')
+
+ def configure(conf):
+ conf.load('py pyqt5')
+
+ def build(bld):
+ bld(
+ features = 'py pyqt5',
+ source = 'main.py textures.qrc aboutDialog.ui',
+ )
+
+Here, the UI description and resource files will be processed
+to generate code.
+
+Usage
+=====
+
+Load the "pyqt5" tool.
+
+Add into the sources list also the qrc resources files or ui5
+definition files and they will be translated into python code
+with the system tools (PyQt5, pyside2, PyQt4 are searched in this
+order) and then compiled
+"""
+
+try:
+ from xml.sax import make_parser
+ from xml.sax.handler import ContentHandler
+except ImportError:
+ has_xml = False
+ ContentHandler = object
+else:
+ has_xml = True
+
+import os
+from waflib.Tools import python
+from waflib import Task, Options
+from waflib.TaskGen import feature, extension
+from waflib.Configure import conf
+from waflib import Logs
+
+EXT_RCC = ['.qrc']
+"""
+File extension for the resource (.qrc) files
+"""
+
+EXT_UI = ['.ui']
+"""
+File extension for the user interface (.ui) files
+"""
+
+
+class XMLHandler(ContentHandler):
+ """
+ Parses ``.qrc`` files
+ """
+ def __init__(self):
+ self.buf = []
+ self.files = []
+ def startElement(self, name, attrs):
+ if name == 'file':
+ self.buf = []
+ def endElement(self, name):
+ if name == 'file':
+ self.files.append(str(''.join(self.buf)))
+ def characters(self, cars):
+ self.buf.append(cars)
+
+@extension(*EXT_RCC)
+def create_pyrcc_task(self, node):
+ "Creates rcc and py task for ``.qrc`` files"
+ rcnode = node.change_ext('.py')
+ self.create_task('pyrcc', node, rcnode)
+ if getattr(self, 'install_from', None):
+ self.install_from = self.install_from.get_bld()
+ else:
+ self.install_from = self.path.get_bld()
+ self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
+ self.process_py(rcnode)
+
+@extension(*EXT_UI)
+def create_pyuic_task(self, node):
+ "Create uic tasks and py for user interface ``.ui`` definition files"
+ uinode = node.change_ext('.py')
+ self.create_task('ui5py', node, uinode)
+ if getattr(self, 'install_from', None):
+ self.install_from = self.install_from.get_bld()
+ else:
+ self.install_from = self.path.get_bld()
+ self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
+ self.process_py(uinode)
+
+@extension('.ts')
+def add_pylang(self, node):
+ """Adds all the .ts file into ``self.lang``"""
+ self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
+
+@feature('pyqt5')
+def apply_pyqt5(self):
+ """
+ The additional parameters are:
+
+ :param lang: list of translation files (\*.ts) to process
+ :type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
+ :param langname: if given, transform the \*.ts files into a .qrc files to include in the binary file
+ :type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
+ """
+ if getattr(self, 'lang', None):
+ qmtasks = []
+ for x in self.to_list(self.lang):
+ if isinstance(x, str):
+ x = self.path.find_resource(x + '.ts')
+ qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))
+
+
+ if getattr(self, 'langname', None):
+ qmnodes = [k.outputs[0] for k in qmtasks]
+ rcnode = self.langname
+ if isinstance(rcnode, str):
+ rcnode = self.path.find_or_declare(rcnode + '.qrc')
+ t = self.create_task('qm2rcc', qmnodes, rcnode)
+ create_pyrcc_task(self, t.outputs[0])
+
+class pyrcc(Task.Task):
+ """
+ Processes ``.qrc`` files
+ """
+ color = 'BLUE'
+ run_str = '${QT_PYRCC} ${SRC} -o ${TGT}'
+ ext_out = ['.py']
+
+ def rcname(self):
+ return os.path.splitext(self.inputs[0].name)[0]
+
+ def scan(self):
+ """Parse the *.qrc* files"""
+ if not has_xml:
+ Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
+ return ([], [])
+
+ parser = make_parser()
+ curHandler = XMLHandler()
+ parser.setContentHandler(curHandler)
+ fi = open(self.inputs[0].abspath(), 'r')
+ try:
+ parser.parse(fi)
+ finally:
+ fi.close()
+
+ nodes = []
+ names = []
+ root = self.inputs[0].parent
+ for x in curHandler.files:
+ nd = root.find_resource(x)
+ if nd:
+ nodes.append(nd)
+ else:
+ names.append(x)
+ return (nodes, names)
+
+
+class ui5py(Task.Task):
+ """
+ Processes ``.ui`` files for python
+ """
+ color = 'BLUE'
+ run_str = '${QT_PYUIC} ${SRC} -o ${TGT}'
+ ext_out = ['.py']
+
+class ts2qm(Task.Task):
+ """
+ Generates ``.qm`` files from ``.ts`` files
+ """
+ color = 'BLUE'
+ run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
+
+class qm2rcc(Task.Task):
+ """
+ Generates ``.qrc`` files from ``.qm`` files
+ """
+ color = 'BLUE'
+ after = 'ts2qm'
+ def run(self):
+ """Create a qrc file including the inputs"""
+ txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
+ code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
+ self.outputs[0].write(code)
+
+def configure(self):
+ self.find_pyqt5_binaries()
+
+ # warn about this during the configuration too
+ if not has_xml:
+ Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
+
+@conf
+def find_pyqt5_binaries(self):
+ """
+ Detects PyQt5 or pyside2 programs such as pyuic5/pyside2-uic, pyrcc5/pyside2-rcc
+ """
+ env = self.env
+
+ if getattr(Options.options, 'want_pyside2', True):
+ self.find_program(['pyside2-uic'], var='QT_PYUIC')
+ self.find_program(['pyside2-rcc'], var='QT_PYRCC')
+ self.find_program(['pyside2-lupdate'], var='QT_PYLUPDATE')
+ elif getattr(Options.options, 'want_pyqt4', True):
+ self.find_program(['pyuic4'], var='QT_PYUIC')
+ self.find_program(['pyrcc4'], var='QT_PYRCC')
+ self.find_program(['pylupdate4'], var='QT_PYLUPDATE')
+ else:
+ self.find_program(['pyuic5','pyside2-uic','pyuic4'], var='QT_PYUIC')
+ self.find_program(['pyrcc5','pyside2-rcc','pyrcc4'], var='QT_PYRCC')
+ self.find_program(['pylupdate5', 'pyside2-lupdate','pylupdate4'], var='QT_PYLUPDATE')
+
+ if not env.QT_PYUIC:
+ self.fatal('cannot find the uic compiler for python for qt5')
+
+ if not env.QT_PYUIC:
+ self.fatal('cannot find the rcc compiler for python for qt5')
+
+ self.find_program(['lrelease-qt5', 'lrelease'], var='QT_LRELEASE')
+
+def options(opt):
+ """
+ Command-line options
+ """
+ pyqt5opt=opt.add_option_group("Python QT5 Options")
+ pyqt5opt.add_option('--pyqt5-pyside2', action='store_true', default=False, dest='want_pyside2', help='use pyside2 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after)')
+ pyqt5opt.add_option('--pyqt5-pyqt4', action='store_true', default=False, dest='want_pyqt4', help='use PyQt4 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)')
diff --git a/third_party/waf/waflib/extras/pytest.py b/third_party/waf/waflib/extras/pytest.py
new file mode 100644
index 00000000000..c4ed4c574da
--- /dev/null
+++ b/third_party/waf/waflib/extras/pytest.py
@@ -0,0 +1,229 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# Calle Rosenquist, 2016-2018 (xbreak)
+
+"""
+Provides Python unit test support using :py:class:`waflib.Tools.waf_unit_test.utest`
+task via the **pytest** feature.
+
+To use pytest the following is needed:
+
+1. Load `pytest` and the dependency `waf_unit_test` tools.
+2. Create a task generator with feature `pytest` (not `test`) and customize behaviour with
+ the following attributes:
+
+ - `pytest_source`: Test input files.
+ - `ut_str`: Test runner command, e.g. ``${PYTHON} -B -m unittest discover`` or
+ if nose is used: ``${NOSETESTS} --no-byte-compile ${SRC}``.
+ - `ut_shell`: Determines if ``ut_str`` is executed in a shell. Default: False.
+ - `ut_cwd`: Working directory for test runner. Defaults to directory of
+ first ``pytest_source`` file.
+
+ Additionally the following `pytest` specific attributes are used in dependent taskgens:
+
+ - `pytest_path`: Node or string list of additional Python paths.
+ - `pytest_libpath`: Node or string list of additional library paths.
+
+The `use` dependencies are used for both update calculation and to populate
+the following environment variables for the `pytest` test runner:
+
+1. `PYTHONPATH` (`sys.path`) of any dependent taskgen that has the feature `py`:
+
+ - `install_from` attribute is used to determine where the root of the Python sources
+ are located. If `install_from` is not specified the default is to use the taskgen path
+ as the root.
+
+ - `pytest_path` attribute is used to manually specify additional Python paths.
+
+2. Dynamic linker search path variable (e.g. `LD_LIBRARY_PATH`) of any dependent taskgen with
+ non-static link_task.
+
+ - `pytest_libpath` attribute is used to manually specify additional linker paths.
+
+Note: `pytest` cannot automatically determine the correct `PYTHONPATH` for `pyext` taskgens
+ because the extension might be part of a Python package or used standalone:
+
+ - When used as part of another `py` package, the `PYTHONPATH` is provided by
+ that taskgen so no additional action is required.
+
+ - When used as a standalone module, the user needs to specify the `PYTHONPATH` explicitly
+ via the `pytest_path` attribute on the `pyext` taskgen.
+
+ For details c.f. the pytest playground examples.
+
+
+For example::
+
+ # A standalone Python C extension that demonstrates unit test environment population
+ # of PYTHONPATH and LD_LIBRARY_PATH/PATH/DYLD_LIBRARY_PATH.
+ #
+ # Note: `pytest_path` is provided here because pytest cannot automatically determine
+ # if the extension is part of another Python package or is used standalone.
+ bld(name = 'foo_ext',
+ features = 'c cshlib pyext',
+ source = 'src/foo_ext.c',
+ target = 'foo_ext',
+ pytest_path = [ bld.path.get_bld() ])
+
+ # Python package under test that also depend on the Python module `foo_ext`
+ #
+ # Note: `install_from` is added automatically to `PYTHONPATH`.
+ bld(name = 'foo',
+ features = 'py',
+ use = 'foo_ext',
+ source = bld.path.ant_glob('src/foo/*.py'),
+ install_from = 'src')
+
+ # Unit test example using the built in module unittest and let that discover
+ # any test cases.
+ bld(name = 'foo_test',
+ features = 'pytest',
+ use = 'foo',
+ pytest_source = bld.path.ant_glob('test/*.py'),
+ ut_str = '${PYTHON} -B -m unittest discover')
+
+"""
+
+import os
+from waflib import Task, TaskGen, Errors, Utils, Logs
+from waflib.Tools import ccroot
+
+def _process_use_rec(self, name):
+ """
+ Recursively process ``use`` for task generator with name ``name``..
+ Used by pytest_process_use.
+ """
+ if name in self.pytest_use_not or name in self.pytest_use_seen:
+ return
+ try:
+ tg = self.bld.get_tgen_by_name(name)
+ except Errors.WafError:
+ self.pytest_use_not.add(name)
+ return
+
+ self.pytest_use_seen.append(name)
+ tg.post()
+
+ for n in self.to_list(getattr(tg, 'use', [])):
+ _process_use_rec(self, n)
+
+
+@TaskGen.feature('pytest')
+@TaskGen.after_method('process_source', 'apply_link')
+def pytest_process_use(self):
+ """
+ Process the ``use`` attribute which contains a list of task generator names and store
+ paths that later is used to populate the unit test runtime environment.
+ """
+ self.pytest_use_not = set()
+ self.pytest_use_seen = []
+ self.pytest_paths = [] # strings or Nodes
+ self.pytest_libpaths = [] # strings or Nodes
+ self.pytest_dep_nodes = []
+
+ names = self.to_list(getattr(self, 'use', []))
+ for name in names:
+ _process_use_rec(self, name)
+
+ def extend_unique(lst, varlst):
+ ext = []
+ for x in varlst:
+ if x not in lst:
+ ext.append(x)
+ lst.extend(ext)
+
+ # Collect type specific info needed to construct a valid runtime environment
+ # for the test.
+ for name in self.pytest_use_seen:
+ tg = self.bld.get_tgen_by_name(name)
+
+ extend_unique(self.pytest_paths, Utils.to_list(getattr(tg, 'pytest_path', [])))
+ extend_unique(self.pytest_libpaths, Utils.to_list(getattr(tg, 'pytest_libpath', [])))
+
+ if 'py' in tg.features:
+ # Python dependencies are added to PYTHONPATH
+ pypath = getattr(tg, 'install_from', tg.path)
+
+ if 'buildcopy' in tg.features:
+ # Since buildcopy is used we assume that PYTHONPATH in build should be used,
+ # not source
+ extend_unique(self.pytest_paths, [pypath.get_bld().abspath()])
+
+ # Add buildcopy output nodes to dependencies
+ extend_unique(self.pytest_dep_nodes, [o for task in getattr(tg, 'tasks', []) \
+ for o in getattr(task, 'outputs', [])])
+ else:
+ # If buildcopy is not used, depend on sources instead
+ extend_unique(self.pytest_dep_nodes, tg.source)
+ extend_unique(self.pytest_paths, [pypath.abspath()])
+
+ if getattr(tg, 'link_task', None):
+ # For tasks with a link_task (C, C++, D et.c.) include their library paths:
+ if not isinstance(tg.link_task, ccroot.stlink_task):
+ extend_unique(self.pytest_dep_nodes, tg.link_task.outputs)
+ extend_unique(self.pytest_libpaths, tg.link_task.env.LIBPATH)
+
+ if 'pyext' in tg.features:
+ # If the taskgen is extending Python we also want to add the interpreter libpath.
+ extend_unique(self.pytest_libpaths, tg.link_task.env.LIBPATH_PYEXT)
+ else:
+ # Only add to libpath if the link task is not a Python extension
+ extend_unique(self.pytest_libpaths, [tg.link_task.outputs[0].parent.abspath()])
+
+
+@TaskGen.feature('pytest')
+@TaskGen.after_method('pytest_process_use')
+def make_pytest(self):
+ """
+ Creates a ``utest`` task with a populated environment for Python if not specified in ``ut_env``:
+
+ - Paths in `pytest_paths` attribute are used to populate PYTHONPATH
+ - Paths in `pytest_libpaths` attribute are used to populate the system library path (e.g. LD_LIBRARY_PATH)
+ """
+ nodes = self.to_nodes(self.pytest_source)
+ tsk = self.create_task('utest', nodes)
+
+ tsk.dep_nodes.extend(self.pytest_dep_nodes)
+ if getattr(self, 'ut_str', None):
+ self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
+ tsk.vars = lst + tsk.vars
+
+ if getattr(self, 'ut_cwd', None):
+ if isinstance(self.ut_cwd, str):
+ # we want a Node instance
+ if os.path.isabs(self.ut_cwd):
+ self.ut_cwd = self.bld.root.make_node(self.ut_cwd)
+ else:
+ self.ut_cwd = self.path.make_node(self.ut_cwd)
+ else:
+ if tsk.inputs:
+ self.ut_cwd = tsk.inputs[0].parent
+ else:
+ raise Errors.WafError("no valid input files for pytest task, check pytest_source value")
+
+ if not self.ut_cwd.exists():
+ self.ut_cwd.mkdir()
+
+ if not hasattr(self, 'ut_env'):
+ self.ut_env = dict(os.environ)
+ def add_paths(var, lst):
+ # Add list of paths to a variable, lst can contain strings or nodes
+ lst = [ str(n) for n in lst ]
+ Logs.debug("ut: %s: Adding paths %s=%s", self, var, lst)
+ self.ut_env[var] = os.pathsep.join(lst) + os.pathsep + self.ut_env.get(var, '')
+
+ # Prepend dependency paths to PYTHONPATH and LD_LIBRARY_PATH
+ add_paths('PYTHONPATH', self.pytest_paths)
+
+ if Utils.is_win32:
+ add_paths('PATH', self.pytest_libpaths)
+ elif Utils.unversioned_sys_platform() == 'darwin':
+ add_paths('DYLD_LIBRARY_PATH', self.pytest_libpaths)
+ add_paths('LD_LIBRARY_PATH', self.pytest_libpaths)
+ else:
+ add_paths('LD_LIBRARY_PATH', self.pytest_libpaths)
+
diff --git a/third_party/waf/waflib/extras/qnxnto.py b/third_party/waf/waflib/extras/qnxnto.py
new file mode 100644
index 00000000000..db4467df396
--- /dev/null
+++ b/third_party/waf/waflib/extras/qnxnto.py
@@ -0,0 +1,76 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Jérôme Carretero 2011 (zougloub)
+# QNX neutrino compatibility functions
+
+import sys, os
+from waflib import Utils
+
+class Popen(object):
+ """
+ Popen cannot work on QNX from a threaded program:
+ Forking in threads is not implemented in neutrino.
+
+ Python's os.popen / spawn / fork won't work when running in threads (they will if in the main program thread)
+
+ In waf, this happens mostly in build.
+ And the use cases can be replaced by os.system() calls.
+ """
+ __slots__ = ["prog", "kw", "popen", "verbose"]
+ verbose = 0
+ def __init__(self, prog, **kw):
+ try:
+ self.prog = prog
+ self.kw = kw
+ self.popen = None
+ if Popen.verbose:
+ sys.stdout.write("Popen created: %r, kw=%r..." % (prog, kw))
+
+ do_delegate = kw.get('stdout') == -1 and kw.get('stderr') == -1
+ if do_delegate:
+ if Popen.verbose:
+ print("Delegating to real Popen")
+ self.popen = self.real_Popen(prog, **kw)
+ else:
+ if Popen.verbose:
+ print("Emulating")
+ except Exception as e:
+ if Popen.verbose:
+ print("Exception: %s" % e)
+ raise
+
+ def __getattr__(self, name):
+ if Popen.verbose:
+ sys.stdout.write("Getattr: %s..." % name)
+ if name in Popen.__slots__:
+ return object.__getattribute__(self, name)
+ else:
+ if self.popen is not None:
+ if Popen.verbose:
+ print("from Popen")
+ return getattr(self.popen, name)
+ else:
+ if name == "wait":
+ return self.emu_wait
+ else:
+ raise Exception("subprocess emulation: not implemented: %s" % name)
+
+ def emu_wait(self):
+ if Popen.verbose:
+ print("emulated wait (%r kw=%r)" % (self.prog, self.kw))
+ if isinstance(self.prog, str):
+ cmd = self.prog
+ else:
+ cmd = " ".join(self.prog)
+ if 'cwd' in self.kw:
+ cmd = 'cd "%s" && %s' % (self.kw['cwd'], cmd)
+ return os.system(cmd)
+
+if sys.platform == "qnx6":
+ Popen.real_Popen = Utils.subprocess.Popen
+ Utils.subprocess.Popen = Popen
+
diff --git a/third_party/waf/waflib/extras/qt4.py b/third_party/waf/waflib/extras/qt4.py
new file mode 100644
index 00000000000..c310046188f
--- /dev/null
+++ b/third_party/waf/waflib/extras/qt4.py
@@ -0,0 +1,699 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2010 (ita)
+
+"""
+
+Tool Description
+================
+
+This tool helps with finding Qt4 tools and libraries,
+and also provides syntactic sugar for using Qt4 tools.
+
+The following snippet illustrates the tool usage::
+
+ def options(opt):
+ opt.load('compiler_cxx qt4')
+
+ def configure(conf):
+ conf.load('compiler_cxx qt4')
+
+ def build(bld):
+ bld(
+ features = 'qt4 cxx cxxprogram',
+ uselib = 'QTCORE QTGUI QTOPENGL QTSVG',
+ source = 'main.cpp textures.qrc aboutDialog.ui',
+ target = 'window',
+ )
+
+Here, the UI description and resource files will be processed
+to generate code.
+
+Usage
+=====
+
+Load the "qt4" tool.
+
+You also need to edit your sources accordingly:
+
+- the normal way of doing things is to have your C++ files
+ include the .moc file.
+ This is regarded as the best practice (and provides much faster
+ compilations).
+  It also implies that the include paths have been set properly.
+
+- to have the include paths added automatically, use the following::
+
+ from waflib.TaskGen import feature, before_method, after_method
+ @feature('cxx')
+ @after_method('process_source')
+ @before_method('apply_incpaths')
+ def add_includes_paths(self):
+ incs = set(self.to_list(getattr(self, 'includes', '')))
+ for x in self.compiled_tasks:
+ incs.add(x.inputs[0].parent.path_from(self.path))
+ self.includes = sorted(incs)
+
+Note: another tool provides Qt processing that does not require
+.moc includes, see 'playground/slow_qt/'.
+
+A few options (--qt{dir,bin,...}) and environment variables
+(QT4_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool,
+tool path selection, etc; please read the source for more info.
+
+"""
+
+try:
+ from xml.sax import make_parser
+ from xml.sax.handler import ContentHandler
+except ImportError:
+ has_xml = False
+ ContentHandler = object
+else:
+ has_xml = True
+
+import os, sys
+from waflib.Tools import cxx
+from waflib import Task, Utils, Options, Errors, Context
+from waflib.TaskGen import feature, after_method, extension
+from waflib.Configure import conf
+from waflib import Logs
+
+MOC_H = ['.h', '.hpp', '.hxx', '.hh']
+"""
+File extensions associated to the .moc files
+"""
+
+EXT_RCC = ['.qrc']
+"""
+File extension for the resource (.qrc) files
+"""
+
+EXT_UI = ['.ui']
+"""
+File extension for the user interface (.ui) files
+"""
+
+EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C']
+"""
+File extensions of C++ files that may require a .moc processing
+"""
+
+QT4_LIBS = "QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtXmlPatterns QtWebKit Qt3Support QtHelp QtScript QtDeclarative QtDesigner"
+
+class qxx(Task.classes['cxx']):
+ """
+ Each C++ file can have zero or several .moc files to create.
+ They are known only when the files are scanned (preprocessor)
+ To avoid scanning the c++ files each time (parsing C/C++), the results
+ are retrieved from the task cache (bld.node_deps/bld.raw_deps).
+ The moc tasks are also created *dynamically* during the build.
+ """
+
+ def __init__(self, *k, **kw):
+ Task.Task.__init__(self, *k, **kw)
+ self.moc_done = 0
+
+ def runnable_status(self):
+ """
+ Compute the task signature to make sure the scanner was executed. Create the
+ moc tasks by using :py:meth:`waflib.Tools.qt4.qxx.add_moc_tasks` (if necessary),
+ then postpone the task execution (there is no need to recompute the task signature).
+ """
+ if self.moc_done:
+ return Task.Task.runnable_status(self)
+ else:
+ for t in self.run_after:
+ if not t.hasrun:
+ return Task.ASK_LATER
+ self.add_moc_tasks()
+ return Task.Task.runnable_status(self)
+
+ def create_moc_task(self, h_node, m_node):
+ """
+ If several libraries use the same classes, it is possible that moc will run several times (Issue 1318)
+ It is not possible to change the file names, but we can assume that the moc transformation will be identical,
+ and the moc tasks can be shared in a global cache.
+
+ The defines passed to moc will then depend on task generator order. If this is not acceptable, then
+ use the tool slow_qt4 instead (and enjoy the slow builds... :-( )
+ """
+ try:
+ moc_cache = self.generator.bld.moc_cache
+ except AttributeError:
+ moc_cache = self.generator.bld.moc_cache = {}
+
+ try:
+ return moc_cache[h_node]
+ except KeyError:
+ tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator)
+ tsk.set_inputs(h_node)
+ tsk.set_outputs(m_node)
+
+ if self.generator:
+ self.generator.tasks.append(tsk)
+
+ # direct injection in the build phase (safe because called from the main thread)
+ gen = self.generator.bld.producer
+ gen.outstanding.append(tsk)
+ gen.total += 1
+
+ return tsk
+
+ def moc_h_ext(self):
+ ext = []
+ try:
+ ext = Options.options.qt_header_ext.split()
+ except AttributeError:
+ pass
+ if not ext:
+ ext = MOC_H
+ return ext
+
+ def add_moc_tasks(self):
+ """
+ Create the moc tasks by looking in ``bld.raw_deps[self.uid()]``
+ """
+ node = self.inputs[0]
+ bld = self.generator.bld
+
+ try:
+ # compute the signature once to know if there is a moc file to create
+ self.signature()
+ except KeyError:
+ # the moc file may be referenced somewhere else
+ pass
+ else:
+ # remove the signature, it must be recomputed with the moc task
+ delattr(self, 'cache_sig')
+
+ include_nodes = [node.parent] + self.generator.includes_nodes
+
+ moctasks = []
+ mocfiles = set()
+ for d in bld.raw_deps.get(self.uid(), []):
+ if not d.endswith('.moc'):
+ continue
+
+ # process that base.moc only once
+ if d in mocfiles:
+ continue
+ mocfiles.add(d)
+
+ # find the source associated with the moc file
+ h_node = None
+
+ base2 = d[:-4]
+ for x in include_nodes:
+ for e in self.moc_h_ext():
+ h_node = x.find_node(base2 + e)
+ if h_node:
+ break
+ if h_node:
+ m_node = h_node.change_ext('.moc')
+ break
+ else:
+ # foo.cpp -> foo.cpp.moc
+ for k in EXT_QT4:
+ if base2.endswith(k):
+ for x in include_nodes:
+ h_node = x.find_node(base2)
+ if h_node:
+ break
+ if h_node:
+ m_node = h_node.change_ext(k + '.moc')
+ break
+
+ if not h_node:
+ raise Errors.WafError('No source found for %r which is a moc file' % d)
+
+ # create the moc task
+ task = self.create_moc_task(h_node, m_node)
+ moctasks.append(task)
+
+ # simple scheduler dependency: run the moc task before others
+ self.run_after.update(set(moctasks))
+ self.moc_done = 1
+
+class trans_update(Task.Task):
+ """Update a .ts files from a list of C++ files"""
+ run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
+ color = 'BLUE'
+
+class XMLHandler(ContentHandler):
+ """
+ Parser for *.qrc* files
+ """
+ def __init__(self):
+ self.buf = []
+ self.files = []
+ def startElement(self, name, attrs):
+ if name == 'file':
+ self.buf = []
+ def endElement(self, name):
+ if name == 'file':
+ self.files.append(str(''.join(self.buf)))
+ def characters(self, cars):
+ self.buf.append(cars)
+
+@extension(*EXT_RCC)
+def create_rcc_task(self, node):
+ "Create rcc and cxx tasks for *.qrc* files"
+ rcnode = node.change_ext('_rc.cpp')
+ self.create_task('rcc', node, rcnode)
+ cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
+ try:
+ self.compiled_tasks.append(cpptask)
+ except AttributeError:
+ self.compiled_tasks = [cpptask]
+ return cpptask
+
+@extension(*EXT_UI)
+def create_uic_task(self, node):
+ "hook for uic tasks"
+ uictask = self.create_task('ui4', node)
+ uictask.outputs = [self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])]
+
+@extension('.ts')
+def add_lang(self, node):
+ """add all the .ts file into self.lang"""
+ self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
+
+@feature('qt4')
+@after_method('apply_link')
+def apply_qt4(self):
+ """
+ Add MOC_FLAGS which may be necessary for moc::
+
+ def build(bld):
+ bld.program(features='qt4', source='main.cpp', target='app', use='QTCORE')
+
+ The additional parameters are:
+
+ :param lang: list of translation files (\*.ts) to process
+ :type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
+ :param update: whether to process the C++ files to update the \*.ts files (use **waf --translate**)
+ :type update: bool
+ :param langname: if given, transform the \*.ts files into a .qrc files to include in the binary file
+ :type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
+ """
+ if getattr(self, 'lang', None):
+ qmtasks = []
+ for x in self.to_list(self.lang):
+ if isinstance(x, str):
+ x = self.path.find_resource(x + '.ts')
+ qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))
+
+ if getattr(self, 'update', None) and Options.options.trans_qt4:
+ cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
+ a.inputs[0] for a in self.tasks if getattr(a, 'inputs', None) and a.inputs[0].name.endswith('.ui')]
+ for x in qmtasks:
+ self.create_task('trans_update', cxxnodes, x.inputs)
+
+ if getattr(self, 'langname', None):
+ qmnodes = [x.outputs[0] for x in qmtasks]
+ rcnode = self.langname
+ if isinstance(rcnode, str):
+ rcnode = self.path.find_or_declare(rcnode + '.qrc')
+ t = self.create_task('qm2rcc', qmnodes, rcnode)
+ k = create_rcc_task(self, t.outputs[0])
+ self.link_task.inputs.append(k.outputs[0])
+
+ lst = []
+ for flag in self.to_list(self.env['CXXFLAGS']):
+ if len(flag) < 2:
+ continue
+ f = flag[0:2]
+ if f in ('-D', '-I', '/D', '/I'):
+ if (f[0] == '/'):
+ lst.append('-' + flag[1:])
+ else:
+ lst.append(flag)
+ self.env.append_value('MOC_FLAGS', lst)
+
+@extension(*EXT_QT4)
+def cxx_hook(self, node):
+ """
+ Re-map C++ file extensions to the :py:class:`waflib.Tools.qt4.qxx` task.
+ """
+ return self.create_compiled_task('qxx', node)
+
+class rcc(Task.Task):
+ """
+ Process *.qrc* files
+ """
+ color = 'BLUE'
+ run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
+ ext_out = ['.h']
+
+ def rcname(self):
+ return os.path.splitext(self.inputs[0].name)[0]
+
+ def scan(self):
+ """Parse the *.qrc* files"""
+ if not has_xml:
+ Logs.error('no xml support was found, the rcc dependencies will be incomplete!')
+ return ([], [])
+
+ parser = make_parser()
+ curHandler = XMLHandler()
+ parser.setContentHandler(curHandler)
+ fi = open(self.inputs[0].abspath(), 'r')
+ try:
+ parser.parse(fi)
+ finally:
+ fi.close()
+
+ nodes = []
+ names = []
+ root = self.inputs[0].parent
+ for x in curHandler.files:
+ nd = root.find_resource(x)
+ if nd:
+ nodes.append(nd)
+ else:
+ names.append(x)
+ return (nodes, names)
+
+class moc(Task.Task):
+ """
+ Create *.moc* files
+ """
+ color = 'BLUE'
+ run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
+ def keyword(self):
+ return "Creating"
+ def __str__(self):
+ return self.outputs[0].path_from(self.generator.bld.launch_node())
+
+class ui4(Task.Task):
+ """
+ Process *.ui* files
+ """
+ color = 'BLUE'
+ run_str = '${QT_UIC} ${SRC} -o ${TGT}'
+ ext_out = ['.h']
+
+class ts2qm(Task.Task):
+ """
+ Create *.qm* files from *.ts* files
+ """
+ color = 'BLUE'
+ run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
+
+class qm2rcc(Task.Task):
+ """
+	Transform *.qm* files into *.qrc* files
+ """
+ color = 'BLUE'
+ after = 'ts2qm'
+
+ def run(self):
+ """Create a qrc file including the inputs"""
+ txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
+ code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
+ self.outputs[0].write(code)
+
+def configure(self):
+ """
+ Besides the configuration options, the environment variable QT4_ROOT may be used
+ to give the location of the qt4 libraries (absolute path).
+
+ The detection will use the program *pkg-config* through :py:func:`waflib.Tools.config_c.check_cfg`
+ """
+ self.find_qt4_binaries()
+ self.set_qt4_libs_to_check()
+ self.set_qt4_defines()
+ self.find_qt4_libraries()
+ self.add_qt4_rpath()
+ self.simplify_qt4_libs()
+
+@conf
+def find_qt4_binaries(self):
+ env = self.env
+ opt = Options.options
+
+ qtdir = getattr(opt, 'qtdir', '')
+ qtbin = getattr(opt, 'qtbin', '')
+
+ paths = []
+
+ if qtdir:
+ qtbin = os.path.join(qtdir, 'bin')
+
+ # the qt directory has been given from QT4_ROOT - deduce the qt binary path
+ if not qtdir:
+ qtdir = os.environ.get('QT4_ROOT', '')
+ qtbin = os.environ.get('QT4_BIN') or os.path.join(qtdir, 'bin')
+
+ if qtbin:
+ paths = [qtbin]
+
+ # no qtdir, look in the path and in /usr/local/Trolltech
+ if not qtdir:
+ paths = os.environ.get('PATH', '').split(os.pathsep)
+ paths.append('/usr/share/qt4/bin/')
+ try:
+ lst = Utils.listdir('/usr/local/Trolltech/')
+ except OSError:
+ pass
+ else:
+ if lst:
+ lst.sort()
+ lst.reverse()
+
+ # keep the highest version
+ qtdir = '/usr/local/Trolltech/%s/' % lst[0]
+ qtbin = os.path.join(qtdir, 'bin')
+ paths.append(qtbin)
+
+ # at the end, try to find qmake in the paths given
+ # keep the one with the highest version
+ cand = None
+ prev_ver = ['4', '0', '0']
+ for qmk in ('qmake-qt4', 'qmake4', 'qmake'):
+ try:
+ qmake = self.find_program(qmk, path_list=paths)
+ except self.errors.ConfigurationError:
+ pass
+ else:
+ try:
+ version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip()
+ except self.errors.WafError:
+ pass
+ else:
+ if version:
+ new_ver = version.split('.')
+ if new_ver > prev_ver:
+ cand = qmake
+ prev_ver = new_ver
+ if cand:
+ self.env.QMAKE = cand
+ else:
+ self.fatal('Could not find qmake for qt4')
+
+ qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_BINS']).strip() + os.sep
+
+ def find_bin(lst, var):
+ if var in env:
+ return
+ for f in lst:
+ try:
+ ret = self.find_program(f, path_list=paths)
+ except self.errors.ConfigurationError:
+ pass
+ else:
+ env[var]=ret
+ break
+
+ find_bin(['uic-qt3', 'uic3'], 'QT_UIC3')
+ find_bin(['uic-qt4', 'uic'], 'QT_UIC')
+ if not env.QT_UIC:
+ self.fatal('cannot find the uic compiler for qt4')
+
+ self.start_msg('Checking for uic version')
+ uicver = self.cmd_and_log(env.QT_UIC + ["-version"], output=Context.BOTH)
+ uicver = ''.join(uicver).strip()
+ uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
+ self.end_msg(uicver)
+ if uicver.find(' 3.') != -1:
+ self.fatal('this uic compiler is for qt3, add uic for qt4 to your path')
+
+ find_bin(['moc-qt4', 'moc'], 'QT_MOC')
+ find_bin(['rcc-qt4', 'rcc'], 'QT_RCC')
+ find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE')
+ find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE')
+
+ env['UIC3_ST']= '%s -o %s'
+ env['UIC_ST'] = '%s -o %s'
+ env['MOC_ST'] = '-o'
+ env['ui_PATTERN'] = 'ui_%s.h'
+ env['QT_LRELEASE_FLAGS'] = ['-silent']
+ env.MOCCPPPATH_ST = '-I%s'
+ env.MOCDEFINES_ST = '-D%s'
+
+@conf
+def find_qt4_libraries(self):
+ qtlibs = getattr(Options.options, 'qtlibs', None) or os.environ.get("QT4_LIBDIR")
+ if not qtlibs:
+ try:
+ qtlibs = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
+ except Errors.WafError:
+ qtdir = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
+ qtlibs = os.path.join(qtdir, 'lib')
+ self.msg('Found the Qt4 libraries in', qtlibs)
+
+ qtincludes = os.environ.get("QT4_INCLUDES") or self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
+ env = self.env
+ if not 'PKG_CONFIG_PATH' in os.environ:
+ os.environ['PKG_CONFIG_PATH'] = '%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib' % (qtlibs, qtlibs)
+
+ try:
+ if os.environ.get("QT4_XCOMPILE"):
+ raise self.errors.ConfigurationError()
+ self.check_cfg(atleast_pkgconfig_version='0.1')
+ except self.errors.ConfigurationError:
+ for i in self.qt4_vars:
+ uselib = i.upper()
+ if Utils.unversioned_sys_platform() == "darwin":
+ # Since at least qt 4.7.3 each library locates in separate directory
+ frameworkName = i + ".framework"
+ qtDynamicLib = os.path.join(qtlibs, frameworkName, i)
+ if os.path.exists(qtDynamicLib):
+ env.append_unique('FRAMEWORK_' + uselib, i)
+ self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
+ else:
+ self.msg('Checking for %s' % i, False, 'YELLOW')
+ env.append_unique('INCLUDES_' + uselib, os.path.join(qtlibs, frameworkName, 'Headers'))
+ elif env.DEST_OS != "win32":
+ qtDynamicLib = os.path.join(qtlibs, "lib" + i + ".so")
+ qtStaticLib = os.path.join(qtlibs, "lib" + i + ".a")
+ if os.path.exists(qtDynamicLib):
+ env.append_unique('LIB_' + uselib, i)
+ self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
+ elif os.path.exists(qtStaticLib):
+ env.append_unique('LIB_' + uselib, i)
+ self.msg('Checking for %s' % i, qtStaticLib, 'GREEN')
+ else:
+ self.msg('Checking for %s' % i, False, 'YELLOW')
+
+ env.append_unique('LIBPATH_' + uselib, qtlibs)
+ env.append_unique('INCLUDES_' + uselib, qtincludes)
+ env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
+ else:
+ # Release library names are like QtCore4
+ for k in ("lib%s.a", "lib%s4.a", "%s.lib", "%s4.lib"):
+ lib = os.path.join(qtlibs, k % i)
+ if os.path.exists(lib):
+ env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
+ self.msg('Checking for %s' % i, lib, 'GREEN')
+ break
+ else:
+ self.msg('Checking for %s' % i, False, 'YELLOW')
+
+ env.append_unique('LIBPATH_' + uselib, qtlibs)
+ env.append_unique('INCLUDES_' + uselib, qtincludes)
+ env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
+
+ # Debug library names are like QtCore4d
+ uselib = i.upper() + "_debug"
+ for k in ("lib%sd.a", "lib%sd4.a", "%sd.lib", "%sd4.lib"):
+ lib = os.path.join(qtlibs, k % i)
+ if os.path.exists(lib):
+ env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
+ self.msg('Checking for %s' % i, lib, 'GREEN')
+ break
+ else:
+ self.msg('Checking for %s' % i, False, 'YELLOW')
+
+ env.append_unique('LIBPATH_' + uselib, qtlibs)
+ env.append_unique('INCLUDES_' + uselib, qtincludes)
+ env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
+ else:
+ for i in self.qt4_vars_debug + self.qt4_vars:
+ self.check_cfg(package=i, args='--cflags --libs', mandatory=False)
+
+@conf
+def simplify_qt4_libs(self):
+ # the libpaths make really long command-lines
+ # remove the qtcore ones from qtgui, etc
+ env = self.env
+ def process_lib(vars_, coreval):
+ for d in vars_:
+ var = d.upper()
+ if var == 'QTCORE':
+ continue
+
+ value = env['LIBPATH_'+var]
+ if value:
+ core = env[coreval]
+ accu = []
+ for lib in value:
+ if lib in core:
+ continue
+ accu.append(lib)
+ env['LIBPATH_'+var] = accu
+
+ process_lib(self.qt4_vars, 'LIBPATH_QTCORE')
+ process_lib(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')
+
+@conf
+def add_qt4_rpath(self):
+ # rpath if wanted
+ env = self.env
+ if getattr(Options.options, 'want_rpath', False):
+ def process_rpath(vars_, coreval):
+ for d in vars_:
+ var = d.upper()
+ value = env['LIBPATH_'+var]
+ if value:
+ core = env[coreval]
+ accu = []
+ for lib in value:
+ if var != 'QTCORE':
+ if lib in core:
+ continue
+ accu.append('-Wl,--rpath='+lib)
+ env['RPATH_'+var] = accu
+ process_rpath(self.qt4_vars, 'LIBPATH_QTCORE')
+ process_rpath(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')
+
+@conf
+def set_qt4_libs_to_check(self):
+ if not hasattr(self, 'qt4_vars'):
+ self.qt4_vars = QT4_LIBS
+ self.qt4_vars = Utils.to_list(self.qt4_vars)
+ if not hasattr(self, 'qt4_vars_debug'):
+ self.qt4_vars_debug = [a + '_debug' for a in self.qt4_vars]
+ self.qt4_vars_debug = Utils.to_list(self.qt4_vars_debug)
+
+@conf
+def set_qt4_defines(self):
+ if sys.platform != 'win32':
+ return
+ for x in self.qt4_vars:
+ y = x[2:].upper()
+ self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y)
+ self.env.append_unique('DEFINES_%s_DEBUG' % x.upper(), 'QT_%s_LIB' % y)
+
+def options(opt):
+ """
+ Command-line options
+ """
+ opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')
+
+ opt.add_option('--header-ext',
+ type='string',
+ default='',
+ help='header extension for moc files',
+ dest='qt_header_ext')
+
+ for i in 'qtdir qtbin qtlibs'.split():
+ opt.add_option('--'+i, type='string', default='', dest=i)
+
+ opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False)
+
diff --git a/third_party/waf/waflib/extras/relocation.py b/third_party/waf/waflib/extras/relocation.py
index 052890b6837..50ac5b9d41b 100644
--- a/third_party/waf/waflib/extras/relocation.py
+++ b/third_party/waf/waflib/extras/relocation.py
@@ -1,5 +1,9 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
"""
Waf 1.6
@@ -81,3 +85,5 @@ def apply_incpaths(self):
self.includes_nodes = lst
bld = self.bld
self.env['INCPATHS'] = [x.is_child_of(bld.srcnode) and x.path_from(bld.bldnode) or x.abspath() for x in lst]
+
+
diff --git a/third_party/waf/waflib/extras/remote.py b/third_party/waf/waflib/extras/remote.py
index 6aca854f32f..9bf6e2d71c6 100644
--- a/third_party/waf/waflib/extras/remote.py
+++ b/third_party/waf/waflib/extras/remote.py
@@ -1,3 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Remote Builds tool using rsync+ssh
@@ -76,7 +80,7 @@ Usage
4. Setup the ssh server and ssh keys
- The ssh key should not be protected by a password, or it will prompt for it everytime.
+ The ssh key should not be protected by a password, or it will prompt for it every time.
Create the key on the client:
.. code:: bash
@@ -324,3 +328,4 @@ def rsync_and_ssh(task):
ret = task.exec_command(bld.make_save_command(task))
if ret:
return ret
+
diff --git a/third_party/waf/waflib/extras/resx.py b/third_party/waf/waflib/extras/resx.py
new file mode 100644
index 00000000000..ead38ef50b5
--- /dev/null
+++ b/third_party/waf/waflib/extras/resx.py
@@ -0,0 +1,39 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+
+import os
+from waflib import Task
+from waflib.TaskGen import extension
+
+def configure(conf):
+ conf.find_program(['resgen'], var='RESGEN')
+ conf.env.RESGENFLAGS = '/useSourcePath'
+
+@extension('.resx')
+def resx_file(self, node):
+ """
+ Bind the .resx extension to a resgen task
+ """
+ if not getattr(self, 'cs_task', None):
+ self.bld.fatal('resx_file has no link task for use %r' % self)
+
+ # Given assembly 'Foo' and file 'Sub/Dir/File.resx', create 'Foo.Sub.Dir.File.resources'
+ assembly = getattr(self, 'namespace', os.path.splitext(self.gen)[0])
+ res = os.path.splitext(node.path_from(self.path))[0].replace('/', '.').replace('\\', '.')
+ out = self.path.find_or_declare(assembly + '.' + res + '.resources')
+
+ tsk = self.create_task('resgen', node, out)
+
+ self.cs_task.dep_nodes.extend(tsk.outputs) # dependency
+ self.env.append_value('RESOURCES', tsk.outputs[0].bldpath())
+
+class resgen(Task.Task):
+ """
+ Compile C# resource files
+ """
+ color = 'YELLOW'
+ run_str = '${RESGEN} ${RESGENFLAGS} ${SRC} ${TGT}'
diff --git a/third_party/waf/waflib/extras/review.py b/third_party/waf/waflib/extras/review.py
index 34796cb6236..8d167c15e13 100644
--- a/third_party/waf/waflib/extras/review.py
+++ b/third_party/waf/waflib/extras/review.py
@@ -1,3 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Laurent Birtz, 2011
@@ -242,7 +246,8 @@ class ReviewContext(Context.Context):
"""
Return true if the review sets specified are equal.
"""
- if len(set1.keys()) != len(set2.keys()): return False
+ if len(set1.keys()) != len(set2.keys()):
+ return False
for key in set1.keys():
if not key in set2 or set1[key] != set2[key]:
return False
@@ -259,7 +264,8 @@ class ReviewContext(Context.Context):
name = ", ".join(opt._short_opts + opt._long_opts)
help = opt.help
actual = None
- if dest in review_set: actual = review_set[dest]
+ if dest in review_set:
+ actual = review_set[dest]
default = review_defaults[dest]
lines.append(self.format_option(name, help, actual, default, term_width))
return "Configuration:\n\n" + "\n\n".join(lines) + "\n"
@@ -278,7 +284,8 @@ class ReviewContext(Context.Context):
w = textwrap.TextWrapper()
w.width = term_width - 1
- if w.width < 60: w.width = 60
+ if w.width < 60:
+ w.width = 60
out = ""
@@ -319,3 +326,4 @@ def new_configure_execute(self):
old_configure_execute(self)
Context.create_context('review').store_review_set(new_review_set)
Configure.ConfigurationContext.execute = new_configure_execute
+
diff --git a/third_party/waf/waflib/extras/rst.py b/third_party/waf/waflib/extras/rst.py
index c8cd7526dcc..1703eeac348 100644
--- a/third_party/waf/waflib/extras/rst.py
+++ b/third_party/waf/waflib/extras/rst.py
@@ -1,3 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Jérôme Carretero, 2013 (zougloub)
@@ -36,8 +40,11 @@ from waflib.TaskGen import feature, before_method
rst_progs = "rst2html rst2xetex rst2latex rst2xml rst2pdf rst2s5 rst2man rst2odt rst2rtf".split()
-def parse_rst_node(node, nodes, names, seen):
+def parse_rst_node(task, node, nodes, names, seen, dirs=None):
# TODO add extensibility, to handle custom rst include tags...
+ if dirs is None:
+ dirs = (node.parent,node.get_bld().parent)
+
if node in seen:
return
seen.append(node)
@@ -46,14 +53,19 @@ def parse_rst_node(node, nodes, names, seen):
for match in re_rst.finditer(code):
ipath = match.group('file')
itype = match.group('type')
- Logs.debug("rst: visiting %s: %s" % (itype, ipath))
- found = node.parent.find_resource(ipath)
- if found:
- nodes.append(found)
- if itype == 'include':
- parse_rst_node(found, nodes, names, seen)
- else:
- names.append(ipath)
+ Logs.debug('rst: visiting %s: %s', itype, ipath)
+ found = False
+ for d in dirs:
+ Logs.debug('rst: looking for %s in %s', ipath, d.abspath())
+ found = d.find_node(ipath)
+ if found:
+ Logs.debug('rst: found %s as %s', ipath, found.abspath())
+ nodes.append((itype, found))
+ if itype == 'include':
+ parse_rst_node(task, found, nodes, names, seen)
+ break
+ if not found:
+ names.append((itype, ipath))
class docutils(Task.Task):
"""
@@ -74,13 +86,13 @@ class docutils(Task.Task):
if not node:
return (nodes, names)
- parse_rst_node(node, nodes, names, seen)
+ parse_rst_node(self, node, nodes, names, seen)
- Logs.debug("rst: %s: found the following file deps: %s" % (repr(self), nodes))
+ Logs.debug('rst: %r: found the following file deps: %r', self, nodes)
if names:
- Logs.warn("rst: %s: could not find the following file deps: %s" % (repr(self), names))
+ Logs.warn('rst: %r: could not find the following file deps: %r', self, names)
- return (nodes, names)
+ return ([v for (t,v) in nodes], [v for (t,v) in names])
def check_status(self, msg, retcode):
"""
@@ -92,7 +104,7 @@ class docutils(Task.Task):
:type retcode: boolean
"""
if retcode != 0:
- raise Errors.WafError("%r command exit status %r" % (msg, retcode))
+ raise Errors.WafError('%r command exit status %r' % (msg, retcode))
def run(self):
"""
@@ -116,7 +128,7 @@ class rst2html(docutils):
if stylesheet is not None:
ssnode = self.generator.to_nodes(stylesheet)[0]
nodes.append(ssnode)
- Logs.debug("rst: adding dep to %s %s" % (attribute, stylesheet))
+ Logs.debug('rst: adding dep to %s %s', attribute, stylesheet)
return nodes, names
@@ -235,7 +247,7 @@ def apply_rst(self):
inst_to = getattr(self, 'install_path', None)
if inst_to:
- self.install_task = self.bld.install_files(inst_to, task.outputs[:], env=self.env)
+ self.install_task = self.add_install_files(install_to=inst_to, install_from=task.outputs[:])
self.source = []
@@ -249,3 +261,4 @@ def configure(self):
"""
for p in rst_progs:
self.find_program(p, mandatory=False)
+
diff --git a/third_party/waf/waflib/extras/run_do_script.py b/third_party/waf/waflib/extras/run_do_script.py
new file mode 100644
index 00000000000..529cc08ae9e
--- /dev/null
+++ b/third_party/waf/waflib/extras/run_do_script.py
@@ -0,0 +1,143 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Hans-Martin von Gaudecker, 2012
+
+"""
+Run a Stata do-script in the directory specified by **ctx.bldnode**. The
+first and only argument will be the name of the do-script (no extension),
+which can be accessed inside the do-script by the local macro `1'. Useful
+for keeping a log file.
+
+The tool uses the log file that is automatically kept by Stata only
+for error-catching purposes, it will be destroyed if the task finished
+without error. In case of an error in **some_script.do**, you can inspect
+it as **some_script.log** in the **ctx.bldnode** directory.
+
+Note that Stata will not return an error code if it exits abnormally --
+catching errors relies on parsing the log file mentioned before. Should
+the parser behave incorrectly please send an email to hmgaudecker [at] gmail.
+
+**WARNING**
+
+ The tool will not work if multiple do-scripts of the same name---but in
+ different directories---are run at the same time! Avoid this situation.
+
+Usage::
+
+ ctx(features='run_do_script',
+ source='some_script.do',
+ target=['some_table.tex', 'some_figure.eps'],
+ deps='some_data.csv')
+"""
+
+
+import os, re, sys
+from waflib import Task, TaskGen, Logs
+
# Per-platform Stata executable names and batch-mode invocation flags.
# Stata ships under different binary names depending on edition
# (MP/SE/IC) and bitness, so configure() probes a list of candidates.
if sys.platform == 'darwin':
	STATA_COMMANDS = ['Stata64MP', 'StataMP',
			'Stata64SE', 'StataSE',
			'Stata64', 'Stata']
	STATAFLAGS = '-e -q do'
	STATAENCODING = 'MacRoman'
elif sys.platform.startswith('linux'):
	STATA_COMMANDS = ['stata-mp', 'stata-se', 'stata']
	STATAFLAGS = '-b -q do'
	# Not sure whether this is correct...
	STATAENCODING = 'Latin-1'
elif sys.platform.lower().startswith('win'):
	STATA_COMMANDS = ['StataMP-64', 'StataMP-ia',
			'StataMP', 'StataSE-64',
			'StataSE-ia', 'StataSE',
			'Stata-64', 'Stata-ia',
			'Stata.e', 'WMPSTATA',
			'WSESTATA', 'WSTATA']
	STATAFLAGS = '/e do'
	STATAENCODING = 'Latin-1'
else:
	# Fail loudly at import time rather than later with an undefined name.
	raise Exception("Unknown sys.platform: %s " % sys.platform)
+
def configure(ctx):
	"""Find a Stata executable (trying the per-platform candidate names)
	and record the batch-mode flags and log-file encoding in the
	configuration environment."""
	ctx.find_program(STATA_COMMANDS, var='STATACMD', errmsg="""\n
No Stata executable found!\n\n
If Stata is needed:\n
	1) Check the settings of your system path.
	2) Note we are looking for Stata executables called: %s
	   If yours has a different name, please report to hmgaudecker [at] gmail\n
Else:\n
	Do not load the 'run_do_script' tool in the main wscript.\n\n""" % STATA_COMMANDS)
	ctx.env.STATAFLAGS = STATAFLAGS
	ctx.env.STATAENCODING = STATAENCODING
+
class run_do_script_base(Task.Task):
	"""Run a Stata do-script from the bldnode directory."""
	# ${DOFILETRUNK} is passed as the do-file's `1' macro argument.
	run_str = '"${STATACMD}" ${STATAFLAGS} "${SRC[0].abspath()}" "${DOFILETRUNK}"'
	# Quoted paths require shell interpretation.
	shell = True
+
class run_do_script(run_do_script_base):
	"""Use the log file automatically kept by Stata for error-catching.
	Erase it if the task finished without error. If not, it will show
	up as do_script.log in the bldnode directory.
	"""
	def run(self):
		"""Execute the do-script, then inspect Stata's log for errors.

		Returns the Stata error code parsed from the log (truthy) on
		failure, ``None`` on success.
		"""
		run_do_script_base.run(self)
		ret, log_tail = self.check_erase_log_file()
		if ret:
			Logs.error("""Running Stata on %r failed with code %r.\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
				self.inputs[0], ret, self.env.LOGFILEPATH, log_tail)
		return ret

	def check_erase_log_file(self):
		"""Parse Stata's default log file and erase it if everything okay.

		Parser is based on Brendan Halpin's shell script found here:
		http://teaching.sociology.ul.ie/bhalpin/wordpress/?p=122

		Returns ``(error_code, last_lines)`` when an ``r(NNN)`` marker is
		found, else ``(None, None)``.
		"""
		if sys.version_info.major >= 3:
			kwargs = {'file': self.env.LOGFILEPATH, 'mode': 'r', 'encoding': self.env.STATAENCODING}
		else:
			kwargs = {'name': self.env.LOGFILEPATH, 'mode': 'r'}
		# Read the tail first and close the handle before any os.remove()
		# below -- Windows refuses to delete a file that is still open.
		with open(**kwargs) as log:
			log_tail = log.readlines()[-10:]
		for line in log_tail:
			# Stata reports errors as r(<code>). Raw string avoids the
			# invalid '\(' escape-sequence warning on Python 3.6+.
			error_found = re.match(r"r\(([0-9]+)\)", line)
			if error_found:
				return error_found.group(1), ''.join(log_tail)
		# Only end up here if the parser did not identify an error.
		os.remove(self.env.LOGFILEPATH)
		return None, None
+
+
@TaskGen.feature('run_do_script')
@TaskGen.before_method('process_source')
def apply_run_do_script(tg):
	"""Task generator customising the options etc. to call Stata in batch
	mode for running a do-script.
	"""
	# Resolve the do-script and the declared outputs to waf nodes.
	do_node = tg.path.find_resource(tg.source)
	out_nodes = [tg.path.find_or_declare(name) for name in tg.to_list(tg.target)]

	task = tg.create_task('run_do_script', src=do_node, tgt=out_nodes)
	task.env.DOFILETRUNK = os.path.splitext(do_node.name)[0]
	task.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), '%s.log' % (task.env.DOFILETRUNK))

	# Extra file dependencies: a change in any of them re-runs the script.
	for dep in tg.to_list(getattr(tg, 'deps', [])):
		dep_node = tg.path.find_resource(dep)
		if not dep_node:
			tg.bld.fatal('Could not find dependency %r for running %r' % (dep, do_node.abspath()))
		task.dep_nodes.append(dep_node)
	Logs.debug('deps: found dependencies %r for running %r', task.dep_nodes, do_node.abspath())

	# Prevent process_source from consuming the .do file a second time.
	tg.source = []
+
diff --git a/third_party/waf/waflib/extras/run_m_script.py b/third_party/waf/waflib/extras/run_m_script.py
new file mode 100644
index 00000000000..bc6db0027ab
--- /dev/null
+++ b/third_party/waf/waflib/extras/run_m_script.py
@@ -0,0 +1,92 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Hans-Martin von Gaudecker, 2012
+
+"""
+Run a Matlab script.
+
+Note that the script is run in the directory where it lives -- Matlab won't
+allow it any other way.
+
+For error-catching purposes, keep an own log-file that is destroyed if the
+task finished without error. If not, it will show up as mscript_[index].log
+in the bldnode directory.
+
+Usage::
+
+ ctx(features='run_m_script',
+ source='some_script.m',
+ target=['some_table.tex', 'some_figure.eps'],
+ deps='some_data.mat')
+"""
+
+import os, sys
+from waflib import Task, TaskGen, Logs
+
+MATLAB_COMMANDS = ['matlab']
+
def configure(ctx):
	"""Find the Matlab executable and set the batch-mode flags."""
	ctx.find_program(MATLAB_COMMANDS, var='MATLABCMD', errmsg = """\n
No Matlab executable found!\n\n
If Matlab is needed:\n
	1) Check the settings of your system path.
	2) Note we are looking for Matlab executables called: %s
	   If yours has a different name, please report to hmgaudecker [at] gmail\n
Else:\n
	Do not load the 'run_m_script' tool in the main wscript.\n\n""" % MATLAB_COMMANDS)
	# -wait/-minimize only matter on Windows; harmless elsewhere.
	ctx.env.MATLABFLAGS = '-wait -nojvm -nosplash -minimize'
+
class run_m_script_base(Task.Task):
	"""Run a Matlab script."""
	# The try/catch wrapper turns an uncaught Matlab error into exit(1)
	# so waf can detect failure; output is captured in ${LOGFILEPATH}.
	run_str = '"${MATLABCMD}" ${MATLABFLAGS} -logfile "${LOGFILEPATH}" -r "try, ${MSCRIPTTRUNK}, exit(0), catch err, disp(err.getReport()), exit(1), end"'
	shell = True
+
class run_m_script(run_m_script_base):
	"""Erase the Matlab overall log file if everything went okay, else raise an
	error and print its 10 last lines.
	"""
	def run(self):
		"""Run the script; on failure report the log tail, on success
		delete the log. Returns the underlying command's exit status."""
		ret = run_m_script_base.run(self)
		logfile = self.env.LOGFILEPATH
		if ret:
			mode = 'r'
			if sys.version_info.major >= 3:
				mode = 'rb'
			with open(logfile, mode=mode) as f:
				tail = f.readlines()[-10:]
			# In binary mode readlines() yields bytes; decode so the
			# '\n'.join() below cannot raise TypeError (str/bytes mix)
			# and mask the original Matlab error.
			tail = [l.decode('utf-8', 'replace') if isinstance(l, bytes) else l for l in tail]
			Logs.error("""Running Matlab on %r returned the error %r\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
				self.inputs[0], ret, logfile, '\n'.join(tail))
		else:
			os.remove(logfile)
		return ret
+
@TaskGen.feature('run_m_script')
@TaskGen.before_method('process_source')
def apply_run_m_script(tg):
	"""Task generator customising the options etc. to call Matlab in batch
	mode for running a m-script.
	"""
	# Resolve the script and its declared outputs to waf nodes.
	script_node = tg.path.find_resource(tg.source)
	out_nodes = [tg.path.find_or_declare(name) for name in tg.to_list(tg.target)]

	task = tg.create_task('run_m_script', src=script_node, tgt=out_nodes)
	# Matlab insists on running scripts from their own directory.
	task.cwd = script_node.parent.abspath()
	task.env.MSCRIPTTRUNK = os.path.splitext(script_node.name)[0]
	logname = '%s_%d.log' % (task.env.MSCRIPTTRUNK, tg.idx)
	task.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), logname)

	# Extra file dependencies: a change in any of them re-runs the script.
	for dep in tg.to_list(getattr(tg, 'deps', [])):
		dep_node = tg.path.find_resource(dep)
		if not dep_node:
			tg.bld.fatal('Could not find dependency %r for running %r' % (dep, script_node.abspath()))
		task.dep_nodes.append(dep_node)
	Logs.debug('deps: found dependencies %r for running %r', task.dep_nodes, script_node.abspath())

	# Prevent process_source from consuming the .m file a second time.
	tg.source = []
diff --git a/third_party/waf/waflib/extras/run_py_script.py b/third_party/waf/waflib/extras/run_py_script.py
new file mode 100644
index 00000000000..845a3ced798
--- /dev/null
+++ b/third_party/waf/waflib/extras/run_py_script.py
@@ -0,0 +1,108 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Hans-Martin von Gaudecker, 2012
+
+"""
+Run a Python script in the directory specified by **ctx.bldnode**.
+
+Select a Python version by specifying the **version** keyword for
+the task generator instance as integer 2 or 3. Default is 3.
+
+If the build environment has an attribute "PROJECT_PATHS" with
+a key "PROJECT_ROOT", its value will be appended to the PYTHONPATH.
+Same a string passed to the optional **add_to_pythonpath**
+keyword (appended after the PROJECT_ROOT).
+
+Usage::
+
+ ctx(features='run_py_script', version=3,
+ source='some_script.py',
+ target=['some_table.tex', 'some_figure.eps'],
+ deps='some_data.csv',
+ add_to_pythonpath='src/some/library')
+"""
+
+import os, re
+from waflib import Task, TaskGen, Logs
+
+
def configure(conf):
	"""TODO: Might need to be updated for Windows once
	"PEP 397":http://www.python.org/dev/peps/pep-0397/ is settled.
	"""
	# Each lookup is optional on its own; fail only if neither
	# interpreter is available.
	conf.find_program('python', var='PY2CMD', mandatory=False)
	conf.find_program('python3', var='PY3CMD', mandatory=False)
	if not conf.env.PY2CMD and not conf.env.PY3CMD:
		conf.fatal("No Python interpreter found!")
+
class run_py_2_script(Task.Task):
	"""Run a Python 2 script."""
	run_str = '${PY2CMD} ${SRC[0].abspath()}'
	shell=True
+
class run_py_3_script(Task.Task):
	"""Run a Python 3 script."""
	run_str = '${PY3CMD} ${SRC[0].abspath()}'
	shell=True
+
@TaskGen.feature('run_py_script')
@TaskGen.before_method('process_source')
def apply_run_py_script(tg):
	"""Task generator for running either Python 2 or Python 3 on a single
	script.

	Attributes:

		* source -- A **single** source node or string. (required)
		* target -- A single target or list of targets (nodes or strings)
		* deps -- A single dependency or list of dependencies (nodes or strings)
		* add_to_pythonpath -- A string that will be appended to the PYTHONPATH environment variable

	If the build environment has an attribute "PROJECT_PATHS" with
	a key "PROJECT_ROOT", its value will be appended to the PYTHONPATH.
	"""

	# Set the Python version to use, default to 3.
	v = getattr(tg, 'version', 3)
	if v not in (2, 3):
		raise ValueError("Specify the 'version' attribute for run_py_script task generator as integer 2 or 3.\n Got: %s" %v)

	# Convert sources and targets to nodes
	src_node = tg.path.find_resource(tg.source)
	tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]

	# Create the task.
	tsk = tg.create_task('run_py_%d_script' %v, src=src_node, tgt=tgt_nodes)

	# Custom execution environment: assemble PYTHONPATH as a list of
	# components and join once. This replaces the previous string
	# concatenation plus regex cleanup and drops empty components and a
	# leading separator in one pass.
	tsk.env.env = dict(os.environ)
	pythonpath = [p for p in tsk.env.env.get('PYTHONPATH', '').split(os.pathsep) if p]
	project_paths = getattr(tsk.env, 'PROJECT_PATHS', None)
	if project_paths and 'PROJECT_ROOT' in project_paths:
		pythonpath.append(project_paths['PROJECT_ROOT'].abspath())
	if getattr(tg, 'add_to_pythonpath', None):
		pythonpath.append(tg.add_to_pythonpath)
	tsk.env.env['PYTHONPATH'] = os.pathsep.join(pythonpath)

	# dependencies (if the attribute 'deps' changes, trigger a recompilation)
	for x in tg.to_list(getattr(tg, 'deps', [])):
		node = tg.path.find_resource(x)
		if not node:
			tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
		tsk.dep_nodes.append(node)
	Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())

	# Bypass the execution of process_source by setting the source to an empty list
	tg.source = []
+
diff --git a/third_party/waf/waflib/extras/run_r_script.py b/third_party/waf/waflib/extras/run_r_script.py
new file mode 100644
index 00000000000..8dd81825e9c
--- /dev/null
+++ b/third_party/waf/waflib/extras/run_r_script.py
@@ -0,0 +1,90 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Hans-Martin von Gaudecker, 2012
+
+"""
+Run a R script in the directory specified by **ctx.bldnode**.
+
+For error-catching purposes, keep an own log-file that is destroyed if the
+task finished without error. If not, it will show up as rscript_[index].log
+in the bldnode directory.
+
+Usage::
+
+ ctx(features='run_r_script',
+ source='some_script.r',
+ target=['some_table.tex', 'some_figure.eps'],
+ deps='some_data.csv')
+"""
+
+
+import os, sys
+from waflib import Task, TaskGen, Logs
+
+R_COMMANDS = ['RTerm', 'R', 'r']
+
def configure(ctx):
	"""Find the R executable and set the batch-mode flags."""
	ctx.find_program(R_COMMANDS, var='RCMD', errmsg = """\n
No R executable found!\n\n
If R is needed:\n
	1) Check the settings of your system path.
	2) Note we are looking for R executables called: %s
	   If yours has a different name, please report to hmgaudecker [at] gmail\n
Else:\n
	Do not load the 'run_r_script' tool in the main wscript.\n\n""" % R_COMMANDS)
	ctx.env.RFLAGS = 'CMD BATCH --slave'
+
class run_r_script_base(Task.Task):
	"""Run a R script."""
	# R CMD BATCH <script> <logfile>: all output goes to ${LOGFILEPATH}.
	run_str = '"${RCMD}" ${RFLAGS} "${SRC[0].abspath()}" "${LOGFILEPATH}"'
	shell = True
+
class run_r_script(run_r_script_base):
	"""Erase the R overall log file if everything went okay, else raise an
	error and print its 10 last lines.
	"""
	def run(self):
		"""Run the script; on failure report the log tail, on success
		delete the log. Returns the underlying command's exit status."""
		ret = run_r_script_base.run(self)
		logfile = self.env.LOGFILEPATH
		if ret:
			mode = 'r'
			if sys.version_info.major >= 3:
				mode = 'rb'
			with open(logfile, mode=mode) as f:
				tail = f.readlines()[-10:]
			# In binary mode readlines() yields bytes; decode so the
			# '\n'.join() below cannot raise TypeError (str/bytes mix)
			# and mask the original R error.
			tail = [l.decode('utf-8', 'replace') if isinstance(l, bytes) else l for l in tail]
			Logs.error("""Running R on %r returned the error %r\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
				self.inputs[0], ret, logfile, '\n'.join(tail))
		else:
			os.remove(logfile)
		return ret
+
+
@TaskGen.feature('run_r_script')
@TaskGen.before_method('process_source')
def apply_run_r_script(tg):
	"""Task generator customising the options etc. to call R in batch
	mode for running a R script.
	"""
	# Resolve the script and its declared outputs to waf nodes.
	script_node = tg.path.find_resource(tg.source)
	out_nodes = [tg.path.find_or_declare(name) for name in tg.to_list(tg.target)]

	task = tg.create_task('run_r_script', src=script_node, tgt=out_nodes)
	logname = '%s_%d.log' % (os.path.splitext(script_node.name)[0], tg.idx)
	task.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), logname)

	# Extra file dependencies: a change in any of them re-runs the script.
	for dep in tg.to_list(getattr(tg, 'deps', [])):
		dep_node = tg.path.find_resource(dep)
		if not dep_node:
			tg.bld.fatal('Could not find dependency %r for running %r' % (dep, script_node.abspath()))
		task.dep_nodes.append(dep_node)
	Logs.debug('deps: found dependencies %r for running %r', task.dep_nodes, script_node.abspath())

	# Prevent process_source from consuming the .r file a second time.
	tg.source = []
+
diff --git a/third_party/waf/waflib/extras/sas.py b/third_party/waf/waflib/extras/sas.py
new file mode 100644
index 00000000000..a7220068b96
--- /dev/null
+++ b/third_party/waf/waflib/extras/sas.py
@@ -0,0 +1,75 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Mark Coggeshall, 2010
+
+"SAS support"
+
+import os
+from waflib import Task, Errors, Logs
+from waflib.TaskGen import feature, before_method
+
# Pre-compiled command callable executed once per .sas input file;
# SRCFILE/LOGFILE/LSTFILE are filled in by sas.run() below.
sas_fun, _ = Task.compile_fun('sas -sysin ${SRCFILE} -log ${LOGFILE} -print ${LSTFILE}', shell=False)
+
class sas(Task.Task):
	# Re-run when the SAS program or its flags change.
	vars = ['SAS', 'SASFLAGS']
	def run(task):
		"""Run SAS on the single input file, producing sibling .log and
		.lst files. Returns the command's exit status."""
		command = 'SAS'
		fun = sas_fun

		node = task.inputs[0]
		logfilenode = node.change_ext('.log')
		lstfilenode = node.change_ext('.lst')

		# set the cwd
		task.cwd = task.inputs[0].parent.get_src().abspath()
		Logs.debug('runner: %r on %r', command, node)

		# SASINPUTS covers both the build and the source directory of the
		# input node so includes resolve from either location.
		SASINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep
		task.env.env = {'SASINPUTS': SASINPUTS}

		task.env.SRCFILE = node.abspath()
		task.env.LOGFILE = logfilenode.abspath()
		task.env.LSTFILE = lstfilenode.abspath()
		ret = fun(task)
		if ret:
			Logs.error('Running %s on %r returned a non-zero exit', command, node)
			Logs.error('SRCFILE = %r', node)
			Logs.error('LOGFILE = %r', logfilenode)
			Logs.error('LSTFILE = %r', lstfilenode)
		return ret
+
@feature('sas')
@before_method('process_source')
def apply_sas(self):
	"""Create one 'sas' task per source file and attach the extra file
	dependencies listed in the task generator's ``deps`` attribute.

	Raises Errors.WafError when a declared dependency cannot be found.
	"""
	# Idiomatic membership tests ('not in' instead of 'not x in y');
	# behaviour is unchanged.
	if getattr(self, 'type', None) not in ('sas',):
		self.type = 'sas'

	self.env['logdir'] = getattr(self, 'logdir', 'log')
	self.env['lstdir'] = getattr(self, 'lstdir', 'lst')

	deps_lst = []

	if getattr(self, 'deps', None):
		deps = self.to_list(self.deps)
		for filename in deps:
			# Look relative to the task generator first, then from the
			# filesystem root.
			n = self.path.find_resource(filename)
			if not n:
				n = self.bld.root.find_resource(filename)
			if not n:
				raise Errors.WafError('cannot find input file %s for processing' % filename)
			if n not in deps_lst:
				deps_lst.append(n)

	for node in self.to_nodes(self.source):
		if self.type == 'sas':
			task = self.create_task('sas', src=node)
			task.dep_nodes = deps_lst
	# Sources are fully consumed here; keep process_source from re-handling them.
	self.source = []
+
def configure(self):
	"""Detect the SAS executable (optional; tasks fail later if absent)."""
	self.find_program('sas', var='SAS', mandatory=False)
+
diff --git a/third_party/waf/waflib/extras/satellite_assembly.py b/third_party/waf/waflib/extras/satellite_assembly.py
new file mode 100644
index 00000000000..a9e1ddf6d62
--- /dev/null
+++ b/third_party/waf/waflib/extras/satellite_assembly.py
@@ -0,0 +1,61 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/python
+# encoding: utf-8
+# vim: tabstop=4 noexpandtab
+
+"""
+Create a satellite assembly from "*.??.txt" files. ?? stands for a language code.
+
+The projects Resources subfolder contains resources.??.txt string files for several languages.
+The build folder will hold the satellite assemblies as ./??/ExeName.resources.dll
+
+#gen becomes template (It is called gen because it also uses resx.py).
+bld(source='Resources/resources.de.txt',gen=ExeName)
+"""
+
+import os, re
+from waflib import Task
+from waflib.TaskGen import feature,before_method
+
class al(Task.Task):
	# Assembly Linker invocation; all arguments are accumulated in ALFLAGS
	# by the satellite_assembly task generator method below.
	run_str = '${AL} ${ALFLAGS}'
+
@feature('satellite_assembly')
@before_method('process_source')
def satellite_assembly(self):
	"""For every '*.<lang>.resx|txt' source, create a resgen task producing
	a .resources file and an 'al' task linking it into the satellite
	assembly ./<lang>/<template>.resources.dll. Consumed sources are
	removed from self.source."""
	if not getattr(self, 'gen', None):
		self.bld.fatal('satellite_assembly needs a template assembly provided with the "gen" parameter')
	res_lang = re.compile(r'(.*)\.(\w\w)\.(?:resx|txt)',flags=re.I)

	# self.source can contain node objects, so this will break in one way or another
	self.source = self.to_list(self.source)
	for i, x in enumerate(self.source):
		#x = 'resources/resources.de.resx'
		#x = 'resources/resources.de.txt'
		mo = res_lang.match(x)
		if mo:
			template = os.path.splitext(self.gen)[0]
			templatedir, templatename = os.path.split(template)
			res = mo.group(1)
			lang = mo.group(2)
			#./Resources/resources.de.resources
			resources = self.path.find_or_declare(res+ '.' + lang + '.resources')
			self.create_task('resgen', self.to_nodes(x), [resources])
			#./de/Exename.resources.dll
			satellite = self.path.find_or_declare(os.path.join(templatedir,lang,templatename) + '.resources.dll')
			tsk = self.create_task('al',[resources],[satellite])
			tsk.env.append_value('ALFLAGS','/template:'+os.path.join(self.path.relpath(),self.gen))
			tsk.env.append_value('ALFLAGS','/embed:'+resources.relpath())
			tsk.env.append_value('ALFLAGS','/culture:'+lang)
			tsk.env.append_value('ALFLAGS','/out:'+satellite.relpath())
			self.source[i] = None
	# Remove the consumed entries (comprehension instead of filter(lambda)).
	self.source = [x for x in self.source if x]
+
def configure(ctx):
	"""Find the Assembly Linker and load the resx tool used for resgen tasks."""
	ctx.find_program('al', var='AL', mandatory=True)
	ctx.load('resx')
+
diff --git a/third_party/waf/waflib/extras/scala.py b/third_party/waf/waflib/extras/scala.py
new file mode 100644
index 00000000000..bdc2bbbee59
--- /dev/null
+++ b/third_party/waf/waflib/extras/scala.py
@@ -0,0 +1,132 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010 (ita)
+
+"""
+Scala support
+
+scalac outputs files a bit where it wants to
+"""
+
+import os
+from waflib import Task, Utils, Node
+from waflib.TaskGen import feature, before_method, after_method
+
+from waflib.Tools import ccroot
+ccroot.USELIB_VARS['scalac'] = set(['CLASSPATH', 'SCALACFLAGS'])
+
+from waflib.Tools import javaw
+
@feature('scalac')
@before_method('process_source')
def apply_scalac(self):
	"""Create the scalac task, resolve the output directory and the list
	of source directories (srcdir) to nodes."""

	# Default attributes shared with the javaw tool.
	Utils.def_attrs(self, jarname='', classpath='',
		sourcepath='.', srcdir='.',
		jar_mf_attributes={}, jar_mf_classpath=[])

	outdir = getattr(self, 'outdir', None)
	if outdir:
		if not isinstance(outdir, Node.Node):
			outdir = self.path.get_bld().make_node(self.outdir)
	else:
		outdir = self.path.get_bld()
	outdir.mkdir()
	self.env['OUTDIR'] = outdir.abspath()

	self.scalac_task = tsk = self.create_task('scalac')
	tmp = []

	# srcdir may be a node, a string, or a list of either.
	srcdir = getattr(self, 'srcdir', '')
	if isinstance(srcdir, Node.Node):
		srcdir = [srcdir]
	for x in Utils.to_list(srcdir):
		if isinstance(x, Node.Node):
			y = x
		else:
			y = self.path.find_dir(x)
			if not y:
				self.bld.fatal('Could not find the folder %s from %s' % (x, self.path))
		tmp.append(y)
	tsk.srcdir = tmp
+
# reuse some code
# Bind the javaw helpers onto the 'scalac' feature: collect generated
# class files and compute the CLASSPATH after apply_scalac has run.
feature('scalac')(javaw.use_javac_files)
after_method('apply_scalac')(javaw.use_javac_files)

feature('scalac')(javaw.set_classpath)
# NOTE(review): no method is registered under the name 'use_scalac_files'
# (the reused method keeps its 'use_javac_files' name) -- presumably the
# extra constraint is simply ignored; verify against TaskGen ordering.
after_method('apply_scalac', 'use_scalac_files')(javaw.set_classpath)
+
+
# Glob pattern used to discover scala sources below each srcdir.
SOURCE_RE = '**/*.scala'
class scalac(javaw.javac):
	color = 'GREEN'
	vars = ['CLASSPATH', 'SCALACFLAGS', 'SCALAC', 'OUTDIR']

	def runnable_status(self):
		"""
		Wait for dependent tasks to be complete, then read the file system to find the input nodes.
		"""
		for t in self.run_after:
			if not t.hasrun:
				return Task.ASK_LATER

		if not self.inputs:
			# Reading the module-level SOURCE_RE needs no 'global' declaration.
			self.inputs = []
			for x in self.srcdir:
				self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False))
		return super(javaw.javac, self).runnable_status()

	def run(self):
		"""
		Execute the scalac compiler
		"""
		env = self.env
		gen = self.generator
		bld = gen.bld
		wd = bld.bldnode.abspath()
		def to_list(xx):
			if isinstance(xx, str):
				return [xx]
			return xx
		self.last_cmd = lst = []
		lst.extend(to_list(env['SCALAC']))
		lst.extend(['-classpath'])
		lst.extend(to_list(env['CLASSPATH']))
		lst.extend(['-d'])
		lst.extend(to_list(env['OUTDIR']))
		lst.extend(to_list(env['SCALACFLAGS']))
		lst.extend([a.abspath() for a in self.inputs])
		lst = [x for x in lst if x]
		try:
			self.out = self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None, output=0, quiet=0)[1]
		except Exception:
			# Narrowed from a bare 'except:' (which would also swallow
			# KeyboardInterrupt/SystemExit); re-run verbosely so the
			# compiler's error output reaches the user.
			self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None)
+
def configure(self):
	"""
	Detect the scalac program
	"""
	# If SCALA_HOME is set, we prepend it to the path list
	java_path = self.environ['PATH'].split(os.pathsep)
	v = self.env

	if 'SCALA_HOME' in self.environ:
		java_path = [os.path.join(self.environ['SCALA_HOME'], 'bin')] + java_path
		self.env['SCALA_HOME'] = [self.environ['SCALA_HOME']]

	for x in 'scalac scala'.split():
		self.find_program(x, var=x.upper(), path_list=java_path)

	# Inherit an explicitly-set CLASSPATH from the environment.
	if 'CLASSPATH' in self.environ:
		v['CLASSPATH'] = self.environ['CLASSPATH']

	v.SCALACFLAGS = ['-verbose']
	if not v['SCALAC']:
		self.fatal('scalac is required for compiling scala classes')
+
diff --git a/third_party/waf/waflib/extras/slow_qt4.py b/third_party/waf/waflib/extras/slow_qt4.py
new file mode 100644
index 00000000000..e3014cdf070
--- /dev/null
+++ b/third_party/waf/waflib/extras/slow_qt4.py
@@ -0,0 +1,100 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# Thomas Nagy, 2011 (ita)
+
+"""
+Create _moc.cpp files
+
+The builds are 30-40% faster when .moc files are included,
+you should NOT use this tool. If you really
+really want it:
+
+def configure(conf):
+ conf.load('compiler_cxx qt4')
+ conf.load('slow_qt4')
+
+See playground/slow_qt/wscript for a complete example.
+"""
+
+from waflib.TaskGen import extension
+from waflib import Task
+import waflib.Tools.qt4
+import waflib.Tools.cxx
+
@extension(*waflib.Tools.qt4.EXT_QT4)
def cxx_hook(self, node):
	# Compile Qt4 C++ sources with the moc-aware task class below
	# instead of the stock 'cxx' task.
	self.create_compiled_task('cxx_qt', node)
+
class cxx_qt(Task.classes['cxx']):
	"""C++ compile task that, once runnable, scans its input and header
	dependencies for Q_OBJECT and schedules the corresponding moc (and
	compile) tasks on the fly."""
	def runnable_status(self):
		ret = Task.classes['cxx'].runnable_status(self)
		# Only do the moc scan once per task, and only after the base
		# class has resolved the header dependencies.
		if ret != Task.ASK_LATER and not getattr(self, 'moc_done', None):

			# Per-task-generator cache so each moc output is created once.
			try:
				cache = self.generator.moc_cache
			except AttributeError:
				cache = self.generator.moc_cache = {}

			deps = self.generator.bld.node_deps[self.uid()]
			for x in [self.inputs[0]] + deps:
				if x.read().find('Q_OBJECT') > 0:

					# process "foo.h -> foo.moc" only if "foo.cpp" is in the sources for the current task generator
					# this code will work because it is in the main thread (runnable_status)
					if x.name.rfind('.') > -1: # a .h file...
						name = x.name[:x.name.rfind('.')]
						for tsk in self.generator.compiled_tasks:
							if tsk.inputs and tsk.inputs[0].name.startswith(name):
								break
						else:
							# no corresponding file, continue
							continue

					# the file foo.cpp could be compiled for a static and a shared library - hence the %number in the name
					cxx_node = x.parent.get_bld().make_node(x.name.replace('.', '_') + '_%d_moc.cpp' % self.generator.idx)
					if cxx_node in cache:
						continue
					cache[cxx_node] = self

					tsk = Task.classes['moc'](env=self.env, generator=self.generator)
					tsk.set_inputs(x)
					tsk.set_outputs(cxx_node)

					if x.name.endswith('.cpp'):
						# moc is trying to be too smart but it is too dumb:
						# why forcing the #include when Q_OBJECT is in the cpp file?
						gen = self.generator.bld.producer
						gen.outstanding.append(tsk)
						gen.total += 1
						self.set_run_after(tsk)
					else:
						# Header-based moc output gets its own compile task
						# whose object feeds into the final link.
						cxxtsk = Task.classes['cxx'](env=self.env, generator=self.generator)
						cxxtsk.set_inputs(tsk.outputs)
						cxxtsk.set_outputs(cxx_node.change_ext('.o'))
						cxxtsk.set_run_after(tsk)

						try:
							self.more_tasks.extend([tsk, cxxtsk])
						except AttributeError:
							self.more_tasks = [tsk, cxxtsk]

						try:
							link = self.generator.link_task
						except AttributeError:
							pass
						else:
							link.set_run_after(cxxtsk)
							link.inputs.extend(cxxtsk.outputs)
							# Keep link inputs in a deterministic order.
							link.inputs.sort(key=lambda x: x.abspath())

			self.moc_done = True

		# Wait for any moc tasks scheduled above before compiling.
		for t in self.run_after:
			if not t.hasrun:
				return Task.ASK_LATER

		return ret
+
diff --git a/third_party/waf/waflib/extras/smart_continue.py b/third_party/waf/waflib/extras/smart_continue.py
deleted file mode 100644
index 8c171a8d96c..00000000000
--- a/third_party/waf/waflib/extras/smart_continue.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#! /usr/bin/env python
-# Thomas Nagy, 2011
-
-# Try to cancel the tasks that cannot run with the option -k when an error occurs:
-# 1 direct file dependencies
-# 2 tasks listed in the before/after/ext_in/ext_out attributes
-
-from waflib import Task, Runner
-
-Task.CANCELED = 4
-
-def cancel_next(self, tsk):
- if not isinstance(tsk, Task.TaskBase):
- return
- if tsk.hasrun >= Task.SKIPPED:
- # normal execution, no need to do anything here
- return
-
- try:
- canceled_tasks, canceled_nodes = self.canceled_tasks, self.canceled_nodes
- except AttributeError:
- canceled_tasks = self.canceled_tasks = set([])
- canceled_nodes = self.canceled_nodes = set([])
-
- try:
- canceled_nodes.update(tsk.outputs)
- except AttributeError:
- pass
-
- try:
- canceled_tasks.add(tsk)
- except AttributeError:
- pass
-
-def get_out(self):
- tsk = self.out.get()
- if not self.stop:
- self.add_more_tasks(tsk)
- self.count -= 1
- self.dirty = True
- self.cancel_next(tsk) # new code
-
-def error_handler(self, tsk):
- if not self.bld.keep:
- self.stop = True
- self.error.append(tsk)
- self.cancel_next(tsk) # new code
-
-Runner.Parallel.cancel_next = cancel_next
-Runner.Parallel.get_out = get_out
-Runner.Parallel.error_handler = error_handler
-
-def get_next_task(self):
- tsk = self.get_next_task_smart_continue()
- if not tsk:
- return tsk
-
- try:
- canceled_tasks, canceled_nodes = self.canceled_tasks, self.canceled_nodes
- except AttributeError:
- pass
- else:
- # look in the tasks that this one is waiting on
- # if one of them was canceled, cancel this one too
- for x in tsk.run_after:
- if x in canceled_tasks:
- tsk.hasrun = Task.CANCELED
- self.cancel_next(tsk)
- break
- else:
- # so far so good, now consider the nodes
- for x in getattr(tsk, 'inputs', []) + getattr(tsk, 'deps', []):
- if x in canceled_nodes:
- tsk.hasrun = Task.CANCELED
- self.cancel_next(tsk)
- break
- return tsk
-
-Runner.Parallel.get_next_task_smart_continue = Runner.Parallel.get_next_task
-Runner.Parallel.get_next_task = get_next_task
diff --git a/third_party/waf/waflib/extras/softlink_libs.py b/third_party/waf/waflib/extras/softlink_libs.py
new file mode 100644
index 00000000000..ed453848786
--- /dev/null
+++ b/third_party/waf/waflib/extras/softlink_libs.py
@@ -0,0 +1,80 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# per rosengren 2011
+
+from waflib.TaskGen import feature, after_method
+from waflib.Task import Task, always_run
+from os.path import basename, isabs
+from os import tmpfile, linesep
+
def options(opt):
	"""Add the --exclude option controlling which library prefixes are skipped."""
	grp = opt.add_option_group('Softlink Libraries Options')
	grp.add_option('--exclude', default='/usr/lib,/lib', help='No symbolic links are created for libs within [%default]')
+
def configure(cnf):
	"""Find ldd and store the exclusion prefixes from --exclude."""
	cnf.find_program('ldd')
	if not cnf.env.SOFTLINK_EXCLUDE:
		cnf.env.SOFTLINK_EXCLUDE = cnf.options.exclude.split(',')
+
@feature('softlink_libs')
@after_method('process_rule')
def add_finder(self):
	"""Create the finder/installer task pair for the 'softlink_libs' feature."""
	tgt = self.path.find_or_declare(self.target)
	self.create_task('sll_finder', tgt=tgt)
	self.create_task('sll_installer', tgt=tgt)
	# NOTE(review): decorates the class on every call and references
	# sll_installer before its definition below -- this works because it
	# runs at call time, but applying always_run once at class definition
	# would be cleaner; verify before changing.
	always_run(sll_installer)
+
class sll_finder(Task):
	"""Run ldd over every linked binary in the build and write the sorted
	list of external shared libraries they resolve to into the output file."""
	ext_out = 'softlink_libs'
	def run(self):
		# Local import: os.tmpfile() (used previously) was removed in
		# Python 3; tempfile.TemporaryFile is the portable replacement.
		# The stale 'from os import tmpfile' at the top of this file
		# should be dropped once nothing else references it.
		import tempfile
		bld = self.generator.bld
		linked=[]
		target_paths = []
		for g in bld.groups:
			for tgen in g:
				# FIXME it might be better to check if there is a link_task (getattr?)
				target_paths += [tgen.path.get_bld().bldpath()]
				linked += [t.outputs[0].bldpath()
					for t in getattr(tgen, 'tasks', [])
					if t.__class__.__name__ in
					['cprogram', 'cshlib', 'cxxprogram', 'cxxshlib']]
		lib_list = []
		if len(linked):
			cmd = [self.env.LDD] + linked
			# FIXME add DYLD_LIBRARY_PATH+PATH for osx+win32
			ldd_env = {'LD_LIBRARY_PATH': ':'.join(target_paths + self.env.LIBPATH)}
			# Text mode so readlines() yields str on Python 3 as well.
			with tempfile.TemporaryFile(mode='w+') as result:
				self.exec_command(cmd, env=ldd_env, stdout=result)
				result.seek(0)
				# Each ldd line of interest looks like: name => /path (addr)
				for line in result.readlines():
					words = line.split()
					if len(words) < 3 or words[1] != '=>':
						continue
					lib = words[2]
					if lib == 'not':
						continue
					if any([lib.startswith(p) for p in
							[bld.bldnode.abspath(), '('] +
							self.env.SOFTLINK_EXCLUDE]):
						continue
					if not isabs(lib):
						continue
					lib_list.append(lib)
			lib_list = sorted(set(lib_list))
		self.outputs[0].write(linesep.join(lib_list + self.env.DYNAMIC_LIBS))
		return 0
+
class sll_installer(Task):
	"""Install the library list file and create a ${LIBDIR} symlink for
	every library path it names."""
	ext_in = 'softlink_libs'
	def run(self):
		tgt = self.outputs[0]
		# postpone=False: perform the installation immediately.
		self.generator.bld.install_files('${LIBDIR}', tgt, postpone=False)
		lib_list=tgt.read().split()
		for lib in lib_list:
			self.generator.bld.symlink_as('${LIBDIR}/'+basename(lib), lib, postpone=False)
		return 0
+
diff --git a/third_party/waf/waflib/extras/stale.py b/third_party/waf/waflib/extras/stale.py
index a1e63ee58f0..991af82cd61 100644
--- a/third_party/waf/waflib/extras/stale.py
+++ b/third_party/waf/waflib/extras/stale.py
@@ -1,4 +1,8 @@
#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
# encoding: UTF-8
# Thomas Nagy, 2006-2015 (ita)
@@ -13,6 +17,8 @@ a full 'waf clean'
Of course, it will only work if there are no dynamically generated
nodes/tasks, in which case the method will have to be modified
to exclude some folders for example.
+
+Make sure to set bld.post_mode = waflib.Build.POST_AT_ONCE
"""
from waflib import Logs, Build
@@ -59,7 +65,7 @@ def stale_rec(node, nodes):
else:
if not node in nodes:
if can_delete(node):
- Logs.warn("Removing stale file -> %s" % node.abspath())
+ Logs.warn('Removing stale file -> %r', node)
node.delete()
old = Parallel.refill_task_list
@@ -73,24 +79,24 @@ def refill_task_list(self):
self.stale_done = True
# this does not work in partial builds
- if hasattr(bld, 'options') and bld.options.targets and bld.options.targets != '*':
+ if bld.targets != '*':
return iit
# this does not work in dynamic builds
- if not hasattr(bld, 'post_mode') or bld.post_mode == Build.POST_LAZY:
+ if getattr(bld, 'post_mode') == Build.POST_AT_ONCE:
return iit
# obtain the nodes to use during the build
nodes = []
- for i in range(len(bld.groups)):
- tasks = bld.get_tasks_group(i)
+ for tasks in bld.groups:
for x in tasks:
try:
nodes.extend(x.outputs)
- except:
+ except AttributeError:
pass
stale_rec(bld.bldnode, nodes)
return iit
Parallel.refill_task_list = refill_task_list
+
diff --git a/third_party/waf/waflib/extras/stracedeps.py b/third_party/waf/waflib/extras/stracedeps.py
index f9581a9e908..4be44248cb6 100644
--- a/third_party/waf/waflib/extras/stracedeps.py
+++ b/third_party/waf/waflib/extras/stracedeps.py
@@ -1,3 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2015 (ita)
@@ -66,11 +70,8 @@ def get_strace_args(self):
@task_method
def exec_command(self, cmd, **kw):
bld = self.generator.bld
- try:
- if not kw.get('cwd', None):
- kw['cwd'] = bld.cwd
- except AttributeError:
- bld.cwd = kw['cwd'] = bld.variant_dir
+ if not 'cwd' in kw:
+ kw['cwd'] = self.get_cwd()
args = self.get_strace_args()
fname = self.get_strace_file()
@@ -103,6 +104,9 @@ def parse_strace_deps(self, path, cwd):
except OSError:
pass
+ if not isinstance(cwd, str):
+ cwd = cwd.abspath()
+
nodes = []
bld = self.generator.bld
try:
@@ -114,7 +118,7 @@ def parse_strace_deps(self, path, cwd):
pid_to_cwd = {}
global BANNED
- done = set([])
+ done = set()
for m in re.finditer(re_lines, cnt):
# scraping the output of strace
pid = m.group('pid')
@@ -162,7 +166,7 @@ def parse_strace_deps(self, path, cwd):
# record the dependencies then force the task signature recalculation for next time
if Logs.verbose:
- Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))
+ Logs.debug('deps: real scanner for %r returned %r', self, nodes)
bld = self.generator.bld
bld.node_deps[self.uid()] = nodes
bld.raw_deps[self.uid()] = []
@@ -171,3 +175,4 @@ def parse_strace_deps(self, path, cwd):
except AttributeError:
pass
self.signature()
+
diff --git a/third_party/waf/waflib/extras/swig.py b/third_party/waf/waflib/extras/swig.py
index b654db7f211..18d665e5d46 100644
--- a/third_party/waf/waflib/extras/swig.py
+++ b/third_party/waf/waflib/extras/swig.py
@@ -1,11 +1,15 @@
#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
# encoding: UTF-8
# Petar Forai
# Thomas Nagy 2008-2010 (ita)
import re
from waflib import Task, Logs
-from waflib.TaskGen import extension
+from waflib.TaskGen import extension, feature, after_method
from waflib.Configure import conf
from waflib.Tools import c_preproc
@@ -64,6 +68,7 @@ class swig(Task.Task):
lst_src = []
seen = []
+ missing = []
to_see = [self.inputs[0]]
while to_see:
@@ -87,14 +92,14 @@ class swig(Task.Task):
to_see.append(u)
break
else:
- Logs.warn('could not find %r' % n)
-
- return (lst_src, [])
+ missing.append(n)
+ return (lst_src, missing)
# provide additional language processing
swig_langs = {}
def swigf(fun):
swig_langs[fun.__name__.replace('swig_', '')] = fun
+ return fun
swig.swigf = swigf
def swig_c(self):
@@ -112,7 +117,7 @@ def swig_c(self):
c_tsk.set_run_after(self)
ge = self.generator.bld.producer
- ge.outstanding.insert(0, c_tsk)
+ ge.outstanding.append(c_tsk)
ge.total += 1
try:
@@ -121,7 +126,11 @@ def swig_c(self):
pass
else:
ltask.set_run_after(c_tsk)
+ # setting input nodes does not declare the build order
+ # because the build already started
ltask.inputs.append(c_tsk.outputs[0])
+ # set the build order after the build started:
+ ge.revdeps[c_tsk].add(ltask)
self.outputs.append(out_node)
@@ -160,19 +169,67 @@ def i_file(self, node):
outdir.mkdir()
tsk.outdir = outdir
+@feature('c', 'cxx', 'd', 'fc', 'asm')
+@after_method('apply_link', 'process_source')
+def enforce_swig_before_link(self):
+ try:
+ link_task = self.link_task
+ except AttributeError:
+ pass
+ else:
+ for x in self.tasks:
+ if x.__class__.__name__ == 'swig':
+ link_task.run_after.add(x)
+
@conf
-def check_swig_version(self):
- """Returns a tuple representing the swig version, like (1,3,28)"""
+def check_swig_version(conf, minver=None):
+ """
+ Check if the swig tool is found matching a given minimum version.
+ minver should be a tuple, eg. to check for swig >= 1.3.28 pass (1,3,28) as minver.
+
+ If successful, SWIG_VERSION is defined as 'MAJOR.MINOR'
+ (eg. '1.3') of the actual swig version found.
+
+ :param minver: minimum version
+ :type minver: tuple of int
+ :return: swig version
+ :rtype: tuple of int
+ """
+ assert minver is None or isinstance(minver, tuple)
+ swigbin = conf.env['SWIG']
+ if not swigbin:
+ conf.fatal('could not find the swig executable')
+
+ # Get swig version string
+ cmd = swigbin + ['-version']
+ Logs.debug('swig: Running swig command %r', cmd)
reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)
- swig_out = self.cmd_and_log(self.env.SWIG + ['-version'])
+ swig_out = conf.cmd_and_log(cmd)
+ swigver_tuple = tuple([int(s) for s in reg_swig.findall(swig_out)[0].split('.')])
+
+ # Compare swig version with the minimum required
+ result = (minver is None) or (swigver_tuple >= minver)
- swigver = tuple([int(s) for s in reg_swig.findall(swig_out)[0].split('.')])
- self.env['SWIG_VERSION'] = swigver
- msg = 'Checking for swig version'
- self.msg(msg, '.'.join(map(str, swigver)))
- return swigver
+ if result:
+ # Define useful environment variables
+ swigver = '.'.join([str(x) for x in swigver_tuple[:2]])
+ conf.env['SWIG_VERSION'] = swigver
+
+ # Feedback
+ swigver_full = '.'.join(map(str, swigver_tuple[:3]))
+ if minver is None:
+ conf.msg('Checking for swig version', swigver_full)
+ else:
+ minver_str = '.'.join(map(str, minver))
+ conf.msg('Checking for swig version >= %s' % (minver_str,), swigver_full, color=result and 'GREEN' or 'YELLOW')
+
+ if not result:
+ conf.fatal('The swig version is too old, expecting %r' % (minver,))
+
+ return swigver_tuple
def configure(conf):
conf.find_program('swig', var='SWIG')
conf.env.SWIGPATH_ST = '-I%s'
conf.env.SWIGDEF_ST = '-D%s'
+
diff --git a/third_party/waf/waflib/extras/syms.py b/third_party/waf/waflib/extras/syms.py
index d2efd993452..05fff9f59a2 100644
--- a/third_party/waf/waflib/extras/syms.py
+++ b/third_party/waf/waflib/extras/syms.py
@@ -1,8 +1,12 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
"""
-this tool supports the export_symbols_regex to export the symbols in a shared library.
+This tool supports the export_symbols_regex to export the symbols in a shared library.
by default, all symbols are exported by gcc, and nothing by msvc.
to use the tool, do something like:
@@ -12,7 +16,6 @@ def build(ctx):
only the symbols starting with 'mylib_' will be exported.
"""
-import os
import re
from waflib.Context import STDOUT
from waflib.Task import Task
@@ -23,26 +26,20 @@ class gen_sym(Task):
def run(self):
obj = self.inputs[0]
kw = {}
- if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
- re_nm = re.compile(r'External\s+\|\s+_(' + self.generator.export_symbols_regex + r')\b')
+ reg = getattr(self.generator, 'export_symbols_regex', '.+?')
+ if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
+ re_nm = re.compile(r'External\s+\|\s+_(?P<symbol>%s)\b' % reg)
cmd = (self.env.DUMPBIN or ['dumpbin']) + ['/symbols', obj.abspath()]
-
- # Dumpbin requires custom environment sniffed out by msvc.py earlier
- if self.env['PATH']:
- env = dict(self.env.env or os.environ)
- env.update(PATH = os.pathsep.join(self.env['PATH']))
- kw['env'] = env
-
else:
if self.env.DEST_BINFMT == 'pe': #gcc uses nm, and has a preceding _ on windows
- re_nm = re.compile(r'T\s+_(' + self.generator.export_symbols_regex + r')\b')
+ re_nm = re.compile(r'(T|D)\s+_(?P<symbol>%s)\b' % reg)
elif self.env.DEST_BINFMT=='mac-o':
- re_nm=re.compile(r'T\s+(_?'+self.generator.export_symbols_regex+r')\b')
+ re_nm=re.compile(r'(T|D)\s+(?P<symbol>_?%s)\b' % reg)
else:
- re_nm = re.compile(r'T\s+(' + self.generator.export_symbols_regex + r')\b')
- cmd = [self.env.NM[0] or 'nm', '-g', obj.abspath()]
- syms = re_nm.findall(self.generator.bld.cmd_and_log(cmd, quiet=STDOUT, **kw))
+ re_nm = re.compile(r'(T|D)\s+(?P<symbol>%s)\b' % reg)
+ cmd = (self.env.NM or ['nm']) + ['-g', obj.abspath()]
+ syms = [m.group('symbol') for m in re_nm.finditer(self.generator.bld.cmd_and_log(cmd, quiet=STDOUT, **kw))]
self.outputs[0].write('%r' % syms)
class compile_sym(Task):
@@ -64,23 +61,28 @@ class compile_sym(Task):
raise WafError('NotImplemented')
@feature('syms')
-@after_method('process_source', 'process_use', 'apply_link', 'process_uselib_local')
+@after_method('process_source', 'process_use', 'apply_link', 'process_uselib_local', 'propagate_uselib_vars')
def do_the_symbol_stuff(self):
- ins = [x.outputs[0] for x in self.compiled_tasks]
- self.gen_sym_tasks = [self.create_task('gen_sym', x, x.change_ext('.%d.sym' % self.idx)) for x in ins]
+ def_node = self.path.find_or_declare(getattr(self, 'sym_file', self.target + '.def'))
+ compiled_tasks = getattr(self, 'compiled_tasks', None)
+ if compiled_tasks:
+ ins = [x.outputs[0] for x in compiled_tasks]
+ self.gen_sym_tasks = [self.create_task('gen_sym', x, x.change_ext('.%d.sym' % self.idx)) for x in ins]
+ self.create_task('compile_sym', [x.outputs[0] for x in self.gen_sym_tasks], def_node)
+
+ link_task = getattr(self, 'link_task', None)
+ if link_task:
+ self.link_task.dep_nodes.append(def_node)
+
+ if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
+ self.link_task.env.append_value('LINKFLAGS', ['/def:' + def_node.bldpath()])
+ elif self.env.DEST_BINFMT == 'pe':
+ # gcc on windows takes *.def as an additional input
+ self.link_task.inputs.append(def_node)
+ elif self.env.DEST_BINFMT == 'elf':
+ self.link_task.env.append_value('LINKFLAGS', ['-Wl,-version-script', '-Wl,' + def_node.bldpath()])
+ elif self.env.DEST_BINFMT=='mac-o':
+ self.link_task.env.append_value('LINKFLAGS',['-Wl,-exported_symbols_list,' + def_node.bldpath()])
+ else:
+ raise WafError('NotImplemented')
- tsk = self.create_task('compile_sym',
- [x.outputs[0] for x in self.gen_sym_tasks],
- self.path.find_or_declare(getattr(self, 'sym_filename', self.target + '.def')))
- self.link_task.set_run_after(tsk)
- self.link_task.dep_nodes.append(tsk.outputs[0])
- if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
- self.link_task.env.append_value('LINKFLAGS', ['/def:' + tsk.outputs[0].bldpath()])
- elif self.env.DEST_BINFMT == 'pe': #gcc on windows takes *.def as an additional input
- self.link_task.inputs.append(tsk.outputs[0])
- elif self.env.DEST_BINFMT == 'elf':
- self.link_task.env.append_value('LINKFLAGS', ['-Wl,-version-script', '-Wl,' + tsk.outputs[0].bldpath()])
- elif self.env.DEST_BINFMT=='mac-o':
- self.link_task.env.append_value('LINKFLAGS',['-Wl,-exported_symbols_list,'+tsk.outputs[0].bldpath()])
- else:
- raise WafError('NotImplemented')
diff --git a/third_party/waf/waflib/extras/sync_exec.py b/third_party/waf/waflib/extras/sync_exec.py
deleted file mode 100644
index ba241fc69b4..00000000000
--- a/third_party/waf/waflib/extras/sync_exec.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-"""
-This tool is obsolete, the sync_exec feature is now the default
-"""
-
-pass
diff --git a/third_party/waf/waflib/extras/ticgt.py b/third_party/waf/waflib/extras/ticgt.py
new file mode 100644
index 00000000000..117585cecae
--- /dev/null
+++ b/third_party/waf/waflib/extras/ticgt.py
@@ -0,0 +1,304 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+
+# Texas Instruments code generator support (experimental)
+# When reporting issues, please directly assign the bug to the maintainer.
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2012"
+
+"""
+TI cgt6x is a compiler suite for TI DSPs.
+
+The toolchain does pretty weird things, and I'm sure I'm missing some of them.
+But still, the tool saves time.
+
+What this tool does is:
+
+- create a TI compiler environment
+- create TI compiler features, to handle some specifics about this compiler
+ It has a few idiosyncrasies, such as not giving the liberty of the .o file names
+- automatically activate them when using the TI compiler
+- handle the tconf tool
+ The tool
+
+TODO:
+
+- the set_platform_flags() function is not nice
+- more tests
+- broaden tool scope, if needed
+
+"""
+
+import os, re
+
+from waflib import Options, Utils, Task, TaskGen
+from waflib.Tools import c, ccroot, c_preproc
+from waflib.Configure import conf
+from waflib.TaskGen import feature, before_method
+from waflib.Tools.c import cprogram
+
+opj = os.path.join
+
+@conf
+def find_ticc(conf):
+ conf.find_program(['cl6x'], var='CC', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin'))
+ conf.env.CC_NAME = 'ticc'
+
+@conf
+def find_tild(conf):
+ conf.find_program(['lnk6x'], var='LINK_CC', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin'))
+ conf.env.LINK_CC_NAME = 'tild'
+
+@conf
+def find_tiar(conf):
+ conf.find_program(['ar6x'], var='AR', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin'))
+ conf.env.AR_NAME = 'tiar'
+ conf.env.ARFLAGS = 'qru'
+
+@conf
+def ticc_common_flags(conf):
+ v = conf.env
+
+ if not v['LINK_CC']:
+ v['LINK_CC'] = v['CC']
+ v['CCLNK_SRC_F'] = []
+ v['CCLNK_TGT_F'] = ['-o']
+ v['CPPPATH_ST'] = '-I%s'
+ v['DEFINES_ST'] = '-d%s'
+
+ v['LIB_ST'] = '-l%s' # template for adding libs
+ v['LIBPATH_ST'] = '-i%s' # template for adding libpaths
+ v['STLIB_ST'] = '-l=%s.lib'
+ v['STLIBPATH_ST'] = '-i%s'
+
+ # program
+ v['cprogram_PATTERN'] = '%s.out'
+
+ # static lib
+ #v['LINKFLAGS_cstlib'] = ['-Wl,-Bstatic']
+ v['cstlib_PATTERN'] = '%s.lib'
+
+def configure(conf):
+ v = conf.env
+ v.TI_CGT_DIR = getattr(Options.options, 'ti-cgt-dir', "")
+ v.TI_DSPLINK_DIR = getattr(Options.options, 'ti-dsplink-dir', "")
+ v.TI_BIOSUTILS_DIR = getattr(Options.options, 'ti-biosutils-dir', "")
+ v.TI_DSPBIOS_DIR = getattr(Options.options, 'ti-dspbios-dir', "")
+ v.TI_XDCTOOLS_DIR = getattr(Options.options, 'ti-xdctools-dir', "")
+ conf.find_ticc()
+ conf.find_tiar()
+ conf.find_tild()
+ conf.ticc_common_flags()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
+ conf.find_program(['tconf'], var='TCONF', path_list=v.TI_XDCTOOLS_DIR)
+
+ conf.env.TCONF_INCLUDES += [
+ opj(conf.env.TI_DSPBIOS_DIR, 'packages'),
+ ]
+
+ conf.env.INCLUDES += [
+ opj(conf.env.TI_CGT_DIR, 'include'),
+ ]
+
+ conf.env.LIBPATH += [
+ opj(conf.env.TI_CGT_DIR, "lib"),
+ ]
+
+ conf.env.INCLUDES_DSPBIOS += [
+ opj(conf.env.TI_DSPBIOS_DIR, 'packages', 'ti', 'bios', 'include'),
+ ]
+
+ conf.env.LIBPATH_DSPBIOS += [
+ opj(conf.env.TI_DSPBIOS_DIR, 'packages', 'ti', 'bios', 'lib'),
+ ]
+
+ conf.env.INCLUDES_DSPLINK += [
+ opj(conf.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc'),
+ ]
+
+@conf
+def ti_set_debug(cfg, debug=1):
+ """
+ Sets debug flags for the compiler.
+
+ TODO:
+ - for each TI CFLAG/INCLUDES/LINKFLAGS/LIBPATH replace RELEASE by DEBUG
+ - -g --no_compress
+ """
+ if debug:
+ cfg.env.CFLAGS += "-d_DEBUG -dDEBUG -dDDSP_DEBUG".split()
+
+@conf
+def ti_dsplink_set_platform_flags(cfg, splat, dsp, dspbios_ver, board):
+ """
+ Sets the INCLUDES and LINKFLAGS for DSPLINK, and the TCONF_INCLUDES,
+ for the specific hardware.
+
+ Assumes that DSPLINK was built in its own folder.
+
+ :param splat: short platform name (eg. OMAPL138)
+ :param dsp: DSP name (eg. 674X)
+ :param dspbios_ver: string identifying DspBios version (eg. 5.XX)
+ :param board: board name (eg. OMAPL138GEM)
+
+ """
+ d1 = opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', 'DspBios', dspbios_ver)
+ d = opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', 'DspBios', dspbios_ver, board)
+ cfg.env.TCONF_INCLUDES += [d1, d]
+ cfg.env.INCLUDES_DSPLINK += [
+ opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', dsp),
+ d,
+ ]
+
+ cfg.env.LINKFLAGS_DSPLINK += [
+ opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'export', 'BIN', 'DspBios', splat, board+'_0', 'RELEASE', 'dsplink%s.lib' % x)
+ for x in ('', 'pool', 'mpcs', 'mplist', 'msg', 'data', 'notify', 'ringio')
+ ]
+
+
+def options(opt):
+ opt.add_option('--with-ti-cgt', type='string', dest='ti-cgt-dir', help = 'Specify alternate cgt root folder', default="")
+ opt.add_option('--with-ti-biosutils', type='string', dest='ti-biosutils-dir', help = 'Specify alternate biosutils folder', default="")
+ opt.add_option('--with-ti-dspbios', type='string', dest='ti-dspbios-dir', help = 'Specify alternate dspbios folder', default="")
+ opt.add_option('--with-ti-dsplink', type='string', dest='ti-dsplink-dir', help = 'Specify alternate dsplink folder', default="")
+ opt.add_option('--with-ti-xdctools', type='string', dest='ti-xdctools-dir', help = 'Specify alternate xdctools folder', default="")
+
+class ti_cprogram(cprogram):
+ """
+ Link object files into a c program
+
+ Changes:
+
+ - the linked executable to have a relative path (because we can)
+ - put the LIBPATH first
+ """
+ run_str = '${LINK_CC} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].bldpath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} '
+
+@feature("c")
+@before_method('apply_link')
+def use_ti_cprogram(self):
+ """
+ Automatically uses ti_cprogram link process
+ """
+ if 'cprogram' in self.features and self.env.CC_NAME == 'ticc':
+ self.features.insert(0, "ti_cprogram")
+
+class ti_c(Task.Task):
+ """
+ Compile task for the TI codegen compiler
+
+ This compiler does not allow specifying the output file name, only the output path.
+
+ """
+ "Compile C files into object files"
+ run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${SRC} -c ${OUT} ${CPPFLAGS}'
+ vars = ['CCDEPS'] # unused variable to depend on, just in case
+ ext_in = ['.h'] # set the build order easily by using ext_out=['.h']
+ scan = c_preproc.scan
+
+def create_compiled_task(self, name, node):
+ """
+ Overrides ccroot.create_compiled_task to support ti_c
+ """
+ out = '%s' % (node.change_ext('.obj').name)
+ if self.env.CC_NAME == 'ticc':
+ name = 'ti_c'
+ task = self.create_task(name, node, node.parent.find_or_declare(out))
+ self.env.OUT = '-fr%s' % (node.parent.get_bld().abspath())
+ try:
+ self.compiled_tasks.append(task)
+ except AttributeError:
+ self.compiled_tasks = [task]
+ return task
+
+@TaskGen.extension('.c')
+def c_hook(self, node):
+ "Bind the c file extension to the creation of a :py:class:`waflib.Tools.c.c` instance"
+ if self.env.CC_NAME == 'ticc':
+ return create_compiled_task(self, 'ti_c', node)
+ else:
+ return self.create_compiled_task('c', node)
+
+
+@feature("ti-tconf")
+@before_method('process_source')
+def apply_tconf(self):
+ sources = [x.get_src() for x in self.to_nodes(self.source, path=self.path.get_src())]
+ node = sources[0]
+ assert(sources[0].name.endswith(".tcf"))
+ if len(sources) > 1:
+ assert(sources[1].name.endswith(".cmd"))
+
+ target = getattr(self, 'target', self.source)
+ target_node = node.get_bld().parent.find_or_declare(node.name)
+
+ procid = "%d" % int(getattr(self, 'procid', 0))
+
+ importpaths = []
+ includes = Utils.to_list(getattr(self, 'includes', []))
+ for x in includes + self.env.TCONF_INCLUDES:
+ if x == os.path.abspath(x):
+ importpaths.append(x)
+ else:
+ relpath = self.path.find_node(x).path_from(target_node.parent)
+ importpaths.append(relpath)
+
+ task = self.create_task('ti_tconf', sources, target_node.change_ext('.cdb'))
+ task.path = self.path
+ task.includes = includes
+ task.cwd = target_node.parent.abspath()
+ task.env = self.env.derive()
+ task.env["TCONFSRC"] = node.path_from(target_node.parent)
+ task.env["TCONFINC"] = '-Dconfig.importPath=%s' % ";".join(importpaths)
+ task.env['TCONFPROGNAME'] = '-Dconfig.programName=%s' % target
+ task.env['PROCID'] = procid
+ task.outputs = [
+ target_node.change_ext("cfg_c.c"),
+ target_node.change_ext("cfg.s62"),
+ target_node.change_ext("cfg.cmd"),
+ ]
+
+ create_compiled_task(self, 'ti_c', task.outputs[1])
+ ctask = create_compiled_task(self, 'ti_c', task.outputs[0])
+ ctask.env = self.env.derive()
+
+ self.add_those_o_files(target_node.change_ext("cfg.cmd"))
+ if len(sources) > 1:
+ self.add_those_o_files(sources[1])
+ self.source = []
+
+re_tconf_include = re.compile(r'(?P<type>utils\.importFile)\("(?P<file>.*)"\)',re.M)
+class ti_tconf(Task.Task):
+ run_str = '${TCONF} ${TCONFINC} ${TCONFPROGNAME} ${TCONFSRC} ${PROCID}'
+ color = 'PINK'
+
+ def scan(self):
+ includes = Utils.to_list(getattr(self, 'includes', []))
+
+ def deps(node):
+ nodes, names = [], []
+ if node:
+ code = Utils.readf(node.abspath())
+ for match in re_tconf_include.finditer(code):
+ path = match.group('file')
+ if path:
+ for x in includes:
+ filename = opj(x, path)
+ fi = self.path.find_resource(filename)
+ if fi:
+ subnodes, subnames = deps(fi)
+ nodes += subnodes
+ names += subnames
+ nodes.append(fi)
+ names.append(path)
+ break
+ return nodes, names
+ return deps(self.inputs[0])
+
diff --git a/third_party/waf/waflib/extras/unc.py b/third_party/waf/waflib/extras/unc.py
deleted file mode 100644
index e630c2a7d05..00000000000
--- a/third_party/waf/waflib/extras/unc.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2014 (ita)
-
-"""
-This module enables automatic handling of network paths of the form \\server\share for both input
-and output files. While a typical script may require the following::
-
- import os
- def build(bld):
-
- node = bld.root.make_node('\\\\COMPUTER\\share\\test.txt')
-
- # mark the server/share levels as folders
- k = node.parent
- while k:
- k.cache_isdir = True
- k = k.parent
-
- # clear the file if removed
- if not os.path.isfile(node.abspath()):
- node.sig = None
-
- # create the folder structure
- if node.parent.height() > 2:
- node.parent.mkdir()
-
- # then the task generator
- def myfun(tsk):
- tsk.outputs[0].write("data")
- bld(rule=myfun, source='wscript', target=[nd])
-
-this tool will make the process much easier, for example::
-
- def configure(conf):
- conf.load('unc') # do not import the module directly
-
- def build(bld):
- def myfun(tsk):
- tsk.outputs[0].write("data")
- bld(rule=myfun, update_outputs=True,
- source='wscript',
- target='\\\\COMPUTER\\share\\test.txt')
- bld(rule=myfun, update_outputs=True,
- source='\\\\COMPUTER\\share\\test.txt',
- target='\\\\COMPUTER\\share\\test2.txt')
-"""
-
-import os
-from waflib import Node, Utils, Context
-
-def find_resource(self, lst):
- if isinstance(lst, str):
- lst = [x for x in Node.split_path(lst) if x and x != '.']
-
- if lst[0].startswith('\\\\'):
- if len(lst) < 3:
- return None
- node = self.ctx.root.make_node(lst[0]).make_node(lst[1])
- node.cache_isdir = True
- node.parent.cache_isdir = True
-
- ret = node.search_node(lst[2:])
- if not ret:
- ret = node.find_node(lst[2:])
- if ret and os.path.isdir(ret.abspath()):
- return None
- return ret
-
- return self.find_resource_orig(lst)
-
-def find_or_declare(self, lst):
- if isinstance(lst, str):
- lst = [x for x in Node.split_path(lst) if x and x != '.']
-
- if lst[0].startswith('\\\\'):
- if len(lst) < 3:
- return None
- node = self.ctx.root.make_node(lst[0]).make_node(lst[1])
- node.cache_isdir = True
- node.parent.cache_isdir = True
- ret = node.find_node(lst[2:])
- if not ret:
- ret = node.make_node(lst[2:])
- if not os.path.isfile(ret.abspath()):
- ret.sig = None
- ret.parent.mkdir()
- return ret
-
- return self.find_or_declare_orig(lst)
-
-def abspath(self):
- """For MAX_PATH limitations"""
- ret = self.abspath_orig()
- if not ret.startswith("\\"):
- return "\\\\?\\" + ret
- return ret
-
-if Utils.is_win32:
- Node.Node.find_resource_orig = Node.Node.find_resource
- Node.Node.find_resource = find_resource
-
- Node.Node.find_or_declare_orig = Node.Node.find_or_declare
- Node.Node.find_or_declare = find_or_declare
-
- Node.Node.abspath_orig = Node.Node.abspath
- Node.Node.abspath = abspath
-
- for k in list(Context.cache_modules.keys()):
- Context.cache_modules["\\\\?\\" + k] = Context.cache_modules[k]
diff --git a/third_party/waf/waflib/extras/unity.py b/third_party/waf/waflib/extras/unity.py
index f30ba50ca45..cb6a8d54db7 100644
--- a/third_party/waf/waflib/extras/unity.py
+++ b/third_party/waf/waflib/extras/unity.py
@@ -1,67 +1,112 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
"""
-Compile whole groups of C/C++ files at once.
+Compile whole groups of C/C++ files at once
+(C and C++ files are processed independently though).
+
+To enable globally::
+
+ def options(opt):
+ opt.load('compiler_cxx')
+ def build(bld):
+ bld.load('compiler_cxx unity')
+
+To enable for specific task generators only::
+
+ def build(bld):
+ bld(features='c cprogram unity', source='main.c', ...)
+
+The file order is often significant in such builds, so it can be
+necessary to adjust the order of source files and the batch sizes.
+To control the amount of files processed in a batch per target
+(the default is 50)::
+
+ def build(bld):
+ bld(features='c cprogram', unity_size=20)
-def build(bld):
- bld.load('compiler_cxx unity')
"""
-import sys
from waflib import Task, Options
from waflib.Tools import c_preproc
from waflib import TaskGen
MAX_BATCH = 50
+EXTS_C = ('.c',)
+EXTS_CXX = ('.cpp','.cc','.cxx','.C','.c++')
+
def options(opt):
global MAX_BATCH
- opt.add_option('--batchsize', action='store', dest='batchsize', type='int', default=MAX_BATCH, help='batch size (0 for no batch)')
+ opt.add_option('--batchsize', action='store', dest='batchsize', type='int', default=MAX_BATCH,
+ help='default unity batch size (0 disables unity builds)')
+
+@TaskGen.taskgen_method
+def batch_size(self):
+ default = getattr(Options.options, 'batchsize', MAX_BATCH)
+ if default < 1:
+ return 0
+ return getattr(self, 'unity_size', default)
+
class unity(Task.Task):
color = 'BLUE'
scan = c_preproc.scan
+ def to_include(self, node):
+ ret = node.path_from(self.outputs[0].parent)
+ ret = ret.replace('\\', '\\\\').replace('"', '\\"')
+ return ret
def run(self):
- lst = ['#include "%s"\n' % node.abspath() for node in self.inputs]
+ lst = ['#include "%s"\n' % self.to_include(node) for node in self.inputs]
txt = ''.join(lst)
self.outputs[0].write(txt)
+ def __str__(self):
+ node = self.outputs[0]
+ return node.path_from(node.ctx.launch_node())
-@TaskGen.taskgen_method
-def batch_size(self):
- return getattr(Options.options, 'batchsize', MAX_BATCH)
-
-def make_batch_fun(ext):
- # this generic code makes this quite unreadable, defining the function two times might have been better
- def make_batch(self, node):
- cnt = self.batch_size()
- if cnt <= 1:
- return self.create_compiled_task(ext, node)
- x = getattr(self, 'master_%s' % ext, None)
- if not x or len(x.inputs) >= cnt:
- x = self.create_task('unity')
- setattr(self, 'master_%s' % ext, x)
-
- cnt_cur = getattr(self, 'cnt_%s' % ext, 0)
- cxxnode = node.parent.find_or_declare('unity_%s_%d_%d.%s' % (self.idx, cnt_cur, cnt, ext))
- x.outputs = [cxxnode]
- setattr(self, 'cnt_%s' % ext, cnt_cur + 1)
- self.create_compiled_task(ext, cxxnode)
- x.inputs.append(node)
- return make_batch
-
-def enable_support(cc, cxx):
- if cxx or not cc:
- TaskGen.extension('.cpp', '.cc', '.cxx', '.C', '.c++')(make_batch_fun('cxx'))
- if cc:
- TaskGen.extension('.c')(make_batch_fun('c'))
- else:
- TaskGen.task_gen.mappings['.c'] = TaskGen.task_gen.mappings['.cpp']
-
-has_c = '.c' in TaskGen.task_gen.mappings or 'waflib.Tools.compiler_c' in sys.modules
-has_cpp = '.cpp' in TaskGen.task_gen.mappings or 'waflib.Tools.compiler_cxx' in sys.modules
-enable_support(has_c, has_cpp) # by default
+def bind_unity(obj, cls_name, exts):
+ if not 'mappings' in obj.__dict__:
+ obj.mappings = dict(obj.mappings)
+
+ for j in exts:
+ fun = obj.mappings[j]
+ if fun.__name__ == 'unity_fun':
+ raise ValueError('Attempt to bind unity mappings multiple times %r' % j)
+
+ def unity_fun(self, node):
+ cnt = self.batch_size()
+ if cnt <= 1:
+ return fun(self, node)
+ x = getattr(self, 'master_%s' % cls_name, None)
+ if not x or len(x.inputs) >= cnt:
+ x = self.create_task('unity')
+ setattr(self, 'master_%s' % cls_name, x)
+
+ cnt_cur = getattr(self, 'cnt_%s' % cls_name, 0)
+ c_node = node.parent.find_or_declare('unity_%s_%d_%d.%s' % (self.idx, cnt_cur, cnt, cls_name))
+ x.outputs = [c_node]
+ setattr(self, 'cnt_%s' % cls_name, cnt_cur + 1)
+ fun(self, c_node)
+ x.inputs.append(node)
+
+ obj.mappings[j] = unity_fun
+
+@TaskGen.feature('unity')
+@TaskGen.before('process_source')
+def single_unity(self):
+ lst = self.to_list(self.features)
+ if 'c' in lst:
+ bind_unity(self, 'c', EXTS_C)
+ if 'cxx' in lst:
+ bind_unity(self, 'cxx', EXTS_CXX)
def build(bld):
- # it is best to do this
- enable_support(bld.env.CC_NAME, bld.env.CXX_NAME)
+ if bld.env.CC_NAME:
+ bind_unity(TaskGen.task_gen, 'c', EXTS_C)
+ if bld.env.CXX_NAME:
+ bind_unity(TaskGen.task_gen, 'cxx', EXTS_CXX)
+
diff --git a/third_party/waf/waflib/extras/use_config.py b/third_party/waf/waflib/extras/use_config.py
index ffaafce85d5..b5a73d7f3e1 100644
--- a/third_party/waf/waflib/extras/use_config.py
+++ b/third_party/waf/waflib/extras/use_config.py
@@ -1,3 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
#!/usr/bin/env python
# coding=utf-8
# Mathieu Courtois - EDF R&D, 2013 - http://www.code-aster.org
@@ -50,6 +54,21 @@ import sys
import os.path as osp
import os
+local_repo = ''
+"""Local repository containing additional Waf tools (plugins)"""
+remote_repo = 'https://raw.githubusercontent.com/waf-project/waf/master/'
+"""
+Remote directory containing downloadable waf tools. The missing tools can be downloaded by using::
+
+ $ waf configure --download
+"""
+
+remote_locs = ['waflib/extras', 'waflib/Tools']
+"""
+Remote directories for use with :py:const:`waflib.extras.use_config.remote_repo`
+"""
+
+
try:
from urllib import request
except ImportError:
@@ -94,12 +113,12 @@ def download_check(node):
def download_tool(tool, force=False, ctx=None):
"""
- Download a Waf tool from the remote repository defined in :py:const:`waflib.Context.remote_repo`::
+ Download a Waf tool from the remote repository defined in :py:const:`waflib.extras.use_config.remote_repo`::
$ waf configure --download
"""
- for x in Utils.to_list(Context.remote_repo):
- for sub in Utils.to_list(Context.remote_locs):
+ for x in Utils.to_list(remote_repo):
+ for sub in Utils.to_list(remote_locs):
url = '/'.join((x, sub, tool + '.py'))
try:
web = urlopen(url)
@@ -115,12 +134,12 @@ def download_tool(tool, force=False, ctx=None):
else:
tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py')))
tmp.write(web.read(), 'wb')
- Logs.warn('Downloaded %s from %s' % (tool, url))
+ Logs.warn('Downloaded %s from %s', tool, url)
download_check(tmp)
try:
module = Context.load_tool(tool)
except Exception:
- Logs.warn('The tool %s from %s is unusable' % (tool, url))
+ Logs.warn('The tool %s from %s is unusable', tool, url)
try:
tmp.delete()
except Exception:
@@ -134,6 +153,9 @@ def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
try:
module = Context.load_tool_default(tool, tooldir, ctx, with_sys_path)
except ImportError as e:
+ if not ctx or not hasattr(Options.options, 'download'):
+ Logs.error('Could not load %r during options phase (download unavailable at this point)' % tool)
+ raise
if Options.options.download:
module = download_tool(tool, ctx=ctx)
if not module:
@@ -164,3 +186,4 @@ def configure(self):
self.load(cfg, tooldir=tooldir, **kwargs)
self.start_msg('Checking for configuration')
self.end_msg(use_cfg)
+
diff --git a/third_party/waf/waflib/extras/valadoc.py b/third_party/waf/waflib/extras/valadoc.py
new file mode 100644
index 00000000000..bb2a4d22030
--- /dev/null
+++ b/third_party/waf/waflib/extras/valadoc.py
@@ -0,0 +1,144 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: UTF-8
+# Nicolas Joseph 2009
+
+"""
+ported from waf 1.5:
+TODO: tabs vs spaces
+"""
+
+from waflib import Task, Utils, Errors, Logs
+from waflib.TaskGen import feature
+
+VALADOC_STR = '${VALADOC}'
+
+class valadoc(Task.Task):
+ vars = ['VALADOC', 'VALADOCFLAGS']
+ color = 'BLUE'
+ after = ['cprogram', 'cstlib', 'cshlib', 'cxxprogram', 'cxxstlib', 'cxxshlib']
+ quiet = True # no outputs .. this is weird
+
+ def __init__(self, *k, **kw):
+ Task.Task.__init__(self, *k, **kw)
+ self.output_dir = ''
+ self.doclet = ''
+ self.package_name = ''
+ self.package_version = ''
+ self.files = []
+ self.vapi_dirs = []
+ self.protected = True
+ self.private = False
+ self.inherit = False
+ self.deps = False
+ self.vala_defines = []
+ self.vala_target_glib = None
+ self.enable_non_null_experimental = False
+ self.force = False
+
+ def run(self):
+ if not self.env['VALADOCFLAGS']:
+ self.env['VALADOCFLAGS'] = ''
+ cmd = [Utils.subst_vars(VALADOC_STR, self.env)]
+ cmd.append ('-o %s' % self.output_dir)
+ if getattr(self, 'doclet', None):
+ cmd.append ('--doclet %s' % self.doclet)
+ cmd.append ('--package-name %s' % self.package_name)
+ if getattr(self, 'package_version', None):
+ cmd.append ('--package-version %s' % self.package_version)
+ if getattr(self, 'packages', None):
+ for package in self.packages:
+ cmd.append ('--pkg %s' % package)
+ if getattr(self, 'vapi_dirs', None):
+ for vapi_dir in self.vapi_dirs:
+ cmd.append ('--vapidir %s' % vapi_dir)
+ if not getattr(self, 'protected', None):
+ cmd.append ('--no-protected')
+ if getattr(self, 'private', None):
+ cmd.append ('--private')
+ if getattr(self, 'inherit', None):
+ cmd.append ('--inherit')
+ if getattr(self, 'deps', None):
+ cmd.append ('--deps')
+ if getattr(self, 'vala_defines', None):
+ for define in self.vala_defines:
+ cmd.append ('--define %s' % define)
+ if getattr(self, 'vala_target_glib', None):
+ cmd.append ('--target-glib=%s' % self.vala_target_glib)
+ if getattr(self, 'enable_non_null_experimental', None):
+ cmd.append ('--enable-non-null-experimental')
+ if getattr(self, 'force', None):
+ cmd.append ('--force')
+ cmd.append (' '.join ([x.abspath() for x in self.files]))
+ return self.generator.bld.exec_command(' '.join(cmd))
+
+@feature('valadoc')
+def process_valadoc(self):
+ """
+ Generate API documentation from Vala source code with valadoc
+
+ doc = bld(
+ features = 'valadoc',
+ output_dir = '../doc/html',
+ package_name = 'vala-gtk-example',
+ package_version = '1.0.0',
+ packages = 'gtk+-2.0',
+ vapi_dirs = '../vapi',
+ force = True
+ )
+
+ path = bld.path.find_dir ('../src')
+ doc.files = path.ant_glob (incl='**/*.vala')
+ """
+
+ task = self.create_task('valadoc')
+ if getattr(self, 'output_dir', None):
+ task.output_dir = self.path.find_or_declare(self.output_dir).abspath()
+ else:
+ Errors.WafError('no output directory')
+ if getattr(self, 'doclet', None):
+ task.doclet = self.doclet
+ else:
+ Errors.WafError('no doclet directory')
+ if getattr(self, 'package_name', None):
+ task.package_name = self.package_name
+ else:
+ Errors.WafError('no package name')
+ if getattr(self, 'package_version', None):
+ task.package_version = self.package_version
+ if getattr(self, 'packages', None):
+ task.packages = Utils.to_list(self.packages)
+ if getattr(self, 'vapi_dirs', None):
+ vapi_dirs = Utils.to_list(self.vapi_dirs)
+ for vapi_dir in vapi_dirs:
+ try:
+ task.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath())
+ except AttributeError:
+ Logs.warn('Unable to locate Vala API directory: %r', vapi_dir)
+ if getattr(self, 'files', None):
+ task.files = self.files
+ else:
+ Errors.WafError('no input file')
+ if getattr(self, 'protected', None):
+ task.protected = self.protected
+ if getattr(self, 'private', None):
+ task.private = self.private
+ if getattr(self, 'inherit', None):
+ task.inherit = self.inherit
+ if getattr(self, 'deps', None):
+ task.deps = self.deps
+ if getattr(self, 'vala_defines', None):
+ task.vala_defines = Utils.to_list(self.vala_defines)
+ if getattr(self, 'vala_target_glib', None):
+ task.vala_target_glib = self.vala_target_glib
+ if getattr(self, 'enable_non_null_experimental', None):
+ task.enable_non_null_experimental = self.enable_non_null_experimental
+ if getattr(self, 'force', None):
+ task.force = self.force
+
+def configure(conf):
+ conf.find_program('valadoc', errmsg='You must install valadoc <http://live.gnome.org/Valadoc> for generate the API documentation')
+
diff --git a/third_party/waf/waflib/extras/why.py b/third_party/waf/waflib/extras/why.py
index c3875f4e3de..8404e213888 100644
--- a/third_party/waf/waflib/extras/why.py
+++ b/third_party/waf/waflib/extras/why.py
@@ -1,5 +1,9 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
# Thomas Nagy, 2010 (ita)
"""
@@ -17,8 +21,10 @@ from waflib import Task, Utils, Logs, Errors
def signature(self):
# compute the result one time, and suppose the scan_signature will give the good result
- try: return self.cache_sig
- except AttributeError: pass
+ try:
+ return self.cache_sig
+ except AttributeError:
+ pass
self.m = Utils.md5()
self.m.update(self.hcode)
@@ -62,7 +68,7 @@ def runnable_status(self):
def v(x):
return Utils.to_hex(x)
- Logs.debug("Task %r" % self)
+ Logs.debug('Task %r', self)
msgs = ['* Implicit or scanner dependency', '* Task code', '* Source file, explicit or manual dependency', '* Configuration data variable']
tmp = 'task: -> %s: %s %s'
for x in range(len(msgs)):
@@ -70,6 +76,7 @@ def runnable_status(self):
a = new_sigs[x*l : (x+1)*l]
b = old_sigs[x*l : (x+1)*l]
if (a != b):
- Logs.debug(tmp % (msgs[x].ljust(35), v(a), v(b)))
+ Logs.debug(tmp, msgs[x].ljust(35), v(a), v(b))
return ret
Task.Task.runnable_status = runnable_status
+
diff --git a/third_party/waf/waflib/extras/win32_opts.py b/third_party/waf/waflib/extras/win32_opts.py
index 28491cd328f..f8f41870567 100644
--- a/third_party/waf/waflib/extras/win32_opts.py
+++ b/third_party/waf/waflib/extras/win32_opts.py
@@ -1,10 +1,16 @@
#! /usr/bin/env python
# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
"""
Windows-specific optimizations
-This module can help reducing the overhead of listing files on windows (more than 10000 files).
+This module can help reduce the overhead of listing files on Windows
+(more than 10000 files). Python 3.5 already provides the listdir
+optimization though.
"""
import os
@@ -16,7 +22,7 @@ except AttributeError:
TP = '%s\\*'
if Utils.is_win32:
- from waflib.extras import md5_tstamp
+ from waflib.Tools import md5_tstamp
import ctypes, ctypes.wintypes
FindFirstFile = ctypes.windll.kernel32.FindFirstFileW
@@ -99,13 +105,7 @@ if Utils.is_win32:
pass
except AttributeError:
self.ctx.hash_cache = {}
-
- if not self.is_bld():
- if self.is_child_of(self.ctx.srcnode):
- self.sig = self.cached_hash_file()
- else:
- self.sig = Utils.h_file(self.abspath())
- self.ctx.hash_cache[id(self)] = ret = self.sig
+ self.ctx.hash_cache[id(self)] = ret = Utils.h_file(self.abspath())
return ret
Node.Node.get_bld_sig = get_bld_sig_win32
@@ -126,7 +126,7 @@ if Utils.is_win32:
find = FindFirstFile(TP % curpath, ctypes.byref(findData))
if find == INVALID_HANDLE_VALUE:
- Logs.error("invalid win32 handle isfile_cached %r" % self.abspath())
+ Logs.error("invalid win32 handle isfile_cached %r", self.abspath())
return os.path.isfile(self.abspath())
try:
@@ -138,7 +138,7 @@ if Utils.is_win32:
if not FindNextFile(find, ctypes.byref(findData)):
break
except Exception as e:
- Logs.error('exception while listing a folder %r %r' % (self.abspath(), e))
+ Logs.error('exception while listing a folder %r %r', self.abspath(), e)
return os.path.isfile(self.abspath())
finally:
FindClose(find)
@@ -148,12 +148,11 @@ if Utils.is_win32:
def find_or_declare_win32(self, lst):
# assuming that "find_or_declare" is called before the build starts, remove the calls to os.path.isfile
if isinstance(lst, str):
- lst = [x for x in Node.split_path(lst) if x and x != '.']
+ lst = [x for x in Utils.split_path(lst) if x and x != '.']
- node = self.get_bld().search(lst)
+ node = self.get_bld().search_node(lst)
if node:
if not node.isfile_cached():
- node.sig = None
try:
node.parent.mkdir()
except OSError:
@@ -163,7 +162,6 @@ if Utils.is_win32:
node = self.find_node(lst)
if node:
if not node.isfile_cached():
- node.sig = None
try:
node.parent.mkdir()
except OSError:
@@ -173,3 +171,4 @@ if Utils.is_win32:
node.parent.mkdir()
return node
Node.Node.find_or_declare = find_or_declare_win32
+
diff --git a/third_party/waf/waflib/extras/wix.py b/third_party/waf/waflib/extras/wix.py
new file mode 100644
index 00000000000..c9daae6ead3
--- /dev/null
+++ b/third_party/waf/waflib/extras/wix.py
@@ -0,0 +1,91 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/python
+# encoding: utf-8
+# vim: tabstop=4 noexpandtab
+
+"""
+Windows Installer XML Tool (WiX)
+
+.wxs --- candle ---> .wxobj --- light ---> .msi
+
+bld(features='wix', some.wxs, gen='some.msi', candleflags=[..], lightflags=[..])
+
+bld(features='wix', source=['bundle.wxs','WixBalExtension'], gen='setup.exe', candleflags=[..])
+"""
+
+import os, copy
+from waflib import TaskGen
+from waflib import Task
+from waflib.Utils import winreg
+
+class candle(Task.Task):
+ run_str = '${CANDLE} -nologo ${CANDLEFLAGS} -out ${TGT} ${SRC[0].abspath()}',
+
+class light(Task.Task):
+ run_str = "${LIGHT} -nologo -b ${SRC[0].parent.abspath()} ${LIGHTFLAGS} -out ${TGT} ${SRC[0].abspath()}"
+
+@TaskGen.feature('wix')
+@TaskGen.before_method('process_source')
+def wix(self):
+ #X.wxs -> ${SRC} for CANDLE
+ #X.wxobj -> ${SRC} for LIGHT
+ #X.dll -> -ext X in ${LIGHTFLAGS}
+ #X.wxl -> wixui.wixlib -loc X.wxl in ${LIGHTFLAGS}
+ wxobj = []
+ wxs = []
+ exts = []
+ wxl = []
+ rest = []
+ for x in self.source:
+ if x.endswith('.wxobj'):
+ wxobj.append(x)
+ elif x.endswith('.wxs'):
+ wxobj.append(self.path.find_or_declare(x[:-4]+'.wxobj'))
+ wxs.append(x)
+ elif x.endswith('.dll'):
+ exts.append(x[:-4])
+ elif '.' not in x:
+ exts.append(x)
+ elif x.endswith('.wxl'):
+ wxl.append(x)
+ else:
+ rest.append(x)
+ self.source = self.to_nodes(rest) #.wxs
+
+ cndl = self.create_task('candle', self.to_nodes(wxs), self.to_nodes(wxobj))
+ lght = self.create_task('light', self.to_nodes(wxobj), self.path.find_or_declare(self.gen))
+
+ cndl.env.CANDLEFLAGS = copy.copy(getattr(self,'candleflags',[]))
+ lght.env.LIGHTFLAGS = copy.copy(getattr(self,'lightflags',[]))
+
+ for x in wxl:
+ lght.env.append_value('LIGHTFLAGS','wixui.wixlib')
+ lght.env.append_value('LIGHTFLAGS','-loc')
+ lght.env.append_value('LIGHTFLAGS',x)
+ for x in exts:
+ cndl.env.append_value('CANDLEFLAGS','-ext')
+ cndl.env.append_value('CANDLEFLAGS',x)
+ lght.env.append_value('LIGHTFLAGS','-ext')
+ lght.env.append_value('LIGHTFLAGS',x)
+
+#wix_bin_path()
+def wix_bin_path():
+ basekey = r"SOFTWARE\Microsoft\.NETFramework\AssemblyFolders"
+ query = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, basekey)
+ cnt=winreg.QueryInfoKey(query)[0]
+ thiskey = r'C:\Program Files (x86)\WiX Toolset v3.10\SDK'
+ for i in range(cnt-1,-1,-1):
+ thiskey = winreg.EnumKey(query,i)
+ if 'WiX' in thiskey:
+ break
+ winreg.CloseKey(query)
+ return os.path.normpath(winreg.QueryValue(winreg.HKEY_LOCAL_MACHINE, basekey+r'\\'+thiskey)+'..\\bin')
+
+def configure(ctx):
+ path_list=[wix_bin_path()]
+ ctx.find_program('candle', var='CANDLE', mandatory=True, path_list = path_list)
+ ctx.find_program('light', var='LIGHT', mandatory=True, path_list = path_list)
+
diff --git a/third_party/waf/waflib/extras/xcode6.py b/third_party/waf/waflib/extras/xcode6.py
new file mode 100644
index 00000000000..15cddf2b056
--- /dev/null
+++ b/third_party/waf/waflib/extras/xcode6.py
@@ -0,0 +1,731 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# XCode 3/XCode 4/XCode 6/Xcode 7 generator for Waf
+# Based on work by Nicolas Mercier 2011
+# Extended by Simon Warg 2015, https://github.com/mimon
+# XCode project file format based on http://www.monobjc.net/xcode-project-file-format.html
+
+"""
+See playground/xcode6/ for usage examples.
+
+"""
+
+from waflib import Context, TaskGen, Build, Utils, Errors, Logs
+import os, sys
+
+# FIXME too few extensions
+XCODE_EXTS = ['.c', '.cpp', '.m', '.mm']
+
+HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'
+
+MAP_EXT = {
+ '': "folder",
+ '.h' : "sourcecode.c.h",
+
+ '.hh': "sourcecode.cpp.h",
+ '.inl': "sourcecode.cpp.h",
+ '.hpp': "sourcecode.cpp.h",
+
+ '.c': "sourcecode.c.c",
+
+ '.m': "sourcecode.c.objc",
+
+ '.mm': "sourcecode.cpp.objcpp",
+
+ '.cc': "sourcecode.cpp.cpp",
+
+ '.cpp': "sourcecode.cpp.cpp",
+ '.C': "sourcecode.cpp.cpp",
+ '.cxx': "sourcecode.cpp.cpp",
+ '.c++': "sourcecode.cpp.cpp",
+
+ '.l': "sourcecode.lex", # luthor
+ '.ll': "sourcecode.lex",
+
+ '.y': "sourcecode.yacc",
+ '.yy': "sourcecode.yacc",
+
+ '.plist': "text.plist.xml",
+ ".nib": "wrapper.nib",
+ ".xib": "text.xib",
+}
+
+# Used in PBXNativeTarget elements
+PRODUCT_TYPE_APPLICATION = 'com.apple.product-type.application'
+PRODUCT_TYPE_FRAMEWORK = 'com.apple.product-type.framework'
+PRODUCT_TYPE_EXECUTABLE = 'com.apple.product-type.tool'
+PRODUCT_TYPE_LIB_STATIC = 'com.apple.product-type.library.static'
+PRODUCT_TYPE_LIB_DYNAMIC = 'com.apple.product-type.library.dynamic'
+PRODUCT_TYPE_EXTENSION = 'com.apple.product-type.kernel-extension'
+PRODUCT_TYPE_IOKIT = 'com.apple.product-type.kernel-extension.iokit'
+
+# Used in PBXFileReference elements
+FILE_TYPE_APPLICATION = 'wrapper.cfbundle'
+FILE_TYPE_FRAMEWORK = 'wrapper.framework'
+FILE_TYPE_LIB_DYNAMIC = 'compiled.mach-o.dylib'
+FILE_TYPE_LIB_STATIC = 'archive.ar'
+FILE_TYPE_EXECUTABLE = 'compiled.mach-o.executable'
+
+# Tuple packs of the above
+TARGET_TYPE_FRAMEWORK = (PRODUCT_TYPE_FRAMEWORK, FILE_TYPE_FRAMEWORK, '.framework')
+TARGET_TYPE_APPLICATION = (PRODUCT_TYPE_APPLICATION, FILE_TYPE_APPLICATION, '.app')
+TARGET_TYPE_DYNAMIC_LIB = (PRODUCT_TYPE_LIB_DYNAMIC, FILE_TYPE_LIB_DYNAMIC, '.dylib')
+TARGET_TYPE_STATIC_LIB = (PRODUCT_TYPE_LIB_STATIC, FILE_TYPE_LIB_STATIC, '.a')
+TARGET_TYPE_EXECUTABLE = (PRODUCT_TYPE_EXECUTABLE, FILE_TYPE_EXECUTABLE, '')
+
+# Maps target type string to its data
+TARGET_TYPES = {
+ 'framework': TARGET_TYPE_FRAMEWORK,
+ 'app': TARGET_TYPE_APPLICATION,
+ 'dylib': TARGET_TYPE_DYNAMIC_LIB,
+ 'stlib': TARGET_TYPE_STATIC_LIB,
+ 'exe' :TARGET_TYPE_EXECUTABLE,
+}
+
+def delete_invalid_values(dct):
+ """ Deletes entries that are dictionaries or sets """
+ for k, v in list(dct.items()):
+ if isinstance(v, dict) or isinstance(v, set):
+ del dct[k]
+ return dct
+
+"""
+Configuration of the global project settings. Sets an environment variable 'PROJ_CONFIGURATION'
+which is a dictionary of configuration name and buildsettings pair.
+E.g.:
+env.PROJ_CONFIGURATION = {
+ 'Debug': {
+ 'ARCHS': 'x86',
+ ...
+ }
+ 'Release': {
+		'ARCHS': 'x86_64',
+ ...
+ }
+}
+The user can define a completely customized dictionary in configure() stage. Otherwise a default Debug/Release will be created
+based on env variable
+"""
+def configure(self):
+ if not self.env.PROJ_CONFIGURATION:
+ self.to_log("A default project configuration was created since no custom one was given in the configure(conf) stage. Define your custom project settings by adding PROJ_CONFIGURATION to env. The env.PROJ_CONFIGURATION must be a dictionary with at least one key, where each key is the configuration name, and the value is a dictionary of key/value settings.\n")
+
+ # Check for any added config files added by the tool 'c_config'.
+ if 'cfg_files' in self.env:
+ self.env.INCLUDES = Utils.to_list(self.env.INCLUDES) + [os.path.abspath(os.path.dirname(f)) for f in self.env.cfg_files]
+
+ # Create default project configuration?
+ if 'PROJ_CONFIGURATION' not in self.env:
+ defaults = delete_invalid_values(self.env.get_merged_dict())
+ self.env.PROJ_CONFIGURATION = {
+ "Debug": defaults,
+ "Release": defaults,
+ }
+
+ # Some build settings are required to be present by XCode. We will supply default values
+ # if user hasn't defined any.
+ defaults_required = [('PRODUCT_NAME', '$(TARGET_NAME)')]
+ for cfgname,settings in self.env.PROJ_CONFIGURATION.iteritems():
+ for default_var, default_val in defaults_required:
+ if default_var not in settings:
+ settings[default_var] = default_val
+
+ # Error check customization
+ if not isinstance(self.env.PROJ_CONFIGURATION, dict):
+ raise Errors.ConfigurationError("The env.PROJ_CONFIGURATION must be a dictionary with at least one key, where each key is the configuration name, and the value is a dictionary of key/value settings.")
+
+part1 = 0
+part2 = 10000
+part3 = 0
+id = 562000999
+def newid():
+ global id
+ id += 1
+ return "%04X%04X%04X%012d" % (0, 10000, 0, id)
+
+"""
+Represents a tree node in the XCode project plist file format.
+When written to a file, all attributes of XCodeNode are stringified together with
+its value. However, attributes starting with an underscore _ are ignored
+during that process, allowing you to store arbitrary values that are not supposed
+to be written out.
+"""
+class XCodeNode(object):
+ def __init__(self):
+ self._id = newid()
+ self._been_written = False
+
+ def tostring(self, value):
+ if isinstance(value, dict):
+ result = "{\n"
+ for k,v in value.items():
+ result = result + "\t\t\t%s = %s;\n" % (k, self.tostring(v))
+ result = result + "\t\t}"
+ return result
+ elif isinstance(value, str):
+ return "\"%s\"" % value
+ elif isinstance(value, list):
+ result = "(\n"
+ for i in value:
+ result = result + "\t\t\t%s,\n" % self.tostring(i)
+ result = result + "\t\t)"
+ return result
+ elif isinstance(value, XCodeNode):
+ return value._id
+ else:
+ return str(value)
+
+ def write_recursive(self, value, file):
+ if isinstance(value, dict):
+ for k,v in value.items():
+ self.write_recursive(v, file)
+ elif isinstance(value, list):
+ for i in value:
+ self.write_recursive(i, file)
+ elif isinstance(value, XCodeNode):
+ value.write(file)
+
+ def write(self, file):
+ if not self._been_written:
+ self._been_written = True
+ for attribute,value in self.__dict__.items():
+ if attribute[0] != '_':
+ self.write_recursive(value, file)
+ w = file.write
+ w("\t%s = {\n" % self._id)
+ w("\t\tisa = %s;\n" % self.__class__.__name__)
+ for attribute,value in self.__dict__.items():
+ if attribute[0] != '_':
+ w("\t\t%s = %s;\n" % (attribute, self.tostring(value)))
+ w("\t};\n\n")
+
+# Configurations
+class XCBuildConfiguration(XCodeNode):
+ def __init__(self, name, settings = {}, env=None):
+ XCodeNode.__init__(self)
+ self.baseConfigurationReference = ""
+ self.buildSettings = settings
+ self.name = name
+ if env and env.ARCH:
+ settings['ARCHS'] = " ".join(env.ARCH)
+
+
+class XCConfigurationList(XCodeNode):
+ def __init__(self, configlst):
+ """ :param configlst: list of XCConfigurationList """
+ XCodeNode.__init__(self)
+ self.buildConfigurations = configlst
+ self.defaultConfigurationIsVisible = 0
+ self.defaultConfigurationName = configlst and configlst[0].name or ""
+
+# Group/Files
+class PBXFileReference(XCodeNode):
+ def __init__(self, name, path, filetype = '', sourcetree = "SOURCE_ROOT"):
+
+ XCodeNode.__init__(self)
+ self.fileEncoding = 4
+ if not filetype:
+ _, ext = os.path.splitext(name)
+ filetype = MAP_EXT.get(ext, 'text')
+ self.lastKnownFileType = filetype
+ self.explicitFileType = filetype
+ self.name = name
+ self.path = path
+ self.sourceTree = sourcetree
+
+ def __hash__(self):
+ return (self.path+self.name).__hash__()
+
+ def __eq__(self, other):
+ return (self.path, self.name) == (other.path, other.name)
+
+class PBXBuildFile(XCodeNode):
+	""" This element indicates a file reference that is used in a PBXBuildPhase (either as an include or resource). """
+ def __init__(self, fileRef, settings={}):
+ XCodeNode.__init__(self)
+
+ # fileRef is a reference to a PBXFileReference object
+ self.fileRef = fileRef
+
+		# A map of key/value pairs for additional settings.
+ self.settings = settings
+
+ def __hash__(self):
+ return (self.fileRef).__hash__()
+
+ def __eq__(self, other):
+ return self.fileRef == other.fileRef
+
+class PBXGroup(XCodeNode):
+ def __init__(self, name, sourcetree = 'SOURCE_TREE'):
+ XCodeNode.__init__(self)
+ self.children = []
+ self.name = name
+ self.sourceTree = sourcetree
+
+ # Maintain a lookup table for all PBXFileReferences
+ # that are contained in this group.
+ self._filerefs = {}
+
+ def add(self, sources):
+ """
+ Add a list of PBXFileReferences to this group
+
+ :param sources: list of PBXFileReferences objects
+ """
+ self._filerefs.update(dict(zip(sources, sources)))
+ self.children.extend(sources)
+
+ def get_sub_groups(self):
+ """
+ Returns all child PBXGroup objects contained in this group
+ """
+ return list(filter(lambda x: isinstance(x, PBXGroup), self.children))
+
+ def find_fileref(self, fileref):
+ """
+ Recursively search this group for an existing PBXFileReference. Returns None
+ if none were found.
+
+ The reason you'd want to reuse existing PBXFileReferences from a PBXGroup is that XCode doesn't like PBXFileReferences that aren't part of a PBXGroup hierarchy.
+ If it isn't, the consequence is that certain UI features like 'Reveal in Finder'
+ stops working.
+ """
+ if fileref in self._filerefs:
+ return self._filerefs[fileref]
+ elif self.children:
+ for childgroup in self.get_sub_groups():
+ f = childgroup.find_fileref(fileref)
+ if f:
+ return f
+ return None
+
+class PBXContainerItemProxy(XCodeNode):
+	""" This is the element used to decorate a target item. """
+ def __init__(self, containerPortal, remoteGlobalIDString, remoteInfo='', proxyType=1):
+ XCodeNode.__init__(self)
+ self.containerPortal = containerPortal # PBXProject
+ self.remoteGlobalIDString = remoteGlobalIDString # PBXNativeTarget
+ self.remoteInfo = remoteInfo # Target name
+ self.proxyType = proxyType
+
+class PBXTargetDependency(XCodeNode):
+ """ This is the element for referencing other target through content proxies. """
+ def __init__(self, native_target, proxy):
+ XCodeNode.__init__(self)
+ self.target = native_target
+ self.targetProxy = proxy
+
+class PBXFrameworksBuildPhase(XCodeNode):
+ """ This is the element for the framework link build phase, i.e. linking to frameworks """
+ def __init__(self, pbxbuildfiles):
+ XCodeNode.__init__(self)
+ self.buildActionMask = 2147483647
+ self.runOnlyForDeploymentPostprocessing = 0
+ self.files = pbxbuildfiles #List of PBXBuildFile (.o, .framework, .dylib)
+
+class PBXHeadersBuildPhase(XCodeNode):
+ """ This is the element for adding header files to be packaged into the .framework """
+ def __init__(self, pbxbuildfiles):
+ XCodeNode.__init__(self)
+ self.buildActionMask = 2147483647
+ self.runOnlyForDeploymentPostprocessing = 0
+ self.files = pbxbuildfiles #List of PBXBuildFile (.o, .framework, .dylib)
+
+class PBXCopyFilesBuildPhase(XCodeNode):
+ """
+ Represents the PBXCopyFilesBuildPhase section. PBXBuildFile
+ can be added to this node to copy files after build is done.
+ """
+ def __init__(self, pbxbuildfiles, dstpath, dstSubpathSpec=0, *args, **kwargs):
+ XCodeNode.__init__(self)
+ self.files = pbxbuildfiles
+ self.dstPath = dstpath
+ self.dstSubfolderSpec = dstSubpathSpec
+
+class PBXSourcesBuildPhase(XCodeNode):
+ """ Represents the 'Compile Sources' build phase in a Xcode target """
+ def __init__(self, buildfiles):
+ XCodeNode.__init__(self)
+ self.files = buildfiles # List of PBXBuildFile objects
+
+class PBXLegacyTarget(XCodeNode):
+ def __init__(self, action, target=''):
+ XCodeNode.__init__(self)
+ self.buildConfigurationList = XCConfigurationList([XCBuildConfiguration('waf', {})])
+ if not target:
+ self.buildArgumentsString = "%s %s" % (sys.argv[0], action)
+ else:
+ self.buildArgumentsString = "%s %s --targets=%s" % (sys.argv[0], action, target)
+ self.buildPhases = []
+ self.buildToolPath = sys.executable
+ self.buildWorkingDirectory = ""
+ self.dependencies = []
+ self.name = target or action
+ self.productName = target or action
+ self.passBuildSettingsInEnvironment = 0
+
+class PBXShellScriptBuildPhase(XCodeNode):
+ def __init__(self, action, target):
+ XCodeNode.__init__(self)
+ self.buildActionMask = 2147483647
+ self.files = []
+ self.inputPaths = []
+ self.outputPaths = []
+ self.runOnlyForDeploymentPostProcessing = 0
+ self.shellPath = "/bin/sh"
+ self.shellScript = "%s %s %s --targets=%s" % (sys.executable, sys.argv[0], action, target)
+
+class PBXNativeTarget(XCodeNode):
+ """ Represents a target in XCode, e.g. App, DyLib, Framework etc. """
+ def __init__(self, target, node, target_type=TARGET_TYPE_APPLICATION, configlist=[], buildphases=[]):
+ XCodeNode.__init__(self)
+ product_type = target_type[0]
+ file_type = target_type[1]
+
+ self.buildConfigurationList = XCConfigurationList(configlist)
+ self.buildPhases = buildphases
+ self.buildRules = []
+ self.dependencies = []
+ self.name = target
+ self.productName = target
+ self.productType = product_type # See TARGET_TYPE_ tuples constants
+ self.productReference = PBXFileReference(node.name, node.abspath(), file_type, '')
+
+ def add_configuration(self, cf):
+ """ :type cf: XCBuildConfiguration """
+ self.buildConfigurationList.buildConfigurations.append(cf)
+
+ def add_build_phase(self, phase):
+ # Some build phase types may appear only once. If a phase type already exists, then merge them.
+ if ( (phase.__class__ == PBXFrameworksBuildPhase)
+ or (phase.__class__ == PBXSourcesBuildPhase) ):
+ for b in self.buildPhases:
+ if b.__class__ == phase.__class__:
+ b.files.extend(phase.files)
+ return
+ self.buildPhases.append(phase)
+
+ def add_dependency(self, depnd):
+ self.dependencies.append(depnd)
+
+# Root project object
+class PBXProject(XCodeNode):
+ def __init__(self, name, version, env):
+ XCodeNode.__init__(self)
+
+ if not isinstance(env.PROJ_CONFIGURATION, dict):
+ raise Errors.WafError("Error: env.PROJ_CONFIGURATION must be a dictionary. This is done for you if you do not define one yourself. However, did you load the xcode module at the end of your wscript configure() ?")
+
+ # Retrieve project configuration
+ configurations = []
+ for config_name, settings in env.PROJ_CONFIGURATION.items():
+ cf = XCBuildConfiguration(config_name, settings)
+ configurations.append(cf)
+
+ self.buildConfigurationList = XCConfigurationList(configurations)
+ self.compatibilityVersion = version[0]
+ self.hasScannedForEncodings = 1
+ self.mainGroup = PBXGroup(name)
+ self.projectRoot = ""
+ self.projectDirPath = ""
+ self.targets = []
+ self._objectVersion = version[1]
+
+ def create_target_dependency(self, target, name):
+		""" :param target: PBXNativeTarget """
+ proxy = PBXContainerItemProxy(self, target, name)
+ dependecy = PBXTargetDependency(target, proxy)
+ return dependecy
+
+ def write(self, file):
+
+ # Make sure this is written only once
+ if self._been_written:
+ return
+
+ w = file.write
+ w("// !$*UTF8*$!\n")
+ w("{\n")
+ w("\tarchiveVersion = 1;\n")
+ w("\tclasses = {\n")
+ w("\t};\n")
+ w("\tobjectVersion = %d;\n" % self._objectVersion)
+ w("\tobjects = {\n\n")
+
+ XCodeNode.write(self, file)
+
+ w("\t};\n")
+ w("\trootObject = %s;\n" % self._id)
+ w("}\n")
+
+ def add_target(self, target):
+ self.targets.append(target)
+
+ def get_target(self, name):
+ """ Get a reference to PBXNativeTarget if it exists """
+ for t in self.targets:
+ if t.name == name:
+ return t
+ return None
+
+@TaskGen.feature('c', 'cxx')
+@TaskGen.after('propagate_uselib_vars', 'apply_incpaths')
+def process_xcode(self):
+ bld = self.bld
+ try:
+ p = bld.project
+ except AttributeError:
+ return
+
+ if not hasattr(self, 'target_type'):
+ return
+
+ products_group = bld.products_group
+
+ target_group = PBXGroup(self.name)
+ p.mainGroup.children.append(target_group)
+
+ # Determine what type to build - framework, app bundle etc.
+ target_type = getattr(self, 'target_type', 'app')
+ if target_type not in TARGET_TYPES:
+ raise Errors.WafError("Target type '%s' does not exists. Available options are '%s'. In target '%s'" % (target_type, "', '".join(TARGET_TYPES.keys()), self.name))
+ else:
+ target_type = TARGET_TYPES[target_type]
+ file_ext = target_type[2]
+
+ # Create the output node
+ target_node = self.path.find_or_declare(self.name+file_ext)
+ target = PBXNativeTarget(self.name, target_node, target_type, [], [])
+
+ products_group.children.append(target.productReference)
+
+ # Pull source files from the 'source' attribute and assign them to a UI group.
+ # Use a default UI group named 'Source' unless the user
+ # provides a 'group_files' dictionary to customize the UI grouping.
+ sources = getattr(self, 'source', [])
+ if hasattr(self, 'group_files'):
+ group_files = getattr(self, 'group_files', [])
+ for grpname,files in group_files.items():
+ group = bld.create_group(grpname, files)
+ target_group.children.append(group)
+ else:
+ group = bld.create_group('Source', sources)
+ target_group.children.append(group)
+
+ # Create a PBXFileReference for each source file.
+ # If the source file already exists as a PBXFileReference in any of the UI groups, then
+ # reuse that PBXFileReference object (XCode does not like it if we don't reuse)
+ for idx, path in enumerate(sources):
+ fileref = PBXFileReference(path.name, path.abspath())
+ existing_fileref = target_group.find_fileref(fileref)
+ if existing_fileref:
+ sources[idx] = existing_fileref
+ else:
+ sources[idx] = fileref
+
+ # If the 'source' attribute contains any file extension that XCode can't work with,
+ # then remove it. The allowed file extensions are defined in XCODE_EXTS.
+ is_valid_file_extension = lambda file: os.path.splitext(file.path)[1] in XCODE_EXTS
+ sources = list(filter(is_valid_file_extension, sources))
+
+ buildfiles = [bld.unique_buildfile(PBXBuildFile(x)) for x in sources]
+ target.add_build_phase(PBXSourcesBuildPhase(buildfiles))
+
+ # Check if any framework to link against is some other target we've made
+ libs = getattr(self, 'tmp_use_seen', [])
+ for lib in libs:
+ use_target = p.get_target(lib)
+ if use_target:
+ # Create an XCode dependency so that XCode knows to build the other target before this target
+ dependency = p.create_target_dependency(use_target, use_target.name)
+ target.add_dependency(dependency)
+
+ buildphase = PBXFrameworksBuildPhase([PBXBuildFile(use_target.productReference)])
+ target.add_build_phase(buildphase)
+ if lib in self.env.LIB:
+ self.env.LIB = list(filter(lambda x: x != lib, self.env.LIB))
+
+ # If 'export_headers' is present, add files to the Headers build phase in xcode.
+ # These are files that'll get packed into the Framework for instance.
+ exp_hdrs = getattr(self, 'export_headers', [])
+ hdrs = bld.as_nodes(Utils.to_list(exp_hdrs))
+ files = [p.mainGroup.find_fileref(PBXFileReference(n.name, n.abspath())) for n in hdrs]
+ files = [PBXBuildFile(f, {'ATTRIBUTES': ('Public',)}) for f in files]
+ buildphase = PBXHeadersBuildPhase(files)
+ target.add_build_phase(buildphase)
+
+ # Merge frameworks and libs into one list, and prefix the frameworks
+ frameworks = Utils.to_list(self.env.FRAMEWORK)
+ frameworks = ' '.join(['-framework %s' % (f.split('.framework')[0]) for f in frameworks])
+
+ libs = Utils.to_list(self.env.STLIB) + Utils.to_list(self.env.LIB)
+ libs = ' '.join(bld.env['STLIB_ST'] % t for t in libs)
+
+ # Override target specific build settings
+ bldsettings = {
+ 'HEADER_SEARCH_PATHS': ['$(inherited)'] + self.env['INCPATHS'],
+ 'LIBRARY_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.LIBPATH) + Utils.to_list(self.env.STLIBPATH) + Utils.to_list(self.env.LIBDIR) ,
+ 'FRAMEWORK_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.FRAMEWORKPATH),
+ 'OTHER_LDFLAGS': libs + ' ' + frameworks,
+ 'OTHER_LIBTOOLFLAGS': bld.env['LINKFLAGS'],
+ 'OTHER_CPLUSPLUSFLAGS': Utils.to_list(self.env['CXXFLAGS']),
+ 'OTHER_CFLAGS': Utils.to_list(self.env['CFLAGS']),
+ 'INSTALL_PATH': []
+ }
+
+ # Install path
+ installpaths = Utils.to_list(getattr(self, 'install', []))
+ prodbuildfile = PBXBuildFile(target.productReference)
+ for instpath in installpaths:
+ bldsettings['INSTALL_PATH'].append(instpath)
+ target.add_build_phase(PBXCopyFilesBuildPhase([prodbuildfile], instpath))
+
+ if not bldsettings['INSTALL_PATH']:
+ del bldsettings['INSTALL_PATH']
+
+ # Create build settings which can override the project settings. Defaults to none if user
+ # did not pass argument. This will be filled up with target specific
+ # search paths, libs to link etc.
+ settings = getattr(self, 'settings', {})
+
+ # The keys represents different build configuration, e.g. Debug, Release and so on..
+ # Insert our generated build settings to all configuration names
+ keys = set(settings.keys() + bld.env.PROJ_CONFIGURATION.keys())
+ for k in keys:
+ if k in settings:
+ settings[k].update(bldsettings)
+ else:
+ settings[k] = bldsettings
+
+ for k,v in settings.items():
+ target.add_configuration(XCBuildConfiguration(k, v))
+
+ p.add_target(target)
+
+
class xcode(Build.BuildContext):
	cmd = 'xcode6'
	fun = 'build'

	def as_nodes(self, files):
		"""
		Convert a list of path strings and/or Nodes into a list of waflib Nodes.

		:param files: iterable of strings (paths relative to srcnode) or Node objects
		:raises Errors.WafError: when a path cannot be resolved to a node
		"""
		nodes = []
		for item in files:
			# Strings are looked up under the source tree; anything else is
			# assumed to already be a Node and passed through unchanged.
			node = item if not isinstance(item, str) else self.srcnode.find_node(item)
			if not node:
				raise Errors.WafError('File \'%s\' was not found' % item)
			nodes.append(node)
		return nodes

	def create_group(self, name, files):
		"""
		Build and return a new PBXGroup named *name* containing one
		PBXFileReference per entry in *files* (string, list, or Nodes).

		A fresh file reference is created for each entry on purpose:
		XCode appears to allow a given PBXFileReference to be referenced
		by only one group, so references are not deduplicated here.
		"""
		refs = [PBXFileReference(node.name, node.abspath())
			for node in self.as_nodes(Utils.to_list(files))]
		grp = PBXGroup(name)
		grp.add(refs)
		return grp

	def unique_buildfile(self, buildfile):
		"""
		Canonicalize *buildfile*: if an equal PBXBuildFile was registered
		earlier, return that one; otherwise register and return *buildfile*.
		Keeps exactly one PBXBuildFile per file in the project.
		"""
		# Lazily create the registry on first use.
		if not hasattr(self, 'build_files'):
			self.build_files = {}
		return self.build_files.setdefault(buildfile, buildfile)

	def execute(self):
		"""
		Entry point: post all task generators and write
		<appname>.xcodeproj/project.pbxproj into the build directory.
		"""
		self.restore()
		if not self.all_envs:
			self.load_envs()
		self.recurse([self.run_dir])

		appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath()))

		project = PBXProject(appname, ('Xcode 3.2', 46), self.env)

		# Create the Products group up front; if we let XCode create it,
		# duplicate files start showing up in the UI for some reason.
		products = PBXGroup('Products')
		project.mainGroup.children.append(products)

		self.project = project
		self.products_group = products

		# Post all task generators; the process_xcode feature method is
		# invoked once per target as the groups are posted.
		if self.targets and self.targets != '*':
			(self._min_grp, self._exact_tg) = self.get_targets()

		self.current_group = 0
		while self.current_group < len(self.groups):
			self.post_group()
			self.current_group += 1

		node = self.bldnode.make_node('%s.xcodeproj' % appname)
		node.mkdir()
		node = node.make_node('project.pbxproj')
		with open(node.abspath(), 'w') as f:
			project.write(f)
		Logs.pprint('GREEN', 'Wrote %r' % node.abspath())
+
def bind_fun(tgtype):
	"""
	Create and register a convenience method (e.g. ``bld.framework(...)``)
	on Build.BuildContext for the given target type.

	The generated method augments the keyword arguments with the proper
	waf 'features' string and a 'target_type' value understood by the
	xcode target processing, then delegates to the normal task generator
	creation via ``self(*k, **kw)``.

	:param tgtype: one of 'app', 'framework', 'dylib', 'shlib', 'stlib', 'program'
	:return: the bound function
	"""
	# Map the bound method name to (waf features, xcode target type).
	# 'shlib' and 'dylib' are synonyms: both produce a 'dylib' target.
	# Using a table instead of an if/elif chain also turns an unknown
	# name into a clear KeyError rather than an UnboundLocalError.
	mapping = {
		'shlib': ('cxx cxxshlib', 'dylib'),
		'dylib': ('cxx cxxshlib', 'dylib'),
		'framework': ('cxx cxxshlib', 'framework'),
		'program': ('cxx cxxprogram', 'exe'),
		'app': ('cxx cxxprogram', 'app'),
		'stlib': ('cxx cxxstlib', 'stlib'),
	}
	def fun(self, *k, **kw):
		# Resolve via fun.__name__ (assigned below), matching the
		# original behaviour of reading the name at call time.
		features, target_type = mapping[fun.__name__]
		lst = kw['features'] = Utils.to_list(kw.get('features', []))
		for feat in features.split():
			if feat not in lst:
				lst.append(feat)
		kw['target_type'] = target_type
		return self(*k, **kw)
	fun.__name__ = tgtype
	setattr(Build.BuildContext, tgtype, fun)
	return fun
+
# Expose one convenience method per supported target type on the build context.
for _tgtype in ('app', 'framework', 'dylib', 'shlib', 'stlib', 'program'):
	bind_fun(_tgtype)
+