Diffstat (limited to 'third_party/waf/waflib/extras')
-rw-r--r--third_party/waf/waflib/extras/__init__.py7
-rw-r--r--third_party/waf/waflib/extras/add_objects.py6
-rw-r--r--third_party/waf/waflib/extras/batched_cc.py168
-rw-r--r--third_party/waf/waflib/extras/build_file_tracker.py31
-rw-r--r--third_party/waf/waflib/extras/build_logs.py110
-rw-r--r--third_party/waf/waflib/extras/c_bgxlc.py31
-rw-r--r--third_party/waf/waflib/extras/c_dumbpreproc.py70
-rw-r--r--third_party/waf/waflib/extras/c_emscripten.py96
-rw-r--r--third_party/waf/waflib/extras/c_nec.py71
-rw-r--r--third_party/waf/waflib/extras/cfg_altoptions.py109
-rw-r--r--third_party/waf/waflib/extras/cfg_cross_gnu.py176
-rw-r--r--third_party/waf/waflib/extras/clang_compilation_database.py65
-rw-r--r--third_party/waf/waflib/extras/codelite.py880
-rw-r--r--third_party/waf/waflib/extras/color_gcc.py38
-rw-r--r--third_party/waf/waflib/extras/color_rvct.py50
-rw-r--r--third_party/waf/waflib/extras/compat15.py405
-rw-r--r--third_party/waf/waflib/extras/cppcheck.py546
-rw-r--r--third_party/waf/waflib/extras/cpplint.py217
-rw-r--r--third_party/waf/waflib/extras/cython.py145
-rw-r--r--third_party/waf/waflib/extras/dcc.py70
-rw-r--r--third_party/waf/waflib/extras/distnet.py431
-rw-r--r--third_party/waf/waflib/extras/doxygen.py226
-rw-r--r--third_party/waf/waflib/extras/dpapi.py86
-rw-r--r--third_party/waf/waflib/extras/file_to_object.py136
-rw-r--r--third_party/waf/waflib/extras/freeimage.py73
-rw-r--r--third_party/waf/waflib/extras/fsb.py30
-rw-r--r--third_party/waf/waflib/extras/gccdeps.py211
-rw-r--r--third_party/waf/waflib/extras/go.py255
-rw-r--r--third_party/waf/waflib/extras/gob2.py16
-rw-r--r--third_party/waf/waflib/extras/halide.py149
-rw-r--r--third_party/waf/waflib/extras/local_rpath.py18
-rw-r--r--third_party/waf/waflib/extras/make.py141
-rw-r--r--third_party/waf/waflib/extras/md5_tstamp.py67
-rw-r--r--third_party/waf/waflib/extras/mem_reducer.py110
-rw-r--r--third_party/waf/waflib/extras/misc.py410
-rw-r--r--third_party/waf/waflib/extras/msvcdeps.py262
-rw-r--r--third_party/waf/waflib/extras/msvs.py1033
-rw-r--r--third_party/waf/waflib/extras/netcache_client.py389
-rw-r--r--third_party/waf/waflib/extras/nobuild.py23
-rw-r--r--third_party/waf/waflib/extras/objcopy.py51
-rw-r--r--third_party/waf/waflib/extras/package.py75
-rw-r--r--third_party/waf/waflib/extras/parallel_debug.py441
-rw-r--r--third_party/waf/waflib/extras/pch.py148
-rw-r--r--third_party/waf/waflib/extras/pep8.py106
-rwxr-xr-xthird_party/waf/waflib/extras/prefork.py401
-rw-r--r--third_party/waf/waflib/extras/preforkjava.py236
-rw-r--r--third_party/waf/waflib/extras/preforkunix.py317
-rw-r--r--third_party/waf/waflib/extras/print_commands.py84
-rw-r--r--third_party/waf/waflib/extras/proc.py54
-rw-r--r--third_party/waf/waflib/extras/protoc.py92
-rw-r--r--third_party/waf/waflib/extras/relocation.py83
-rw-r--r--third_party/waf/waflib/extras/remote.py326
-rw-r--r--third_party/waf/waflib/extras/review.py321
-rw-r--r--third_party/waf/waflib/extras/rst.py251
-rw-r--r--third_party/waf/waflib/extras/smart_continue.py80
-rw-r--r--third_party/waf/waflib/extras/stale.py96
-rw-r--r--third_party/waf/waflib/extras/stracedeps.py173
-rw-r--r--third_party/waf/waflib/extras/swig.py178
-rw-r--r--third_party/waf/waflib/extras/syms.py86
-rw-r--r--third_party/waf/waflib/extras/sync_exec.py8
-rw-r--r--third_party/waf/waflib/extras/unc.py110
-rw-r--r--third_party/waf/waflib/extras/unity.py67
-rw-r--r--third_party/waf/waflib/extras/use_config.py166
-rw-r--r--third_party/waf/waflib/extras/why.py75
-rw-r--r--third_party/waf/waflib/extras/win32_opts.py175
65 files changed, 11557 insertions, 0 deletions
diff --git a/third_party/waf/waflib/extras/__init__.py b/third_party/waf/waflib/extras/__init__.py
new file mode 100644
index 00000000000..3dfaabd1371
--- /dev/null
+++ b/third_party/waf/waflib/extras/__init__.py
@@ -0,0 +1,7 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)
diff --git a/third_party/waf/waflib/extras/add_objects.py b/third_party/waf/waflib/extras/add_objects.py
new file mode 100644
index 00000000000..5606fd661e8
--- /dev/null
+++ b/third_party/waf/waflib/extras/add_objects.py
@@ -0,0 +1,6 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+from waflib import Logs
+Logs.warn('This tool has been merged into the main library, remove the references to "add_objects"')
diff --git a/third_party/waf/waflib/extras/batched_cc.py b/third_party/waf/waflib/extras/batched_cc.py
new file mode 100644
index 00000000000..4e48e780801
--- /dev/null
+++ b/third_party/waf/waflib/extras/batched_cc.py
@@ -0,0 +1,168 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2015 (ita)
+
+"""
+Build as batches.
+
+Instead of compiling object files one by one, C/C++ compilers can often compile several files at once:
+cc -c ../file1.c ../file2.c ../file3.c
+
+The object files are written to the directory where the compiler is invoked, and dependencies become
+harder to track (the command must not be re-run on all source files when only one of them changes)
+
+As such, we pretend that the files are compiled one by one, but no command is actually run:
+each cc/cxx task is replaced by a slave task, and a master task collects the
+signatures from each slave and computes the command line to run.
+
+Just load this module during configuration (no other change required).
+This is provided as an example; for performance, unity builds are recommended instead (fewer tasks and
+fewer jobs to execute). See waflib/extras/unity.py.
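+
+A minimal configuration sketch (compiler_cxx works the same way)::
+
+	def configure(conf):
+		conf.load('compiler_c')
+		conf.load('batched_cc')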
+"""
+
+from waflib import Task, Utils
+from waflib.TaskGen import extension, feature, after_method
+from waflib.Tools import c, cxx
+
+MAX_BATCH = 50
+
+c_str = '${CC} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED}'
+c_fun, _ = Task.compile_fun_noshell(c_str)
+
+cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED}'
+cxx_fun, _ = Task.compile_fun_noshell(cxx_str)
+
+count = 70000
+class batch_task(Task.Task):
+ color = 'PINK'
+
+ after = ['c', 'cxx']
+ before = ['cprogram', 'cshlib', 'cstlib', 'cxxprogram', 'cxxshlib', 'cxxstlib']
+
+ def uid(self):
+ m = Utils.md5()
+ m.update(Task.Task.uid(self))
+ m.update(str(self.generator.idx).encode())
+ return m.digest()
+
+ def __str__(self):
+ return 'Batch compilation for %d slaves' % len(self.slaves)
+
+ def __init__(self, *k, **kw):
+ Task.Task.__init__(self, *k, **kw)
+ self.slaves = []
+ self.inputs = []
+ self.hasrun = 0
+
+ global count
+ count += 1
+ self.idx = count
+
+ def add_slave(self, slave):
+ self.slaves.append(slave)
+ self.set_run_after(slave)
+
+ def runnable_status(self):
+ for t in self.run_after:
+ if not t.hasrun:
+ return Task.ASK_LATER
+
+ for t in self.slaves:
+ #if t.executed:
+ if t.hasrun != Task.SKIPPED:
+ return Task.RUN_ME
+
+ return Task.SKIP_ME
+
+ def run(self):
+ self.outputs = []
+
+ srclst = []
+ slaves = []
+ for t in self.slaves:
+ if t.hasrun != Task.SKIPPED:
+ slaves.append(t)
+ srclst.append(t.inputs[0].abspath())
+
+ self.env.SRCLST = srclst
+ self.cwd = slaves[0].outputs[0].parent.abspath()
+
+ if self.slaves[0].__class__.__name__ == 'c':
+ ret = c_fun(self)
+ else:
+ ret = cxx_fun(self)
+
+ if ret:
+ return ret
+
+ for t in slaves:
+ t.old_post_run()
+
+def hook(cls_type):
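+	# Replace the regular c/cxx extension hook: every source file gets a slave
+	# task that runs no command, and slaves are grouped per source directory
+	# into 'batch' master tasks capped at roughly MAX_BATCH inputs each.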
+ def n_hook(self, node):
+
+ ext = '.obj' if self.env.CC_NAME == 'msvc' else '.o'
+ name = node.name
+ k = name.rfind('.')
+ if k >= 0:
+ basename = name[:k] + ext
+ else:
+ basename = name + ext
+
+ outdir = node.parent.get_bld().make_node('%d' % self.idx)
+ outdir.mkdir()
+ out = outdir.find_or_declare(basename)
+
+ task = self.create_task(cls_type, node, out)
+
+ try:
+ self.compiled_tasks.append(task)
+ except AttributeError:
+ self.compiled_tasks = [task]
+
+ if not getattr(self, 'masters', None):
+ self.masters = {}
+ self.allmasters = []
+
+ def fix_path(tsk):
+ if self.env.CC_NAME == 'msvc':
+ tsk.env.append_unique('CXX_TGT_F_BATCHED', '/Fo%s\\' % outdir.abspath())
+
+ if not node.parent in self.masters:
+ m = self.masters[node.parent] = self.master = self.create_task('batch')
+ fix_path(m)
+ self.allmasters.append(m)
+ else:
+ m = self.masters[node.parent]
+ if len(m.slaves) > MAX_BATCH:
+ m = self.masters[node.parent] = self.master = self.create_task('batch')
+ fix_path(m)
+ self.allmasters.append(m)
+ m.add_slave(task)
+ return task
+ return n_hook
+
+extension('.c')(hook('c'))
+extension('.cpp','.cc','.cxx','.C','.c++')(hook('cxx'))
+
+@feature('cprogram', 'cshlib', 'cstaticlib', 'cxxprogram', 'cxxshlib', 'cxxstlib')
+@after_method('apply_link')
+def link_after_masters(self):
+ if getattr(self, 'allmasters', None):
+ for m in self.allmasters:
+ self.link_task.set_run_after(m)
+
+# Modify the c and cxx task classes - in theory it would be best to
+# create subclasses and to re-map the c/c++ extensions
+for x in ('c', 'cxx'):
+ t = Task.classes[x]
+ def run(self):
+ pass
+
+ def post_run(self):
+ pass
+
+ setattr(t, 'oldrun', getattr(t, 'run', None))
+ setattr(t, 'run', run)
+ setattr(t, 'old_post_run', t.post_run)
+ setattr(t, 'post_run', post_run)
diff --git a/third_party/waf/waflib/extras/build_file_tracker.py b/third_party/waf/waflib/extras/build_file_tracker.py
new file mode 100644
index 00000000000..a00f7b2ad80
--- /dev/null
+++ b/third_party/waf/waflib/extras/build_file_tracker.py
@@ -0,0 +1,31 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015
+
+"""
+Force files to depend on the timestamps of those located in the build directory. You may
+want to use this to force partial rebuilds, see playground/track_output_files/ for a working example.
+
+Note that there are several ways to implement this; one may want to use timestamps on source files too, for example,
+or hash the files in the source directory only under certain conditions (md5_tstamp tool),
+or hash the file in the build directory along with its timestamp (similar to 'update_outputs')
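+
+A minimal way to enable the tool (a sketch; it works by replacing Node.get_bld_sig)::
+
+	def configure(conf):
+		conf.load('build_file_tracker')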
+"""
+
+import os
+from waflib import Node, Utils
+
+def get_bld_sig(self):
+ try:
+ return self.cache_sig
+ except AttributeError:
+ pass
+
+ if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
+ self.sig = Utils.h_file(self.abspath())
+ self.cache_sig = ret = self.sig
+ else:
+		# add the file timestamp to the signature of build nodes
+ self.cache_sig = ret = self.sig + str(os.stat(self.abspath()).st_mtime)
+ return ret
+
+Node.Node.get_bld_sig = get_bld_sig
diff --git a/third_party/waf/waflib/extras/build_logs.py b/third_party/waf/waflib/extras/build_logs.py
new file mode 100644
index 00000000000..2fb8d346874
--- /dev/null
+++ b/third_party/waf/waflib/extras/build_logs.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2013 (ita)
+
+"""
+A system for recording all outputs to a log file. Just add the following to your wscript file::
+
+ def init(ctx):
+ ctx.load('build_logs')
+"""
+
+import atexit, sys, time, os, shutil, threading
+from waflib import ansiterm, Logs, Context
+
+# adding the logs under the build/ directory would clash with the "clean" command
+try:
+ up = os.path.dirname(Context.g_module.__file__)
+except AttributeError:
+ up = '.'
+LOGFILE = os.path.join(up, 'logs', '%s.log' % time.strftime('%Y_%m_%d_%H_%M'))
+
+wlock = threading.Lock()
+class log_to_file(object):
+ def __init__(self, stream, fileobj, filename):
+ self.stream = stream
+ self.encoding = self.stream.encoding
+ self.fileobj = fileobj
+ self.filename = filename
+ self.is_valid = True
+ def replace_colors(self, data):
+ for x in Logs.colors_lst.values():
+ if isinstance(x, str):
+ data = data.replace(x, '')
+ return data
+ def write(self, data):
+ try:
+ wlock.acquire()
+ self.stream.write(data)
+ self.stream.flush()
+ if self.is_valid:
+ self.fileobj.write(self.replace_colors(data))
+ finally:
+ wlock.release()
+ def fileno(self):
+ return self.stream.fileno()
+ def flush(self):
+ self.stream.flush()
+ if self.is_valid:
+ self.fileobj.flush()
+ def isatty(self):
+ return self.stream.isatty()
+
+def init(ctx):
+ global LOGFILE
+ filename = os.path.abspath(LOGFILE)
+ try:
+ os.makedirs(os.path.dirname(os.path.abspath(filename)))
+ except OSError:
+ pass
+
+ if hasattr(os, 'O_NOINHERIT'):
+ fd = os.open(LOGFILE, os.O_CREAT | os.O_TRUNC | os.O_WRONLY | os.O_NOINHERIT)
+ fileobj = os.fdopen(fd, 'w')
+ else:
+ fileobj = open(LOGFILE, 'w')
+ old_stderr = sys.stderr
+
+ # sys.stdout has already been replaced, so __stdout__ will be faster
+ #sys.stdout = log_to_file(sys.stdout, fileobj, filename)
+ #sys.stderr = log_to_file(sys.stderr, fileobj, filename)
+ def wrap(stream):
+ if stream.isatty():
+ return ansiterm.AnsiTerm(stream)
+ return stream
+ sys.stdout = log_to_file(wrap(sys.__stdout__), fileobj, filename)
+ sys.stderr = log_to_file(wrap(sys.__stderr__), fileobj, filename)
+
+ # now mess with the logging module...
+ for x in Logs.log.handlers:
+ try:
+ stream = x.stream
+ except AttributeError:
+ pass
+ else:
+ if id(stream) == id(old_stderr):
+ x.stream = sys.stderr
+
+def exit_cleanup():
+ try:
+ fileobj = sys.stdout.fileobj
+ except AttributeError:
+ pass
+ else:
+ sys.stdout.is_valid = False
+ sys.stderr.is_valid = False
+ fileobj.close()
+ filename = sys.stdout.filename
+
+ Logs.info('Output logged to %r' % filename)
+
+ # then copy the log file to "latest.log" if possible
+ up = os.path.dirname(os.path.abspath(filename))
+ try:
+ shutil.copy(filename, os.path.join(up, 'latest.log'))
+ except OSError:
+		# this may fail on windows due to spawned processes
+ pass
+
+atexit.register(exit_cleanup)
diff --git a/third_party/waf/waflib/extras/c_bgxlc.py b/third_party/waf/waflib/extras/c_bgxlc.py
new file mode 100644
index 00000000000..7633f566b07
--- /dev/null
+++ b/third_party/waf/waflib/extras/c_bgxlc.py
@@ -0,0 +1,31 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+"""
+IBM XL Compiler for Blue Gene
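+
+The tool registers itself with compiler_c for the 'linux' platform, so a minimal
+configuration sketch is::
+
+	def configure(conf):
+		conf.load('compiler_c')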
+"""
+
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
+
+from waflib.Tools import xlc # method xlc_common_flags
+from waflib.Tools.compiler_c import c_compiler
+c_compiler['linux'].append('c_bgxlc')
+
+@conf
+def find_bgxlc(conf):
+ cc = conf.find_program(['bgxlc_r','bgxlc'], var='CC')
+ conf.get_xlc_version(cc)
+ conf.env.CC = cc
+ conf.env.CC_NAME = 'bgxlc'
+
+def configure(conf):
+ conf.find_bgxlc()
+ conf.find_ar()
+ conf.xlc_common_flags()
+ conf.env.LINKFLAGS_cshlib = ['-G','-Wl,-bexpfull']
+ conf.env.LINKFLAGS_cprogram = []
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
diff --git a/third_party/waf/waflib/extras/c_dumbpreproc.py b/third_party/waf/waflib/extras/c_dumbpreproc.py
new file mode 100644
index 00000000000..9407527aca4
--- /dev/null
+++ b/third_party/waf/waflib/extras/c_dumbpreproc.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2010 (ita)
+
+"""
+Dumb C/C++ preprocessor for finding dependencies
+
+It will look at all include files it can find after removing the comments, so the following
+will always add the dependency on both "a.h" and "b.h"::
+
+ #include "a.h"
+ #ifdef B
+ #include "b.h"
+ #endif
+ int main() {
+ return 0;
+ }
+
+To use::
+
+ def configure(conf):
+ conf.load('compiler_c')
+ conf.load('c_dumbpreproc')
+"""
+
+import re
+from waflib.Tools import c_preproc
+
+re_inc = re.compile(
+ '^[ \t]*(#|%:)[ \t]*(include)[ \t]*[<"](.*)[>"]\r*$',
+ re.IGNORECASE | re.MULTILINE)
+
+def lines_includes(node):
+ code = node.read()
+ if c_preproc.use_trigraphs:
+ for (a, b) in c_preproc.trig_def: code = code.split(a).join(b)
+ code = c_preproc.re_nl.sub('', code)
+ code = c_preproc.re_cpp.sub(c_preproc.repl, code)
+ return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]
+
+parser = c_preproc.c_parser
+class dumb_parser(parser):
+ def addlines(self, node):
+ if node in self.nodes[:-1]:
+ return
+ self.currentnode_stack.append(node.parent)
+
+ # Avoid reading the same files again
+ try:
+ lines = self.parse_cache[node]
+ except KeyError:
+ lines = self.parse_cache[node] = lines_includes(node)
+
+ self.lines = lines + [(c_preproc.POPFILE, '')] + self.lines
+
+ def start(self, node, env):
+ try:
+ self.parse_cache = node.ctx.parse_cache
+ except AttributeError:
+ self.parse_cache = node.ctx.parse_cache = {}
+
+ self.addlines(node)
+ while self.lines:
+ (x, y) = self.lines.pop(0)
+ if x == c_preproc.POPFILE:
+ self.currentnode_stack.pop()
+ continue
+ self.tryfind(y)
+
+c_preproc.c_parser = dumb_parser
diff --git a/third_party/waf/waflib/extras/c_emscripten.py b/third_party/waf/waflib/extras/c_emscripten.py
new file mode 100644
index 00000000000..6e7fbbe1142
--- /dev/null
+++ b/third_party/waf/waflib/extras/c_emscripten.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 vi:ts=4:noexpandtab
+
+import subprocess, shlex, sys
+
+from waflib.Tools import ccroot, gcc, gxx
+from waflib.Configure import conf
+from waflib.TaskGen import after_method, feature
+
+from waflib.Tools.compiler_c import c_compiler
+from waflib.Tools.compiler_cxx import cxx_compiler
+
+for supported_os in ('linux', 'darwin', 'gnu', 'aix'):
+ c_compiler[supported_os].append('c_emscripten')
+ cxx_compiler[supported_os].append('c_emscripten')
+
+
+@conf
+def get_emscripten_version(conf, cc):
+ """
+ Emscripten doesn't support processing '-' like clang/gcc
+ """
+
+ dummy = conf.cachedir.parent.make_node("waf-emscripten.c")
+ dummy.write("")
+ cmd = cc + ['-dM', '-E', '-x', 'c', dummy.abspath()]
+ env = conf.env.env or None
+ try:
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
+ out = p.communicate()[0]
+ except Exception as e:
+ conf.fatal('Could not determine emscripten version %r: %s' % (cmd, e))
+
+ if not isinstance(out, str):
+ out = out.decode(sys.stdout.encoding or 'iso8859-1')
+
+ k = {}
+ out = out.splitlines()
+ for line in out:
+ lst = shlex.split(line)
+ if len(lst)>2:
+ key = lst[1]
+ val = lst[2]
+ k[key] = val
+
+ if not ('__clang__' in k and 'EMSCRIPTEN' in k):
+ conf.fatal('Could not determine the emscripten compiler version.')
+
+ conf.env.DEST_OS = 'generic'
+ conf.env.DEST_BINFMT = 'elf'
+ conf.env.DEST_CPU = 'asm-js'
+ conf.env.CC_VERSION = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
+ return k
+
+@conf
+def find_emscripten(conf):
+ cc = conf.find_program(['emcc'], var='CC')
+ conf.get_emscripten_version(cc)
+ conf.env.CC = cc
+ conf.env.CC_NAME = 'emscripten'
+ cxx = conf.find_program(['em++'], var='CXX')
+ conf.env.CXX = cxx
+ conf.env.CXX_NAME = 'emscripten'
+ conf.find_program(['emar'], var='AR')
+
+def configure(conf):
+ conf.find_emscripten()
+ conf.find_ar()
+ conf.gcc_common_flags()
+ conf.gxx_common_flags()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.cxx_load_tools()
+ conf.cxx_add_flags()
+ conf.link_add_flags()
+ conf.env.ARFLAGS = ['rcs']
+ conf.env.cshlib_PATTERN = '%s.js'
+ conf.env.cxxshlib_PATTERN = '%s.js'
+ conf.env.cstlib_PATTERN = '%s.bc'
+ conf.env.cxxstlib_PATTERN = '%s.bc'
+ conf.env.cprogram_PATTERN = '%s.html'
+ conf.env.cxxprogram_PATTERN = '%s.html'
+ conf.env.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
+
+@feature('c', 'cxx', 'acm', 'includes')
+@after_method('propagate_uselib_vars', 'process_source', 'apply_incpaths')
+def apply_incpaths_emscripten(self):
+ """
+ Emscripten doesn't like absolute include paths
+ """
+ # TODO: in waf 1.9 we can switch back to bldnode as the default since path_from handles cross-drive paths
+	if self.env.CC_NAME != 'emscripten' and self.env.CXX_NAME != 'emscripten':
+ return
+ lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
+ self.includes_nodes = lst
+ self.env['INCPATHS'] = [x.path_from(self.bld.bldnode) for x in lst]
diff --git a/third_party/waf/waflib/extras/c_nec.py b/third_party/waf/waflib/extras/c_nec.py
new file mode 100644
index 00000000000..87e0c055f26
--- /dev/null
+++ b/third_party/waf/waflib/extras/c_nec.py
@@ -0,0 +1,71 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+"""
+NEC SX Compiler for SX vector systems
+"""
+
+import re
+from waflib import Utils
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
+
+from waflib.Tools import xlc # method xlc_common_flags
+from waflib.Tools.compiler_c import c_compiler
+c_compiler['linux'].append('c_nec')
+
+@conf
+def find_sxc(conf):
+ cc = conf.find_program(['sxcc'], var='CC')
+ conf.get_sxc_version(cc)
+ conf.env.CC = cc
+ conf.env.CC_NAME = 'sxcc'
+
+@conf
+def get_sxc_version(conf, fc):
+ version_re = re.compile(r"C\+\+/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
+ cmd = fc + ['-V']
+ p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None)
+	out, err = p.communicate()
+	if not isinstance(out, str):
+		# python 3: communicate() returns bytes, decode before matching
+		out = out.decode('iso8859-1')
+		err = err.decode('iso8859-1')
+
+ if out: match = version_re(out)
+ else: match = version_re(err)
+ if not match:
+ conf.fatal('Could not determine the NEC C compiler version.')
+ k = match.groupdict()
+ conf.env['C_VERSION'] = (k['major'], k['minor'])
+
+@conf
+def sxc_common_flags(conf):
+ v=conf.env
+ v['CC_SRC_F']=[]
+ v['CC_TGT_F']=['-c','-o']
+ if not v['LINK_CC']:v['LINK_CC']=v['CC']
+ v['CCLNK_SRC_F']=[]
+ v['CCLNK_TGT_F']=['-o']
+ v['CPPPATH_ST']='-I%s'
+ v['DEFINES_ST']='-D%s'
+ v['LIB_ST']='-l%s'
+ v['LIBPATH_ST']='-L%s'
+ v['STLIB_ST']='-l%s'
+ v['STLIBPATH_ST']='-L%s'
+ v['RPATH_ST']=''
+ v['SONAME_ST']=[]
+ v['SHLIB_MARKER']=[]
+ v['STLIB_MARKER']=[]
+ v['LINKFLAGS_cprogram']=['']
+ v['cprogram_PATTERN']='%s'
+ v['CFLAGS_cshlib']=['-fPIC']
+ v['LINKFLAGS_cshlib']=['']
+ v['cshlib_PATTERN']='lib%s.so'
+ v['LINKFLAGS_cstlib']=[]
+ v['cstlib_PATTERN']='lib%s.a'
+
+def configure(conf):
+ conf.find_sxc()
+	conf.find_program('sxar', var='AR')
+ conf.sxc_common_flags()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
diff --git a/third_party/waf/waflib/extras/cfg_altoptions.py b/third_party/waf/waflib/extras/cfg_altoptions.py
new file mode 100644
index 00000000000..4a82a70dede
--- /dev/null
+++ b/third_party/waf/waflib/extras/cfg_altoptions.py
@@ -0,0 +1,109 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Tool to extend c_config.check_cfg()
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2014"
+
+"""
+
+This tool makes it possible to work around the absence of ``*-config`` programs
+on some systems, by keeping the same clean configuration syntax while inferring
+values or permitting their modification via the options interface.
+
+Note that pkg-config also honours ``PKG_CONFIG_PATH``, so you can put custom
+.pc files in a folder and point that variable at it. This tool could also be
+implemented by taking advantage of this fact.
+
+Usage::
+
+	def options(opt):
+		opt.load('cfg_altoptions')
+		opt.add_package_option('package')
+
+	def configure(conf):
+		conf.load('cfg_altoptions')
+		conf.check_cfg(...)
+
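+Example invocation (a sketch; the option name is derived from the package name)::
+
+	waf configure --package-root=/opt/package
+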
+Known issues:
+
+- Behavior with different build contexts...
+
+"""
+
+import os
+import functools
+from waflib import Configure, Options, Errors
+
+def name_to_dest(x):
+ return x.lower().replace('-', '_')
+
+
+def options(opt):
+ def x(opt, param):
+ dest = name_to_dest(param)
+ gr = opt.get_option_group("configure options")
+ gr.add_option('--%s-root' % dest,
+ help="path containing include and lib subfolders for %s" \
+ % param,
+ )
+
+ opt.add_package_option = functools.partial(x, opt)
+
+
+check_cfg_old = getattr(Configure.ConfigurationContext, 'check_cfg')
+
+@Configure.conf
+def check_cfg(conf, *k, **kw):
+ if k:
+ lst = k[0].split()
+ kw['package'] = lst[0]
+ kw['args'] = ' '.join(lst[1:])
+
+ if not 'package' in kw:
+ return check_cfg_old(conf, **kw)
+
+ package = kw['package']
+
+ package_lo = name_to_dest(package)
+ package_hi = package.upper().replace('-', '_') # TODO FIXME
+ package_hi = kw.get('uselib_store', package_hi)
+
+	def check_folder(path, name):
+		if not os.path.isdir(path):
+			raise Errors.ConfigurationError(
+				"%s_%s (%s) is not a folder!" \
+				% (package_lo, name, path))
+		return path
+
+ root = getattr(Options.options, '%s_root' % package_lo, None)
+
+ if root is None:
+ return check_cfg_old(conf, **kw)
+ else:
+ def add_manual_var(k, v):
+			conf.start_msg('Adding manual var for %s' % package)
+ conf.env["%s_%s" % (k, package_hi)] = v
+ conf.end_msg("%s = %s" % (k, v))
+
+
+ check_folder(root, 'root')
+
+ pkg_inc = check_folder(os.path.join(root, "include"), 'inc')
+ add_manual_var('INCLUDES', [pkg_inc])
+ pkg_lib = check_folder(os.path.join(root, "lib"), 'libpath')
+ add_manual_var('LIBPATH', [pkg_lib])
+ add_manual_var('LIB', [package])
+
+ for x in kw.get('manual_deps', []):
+ for k, v in sorted(conf.env.get_merged_dict().items()):
+ if k.endswith('_%s' % x):
+ k = k.replace('_%s' % x, '')
+				conf.start_msg('Adding manual dep for %s' \
+					% package)
+ conf.env["%s_%s" % (k, package_hi)] += v
+ conf.end_msg('%s += %s' % (k, v))
+
+ return True
diff --git a/third_party/waf/waflib/extras/cfg_cross_gnu.py b/third_party/waf/waflib/extras/cfg_cross_gnu.py
new file mode 100644
index 00000000000..0fb2efb080d
--- /dev/null
+++ b/third_party/waf/waflib/extras/cfg_cross_gnu.py
@@ -0,0 +1,176 @@
+#!/usr/bin/python
+# -*- coding: utf-8 vi:ts=4:noexpandtab
+# Tool to provide dedicated variables for cross-compilation
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2014"
+
+"""
+
+This tool allows using environment variables to define cross-compilation settings,
+which is mostly useful when working with build variants.
+
+The variables are obtained from the environment in 3 ways:
+
+1. By defining CHOST, they can be derived as ${CHOST}-${TOOL}
+2. By defining HOST_x
+3. By defining ${CHOST//-/_}_x
+
+Usage:
+
+- In your build script::
+
+	def configure(conf):
+		...
+		conf.load('cfg_cross_gnu')
+ for variant in x_variants:
+ conf.xcheck_host()
+ conf.xcheck_host_var('POUET')
+ ...
+
+ ...
+
+- Then::
+
+ CHOST=arm-hardfloat-linux-gnueabi waf configure
+
+ env arm-hardfloat-linux-gnueabi-CC="clang -..." waf configure
+
+ CFLAGS=... CHOST=arm-hardfloat-linux-gnueabi HOST_CFLAGS=-g waf configure
+
+ HOST_CC="clang -..." waf configure
+
+"""
+
+import os
+from waflib import Utils, Configure
+
+try:
+ from shlex import quote
+except ImportError:
+ from pipes import quote
+
+def get_chost_stuff(conf):
+ """
+	Get the CHOST value from conf.env, plus a form usable in environment variable names (dashes replaced by underscores)
+ """
+ chost = None
+ chost_envar = None
+ if conf.env.CHOST:
+ chost = conf.env.CHOST[0]
+ chost_envar = chost.replace('-', '_')
+ return chost, chost_envar
+
+
+@Configure.conf
+def xcheck_envar(conf, name, wafname=None, cross=False):
+ wafname = wafname or name
+ envar = os.environ.get(name, None)
+
+ if envar is None:
+ return
+
+ value = Utils.to_list(envar) if envar != '' else [envar]
+
+ conf.env[wafname] = value
+ if cross:
+ pretty = 'cross-compilation %s' % wafname
+ else:
+ pretty = wafname
+ conf.msg('Will use %s' % pretty,
+ " ".join(quote(x) for x in value))
+
+@Configure.conf
+def xcheck_host_prog(conf, name, tool, wafname=None):
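+	"""
+	Set conf.env[wafname] for a cross tool (a summary of the lookup order):
+	use ${CHOST//-/_}_<name> if set, else HOST_<name>, else derive
+	'<chost>-<tool>' from CHOST when available.
+	"""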
+ wafname = wafname or name
+
+ chost, chost_envar = get_chost_stuff(conf)
+
+ specific = None
+ if chost:
+ specific = os.environ.get('%s_%s' % (chost_envar, name), None)
+
+ if specific:
+ value = Utils.to_list(specific)
+ conf.env[wafname] += value
+ conf.msg('Will use cross-compilation %s from %s_%s' \
+ % (name, chost_envar, name),
+ " ".join(quote(x) for x in value))
+ return
+ else:
+ envar = os.environ.get('HOST_%s' % name, None)
+ if envar is not None:
+ value = Utils.to_list(envar)
+ conf.env[wafname] = value
+ conf.msg('Will use cross-compilation %s from HOST_%s' \
+ % (name, name),
+ " ".join(quote(x) for x in value))
+ return
+
+ if conf.env[wafname]:
+ return
+
+ value = None
+ if chost:
+ value = '%s-%s' % (chost, tool)
+
+ if value:
+ conf.env[wafname] = value
+ conf.msg('Will use cross-compilation %s from CHOST' \
+ % wafname, value)
+
+@Configure.conf
+def xcheck_host_envar(conf, name, wafname=None):
+ wafname = wafname or name
+
+ chost, chost_envar = get_chost_stuff(conf)
+
+ specific = None
+ if chost:
+ specific = os.environ.get('%s_%s' % (chost_envar, name), None)
+
+ if specific:
+ value = Utils.to_list(specific)
+ conf.env[wafname] += value
+ conf.msg('Will use cross-compilation %s from %s_%s' \
+ % (name, chost_envar, name),
+ " ".join(quote(x) for x in value))
+ return
+
+
+ envar = os.environ.get('HOST_%s' % name, None)
+ if envar is None:
+ return
+
+ value = Utils.to_list(envar) if envar != '' else [envar]
+
+ conf.env[wafname] = value
+ conf.msg('Will use cross-compilation %s from HOST_%s' \
+ % (name, name),
+ " ".join(quote(x) for x in value))
+
+
+@Configure.conf
+def xcheck_host(conf):
+ conf.xcheck_envar('CHOST', cross=True)
+ conf.xcheck_host_prog('CC', 'gcc')
+ conf.xcheck_host_prog('CXX', 'g++')
+ conf.xcheck_host_prog('LINK_CC', 'gcc')
+ conf.xcheck_host_prog('LINK_CXX', 'g++')
+ conf.xcheck_host_prog('AR', 'ar')
+ conf.xcheck_host_prog('AS', 'as')
+ conf.xcheck_host_prog('LD', 'ld')
+ conf.xcheck_host_envar('CFLAGS')
+ conf.xcheck_host_envar('CXXFLAGS')
+ conf.xcheck_host_envar('LDFLAGS', 'LINKFLAGS')
+ conf.xcheck_host_envar('LIB')
+ conf.xcheck_host_envar('PKG_CONFIG_LIBDIR')
+ conf.xcheck_host_envar('PKG_CONFIG_PATH')
+
+ if not conf.env.env:
+ conf.env.env = {}
+ conf.env.env.update(os.environ)
+ if conf.env.PKG_CONFIG_LIBDIR:
+ conf.env.env['PKG_CONFIG_LIBDIR'] = conf.env.PKG_CONFIG_LIBDIR[0]
+ if conf.env.PKG_CONFIG_PATH:
+ conf.env.env['PKG_CONFIG_PATH'] = conf.env.PKG_CONFIG_PATH[0]
diff --git a/third_party/waf/waflib/extras/clang_compilation_database.py b/third_party/waf/waflib/extras/clang_compilation_database.py
new file mode 100644
index 00000000000..e7230d4c7f4
--- /dev/null
+++ b/third_party/waf/waflib/extras/clang_compilation_database.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Christoph Koke, 2013
+
+"""
+Writes the c and cpp compile commands into build/compile_commands.json
+see http://clang.llvm.org/docs/JSONCompilationDatabase.html
+
+Usage:
+
+ def configure(conf):
+ conf.load('compiler_cxx')
+ ...
+ conf.load('clang_compilation_database')
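+
+The generated build/compile_commands.json can then be consumed by clang tooling,
+for example (assuming clang-tidy is installed)::
+
+    clang-tidy -p build/ src/main.cpp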
+"""
+
+import sys, os, json, shlex, pipes
+from waflib import Logs, TaskGen
+from waflib.Tools import c, cxx
+
+if sys.hexversion >= 0x3030000:
+ quote = shlex.quote
+else:
+ quote = pipes.quote
+
+@TaskGen.feature('*')
+@TaskGen.after_method('process_use')
+def collect_compilation_db_tasks(self):
+ "Add a compilation database entry for compiled tasks"
+ try:
+ clang_db = self.bld.clang_compilation_database_tasks
+ except AttributeError:
+ clang_db = self.bld.clang_compilation_database_tasks = []
+ self.bld.add_post_fun(write_compilation_database)
+
+ for task in getattr(self, 'compiled_tasks', []):
+ if isinstance(task, (c.c, cxx.cxx)):
+ clang_db.append(task)
+
+def write_compilation_database(ctx):
+ "Write the clang compilation database as JSON"
+ database_file = ctx.bldnode.make_node('compile_commands.json')
+ Logs.info("Build commands will be stored in %s" % database_file.path_from(ctx.path))
+ try:
+ root = json.load(database_file)
+ except IOError:
+ root = []
+ clang_db = dict((x["file"], x) for x in root)
+ for task in getattr(ctx, 'clang_compilation_database_tasks', []):
+ try:
+ cmd = task.last_cmd
+ except AttributeError:
+ continue
+ directory = getattr(task, 'cwd', ctx.variant_dir)
+ f_node = task.inputs[0]
+ filename = os.path.relpath(f_node.abspath(), directory)
+ cmd = " ".join(map(quote, cmd))
+ entry = {
+ "directory": directory,
+ "command": cmd,
+ "file": filename,
+ }
+ clang_db[filename] = entry
+ root = list(clang_db.values())
+ database_file.write(json.dumps(root, indent=2))
diff --git a/third_party/waf/waflib/extras/codelite.py b/third_party/waf/waflib/extras/codelite.py
new file mode 100644
index 00000000000..c12ae4b9796
--- /dev/null
+++ b/third_party/waf/waflib/extras/codelite.py
@@ -0,0 +1,880 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# CodeLite Project
+# Christian Klein (chrikle@berlios.de)
+# Created: Jan 2012
+# This file was created using msvs.py as a template
+# I hope this template will work properly
+
+"""
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. The name of the author may not be used to endorse or promote products
+ derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+"""
+
+"""
+
+
+To add this tool to your project:
+def options(opt):
+ opt.load('codelite')
+
+It can be a good idea to add the sync_exec tool too.
+
+To generate solution files:
+$ waf configure codelite
+
+To customize the outputs, provide subclasses in your wscript files:
+
+from waflib.extras import codelite
+class vsnode_target(codelite.vsnode_target):
+ def get_build_command(self, props):
+ # likely to be required
+ return "waf.bat build"
+ def collect_source(self):
+ # likely to be required
+ ...
+class codelite_bar(codelite.codelite_generator):
+ def init(self):
+ codelite.codelite_generator.init(self)
+ self.vsnode_target = vsnode_target
+
+The codelite class re-uses the same build() function for reading the targets (task generators);
+you may therefore specify codelite settings on the context object:
+
+def build(bld):
+ bld.codelite_solution_name = 'foo.workspace'
+ bld.waf_command = 'waf.bat'
+ bld.projects_dir = bld.srcnode.make_node('')
+ bld.projects_dir.mkdir()
+
+
+ASSUMPTIONS:
+* a project can be either a directory or a target, project files are written only for targets that have source files
+* each project is a .project file; the project uuid therefore only needs to be a hash of the absolute path
+"""
+
+import os, re, sys
+import uuid # requires python 2.5
+from waflib.Build import BuildContext
+from waflib import Utils, TaskGen, Logs, Task, Context, Node, Options
+
+HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'
+
+PROJECT_TEMPLATE = r'''<?xml version="1.0" encoding="utf-8"?>
+<CodeLite_Project Name="${project.name}" InternalType="Library">
+ <Plugins>
+ <Plugin Name="qmake">
+ <![CDATA[00010001N0005Release000000000000]]>
+ </Plugin>
+ </Plugins>
+ <Description/>
+ <Dependencies/>
+ <VirtualDirectory Name="src">
+ ${for x in project.source}
+ ${if (project.get_key(x)=="sourcefile")}
+ <File Name="${x.abspath()}"/>
+ ${endif}
+ ${endfor}
+ </VirtualDirectory>
+ <VirtualDirectory Name="include">
+ ${for x in project.source}
+ ${if (project.get_key(x)=="headerfile")}
+ <File Name="${x.abspath()}"/>
+ ${endif}
+ ${endfor}
+ </VirtualDirectory>
+ <Settings Type="Dynamic Library">
+ <GlobalSettings>
+ <Compiler Options="" C_Options="">
+ <IncludePath Value="."/>
+ </Compiler>
+ <Linker Options="">
+ <LibraryPath Value="."/>
+ </Linker>
+ <ResourceCompiler Options=""/>
+ </GlobalSettings>
+  <Configuration Name="Release" CompilerType="gnu gcc" DebuggerType="GNU gdb debugger" Type="Dynamic Library" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
+ <Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags="">
+ <IncludePath Value="."/>
+ <IncludePath Value="."/>
+ </Compiler>
+ <Linker Options="" Required="yes">
+ <LibraryPath Value=""/>
+ </Linker>
+ <ResourceCompiler Options="" Required="no"/>
+ <General OutputFile="${xml:project.build_properties[0].output_file}" IntermediateDirectory="" Command="" CommandArguments="" PauseExecWhenProcTerminates="yes"/>
+ <Environment EnvVarSetName="&lt;Use Defaults&gt;" DbgSetName="&lt;Use Defaults&gt;">
+ <![CDATA[]]>
+ </Environment>
+   <Debugger IsRemote="no" RemoteHostName="" RemoteHostPort="" DebuggerPath="">
+ <PostConnectCommands/>
+ <StartupCommands/>
+   </Debugger>
+ <PreBuild/>
+ <PostBuild/>
+ <CustomBuild Enabled="yes">
+ <RebuildCommand>${xml:project.get_rebuild_command(project.build_properties[0])}</RebuildCommand>
+ <CleanCommand>${xml:project.get_clean_command(project.build_properties[0])}</CleanCommand>
+ <BuildCommand>${xml:project.get_build_command(project.build_properties[0])}</BuildCommand>
+ <Target Name="Install">${xml:project.get_install_command(project.build_properties[0])}</Target>
+ <Target Name="Build and Install">${xml:project.get_build_and_install_command(project.build_properties[0])}</Target>
+ <Target Name="Build All">${xml:project.get_build_all_command(project.build_properties[0])}</Target>
+ <Target Name="Rebuild All">${xml:project.get_rebuild_all_command(project.build_properties[0])}</Target>
+ <Target Name="Clean All">${xml:project.get_clean_all_command(project.build_properties[0])}</Target>
+ <Target Name="Build and Install All">${xml:project.get_build_and_install_all_command(project.build_properties[0])}</Target>
+ <PreprocessFileCommand/>
+ <SingleFileCommand/>
+ <MakefileGenerationCommand/>
+ <ThirdPartyToolName>None</ThirdPartyToolName>
+ <WorkingDirectory/>
+ </CustomBuild>
+ <AdditionalRules>
+ <CustomPostBuild/>
+ <CustomPreBuild/>
+ </AdditionalRules>
+ <Completion>
+ <ClangCmpFlags/>
+ <ClangPP/>
+ <SearchPaths/>
+ </Completion>
+ </Configuration>
+  <Configuration Name="Release" CompilerType="gnu gcc" DebuggerType="GNU gdb debugger" Type="" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
+ <Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags="">
+ <IncludePath Value="."/>
+ </Compiler>
+ <Linker Options="" Required="yes"/>
+ <ResourceCompiler Options="" Required="no"/>
+    <General OutputFile="" IntermediateDirectory="./Release" Command="" CommandArguments="" UseSeparateDebugArgs="no" DebugArguments="" WorkingDirectory="$(IntermediateDirectory)" PauseExecWhenProcTerminates="yes"/>
+ <Environment EnvVarSetName="&lt;Use Defaults&gt;" DbgSetName="&lt;Use Defaults&gt;">
+ <![CDATA[
+
+
+
+ ]]>
+ </Environment>
+   <Debugger IsRemote="no" RemoteHostName="" RemoteHostPort="" DebuggerPath="">
+ <PostConnectCommands/>
+ <StartupCommands/>
+   </Debugger>
+ <PreBuild/>
+ <PostBuild/>
+ <CustomBuild Enabled="no">
+ <RebuildCommand/>
+ <CleanCommand/>
+ <BuildCommand/>
+ <PreprocessFileCommand/>
+ <SingleFileCommand/>
+ <MakefileGenerationCommand/>
+ <ThirdPartyToolName/>
+ <WorkingDirectory/>
+ </CustomBuild>
+ <AdditionalRules>
+ <CustomPostBuild/>
+ <CustomPreBuild/>
+ </AdditionalRules>
+ <Completion>
+ <ClangCmpFlags/>
+ <ClangPP/>
+ <SearchPaths/>
+ </Completion>
+ </Configuration>
+ </Settings>
+</CodeLite_Project>'''
+
+
+
+
+SOLUTION_TEMPLATE = '''<?xml version="1.0" encoding="utf-8"?>
+<CodeLite_Workspace Name="${getattr(project, 'codelite_solution_name', None)[:-10]}" Database="./${getattr(project, 'codelite_solution_name', None)[:-10]}.tags">
+${for p in project.all_projects}
+ <Project Name = "${p.name}" Path = "${p.title}" Active="No"/>
+${endfor}
+ <BuildMatrix>
+ <WorkspaceConfiguration Name="Release" Selected="yes">
+${for p in project.all_projects}
+ <Project Name="${p.name}" ConfigName="Release"/>
+${endfor}
+ </WorkspaceConfiguration>
+ </BuildMatrix>
+</CodeLite_Workspace>'''
+
+
+
+COMPILE_TEMPLATE = '''def f(project):
+ lst = []
+ def xml_escape(value):
+ return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
+
+ %s
+
+ #f = open('cmd.txt', 'w')
+ #f.write(str(lst))
+ #f.close()
+ return ''.join(lst)
+'''
+reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
+def compile_template(line):
+ """
+	Compile a template expression into a python function (like JSPs, but much shorter)
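+
+	Example (a sketch)::
+
+		f = compile_template('<Name>${xml:project.name}</Name>')
+		txt = f(project)  # the text, with project.name XML-escaped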
+ """
+ extr = []
+ def repl(match):
+ g = match.group
+ if g('dollar'): return "$"
+ elif g('backslash'):
+ return "\\"
+ elif g('subst'):
+ extr.append(g('code'))
+ return "<<|@|>>"
+ return None
+
+ line2 = reg_act.sub(repl, line)
+ params = line2.split('<<|@|>>')
+ assert(extr)
+
+	indent = 0
+	buf = []
+
+ def app(txt):
+ buf.append(indent * '\t' + txt)
+
+ for x in range(len(extr)):
+ if params[x]:
+ app("lst.append(%r)" % params[x])
+
+ f = extr[x]
+ if f.startswith('if') or f.startswith('for'):
+ app(f + ':')
+ indent += 1
+ elif f.startswith('py:'):
+ app(f[3:])
+ elif f.startswith('endif') or f.startswith('endfor'):
+ indent -= 1
+ elif f.startswith('else') or f.startswith('elif'):
+ indent -= 1
+ app(f + ':')
+ indent += 1
+ elif f.startswith('xml:'):
+ app('lst.append(xml_escape(%s))' % f[4:])
+ else:
+ #app('lst.append((%s) or "cannot find %s")' % (f, f))
+ app('lst.append(%s)' % f)
+
+ if extr:
+ if params[-1]:
+ app("lst.append(%r)" % params[-1])
+
+ fun = COMPILE_TEMPLATE % "\n\t".join(buf)
+ #print(fun)
+ return Task.funex(fun)
+
+
+re_blank = re.compile('(\n|\r|\\s)*\n', re.M)
+def rm_blank_lines(txt):
+ txt = re_blank.sub('\r\n', txt)
+ return txt
+
+BOM = '\xef\xbb\xbf'
+try:
+ BOM = bytes(BOM, 'iso8859-1') # python 3
+except NameError:
+ pass
+
+def stealth_write(self, data, flags='wb'):
+ try:
+ unicode
+ except NameError:
+ data = data.encode('utf-8') # python 3
+ else:
+ data = data.decode(sys.getfilesystemencoding(), 'replace')
+ data = data.encode('utf-8')
+
+	if self.name.endswith('.project'):
+ data = BOM + data
+
+ try:
+ txt = self.read(flags='rb')
+ if txt != data:
+ raise ValueError('must write')
+ except (IOError, ValueError):
+ self.write(data, flags=flags)
+ else:
+ Logs.debug('codelite: skipping %s' % self.abspath())
+Node.Node.stealth_write = stealth_write
+
+re_quote = re.compile("[^a-zA-Z0-9-]")
+def quote(s):
+ return re_quote.sub("_", s)
+
+def xml_escape(value):
+ return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
+
+def make_uuid(v, prefix = None):
+ """
+ simple utility function
+ """
+ if isinstance(v, dict):
+ keys = list(v.keys())
+ keys.sort()
+ tmp = str([(k, v[k]) for k in keys])
+ else:
+ tmp = str(v)
+ d = Utils.md5(tmp.encode()).hexdigest().upper()
+ if prefix:
+ d = '%s%s' % (prefix, d[8:])
+ gid = uuid.UUID(d, version = 4)
+ return str(gid).upper()
+
+def diff(node, fromnode):
+ # difference between two nodes, but with "(..)" instead of ".."
+ c1 = node
+ c2 = fromnode
+
+ c1h = c1.height()
+ c2h = c2.height()
+
+ lst = []
+ up = 0
+
+ while c1h > c2h:
+ lst.append(c1.name)
+ c1 = c1.parent
+ c1h -= 1
+
+ while c2h > c1h:
+ up += 1
+ c2 = c2.parent
+ c2h -= 1
+
+ while id(c1) != id(c2):
+ lst.append(c1.name)
+ up += 1
+
+ c1 = c1.parent
+ c2 = c2.parent
+
+ for i in range(up):
+ lst.append('(..)')
+ lst.reverse()
+ return tuple(lst)
+
+class build_property(object):
+ pass
+
+class vsnode(object):
+ """
+ Abstract class representing visual studio elements
+ We assume that all visual studio nodes have a uuid and a parent
+ """
+ def __init__(self, ctx):
+ self.ctx = ctx # codelite context
+ self.name = '' # string, mandatory
+ self.vspath = '' # path in visual studio (name for dirs, absolute path for projects)
+ self.uuid = '' # string, mandatory
+ self.parent = None # parent node for visual studio nesting
+
+ def get_waf(self):
+ """
+ Override in subclasses...
+ """
+ return '%s/%s' % (self.ctx.srcnode.abspath(), getattr(self.ctx, 'waf_command', 'waf'))
+
+ def ptype(self):
+ """
+ Return a special uuid for projects written in the solution file
+ """
+ pass
+
+ def write(self):
+ """
+ Write the project file, by default, do nothing
+ """
+ pass
+
+ def make_uuid(self, val):
+ """
+ Alias for creating uuid values easily (the templates cannot access global variables)
+ """
+ return make_uuid(val)
+
+class vsnode_vsdir(vsnode):
+ """
+ Nodes representing visual studio folders (which do not match the filesystem tree!)
+ """
+ VS_GUID_SOLUTIONFOLDER = "2150E333-8FDC-42A3-9474-1A3956D46DE8"
+ def __init__(self, ctx, uuid, name, vspath=''):
+ vsnode.__init__(self, ctx)
+ self.title = self.name = name
+ self.uuid = uuid
+ self.vspath = vspath or name
+
+ def ptype(self):
+ return self.VS_GUID_SOLUTIONFOLDER
+
+class vsnode_project(vsnode):
+ """
+ Abstract class representing visual studio project elements
+ A project is assumed to be writable, and has a node representing the file to write to
+ """
+ VS_GUID_VCPROJ = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"
+ def ptype(self):
+ return self.VS_GUID_VCPROJ
+
+ def __init__(self, ctx, node):
+ vsnode.__init__(self, ctx)
+ self.path = node
+ self.uuid = make_uuid(node.abspath())
+ self.name = node.name
+ self.title = self.path.abspath()
+ self.source = [] # list of node objects
+ self.build_properties = [] # list of properties (nmake commands, output dir, etc)
+
+ def dirs(self):
+ """
+ Get the list of parent folders of the source files (header files included)
+ for writing the filters
+ """
+ lst = []
+ def add(x):
+ if x.height() > self.tg.path.height() and x not in lst:
+ lst.append(x)
+ add(x.parent)
+ for x in self.source:
+ add(x.parent)
+ return lst
+
+ def write(self):
+ Logs.debug('codelite: creating %r' % self.path)
+ #print "self.name:",self.name
+
+ # first write the project file
+ template1 = compile_template(PROJECT_TEMPLATE)
+ proj_str = template1(self)
+ proj_str = rm_blank_lines(proj_str)
+ self.path.stealth_write(proj_str)
+
+ # then write the filter
+ #template2 = compile_template(FILTER_TEMPLATE)
+ #filter_str = template2(self)
+ #filter_str = rm_blank_lines(filter_str)
+ #tmp = self.path.parent.make_node(self.path.name + '.filters')
+ #tmp.stealth_write(filter_str)
+
+ def get_key(self, node):
+ """
+ required for writing the source files
+ """
+ name = node.name
+ if name.endswith('.cpp') or name.endswith('.c'):
+ return 'sourcefile'
+ return 'headerfile'
+
+ def collect_properties(self):
+ """
+ Returns a list of triplet (configuration, platform, output_directory)
+ """
+ ret = []
+ for c in self.ctx.configurations:
+ for p in self.ctx.platforms:
+ x = build_property()
+ x.outdir = ''
+
+ x.configuration = c
+ x.platform = p
+
+ x.preprocessor_definitions = ''
+ x.includes_search_path = ''
+
+ # can specify "deploy_dir" too
+ ret.append(x)
+ self.build_properties = ret
+
+ def get_build_params(self, props):
+ opt = ''
+ return (self.get_waf(), opt)
+
+ def get_build_command(self, props):
+ return "%s build %s" % self.get_build_params(props)
+
+ def get_clean_command(self, props):
+ return "%s clean %s" % self.get_build_params(props)
+
+ def get_rebuild_command(self, props):
+ return "%s clean build %s" % self.get_build_params(props)
+
+ def get_install_command(self, props):
+ return "%s install %s" % self.get_build_params(props)
+ def get_build_and_install_command(self, props):
+ return "%s build install %s" % self.get_build_params(props)
+
+ def get_build_and_install_all_command(self, props):
+ return "%s build install" % self.get_build_params(props)[0]
+
+ def get_clean_all_command(self, props):
+ return "%s clean" % self.get_build_params(props)[0]
+
+ def get_build_all_command(self, props):
+ return "%s build" % self.get_build_params(props)[0]
+
+ def get_rebuild_all_command(self, props):
+ return "%s clean build" % self.get_build_params(props)[0]
+
+ def get_filter_name(self, node):
+ lst = diff(node, self.tg.path)
+ return '\\'.join(lst) or '.'
+
+class vsnode_alias(vsnode_project):
+ def __init__(self, ctx, node, name):
+ vsnode_project.__init__(self, ctx, node)
+ self.name = name
+ self.output_file = ''
+
+class vsnode_build_all(vsnode_alias):
+ """
+	Fake target used to emulate the behaviour of "make all" (starting one process per target is slow)
+ This is the only alias enabled by default
+ """
+ def __init__(self, ctx, node, name='build_all_projects'):
+ vsnode_alias.__init__(self, ctx, node, name)
+ self.is_active = True
+
+class vsnode_install_all(vsnode_alias):
+ """
+ Fake target used to emulate the behaviour of "make install"
+ """
+ def __init__(self, ctx, node, name='install_all_projects'):
+ vsnode_alias.__init__(self, ctx, node, name)
+
+ def get_build_command(self, props):
+ return "%s build install %s" % self.get_build_params(props)
+
+ def get_clean_command(self, props):
+ return "%s clean %s" % self.get_build_params(props)
+
+ def get_rebuild_command(self, props):
+ return "%s clean build install %s" % self.get_build_params(props)
+
+class vsnode_project_view(vsnode_alias):
+ """
+ Fake target used to emulate a file system view
+ """
+ def __init__(self, ctx, node, name='project_view'):
+ vsnode_alias.__init__(self, ctx, node, name)
+ self.tg = self.ctx() # fake one, cannot remove
+ self.exclude_files = Node.exclude_regs + '''
+waf-1.8.*
+waf3-1.8.*/**
+.waf-1.8.*
+.waf3-1.8.*/**
+**/*.sdf
+**/*.suo
+**/*.ncb
+**/%s
+ ''' % Options.lockfile
+
+ def collect_source(self):
+ # this is likely to be slow
+ self.source = self.ctx.srcnode.ant_glob('**', excl=self.exclude_files)
+
+ def get_build_command(self, props):
+ params = self.get_build_params(props) + (self.ctx.cmd,)
+ return "%s %s %s" % params
+
+ def get_clean_command(self, props):
+ return ""
+
+ def get_rebuild_command(self, props):
+ return self.get_build_command(props)
+
+class vsnode_target(vsnode_project):
+ """
+	CodeLite project representing a target (program, library, etc), bound
+	to a task generator
+ """
+ def __init__(self, ctx, tg):
+ """
+ A project is more or less equivalent to a file/folder
+ """
+ base = getattr(ctx, 'projects_dir', None) or tg.path
+ node = base.make_node(quote(tg.name) + ctx.project_extension) # the project file as a Node
+ vsnode_project.__init__(self, ctx, node)
+ self.name = quote(tg.name)
+ self.tg = tg # task generator
+
+ def get_build_params(self, props):
+ """
+ Override the default to add the target name
+ """
+ opt = ''
+ if getattr(self, 'tg', None):
+ opt += " --targets=%s" % self.tg.name
+ return (self.get_waf(), opt)
+
+ def collect_source(self):
+ tg = self.tg
+ source_files = tg.to_nodes(getattr(tg, 'source', []))
+ include_dirs = Utils.to_list(getattr(tg, 'codelite_includes', []))
+ include_files = []
+ for x in include_dirs:
+ if isinstance(x, str):
+ x = tg.path.find_node(x)
+ if x:
+ lst = [y for y in x.ant_glob(HEADERS_GLOB, flat=False)]
+ include_files.extend(lst)
+
+ # remove duplicates
+ self.source.extend(list(set(source_files + include_files)))
+ self.source.sort(key=lambda x: x.abspath())
+
+ def collect_properties(self):
+ """
+ CodeLite projects are associated with platforms and configurations (for building especially)
+ """
+ super(vsnode_target, self).collect_properties()
+ for x in self.build_properties:
+ x.outdir = self.path.parent.abspath()
+ x.preprocessor_definitions = ''
+ x.includes_search_path = ''
+
+ try:
+ tsk = self.tg.link_task
+ except AttributeError:
+ pass
+ else:
+ x.output_file = tsk.outputs[0].abspath()
+ x.preprocessor_definitions = ';'.join(tsk.env.DEFINES)
+ x.includes_search_path = ';'.join(self.tg.env.INCPATHS)
+
+class codelite_generator(BuildContext):
+ '''generates a CodeLite workspace'''
+ cmd = 'codelite'
+ fun = 'build'
+
+ def init(self):
+ """
+ Some data that needs to be present
+ """
+ if not getattr(self, 'configurations', None):
+ self.configurations = ['Release'] # LocalRelease, RemoteDebug, etc
+ if not getattr(self, 'platforms', None):
+ self.platforms = ['Win32']
+ if not getattr(self, 'all_projects', None):
+ self.all_projects = []
+ if not getattr(self, 'project_extension', None):
+ self.project_extension = '.project'
+ if not getattr(self, 'projects_dir', None):
+ self.projects_dir = self.srcnode.make_node('')
+ self.projects_dir.mkdir()
+
+ # bind the classes to the object, so that subclass can provide custom generators
+ if not getattr(self, 'vsnode_vsdir', None):
+ self.vsnode_vsdir = vsnode_vsdir
+ if not getattr(self, 'vsnode_target', None):
+ self.vsnode_target = vsnode_target
+ if not getattr(self, 'vsnode_build_all', None):
+ self.vsnode_build_all = vsnode_build_all
+ if not getattr(self, 'vsnode_install_all', None):
+ self.vsnode_install_all = vsnode_install_all
+ if not getattr(self, 'vsnode_project_view', None):
+ self.vsnode_project_view = vsnode_project_view
+
+ self.numver = '11.00'
+ self.vsver = '2010'
+
+ def execute(self):
+ """
+ Entry point
+ """
+ self.restore()
+ if not self.all_envs:
+ self.load_envs()
+ self.recurse([self.run_dir])
+
+ # user initialization
+ self.init()
+
+ # two phases for creating the solution
+ self.collect_projects() # add project objects into "self.all_projects"
+ self.write_files() # write the corresponding project and solution files
+
+ def collect_projects(self):
+ """
+ Fill the list self.all_projects with project objects
+ Fill the list of build targets
+ """
+ self.collect_targets()
+ #self.add_aliases()
+ #self.collect_dirs()
+ default_project = getattr(self, 'default_project', None)
+ def sortfun(x):
+ if x.name == default_project:
+ return ''
+ return getattr(x, 'path', None) and x.path.abspath() or x.name
+ self.all_projects.sort(key=sortfun)
+
+
+	def write_files(self):
+		"""
+		Write the project and solution files from the data collected
+		so far. It is unlikely that you will want to change this
+		"""
+ for p in self.all_projects:
+ p.write()
+
+ # and finally write the solution file
+ node = self.get_solution_node()
+ node.parent.mkdir()
+ Logs.warn('Creating %r' % node)
+ template1 = compile_template(SOLUTION_TEMPLATE)
+ sln_str = template1(self)
+ sln_str = rm_blank_lines(sln_str)
+ node.stealth_write(sln_str)
+
+ def get_solution_node(self):
+ """
+		The solution filename is required when writing the project files;
+		return self.solution_node, creating it if it does not exist
+ """
+ try:
+ return self.solution_node
+		except AttributeError:
+ pass
+
+ codelite_solution_name = getattr(self, 'codelite_solution_name', None)
+ if not codelite_solution_name:
+ codelite_solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '.workspace'
+ setattr(self, 'codelite_solution_name', codelite_solution_name)
+ if os.path.isabs(codelite_solution_name):
+ self.solution_node = self.root.make_node(codelite_solution_name)
+ else:
+ self.solution_node = self.srcnode.make_node(codelite_solution_name)
+ return self.solution_node
+
+ def project_configurations(self):
+ """
+ Helper that returns all the pairs (config,platform)
+ """
+ ret = []
+ for c in self.configurations:
+ for p in self.platforms:
+ ret.append((c, p))
+ return ret
+
+ def collect_targets(self):
+ """
+ Process the list of task generators
+ """
+ for g in self.groups:
+ for tg in g:
+ if not isinstance(tg, TaskGen.task_gen):
+ continue
+
+ if not hasattr(tg, 'codelite_includes'):
+ tg.codelite_includes = tg.to_list(getattr(tg, 'includes', [])) + tg.to_list(getattr(tg, 'export_includes', []))
+ tg.post()
+ if not getattr(tg, 'link_task', None):
+ continue
+
+ p = self.vsnode_target(self, tg)
+ p.collect_source() # delegate this processing
+ p.collect_properties()
+ self.all_projects.append(p)
+
+ def add_aliases(self):
+ """
+ Add a specific target that emulates the "make all" necessary for Visual studio when pressing F7
+ We also add an alias for "make install" (disabled by default)
+ """
+ base = getattr(self, 'projects_dir', None) or self.tg.path
+
+ node_project = base.make_node('build_all_projects' + self.project_extension) # Node
+ p_build = self.vsnode_build_all(self, node_project)
+ p_build.collect_properties()
+ self.all_projects.append(p_build)
+
+ node_project = base.make_node('install_all_projects' + self.project_extension) # Node
+ p_install = self.vsnode_install_all(self, node_project)
+ p_install.collect_properties()
+ self.all_projects.append(p_install)
+
+ node_project = base.make_node('project_view' + self.project_extension) # Node
+ p_view = self.vsnode_project_view(self, node_project)
+ p_view.collect_source()
+ p_view.collect_properties()
+ self.all_projects.append(p_view)
+
+ n = self.vsnode_vsdir(self, make_uuid(self.srcnode.abspath() + 'build_aliases'), "build_aliases")
+ p_build.parent = p_install.parent = p_view.parent = n
+ self.all_projects.append(n)
+
+ def collect_dirs(self):
+ """
+ Create the folder structure in the CodeLite project view
+ """
+ seen = {}
+ def make_parents(proj):
+ # look at a project, try to make a parent
+ if getattr(proj, 'parent', None):
+ # aliases already have parents
+ return
+ x = proj.iter_path
+ if x in seen:
+ proj.parent = seen[x]
+ return
+
+ # There is no vsnode_vsdir for x yet,
+ # so create a project representing the folder "x"
+ n = proj.parent = seen[x] = self.vsnode_vsdir(self, make_uuid(x.abspath()), x.name)
+ n.iter_path = x.parent
+ self.all_projects.append(n)
+
+ # recurse up to the project directory
+ if x.height() > self.srcnode.height() + 1:
+ make_parents(n)
+
+ for p in self.all_projects[:]: # iterate over a copy of all projects
+ if not getattr(p, 'tg', None):
+ # but only projects that have a task generator
+ continue
+
+ # make a folder for each task generator
+ p.iter_path = p.tg.path
+ make_parents(p)
+
+
+
+def options(ctx):
+ pass
diff --git a/third_party/waf/waflib/extras/color_gcc.py b/third_party/waf/waflib/extras/color_gcc.py
new file mode 100644
index 00000000000..b3587e8db44
--- /dev/null
+++ b/third_party/waf/waflib/extras/color_gcc.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+# encoding: utf-8
+
+# Replaces the default formatter by one which understands GCC output and colorizes it.
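+#
+# A minimal usage sketch (assuming the tool is reachable through waf's tool
+# path): load it from the project's wscript so the formatter is installed
+# as soon as options are parsed:
+#
+#   def options(opt):
+#       opt.load('color_gcc')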
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2012"
+
+import sys
+from waflib import Logs
+
+class ColorGCCFormatter(Logs.formatter):
+ def __init__(self, colors):
+ self.colors = colors
+ Logs.formatter.__init__(self)
+ def format(self, rec):
+ frame = sys._getframe()
+ while frame:
+ func = frame.f_code.co_name
+ if func == 'exec_command':
+ cmd = frame.f_locals['cmd']
+ if isinstance(cmd, list) and ('gcc' in cmd[0] or 'g++' in cmd[0]):
+ lines = []
+ for line in rec.msg.splitlines():
+ if 'warning: ' in line:
+ lines.append(self.colors.YELLOW + line)
+ elif 'error: ' in line:
+ lines.append(self.colors.RED + line)
+ elif 'note: ' in line:
+ lines.append(self.colors.CYAN + line)
+ else:
+ lines.append(line)
+ rec.msg = "\n".join(lines)
+ frame = frame.f_back
+ return Logs.formatter.format(self, rec)
+
+def options(opt):
+ Logs.log.handlers[0].setFormatter(ColorGCCFormatter(Logs.colors))
diff --git a/third_party/waf/waflib/extras/color_rvct.py b/third_party/waf/waflib/extras/color_rvct.py
new file mode 100644
index 00000000000..837fca2edf1
--- /dev/null
+++ b/third_party/waf/waflib/extras/color_rvct.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+# encoding: utf-8
+
+# Replaces the default formatter by one which understands RVCT output and colorizes it.
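+#
+# Usage mirrors color_gcc (a sketch, assuming the tool is on waf's tool path):
+#
+#   def options(opt):
+#       opt.load('color_rvct')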
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2012"
+
+import sys
+import atexit
+from waflib import Logs
+
+errors = []
+
+def show_errors():
+ for i, e in enumerate(errors):
+ if i > 5:
+ break
+ print("Error: %s" % e)
+
+atexit.register(show_errors)
+
+class RvctFormatter(Logs.formatter):
+ def __init__(self, colors):
+ Logs.formatter.__init__(self)
+ self.colors = colors
+ def format(self, rec):
+ frame = sys._getframe()
+ while frame:
+ func = frame.f_code.co_name
+ if func == 'exec_command':
+ cmd = frame.f_locals['cmd']
+ if isinstance(cmd, list) and ('armcc' in cmd[0] or 'armld' in cmd[0]):
+ lines = []
+ for line in rec.msg.splitlines():
+ if 'Warning: ' in line:
+ lines.append(self.colors.YELLOW + line)
+ elif 'Error: ' in line:
+ lines.append(self.colors.RED + line)
+ errors.append(line)
+ elif 'note: ' in line:
+ lines.append(self.colors.CYAN + line)
+ else:
+ lines.append(line)
+ rec.msg = "\n".join(lines)
+ frame = frame.f_back
+ return Logs.formatter.format(self, rec)
+
+def options(opt):
+ Logs.log.handlers[0].setFormatter(RvctFormatter(Logs.colors))
diff --git a/third_party/waf/waflib/extras/compat15.py b/third_party/waf/waflib/extras/compat15.py
new file mode 100644
index 00000000000..69722ffa0c9
--- /dev/null
+++ b/third_party/waf/waflib/extras/compat15.py
@@ -0,0 +1,405 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010 (ita)
+
+"""
+This file is provided to enable compatibility with waf 1.5
+It was enabled by default in waf 1.6, but it is not used in waf 1.7
+"""
+
+import sys
+from waflib import ConfigSet, Logs, Options, Scripting, Task, Build, Configure, Node, Runner, TaskGen, Utils, Errors, Context
+
+# the following is to bring some compatibility with waf 1.5 "import waflib.Configure → import Configure"
+sys.modules['Environment'] = ConfigSet
+ConfigSet.Environment = ConfigSet.ConfigSet
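+# With the two aliases above, legacy waf 1.5 code such as the following
+# (hypothetical) snippet keeps working unchanged:
+#
+#   import Environment              # resolves to waflib.ConfigSet
+#   env = Environment.Environment() # actually a ConfigSet.ConfigSet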
+
+sys.modules['Logs'] = Logs
+sys.modules['Options'] = Options
+sys.modules['Scripting'] = Scripting
+sys.modules['Task'] = Task
+sys.modules['Build'] = Build
+sys.modules['Configure'] = Configure
+sys.modules['Node'] = Node
+sys.modules['Runner'] = Runner
+sys.modules['TaskGen'] = TaskGen
+sys.modules['Utils'] = Utils
+sys.modules['Constants'] = Context
+Context.SRCDIR = ''
+Context.BLDDIR = ''
+
+from waflib.Tools import c_preproc
+sys.modules['preproc'] = c_preproc
+
+from waflib.Tools import c_config
+sys.modules['config_c'] = c_config
+
+ConfigSet.ConfigSet.copy = ConfigSet.ConfigSet.derive
+ConfigSet.ConfigSet.set_variant = Utils.nada
+
+Utils.pproc = Utils.subprocess
+
+Build.BuildContext.add_subdirs = Build.BuildContext.recurse
+Build.BuildContext.new_task_gen = Build.BuildContext.__call__
+Build.BuildContext.is_install = 0
+Node.Node.relpath_gen = Node.Node.path_from
+
+Utils.get_term_cols = Logs.get_term_cols
+
+def cmd_output(cmd, **kw):
+
+ silent = False
+ if 'silent' in kw:
+ silent = kw['silent']
+ del(kw['silent'])
+
+ if 'e' in kw:
+ tmp = kw['e']
+ del(kw['e'])
+ kw['env'] = tmp
+
+ kw['shell'] = isinstance(cmd, str)
+ kw['stdout'] = Utils.subprocess.PIPE
+ if silent:
+ kw['stderr'] = Utils.subprocess.PIPE
+
+ try:
+ p = Utils.subprocess.Popen(cmd, **kw)
+ output = p.communicate()[0]
+ except OSError as e:
+ raise ValueError(str(e))
+
+ if p.returncode:
+ if not silent:
+ msg = "command execution failed: %s -> %r" % (cmd, str(output))
+ raise ValueError(msg)
+ output = ''
+ return output
+Utils.cmd_output = cmd_output
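+
+# A usage sketch for the compatibility helper above (hypothetical command):
+#
+#   out = Utils.cmd_output(['ls', '-l'], silent=True)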
+
+def name_to_obj(self, s, env=None):
+ if Logs.verbose:
+ Logs.warn('compat: change "name_to_obj(name, env)" by "get_tgen_by_name(name)"')
+ return self.get_tgen_by_name(s)
+Build.BuildContext.name_to_obj = name_to_obj
+
+def env_of_name(self, name):
+ try:
+ return self.all_envs[name]
+ except KeyError:
+ Logs.error('no such environment: '+name)
+ return None
+Build.BuildContext.env_of_name = env_of_name
+
+
+def set_env_name(self, name, env):
+ self.all_envs[name] = env
+ return env
+Configure.ConfigurationContext.set_env_name = set_env_name
+
+def retrieve(self, name, fromenv=None):
+ try:
+ env = self.all_envs[name]
+ except KeyError:
+ env = ConfigSet.ConfigSet()
+ self.prepare_env(env)
+ self.all_envs[name] = env
+ else:
+ if fromenv:
+ Logs.warn('The environment %s may have been configured already', name)
+ return env
+Configure.ConfigurationContext.retrieve = retrieve
+
+Configure.ConfigurationContext.sub_config = Configure.ConfigurationContext.recurse
+Configure.ConfigurationContext.check_tool = Configure.ConfigurationContext.load
+Configure.conftest = Configure.conf
+Configure.ConfigurationError = Errors.ConfigurationError
+Utils.WafError = Errors.WafError
+
+Options.OptionsContext.sub_options = Options.OptionsContext.recurse
+Options.OptionsContext.tool_options = Context.Context.load
+Options.Handler = Options.OptionsContext
+
+Task.simple_task_type = Task.task_type_from_func = Task.task_factory
+Task.TaskBase.classes = Task.classes
+
+def setitem(self, key, value):
+ if key.startswith('CCFLAGS'):
+ key = key[1:]
+ self.table[key] = value
+ConfigSet.ConfigSet.__setitem__ = setitem
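+
+# The override above transparently remaps the old CCFLAGS-style names; for
+# example (sketch): env['CCFLAGS'] = ['-O2'] stores the value under 'CFLAGS'.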
+
+@TaskGen.feature('d')
+@TaskGen.before('apply_incpaths')
+def old_importpaths(self):
+ if getattr(self, 'importpaths', []):
+ self.includes = self.importpaths
+
+from waflib import Context
+eld = Context.load_tool
+def load_tool(*k, **kw):
+ ret = eld(*k, **kw)
+ if 'set_options' in ret.__dict__:
+ if Logs.verbose:
+ Logs.warn('compat: rename "set_options" to options')
+ ret.options = ret.set_options
+ if 'detect' in ret.__dict__:
+ if Logs.verbose:
+ Logs.warn('compat: rename "detect" to "configure"')
+ ret.configure = ret.detect
+ return ret
+Context.load_tool = load_tool
+
+def get_curdir(self):
+ return self.path.abspath()
+Context.Context.curdir = property(get_curdir, Utils.nada)
+
+def get_srcdir(self):
+ return self.srcnode.abspath()
+Configure.ConfigurationContext.srcdir = property(get_srcdir, Utils.nada)
+
+def get_blddir(self):
+ return self.bldnode.abspath()
+Configure.ConfigurationContext.blddir = property(get_blddir, Utils.nada)
+
+Configure.ConfigurationContext.check_message_1 = Configure.ConfigurationContext.start_msg
+Configure.ConfigurationContext.check_message_2 = Configure.ConfigurationContext.end_msg
+
+rev = Context.load_module
+def load_module(path, encoding=None):
+ ret = rev(path, encoding)
+ if 'set_options' in ret.__dict__:
+ if Logs.verbose:
+ Logs.warn('compat: rename "set_options" to "options" (%r)', path)
+ ret.options = ret.set_options
+ if 'srcdir' in ret.__dict__:
+ if Logs.verbose:
+ Logs.warn('compat: rename "srcdir" to "top" (%r)', path)
+ ret.top = ret.srcdir
+ if 'blddir' in ret.__dict__:
+ if Logs.verbose:
+ Logs.warn('compat: rename "blddir" to "out" (%r)', path)
+ ret.out = ret.blddir
+ Utils.g_module = Context.g_module
+ Options.launch_dir = Context.launch_dir
+ return ret
+Context.load_module = load_module
+
+old_post = TaskGen.task_gen.post
+def post(self):
+ self.features = self.to_list(self.features)
+ if 'cc' in self.features:
+ if Logs.verbose:
+ Logs.warn('compat: the feature cc does not exist anymore (use "c")')
+ self.features.remove('cc')
+ self.features.append('c')
+ if 'cstaticlib' in self.features:
+ if Logs.verbose:
+ Logs.warn('compat: the feature cstaticlib does not exist anymore (use "cstlib" or "cxxstlib")')
+ self.features.remove('cstaticlib')
+ self.features.append(('cxx' in self.features) and 'cxxstlib' or 'cstlib')
+ if getattr(self, 'ccflags', None):
+ if Logs.verbose:
+ Logs.warn('compat: "ccflags" was renamed to "cflags"')
+ self.cflags = self.ccflags
+ return old_post(self)
+TaskGen.task_gen.post = post
+
+def waf_version(*k, **kw):
+ Logs.warn('wrong version (waf_version was removed in waf 1.6)')
+Utils.waf_version = waf_version
+
+
+import os
+@TaskGen.feature('c', 'cxx', 'd')
+@TaskGen.before('apply_incpaths', 'propagate_uselib_vars')
+@TaskGen.after('apply_link', 'process_source')
+def apply_uselib_local(self):
+ """
+ process the uselib_local attribute
+ execute after apply_link because of the execution order set on 'link_task'
+ """
+ env = self.env
+ from waflib.Tools.ccroot import stlink_task
+
+ # 1. the case of the libs defined in the project (visit ancestors first)
+ # the ancestors external libraries (uselib) will be prepended
+ self.uselib = self.to_list(getattr(self, 'uselib', []))
+ self.includes = self.to_list(getattr(self, 'includes', []))
+ names = self.to_list(getattr(self, 'uselib_local', []))
+ get = self.bld.get_tgen_by_name
+ seen = set()
+ seen_uselib = set()
+ tmp = Utils.deque(names) # consume a copy of the list of names
+ if tmp:
+ if Logs.verbose:
+ Logs.warn('compat: "uselib_local" is deprecated, replace by "use"')
+ while tmp:
+ lib_name = tmp.popleft()
+ # visit dependencies only once
+ if lib_name in seen:
+ continue
+
+ y = get(lib_name)
+ y.post()
+ seen.add(lib_name)
+
+ # object has ancestors to process (shared libraries): add them to the end of the list
+ if getattr(y, 'uselib_local', None):
+ for x in self.to_list(getattr(y, 'uselib_local', [])):
+ obj = get(x)
+ obj.post()
+ if getattr(obj, 'link_task', None):
+ if not isinstance(obj.link_task, stlink_task):
+ tmp.append(x)
+
+ # link task and flags
+ if getattr(y, 'link_task', None):
+
+ link_name = y.target[y.target.rfind(os.sep) + 1:]
+ if isinstance(y.link_task, stlink_task):
+ env.append_value('STLIB', [link_name])
+ else:
+ # some linkers can link against programs
+ env.append_value('LIB', [link_name])
+
+ # the order
+ self.link_task.set_run_after(y.link_task)
+
+ # for the recompilation
+ self.link_task.dep_nodes += y.link_task.outputs
+
+ # add the link path too
+ tmp_path = y.link_task.outputs[0].parent.bldpath()
+ if not tmp_path in env['LIBPATH']:
+ env.prepend_value('LIBPATH', [tmp_path])
+
+ # add ancestors uselib too - but only propagate those that have no staticlib defined
+ for v in self.to_list(getattr(y, 'uselib', [])):
+ if v not in seen_uselib:
+ seen_uselib.add(v)
+ if not env['STLIB_' + v]:
+ if not v in self.uselib:
+ self.uselib.insert(0, v)
+
+ # if the library task generator provides 'export_includes', add to the include path
+ # the export_includes must be a list of paths relative to the other library
+ if getattr(y, 'export_includes', None):
+ self.includes.extend(y.to_incnodes(y.export_includes))
+
+@TaskGen.feature('cprogram', 'cxxprogram', 'cstlib', 'cxxstlib', 'cshlib', 'cxxshlib', 'dprogram', 'dstlib', 'dshlib')
+@TaskGen.after('apply_link')
+def apply_objdeps(self):
+ "add the .o files produced by some other object files in the same manner as uselib_local"
+ names = getattr(self, 'add_objects', [])
+ if not names:
+ return
+ names = self.to_list(names)
+
+ get = self.bld.get_tgen_by_name
+ seen = []
+ while names:
+ x = names[0]
+
+ # visit dependencies only once
+ if x in seen:
+ names = names[1:]
+ continue
+
+ # object does not exist ?
+ y = get(x)
+
+ # object has ancestors to process first ? update the list of names
+ if getattr(y, 'add_objects', None):
+ added = 0
+ lst = y.to_list(y.add_objects)
+ lst.reverse()
+ for u in lst:
+ if u in seen: continue
+ added = 1
+ names = [u]+names
+ if added: continue # list of names modified, loop
+
+ # safe to process the current object
+ y.post()
+ seen.append(x)
+
+ for t in getattr(y, 'compiled_tasks', []):
+ self.link_task.inputs.extend(t.outputs)
+
+@TaskGen.after('apply_link')
+def process_obj_files(self):
+ if not hasattr(self, 'obj_files'):
+ return
+ for x in self.obj_files:
+ node = self.path.find_resource(x)
+ self.link_task.inputs.append(node)
+
+@TaskGen.taskgen_method
+def add_obj_file(self, file):
+ """Small example on how to link object files as if they were source
+ obj = bld.create_obj('cc')
+ obj.add_obj_file('foo.o')"""
+ if not hasattr(self, 'obj_files'): self.obj_files = []
+ if not 'process_obj_files' in self.meths: self.meths.append('process_obj_files')
+ self.obj_files.append(file)
+
+
+old_define = Configure.ConfigurationContext.__dict__['define']
+
+@Configure.conf
+def define(self, key, val, quote=True, comment=''):
+ old_define(self, key, val, quote, comment)
+ if key.startswith('HAVE_'):
+ self.env[key] = 1
+
+old_undefine = Configure.ConfigurationContext.__dict__['undefine']
+
+@Configure.conf
+def undefine(self, key, comment=''):
+ old_undefine(self, key, comment)
+ if key.startswith('HAVE_'):
+ self.env[key] = 0
+
+# some people might want to use export_incdirs, but it was renamed
+def set_incdirs(self, val):
+ Logs.warn('compat: change "export_incdirs" by "export_includes"')
+ self.export_includes = val
+TaskGen.task_gen.export_incdirs = property(None, set_incdirs)
+
+def install_dir(self, path):
+ if not path:
+ return []
+
+ destpath = Utils.subst_vars(path, self.env)
+
+ if self.is_install > 0:
+ Logs.info('* creating %s', destpath)
+ Utils.check_dir(destpath)
+ elif self.is_install < 0:
+ Logs.info('* removing %s', destpath)
+ try:
+ os.remove(destpath)
+ except OSError:
+ pass
+Build.BuildContext.install_dir = install_dir
+
+# before/after names
+repl = {'apply_core': 'process_source',
+ 'apply_lib_vars': 'process_source',
+ 'apply_obj_vars': 'propagate_uselib_vars',
+ 'exec_rule': 'process_rule'
+}
+def after(*k):
+ k = [repl.get(key, key) for key in k]
+ return TaskGen.after_method(*k)
+
+def before(*k):
+ k = [repl.get(key, key) for key in k]
+ return TaskGen.before_method(*k)
+TaskGen.after = after
+TaskGen.before = before
diff --git a/third_party/waf/waflib/extras/cppcheck.py b/third_party/waf/waflib/extras/cppcheck.py
new file mode 100644
index 00000000000..3bbeabf200a
--- /dev/null
+++ b/third_party/waf/waflib/extras/cppcheck.py
@@ -0,0 +1,546 @@
+#! /usr/bin/env python
+# -*- encoding: utf-8 -*-
+# Michel Mooij, michel.mooij7@gmail.com
+
+"""
+Tool Description
+================
+This module provides a waf wrapper (i.e. waftool) around the C/C++ source code
+checking tool 'cppcheck'.
+
+See http://cppcheck.sourceforge.net/ for more information on the cppcheck tool
+itself.
+Note that many Linux distributions already provide a ready-to-install version
+of cppcheck. On Fedora, for instance, it can be installed using yum:
+
+ 'sudo yum install cppcheck'
+
+
+Usage
+=====
+In order to use this waftool simply add it to the 'options' and 'configure'
+functions of your main waf script as shown in the example below:
+
+ def options(opt):
+ opt.load('cppcheck', tooldir='./waftools')
+
+ def configure(conf):
+ conf.load('cppcheck')
+
+Note that the example shown above assumes that the cppcheck waftool is located
+in the subdirectory named 'waftools'.
+
+When configured as shown in the example above, cppcheck will automatically
+perform a source code analysis on all C/C++ build tasks that have been
+defined in your waf build system.
+
+In the example below, the sources of the C program will be used as input for
+cppcheck when building the task.
+
+ def build(bld):
+ bld.program(name='foo', source='foobar.c')
+
+The result of the source code analysis will be stored both as xml and html
+files in the build location for the task. Should any error be detected by
+cppcheck the build will be aborted and a link to the html report will be shown.
+
+When needed, source code checking by cppcheck can be disabled per task, or per
+detected error or warning for a particular task. It can also be disabled for
+all tasks.
+
+In order to exclude a task from source code checking add the skip option to the
+task as shown below:
+
+ def build(bld):
+ bld.program(
+ name='foo',
+ source='foobar.c',
+ cppcheck_skip=True
+ )
+
+When needed, problems detected by cppcheck may be suppressed using a file
+containing a list of suppression rules. The relative or absolute path to this
+file can be added to the build task as shown in the example below:
+
+ bld.program(
+ name='bar',
+ src='foobar.c',
+ cppcheck_suppress='bar.suppress'
+ )
+
+A cppcheck suppress file should contain one suppress rule per line. Each of
+these rules will be passed as an '--suppress=<rule>' argument to cppcheck.
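+
+For illustration, a (hypothetical) suppress file using cppcheck's
+'<id>[:<file>[:<line>]]' rule syntax could contain:
+
+    memleak:src/foo.c
+    nullPointer:src/bar.c:12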
+
+Dependencies
+================
+This waftool depends on the Python pygments module; it is used for source code
+syntax highlighting when creating the html reports. See http://pygments.org/ for
+more information on this package.
+
+Remarks
+================
+The generation of the html report is originally based on the cppcheck-htmlreport.py
+script that comes shipped with the cppcheck tool.
+"""
+
+import sys
+import xml.etree.ElementTree as ElementTree
+from waflib import Task, TaskGen, Logs, Context
+
+PYGMENTS_EXC_MSG= '''
+The required module 'pygments' could not be found. Please install it using your
+platform package manager (e.g. apt-get or yum), using 'pip' or 'easy_install',
+see 'http://pygments.org/download/' for installation instructions.
+'''
+
+try:
+ import pygments
+ from pygments import formatters, lexers
+except ImportError as e:
+ Logs.warn(PYGMENTS_EXC_MSG)
+ raise e
+
+
+def options(opt):
+ opt.add_option('--cppcheck-skip', dest='cppcheck_skip',
+ default=False, action='store_true',
+ help='do not check C/C++ sources (default=False)')
+
+ opt.add_option('--cppcheck-err-resume', dest='cppcheck_err_resume',
+ default=False, action='store_true',
+ help='continue in case of errors (default=False)')
+
+ opt.add_option('--cppcheck-bin-enable', dest='cppcheck_bin_enable',
+ default='warning,performance,portability,style,unusedFunction', action='store',
+ help="cppcheck option '--enable=' for binaries (default=warning,performance,portability,style,unusedFunction)")
+
+ opt.add_option('--cppcheck-lib-enable', dest='cppcheck_lib_enable',
+ default='warning,performance,portability,style', action='store',
+ help="cppcheck option '--enable=' for libraries (default=warning,performance,portability,style)")
+
+ opt.add_option('--cppcheck-std-c', dest='cppcheck_std_c',
+ default='c99', action='store',
+ help='cppcheck standard to use when checking C (default=c99)')
+
+ opt.add_option('--cppcheck-std-cxx', dest='cppcheck_std_cxx',
+ default='c++03', action='store',
+ help='cppcheck standard to use when checking C++ (default=c++03)')
+
+ opt.add_option('--cppcheck-check-config', dest='cppcheck_check_config',
+ default=False, action='store_true',
+ help='forced check for missing built-in include files, e.g. stdio.h (default=False)')
+
+ opt.add_option('--cppcheck-max-configs', dest='cppcheck_max_configs',
+ default='20', action='store',
+ help='maximum preprocessor (--max-configs) define iterations (default=20)')
+
+
+def configure(conf):
+ if conf.options.cppcheck_skip:
+ conf.env.CPPCHECK_SKIP = [True]
+ conf.env.CPPCHECK_STD_C = conf.options.cppcheck_std_c
+ conf.env.CPPCHECK_STD_CXX = conf.options.cppcheck_std_cxx
+ conf.env.CPPCHECK_MAX_CONFIGS = conf.options.cppcheck_max_configs
+ conf.env.CPPCHECK_BIN_ENABLE = conf.options.cppcheck_bin_enable
+ conf.env.CPPCHECK_LIB_ENABLE = conf.options.cppcheck_lib_enable
+ conf.find_program('cppcheck', var='CPPCHECK')
+
+
+@TaskGen.feature('c')
+@TaskGen.feature('cxx')
+def cppcheck_execute(self):
+ if len(self.env.CPPCHECK_SKIP) or self.bld.options.cppcheck_skip:
+ return
+ if getattr(self, 'cppcheck_skip', False):
+ return
+ task = self.create_task('cppcheck')
+ task.cmd = _tgen_create_cmd(self)
+ task.fatal = []
+ if not self.bld.options.cppcheck_err_resume:
+ task.fatal.append('error')
+
+
+def _tgen_create_cmd(self):
+ features = getattr(self, 'features', [])
+ std_c = self.env.CPPCHECK_STD_C
+ std_cxx = self.env.CPPCHECK_STD_CXX
+ max_configs = self.env.CPPCHECK_MAX_CONFIGS
+ bin_enable = self.env.CPPCHECK_BIN_ENABLE
+ lib_enable = self.env.CPPCHECK_LIB_ENABLE
+
+ cmd = self.env.CPPCHECK
+ args = ['--inconclusive','--report-progress','--verbose','--xml','--xml-version=2']
+ args.append('--max-configs=%s' % max_configs)
+
+ if 'cxx' in features:
+ args.append('--language=c++')
+ args.append('--std=%s' % std_cxx)
+ else:
+ args.append('--language=c')
+ args.append('--std=%s' % std_c)
+
+ if self.bld.options.cppcheck_check_config:
+ args.append('--check-config')
+
+ if set(['cprogram','cxxprogram']) & set(features):
+ args.append('--enable=%s' % bin_enable)
+ else:
+ args.append('--enable=%s' % lib_enable)
+
+ for src in self.to_list(getattr(self, 'source', [])):
+ args.append('%r' % src)
+ for inc in self.to_incnodes(self.to_list(getattr(self, 'includes', []))):
+ args.append('-I%r' % inc)
+ for inc in self.to_incnodes(self.to_list(self.env.INCLUDES)):
+ args.append('-I%r' % inc)
+ return cmd + args
+
+
+class cppcheck(Task.Task):
+ quiet = True
+
+ def run(self):
+ stderr = self.generator.bld.cmd_and_log(self.cmd, quiet=Context.STDERR, output=Context.STDERR)
+ self._save_xml_report(stderr)
+ defects = self._get_defects(stderr)
+ index = self._create_html_report(defects)
+ self._errors_evaluate(defects, index)
+ return 0
+
+ def _save_xml_report(self, s):
+ '''Use the cppcheck xml result string, add the command string used to invoke
+ cppcheck, and save it as an xml file.
+ '''
+ header = '%s\n' % s.splitlines()[0]
+ root = ElementTree.fromstring(s)
+ cmd = ElementTree.SubElement(root.find('cppcheck'), 'cmd')
+ cmd.text = str(self.cmd)
+ body = ElementTree.tostring(root)
+ node = self.generator.path.get_bld().find_or_declare('cppcheck.xml')
+ node.write(header + body)
+
+ def _get_defects(self, xml_string):
+ '''Evaluate the xml string returned by cppcheck (on stderr) and use it to create
+ a list of defects.
+ '''
+ defects = []
+ for error in ElementTree.fromstring(xml_string).iter('error'):
+ defect = {}
+ defect['id'] = error.get('id')
+ defect['severity'] = error.get('severity')
+ defect['msg'] = str(error.get('msg')).replace('<','&lt;')
+ defect['verbose'] = error.get('verbose')
+ for location in error.findall('location'):
+ defect['file'] = location.get('file')
+ defect['line'] = str(int(location.get('line')) - 1)
+ defects.append(defect)
+ return defects
+
+ def _create_html_report(self, defects):
+ files, css_style_defs = self._create_html_files(defects)
+ index = self._create_html_index(files)
+ self._create_css_file(css_style_defs)
+ return index
+
+ def _create_html_files(self, defects):
+ sources = {}
+ defects = [defect for defect in defects if 'file' in defect]
+ for defect in defects:
+ name = defect['file']
+ if name not in sources:
+ sources[name] = [defect]
+ else:
+ sources[name].append(defect)
+
+ files = {}
+ css_style_defs = None
+ bpath = self.generator.path.get_bld().abspath()
+ names = list(sources.keys())
+ for i in range(0,len(names)):
+ name = names[i]
+ htmlfile = 'cppcheck/%i.html' % (i)
+ errors = sources[name]
+ files[name] = { 'htmlfile': '%s/%s' % (bpath, htmlfile), 'errors': errors }
+ css_style_defs = self._create_html_file(name, htmlfile, errors)
+ return files, css_style_defs
+
+ def _create_html_file(self, sourcefile, htmlfile, errors):
+ name = self.generator.get_name()
+ root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
+ title = root.find('head/title')
+ title.text = 'cppcheck - report - %s' % name
+
+ body = root.find('body')
+ for div in body.findall('div'):
+ if div.get('id') == 'page':
+ page = div
+ break
+ for div in page.findall('div'):
+ if div.get('id') == 'header':
+ h1 = div.find('h1')
+ h1.text = 'cppcheck report - %s' % name
+ if div.get('id') == 'content':
+ content = div
+ srcnode = self.generator.bld.root.find_node(sourcefile)
+ hl_lines = [e['line'] for e in errors if 'line' in e]
+ formatter = CppcheckHtmlFormatter(linenos=True, style='colorful', hl_lines=hl_lines, lineanchors='line')
+ formatter.errors = [e for e in errors if 'line' in e]
+ css_style_defs = formatter.get_style_defs('.highlight')
+ lexer = pygments.lexers.guess_lexer_for_filename(sourcefile, "")
+ s = pygments.highlight(srcnode.read(), lexer, formatter)
+ table = ElementTree.fromstring(s)
+ content.append(table)
+
+ s = ElementTree.tostring(root, method='html')
+ s = CPPCHECK_HTML_TYPE + s
+ node = self.generator.path.get_bld().find_or_declare(htmlfile)
+ node.write(s)
+ return css_style_defs
+
+ def _create_html_index(self, files):
+ name = self.generator.get_name()
+ root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
+ title = root.find('head/title')
+ title.text = 'cppcheck - report - %s' % name
+
+ body = root.find('body')
+ for div in body.findall('div'):
+ if div.get('id') == 'page':
+ page = div
+ break
+ for div in page.findall('div'):
+ if div.get('id') == 'header':
+ h1 = div.find('h1')
+ h1.text = 'cppcheck report - %s' % name
+ if div.get('id') == 'content':
+ content = div
+ self._create_html_table(content, files)
+
+ s = ElementTree.tostring(root, method='html')
+ s = CPPCHECK_HTML_TYPE + s
+ node = self.generator.path.get_bld().find_or_declare('cppcheck/index.html')
+ node.write(s)
+ return node
+
+ def _create_html_table(self, content, files):
+ table = ElementTree.fromstring(CPPCHECK_HTML_TABLE)
+ for name, val in files.items():
+ f = val['htmlfile']
+ s = '<tr><td colspan="4"><a href="%s">%s</a></td></tr>\n' % (f,name)
+ row = ElementTree.fromstring(s)
+ table.append(row)
+
+ errors = sorted(val['errors'], key=lambda e: int(e['line']) if 'line' in e else sys.maxsize)
+ for e in errors:
+ if 'line' not in e:
+ s = '<tr><td></td><td>%s</td><td>%s</td><td>%s</td></tr>\n' % (e['id'], e['severity'], e['msg'])
+ else:
+ attr = ''
+ if e['severity'] == 'error':
+ attr = 'class="error"'
+ s = '<tr><td><a href="%s#line-%s">%s</a></td>' % (f, e['line'], e['line'])
+ s+= '<td>%s</td><td>%s</td><td %s>%s</td></tr>\n' % (e['id'], e['severity'], attr, e['msg'])
+ row = ElementTree.fromstring(s)
+ table.append(row)
+ content.append(table)
+
+ def _create_css_file(self, css_style_defs):
+ css = str(CPPCHECK_CSS_FILE)
+ if css_style_defs:
+ css = "%s\n%s\n" % (css, css_style_defs)
+ node = self.generator.path.get_bld().find_or_declare('cppcheck/style.css')
+ node.write(css)
+
+ def _errors_evaluate(self, errors, http_index):
+ name = self.generator.get_name()
+ fatal = self.fatal
+ severity = [err['severity'] for err in errors]
+ problems = [err for err in errors if err['severity'] != 'information']
+
+ if set(fatal) & set(severity):
+ exc = "\n"
+ exc += "\nccpcheck detected fatal error(s) in task '%s', see report for details:" % name
+ exc += "\n file://%r" % (http_index)
+ exc += "\n"
+ self.generator.bld.fatal(exc)
+
+ elif len(problems):
+ msg = "\nccpcheck detected (possible) problem(s) in task '%s', see report for details:" % name
+ msg += "\n file://%r" % http_index
+ msg += "\n"
+ Logs.error(msg)
+
+
+class CppcheckHtmlFormatter(pygments.formatters.HtmlFormatter):
+ errors = []
+
+ def wrap(self, source, outfile):
+ line_no = 1
+ for i, t in super(CppcheckHtmlFormatter, self).wrap(source, outfile):
+ # If this is a source code line we want to add a span tag at the end.
+ if i == 1:
+ for error in self.errors:
+ if int(error['line']) == line_no:
+ t = t.replace('\n', CPPCHECK_HTML_ERROR % error['msg'])
+ line_no = line_no + 1
+ yield i, t
+
+
+CPPCHECK_HTML_TYPE = \
+'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">\n'
+
+CPPCHECK_HTML_FILE = """
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd" [<!ENTITY nbsp "&#160;">]>
+<html>
+ <head>
+ <title>cppcheck - report - XXX</title>
+ <link href="style.css" rel="stylesheet" type="text/css" />
+ <style type="text/css">
+ </style>
+ </head>
+ <body class="body">
+ <div id="page-header">&nbsp;</div>
+ <div id="page">
+ <div id="header">
+ <h1>cppcheck report - XXX</h1>
+ </div>
+ <div id="menu">
+ <a href="index.html">Defect list</a>
+ </div>
+ <div id="content">
+ </div>
+ <div id="footer">
+ <div>cppcheck - a tool for static C/C++ code analysis</div>
+ <div>
+ Internet: <a href="http://cppcheck.sourceforge.net">http://cppcheck.sourceforge.net</a><br/>
+ Forum: <a href="http://apps.sourceforge.net/phpbb/cppcheck/">http://apps.sourceforge.net/phpbb/cppcheck/</a><br/>
+ IRC: #cppcheck at irc.freenode.net
+ </div>
+ &nbsp;
+ </div>
+ &nbsp;
+ </div>
+ <div id="page-footer">&nbsp;</div>
+ </body>
+</html>
+"""
+
+CPPCHECK_HTML_TABLE = """
+<table>
+ <tr>
+ <th>Line</th>
+ <th>Id</th>
+ <th>Severity</th>
+ <th>Message</th>
+ </tr>
+</table>
+"""
+
+CPPCHECK_HTML_ERROR = \
+'<span style="background: #ffaaaa;padding: 3px;">&lt;--- %s</span>\n'
+
+CPPCHECK_CSS_FILE = """
+body.body {
+ font-family: Arial;
+ font-size: 13px;
+ background-color: black;
+ padding: 0px;
+ margin: 0px;
+}
+
+.error {
+ font-family: Arial;
+ font-size: 13px;
+ background-color: #ffb7b7;
+ padding: 0px;
+ margin: 0px;
+}
+
+th, td {
+ min-width: 100px;
+ text-align: left;
+}
+
+#page-header {
+ clear: both;
+ width: 1200px;
+ margin: 20px auto 0px auto;
+ height: 10px;
+ border-bottom-width: 2px;
+ border-bottom-style: solid;
+ border-bottom-color: #aaaaaa;
+}
+
+#page {
+ width: 1160px;
+ margin: auto;
+ border-left-width: 2px;
+ border-left-style: solid;
+ border-left-color: #aaaaaa;
+ border-right-width: 2px;
+ border-right-style: solid;
+ border-right-color: #aaaaaa;
+ background-color: White;
+ padding: 20px;
+}
+
+#page-footer {
+ clear: both;
+ width: 1200px;
+ margin: auto;
+ height: 10px;
+ border-top-width: 2px;
+ border-top-style: solid;
+ border-top-color: #aaaaaa;
+}
+
+#header {
+ width: 100%;
+ height: 70px;
+ background-image: url(logo.png);
+ background-repeat: no-repeat;
+ background-position: left top;
+ border-bottom-style: solid;
+ border-bottom-width: thin;
+ border-bottom-color: #aaaaaa;
+}
+
+#menu {
+ margin-top: 5px;
+ text-align: left;
+ float: left;
+ width: 100px;
+ height: 300px;
+}
+
+#menu > a {
+ margin-left: 10px;
+ display: block;
+}
+
+#content {
+ float: left;
+ width: 1020px;
+ margin: 5px;
+ padding: 0px 10px 10px 10px;
+ border-left-style: solid;
+ border-left-width: thin;
+ border-left-color: #aaaaaa;
+}
+
+#footer {
+ padding-bottom: 5px;
+ padding-top: 5px;
+ border-top-style: solid;
+ border-top-width: thin;
+ border-top-color: #aaaaaa;
+ clear: both;
+ font-size: 10px;
+}
+
+#footer > div {
+ float: left;
+ width: 33%;
+}
+
+"""
diff --git a/third_party/waf/waflib/extras/cpplint.py b/third_party/waf/waflib/extras/cpplint.py
new file mode 100644
index 00000000000..e574ab115d6
--- /dev/null
+++ b/third_party/waf/waflib/extras/cpplint.py
@@ -0,0 +1,217 @@
+#! /usr/bin/env python
+# encoding: utf-8
+#
+# written by Sylvain Rouquette, 2014
+
+'''
+
+This is an extra tool, not bundled with the default waf binary.
+To add the cpplint tool to the waf file:
+$ ./waf-light --tools=compat15,cpplint
+
+This tool also requires cpplint for Python.
+If you have pip, you can install it like this: pip install cpplint
+
+When using this tool, the wscript will look like:
+
+ def options(opt):
+ opt.load('compiler_cxx cpplint')
+
+ def configure(conf):
+ conf.load('compiler_cxx cpplint')
+ # optional, you can also specify them on the command line
+ conf.env.CPPLINT_FILTERS = ','.join((
+ '-whitespace/newline', # c++11 lambda
+ '-readability/braces', # c++11 constructor
+ '-whitespace/braces', # c++11 constructor
+ '-build/storage_class', # c++11 for-range
+ '-whitespace/blank_line', # user pref
+ '-whitespace/labels' # user pref
+ ))
+
+ def build(bld):
+ bld(features='cpplint', source='main.cpp', target='app')
+ # add include files, because they aren't usually built
+ bld(features='cpplint', source=bld.path.ant_glob('**/*.hpp'))
+'''
+
+import sys, re
+import logging
+import threading
+from waflib import Task, TaskGen, Logs, Options, Node
+try:
+ import cpplint.cpplint as cpplint_tool
+except ImportError:
+ try:
+ import cpplint as cpplint_tool
+ except ImportError:
+ pass
+
+
+critical_errors = 0
+CPPLINT_FORMAT = '[CPPLINT] %(filename)s:\nline %(linenum)s, severity %(confidence)s, category: %(category)s\n%(message)s\n'
+RE_EMACS = re.compile(r'(?P<filename>.*):(?P<linenum>\d+): (?P<message>.*) \[(?P<category>.*)\] \[(?P<confidence>\d+)\]')
+CPPLINT_RE = {
+ 'waf': RE_EMACS,
+ 'emacs': RE_EMACS,
+ 'vs7': re.compile(r'(?P<filename>.*)\((?P<linenum>\d+)\): (?P<message>.*) \[(?P<category>.*)\] \[(?P<confidence>\d+)\]'),
+ 'eclipse': re.compile(r'(?P<filename>.*):(?P<linenum>\d+): warning: (?P<message>.*) \[(?P<category>.*)\] \[(?P<confidence>\d+)\]'),
+}
+
+def options(opt):
+ opt.add_option('--cpplint-filters', type='string',
+ default='', dest='CPPLINT_FILTERS',
+ help='add filters to cpplint')
+ opt.add_option('--cpplint-length', type='int',
+ default=80, dest='CPPLINT_LINE_LENGTH',
+ help='specify the line length (default: 80)')
+ opt.add_option('--cpplint-level', default=1, type='int', dest='CPPLINT_LEVEL',
+ help='specify the log level (default: 1)')
+ opt.add_option('--cpplint-break', default=5, type='int', dest='CPPLINT_BREAK',
+ help='break the build if error >= level (default: 5)')
+ opt.add_option('--cpplint-skip', action='store_true',
+ default=False, dest='CPPLINT_SKIP',
+ help='skip cpplint during build')
+ opt.add_option('--cpplint-output', type='string',
+ default='waf', dest='CPPLINT_OUTPUT',
+ help='select output format (waf, emacs, vs7)')
+
+
+def configure(conf):
+ conf.start_msg('Checking cpplint')
+ try:
+ cpplint_tool._cpplint_state
+ conf.end_msg('ok')
+ except NameError:
+ conf.env.CPPLINT_SKIP = True
+ conf.end_msg('not found, skipping it.')
+
+
+class cpplint_formatter(Logs.formatter):
+ def __init__(self, fmt):
+ logging.Formatter.__init__(self, CPPLINT_FORMAT)
+ self.fmt = fmt
+
+ def format(self, rec):
+ if self.fmt == 'waf':
+ result = CPPLINT_RE[self.fmt].match(rec.msg).groupdict()
+ rec.msg = CPPLINT_FORMAT % result
+ if rec.levelno <= logging.INFO:
+ rec.c1 = Logs.colors.CYAN
+ return super(cpplint_formatter, self).format(rec)
+
+
+class cpplint_handler(Logs.log_handler):
+ def __init__(self, stream=sys.stderr, **kw):
+ super(cpplint_handler, self).__init__(stream, **kw)
+ self.stream = stream
+
+ def emit(self, rec):
+ rec.stream = self.stream
+ self.emit_override(rec)
+ self.flush()
+
+
+class cpplint_wrapper(object):
+ stream = None
+ tasks_count = 0
+ lock = threading.RLock()
+
+ def __init__(self, logger, threshold, fmt):
+ self.logger = logger
+ self.threshold = threshold
+ self.error_count = 0
+ self.fmt = fmt
+
+ def __enter__(self):
+ with cpplint_wrapper.lock:
+ cpplint_wrapper.tasks_count += 1
+ if cpplint_wrapper.tasks_count == 1:
+ sys.stderr.flush()
+ cpplint_wrapper.stream = sys.stderr
+ sys.stderr = self
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ with cpplint_wrapper.lock:
+ cpplint_wrapper.tasks_count -= 1
+ if cpplint_wrapper.tasks_count == 0:
+ sys.stderr = cpplint_wrapper.stream
+ sys.stderr.flush()
+
+ def isatty(self):
+ return True
+
+ def write(self, message):
+ global critical_errors
+ result = CPPLINT_RE[self.fmt].match(message)
+ if not result:
+ return
+ level = int(result.groupdict()['confidence'])
+ if level >= self.threshold:
+ critical_errors += 1
+ if level <= 2:
+ self.logger.info(message)
+ elif level <= 4:
+ self.logger.warning(message)
+ else:
+ self.logger.error(message)
+
+
+cpplint_logger = None
+def get_cpplint_logger(fmt):
+ global cpplint_logger
+ if cpplint_logger:
+ return cpplint_logger
+ cpplint_logger = logging.getLogger('cpplint')
+ hdlr = cpplint_handler()
+ hdlr.setFormatter(cpplint_formatter(fmt))
+ cpplint_logger.addHandler(hdlr)
+ cpplint_logger.setLevel(logging.DEBUG)
+ return cpplint_logger
+
+
+class cpplint(Task.Task):
+ color = 'PINK'
+
+ def __init__(self, *k, **kw):
+ super(cpplint, self).__init__(*k, **kw)
+
+ def run(self):
+ global critical_errors
+ with cpplint_wrapper(get_cpplint_logger(self.env.CPPLINT_OUTPUT), self.env.CPPLINT_BREAK, self.env.CPPLINT_OUTPUT):
+ if self.env.CPPLINT_OUTPUT != 'waf':
+ cpplint_tool._cpplint_state.output_format = self.env.CPPLINT_OUTPUT
+ cpplint_tool._cpplint_state.SetFilters(self.env.CPPLINT_FILTERS)
+ cpplint_tool._line_length = self.env.CPPLINT_LINE_LENGTH
+ cpplint_tool.ProcessFile(self.inputs[0].abspath(), self.env.CPPLINT_LEVEL)
+ return critical_errors
+
+@TaskGen.extension('.h', '.hh', '.hpp', '.hxx')
+def cpplint_includes(self, node):
+ pass
+
+@TaskGen.feature('cpplint')
+@TaskGen.before_method('process_source')
+def post_cpplint(self):
+ if self.env.CPPLINT_SKIP:
+ return
+
+ if not self.env.CPPLINT_INITIALIZED:
+ for key, value in Options.options.__dict__.items():
+ if not key.startswith('CPPLINT_') or self.env[key]:
+ continue
+ self.env[key] = value
+ self.env.CPPLINT_INITIALIZED = True
+
+ if not self.env.CPPLINT_OUTPUT in CPPLINT_RE:
+ return
+
+ for src in self.to_list(getattr(self, 'source', [])):
+ if isinstance(src, Node.Node):
+ node = src
+ else:
+ node = self.path.find_or_declare(src)
+ if not node:
+ self.bld.fatal('Could not find %r' % src)
+ self.create_task('cpplint', node)
diff --git a/third_party/waf/waflib/extras/cython.py b/third_party/waf/waflib/extras/cython.py
new file mode 100644
index 00000000000..26d1c6f147c
--- /dev/null
+++ b/third_party/waf/waflib/extras/cython.py
@@ -0,0 +1,145 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010-2015
+
+import os, re
+from waflib import Task, Logs
+from waflib.TaskGen import extension
+
+cy_api_pat = re.compile(r'\s*?cdef\s*?(public|api)\w*')
+re_cyt = re.compile(r"""
+ (?:from\s+(\w+)\s+)? # optionally match "from foo" and capture foo
+ c?import\s(\w+|[*]) # require "import bar" and capture bar
+ """, re.M | re.VERBOSE)
+
+@extension('.pyx')
+def add_cython_file(self, node):
+ """
+ Process a *.pyx* file given in the list of source files. No additional
+ feature is required::
+
+ def build(bld):
+ bld(features='c cshlib pyext', source='main.c foo.pyx', target='app')
+ """
+ ext = '.c'
+ if 'cxx' in self.features:
+ self.env.append_unique('CYTHONFLAGS', '--cplus')
+ ext = '.cc'
+
+ for x in getattr(self, 'cython_includes', []):
+ # TODO re-use these nodes in "scan" below
+ d = self.path.find_dir(x)
+ if d:
+ self.env.append_unique('CYTHONFLAGS', '-I%s' % d.abspath())
+
+ tsk = self.create_task('cython', node, node.change_ext(ext))
+ self.source += tsk.outputs
+
+class cython(Task.Task):
+ run_str = '${CYTHON} ${CYTHONFLAGS} -o ${TGT[0].abspath()} ${SRC}'
+ color = 'GREEN'
+
+ vars = ['INCLUDES']
+ """
+ Rebuild whenever the INCLUDES change. The variables such as CYTHONFLAGS will be appended
+ by the metaclass.
+ """
+
+ ext_out = ['.h']
+ """
+ The creation of a .h file is known only after the build has begun, so it is not
+ possible to compute a build order just by looking at the task inputs/outputs.
+ """
+
+ def runnable_status(self):
+ """
+ Perform a double-check to add the headers created by cython
+ to the output nodes. The scanner is executed only when the cython task
+ must be executed (optimization).
+ """
+ ret = super(cython, self).runnable_status()
+ if ret == Task.ASK_LATER:
+ return ret
+ for x in self.generator.bld.raw_deps[self.uid()]:
+ if x.startswith('header:'):
+ self.outputs.append(self.inputs[0].parent.find_or_declare(x.replace('header:', '')))
+ return super(cython, self).runnable_status()
+
+ def post_run(self):
+ for x in self.outputs:
+ if x.name.endswith('.h'):
+ if not os.path.exists(x.abspath()):
+ if Logs.verbose:
+ Logs.warn('Expected %r' % x.abspath())
+ x.write('')
+ return Task.Task.post_run(self)
+
+ def scan(self):
+ """
+ Return the dependent files (.pxd) by looking in the include folders.
+ Put the headers to generate in the custom list "bld.raw_deps".
+ To inspect the scanner results use::
+
+ $ waf clean build --zones=deps
+ """
+ node = self.inputs[0]
+ txt = node.read()
+
+ mods = []
+ for m in re_cyt.finditer(txt):
+ if m.group(1): # matches "from foo import bar"
+ mods.append(m.group(1))
+ else:
+ mods.append(m.group(2))
+
+ Logs.debug("cython: mods %r" % mods)
+ incs = getattr(self.generator, 'cython_includes', [])
+ incs = [self.generator.path.find_dir(x) for x in incs]
+ incs.append(node.parent)
+
+ found = []
+ missing = []
+ for x in mods:
+ for y in incs:
+ k = y.find_resource(x + '.pxd')
+ if k:
+ found.append(k)
+ break
+ else:
+ missing.append(x)
+
+ # the cython file implicitly depends on a pxd file that might be present
+ implicit = node.parent.find_resource(node.name[:-3] + 'pxd')
+ if implicit:
+ found.append(implicit)
+
+ Logs.debug("cython: found %r" % found)
+
+ # Now the .h created - store them in bld.raw_deps for later use
+ has_api = False
+ has_public = False
+ for l in txt.splitlines():
+ if cy_api_pat.match(l):
+ if ' api ' in l:
+ has_api = True
+ if ' public ' in l:
+ has_public = True
+ name = node.name.replace('.pyx', '')
+ if has_api:
+ missing.append('header:%s_api.h' % name)
+ if has_public:
+ missing.append('header:%s.h' % name)
+
+ return (found, missing)
+
+def options(ctx):
+ ctx.add_option('--cython-flags', action='store', default='', help='space separated list of flags to pass to cython')
+
+def configure(ctx):
+ if not ctx.env.CC and not ctx.env.CXX:
+ ctx.fatal('Load a C/C++ compiler first')
+ if not ctx.env.PYTHON:
+ ctx.fatal('Load the python tool first!')
+ ctx.find_program('cython', var='CYTHON')
+ if ctx.options.cython_flags:
+ ctx.env.CYTHONFLAGS = ctx.options.cython_flags
diff --git a/third_party/waf/waflib/extras/dcc.py b/third_party/waf/waflib/extras/dcc.py
new file mode 100644
index 00000000000..8fd209686bb
--- /dev/null
+++ b/third_party/waf/waflib/extras/dcc.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Jérôme Carretero, 2011 (zougloub)
+
+from waflib import Options
+from waflib.Tools import ccroot
+from waflib.Configure import conf
+
+@conf
+def find_dcc(conf):
+ conf.find_program(['dcc'], var='CC', path_list=getattr(Options.options, 'diabbindir', ""))
+ conf.env.CC_NAME = 'dcc'
+
+@conf
+def find_dld(conf):
+ conf.find_program(['dld'], var='LINK_CC', path_list=getattr(Options.options, 'diabbindir', ""))
+ conf.env.LINK_CC_NAME = 'dld'
+
+@conf
+def find_dar(conf):
+ conf.find_program(['dar'], var='AR', path_list=getattr(Options.options, 'diabbindir', ""))
+ conf.env.AR_NAME = 'dar'
+ conf.env.ARFLAGS = 'rcs'
+
+@conf
+def find_ddump(conf):
+ conf.find_program(['ddump'], var='DDUMP', path_list=getattr(Options.options, 'diabbindir', ""))
+
+@conf
+def dcc_common_flags(conf):
+ v = conf.env
+ v['CC_SRC_F'] = []
+ v['CC_TGT_F'] = ['-c', '-o']
+
+ # linker
+ if not v['LINK_CC']: v['LINK_CC'] = v['CC']
+ v['CCLNK_SRC_F'] = []
+ v['CCLNK_TGT_F'] = ['-o']
+ v['CPPPATH_ST'] = '-I%s'
+ v['DEFINES_ST'] = '-D%s'
+
+ v['LIB_ST'] = '-l:%s' # template for adding libs
+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
+ v['STLIB_ST'] = '-l:%s'
+ v['STLIBPATH_ST'] = '-L%s'
+ v['RPATH_ST'] = '-Wl,-rpath,%s'
+ #v['STLIB_MARKER'] = '-Wl,-Bstatic'
+
+ # program
+ v['cprogram_PATTERN'] = '%s.elf'
+
+ # static lib
+ v['LINKFLAGS_cstlib'] = ['-Wl,-Bstatic']
+ v['cstlib_PATTERN'] = 'lib%s.a'
+
+def configure(conf):
+ conf.find_dcc()
+ conf.find_dar()
+ conf.find_dld()
+ conf.find_ddump()
+ conf.dcc_common_flags()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
+
+def options(opt):
+ """
+ Add the ``--with-diab-bindir`` command-line options.
+ """
+ opt.add_option('--with-diab-bindir', type='string', dest='diabbindir', help = 'Specify alternate diab bin folder', default="")
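+
+# Configuration sketch (hypothetical install path):
+#
+#   ./waf configure --with-diab-bindir=/opt/diab/bin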
diff --git a/third_party/waf/waflib/extras/distnet.py b/third_party/waf/waflib/extras/distnet.py
new file mode 100644
index 00000000000..ac8c34491e1
--- /dev/null
+++ b/third_party/waf/waflib/extras/distnet.py
@@ -0,0 +1,431 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+waf-powered distributed network builds, with a network cache.
+
+Caching files from a server has advantages over a NFS/Samba shared folder:
+
+- builds are much faster because they use local files
+- builds just continue to work in case of a network glitch
+- permissions are much simpler to manage
+"""
+
+import os, urllib, tarfile, re, shutil, tempfile, sys
+from collections import OrderedDict
+from waflib import Context, Utils, Logs
+
+try:
+ from urllib.parse import urlencode
+except ImportError:
+ urlencode = urllib.urlencode
+
+def safe_urlencode(data):
+ x = urlencode(data)
+ try:
+ x = x.encode('utf-8')
+ except Exception:
+ pass
+ return x
+
+try:
+ from urllib.error import URLError
+except ImportError:
+ from urllib2 import URLError
+
+try:
+ from urllib.request import Request, urlopen
+except ImportError:
+ from urllib2 import Request, urlopen
+
+DISTNETCACHE = os.environ.get('DISTNETCACHE', '/tmp/distnetcache')
+DISTNETSERVER = os.environ.get('DISTNETSERVER', 'http://localhost:8000/cgi-bin/')
+TARFORMAT = 'w:bz2'
+TIMEOUT = 60
+REQUIRES = 'requires.txt'
+
+re_com = re.compile(r'\s*#.*', re.M)
+
+def total_version_order(num):
+ lst = num.split('.')
+ template = '%10s' * len(lst)
+ ret = template % tuple(lst)
+ return ret
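+
+# For example (a sketch): total_version_order('1.2.10') right-aligns each
+# component in a 10-character field, so plain string comparison of the results
+# agrees with numeric ordering ('1.10.0' sorts after '1.9.0').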
+
+def get_distnet_cache():
+ return getattr(Context.g_module, 'DISTNETCACHE', DISTNETCACHE)
+
+def get_server_url():
+ return getattr(Context.g_module, 'DISTNETSERVER', DISTNETSERVER)
+
+def get_download_url():
+ return '%s/download.py' % get_server_url()
+
+def get_upload_url():
+ return '%s/upload.py' % get_server_url()
+
+def get_resolve_url():
+ return '%s/resolve.py' % get_server_url()
+
+def send_package_name():
+ out = getattr(Context.g_module, 'out', 'build')
+ pkgfile = '%s/package_to_upload.tarfile' % out
+ return pkgfile
+
+class package(Context.Context):
+ fun = 'package'
+ cmd = 'package'
+
+ def execute(self):
+ try:
+ files = self.files
+ except AttributeError:
+ files = self.files = []
+
+ Context.Context.execute(self)
+ pkgfile = send_package_name()
+ if not pkgfile in files:
+ if not REQUIRES in files:
+ files.append(REQUIRES)
+ self.make_tarfile(pkgfile, files, add_to_package=False)
+
+ def make_tarfile(self, filename, files, **kw):
+ if kw.get('add_to_package', True):
+ self.files.append(filename)
+
+ with tarfile.open(filename, TARFORMAT) as tar:
+ endname = os.path.split(filename)[-1]
+ endname = endname.split('.')[0] + '/'
+ for x in files:
+ tarinfo = tar.gettarinfo(x, x)
+ tarinfo.uid = tarinfo.gid = 0
+ tarinfo.uname = tarinfo.gname = 'root'
+ tarinfo.size = os.stat(x).st_size
+
+ # TODO - more archive creation options?
+ if kw.get('bare', True):
+ tarinfo.name = os.path.split(x)[1]
+ else:
+ tarinfo.name = endname + x # todo, if tuple, then..
+ Logs.debug("adding %r to %s" % (tarinfo.name, filename))
+ with open(x, 'rb') as f:
+ tar.addfile(tarinfo, f)
+ Logs.info('Created %s' % filename)
+
+class publish(Context.Context):
+ fun = 'publish'
+ cmd = 'publish'
+ def execute(self):
+ if hasattr(Context.g_module, 'publish'):
+ Context.Context.execute(self)
+ mod = Context.g_module
+
+ rfile = getattr(self, 'rfile', send_package_name())
+ if not os.path.isfile(rfile):
+ self.fatal('Create the release file with "waf release" first! %r' % rfile)
+
+ fdata = Utils.readf(rfile, m='rb')
+ data = safe_urlencode([('pkgdata', fdata), ('pkgname', mod.APPNAME), ('pkgver', mod.VERSION)])
+
+ req = Request(get_upload_url(), data)
+ response = urlopen(req, timeout=TIMEOUT)
+ data = response.read().strip()
+
+ if sys.hexversion>0x300000f:
+ data = data.decode('utf-8')
+
+ if data != 'ok':
+ self.fatal('Could not publish the package %r' % data)
+
+class constraint(object):
+ def __init__(self, line=''):
+ self.required_line = line
+ self.info = []
+
+ line = line.strip()
+ if not line:
+ return
+
+ lst = line.split(',')
+ if lst:
+ self.pkgname = lst[0]
+ self.required_version = lst[1]
+ for k in lst:
+ a, b, c = k.partition('=')
+ if a and c:
+ self.info.append((a, c))
+ def __str__(self):
+ buf = []
+ buf.append(self.pkgname)
+ buf.append(self.required_version)
+ for k in self.info:
+ buf.append('%s=%s' % k)
+ return ','.join(buf)
+
+ def __repr__(self):
+ return "requires %s-%s" % (self.pkgname, self.required_version)
+
+ def human_display(self, pkgname, pkgver):
+ return '%s-%s requires %s-%s' % (pkgname, pkgver, self.pkgname, self.required_version)
+
+ def why(self):
+ ret = []
+ for x in self.info:
+ if x[0] == 'reason':
+ ret.append(x[1])
+ return ret
+
+ def add_reason(self, reason):
+ self.info.append(('reason', reason))
+
+def parse_constraints(text):
+ assert(text is not None)
+ constraints = []
+ text = re.sub(re_com, '', text)
+ lines = text.splitlines()
+ for line in lines:
+ line = line.strip()
+ if not line:
+ continue
+ constraints.append(constraint(line))
+ return constraints
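+
+# Each non-comment line in requires.txt follows the form parsed above:
+# "pkgname,version[,key=value...]". A hypothetical example:
+#
+#   libfoo,1.0.*,reason=needed by the demo app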
+
+def list_package_versions(cachedir, pkgname):
+ pkgdir = os.path.join(cachedir, pkgname)
+ try:
+ versions = os.listdir(pkgdir)
+ except OSError:
+ return []
+ versions.sort(key=total_version_order)
+ versions.reverse()
+ return versions
+
+class package_reader(Context.Context):
+ cmd = 'solver'
+ fun = 'solver'
+
+ def __init__(self, **kw):
+ Context.Context.__init__(self, **kw)
+
+ self.myproject = getattr(Context.g_module, 'APPNAME', 'project')
+ self.myversion = getattr(Context.g_module, 'VERSION', '1.0')
+ self.cache_constraints = {}
+ self.constraints = []
+
+ def compute_dependencies(self, filename=REQUIRES):
+ text = Utils.readf(filename)
+ data = safe_urlencode([('text', text)])
+
+ if '--offline' in sys.argv:
+ self.constraints = self.local_resolve(text)
+ else:
+ req = Request(get_resolve_url(), data)
+ try:
+ response = urlopen(req, timeout=TIMEOUT)
+ except URLError as e:
+ Logs.warn('The package server is down! %r' % e)
+ self.constraints = self.local_resolve(text)
+ else:
+ ret = response.read()
+ try:
+ ret = ret.decode('utf-8')
+ except Exception:
+ pass
+ self.trace(ret)
+ self.constraints = parse_constraints(ret)
+ self.check_errors()
+
+ def check_errors(self):
+ errors = False
+ for c in self.constraints:
+ if not c.required_version:
+ errors = True
+
+ reasons = c.why()
+ if len(reasons) == 1:
+ Logs.error('%s but no matching package could be found in this repository' % reasons[0])
+ else:
+ Logs.error('Conflicts on package %r:' % c.pkgname)
+ for r in reasons:
+ Logs.error(' %s' % r)
+ if errors:
+ self.fatal('The package requirements cannot be satisfied!')
+
+ def load_constraints(self, pkgname, pkgver, requires=REQUIRES):
+ try:
+ return self.cache_constraints[(pkgname, pkgver)]
+ except KeyError:
+ #Logs.error("no key %r" % (pkgname, pkgver))
+ text = Utils.readf(os.path.join(get_distnet_cache(), pkgname, pkgver, requires))
+ ret = parse_constraints(text)
+ self.cache_constraints[(pkgname, pkgver)] = ret
+ return ret
+
+ def apply_constraint(self, domain, constraint):
+ vname = constraint.required_version.replace('*', '.*')
+ rev = re.compile(vname, re.M)
+ ret = [x for x in domain if rev.match(x)]
+ return ret
+
+ def trace(self, *k):
+ if getattr(self, 'debug', None):
+ Logs.error(*k)
+
+ def solve(self, packages_to_versions={}, packages_to_constraints={}, pkgname='', pkgver='', todo=[], done=[]):
+ # breadth first search
+ n_packages_to_versions = dict(packages_to_versions)
+ n_packages_to_constraints = dict(packages_to_constraints)
+
+ self.trace("calling solve with %r %r %r" % (packages_to_versions, todo, done))
+ done = done + [pkgname]
+
+ constraints = self.load_constraints(pkgname, pkgver)
+ self.trace("constraints %r" % constraints)
+
+ for k in constraints:
+ try:
+ domain = n_packages_to_versions[k.pkgname]
+ except KeyError:
+ domain = list_package_versions(get_distnet_cache(), k.pkgname)
+
+
+ self.trace("constraints?")
+ if not k.pkgname in done:
+ todo = todo + [k.pkgname]
+
+ self.trace("domain before %s -> %s, %r" % (pkgname, k.pkgname, domain))
+
+ # apply the constraint
+ domain = self.apply_constraint(domain, k)
+
+ self.trace("domain after %s -> %s, %r" % (pkgname, k.pkgname, domain))
+
+ n_packages_to_versions[k.pkgname] = domain
+
+ # then store the constraint applied
+ constraints = list(packages_to_constraints.get(k.pkgname, []))
+ constraints.append((pkgname, pkgver, k))
+ n_packages_to_constraints[k.pkgname] = constraints
+
+ if not domain:
+ self.trace("no domain while processing constraint %r from %r %r" % (domain, pkgname, pkgver))
+ return (n_packages_to_versions, n_packages_to_constraints)
+
+ # next package on the todo list
+ if not todo:
+ return (n_packages_to_versions, n_packages_to_constraints)
+
+ n_pkgname = todo[0]
+ n_pkgver = n_packages_to_versions[n_pkgname][0]
+ tmp = dict(n_packages_to_versions)
+ tmp[n_pkgname] = [n_pkgver]
+
+ self.trace("fixed point %s" % n_pkgname)
+
+ return self.solve(tmp, n_packages_to_constraints, n_pkgname, n_pkgver, todo[1:], done)
+
+ def get_results(self):
+ return '\n'.join([str(c) for c in self.constraints])
+
+ def solution_to_constraints(self, versions, constraints):
+ solution = []
+ for p in versions.keys():
+ c = constraint()
+ solution.append(c)
+
+ c.pkgname = p
+ if versions[p]:
+ c.required_version = versions[p][0]
+ else:
+ c.required_version = ''
+ for (from_pkgname, from_pkgver, c2) in constraints.get(p, ''):
+ c.add_reason(c2.human_display(from_pkgname, from_pkgver))
+ return solution
+
+ def local_resolve(self, text):
+ self.cache_constraints[(self.myproject, self.myversion)] = parse_constraints(text)
+ p2v = OrderedDict({self.myproject: [self.myversion]})
+ (versions, constraints) = self.solve(p2v, {}, self.myproject, self.myversion, [])
+ return self.solution_to_constraints(versions, constraints)
+
+ def download_to_file(self, pkgname, pkgver, subdir, tmp):
+ data = safe_urlencode([('pkgname', pkgname), ('pkgver', pkgver), ('pkgfile', subdir)])
+ req = urlopen(get_download_url(), data, timeout=TIMEOUT)
+ with open(tmp, 'wb') as f:
+ while True:
+ buf = req.read(8192)
+ if not buf:
+ break
+ f.write(buf)
+
+ def extract_tar(self, subdir, pkgdir, tmpfile):
+ with tarfile.open(tmpfile) as f:
+ temp = tempfile.mkdtemp(dir=pkgdir)
+ try:
+ f.extractall(temp)
+ os.rename(temp, os.path.join(pkgdir, subdir))
+ finally:
+ try:
+ shutil.rmtree(temp)
+ except Exception:
+ pass
+
+ def get_pkg_dir(self, pkgname, pkgver, subdir):
+ pkgdir = os.path.join(get_distnet_cache(), pkgname, pkgver)
+ if not os.path.isdir(pkgdir):
+ os.makedirs(pkgdir)
+
+ target = os.path.join(pkgdir, subdir)
+
+ if os.path.exists(target):
+ return target
+
+ (fd, tmp) = tempfile.mkstemp(dir=pkgdir)
+ try:
+ os.close(fd)
+ self.download_to_file(pkgname, pkgver, subdir, tmp)
+ if subdir == REQUIRES:
+ os.rename(tmp, target)
+ else:
+ self.extract_tar(subdir, pkgdir, tmp)
+ finally:
+ try:
+ os.remove(tmp)
+ except OSError:
+ pass
+
+ return target
+
+ def __iter__(self):
+ if not self.constraints:
+ self.compute_dependencies()
+ for x in self.constraints:
+ if x.pkgname == self.myproject:
+ continue
+ yield x
+
+ def execute(self):
+ self.compute_dependencies()
+
+packages = package_reader()
+
+def load_tools(ctx, extra):
+ global packages
+ for c in packages:
+ packages.get_pkg_dir(c.pkgname, c.required_version, extra)
+ noarchdir = packages.get_pkg_dir(c.pkgname, c.required_version, 'noarch')
+ for x in os.listdir(noarchdir):
+ if x.startswith('waf_') and x.endswith('.py'):
+ ctx.load([x[:-3]], tooldir=[noarchdir]) # strip the '.py' suffix; rstrip would strip characters, not a suffix
+
+def options(opt):
+ opt.add_option('--offline', action='store_true')
+ packages.execute()
+ load_tools(opt, REQUIRES)
+
+def configure(conf):
+ load_tools(conf, conf.variant)
+
+def build(bld):
+ load_tools(bld, bld.variant)
diff --git a/third_party/waf/waflib/extras/doxygen.py b/third_party/waf/waflib/extras/doxygen.py
new file mode 100644
index 00000000000..9e17595f89a
--- /dev/null
+++ b/third_party/waf/waflib/extras/doxygen.py
@@ -0,0 +1,226 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+# Thomas Nagy 2008-2010 (ita)
+
+"""
+
+Doxygen support
+
+Variables passed to bld():
+* doxyfile -- the Doxyfile to use
+* doxy_tar -- destination archive for generated documentation (if desired)
+* install_path -- where to install the documentation
+* pars -- dictionary overriding doxygen configuration settings
+
+When using this tool, the wscript will look like:
+
+ def options(opt):
+ opt.load('doxygen')
+
+ def configure(conf):
+ conf.load('doxygen')
+ # check conf.env.DOXYGEN, if it is mandatory
+
+ def build(bld):
+ if bld.env.DOXYGEN:
+ bld(features="doxygen", doxyfile='Doxyfile', ...)
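+
+ # an illustrative variant using the optional variables (the values
+ # shown are placeholders, not defaults):
+ bld(features="doxygen",
+ doxyfile='Doxyfile',
+ doxy_tar='docs.tar.bz2',
+ install_path='${PREFIX}/share/doc',
+ pars={'PROJECT_NUMBER': '1.0'})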
+"""
+
+import os, os.path, re
+from waflib import Task, Utils, Node
+from waflib.TaskGen import feature
+
+DOXY_STR = '"${DOXYGEN}" - '
+DOXY_FMTS = 'html latex man rtf xml'.split()
+DOXY_FILE_PATTERNS = '*.' + ' *.'.join('''
+c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx hpp h++ idl odl cs php php3
+inc m mm py f90
+'''.split())
+
+re_rl = re.compile('\\\\\r*\n', re.MULTILINE)
+re_nl = re.compile('\r*\n', re.M)
+def parse_doxy(txt):
+ tbl = {}
+ txt = re_rl.sub('', txt)
+ lines = re_nl.split(txt)
+ for x in lines:
+ x = x.strip()
+ if not x or x.startswith('#') or x.find('=') < 0:
+ continue
+ if x.find('+=') >= 0:
+ tmp = x.split('+=')
+ key = tmp[0].strip()
+ if key in tbl:
+ tbl[key] += ' ' + '+='.join(tmp[1:]).strip()
+ else:
+ tbl[key] = '+='.join(tmp[1:]).strip()
+ else:
+ tmp = x.split('=')
+ tbl[tmp[0].strip()] = '='.join(tmp[1:]).strip()
+ return tbl
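+
+# For illustration, given a hypothetical Doxyfile fragment:
+#   PROJECT_NAME = demo
+#   INPUT       += src
+#   INPUT       += include
+# parse_doxy() returns {'PROJECT_NAME': 'demo', 'INPUT': 'src include'}.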
+
+class doxygen(Task.Task):
+ vars = ['DOXYGEN', 'DOXYFLAGS']
+ color = 'BLUE'
+
+ def runnable_status(self):
+ '''
+ self.pars is populated here, in runnable_status(), because this
+ method runs *before* both consumers of self.pars: scan() and run().
+
+ It also sets output_dir (a node) for the output.
+ '''
+
+ for x in self.run_after:
+ if not x.hasrun:
+ return Task.ASK_LATER
+
+ if not getattr(self, 'pars', None):
+ txt = self.inputs[0].read()
+ self.pars = parse_doxy(txt)
+ if self.pars.get('OUTPUT_DIRECTORY'):
+ # Use the OUTPUT_DIRECTORY parsed from the Doxyfile, resolved against the build directory
+ output_node = self.inputs[0].parent.get_bld().make_node(self.pars['OUTPUT_DIRECTORY'])
+ else:
+ # If no OUTPUT_DIRECTORY was specified in the Doxyfile, build the path from the Doxyfile name + '.doxy'
+ output_node = self.inputs[0].parent.get_bld().make_node(self.inputs[0].name + '.doxy')
+ output_node.mkdir()
+ self.pars['OUTPUT_DIRECTORY'] = output_node.abspath()
+
+ # Override with any parameters passed to the task generator
+ if getattr(self.generator, 'pars', None):
+ for k, v in self.generator.pars.items():
+ self.pars[k] = v
+
+ self.doxy_inputs = getattr(self, 'doxy_inputs', [])
+ if not self.pars.get('INPUT'):
+ self.doxy_inputs.append(self.inputs[0].parent)
+ else:
+ for i in self.pars.get('INPUT').split():
+ if os.path.isabs(i):
+ node = self.generator.bld.root.find_node(i)
+ else:
+ node = self.inputs[0].parent.find_node(i)
+ if not node:
+ self.generator.bld.fatal('Could not find the doxygen input %r' % i)
+ self.doxy_inputs.append(node)
+
+ if not getattr(self, 'output_dir', None):
+ bld = self.generator.bld
+ # Output path is always an absolute path as it was transformed above.
+ self.output_dir = bld.root.find_dir(self.pars['OUTPUT_DIRECTORY'])
+
+ self.signature()
+ ret = Task.Task.runnable_status(self)
+ if ret == Task.SKIP_ME:
+ # in case the files were removed
+ self.add_install()
+ return ret
+
+ def scan(self):
+ exclude_patterns = self.pars.get('EXCLUDE_PATTERNS','').split()
+ file_patterns = self.pars.get('FILE_PATTERNS','').split()
+ if not file_patterns:
+ file_patterns = DOXY_FILE_PATTERNS
+ if self.pars.get('RECURSIVE') == 'YES':
+ file_patterns = ["**/%s" % pattern for pattern in file_patterns]
+ nodes = []
+ names = []
+ for node in self.doxy_inputs:
+ if os.path.isdir(node.abspath()):
+ for m in node.ant_glob(incl=file_patterns, excl=exclude_patterns):
+ nodes.append(m)
+ else:
+ nodes.append(node)
+ return (nodes, names)
+
+ def run(self):
+ dct = self.pars.copy()
+ code = '\n'.join(['%s = %s' % (x, dct[x]) for x in self.pars])
+ code = code.encode() # for python 3
+ #fmt = DOXY_STR % (self.inputs[0].parent.abspath())
+ cmd = Utils.subst_vars(DOXY_STR, self.env)
+ env = self.env.env or None
+ proc = Utils.subprocess.Popen(cmd, shell=True, stdin=Utils.subprocess.PIPE, env=env, cwd=self.inputs[0].parent.abspath())
+ proc.communicate(code)
+ return proc.returncode
+
+ def post_run(self):
+ nodes = self.output_dir.ant_glob('**/*', quiet=True)
+ for x in nodes:
+ x.sig = Utils.h_file(x.abspath())
+ self.add_install()
+ return Task.Task.post_run(self)
+
+ def add_install(self):
+ nodes = self.output_dir.ant_glob('**/*', quiet=True)
+ self.outputs += nodes
+ if getattr(self.generator, 'install_path', None):
+ if not getattr(self.generator, 'doxy_tar', None):
+ self.generator.bld.install_files(self.generator.install_path,
+ self.outputs,
+ postpone=False,
+ cwd=self.output_dir,
+ relative_trick=True)
+
+class tar(Task.Task):
+ "quick tar creation"
+ run_str = '${TAR} ${TAROPTS} ${TGT} ${SRC}'
+ color = 'RED'
+ after = ['doxygen']
+ def runnable_status(self):
+ for x in getattr(self, 'input_tasks', []):
+ if not x.hasrun:
+ return Task.ASK_LATER
+
+ if not getattr(self, 'tar_done_adding', None):
+ # execute this only once
+ self.tar_done_adding = True
+ for x in getattr(self, 'input_tasks', []):
+ self.set_inputs(x.outputs)
+ if not self.inputs:
+ return Task.SKIP_ME
+ return Task.Task.runnable_status(self)
+
+ def __str__(self):
+ tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs])
+ return '%s: %s\n' % (self.__class__.__name__, tgt_str)
+
+@feature('doxygen')
+def process_doxy(self):
+ if not getattr(self, 'doxyfile', None):
+ self.bld.fatal('no doxyfile??') # self is the task generator here, so use self.bld
+
+ node = self.doxyfile
+ if not isinstance(node, Node.Node):
+ node = self.path.find_resource(node)
+ if not node:
+ raise ValueError('doxygen file not found')
+
+ # the task instance
+ dsk = self.create_task('doxygen', node)
+
+ if getattr(self, 'doxy_tar', None):
+ tsk = self.create_task('tar')
+ tsk.input_tasks = [dsk]
+ tsk.set_outputs(self.path.find_or_declare(self.doxy_tar))
+ if self.doxy_tar.endswith('bz2'):
+ tsk.env['TAROPTS'] = ['cjf']
+ elif self.doxy_tar.endswith('gz'):
+ tsk.env['TAROPTS'] = ['czf']
+ else:
+ tsk.env['TAROPTS'] = ['cf']
+ if getattr(self, 'install_path', None):
+ self.bld.install_files(self.install_path, tsk.outputs)
+
+def configure(conf):
+ '''
+ Check if doxygen and tar commands are present in the system
+
+ If the commands are present, then conf.env.DOXYGEN and conf.env.TAR
+ variables will be set. Detection can be controlled by setting DOXYGEN and
+ TAR environmental variables.
+ '''
+
+ conf.find_program('doxygen', var='DOXYGEN', mandatory=False)
+ conf.find_program('tar', var='TAR', mandatory=False)
diff --git a/third_party/waf/waflib/extras/dpapi.py b/third_party/waf/waflib/extras/dpapi.py
new file mode 100644
index 00000000000..4df64bfa4e5
--- /dev/null
+++ b/third_party/waf/waflib/extras/dpapi.py
@@ -0,0 +1,86 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Matt Clarkson, 2012
+
+'''
+DPAPI access library (http://msdn.microsoft.com/en-us/library/ms995355.aspx)
+This file uses code originally created by Crusher Joe:
+http://article.gmane.org/gmane.comp.python.ctypes/420
+And modified by Wayne Koorts:
+http://stackoverflow.com/questions/463832/using-dpapi-with-python
+'''
+
+from ctypes import windll, byref, cdll, Structure, POINTER, c_char, c_buffer
+from ctypes.wintypes import DWORD
+from waflib.Configure import conf
+
+LocalFree = windll.kernel32.LocalFree
+memcpy = cdll.msvcrt.memcpy
+CryptProtectData = windll.crypt32.CryptProtectData
+CryptUnprotectData = windll.crypt32.CryptUnprotectData
+CRYPTPROTECT_UI_FORBIDDEN = 0x01
+try:
+ extra_entropy = 'cl;ad13 \0al;323kjd #(adl;k$#ajsd'.encode('ascii')
+except AttributeError:
+ extra_entropy = 'cl;ad13 \0al;323kjd #(adl;k$#ajsd'
+
+class DATA_BLOB(Structure):
+ _fields_ = [
+ ('cbData', DWORD),
+ ('pbData', POINTER(c_char))
+ ]
+
+def get_data(blob_out):
+ cbData = int(blob_out.cbData)
+ pbData = blob_out.pbData
+ buffer = c_buffer(cbData)
+ memcpy(buffer, pbData, cbData)
+ LocalFree(pbData)
+ return buffer.raw
+
+@conf
+def dpapi_encrypt_data(self, input_bytes, entropy = extra_entropy):
+ '''
+ Encrypts data and returns byte string
+
+ :param input_bytes: The data to be encrypted
+ :type input_bytes: String or Bytes
+ :param entropy: Extra entropy to add to the encryption process (optional)
+ :type entropy: String or Bytes
+ '''
+ if not isinstance(input_bytes, bytes) or not isinstance(entropy, bytes):
+ self.fatal('The inputs to dpapi must be bytes')
+ buffer_in = c_buffer(input_bytes, len(input_bytes))
+ buffer_entropy = c_buffer(entropy, len(entropy))
+ blob_in = DATA_BLOB(len(input_bytes), buffer_in)
+ blob_entropy = DATA_BLOB(len(entropy), buffer_entropy)
+ blob_out = DATA_BLOB()
+
+ if CryptProtectData(byref(blob_in), 'python_data', byref(blob_entropy),
+ None, None, CRYPTPROTECT_UI_FORBIDDEN, byref(blob_out)):
+ return get_data(blob_out)
+ else:
+ self.fatal('Failed to encrypt data')
+
+@conf
+def dpapi_decrypt_data(self, encrypted_bytes, entropy = extra_entropy):
+ '''
+ Decrypts data and returns byte string
+
+ :param encrypted_bytes: The encrypted data
+ :type encrypted_bytes: Bytes
+ :param entropy: Extra entropy to add to the encryption process (optional)
+ :type entropy: String or Bytes
+ '''
+ if not isinstance(encrypted_bytes, bytes) or not isinstance(entropy, bytes):
+ self.fatal('The inputs to dpapi must be bytes')
+ buffer_in = c_buffer(encrypted_bytes, len(encrypted_bytes))
+ buffer_entropy = c_buffer(entropy, len(entropy))
+ blob_in = DATA_BLOB(len(encrypted_bytes), buffer_in)
+ blob_entropy = DATA_BLOB(len(entropy), buffer_entropy)
+ blob_out = DATA_BLOB()
+ if CryptUnprotectData(byref(blob_in), None, byref(blob_entropy), None,
+ None, CRYPTPROTECT_UI_FORBIDDEN, byref(blob_out)):
+ return get_data(blob_out)
+ else:
+ self.fatal('Failed to decrypt data')
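+
+# A minimal usage sketch (illustrative): once this tool is loaded, both
+# helpers are available as methods on the configuration context:
+#
+#   def configure(conf):
+#       blob = conf.dpapi_encrypt_data(b'secret')
+#       assert conf.dpapi_decrypt_data(blob) == b'secret'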
diff --git a/third_party/waf/waflib/extras/file_to_object.py b/third_party/waf/waflib/extras/file_to_object.py
new file mode 100644
index 00000000000..c2e8809b3ec
--- /dev/null
+++ b/third_party/waf/waflib/extras/file_to_object.py
@@ -0,0 +1,136 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Tool to embed file into objects
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2014"
+
+"""
+
+This tool allows embedding file contents in object files (.o).
+It is not exactly portable, and the embedded contents are reachable
+only in various non-portable ways.
+The goal here is to provide a functional interface to the embedding
+of file data in objects.
+See the ``playground/embedded_resources`` example for an example.
+
+Usage::
+
+ bld(
+ name='pipeline',
+ # ^ Reference this in use="..." for things using the generated code
+ features='file_to_object',
+ source='some.file',
+ # ^ Name of the file to embed in binary section.
+ )
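+
+The embedded data is exposed through three symbols whose names are
+derived from the source file name by replacing non-alphanumeric
+characters with underscores: embedding some.file yields
+_binary_some_file_start, _binary_some_file_end and _binary_some_file_size
+(Mach-O targets get one more leading underscore).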
+
+Known issues:
+
+- Destination is named like source, with extension renamed to .o
+ eg. some.file -> some.o
+
+"""
+
+import os
+from waflib import Task, TaskGen, Errors
+
+def filename_c_escape(x):
+ return x.replace("\\", "\\\\")
+
+class file_to_object_s(Task.Task):
+ color = 'CYAN'
+ dep_vars = ('DEST_CPU', 'DEST_BINFMT')
+
+ def run(self):
+ name = []
+ for i, x in enumerate(self.inputs[0].name):
+ if x.isalnum():
+ name.append(x)
+ else:
+ name.append('_')
+ file = self.inputs[0].abspath()
+ size = os.path.getsize(file)
+ if self.env.DEST_CPU in ('x86_64', 'ia', 'aarch64'):
+ unit = 'quad'
+ align = 8
+ elif self.env.DEST_CPU in ('x86','arm', 'thumb', 'm68k'):
+ unit = 'long'
+ align = 4
+ else:
+ raise Errors.WafError("Unsupported DEST_CPU, please report bug!")
+
+ file = filename_c_escape(file)
+ name = "_binary_" + "".join(name)
+ rodata = ".section .rodata"
+ if self.env.DEST_BINFMT == "mac-o":
+ name = "_" + name
+ rodata = ".section __TEXT,__const"
+
+ with open(self.outputs[0].abspath(), 'w') as f:
+ f.write(\
+"""
+ .global %(name)s_start
+ .global %(name)s_end
+ .global %(name)s_size
+ %(rodata)s
+%(name)s_start:
+ .incbin "%(file)s"
+%(name)s_end:
+ .align %(align)d
+%(name)s_size:
+ .%(unit)s 0x%(size)x
+""" % locals())
+
+class file_to_object_c(Task.Task):
+ color = 'CYAN'
+ def run(self):
+ name = []
+ for i, x in enumerate(self.inputs[0].name):
+ if x.isalnum():
+ name.append(x)
+ else:
+ name.append('_')
+ file = self.inputs[0].abspath()
+ size = os.path.getsize(file)
+
+ name = "_binary_" + "".join(name)
+
+ data = self.inputs[0].read('rb')
+ lines, line = [], []
+ for idx_byte, byte in enumerate(data):
+ line.append(byte)
+ if len(line) > 15 or idx_byte == size-1:
+ lines.append(", ".join(("0x%02x" % ord(x)) for x in line))
+ line = []
+ data = ",\n ".join(lines)
+
+ self.outputs[0].write(\
+"""
+unsigned long %(name)s_size = %(size)dL;
+char const %(name)s_start[] = {
+ %(data)s
+};
+char const %(name)s_end[] = {};
+""" % locals())
+
+@TaskGen.feature('file_to_object')
+@TaskGen.before_method('process_source')
+def tg_file_to_object(self):
+ bld = self.bld
+ sources = self.to_nodes(self.source)
+ targets = []
+ for src in sources:
+ if bld.env.F2O_METHOD == ["asm"]:
+ tgt = src.parent.find_or_declare(src.name + '.f2o.s')
+ tsk = self.create_task('file_to_object_s', src, tgt)
+ tsk.cwd = src.parent.abspath() # verify
+ else:
+ tgt = src.parent.find_or_declare(src.name + '.f2o.c')
+ tsk = self.create_task('file_to_object_c', src, tgt)
+ tsk.cwd = src.parent.abspath() # verify
+ targets.append(tgt)
+ self.source = targets
+
+def configure(conf):
+ conf.load('gas')
+ conf.env.F2O_METHOD = ["c"]
diff --git a/third_party/waf/waflib/extras/freeimage.py b/third_party/waf/waflib/extras/freeimage.py
new file mode 100644
index 00000000000..8933abe2edc
--- /dev/null
+++ b/third_party/waf/waflib/extras/freeimage.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+# encoding: utf-8
+#
+# written by Sylvain Rouquette, 2011
+
+'''
+To add the freeimage tool to the waf file:
+$ ./waf-light --tools=compat15,freeimage
+ or, if you have waf >= 1.6.2
+$ ./waf update --files=freeimage
+
+The wscript will look like:
+
+def options(opt):
+ opt.load('compiler_cxx freeimage')
+
+def configure(conf):
+ conf.load('compiler_cxx freeimage')
+
+ # you can call check_freeimage with some parameters.
+ # It's optional on Linux, it's 'mandatory' on Windows if
+ # you didn't use --fi-path on the command-line
+
+ # conf.check_freeimage(path='FreeImage/Dist', fip=True)
+
+def build(bld):
+ bld(source='main.cpp', target='app', use='FREEIMAGE')
+'''
+
+from waflib import Utils
+from waflib.Configure import conf
+
+
+def options(opt):
+ opt.add_option('--fi-path', type='string', default='', dest='fi_path',
+ help='''path to the FreeImage directory \
+ where the files are e.g. /FreeImage/Dist''')
+ opt.add_option('--fip', action='store_true', default=False, dest='fip',
+ help='link with FreeImagePlus')
+ opt.add_option('--fi-static', action='store_true',
+ default=False, dest='fi_static',
+ help="link as shared libraries")
+
+
+@conf
+def check_freeimage(self, path=None, fip=False):
+ self.start_msg('Checking FreeImage')
+ if not self.env['CXX']:
+ self.fatal('you must load compiler_cxx before loading freeimage')
+ prefix = self.options.fi_static and 'ST' or ''
+ platform = Utils.unversioned_sys_platform()
+ if platform == 'win32':
+ if not path:
+ self.fatal('you must specify the path to FreeImage. \
+ use --fi-path=/FreeImage/Dist')
+ else:
+ self.env['INCLUDES_FREEIMAGE'] = path
+ self.env['%sLIBPATH_FREEIMAGE' % prefix] = path
+ libs = ['FreeImage']
+ if self.options.fip:
+ libs.append('FreeImagePlus')
+ if platform == 'win32':
+ self.env['%sLIB_FREEIMAGE' % prefix] = libs
+ else:
+ self.env['%sLIB_FREEIMAGE' % prefix] = [i.lower() for i in libs]
+ self.end_msg('ok')
+
+
+def configure(conf):
+ platform = Utils.unversioned_sys_platform()
+ if platform == 'win32' and not conf.options.fi_path:
+ return
+ conf.check_freeimage(conf.options.fi_path, conf.options.fip)
diff --git a/third_party/waf/waflib/extras/fsb.py b/third_party/waf/waflib/extras/fsb.py
new file mode 100644
index 00000000000..ba475d815d3
--- /dev/null
+++ b/third_party/waf/waflib/extras/fsb.py
@@ -0,0 +1,30 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+"""
+Fully sequential builds
+
+The previous tasks from task generators are re-processed, and this may lead to speed issues.
+Yet, if you are using this, speed is probably a minor concern.
+"""
+
+from waflib import Build
+
+def options(opt):
+ pass
+
+def configure(conf):
+ pass
+
+class FSBContext(Build.BuildContext):
+ def __call__(self, *k, **kw):
+ ret = Build.BuildContext.__call__(self, *k, **kw)
+
+ # evaluate the results immediately
+ Build.BuildContext.compile(self)
+
+ return ret
+
+ def compile(self):
+ pass
diff --git a/third_party/waf/waflib/extras/gccdeps.py b/third_party/waf/waflib/extras/gccdeps.py
new file mode 100644
index 00000000000..26b8bdba938
--- /dev/null
+++ b/third_party/waf/waflib/extras/gccdeps.py
@@ -0,0 +1,211 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2008-2010 (ita)
+
+"""
+Execute the tasks with gcc -MD, read the dependencies from the .d file
+and prepare the dependency calculation for the next run.
+This affects the cxx class, so make sure to load Qt5 after this tool.
+
+Usage:
+ def configure(conf):
+ conf.load('gccdeps')
+"""
+
+import os, re, threading
+from waflib import Task, Logs, Utils, Errors
+from waflib.Tools import c_preproc
+from waflib.TaskGen import before_method, feature
+
+lock = threading.Lock()
+
+gccdeps_flags = ['-MD']
+if not c_preproc.go_absolute:
+ gccdeps_flags = ['-MMD']
+
+# Third-party tools are allowed to add extra names in here with append()
+supported_compilers = ['gcc', 'icc', 'clang']
+
+def scan(self):
+ if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
+ return super(self.derived_gccdeps, self).scan()
+ nodes = self.generator.bld.node_deps.get(self.uid(), [])
+ names = []
+ return (nodes, names)
+
+re_o = re.compile(r"\.o$")
+re_splitter = re.compile(r'(?<!\\)\s+') # split by space, except when spaces are escaped
+
+def remove_makefile_rule_lhs(line):
+ # Splitting on a plain colon would accidentally match inside a
+ # Windows absolute-path filename, so we must search for a colon
+ # followed by whitespace to find the divider between LHS and RHS
+ # of the Makefile rule.
+ rulesep = ': '
+
+ sep_idx = line.find(rulesep)
+ if sep_idx >= 0:
+ return line[sep_idx + 2:]
+ else:
+ return line
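+
+# For illustration: 'foo.o: /path/dep1.h' -> '/path/dep1.h', while a
+# continuation line such as 'c:\users\dep2.h' is returned unchanged,
+# because its drive colon is not followed by a space.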
+
+def path_to_node(base_node, path, cached_nodes):
+ # Take the base node and the path and return a node
+ # Results are cached because searching the node tree is expensive
+ # The following code is executed by threads, it is not safe, so a lock is needed...
+ if getattr(path, '__hash__'):
+ node_lookup_key = (base_node, path)
+ else:
+ # Not hashable, assume it is a list and join into a string
+ node_lookup_key = (base_node, os.path.sep.join(path))
+ try:
+ lock.acquire()
+ node = cached_nodes[node_lookup_key]
+ except KeyError:
+ node = base_node.find_resource(path)
+ cached_nodes[node_lookup_key] = node
+ finally:
+ lock.release()
+ return node
+
+def post_run(self):
+ if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
+ return super(self.derived_gccdeps, self).post_run()
+
+ name = self.outputs[0].abspath()
+ name = re_o.sub('.d', name)
+ try:
+ txt = Utils.readf(name)
+ except EnvironmentError:
+ Logs.error('Could not find a .d dependency file, are cflags/cxxflags overwritten?')
+ raise
+ #os.remove(name)
+
+ # Compilers have the choice to either output the file's dependencies
+ # as one large Makefile rule:
+ #
+ # /path/to/file.o: /path/to/dep1.h \
+ # /path/to/dep2.h \
+ # /path/to/dep3.h \
+ # ...
+ #
+ # or as many individual rules:
+ #
+ # /path/to/file.o: /path/to/dep1.h
+ # /path/to/file.o: /path/to/dep2.h
+ # /path/to/file.o: /path/to/dep3.h
+ # ...
+ #
+ # So the first step is to sanitize the input by stripping out the left-
+ # hand side of all these lines. After that, whatever remains are the
+ # implicit dependencies of task.outputs[0]
+ txt = '\n'.join([remove_makefile_rule_lhs(line) for line in txt.splitlines()])
+
+ # Now join all the lines together
+ txt = txt.replace('\\\n', '')
+
+ val = txt.strip()
+ val = [x.replace('\\ ', ' ') for x in re_splitter.split(val) if x]
+
+ nodes = []
+ bld = self.generator.bld
+
+ # Dynamically bind to the cache
+ try:
+ cached_nodes = bld.cached_nodes
+ except AttributeError:
+ cached_nodes = bld.cached_nodes = {}
+
+ for x in val:
+
+ node = None
+ if os.path.isabs(x):
+ node = path_to_node(bld.root, x, cached_nodes)
+ else:
+ # TODO waf 1.9 - single cwd value
+ path = getattr(bld, 'cwdx', bld.bldnode)
+ # when calling find_resource, make sure the path does not contain '..'
+ x = [k for k in Utils.split_path(x) if k and k != '.']
+ while '..' in x:
+ idx = x.index('..')
+ if idx == 0:
+ x = x[1:]
+ path = path.parent
+ else:
+ del x[idx]
+ del x[idx-1]
+
+ node = path_to_node(path, x, cached_nodes)
+
+ if not node:
+ raise ValueError('could not find %r for %r' % (x, self))
+ if id(node) == id(self.inputs[0]):
+ # ignore the source file, it is already in the dependencies
+ # this way, successful config tests may be retrieved from the cache
+ continue
+ nodes.append(node)
+
+ Logs.debug('deps: gccdeps for %s returned %s', self, nodes)
+
+ bld.node_deps[self.uid()] = nodes
+ bld.raw_deps[self.uid()] = []
+
+ try:
+ del self.cache_sig
+ except AttributeError:
+ pass
+
+ Task.Task.post_run(self)
+
+def sig_implicit_deps(self):
+ if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
+ return super(self.derived_gccdeps, self).sig_implicit_deps()
+ try:
+ return Task.Task.sig_implicit_deps(self)
+ except Errors.WafError:
+ return Utils.SIG_NIL
+
+def wrap_compiled_task(classname):
+ derived_class = type(classname, (Task.classes[classname],), {})
+ derived_class.derived_gccdeps = derived_class
+ derived_class.post_run = post_run
+ derived_class.scan = scan
+ derived_class.sig_implicit_deps = sig_implicit_deps
+
+for k in ('c', 'cxx'):
+ if k in Task.classes:
+ wrap_compiled_task(k)
+
+@before_method('process_source')
+@feature('force_gccdeps')
+def force_gccdeps(self):
+ self.env.ENABLE_GCCDEPS = ['c', 'cxx']
+
+def configure(conf):
+ # in case someone provides a --enable-gccdeps command-line option
+ if not getattr(conf.options, 'enable_gccdeps', True):
+ return
+
+ global gccdeps_flags
+ flags = conf.env.GCCDEPS_FLAGS or gccdeps_flags
+ if conf.env.CC_NAME in supported_compilers:
+ try:
+ conf.check(fragment='int main() { return 0; }', features='c force_gccdeps', cflags=flags, msg='Checking for c flags %r' % ''.join(flags))
+ except Errors.ConfigurationError:
+ pass
+ else:
+ conf.env.append_value('CFLAGS', gccdeps_flags)
+ conf.env.append_unique('ENABLE_GCCDEPS', 'c')
+
+ if conf.env.CXX_NAME in supported_compilers:
+ try:
+ conf.check(fragment='int main() { return 0; }', features='cxx force_gccdeps', cxxflags=flags, msg='Checking for cxx flags %r' % ''.join(flags))
+ except Errors.ConfigurationError:
+ pass
+ else:
+ conf.env.append_value('CXXFLAGS', gccdeps_flags)
+ conf.env.append_unique('ENABLE_GCCDEPS', 'cxx')
diff --git a/third_party/waf/waflib/extras/go.py b/third_party/waf/waflib/extras/go.py
new file mode 100644
index 00000000000..2ba54b8116a
--- /dev/null
+++ b/third_party/waf/waflib/extras/go.py
@@ -0,0 +1,255 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Tom Wambold tom5760 gmail.com 2009
+# Thomas Nagy 2010
+
+"""
+Go as a language may look nice, but its toolchain is one of the worst a developer
+has ever seen. It keeps changing, though, and I would like to believe that it will get
+better eventually; the crude reality, however, is that this tool and the examples
+break every few months.
+
+If you have been lured into trying to use Go, you should stick to their Makefiles.
+"""
+
+import os, platform
+
+from waflib import Utils, Task, TaskGen, Errors
+from waflib.TaskGen import feature, extension, after_method, before_method
+from waflib.Tools.ccroot import link_task, stlink_task, propagate_uselib_vars, process_use
+
+class go(Task.Task):
+ run_str = '${GOC} ${GOCFLAGS} ${CPPPATH_ST:INCPATHS} -o ${TGT} ${SRC}'
+
+class gopackage(stlink_task):
+ run_str = '${GOP} grc ${TGT} ${SRC}'
+
+class goprogram(link_task):
+ run_str = '${GOL} ${GOLFLAGS} -o ${TGT} ${SRC}'
+ inst_to = '${BINDIR}'
+ chmod = Utils.O755
+
+class cgopackage(stlink_task):
+ color = 'YELLOW'
+ inst_to = '${LIBDIR}'
+ ext_in = ['.go']
+ ext_out = ['.a']
+
+ def run(self):
+ src_dir = self.generator.bld.path
+ source = self.inputs
+ target = self.outputs[0].change_ext('')
+
+ #print ("--> %s" % self.outputs)
+ #print ('++> %s' % self.outputs[1])
+ bld_dir = self.outputs[1]
+ bld_dir.mkdir()
+ obj_dir = bld_dir.make_node('_obj')
+ obj_dir.mkdir()
+
+ bld_srcs = []
+ for s in source:
+ # FIXME: it seems gomake/cgo stumbles on filenames like a/b/c.go
+ # -> for the time being replace '/' with '_'...
+ #b = bld_dir.make_node(s.path_from(src_dir))
+ b = bld_dir.make_node(s.path_from(src_dir).replace(os.sep,'_'))
+ b.parent.mkdir()
+ #print ('++> %s' % (s.path_from(src_dir),))
+ try:
+ try:
+ os.remove(b.abspath())
+ except Exception:
+ pass
+ os.symlink(s.abspath(), b.abspath())
+ except Exception:
+ # if no support for symlinks, copy the file from src
+ b.write(s.read())
+ bld_srcs.append(b)
+ #print("--|> [%s]" % b.abspath())
+ b.sig = Utils.h_file(b.abspath())
+ pass
+ #self.set_inputs(bld_srcs)
+ #self.generator.bld.raw_deps[self.uid()] = [self.signature()] + bld_srcs
+ makefile_node = bld_dir.make_node("Makefile")
+ makefile_tmpl = '''\
+# Copyright 2009 The Go Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file. ---
+
+include $(GOROOT)/src/Make.inc
+
+TARG=%(target)s
+
+GCIMPORTS= %(gcimports)s
+
+CGOFILES=\\
+\t%(source)s
+
+CGO_CFLAGS= %(cgo_cflags)s
+
+CGO_LDFLAGS= %(cgo_ldflags)s
+
+include $(GOROOT)/src/Make.pkg
+
+%%: install %%.go
+ $(GC) $*.go
+ $(LD) -o $@ $*.$O
+
+''' % {
+'gcimports': ' '.join(l for l in self.env['GOCFLAGS']),
+'cgo_cflags' : ' '.join(l for l in self.env['GOCFLAGS']),
+'cgo_ldflags': ' '.join(l for l in self.env['GOLFLAGS']),
+'target': target.path_from(obj_dir),
+'source': ' '.join([b.path_from(bld_dir) for b in bld_srcs])
+}
+ makefile_node.write(makefile_tmpl)
+ #print ("::makefile: %s"%makefile_node.abspath())
+ cmd = Utils.subst_vars('gomake ${GOMAKE_FLAGS}', self.env).strip()
+ o = self.outputs[0].change_ext('.gomake.log')
+ fout_node = bld_dir.find_or_declare(o.name)
+ fout = open(fout_node.abspath(), 'w')
+ rc = self.generator.bld.exec_command(
+ cmd,
+ stdout=fout,
+ stderr=fout,
+ cwd=bld_dir.abspath(),
+ )
+ if rc != 0:
+ import waflib.Logs as msg
+ msg.error('** error running [%s] (cgo-%s)' % (cmd, target))
+ msg.error(fout_node.read())
+ return rc
+ self.generator.bld.read_stlib(
+ target,
+ paths=[obj_dir.abspath(),],
+ )
+ tgt = self.outputs[0]
+ if tgt.parent != obj_dir:
+ install_dir = os.path.join('${LIBDIR}',
+ tgt.parent.path_from(obj_dir))
+ else:
+ install_dir = '${LIBDIR}'
+ #print('===> %s (%s)' % (tgt.abspath(), install_dir))
+ self.generator.bld.install_files(
+ install_dir,
+ tgt.abspath(),
+ relative_trick=False,
+ postpone=False,
+ )
+ return rc
+
+@extension('.go')
+def compile_go(self, node):
+ #print('*'*80, self.name)
+ if not ('cgopackage' in self.features):
+ return self.create_compiled_task('go', node)
+ #print ('compile_go-cgo...')
+ #bld_dir = node.parent.get_bld()
+ #obj_dir = bld_dir.make_node('_obj')
+ return self.create_task('cgopackage', node, node.change_ext('.a'))
+
+@feature('gopackage', 'goprogram', 'cgopackage')
+@before_method('process_source')
+def go_compiler_is_foobar(self):
+ if self.env.GONAME == 'gcc':
+ return
+ self.source = self.to_nodes(self.source)
+ src = []
+ go = []
+ for node in self.source:
+ if node.name.endswith('.go'):
+ go.append(node)
+ else:
+ src.append(node)
+ self.source = src
+ if not ('cgopackage' in self.features):
+ #print('--> [%s]... (%s)' % (go[0], getattr(self, 'target', 'N/A')))
+ tsk = self.create_compiled_task('go', go[0])
+ tsk.inputs.extend(go[1:])
+ else:
+ #print ('+++ [%s] +++' % self.target)
+ bld_dir = self.path.get_bld().make_node('cgopackage--%s' % self.target.replace(os.sep,'_'))
+ obj_dir = bld_dir.make_node('_obj')
+ target = obj_dir.make_node(self.target+'.a')
+ tsk = self.create_task('cgopackage', go, [target, bld_dir])
+ self.link_task = tsk
+
+@feature('gopackage', 'goprogram', 'cgopackage')
+@after_method('process_source', 'apply_incpaths',)
+def go_local_libs(self):
+ names = self.to_list(getattr(self, 'use', []))
+ #print ('== go-local-libs == [%s] == use: %s' % (self.name, names))
+ for name in names:
+ tg = self.bld.get_tgen_by_name(name)
+ if not tg:
+ raise Errors.WafError('no target of name %r necessary for %r in go uselib local' % (name, self))
+ tg.post()
+ #print ("-- tg[%s]: %s" % (self.name,name))
+ lnk_task = getattr(tg, 'link_task', None)
+ if lnk_task:
+ for tsk in self.tasks:
+ if isinstance(tsk, (go, gopackage, cgopackage)):
+ tsk.set_run_after(lnk_task)
+ tsk.dep_nodes.extend(lnk_task.outputs)
+ path = lnk_task.outputs[0].parent.abspath()
+ if isinstance(lnk_task, (go, gopackage)):
+ # handle hierarchical packages
+ path = lnk_task.generator.path.get_bld().abspath()
+ elif isinstance(lnk_task, (cgopackage,)):
+ # handle hierarchical cgopackages
+ cgo_obj_dir = lnk_task.outputs[1].find_or_declare('_obj')
+ path = cgo_obj_dir.abspath()
+ # recursively add parent GOCFLAGS...
+ self.env.append_unique('GOCFLAGS',
+ getattr(lnk_task.env, 'GOCFLAGS',[]))
+ # ditto for GOLFLAGS...
+ self.env.append_unique('GOLFLAGS',
+ getattr(lnk_task.env, 'GOLFLAGS',[]))
+ self.env.append_unique('GOCFLAGS', ['-I%s' % path])
+ self.env.append_unique('GOLFLAGS', ['-L%s' % path])
+ for n in getattr(tg, 'includes_nodes', []):
+ self.env.append_unique('GOCFLAGS', ['-I%s' % n.abspath()])
+ pass
+ pass
+
+def configure(conf):
+
+ def set_def(var, val):
+ if not conf.env[var]:
+ conf.env[var] = val
+
+ goarch = os.getenv('GOARCH')
+ if goarch == '386':
+ set_def('GO_PLATFORM', 'i386')
+ elif goarch == 'amd64':
+ set_def('GO_PLATFORM', 'x86_64')
+ elif goarch == 'arm':
+ set_def('GO_PLATFORM', 'arm')
+ else:
+ set_def('GO_PLATFORM', platform.machine())
+
+ if conf.env.GO_PLATFORM == 'x86_64':
+ set_def('GO_COMPILER', '6g')
+ set_def('GO_LINKER', '6l')
+ elif conf.env.GO_PLATFORM in ('i386', 'i486', 'i586', 'i686'):
+ set_def('GO_COMPILER', '8g')
+ set_def('GO_LINKER', '8l')
+ elif conf.env.GO_PLATFORM == 'arm':
+ set_def('GO_COMPILER', '5g')
+ set_def('GO_LINKER', '5l')
+ set_def('GO_EXTENSION', '.5')
+
+ if not (conf.env.GO_COMPILER or conf.env.GO_LINKER):
+ conf.fatal('Unsupported platform ' + platform.machine())
+
+ set_def('GO_PACK', 'gopack')
+ set_def('gopackage_PATTERN', '%s.a')
+ set_def('CPPPATH_ST', '-I%s')
+
+ set_def('GOMAKE_FLAGS', ['--quiet'])
+ conf.find_program(conf.env.GO_COMPILER, var='GOC')
+ conf.find_program(conf.env.GO_LINKER, var='GOL')
+ conf.find_program(conf.env.GO_PACK, var='GOP')
+
+ conf.find_program('cgo', var='CGO')
+
+TaskGen.feature('go')(process_use)
+TaskGen.feature('go')(propagate_uselib_vars)
diff --git a/third_party/waf/waflib/extras/gob2.py b/third_party/waf/waflib/extras/gob2.py
new file mode 100644
index 00000000000..637f2934dc7
--- /dev/null
+++ b/third_party/waf/waflib/extras/gob2.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Ali Sabil, 2007
+
+from waflib import TaskGen
+
+TaskGen.declare_chain(
+ name = 'gob2',
+ rule = '${GOB2} -o ${TGT[0].bld_dir()} ${GOB2FLAGS} ${SRC}',
+ ext_in = '.gob',
+ ext_out = '.c'
+)
+
+def configure(conf):
+ conf.find_program('gob2', var='GOB2')
+ conf.env['GOB2FLAGS'] = ''
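+
+# Usage sketch (illustrative): once this tool is loaded, any .gob file
+# listed in a task generator's sources is converted to C by the chain
+# above and the result is compiled normally, e.g.:
+#   bld.program(source='main.c widget.gob', target='app')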
diff --git a/third_party/waf/waflib/extras/halide.py b/third_party/waf/waflib/extras/halide.py
new file mode 100644
index 00000000000..acec8eca8b4
--- /dev/null
+++ b/third_party/waf/waflib/extras/halide.py
@@ -0,0 +1,149 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Halide code generation tool
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2014"
+
+"""
+
+Tool to run `Halide <http://halide-lang.org>`_ code generators.
+
+Usage::
+
+ bld(
+ name='pipeline',
+ # ^ Reference this in use="..." for things using the generated code
+ #target=['pipeline.o', 'pipeline.h']
+ # ^ by default, name.{o,h} is added, but you can set the outputs here
+ features='halide',
+ halide_env="HL_TRACE=1 HL_TARGET=host-opencl-gpu_debug",
+ # ^ Environment passed to the generator,
+ # can be a dict, k/v list, or string.
+ args=[],
+ # ^ Command-line arguments to the generator (optional),
+ # eg. to give parameters to the scheduling
+ source='pipeline_gen',
+ # ^ Name of the source executable
+ )
+
+
+Known issues:
+
+- Currently only supports Linux (no ".exe")
+
+- Doesn't rerun on input modification when input is part of a build
+ chain, and has been modified externally.
+
+"""
+
+import os
+from waflib import Task, Utils, Options, TaskGen, Errors
+
+class run_halide_gen(Task.Task):
+ color = 'CYAN'
+ vars = ['HALIDE_ENV', 'HALIDE_ARGS']
+ run_str = "${SRC[0].abspath()} ${HALIDE_ARGS}"
+ def __str__(self):
+ stuff = "halide"
+ stuff += ("[%s]" % (",".join(
+ ('%s=%s' % (k,v)) for k, v in sorted(self.env.env.items()))))
+ return Task.Task.__str__(self).replace(self.__class__.__name__,
+ stuff)
+
+@TaskGen.feature('halide')
+@TaskGen.before_method('process_source')
+def halide(self):
+ Utils.def_attrs(self,
+ args=[],
+ halide_env={},
+ )
+
+ bld = self.bld
+
+ env = self.halide_env
+ try:
+ if isinstance(env, str):
+ env = dict(x.split('=') for x in env.split())
+ elif isinstance(env, list):
+ env = dict(x.split('=') for x in env)
+ assert isinstance(env, dict)
+ except Exception as e:
+ if not isinstance(e, ValueError) \
+ and not isinstance(e, AssertionError):
+ raise
+ raise Errors.WafError(
+ "halide_env must be under the form" \
+ " {'HL_x':'a', 'HL_y':'b'}" \
+ " or ['HL_x=y', 'HL_y=b']" \
+ " or 'HL_x=y HL_y=b'")
+
+ src = self.to_nodes(self.source)
+ assert len(src) == 1, "Only one source expected"
+ src = src[0]
+
+ args = Utils.to_list(self.args)
+
+ def change_ext(src, ext):
+ # Return a node with a new extension, in an appropriate folder
+ name = src.name
+ xpos = src.name.rfind('.')
+ if xpos == -1: xpos = len(src.name)
+ newname = name[:xpos] + ext
+ if src.is_child_of(bld.bldnode):
+ node = src.get_src().parent.find_or_declare(newname)
+ else:
+ node = bld.bldnode.find_or_declare(newname)
+ return node
+
+ def to_nodes(self, lst, path=None):
+ tmp = []
+ path = path or self.path
+ find = path.find_or_declare
+
+ if isinstance(lst, self.path.__class__):
+ lst = [lst]
+
+ for x in Utils.to_list(lst):
+ if isinstance(x, str):
+ node = find(x)
+ else:
+ node = x
+ tmp.append(node)
+ return tmp
+
+ tgt = to_nodes(self, self.target)
+ if not tgt:
+ tgt = [change_ext(src, '.o'), change_ext(src, '.h')]
+ cwd = tgt[0].parent.abspath()
+ task = self.create_task('run_halide_gen', src, tgt, cwd=cwd)
+ task.env.append_unique('HALIDE_ARGS', args)
+ if task.env.env == []:
+ task.env.env = {}
+ task.env.env.update(env)
+ task.env.HALIDE_ENV = " ".join(("%s=%s" % (k,v)) for (k,v) in sorted(env.items()))
+ task.env.HALIDE_ARGS = args
+
+ try:
+ self.compiled_tasks.append(task)
+ except AttributeError:
+ self.compiled_tasks = [task]
+ self.source = []
+
+def configure(conf):
+ if Options.options.halide_root is None:
+ conf.check_cfg(package='Halide', args='--cflags --libs')
+ else:
+ halide_root = Options.options.halide_root
+ conf.env.INCLUDES_HALIDE = [ os.path.join(halide_root, "include") ]
+ conf.env.LIBPATH_HALIDE = [ os.path.join(halide_root, "lib") ]
+ conf.env.LIB_HALIDE = ["Halide"]
+
+ # You might want to add this, while upstream doesn't fix it
+ #conf.env.LIB_HALIDE += ['ncurses', 'dl', 'pthread']
+
+def options(opt):
+ opt.add_option('--halide-root',
+ help="path to Halide include and lib files",
+ )
diff --git a/third_party/waf/waflib/extras/local_rpath.py b/third_party/waf/waflib/extras/local_rpath.py
new file mode 100644
index 00000000000..8942e97708f
--- /dev/null
+++ b/third_party/waf/waflib/extras/local_rpath.py
@@ -0,0 +1,18 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+from waflib import Errors
+from waflib.TaskGen import after_method, feature
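+
+# This tool appends the build directories of local 'use' dependencies
+# (and of their transitive 'use' entries) to the RPATH of programs and
+# shared libraries, so freshly built libraries are found at run time
+# without being installed first.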
+
+@after_method('propagate_uselib_vars')
+@feature('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib', 'fcprogram', 'fcshlib')
+def add_rpath_stuff(self):
+ all = self.to_list(getattr(self, 'use', []))
+ while all:
+ name = all.pop()
+ try:
+ tg = self.bld.get_tgen_by_name(name)
+ except Errors.WafError:
+ continue
+ self.env.append_value('RPATH', tg.link_task.outputs[0].parent.abspath())
+ all.extend(self.to_list(getattr(tg, 'use', [])))
diff --git a/third_party/waf/waflib/extras/make.py b/third_party/waf/waflib/extras/make.py
new file mode 100644
index 00000000000..8b99c4dd0e0
--- /dev/null
+++ b/third_party/waf/waflib/extras/make.py
@@ -0,0 +1,141 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+"""
+A make-like way of executing the build, following the relationships between inputs/outputs
+
+This algorithm will lead to slower builds and will not be as flexible as "waf build",
+but it might be useful for building data files (?)
+
+It is likely to break in the following cases:
+- files are created dynamically (no inputs or outputs)
+- headers
+- building two files from different groups
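+
+Usage sketch (assuming a --files option is registered on the command
+line and lands in Options.options.files; patterns are comma-separated
+and may be restricted with in: or out: prefixes):
+
+  waf make --files="in:src/foo.c,out:bar.o"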
+"""
+
+import re
+from waflib import Options, Task
+from waflib.Build import BuildContext
+
+class MakeContext(BuildContext):
+ '''executes tasks in a step-by-step manner, following dependencies between inputs/outputs'''
+ cmd = 'make'
+ fun = 'build'
+
+ def __init__(self, **kw):
+ super(MakeContext, self).__init__(**kw)
+ self.files = Options.options.files
+
+ def get_build_iterator(self):
+ if not self.files:
+ while 1:
+ yield super(MakeContext, self).get_build_iterator()
+
+ for g in self.groups:
+ for tg in g:
+ try:
+ f = tg.post
+ except AttributeError:
+ pass
+ else:
+ f()
+
+ provides = {}
+ uses = {}
+ all_tasks = []
+ tasks = []
+ for pat in self.files.split(','):
+ matcher = self.get_matcher(pat)
+ for tg in g:
+ if isinstance(tg, Task.TaskBase):
+ lst = [tg]
+ else:
+ lst = tg.tasks
+ for tsk in lst:
+ all_tasks.append(tsk)
+
+ do_exec = False
+ for node in getattr(tsk, 'inputs', []):
+ try:
+ uses[node].append(tsk)
+ except KeyError:
+ uses[node] = [tsk]
+
+ if matcher(node, output=False):
+ do_exec = True
+ break
+
+ for node in getattr(tsk, 'outputs', []):
+ try:
+ provides[node].append(tsk)
+ except KeyError:
+ provides[node] = [tsk]
+
+ if matcher(node, output=True):
+ do_exec = True
+ break
+ if do_exec:
+ tasks.append(tsk)
+
+ # so we have the tasks that we need to process, the list of all tasks,
+ # the map of the tasks providing nodes, and the map of tasks using nodes
+
+ if not tasks:
+ # if there are no tasks matching, return everything in the current group
+ result = all_tasks
+ else:
+ # this is like a big filter...
+ result = set([])
+ seen = set([])
+ cur = set(tasks)
+ while cur:
+ result |= cur
+ tosee = set([])
+ for tsk in cur:
+ for node in getattr(tsk, 'inputs', []):
+ if node in seen:
+ continue
+ seen.add(node)
+ tosee |= set(provides.get(node, []))
+ cur = tosee
+ result = list(result)
+
+ Task.set_file_constraints(result)
+ Task.set_precedence_constraints(result)
+ yield result
+
+ while 1:
+ yield []
+
+ def get_matcher(self, pat):
+ # this returns a function
+ inn = True
+ out = True
+ if pat.startswith('in:'):
+ out = False
+ pat = pat.replace('in:', '')
+ elif pat.startswith('out:'):
+ inn = False
+ pat = pat.replace('out:', '')
+
+ anode = self.root.find_node(pat)
+ pattern = None
+ if not anode:
+ if not pat.startswith('^'):
+ pat = '^.+?%s' % pat
+ if not pat.endswith('$'):
+ pat = '%s$' % pat
+ pattern = re.compile(pat)
+
+ def match(node, output):
+ if output == True and not out:
+ return False
+ if output == False and not inn:
+ return False
+
+ if anode:
+ return anode == node
+ else:
+ return pattern.match(node.abspath())
+ return match
diff --git a/third_party/waf/waflib/extras/md5_tstamp.py b/third_party/waf/waflib/extras/md5_tstamp.py
new file mode 100644
index 00000000000..63b71d8d27a
--- /dev/null
+++ b/third_party/waf/waflib/extras/md5_tstamp.py
@@ -0,0 +1,67 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+This module assumes that only one build context is running at a given time, which
+is not the case if you want to execute configuration tests in parallel.
+
+Store some values on the build context, mapping file paths to
+stat values and md5 hashes (timestamp + md5);
+this way the md5 hashes are recomputed only when the timestamp changes (which can be faster).
+There is usually little or no gain from enabling this, but it can be used to enable
+the second-level cache with timestamps (WAFCACHE).
+
+You may have to run distclean or remove the build directory before enabling/disabling
+this hashing scheme.
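+
+Usage sketch (load it like any other tool):
+
+  def options(opt):
+      opt.load('md5_tstamp')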
+"""
+
+import os, stat
+from waflib import Utils, Build, Context
+
+STRONGEST = True
+
+try:
+ Build.BuildContext.store_real
+except AttributeError:
+
+ Context.DBFILE += '_md5tstamp'
+
+ Build.hashes_md5_tstamp = {}
+ Build.SAVED_ATTRS.append('hashes_md5_tstamp')
+ def store(self):
+ # save the hash cache as part of the default pickle file
+ self.hashes_md5_tstamp = Build.hashes_md5_tstamp
+ self.store_real()
+ Build.BuildContext.store_real = Build.BuildContext.store
+ Build.BuildContext.store = store
+
+ def restore(self):
+ # we need a module variable for h_file below
+ self.restore_real()
+ try:
+ Build.hashes_md5_tstamp = self.hashes_md5_tstamp or {}
+ except AttributeError:
+ Build.hashes_md5_tstamp = {}
+ Build.BuildContext.restore_real = Build.BuildContext.restore
+ Build.BuildContext.restore = restore
+
+ def h_file(filename):
+ st = os.stat(filename)
+ if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')
+
+ if filename in Build.hashes_md5_tstamp:
+ if Build.hashes_md5_tstamp[filename][0] == str(st.st_mtime):
+ return Build.hashes_md5_tstamp[filename][1]
+ if STRONGEST:
+ ret = Utils.h_file_no_md5(filename)
+ Build.hashes_md5_tstamp[filename] = (str(st.st_mtime), ret)
+ return ret
+ else:
+ m = Utils.md5()
+ m.update(str(st.st_mtime).encode()) # hash bytes, for Python 3 compatibility
+ m.update(str(st.st_size).encode())
+ m.update(filename.encode())
+ Build.hashes_md5_tstamp[filename] = (str(st.st_mtime), m.digest())
+ return m.digest()
+ Utils.h_file_no_md5 = Utils.h_file
+ Utils.h_file = h_file
diff --git a/third_party/waf/waflib/extras/mem_reducer.py b/third_party/waf/waflib/extras/mem_reducer.py
new file mode 100644
index 00000000000..e97c8d7272c
--- /dev/null
+++ b/third_party/waf/waflib/extras/mem_reducer.py
@@ -0,0 +1,110 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+
+"""
+This tool can help to reduce the memory usage in very large builds featuring many tasks with after/before attributes.
+It may also improve the overall build time by decreasing the amount of iterations over tasks.
+
+Usage:
+def options(opt):
+ opt.load('mem_reducer')
+"""
+
+import itertools
+from waflib import Utils, Task, Runner
+
+class SetOfTasks(object):
+ """Wraps a set and a task which has a list of other sets.
+ The interface is meant to mimic the interface of set. Add missing functions as needed.
+ """
+ def __init__(self, owner):
+ self._set = owner.run_after
+ self._owner = owner
+
+ def __iter__(self):
+ for g in self._owner.run_after_groups:
+ #print len(g)
+ for task in g:
+ yield task
+ for task in self._set:
+ yield task
+
+ def add(self, obj):
+ self._set.add(obj)
+
+ def update(self, obj):
+ self._set.update(obj)
+
+def set_precedence_constraints(tasks):
+ cstr_groups = Utils.defaultdict(list)
+ for x in tasks:
+ x.run_after = SetOfTasks(x)
+ x.run_after_groups = []
+ x.waiting_sets = []
+
+ h = x.hash_constraints()
+ cstr_groups[h].append(x)
+
+ # create sets which can be reused for all tasks
+ for k in cstr_groups.keys():
+ cstr_groups[k] = set(cstr_groups[k])
+
+ # this list should be short
+ for key1, key2 in itertools.combinations(cstr_groups.keys(), 2):
+ group1 = cstr_groups[key1]
+ group2 = cstr_groups[key2]
+ # get the first entry of the set
+ t1 = next(iter(group1))
+ t2 = next(iter(group2))
+
+ # add the constraints based on the comparisons
+ if Task.is_before(t1, t2):
+ for x in group2:
+ x.run_after_groups.append(group1)
+ for k in group1:
+ k.waiting_sets.append(group1)
+ elif Task.is_before(t2, t1):
+ for x in group1:
+ x.run_after_groups.append(group2)
+ for k in group2:
+ k.waiting_sets.append(group2)
+
+Task.set_precedence_constraints = set_precedence_constraints
+
+def get_out(self):
+ tsk = self.out.get()
+ if not self.stop:
+ self.add_more_tasks(tsk)
+ self.count -= 1
+ self.dirty = True
+
+ # shrinking sets
+ try:
+ ws = tsk.waiting_sets
+ except AttributeError:
+ pass
+ else:
+ for k in ws:
+ try:
+ k.remove(tsk)
+ except KeyError:
+ pass
+
+ return tsk
+Runner.Parallel.get_out = get_out
+
+def skip(self, tsk):
+ tsk.hasrun = Task.SKIPPED
+
+ # shrinking sets
+ try:
+ ws = tsk.waiting_sets
+ except AttributeError:
+ pass
+ else:
+ for k in ws:
+ try:
+ k.remove(tsk)
+ except KeyError:
+ pass
+Runner.Parallel.skip = skip
diff --git a/third_party/waf/waflib/extras/misc.py b/third_party/waf/waflib/extras/misc.py
new file mode 100644
index 00000000000..802323ddcc7
--- /dev/null
+++ b/third_party/waf/waflib/extras/misc.py
@@ -0,0 +1,410 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2010 (ita)
+
+"""
+This tool is totally deprecated
+
+Try using:
+ .pc.in files for .pc files
+ the feature intltool_in - see demos/intltool
+ make-like rules
+"""
+
+import shutil, re, os
+from waflib import Node, Task, Utils, Errors
+from waflib.TaskGen import feature, after_method, before_method
+from waflib.Logs import debug
+
+def copy_attrs(orig, dest, names, only_if_set=False):
+ """
+ copy class attributes from an object to another
+ """
+ for a in Utils.to_list(names):
+ u = getattr(orig, a, ())
+ if u or not only_if_set:
+ setattr(dest, a, u)
+
+def copy_func(tsk):
+ "Make a file copy. This might be used to make other kinds of file processing (even calling a compiler is possible)"
+ infile = tsk.inputs[0].abspath()
+ outfile = tsk.outputs[0].abspath()
+ try:
+ shutil.copy2(infile, outfile)
+ except EnvironmentError:
+ return 1
+ else:
+ if tsk.chmod: os.chmod(outfile, tsk.chmod)
+ return 0
+
+def action_process_file_func(tsk):
+ "Ask the function attached to the task to process it"
+ if not tsk.fun: raise Errors.WafError('task must have a function attached to it for copy_func to work!')
+ return tsk.fun(tsk)
+
+@feature('cmd')
+def apply_cmd(self):
+ "call a command everytime"
+ if not self.fun: raise Errors.WafError('cmdobj needs a function!')
+ tsk = Task.TaskBase()
+ tsk.fun = self.fun
+ tsk.env = self.env
+ self.tasks.append(tsk)
+ tsk.install_path = self.install_path
+
+@feature('copy')
+@before_method('process_source')
+def apply_copy(self):
+ Utils.def_attrs(self, fun=copy_func)
+ self.default_install_path = 0
+
+ lst = self.to_list(self.source)
+ self.meths.remove('process_source')
+
+ for filename in lst:
+ node = self.path.find_resource(filename)
+ if not node: raise Errors.WafError('cannot find input file %s for processing' % filename)
+
+ target = self.target
+ if not target or len(lst)>1: target = node.name
+
+ # TODO the file path may be incorrect
+ newnode = self.path.find_or_declare(target)
+
+ tsk = self.create_task('copy', node, newnode)
+ tsk.fun = self.fun
+ tsk.chmod = getattr(self, 'chmod', Utils.O644)
+
+ if not tsk.env:
+ tsk.debug()
+ raise Errors.WafError('task without an environment')
+
+def subst_func(tsk):
+ "Substitutes variables in a .in file"
+
+ m4_re = re.compile(r'@(\w+)@', re.M)
+
+ code = tsk.inputs[0].read() #Utils.readf(infile)
+
+ # replace all % by %% to prevent formatting errors caused by % signs in the input file
+ code = code.replace('%', '%%')
+
+ s = m4_re.sub(r'%(\1)s', code)
+
+ env = tsk.env
+ di = getattr(tsk, 'dict', {}) or getattr(tsk.generator, 'dict', {})
+ if not di:
+ names = m4_re.findall(code)
+ for i in names:
+ di[i] = env.get_flat(i) or env.get_flat(i.upper())
+
+ tsk.outputs[0].write(s % di)
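+
+# Illustrative behaviour (hypothetical template): an input containing
+# "prefix=@PREFIX@" becomes "prefix=/usr/local" when the task or its
+# generator carries dict={'PREFIX': '/usr/local'}; without a dict, each
+# @NAME@ is looked up in the environment as NAME (or its upper-case form).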
+
+@feature('subst')
+@before_method('process_source')
+def apply_subst(self):
+ Utils.def_attrs(self, fun=subst_func)
+ lst = self.to_list(self.source)
+ self.meths.remove('process_source')
+
+ self.dict = getattr(self, 'dict', {})
+
+ for filename in lst:
+ node = self.path.find_resource(filename)
+ if not node: raise Errors.WafError('cannot find input file %s for processing' % filename)
+
+ if self.target:
+ newnode = self.path.find_or_declare(self.target)
+ else:
+ newnode = node.change_ext('')
+
+ try:
+ self.dict = self.dict.get_merged_dict()
+ except AttributeError:
+ pass
+
+ if self.dict and not self.env['DICT_HASH']:
+ self.env = self.env.derive()
+ keys = list(self.dict.keys())
+ keys.sort()
+ lst = [self.dict[x] for x in keys]
+ self.env['DICT_HASH'] = str(Utils.h_list(lst))
+
+ tsk = self.create_task('copy', node, newnode)
+ tsk.fun = self.fun
+ tsk.dict = self.dict
+ tsk.dep_vars = ['DICT_HASH']
+ tsk.chmod = getattr(self, 'chmod', Utils.O644)
+
+ if not tsk.env:
+ tsk.debug()
+ raise Errors.WafError('task without an environment')
+
+####################
+## command-output ####
+####################
+
+class cmd_arg(object):
+ """command-output arguments for representing files or folders"""
+ def __init__(self, name, template='%s'):
+ self.name = name
+ self.template = template
+ self.node = None
+
+class input_file(cmd_arg):
+ def find_node(self, base_path):
+ assert isinstance(base_path, Node.Node)
+ self.node = base_path.find_resource(self.name)
+ if self.node is None:
+ raise Errors.WafError("Input file %s not found in " % (self.name, base_path))
+
+ def get_path(self, env, absolute):
+ if absolute:
+ return self.template % self.node.abspath()
+ else:
+ return self.template % self.node.srcpath()
+
+class output_file(cmd_arg):
+ def find_node(self, base_path):
+ assert isinstance(base_path, Node.Node)
+ self.node = base_path.find_or_declare(self.name)
+ if self.node is None:
+ raise Errors.WafError("Output file %s not found in " % (self.name, base_path))
+
+ def get_path(self, env, absolute):
+ if absolute:
+ return self.template % self.node.abspath()
+ else:
+ return self.template % self.node.bldpath()
+
+class cmd_dir_arg(cmd_arg):
+ def find_node(self, base_path):
+ assert isinstance(base_path, Node.Node)
+ self.node = base_path.find_dir(self.name)
+ if self.node is None:
+ raise Errors.WafError("Directory %s not found in " % (self.name, base_path))
+
+class input_dir(cmd_dir_arg):
+ def get_path(self, dummy_env, dummy_absolute):
+ return self.template % self.node.abspath()
+
+class output_dir(cmd_dir_arg):
+ def get_path(self, env, dummy_absolute):
+ return self.template % self.node.abspath()
+
+
+class command_output(Task.Task):
+ color = "BLUE"
+ def __init__(self, env, command, command_node, command_args, stdin, stdout, cwd, os_env, stderr):
+ Task.Task.__init__(self, env=env)
+ assert isinstance(command, (str, Node.Node))
+ self.command = command
+ self.command_args = command_args
+ self.stdin = stdin
+ self.stdout = stdout
+ self.cwd = cwd
+ self.os_env = os_env
+ self.stderr = stderr
+
+ if command_node is not None: self.dep_nodes = [command_node]
+ self.dep_vars = [] # additional environment variables to look at
+
+ def run(self):
+ task = self
+ #assert len(task.inputs) > 0
+
+ def input_path(node, template):
+ if task.cwd is None:
+ return template % node.bldpath()
+ else:
+ return template % node.abspath()
+ def output_path(node, template):
+ fun = node.abspath
+ if task.cwd is None: fun = node.bldpath
+ return template % fun()
+
+ if isinstance(task.command, Node.Node):
+ argv = [input_path(task.command, '%s')]
+ else:
+ argv = [task.command]
+
+ for arg in task.command_args:
+ if isinstance(arg, str):
+ argv.append(arg)
+ else:
+ assert isinstance(arg, cmd_arg)
+ argv.append(arg.get_path(task.env, (task.cwd is not None)))
+
+ if task.stdin:
+ stdin = open(input_path(task.stdin, '%s'))
+ else:
+ stdin = None
+
+ if task.stdout:
+ stdout = open(output_path(task.stdout, '%s'), "w")
+ else:
+ stdout = None
+
+ if task.stderr:
+ stderr = open(output_path(task.stderr, '%s'), "w")
+ else:
+ stderr = None
+
+ if task.cwd is None:
+ cwd = ('None (actually %r)' % os.getcwd())
+ else:
+ cwd = repr(task.cwd)
+ debug("command-output: cwd=%s, stdin=%r, stdout=%r, argv=%r" %
+ (cwd, stdin, stdout, argv))
+
+ if task.os_env is None:
+ os_env = os.environ
+ else:
+ os_env = task.os_env
+ command = Utils.subprocess.Popen(argv, stdin=stdin, stdout=stdout, stderr=stderr, cwd=task.cwd, env=os_env)
+ return command.wait()
+
+@feature('command-output')
+def init_cmd_output(self):
+ Utils.def_attrs(self,
+ stdin = None,
+ stdout = None,
+ stderr = None,
+ # the command to execute
+ command = None,
+
+ # whether it is an external command; otherwise it is assumed
+ # to be an executable binary or script that lives in the
+ # source or build tree.
+ command_is_external = False,
+
+ # extra parameters (argv) to pass to the command (excluding
+ # the command itself)
+ argv = [],
+
+ # dependencies to other objects -> this is probably not what you want (ita)
+ # values must be 'task_gen' instances (not names!)
+ dependencies = [],
+
+ # dependencies on env variable contents
+ dep_vars = [],
+
+ # input files that are implicit, i.e. they are not
+ # stdin, nor are they mentioned explicitly in argv
+ hidden_inputs = [],
+
+ # output files that are implicit, i.e. they are not
+ # stdout, nor are they mentioned explicitly in argv
+ hidden_outputs = [],
+
+ # change the subprocess to this cwd (must use obj.input_dir() or output_dir() here)
+ cwd = None,
+
+ # OS environment variables to pass to the subprocess
+ # if None, use the default environment variables unchanged
+ os_env = None)
+
+@feature('command-output')
+@after_method('init_cmd_output')
+def apply_cmd_output(self):
+ if self.command is None:
+ raise Errors.WafError("command-output missing command")
+ if self.command_is_external:
+ cmd = self.command
+ cmd_node = None
+ else:
+ cmd_node = self.path.find_resource(self.command)
+ assert cmd_node is not None, ('''Could not find command '%s' in source tree.
+Hint: if this is an external command,
+use command_is_external=True''') % (self.command,)
+ cmd = cmd_node
+
+ if self.cwd is None:
+ cwd = None
+
+ inputs = []
+ outputs = []
+
+ for arg in self.argv:
+ if isinstance(arg, cmd_arg):
+ arg.find_node(self.path)
+ if isinstance(arg, input_file):
+ inputs.append(arg.node)
+ if isinstance(arg, output_file):
+ outputs.append(arg.node)
+
+ if self.stdout is None:
+ stdout = None
+ else:
+ assert isinstance(self.stdout, str)
+ stdout = self.path.find_or_declare(self.stdout)
+ if stdout is None:
+ raise Errors.WafError("File %s not found" % (self.stdout,))
+ outputs.append(stdout)
+
+ if self.stderr is None:
+ stderr = None
+ else:
+ assert isinstance(self.stderr, str)
+ stderr = self.path.find_or_declare(self.stderr)
+ if stderr is None:
+ raise Errors.WafError("File %s not found" % (self.stderr,))
+ outputs.append(stderr)
+
+ if self.stdin is None:
+ stdin = None
+ else:
+ assert isinstance(self.stdin, str)
+ stdin = self.path.find_resource(self.stdin)
+ if stdin is None:
+ raise Errors.WafError("File %s not found" % (self.stdin,))
+ inputs.append(stdin)
+
+ for hidden_input in self.to_list(self.hidden_inputs):
+ node = self.path.find_resource(hidden_input)
+ if node is None:
+ raise Errors.WafError("File %s not found in dir %s" % (hidden_input, self.path))
+ inputs.append(node)
+
+ for hidden_output in self.to_list(self.hidden_outputs):
+ node = self.path.find_or_declare(hidden_output)
+ if node is None:
+ raise Errors.WafError("File %s not found in dir %s" % (hidden_output, self.path))
+ outputs.append(node)
+
+ if not (inputs or getattr(self, 'no_inputs', None)):
+ raise Errors.WafError('command-output objects must have at least one input file or set self.no_inputs')
+ if not (outputs or getattr(self, 'no_outputs', None)):
+ raise Errors.WafError('command-output objects must have at least one output file or set self.no_outputs')
+
+ cwd = self.bld.variant_dir
+ task = command_output(self.env, cmd, cmd_node, self.argv, stdin, stdout, cwd, self.os_env, stderr)
+ task.generator = self
+ copy_attrs(self, task, 'before after ext_in ext_out', only_if_set=True)
+ self.tasks.append(task)
+
+ task.inputs = inputs
+ task.outputs = outputs
+ task.dep_vars = self.to_list(self.dep_vars)
+
+ for dep in self.dependencies:
+ assert dep is not self
+ dep.post()
+ for dep_task in dep.tasks:
+ task.set_run_after(dep_task)
+
+ if not task.inputs:
+ # the case for svnversion, always run, and update the output nodes
+ task.runnable_status = type(Task.TaskBase.run)(runnable_status, task, task.__class__) # always run
+ task.post_run = type(Task.TaskBase.run)(post_run, task, task.__class__)
+
+ # TODO the case with no outputs?
+
+def post_run(self):
+ for x in self.outputs:
+ x.sig = Utils.h_file(x.abspath())
+
+def runnable_status(self):
+ return self.RUN_ME
+
+Task.task_factory('copy', vars=[], func=action_process_file_func)
diff --git a/third_party/waf/waflib/extras/msvcdeps.py b/third_party/waf/waflib/extras/msvcdeps.py
new file mode 100644
index 00000000000..98b06776d01
--- /dev/null
+++ b/third_party/waf/waflib/extras/msvcdeps.py
@@ -0,0 +1,262 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Copyright Garmin International or its subsidiaries, 2012-2013
+
+'''
+Off-load dependency scanning from Python code to MSVC compiler
+
+This tool is safe to load in any environment; it only activates the
+MSVC-specific behaviour when it finds that a particular task generator
+uses MSVC to compile.
+
+Empirical testing shows about a 10% execution time savings from using
+this tool as compared to c_preproc.
+
+The technique of gutting scan() and pushing the dependency calculation
+down to post_run() is cribbed from gccdeps.py.
+'''
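+
+# A minimal wscript sketch (assuming a standard Waf project); the tool is a
+# no-op unless CC_NAME is 'msvc':
+#
+#   def configure(conf):
+#       conf.load('compiler_c')
+#       conf.load('msvcdeps')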
+
+import os
+import sys
+import tempfile
+import threading
+
+from waflib import Context, Errors, Logs, Task, Utils
+from waflib.Tools import c_preproc, c, cxx, msvc
+from waflib.TaskGen import feature, before_method
+
+lock = threading.Lock()
+nodes = {} # Cache the path -> Node lookup
+
+PREPROCESSOR_FLAG = '/showIncludes'
+INCLUDE_PATTERN = 'Note: including file:'
+
+# Extensible by outside tools
+supported_compilers = ['msvc']
+
+@feature('c', 'cxx')
+@before_method('process_source')
+def apply_msvcdeps_flags(taskgen):
+ if taskgen.env.CC_NAME not in supported_compilers:
+ return
+
+ for flag in ('CFLAGS', 'CXXFLAGS'):
+ if taskgen.env.get_flat(flag).find(PREPROCESSOR_FLAG) < 0:
+ taskgen.env.append_value(flag, PREPROCESSOR_FLAG)
+
+ # Figure out what casing conventions the user's shell used when
+ # launching Waf
+ (drive, _) = os.path.splitdrive(taskgen.bld.srcnode.abspath())
+ taskgen.msvcdeps_drive_lowercase = drive == drive.lower()
+
+def path_to_node(base_node, path, cached_nodes):
+ # Take the base node and the path and return a node
+ # Results are cached because searching the node tree is expensive
+ # This code runs in multiple threads and the cache dict is shared, so a lock is needed
+ if getattr(path, '__hash__'):
+ node_lookup_key = (base_node, path)
+ else:
+ # Not hashable, assume it is a list and join into a string
+ node_lookup_key = (base_node, os.path.sep.join(path))
+ try:
+ lock.acquire()
+ node = cached_nodes[node_lookup_key]
+ except KeyError:
+ node = base_node.find_resource(path)
+ cached_nodes[node_lookup_key] = node
+ finally:
+ lock.release()
+ return node
+
+def wrap_compiled_task(classname):
+ '''
+ Register a task subclass that has hooks for running our custom
+ dependency calculations rather than the stock C/C++ c_preproc
+ method.
+ '''
+ derived_class = type(classname, (Task.classes[classname],), {})
+
+ def post_run(self):
+ if self.env.CC_NAME not in supported_compilers:
+ return super(derived_class, self).post_run()
+
+ if getattr(self, 'cached', None):
+ return Task.Task.post_run(self)
+
+ bld = self.generator.bld
+ unresolved_names = []
+ resolved_nodes = []
+
+ lowercase = self.generator.msvcdeps_drive_lowercase
+ correct_case_path = bld.path.abspath()
+ correct_case_path_len = len(correct_case_path)
+ correct_case_path_norm = os.path.normcase(correct_case_path)
+
+ # Dynamically bind to the cache
+ try:
+ cached_nodes = bld.cached_nodes
+ except AttributeError:
+ cached_nodes = bld.cached_nodes = {}
+
+ for path in self.msvcdeps_paths:
+ node = None
+ if os.path.isabs(path):
+ # Force drive letter to match conventions of main source tree
+ drive, tail = os.path.splitdrive(path)
+
+ if os.path.normcase(path[:correct_case_path_len]) == correct_case_path_norm:
+ # Path is in the sandbox, force it to be correct. MSVC sometimes returns a lowercase path.
+ path = correct_case_path + path[correct_case_path_len:]
+ else:
+ # Check the drive letter
+ if lowercase and (drive != drive.lower()):
+ path = drive.lower() + tail
+ elif (not lowercase) and (drive != drive.upper()):
+ path = drive.upper() + tail
+ node = path_to_node(bld.root, path, cached_nodes)
+ else:
+ base_node = bld.bldnode
+ # when calling find_resource, make sure the path does not begin with '..'
+ path = [k for k in Utils.split_path(path) if k and k != '.']
+ while path[0] == '..':
+ path = path[1:]
+ base_node = base_node.parent
+
+ node = path_to_node(base_node, path, cached_nodes)
+
+ if not node:
+ raise ValueError('could not find %r for %r' % (path, self))
+ else:
+ if not c_preproc.go_absolute:
+ if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)):
+ # System library
+ Logs.debug('msvcdeps: Ignoring system include %r' % node)
+ continue
+
+ if id(node) == id(self.inputs[0]):
+ # Self-dependency
+ continue
+
+ resolved_nodes.append(node)
+
+ bld.node_deps[self.uid()] = resolved_nodes
+ bld.raw_deps[self.uid()] = unresolved_names
+
+ try:
+ del self.cache_sig
+ except AttributeError:
+ pass
+
+ Task.Task.post_run(self)
+
+ def scan(self):
+ if self.env.CC_NAME not in supported_compilers:
+ return super(derived_class, self).scan()
+
+ resolved_nodes = self.generator.bld.node_deps.get(self.uid(), [])
+ unresolved_names = []
+ return (resolved_nodes, unresolved_names)
+
+ def sig_implicit_deps(self):
+ if self.env.CC_NAME not in supported_compilers:
+ return super(derived_class, self).sig_implicit_deps()
+
+ try:
+ return Task.Task.sig_implicit_deps(self)
+ except Errors.WafError:
+ return Utils.SIG_NIL
+
+ def exec_response_command(self, cmd, **kw):
+ # exec_response_command() is only called from inside msvc.py anyway
+ assert self.env.CC_NAME in supported_compilers
+
+ # Only bother adding '/showIncludes' to compile tasks
+ if isinstance(self, (c.c, cxx.cxx)):
+ try:
+ # The Visual Studio IDE adds an environment variable that causes
+ # the MS compiler to send its textual output directly to the
+ # debugging window rather than normal stdout/stderr.
+ #
+ # This is unrecoverably bad for this tool because it will cause
+ # all the dependency scanning to see an empty stdout stream and
+ # assume that the file being compiled uses no headers.
+ #
+ # See http://blogs.msdn.com/b/freik/archive/2006/04/05/569025.aspx
+ #
+ # Attempting to repair the situation by deleting the offending
+ # envvar at this point in tool execution will not be good enough--
+ # its presence poisons the 'waf configure' step earlier. We just
+ # want to put a sanity check here in order to help developers
+ # quickly diagnose the issue if an otherwise-good Waf tree
+ # is then executed inside the MSVS IDE.
+ assert 'VS_UNICODE_OUTPUT' not in kw['env']
+
+ tmp = None
+
+ # This block duplicated from Waflib's msvc.py
+ if sys.platform.startswith('win') and isinstance(cmd, list) and len(' '.join(cmd)) >= 8192:
+ program = cmd[0]
+ cmd = [self.quote_response_command(x) for x in cmd]
+ (fd, tmp) = tempfile.mkstemp()
+ os.write(fd, '\r\n'.join(i.replace('\\', '\\\\') for i in cmd[1:]).encode())
+ os.close(fd)
+ cmd = [program, '@' + tmp]
+ # ... end duplication
+
+ self.msvcdeps_paths = []
+
+ kw['env'] = kw.get('env', os.environ.copy())
+ kw['cwd'] = kw.get('cwd', os.getcwd())
+ kw['quiet'] = Context.STDOUT
+ kw['output'] = Context.STDOUT
+
+ out = []
+
+ try:
+ raw_out = self.generator.bld.cmd_and_log(cmd, **kw)
+ ret = 0
+ except Errors.WafError as e:
+ raw_out = e.stdout
+ ret = e.returncode
+
+ for line in raw_out.splitlines():
+ if line.startswith(INCLUDE_PATTERN):
+ inc_path = line[len(INCLUDE_PATTERN):].strip()
+ Logs.debug('msvcdeps: Found include %s' % inc_path)
+ self.msvcdeps_paths.append(inc_path)
+ else:
+ out.append(line)
+
+ # Pipe through the remaining stdout content (not related to /showIncludes)
+ if self.generator.bld.logger:
+ self.generator.bld.logger.debug('out: %s' % os.linesep.join(out))
+ else:
+ sys.stdout.write(os.linesep.join(out) + os.linesep)
+
+ finally:
+ if tmp:
+ try:
+ os.remove(tmp)
+ except OSError:
+ pass
+
+ return ret
+ else:
+ # Use base class's version of this method for linker tasks
+ return super(derived_class, self).exec_response_command(cmd, **kw)
+
+ def can_retrieve_cache(self):
+ # msvcdeps and netcaching are incompatible, so disable the cache
+ if self.env.CC_NAME not in supported_compilers:
+ return super(derived_class, self).can_retrieve_cache()
+ self.nocache = True # Disable sending the file to the cache
+ return False
+
+ derived_class.post_run = post_run
+ derived_class.scan = scan
+ derived_class.sig_implicit_deps = sig_implicit_deps
+ derived_class.exec_response_command = exec_response_command
+ derived_class.can_retrieve_cache = can_retrieve_cache
+
+for k in ('c', 'cxx'):
+ wrap_compiled_task(k)
diff --git a/third_party/waf/waflib/extras/msvs.py b/third_party/waf/waflib/extras/msvs.py
new file mode 100644
index 00000000000..5f76c269d1c
--- /dev/null
+++ b/third_party/waf/waflib/extras/msvs.py
@@ -0,0 +1,1033 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Avalanche Studios 2009-2011
+# Thomas Nagy 2011
+
+"""
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. The name of the author may not be used to endorse or promote products
+ derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+"""
+
+"""
+To add this tool to your project:
+def options(opt):
+ opt.load('msvs')
+
+It can be a good idea to add the sync_exec tool too.
+
+To generate solution files:
+$ waf configure msvs
+
+To customize the outputs, provide subclasses in your wscript files:
+
+from waflib.extras import msvs
+class vsnode_target(msvs.vsnode_target):
+ def get_build_command(self, props):
+ # likely to be required
+ return "waf.bat build"
+ def collect_source(self):
+ # likely to be required
+ ...
+class msvs_bar(msvs.msvs_generator):
+ def init(self):
+ msvs.msvs_generator.init(self)
+ self.vsnode_target = vsnode_target
+
+The msvs class re-uses the same build() function for reading the targets (task
+generators), so you may specify msvs settings on the context object:
+
+def build(bld):
+ bld.solution_name = 'foo.sln'
+ bld.waf_command = 'waf.bat'
+ bld.projects_dir = bld.srcnode.make_node('.depproj')
+ bld.projects_dir.mkdir()
+
+For Visual Studio 2008, the command is called 'msvs2008', and the classes
+such as vsnode_target are wrapped by a decorator class 'wrap_2008' to
+provide special functionality.
+
+ASSUMPTIONS:
+* a project can be either a directory or a target; vcxproj files are written only for targets that have source files
+* each project is a vcxproj file, therefore the project uuid needs only to be a hash of the absolute path
+"""
+
+import os, re, sys
+import uuid # requires python 2.5
+from waflib.Build import BuildContext
+from waflib import Utils, TaskGen, Logs, Task, Context, Node, Options
+
+HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'
+
+PROJECT_TEMPLATE = r'''<?xml version="1.0" encoding="UTF-8"?>
+<Project DefaultTargets="Build" ToolsVersion="4.0"
+ xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+
+ <ItemGroup Label="ProjectConfigurations">
+ ${for b in project.build_properties}
+ <ProjectConfiguration Include="${b.configuration}|${b.platform}">
+ <Configuration>${b.configuration}</Configuration>
+ <Platform>${b.platform}</Platform>
+ </ProjectConfiguration>
+ ${endfor}
+ </ItemGroup>
+
+ <PropertyGroup Label="Globals">
+ <ProjectGuid>{${project.uuid}}</ProjectGuid>
+ <Keyword>MakeFileProj</Keyword>
+ <ProjectName>${project.name}</ProjectName>
+ </PropertyGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+
+ ${for b in project.build_properties}
+ <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'" Label="Configuration">
+ <ConfigurationType>Makefile</ConfigurationType>
+ <OutDir>${b.outdir}</OutDir>
+ <PlatformToolset>v110</PlatformToolset>
+ </PropertyGroup>
+ ${endfor}
+
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+ <ImportGroup Label="ExtensionSettings">
+ </ImportGroup>
+
+ ${for b in project.build_properties}
+ <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+ </ImportGroup>
+ ${endfor}
+
+ ${for b in project.build_properties}
+ <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'">
+ <NMakeBuildCommandLine>${xml:project.get_build_command(b)}</NMakeBuildCommandLine>
+ <NMakeReBuildCommandLine>${xml:project.get_rebuild_command(b)}</NMakeReBuildCommandLine>
+ <NMakeCleanCommandLine>${xml:project.get_clean_command(b)}</NMakeCleanCommandLine>
+ <NMakeIncludeSearchPath>${xml:b.includes_search_path}</NMakeIncludeSearchPath>
+ <NMakePreprocessorDefinitions>${xml:b.preprocessor_definitions};$(NMakePreprocessorDefinitions)</NMakePreprocessorDefinitions>
+ <IncludePath>${xml:b.includes_search_path}</IncludePath>
+ <ExecutablePath>$(ExecutablePath)</ExecutablePath>
+
+ ${if getattr(b, 'output_file', None)}
+ <NMakeOutput>${xml:b.output_file}</NMakeOutput>
+ ${endif}
+ ${if getattr(b, 'deploy_dir', None)}
+ <RemoteRoot>${xml:b.deploy_dir}</RemoteRoot>
+ ${endif}
+ </PropertyGroup>
+ ${endfor}
+
+ ${for b in project.build_properties}
+ ${if getattr(b, 'deploy_dir', None)}
+ <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'">
+ <Deploy>
+ <DeploymentType>CopyToHardDrive</DeploymentType>
+ </Deploy>
+ </ItemDefinitionGroup>
+ ${endif}
+ ${endfor}
+
+ <ItemGroup>
+ ${for x in project.source}
+ <${project.get_key(x)} Include='${x.win32path()}' />
+ ${endfor}
+ </ItemGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+ <ImportGroup Label="ExtensionTargets">
+ </ImportGroup>
+</Project>
+'''
+
+FILTER_TEMPLATE = '''<?xml version="1.0" encoding="UTF-8"?>
+<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemGroup>
+ ${for x in project.source}
+ <${project.get_key(x)} Include="${x.win32path()}">
+ <Filter>${project.get_filter_name(x.parent)}</Filter>
+ </${project.get_key(x)}>
+ ${endfor}
+ </ItemGroup>
+ <ItemGroup>
+ ${for x in project.dirs()}
+ <Filter Include="${project.get_filter_name(x)}">
+ <UniqueIdentifier>{${project.make_uuid(x.win32path())}}</UniqueIdentifier>
+ </Filter>
+ ${endfor}
+ </ItemGroup>
+</Project>
+'''
+
+PROJECT_2008_TEMPLATE = r'''<?xml version="1.0" encoding="UTF-8"?>
+<VisualStudioProject ProjectType="Visual C++" Version="9,00"
+ Name="${xml: project.name}" ProjectGUID="{${project.uuid}}"
+ Keyword="MakeFileProj"
+ TargetFrameworkVersion="196613">
+ <Platforms>
+ ${if project.build_properties}
+ ${for b in project.build_properties}
+ <Platform Name="${xml: b.platform}" />
+ ${endfor}
+ ${else}
+ <Platform Name="Win32" />
+ ${endif}
+ </Platforms>
+ <ToolFiles>
+ </ToolFiles>
+ <Configurations>
+ ${if project.build_properties}
+ ${for b in project.build_properties}
+ <Configuration
+ Name="${xml: b.configuration}|${xml: b.platform}"
+ IntermediateDirectory="$ConfigurationName"
+ OutputDirectory="${xml: b.outdir}"
+ ConfigurationType="0">
+ <Tool
+ Name="VCNMakeTool"
+ BuildCommandLine="${xml: project.get_build_command(b)}"
+ ReBuildCommandLine="${xml: project.get_rebuild_command(b)}"
+ CleanCommandLine="${xml: project.get_clean_command(b)}"
+ ${if getattr(b, 'output_file', None)}
+ Output="${xml: b.output_file}"
+ ${endif}
+ PreprocessorDefinitions="${xml: b.preprocessor_definitions}"
+ IncludeSearchPath="${xml: b.includes_search_path}"
+ ForcedIncludes=""
+ ForcedUsingAssemblies=""
+ AssemblySearchPath=""
+ CompileAsManaged=""
+ />
+ </Configuration>
+ ${endfor}
+ ${else}
+ <Configuration Name="Release|Win32" >
+ </Configuration>
+ ${endif}
+ </Configurations>
+ <References>
+ </References>
+ <Files>
+${project.display_filter()}
+ </Files>
+</VisualStudioProject>
+'''
+
+SOLUTION_TEMPLATE = '''Microsoft Visual Studio Solution File, Format Version ${project.numver}
+# Visual Studio ${project.vsver}
+${for p in project.all_projects}
+Project("{${p.ptype()}}") = "${p.name}", "${p.title}", "{${p.uuid}}"
+EndProject${endfor}
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ ${if project.all_projects}
+ ${for (configuration, platform) in project.all_projects[0].ctx.project_configurations()}
+ ${configuration}|${platform} = ${configuration}|${platform}
+ ${endfor}
+ ${endif}
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ ${for p in project.all_projects}
+ ${if hasattr(p, 'source')}
+ ${for b in p.build_properties}
+ {${p.uuid}}.${b.configuration}|${b.platform}.ActiveCfg = ${b.configuration}|${b.platform}
+ ${if getattr(p, 'is_active', None)}
+ {${p.uuid}}.${b.configuration}|${b.platform}.Build.0 = ${b.configuration}|${b.platform}
+ ${endif}
+ ${if getattr(p, 'is_deploy', None)}
+ {${p.uuid}}.${b.configuration}|${b.platform}.Deploy.0 = ${b.configuration}|${b.platform}
+ ${endif}
+ ${endfor}
+ ${endif}
+ ${endfor}
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(NestedProjects) = preSolution
+ ${for p in project.all_projects}
+ ${if p.parent}
+ {${p.uuid}} = {${p.parent.uuid}}
+ ${endif}
+ ${endfor}
+ EndGlobalSection
+EndGlobal
+'''
+
+COMPILE_TEMPLATE = '''def f(project):
+ lst = []
+ def xml_escape(value):
+ return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
+
+ %s
+
+ #f = open('cmd.txt', 'w')
+ #f.write(str(lst))
+ #f.close()
+ return ''.join(lst)
+'''
+reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
+def compile_template(line):
+ """
+ Compile a template expression into a python function (like jsps, but way shorter)
+ """
+ extr = []
+ def repl(match):
+ g = match.group
+ if g('dollar'): return "$"
+ elif g('backslash'):
+ return "\\"
+ elif g('subst'):
+ extr.append(g('code'))
+ return "<<|@|>>"
+ return None
+
+ line2 = reg_act.sub(repl, line)
+ params = line2.split('<<|@|>>')
+ assert(extr)
+
+ indent = 0
+ buf = []
+
+ def app(txt):
+ buf.append(indent * '\t' + txt)
+
+ for x in range(len(extr)):
+ if params[x]:
+ app("lst.append(%r)" % params[x])
+
+ f = extr[x]
+ if f.startswith('if') or f.startswith('for'):
+ app(f + ':')
+ indent += 1
+ elif f.startswith('py:'):
+ app(f[3:])
+ elif f.startswith('endif') or f.startswith('endfor'):
+ indent -= 1
+ elif f.startswith('else') or f.startswith('elif'):
+ indent -= 1
+ app(f + ':')
+ indent += 1
+ elif f.startswith('xml:'):
+ app('lst.append(xml_escape(%s))' % f[4:])
+ else:
+ #app('lst.append((%s) or "cannot find %s")' % (f, f))
+ app('lst.append(%s)' % f)
+
+ if extr:
+ if params[-1]:
+ app("lst.append(%r)" % params[-1])
+
+ fun = COMPILE_TEMPLATE % "\n\t".join(buf)
+ #print(fun)
+ return Task.funex(fun)
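+
+# A tiny illustration (hypothetical template): compile_template('x=${project.name};')
+# returns a function f such that f(obj) evaluates to 'x=' + obj.name + ';';
+# ${for}/${endfor}, ${if}/${endif} and ${xml:...} expand as handled above.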
+
+
+re_blank = re.compile(r'(\n|\r|\s)*\n', re.M)
+def rm_blank_lines(txt):
+ txt = re_blank.sub('\r\n', txt)
+ return txt
+
+BOM = '\xef\xbb\xbf'
+try:
+ BOM = bytes(BOM, 'iso8859-1') # python 3
+except TypeError:
+ pass
+
+def stealth_write(self, data, flags='wb'):
+ try:
+ unicode
+ except NameError:
+ data = data.encode('utf-8') # python 3
+ else:
+ data = data.decode(sys.getfilesystemencoding(), 'replace')
+ data = data.encode('utf-8')
+
+ if self.name.endswith('.vcproj') or self.name.endswith('.vcxproj'):
+ data = BOM + data
+
+ try:
+ txt = self.read(flags='rb')
+ if txt != data:
+ raise ValueError('must write')
+ except (IOError, ValueError):
+ self.write(data, flags=flags)
+ else:
+ Logs.debug('msvs: skipping %s' % self.win32path())
+Node.Node.stealth_write = stealth_write
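+
+# stealth_write rewrites a file only when its content actually changed, which
+# presumably avoids prompting Visual Studio to reload unchanged projects.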
+
+re_win32 = re.compile(r'^([/\\]cygdrive)?[/\\]([a-z])([^a-z0-9_-].*)', re.I)
+def win32path(self):
+ p = self.abspath()
+ m = re_win32.match(p)
+ if m:
+ return "%s:%s" % (m.group(2).upper(), m.group(3))
+ return p
+Node.Node.win32path = win32path
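+
+# e.g. '/cygdrive/c/proj/x.c' or '/c/proj/x.c' both map to 'C:/proj/x.c'
+# (illustrative paths; native Windows paths fall through unchanged)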
+
+re_quote = re.compile("[^a-zA-Z0-9-]")
+def quote(s):
+ return re_quote.sub("_", s)
+
+def xml_escape(value):
+ return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
+
+def make_uuid(v, prefix = None):
+ """
+ simple utility function
+ """
+ if isinstance(v, dict):
+ keys = list(v.keys())
+ keys.sort()
+ tmp = str([(k, v[k]) for k in keys])
+ else:
+ tmp = str(v)
+ d = Utils.md5(tmp.encode()).hexdigest().upper()
+ if prefix:
+ d = '%s%s' % (prefix, d[8:])
+ gid = uuid.UUID(d, version = 4)
+ return str(gid).upper()
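+
+# e.g. make_uuid('C:/proj/foo.vcxproj') always yields the same uppercase UUID
+# string for that path, keeping project GUIDs stable across regenerations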
+
+def diff(node, fromnode):
+ # difference between two nodes, but with "(..)" instead of ".."
+ c1 = node
+ c2 = fromnode
+
+ c1h = c1.height()
+ c2h = c2.height()
+
+ lst = []
+ up = 0
+
+ while c1h > c2h:
+ lst.append(c1.name)
+ c1 = c1.parent
+ c1h -= 1
+
+ while c2h > c1h:
+ up += 1
+ c2 = c2.parent
+ c2h -= 1
+
+ while id(c1) != id(c2):
+ lst.append(c1.name)
+ up += 1
+
+ c1 = c1.parent
+ c2 = c2.parent
+
+ for i in range(up):
+ lst.append('(..)')
+ lst.reverse()
+ return tuple(lst)
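+
+# e.g. for nodes src/a/b and src/c, diff(b_node, c_node) returns
+# ('(..)', 'a', 'b'): up one level from src/c, then down into a/b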
+
+class build_property(object):
+ pass
+
+class vsnode(object):
+ """
+ Abstract class representing visual studio elements
+ We assume that all visual studio nodes have a uuid and a parent
+ """
+ def __init__(self, ctx):
+ self.ctx = ctx # msvs context
+ self.name = '' # string, mandatory
+ self.vspath = '' # path in visual studio (name for dirs, absolute path for projects)
+ self.uuid = '' # string, mandatory
+ self.parent = None # parent node for visual studio nesting
+
+ def get_waf(self):
+ """
+ Override in subclasses...
+ """
+ return 'cd /d "%s" & %s' % (self.ctx.srcnode.win32path(), getattr(self.ctx, 'waf_command', 'waf.bat'))
+
+ def ptype(self):
+ """
+ Return a special uuid for projects written in the solution file
+ """
+ pass
+
+ def write(self):
+ """
+ Write the project file, by default, do nothing
+ """
+ pass
+
+ def make_uuid(self, val):
+ """
+ Alias for creating uuid values easily (the templates cannot access global variables)
+ """
+ return make_uuid(val)
+
+class vsnode_vsdir(vsnode):
+ """
+ Nodes representing visual studio folders (which do not match the filesystem tree!)
+ """
+ VS_GUID_SOLUTIONFOLDER = "2150E333-8FDC-42A3-9474-1A3956D46DE8"
+ def __init__(self, ctx, uuid, name, vspath=''):
+ vsnode.__init__(self, ctx)
+ self.title = self.name = name
+ self.uuid = uuid
+ self.vspath = vspath or name
+
+ def ptype(self):
+ return self.VS_GUID_SOLUTIONFOLDER
+
+class vsnode_project(vsnode):
+ """
+ Abstract class representing visual studio project elements
+ A project is assumed to be writable, and has a node representing the file to write to
+ """
+ VS_GUID_VCPROJ = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"
+ def ptype(self):
+ return self.VS_GUID_VCPROJ
+
+ def __init__(self, ctx, node):
+ vsnode.__init__(self, ctx)
+ self.path = node
+ self.uuid = make_uuid(node.win32path())
+ self.name = node.name
+ self.title = self.path.win32path()
+ self.source = [] # list of node objects
+ self.build_properties = [] # list of properties (nmake commands, output dir, etc)
+
+ def dirs(self):
+ """
+ Get the list of parent folders of the source files (header files included)
+ for writing the filters
+ """
+ lst = []
+ def add(x):
+ if x.height() > self.tg.path.height() and x not in lst:
+ lst.append(x)
+ add(x.parent)
+ for x in self.source:
+ add(x.parent)
+ return lst
+
+ def write(self):
+ Logs.debug('msvs: creating %r' % self.path)
+
+ # first write the project file
+ template1 = compile_template(PROJECT_TEMPLATE)
+ proj_str = template1(self)
+ proj_str = rm_blank_lines(proj_str)
+ self.path.stealth_write(proj_str)
+
+ # then write the filter
+ template2 = compile_template(FILTER_TEMPLATE)
+ filter_str = template2(self)
+ filter_str = rm_blank_lines(filter_str)
+ tmp = self.path.parent.make_node(self.path.name + '.filters')
+ tmp.stealth_write(filter_str)
+
+ def get_key(self, node):
+ """
+ required for writing the source files
+ """
+ name = node.name
+ if name.endswith('.cpp') or name.endswith('.c'):
+ return 'ClCompile'
+ return 'ClInclude'
+
+ def collect_properties(self):
+ """
+ Returns a list of triplet (configuration, platform, output_directory)
+ """
+ ret = []
+ for c in self.ctx.configurations:
+ for p in self.ctx.platforms:
+ x = build_property()
+ x.outdir = ''
+
+ x.configuration = c
+ x.platform = p
+
+ x.preprocessor_definitions = ''
+ x.includes_search_path = ''
+
+ # can specify "deploy_dir" too
+ ret.append(x)
+ self.build_properties = ret
+
+ def get_build_params(self, props):
+ opt = '--execsolution=%s' % self.ctx.get_solution_node().win32path()
+ return (self.get_waf(), opt)
+
+ def get_build_command(self, props):
+ return "%s build %s" % self.get_build_params(props)
+
+ def get_clean_command(self, props):
+ return "%s clean %s" % self.get_build_params(props)
+
+ def get_rebuild_command(self, props):
+ return "%s clean build %s" % self.get_build_params(props)
+
+ def get_filter_name(self, node):
+ lst = diff(node, self.tg.path)
+ return '\\'.join(lst) or '.'
+
+class vsnode_alias(vsnode_project):
+ def __init__(self, ctx, node, name):
+ vsnode_project.__init__(self, ctx, node)
+ self.name = name
+ self.output_file = ''
+
+class vsnode_build_all(vsnode_alias):
+ """
+ Fake target used to emulate the behaviour of "make all" (starting one process per target is slow)
+ This is the only alias enabled by default
+ """
+ def __init__(self, ctx, node, name='build_all_projects'):
+ vsnode_alias.__init__(self, ctx, node, name)
+ self.is_active = True
+
+class vsnode_install_all(vsnode_alias):
+ """
+ Fake target used to emulate the behaviour of "make install"
+ """
+ def __init__(self, ctx, node, name='install_all_projects'):
+ vsnode_alias.__init__(self, ctx, node, name)
+
+ def get_build_command(self, props):
+ return "%s build install %s" % self.get_build_params(props)
+
+ def get_clean_command(self, props):
+ return "%s clean %s" % self.get_build_params(props)
+
+ def get_rebuild_command(self, props):
+ return "%s clean build install %s" % self.get_build_params(props)
+
+class vsnode_project_view(vsnode_alias):
+ """
+ Fake target used to emulate a file system view
+ """
+ def __init__(self, ctx, node, name='project_view'):
+ vsnode_alias.__init__(self, ctx, node, name)
+ self.tg = self.ctx() # fake one, cannot remove
+ self.exclude_files = Node.exclude_regs + '''
+waf-1.8.*
+waf3-1.8.*/**
+.waf-1.8.*
+.waf3-1.8.*/**
+**/*.sdf
+**/*.suo
+**/*.ncb
+**/%s
+ ''' % Options.lockfile
+
+ def collect_source(self):
+ # this is likely to be slow
+ self.source = self.ctx.srcnode.ant_glob('**', excl=self.exclude_files)
+
+ def get_build_command(self, props):
+ params = self.get_build_params(props) + (self.ctx.cmd,)
+ return "%s %s %s" % params
+
+ def get_clean_command(self, props):
+ return ""
+
+ def get_rebuild_command(self, props):
+ return self.get_build_command(props)
+
+class vsnode_target(vsnode_project):
+ """
+ Visual Studio project representing a target (program, library, etc.),
+ bound to a task generator
+ """
+ def __init__(self, ctx, tg):
+ """
+ A project is more or less equivalent to a file/folder
+ """
+ base = getattr(ctx, 'projects_dir', None) or tg.path
+ node = base.make_node(quote(tg.name) + ctx.project_extension) # the project file as a Node
+ vsnode_project.__init__(self, ctx, node)
+ self.name = quote(tg.name)
+ self.tg = tg # task generator
+
+ def get_build_params(self, props):
+ """
+ Override the default to add the target name
+ """
+ opt = '--execsolution=%s' % self.ctx.get_solution_node().win32path()
+ if getattr(self, 'tg', None):
+ opt += " --targets=%s" % self.tg.name
+ return (self.get_waf(), opt)
+
+ def collect_source(self):
+ tg = self.tg
+ source_files = tg.to_nodes(getattr(tg, 'source', []))
+ include_dirs = Utils.to_list(getattr(tg, 'msvs_includes', []))
+ include_files = []
+ for x in include_dirs:
+ if isinstance(x, str):
+ x = tg.path.find_node(x)
+ if x:
+ lst = [y for y in x.ant_glob(HEADERS_GLOB, flat=False)]
+ include_files.extend(lst)
+
+ # remove duplicates
+ self.source.extend(list(set(source_files + include_files)))
+ self.source.sort(key=lambda x: x.win32path())
+
+ def collect_properties(self):
+ """
+ Visual studio projects are associated with platforms and configurations (for building especially)
+ """
+ super(vsnode_target, self).collect_properties()
+ for x in self.build_properties:
+ x.outdir = self.path.parent.win32path()
+ x.preprocessor_definitions = ''
+ x.includes_search_path = ''
+
+ try:
+ tsk = self.tg.link_task
+ except AttributeError:
+ pass
+ else:
+ x.output_file = tsk.outputs[0].win32path()
+ x.preprocessor_definitions = ';'.join(tsk.env.DEFINES)
+ x.includes_search_path = ';'.join(self.tg.env.INCPATHS)
+
+class msvs_generator(BuildContext):
+ '''generates a visual studio 2010 solution'''
+ cmd = 'msvs'
+ fun = 'build'
+
+ def init(self):
+ """
+ Some data that needs to be present
+ """
+ if not getattr(self, 'configurations', None):
+ self.configurations = ['Release'] # LocalRelease, RemoteDebug, etc
+ if not getattr(self, 'platforms', None):
+ self.platforms = ['Win32']
+ if not getattr(self, 'all_projects', None):
+ self.all_projects = []
+ if not getattr(self, 'project_extension', None):
+ self.project_extension = '.vcxproj'
+ if not getattr(self, 'projects_dir', None):
+ self.projects_dir = self.srcnode.make_node('.depproj')
+ self.projects_dir.mkdir()
+
+ # bind the classes to the object, so that subclass can provide custom generators
+ if not getattr(self, 'vsnode_vsdir', None):
+ self.vsnode_vsdir = vsnode_vsdir
+ if not getattr(self, 'vsnode_target', None):
+ self.vsnode_target = vsnode_target
+ if not getattr(self, 'vsnode_build_all', None):
+ self.vsnode_build_all = vsnode_build_all
+ if not getattr(self, 'vsnode_install_all', None):
+ self.vsnode_install_all = vsnode_install_all
+ if not getattr(self, 'vsnode_project_view', None):
+ self.vsnode_project_view = vsnode_project_view
+
+ self.numver = '11.00'
+ self.vsver = '2010'
+
+ def execute(self):
+ """
+ Entry point
+ """
+ self.restore()
+ if not self.all_envs:
+ self.load_envs()
+ self.recurse([self.run_dir])
+
+ # user initialization
+ self.init()
+
+ # two phases for creating the solution
+ self.collect_projects() # add project objects into "self.all_projects"
+ self.write_files() # write the corresponding project and solution files
+
+ def collect_projects(self):
+ """
+ Fill the list self.all_projects with project objects
+ Fill the list of build targets
+ """
+ self.collect_targets()
+ self.add_aliases()
+ self.collect_dirs()
+ default_project = getattr(self, 'default_project', None)
+ def sortfun(x):
+ if x.name == default_project:
+ return ''
+ return getattr(x, 'path', None) and x.path.win32path() or x.name
+ self.all_projects.sort(key=sortfun)
+
+ def write_files(self):
+ """
+ Write the project and solution files from the data collected
+ so far. It is unlikely that you will want to change this
+ """
+ for p in self.all_projects:
+ p.write()
+
+ # and finally write the solution file
+ node = self.get_solution_node()
+ node.parent.mkdir()
+ Logs.warn('Creating %r' % node)
+ template1 = compile_template(SOLUTION_TEMPLATE)
+ sln_str = template1(self)
+ sln_str = rm_blank_lines(sln_str)
+ node.stealth_write(sln_str)
+
+ def get_solution_node(self):
+ """
+ The solution filename is required when writing the .vcproj files;
+ return self.solution_node, creating it if it does not exist
+ """
+ try:
+ return self.solution_node
+ except AttributeError:
+ pass
+
+ solution_name = getattr(self, 'solution_name', None)
+ if not solution_name:
+ solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '.sln'
+ if os.path.isabs(solution_name):
+ self.solution_node = self.root.make_node(solution_name)
+ else:
+ self.solution_node = self.srcnode.make_node(solution_name)
+ return self.solution_node
+
+ def project_configurations(self):
+ """
+ Helper that returns all the pairs (config,platform)
+ """
+ ret = []
+ for c in self.configurations:
+ for p in self.platforms:
+ ret.append((c, p))
+ return ret
+
+ def collect_targets(self):
+ """
+ Process the list of task generators
+ """
+ for g in self.groups:
+ for tg in g:
+ if not isinstance(tg, TaskGen.task_gen):
+ continue
+
+ if not hasattr(tg, 'msvs_includes'):
+ tg.msvs_includes = tg.to_list(getattr(tg, 'includes', [])) + tg.to_list(getattr(tg, 'export_includes', []))
+ tg.post()
+ if not getattr(tg, 'link_task', None):
+ continue
+
+ p = self.vsnode_target(self, tg)
+ p.collect_source() # delegate this processing
+ p.collect_properties()
+ self.all_projects.append(p)
+
+ def add_aliases(self):
+ """
+ Add a specific target that emulates the "make all" necessary for Visual studio when pressing F7
+ We also add an alias for "make install" (disabled by default)
+ """
+ base = getattr(self, 'projects_dir', None) or self.tg.path
+
+ node_project = base.make_node('build_all_projects' + self.project_extension) # Node
+ p_build = self.vsnode_build_all(self, node_project)
+ p_build.collect_properties()
+ self.all_projects.append(p_build)
+
+ node_project = base.make_node('install_all_projects' + self.project_extension) # Node
+ p_install = self.vsnode_install_all(self, node_project)
+ p_install.collect_properties()
+ self.all_projects.append(p_install)
+
+ node_project = base.make_node('project_view' + self.project_extension) # Node
+ p_view = self.vsnode_project_view(self, node_project)
+ p_view.collect_source()
+ p_view.collect_properties()
+ self.all_projects.append(p_view)
+
+ n = self.vsnode_vsdir(self, make_uuid(self.srcnode.win32path() + 'build_aliases'), "build_aliases")
+ p_build.parent = p_install.parent = p_view.parent = n
+ self.all_projects.append(n)
+
+ def collect_dirs(self):
+ """
+ Create the folder structure in the Visual studio project view
+ """
+ seen = {}
+ def make_parents(proj):
+ # look at a project, try to make a parent
+ if getattr(proj, 'parent', None):
+ # aliases already have parents
+ return
+ x = proj.iter_path
+ if x in seen:
+ proj.parent = seen[x]
+ return
+
+ # There is no vsnode_vsdir for x.
+ # So create a project representing the folder "x"
+ n = proj.parent = seen[x] = self.vsnode_vsdir(self, make_uuid(x.win32path()), x.name)
+ n.iter_path = x.parent
+ self.all_projects.append(n)
+
+ # recurse up to the project directory
+ if x.height() > self.srcnode.height() + 1:
+ make_parents(n)
+
+ for p in self.all_projects[:]: # iterate over a copy of all projects
+ if not getattr(p, 'tg', None):
+ # but only projects that have a task generator
+ continue
+
+ # make a folder for each task generator
+ p.iter_path = p.tg.path
+ make_parents(p)
+
+def wrap_2008(cls):
+ class dec(cls):
+ def __init__(self, *k, **kw):
+ cls.__init__(self, *k, **kw)
+ self.project_template = PROJECT_2008_TEMPLATE
+
+ def display_filter(self):
+
+ root = build_property()
+ root.subfilters = []
+ root.sourcefiles = []
+ root.source = []
+ root.name = ''
+
+ @Utils.run_once
+ def add_path(lst):
+ if not lst:
+ return root
+ child = build_property()
+ child.subfilters = []
+ child.sourcefiles = []
+ child.source = []
+ child.name = lst[-1]
+
+ par = add_path(lst[:-1])
+ par.subfilters.append(child)
+ return child
+
+ for x in self.source:
+ # this crap is for enabling subclasses to override get_filter_name
+ tmp = self.get_filter_name(x.parent)
+ tmp = tmp != '.' and tuple(tmp.split('\\')) or ()
+ par = add_path(tmp)
+ par.source.append(x)
+
+ def display(n):
+ buf = []
+ for x in n.source:
+ buf.append('<File RelativePath="%s" FileType="%s"/>\n' % (xml_escape(x.win32path()), self.get_key(x)))
+ for x in n.subfilters:
+ buf.append('<Filter Name="%s">' % xml_escape(x.name))
+ buf.append(display(x))
+ buf.append('</Filter>')
+ return '\n'.join(buf)
+
+ return display(root)
+
+ def get_key(self, node):
+ """
+ If you do not want to let visual studio use the default file extensions,
+ override this method to return a value:
+ 0: C/C++ Code, 1: C++ Class, 2: C++ Header File, 3: C++ Form,
+ 4: C++ Control, 5: Text File, 6: DEF File, 7: IDL File,
+ 8: Makefile, 9: RGS File, 10: RC File, 11: RES File, 12: XSD File,
+ 13: XML File, 14: HTML File, 15: CSS File, 16: Bitmap, 17: Icon,
+ 18: Resx File, 19: BSC File, 20: XSX File, 21: C++ Web Service,
+ 22: ASAX File, 23: Asp Page, 24: Document, 25: Discovery File,
+ 26: C# File, 27: eFileTypeClassDiagram, 28: MHTML Document,
+ 29: Property Sheet, 30: Cursor, 31: Manifest, 32: eFileTypeRDLC
+ """
+ return ''
+
+ def write(self):
+ Logs.debug('msvs: creating %r' % self.path)
+ template1 = compile_template(self.project_template)
+ proj_str = template1(self)
+ proj_str = rm_blank_lines(proj_str)
+ self.path.stealth_write(proj_str)
+
+ return dec
+
+class msvs_2008_generator(msvs_generator):
+ '''generates a visual studio 2008 solution'''
+ cmd = 'msvs2008'
+ fun = msvs_generator.fun
+
+ def init(self):
+ if not getattr(self, 'project_extension', None):
+ self.project_extension = '_2008.vcproj'
+ if not getattr(self, 'solution_name', None):
+ self.solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '_2008.sln'
+
+ if not getattr(self, 'vsnode_target', None):
+ self.vsnode_target = wrap_2008(vsnode_target)
+ if not getattr(self, 'vsnode_build_all', None):
+ self.vsnode_build_all = wrap_2008(vsnode_build_all)
+ if not getattr(self, 'vsnode_install_all', None):
+ self.vsnode_install_all = wrap_2008(vsnode_install_all)
+ if not getattr(self, 'vsnode_project_view', None):
+ self.vsnode_project_view = wrap_2008(vsnode_project_view)
+
+ msvs_generator.init(self)
+ self.numver = '10.00'
+ self.vsver = '2008'
+
+def options(ctx):
+ """
+ If the msvs option is used, try to detect if the build is made from visual studio
+ """
+ ctx.add_option('--execsolution', action='store', help='when building with visual studio, use a build state file')
+
+ old = BuildContext.execute
+ def override_build_state(ctx):
+ def lock(rm, add):
+ uns = ctx.options.execsolution.replace('.sln', rm)
+ uns = ctx.root.make_node(uns)
+ try:
+ uns.delete()
+ except OSError:
+ pass
+
+ uns = ctx.options.execsolution.replace('.sln', add)
+ uns = ctx.root.make_node(uns)
+ try:
+ uns.write('')
+ except EnvironmentError:
+ pass
+
+ if ctx.options.execsolution:
+ ctx.launch_dir = Context.top_dir # force a build for the whole project (invalid cwd when called by visual studio)
+ lock('.lastbuildstate', '.unsuccessfulbuild')
+ old(ctx)
+ lock('.unsuccessfulbuild', '.lastbuildstate')
+ else:
+ old(ctx)
+ BuildContext.execute = override_build_state
diff --git a/third_party/waf/waflib/extras/netcache_client.py b/third_party/waf/waflib/extras/netcache_client.py
new file mode 100644
index 00000000000..63859b6a207
--- /dev/null
+++ b/third_party/waf/waflib/extras/netcache_client.py
@@ -0,0 +1,389 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011-2015 (ita)
+
+"""
+A client for the network cache (playground/netcache/). Launch the server with:
+./netcache_server, then use it for the builds by adding the following:
+
+ def build(bld):
+ bld.load('netcache_client')
+
+The parameters should be present in the environment in the form:
+ NETCACHE=host:port waf configure build
+
+Or in a more detailed way:
+ NETCACHE_PUSH=host:port NETCACHE_PULL=host:port waf configure build
+
+where:
+ host: host where the server resides, by default localhost
+ port: by default push on 11001 and pull on 12001
+
+Use the server provided in playground/netcache/Netcache.java
+"""
+
+import os, socket, time, atexit, sys
+from waflib import Task, Logs, Utils, Build, Runner
+from waflib.Configure import conf
+
+BUF = 8192 * 16
+HEADER_SIZE = 128
+MODES = ['PUSH', 'PULL', 'PUSH_PULL']
+STALE_TIME = 30 # seconds
+
+GET = 'GET'
+PUT = 'PUT'
+LST = 'LST'
+BYE = 'BYE'
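+
+# Wire-format sketch, as implemented by put_data/read_header below: each
+# request starts with a HEADER_SIZE-byte ASCII header of comma-separated
+# fields padded with spaces, e.g. 'GET,<ssig>,<file-index>'.ljust(HEADER_SIZE);
+# replies announce the payload size in a header ('<size>,...') before the raw bytes.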
+
+all_sigs_in_cache = (0.0, [])
+
+def put_data(conn, data):
+ if sys.hexversion > 0x3000000:
+ data = data.encode('iso8859-1')
+ cnt = 0
+ while cnt < len(data):
+ sent = conn.send(data[cnt:])
+ if sent == 0:
+ raise RuntimeError('connection ended')
+ cnt += sent
+
+push_connections = Runner.Queue(0)
+pull_connections = Runner.Queue(0)
+def get_connection(push=False):
+ # return a new connection... do not forget to release it!
+ try:
+ if push:
+ ret = push_connections.get(block=False)
+ else:
+ ret = pull_connections.get(block=False)
+ except Exception:
+ ret = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ if push:
+ ret.connect(Task.push_addr)
+ else:
+ ret.connect(Task.pull_addr)
+ return ret
+
+def release_connection(conn, msg='', push=False):
+ if conn:
+ if push:
+ push_connections.put(conn)
+ else:
+ pull_connections.put(conn)
+
+def close_connection(conn, msg=''):
+ if conn:
+ data = '%s,%s' % (BYE, msg)
+ try:
+ put_data(conn, data.ljust(HEADER_SIZE))
+ except:
+ pass
+ try:
+ conn.close()
+ except:
+ pass
+
+def close_all():
+ for q in (push_connections, pull_connections):
+ while q.qsize():
+ conn = q.get()
+ try:
+ close_connection(conn)
+ except:
+ # ignore errors when cleaning up
+ pass
+atexit.register(close_all)
+
+def read_header(conn):
+ cnt = 0
+ buf = []
+ while cnt < HEADER_SIZE:
+ data = conn.recv(HEADER_SIZE - cnt)
+ if not data:
+ #import traceback
+ #traceback.print_stack()
+ raise ValueError('connection ended when reading a header %r' % buf)
+ buf.append(data)
+ cnt += len(data)
+ if sys.hexversion > 0x3000000:
+ ret = ''.encode('iso8859-1').join(buf)
+ ret = ret.decode('iso8859-1')
+ else:
+ ret = ''.join(buf)
+ return ret
+
+def check_cache(conn, ssig):
+ """
+ List the files on the server; this is an optimization that assumes
+ concurrent builds are rare
+ """
+ global all_sigs_in_cache
+ if not STALE_TIME:
+ return
+ if time.time() - all_sigs_in_cache[0] > STALE_TIME:
+
+ params = (LST,'')
+ put_data(conn, ','.join(params).ljust(HEADER_SIZE))
+
+ # read what is coming back
+ ret = read_header(conn)
+ size = int(ret.split(',')[0])
+
+ buf = []
+ cnt = 0
+ while cnt < size:
+ data = conn.recv(min(BUF, size-cnt))
+ if not data:
+ raise ValueError('connection ended %r %r' % (cnt, size))
+ buf.append(data)
+ cnt += len(data)
+
+ if sys.hexversion > 0x3000000:
+ ret = ''.encode('iso8859-1').join(buf)
+ ret = ret.decode('iso8859-1')
+ else:
+ ret = ''.join(buf)
+
+ all_sigs_in_cache = (time.time(), ret.splitlines())
+ Logs.debug('netcache: server cache has %r entries' % len(all_sigs_in_cache[1]))
+
+ if ssig not in all_sigs_in_cache[1]:
+ raise ValueError('no file %s in cache' % ssig)
+
+class MissingFile(Exception):
+ pass
+
+def recv_file(conn, ssig, count, p):
+ check_cache(conn, ssig)
+
+ params = (GET, ssig, str(count))
+ put_data(conn, ','.join(params).ljust(HEADER_SIZE))
+ data = read_header(conn)
+
+ size = int(data.split(',')[0])
+
+ if size == -1:
+ raise MissingFile('no file %s - %s in cache' % (ssig, count))
+
+ # get the file, writing immediately
+ # TODO a tmp file would be better
+ f = open(p, 'wb')
+ cnt = 0
+ while cnt < size:
+ data = conn.recv(min(BUF, size-cnt))
+ if not data:
+ raise ValueError('connection ended %r %r' % (cnt, size))
+ f.write(data)
+ cnt += len(data)
+ f.close()
+
+def sock_send(conn, ssig, cnt, p):
+ #print "pushing %r %r %r" % (ssig, cnt, p)
+ size = os.stat(p).st_size
+ params = (PUT, ssig, str(cnt), str(size))
+ put_data(conn, ','.join(params).ljust(HEADER_SIZE))
+ f = open(p, 'rb')
+ cnt = 0
+ while cnt < size:
+ r = f.read(min(BUF, size-cnt))
+ while r:
+ k = conn.send(r)
+ if not k:
+ raise ValueError('connection ended')
+ cnt += k
+ r = r[k:]
+
+def can_retrieve_cache(self):
+ if not Task.pull_addr:
+ return False
+ if not self.outputs:
+ return False
+ self.cached = False
+
+ cnt = 0
+ sig = self.signature()
+ ssig = Utils.to_hex(self.uid() + sig)
+
+ conn = None
+ err = False
+ try:
+ try:
+ conn = get_connection()
+ for node in self.outputs:
+ p = node.abspath()
+ recv_file(conn, ssig, cnt, p)
+ cnt += 1
+ except MissingFile as e:
+ Logs.debug('netcache: file is not in the cache %r' % e)
+ err = True
+
+ except Exception as e:
+ Logs.debug('netcache: could not get the files %r' % e)
+ err = True
+
+ # broken connection? remove this one
+ close_connection(conn)
+ conn = None
+ finally:
+ release_connection(conn)
+ if err:
+ return False
+
+ for node in self.outputs:
+ node.sig = sig
+ #if self.generator.bld.progress_bar < 1:
+ # self.generator.bld.to_log('restoring from cache %r\n' % node.abspath())
+
+ self.cached = True
+ return True
+
+@Utils.run_once
+def put_files_cache(self):
+ if not Task.push_addr:
+ return
+ if not self.outputs:
+ return
+ if getattr(self, 'cached', None):
+ return
+
+ #print "called put_files_cache", id(self)
+ bld = self.generator.bld
+ sig = self.signature()
+ ssig = Utils.to_hex(self.uid() + sig)
+
+ conn = None
+ cnt = 0
+ try:
+ for node in self.outputs:
+ # We could re-create the signature of the task with the signature of the outputs
+ # in practice, this means hashing the output files
+ # this is unnecessary
+ try:
+ if not conn:
+ conn = get_connection(push=True)
+ sock_send(conn, ssig, cnt, node.abspath())
+ except Exception as e:
+ Logs.debug("netcache: could not push the files %r" % e)
+
+ # broken connection? remove this one
+ close_connection(conn)
+ conn = None
+ cnt += 1
+ finally:
+ release_connection(conn, push=True)
+
+ bld.task_sigs[self.uid()] = self.cache_sig
+
+def hash_env_vars(self, env, vars_lst):
+ # reimplement so that the resulting hash does not depend on local paths
+ if not env.table:
+ env = env.parent
+ if not env:
+ return Utils.SIG_NIL
+
+ idx = str(id(env)) + str(vars_lst)
+ try:
+ cache = self.cache_env
+ except AttributeError:
+ cache = self.cache_env = {}
+ else:
+ try:
+ return self.cache_env[idx]
+ except KeyError:
+ pass
+
+ v = str([env[a] for a in vars_lst])
+ v = v.replace(self.srcnode.abspath().__repr__()[:-1], '')
+ m = Utils.md5()
+ m.update(v.encode())
+ ret = m.digest()
+
+ Logs.debug('envhash: %r %r', ret, v)
+
+ cache[idx] = ret
+
+ return ret
+
+def uid(self):
+ # reimplement so that the signature does not depend on local paths
+ try:
+ return self.uid_
+ except AttributeError:
+ m = Utils.md5()
+ src = self.generator.bld.srcnode
+ up = m.update
+ up(self.__class__.__name__.encode())
+ for x in self.inputs + self.outputs:
+ up(x.path_from(src).encode())
+ self.uid_ = m.digest()
+ return self.uid_
+
+
+def make_cached(cls):
+ if getattr(cls, 'nocache', None):
+ return
+
+ m1 = cls.run
+ def run(self):
+ if getattr(self, 'nocache', False):
+ return m1(self)
+ if self.can_retrieve_cache():
+ return 0
+ return m1(self)
+ cls.run = run
+
+ m2 = cls.post_run
+ def post_run(self):
+ if getattr(self, 'nocache', False):
+ return m2(self)
+ bld = self.generator.bld
+ ret = m2(self)
+ if bld.cache_global:
+ self.put_files_cache()
+ if hasattr(self, 'chmod'):
+ for node in self.outputs:
+ os.chmod(node.abspath(), self.chmod)
+ return ret
+ cls.post_run = post_run
+
+@conf
+def setup_netcache(ctx, push_addr, pull_addr):
+ Task.Task.can_retrieve_cache = can_retrieve_cache
+ Task.Task.put_files_cache = put_files_cache
+ Task.Task.uid = uid
+ Task.push_addr = push_addr
+ Task.pull_addr = pull_addr
+ Build.BuildContext.hash_env_vars = hash_env_vars
+ ctx.cache_global = True
+
+ for x in Task.classes.values():
+ make_cached(x)
+
+def build(bld):
+ if 'NETCACHE' not in os.environ and 'NETCACHE_PULL' not in os.environ and 'NETCACHE_PUSH' not in os.environ:
+ Logs.warn('Setting NETCACHE_PULL=127.0.0.1:12001 and NETCACHE_PUSH=127.0.0.1:11001')
+ os.environ['NETCACHE_PULL'] = '127.0.0.1:12001'
+ os.environ['NETCACHE_PUSH'] = '127.0.0.1:11001'
+
+ if 'NETCACHE' in os.environ:
+ if 'NETCACHE_PUSH' not in os.environ:
+ os.environ['NETCACHE_PUSH'] = os.environ['NETCACHE']
+ if 'NETCACHE_PULL' not in os.environ:
+ os.environ['NETCACHE_PULL'] = os.environ['NETCACHE']
+
+ v = os.environ['NETCACHE_PULL']
+ if v:
+ h, p = v.split(':')
+ pull_addr = (h, int(p))
+ else:
+ pull_addr = None
+
+ v = os.environ['NETCACHE_PUSH']
+ if v:
+ h, p = v.split(':')
+ push_addr = (h, int(p))
+ else:
+ push_addr = None
+
+ setup_netcache(bld, push_addr, pull_addr)
diff --git a/third_party/waf/waflib/extras/nobuild.py b/third_party/waf/waflib/extras/nobuild.py
new file mode 100644
index 00000000000..c628af834dd
--- /dev/null
+++ b/third_party/waf/waflib/extras/nobuild.py
@@ -0,0 +1,23 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015 (ita)
+
+"""
+Override the build commands to write empty files.
+This is useful for profiling and evaluating the Python overhead.
+
+To use::
+
+ def build(bld):
+ ...
+ bld.load('nobuild')
+
+"""
+
+from waflib import Task
+def build(bld):
+ def run(self):
+ for x in self.outputs:
+ x.write('')
+ for (name, cls) in Task.classes.items():
+ cls.run = run
diff --git a/third_party/waf/waflib/extras/objcopy.py b/third_party/waf/waflib/extras/objcopy.py
new file mode 100644
index 00000000000..939c2c1291b
--- /dev/null
+++ b/third_party/waf/waflib/extras/objcopy.py
@@ -0,0 +1,51 @@
+#!/usr/bin/python
+# Grygoriy Fuchedzhy 2010
+
+"""
+Support for converting linked targets to ihex, srec or binary files using
+objcopy. Use the 'objcopy' feature in conjunction with the 'cc' or 'cxx'
+feature. The 'objcopy' feature uses the following attributes:
+
+objcopy_bfdname Target object format name (eg. ihex, srec, binary).
+ Defaults to ihex.
+objcopy_target File name used for objcopy output. This defaults to the
+ target name with objcopy_bfdname as extension.
+objcopy_install_path Install path for objcopy_target file. Defaults to ${PREFIX}/firmware.
+objcopy_flags Additional flags passed to objcopy.
+"""
+
+from waflib.Utils import def_attrs
+from waflib import Task
+from waflib.TaskGen import feature, after_method
+
+class objcopy(Task.Task):
+ run_str = '${OBJCOPY} -O ${TARGET_BFDNAME} ${OBJCOPYFLAGS} ${SRC} ${TGT}'
+ color = 'CYAN'
+
+@feature('objcopy')
+@after_method('apply_link')
+def map_objcopy(self):
+ def_attrs(self,
+ objcopy_bfdname = 'ihex',
+ objcopy_target = None,
+ objcopy_install_path = "${PREFIX}/firmware",
+ objcopy_flags = '')
+
+ link_output = self.link_task.outputs[0]
+ if not self.objcopy_target:
+ self.objcopy_target = link_output.change_ext('.' + self.objcopy_bfdname).name
+ task = self.create_task('objcopy', src=link_output, tgt=self.path.find_or_declare(self.objcopy_target))
+
+ task.env.append_unique('TARGET_BFDNAME', self.objcopy_bfdname)
+ try:
+ task.env.append_unique('OBJCOPYFLAGS', getattr(self, 'objcopy_flags'))
+ except AttributeError:
+ pass
+
+ if self.objcopy_install_path:
+ self.bld.install_files(self.objcopy_install_path,
+ task.outputs[0],
+ env=task.env.derive())
+
+def configure(ctx):
+ ctx.find_program('objcopy', var='OBJCOPY', mandatory=True)
diff --git a/third_party/waf/waflib/extras/package.py b/third_party/waf/waflib/extras/package.py
new file mode 100644
index 00000000000..387a3cdc3b4
--- /dev/null
+++ b/third_party/waf/waflib/extras/package.py
@@ -0,0 +1,75 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011
+
+"""
+Obtain packages, unpack them in a location, and add associated uselib variables
+(CFLAGS_pkgname, LIBPATH_pkgname, etc).
+
+The default is to use a Dependencies.txt file in the source directory.
+
+This is a work in progress.
+
+Usage:
+
+def options(opt):
+ opt.load('package')
+
+def configure(conf):
+ conf.load_packages()
+"""
+
+from waflib import Logs
+from waflib.Configure import conf
+
+try:
+ from urllib import request
+except ImportError:
+ from urllib import urlopen
+else:
+ urlopen = request.urlopen
+
+
+CACHEVAR = 'WAFCACHE_PACKAGE'
+
+@conf
+def get_package_cache_dir(self):
+ cache = None
+ if CACHEVAR in self.environ:
+ cache = self.environ[CACHEVAR]
+ cache = self.root.make_node(cache)
+ elif self.env[CACHEVAR]:
+ cache = self.env[CACHEVAR]
+ cache = self.root.make_node(cache)
+ else:
+ cache = self.srcnode.make_node('.wafcache_package')
+ cache.mkdir()
+ return cache
+
+@conf
+def download_archive(self, src, dst):
+ for x in self.env.PACKAGE_REPO:
+ url = '/'.join((x, src))
+ try:
+ web = urlopen(url)
+ try:
+ if web.getcode() != 200:
+ continue
+ except AttributeError:
+ pass
+ except Exception:
+ # on python3 urlopen throws an exception
+ # python 2.3 does not have getcode and throws an exception to fail
+ continue
+ else:
+ tmp = self.root.make_node(dst)
+ tmp.write(web.read())
+ Logs.warn('Downloaded %s from %s' % (tmp.abspath(), url))
+ break
+ else:
+ self.fatal('Could not get the package %s' % src)
+
+@conf
+def load_packages(self):
+ self.get_package_cache_dir()
+ # read the dependencies, get the archives, ..
diff --git a/third_party/waf/waflib/extras/parallel_debug.py b/third_party/waf/waflib/extras/parallel_debug.py
new file mode 100644
index 00000000000..94191250549
--- /dev/null
+++ b/third_party/waf/waflib/extras/parallel_debug.py
@@ -0,0 +1,441 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2007-2010 (ita)
+
+"""
+Debugging helper for parallel compilation, outputs
+a file named pdebug.svg in the source directory::
+
+ def options(opt):
+ opt.load('parallel_debug')
+ def build(bld):
+ ...
+"""
+
+import time, sys, re
+try: from Queue import Queue
+except ImportError: from queue import Queue
+from waflib import Runner, Options, Utils, Task, Logs, Errors
+
+#import random
+#random.seed(100)
+
+SVG_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.0"
+ x="${project.x}" y="${project.y}" width="${project.width}" height="${project.height}" id="svg602" xml:space="preserve">
+
+<style type='text/css' media='screen'>
+ g.over rect { stroke:#FF0000; fill-opacity:0.4 }
+</style>
+
+<script type='text/javascript'><![CDATA[
+var svg = document.getElementsByTagName('svg')[0];
+
+svg.addEventListener('mouseover', function(e) {
+ var g = e.target.parentNode;
+ var x = document.getElementById('r_' + g.id);
+ if (x) {
+ g.setAttribute('class', g.getAttribute('class') + ' over');
+ x.setAttribute('class', x.getAttribute('class') + ' over');
+ showInfo(e, g.id);
+ }
+}, false);
+
+svg.addEventListener('mouseout', function(e) {
+ var g = e.target.parentNode;
+ var x = document.getElementById('r_' + g.id);
+ if (x) {
+ g.setAttribute('class', g.getAttribute('class').replace(' over', ''));
+ x.setAttribute('class', x.getAttribute('class').replace(' over', ''));
+ hideInfo(e);
+ }
+}, false);
+
+function showInfo(evt, txt) {
+ tooltip = document.getElementById('tooltip');
+
+ var t = document.getElementById('tooltiptext');
+ t.firstChild.data = txt;
+
+ var x = evt.clientX + 9;
+ if (x > 250) { x -= t.getComputedTextLength() + 16; }
+ var y = evt.clientY + 20;
+ tooltip.setAttribute("transform", "translate(" + x + "," + y + ")");
+ tooltip.setAttributeNS(null, "visibility", "visible");
+
+ var r = document.getElementById('tooltiprect');
+ r.setAttribute('width', t.getComputedTextLength() + 6);
+}
+
+function hideInfo(evt) {
+ var tooltip = document.getElementById('tooltip');
+ tooltip.setAttributeNS(null,"visibility","hidden");
+}
+]]></script>
+
+<!-- inkscape requires a big rectangle or it will not export the pictures properly -->
+<rect
+ x='${project.x}' y='${project.y}' width='${project.width}' height='${project.height}'
+ style="font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;"
+ />
+
+${if project.title}
+ <text x="${project.title_x}" y="${project.title_y}"
+ style="font-size:15px; text-anchor:middle; font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans">${project.title}</text>
+${endif}
+
+
+${for cls in project.groups}
+ <g id='${cls.classname}'>
+ ${for rect in cls.rects}
+ <rect x='${rect.x}' y='${rect.y}' width='${rect.width}' height='${rect.height}' style="font-size:10;fill:${rect.color};fill-rule:evenodd;stroke:#000000;stroke-width:0.4;" />
+ ${endfor}
+ </g>
+${endfor}
+
+${for info in project.infos}
+ <g id='r_${info.classname}'>
+ <rect x='${info.x}' y='${info.y}' width='${info.width}' height='${info.height}' style="font-size:10;fill:${info.color};fill-rule:evenodd;stroke:#000000;stroke-width:0.4;" />
+ <text x="${info.text_x}" y="${info.text_y}"
+ style="font-size:12px;font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans"
+ >${info.text}</text>
+ </g>
+${endfor}
+
+ <g transform="translate(0,0)" visibility="hidden" id="tooltip">
+ <rect id="tooltiprect" y="-15" x="-3" width="1" height="20" style="stroke:black;fill:#edefc2;stroke-width:1"/>
+ <text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;" />
+ </g>
+
+</svg>
+"""
+
+COMPILE_TEMPLATE = '''def f(project):
+ lst = []
+ def xml_escape(value):
+ return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
+
+ %s
+ return ''.join(lst)
+'''
+reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
+def compile_template(line):
+
+ extr = []
+ def repl(match):
+ g = match.group
+ if g('dollar'): return "$"
+ elif g('backslash'):
+ return "\\"
+ elif g('subst'):
+ extr.append(g('code'))
+ return "<<|@|>>"
+ return None
+
+ line2 = reg_act.sub(repl, line)
+ params = line2.split('<<|@|>>')
+ assert(extr)
+
+
+ indent = 0
+ buf = []
+
+ def app(txt):
+ buf.append(indent * '\t' + txt)
+
+ for x in range(len(extr)):
+ if params[x]:
+ app("lst.append(%r)" % params[x])
+
+ f = extr[x]
+ if f.startswith('if') or f.startswith('for'):
+ app(f + ':')
+ indent += 1
+ elif f.startswith('py:'):
+ app(f[3:])
+ elif f.startswith('endif') or f.startswith('endfor'):
+ indent -= 1
+ elif f.startswith('else') or f.startswith('elif'):
+ indent -= 1
+ app(f + ':')
+ indent += 1
+ elif f.startswith('xml:'):
+ app('lst.append(xml_escape(%s))' % f[4:])
+ else:
+ #app('lst.append((%s) or "cannot find %s")' % (f, f))
+ app('lst.append(str(%s))' % f)
+
+ if extr:
+ if params[-1]:
+ app("lst.append(%r)" % params[-1])
+
+ fun = COMPILE_TEMPLATE % "\n\t".join(buf)
+ # uncomment the following to debug the template
+ #for i, x in enumerate(fun.splitlines()):
+ # print i, x
+ return Task.funex(fun)
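+
+# Minimal usage sketch (hypothetical template and values): compile_template()
+# turns the ${...} mini-language used by SVG_TEMPLATE into a plain function of
+# one object; only the attributes the template references need to exist.
+#
+#	tpl = compile_template('<text>${xml:project.title}</text>')
+#	class P(object):
+#		pass
+#	p = P()
+#	p.title = 'a < b'
+#	print(tpl(p))  # -> <text>a &lt; b</text>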
+
+# red #ff4d4d
+# green #4da74d
+# lila #a751ff
+
+color2code = {
+ 'GREEN' : '#4da74d',
+ 'YELLOW' : '#fefe44',
+ 'PINK' : '#a751ff',
+ 'RED' : '#cc1d1d',
+ 'BLUE' : '#6687bb',
+ 'CYAN' : '#34e2e2',
+}
+
+mp = {}
+info = [] # list of (text,color)
+
+def map_to_color(name):
+ if name in mp:
+ return mp[name]
+ try:
+ cls = Task.classes[name]
+ except KeyError:
+ return color2code['RED']
+ if cls.color in mp:
+ return mp[cls.color]
+ if cls.color in color2code:
+ return color2code[cls.color]
+ return color2code['RED']
+
+def process(self):
+ m = self.master
+ if m.stop:
+ m.out.put(self)
+ return
+
+ self.master.set_running(1, id(Utils.threading.currentThread()), self)
+
+ # remove the task signature immediately before it is executed
+ # in case of failure the task will be executed again
+ try:
+ del self.generator.bld.task_sigs[self.uid()]
+ except:
+ pass
+
+ try:
+ self.generator.bld.returned_tasks.append(self)
+ self.log_display(self.generator.bld)
+ ret = self.run()
+ except Exception:
+ self.err_msg = Utils.ex_stack()
+ self.hasrun = Task.EXCEPTION
+
+ # TODO cleanup
+ m.error_handler(self)
+ m.out.put(self)
+ return
+
+ if ret:
+ self.err_code = ret
+ self.hasrun = Task.CRASHED
+ else:
+ try:
+ self.post_run()
+ except Errors.WafError:
+ pass
+ except Exception:
+ self.err_msg = Utils.ex_stack()
+ self.hasrun = Task.EXCEPTION
+ else:
+ self.hasrun = Task.SUCCESS
+ if self.hasrun != Task.SUCCESS:
+ m.error_handler(self)
+
+ self.master.set_running(-1, id(Utils.threading.currentThread()), self)
+ m.out.put(self)
+Task.TaskBase.process_back = Task.TaskBase.process
+Task.TaskBase.process = process
+
+old_start = Runner.Parallel.start
+def do_start(self):
+ try:
+ Options.options.dband
+ except AttributeError:
+ self.bld.fatal('use def options(opt): opt.load("parallel_debug")!')
+
+ self.taskinfo = Queue()
+ old_start(self)
+ if self.dirty:
+ make_picture(self)
+Runner.Parallel.start = do_start
+
+def set_running(self, by, i, tsk):
+ self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by) )
+Runner.Parallel.set_running = set_running
+
+def name2class(name):
+ return name.replace(' ', '_').replace('.', '_')
+
+def make_picture(producer):
+ # first, cast the parameters
+ if not hasattr(producer.bld, 'path'):
+ return
+
+ tmp = []
+ try:
+ while True:
+ tup = producer.taskinfo.get(False)
+ tmp.append(list(tup))
+ except:
+ pass
+
+ try:
+ ini = float(tmp[0][2])
+ except:
+ return
+
+ if not info:
+ seen = []
+ for x in tmp:
+ name = x[3]
+ if not name in seen:
+ seen.append(name)
+ else:
+ continue
+
+ info.append((name, map_to_color(name)))
+ info.sort(key=lambda x: x[0])
+
+ thread_count = 0
+ acc = []
+ for x in tmp:
+ thread_count += x[6]
+ acc.append("%d %d %f %r %d %d %d" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count))
+
+ data_node = producer.bld.path.make_node('pdebug.dat')
+ data_node.write('\n'.join(acc))
+
+ tmp = [lst[:2] + [float(lst[2]) - ini] + lst[3:] for lst in tmp]
+
+ st = {}
+ for l in tmp:
+ if not l[0] in st:
+ st[l[0]] = len(st.keys())
+ tmp = [ [st[lst[0]]] + lst[1:] for lst in tmp ]
+ THREAD_AMOUNT = len(st.keys())
+
+ st = {}
+ for l in tmp:
+ if not l[1] in st:
+ st[l[1]] = len(st.keys())
+ tmp = [ [lst[0]] + [st[lst[1]]] + lst[2:] for lst in tmp ]
+
+
+ BAND = Options.options.dband
+
+ seen = {}
+ acc = []
+ for x in range(len(tmp)):
+ line = tmp[x]
+ id = line[1]
+
+ if id in seen:
+ continue
+ seen[id] = True
+
+ begin = line[2]
+ thread_id = line[0]
+ for y in range(x + 1, len(tmp)):
+ line = tmp[y]
+ if line[1] == id:
+ end = line[2]
+ #print id, thread_id, begin, end
+ #acc.append( ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) )
+ acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3]) )
+ break
+
+ if Options.options.dmaxtime < 0.1:
+ gwidth = 1
+ for x in tmp:
+ m = BAND * x[2]
+ if m > gwidth:
+ gwidth = m
+ else:
+ gwidth = BAND * Options.options.dmaxtime
+
+ ratio = float(Options.options.dwidth) / gwidth
+ gwidth = Options.options.dwidth
+ gheight = BAND * (THREAD_AMOUNT + len(info) + 1.5)
+
+
+ # simple data model for our template
+ class tobject(object):
+ pass
+
+ model = tobject()
+ model.x = 0
+ model.y = 0
+ model.width = gwidth + 4
+ model.height = gheight + 4
+
+ model.title = Options.options.dtitle
+ model.title_x = gwidth / 2
+	model.title_y = gheight - 5
+
+ groups = {}
+ for (x, y, w, h, clsname) in acc:
+ try:
+ groups[clsname].append((x, y, w, h))
+ except:
+ groups[clsname] = [(x, y, w, h)]
+
+ # groups of rectangles (else js highlighting is slow)
+ model.groups = []
+ for cls in groups:
+ g = tobject()
+ model.groups.append(g)
+ g.classname = name2class(cls)
+ g.rects = []
+ for (x, y, w, h) in groups[cls]:
+ r = tobject()
+ g.rects.append(r)
+ r.x = 2 + x * ratio
+ r.y = 2 + y
+ r.width = w * ratio
+ r.height = h
+ r.color = map_to_color(cls)
+
+ cnt = THREAD_AMOUNT
+
+ # caption
+ model.infos = []
+ for (text, color) in info:
+ inf = tobject()
+ model.infos.append(inf)
+ inf.classname = name2class(text)
+ inf.x = 2 + BAND
+ inf.y = 5 + (cnt + 0.5) * BAND
+ inf.width = BAND/2
+ inf.height = BAND/2
+ inf.color = color
+
+ inf.text = text
+ inf.text_x = 2 + 2 * BAND
+ inf.text_y = 5 + (cnt + 0.5) * BAND + 10
+
+ cnt += 1
+
+ # write the file...
+ template1 = compile_template(SVG_TEMPLATE)
+ txt = template1(model)
+
+ node = producer.bld.path.make_node('pdebug.svg')
+ node.write(txt)
+ Logs.warn('Created the diagram %r' % node.abspath())
+
+def options(opt):
+ opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv),
+ help='title for the svg diagram', dest='dtitle')
+ opt.add_option('--dwidth', action='store', type='int', help='diagram width', default=800, dest='dwidth')
+ opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime')
+ opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband')
+ opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')
diff --git a/third_party/waf/waflib/extras/pch.py b/third_party/waf/waflib/extras/pch.py
new file mode 100644
index 00000000000..8b107ac59da
--- /dev/null
+++ b/third_party/waf/waflib/extras/pch.py
@@ -0,0 +1,148 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Alexander Afanasyev (UCLA), 2014
+
+"""
+Enable precompiled C++ header support (currently only clang++ and g++ are supported)
+
+To use this tool, wscript should look like:
+
+ def options(opt):
+ opt.load('pch')
+		# This will add the `--without-pch` configure option.
+		# Precompiled header support is enabled by default; pass --without-pch at the configure stage to disable it
+
+ def configure(conf):
+ conf.load('pch')
+		# this will set conf.env.WITH_PCH if --without-pch was not given and a supported compiler is used
+		# Unless conf.env.WITH_PCH is set, the precompiled header support is disabled
+
+ def build(bld):
+ bld(features='cxx pch',
+ target='precompiled-headers',
+ name='precompiled-headers',
+ headers='a.h b.h c.h', # headers to pre-compile into `precompiled-headers`
+
+ # Other parameters to compile precompiled headers
+ # includes=...,
+ # export_includes=...,
+ # use=...,
+ # ...
+
+ # Exported parameters will be propagated even if precompiled headers are disabled
+ )
+
+ bld(
+ target='test',
+ features='cxx cxxprogram',
+ source='a.cpp b.cpp d.cpp main.cpp',
+ use='precompiled-headers',
+ )
+
+ # or
+
+ bld(
+ target='test',
+ features='pch cxx cxxprogram',
+ source='a.cpp b.cpp d.cpp main.cpp',
+ headers='a.h b.h c.h',
+ )
+
+Note that the precompiled headers must have include guards (guards against multiple inclusion). If the guards are missing, any benefit of the precompiled header is voided and compilation may fail in some cases.
+"""
+
+import os
+from waflib import Task, TaskGen, Utils
+from waflib.Tools import c_preproc, cxx
+
+
+PCH_COMPILER_OPTIONS = {
+ 'clang++': [['-include'], '.pch', ['-x', 'c++-header']],
+ 'g++': [['-include'], '.gch', ['-x', 'c++-header']],
+}
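+# Layout of each PCH_COMPILER_OPTIONS entry, as consumed by configure() below:
+# [0] flag(s) used to include the precompiled header when compiling sources (CXXPCH_F)
+# [1] file extension of the generated precompiled header (CXXPCH_EXT)
+# [2] flag(s) used to compile a header as a precompiled header (CXXPCH_FLAGS)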
+
+
+def options(opt):
+	opt.add_option('--without-pch', action='store_false', default=True, dest='with_pch', help='''Disable precompiled headers (used by default to speed up compilation with g++ and clang++)''')
+
+def configure(conf):
+	if conf.options.with_pch and conf.env['COMPILER_CXX'] in PCH_COMPILER_OPTIONS:
+ conf.env.WITH_PCH = True
+ flags = PCH_COMPILER_OPTIONS[conf.env['COMPILER_CXX']]
+ conf.env.CXXPCH_F = flags[0]
+ conf.env.CXXPCH_EXT = flags[1]
+ conf.env.CXXPCH_FLAGS = flags[2]
+
+
+@TaskGen.feature('pch')
+@TaskGen.before('process_source')
+def apply_pch(self):
+ if not self.env.WITH_PCH:
+ return
+
+ if getattr(self.bld, 'pch_tasks', None) is None:
+ self.bld.pch_tasks = {}
+
+ if getattr(self, 'headers', None) is None:
+ return
+
+ self.headers = self.to_nodes(self.headers)
+
+ if getattr(self, 'name', None):
+		if self.name in self.bld.pch_tasks:
+			self.bld.fatal("Duplicated 'pch' task with name %r" % self.name)
+
+ out = '%s.%d%s' % (self.target, self.idx, self.env['CXXPCH_EXT'])
+ out = self.path.find_or_declare(out)
+ task = self.create_task('gchx', self.headers, out)
+
+ # target should be an absolute path of `out`, but without precompiled header extension
+ task.target = out.abspath()[:-len(out.suffix())]
+
+ self.pch_task = task
+ if getattr(self, 'name', None):
+ self.bld.pch_tasks[self.name] = task
+
+@TaskGen.feature('cxx')
+@TaskGen.after_method('process_source', 'propagate_uselib_vars')
+def add_pch(self):
+ if not (self.env['WITH_PCH'] and getattr(self, 'use', None) and getattr(self, 'compiled_tasks', None) and getattr(self.bld, 'pch_tasks', None)):
+ return
+
+ pch = None
+ # find pch task, if any
+
+ if getattr(self, 'pch_task', None):
+ pch = self.pch_task
+ else:
+ for use in Utils.to_list(self.use):
+ try:
+ pch = self.bld.pch_tasks[use]
+ except KeyError:
+ pass
+
+ if pch:
+ for x in self.compiled_tasks:
+ x.env.append_value('CXXFLAGS', self.env['CXXPCH_F'] + [pch.target])
+
+class gchx(Task.Task):
+ run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${CXXPCH_FLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXXPCH_F:SRC} ${CXX_SRC_F}${SRC[0].abspath()} ${CXX_TGT_F}${TGT[0].abspath()}'
+ scan = c_preproc.scan
+ color = 'BLUE'
+ ext_out=['.h']
+
+ def runnable_status(self):
+ try:
+ node_deps = self.generator.bld.node_deps[self.uid()]
+ except KeyError:
+ node_deps = []
+ ret = Task.Task.runnable_status(self)
+ if ret == Task.SKIP_ME and self.env.CXX_NAME == 'clang':
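+			# clang validates a precompiled header against the timestamps of the
+			# files it was built from and rejects a stale one, so force a rebuild
+			# whenever an input or a discovered dependency is newer than the PCH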
+ t = os.stat(self.outputs[0].abspath()).st_mtime
+ for n in self.inputs + node_deps:
+ if os.stat(n.abspath()).st_mtime > t:
+ return Task.RUN_ME
+ return ret
diff --git a/third_party/waf/waflib/extras/pep8.py b/third_party/waf/waflib/extras/pep8.py
new file mode 100644
index 00000000000..3709d9be6fc
--- /dev/null
+++ b/third_party/waf/waflib/extras/pep8.py
@@ -0,0 +1,106 @@
+#! /usr/bin/env python
+# encoding: utf-8
+#
+# written by Sylvain Rouquette, 2011
+
+'''
+Install pep8 module:
+$ easy_install pep8
+ or
+$ pip install pep8
+
+To add the boost tool to the waf file:
+$ ./waf-light --tools=compat15,pep8
+ or, if you have waf >= 1.6.2
+$ ./waf update --files=pep8
+
+
+Then add this to your wscript:
+
+[at]extension('.py', 'wscript')
+def run_pep8(self, node):
+ self.create_task('Pep8', node)
+
+'''
+
+import threading
+from waflib import Task, Options
+
+pep8 = __import__('pep8')
+
+
+class Pep8(Task.Task):
+ color = 'PINK'
+ lock = threading.Lock()
+
+ def check_options(self):
+ if pep8.options:
+ return
+ pep8.options = Options.options
+ pep8.options.prog = 'pep8'
+ excl = pep8.options.exclude.split(',')
+ pep8.options.exclude = [s.rstrip('/') for s in excl]
+ if pep8.options.filename:
+ pep8.options.filename = pep8.options.filename.split(',')
+ if pep8.options.select:
+ pep8.options.select = pep8.options.select.split(',')
+ else:
+ pep8.options.select = []
+ if pep8.options.ignore:
+ pep8.options.ignore = pep8.options.ignore.split(',')
+ elif pep8.options.select:
+ # Ignore all checks which are not explicitly selected
+ pep8.options.ignore = ['']
+ elif pep8.options.testsuite or pep8.options.doctest:
+ # For doctest and testsuite, all checks are required
+ pep8.options.ignore = []
+ else:
+ # The default choice: ignore controversial checks
+ pep8.options.ignore = pep8.DEFAULT_IGNORE.split(',')
+ pep8.options.physical_checks = pep8.find_checks('physical_line')
+ pep8.options.logical_checks = pep8.find_checks('logical_line')
+ pep8.options.counters = dict.fromkeys(pep8.BENCHMARK_KEYS, 0)
+ pep8.options.messages = {}
+
+ def run(self):
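+		# the pep8 module keeps its configuration in module-level state
+		# (pep8.options), so the checks are serialized across waf's worker threads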
+ with Pep8.lock:
+ self.check_options()
+ pep8.input_file(self.inputs[0].abspath())
+ return 0 if not pep8.get_count() else -1
+
+
+def options(opt):
+ opt.add_option('-q', '--quiet', default=0, action='count',
+ help="report only file names, or nothing with -qq")
+ opt.add_option('-r', '--repeat', action='store_true',
+ help="show all occurrences of the same error")
+ opt.add_option('--exclude', metavar='patterns',
+ default=pep8.DEFAULT_EXCLUDE,
+ help="exclude files or directories which match these "
+ "comma separated patterns (default: %s)" %
+ pep8.DEFAULT_EXCLUDE,
+ dest='exclude')
+ opt.add_option('--filename', metavar='patterns', default='*.py',
+ help="when parsing directories, only check filenames "
+ "matching these comma separated patterns (default: "
+ "*.py)")
+ opt.add_option('--select', metavar='errors', default='',
+ help="select errors and warnings (e.g. E,W6)")
+ opt.add_option('--ignore', metavar='errors', default='',
+ help="skip errors and warnings (e.g. E4,W)")
+ opt.add_option('--show-source', action='store_true',
+ help="show source code for each error")
+ opt.add_option('--show-pep8', action='store_true',
+ help="show text of PEP 8 for each error")
+ opt.add_option('--statistics', action='store_true',
+ help="count errors and warnings")
+ opt.add_option('--count', action='store_true',
+ help="print total number of errors and warnings "
+ "to standard error and set exit code to 1 if "
+ "total is not null")
+ opt.add_option('--benchmark', action='store_true',
+ help="measure processing speed")
+ opt.add_option('--testsuite', metavar='dir',
+ help="run regression tests from dir")
+ opt.add_option('--doctest', action='store_true',
+ help="run doctest on myself")
diff --git a/third_party/waf/waflib/extras/prefork.py b/third_party/waf/waflib/extras/prefork.py
new file mode 100755
index 00000000000..b912c5b1b7c
--- /dev/null
+++ b/third_party/waf/waflib/extras/prefork.py
@@ -0,0 +1,401 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015 (ita)
+
+"""
+Execute commands through pre-forked servers. This tool creates as many servers as build threads.
+On a benchmark executed on Linux (Kubuntu 14) with 8 virtual cores and an SSD drive::
+
+ ./genbench.py /tmp/build 200 100 15 5
+ waf clean build -j24
+ # no prefork: 2m7.179s
+ # prefork: 0m55.400s
+
+To use::
+
+ def options(opt):
+ # optional, will spawn 40 servers early
+ opt.load('prefork')
+
+ def build(bld):
+ bld.load('prefork')
+ ...
+ more code
+
+The servers and the build process are using a shared nonce to prevent undesirable external connections.
+"""
+
+import os, re, socket, threading, sys, subprocess, time, atexit, traceback, random, signal
+try:
+ import SocketServer
+except ImportError:
+ import socketserver as SocketServer
+try:
+ from queue import Queue
+except ImportError:
+ from Queue import Queue
+try:
+ import cPickle
+except ImportError:
+ import pickle as cPickle
+
+SHARED_KEY = None
+HEADER_SIZE = 64
+
+REQ = 'REQ'
+RES = 'RES'
+BYE = 'BYE'
+
+def make_header(params, cookie=''):
+ header = ','.join(params)
+ header = header.ljust(HEADER_SIZE - len(cookie))
+ assert(len(header) == HEADER_SIZE - len(cookie))
+ header = header + cookie
+ if sys.hexversion > 0x3000000:
+ header = header.encode('iso8859-1')
+ return header
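+
+# Example (hypothetical values): make_header(['REQ', '123'], cookie) returns a
+# fixed 64-byte record, e.g. b'REQ,123<padding><20-byte cookie>', so a peer can
+# read exactly HEADER_SIZE bytes before the pickled payload that follows.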
+
+def safe_compare(x, y):
+ sum = 0
+ for (a, b) in zip(x, y):
+ sum |= ord(a) ^ ord(b)
+ return sum == 0
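+
+# safe_compare scans the whole key instead of returning at the first mismatch,
+# so the comparison runs in near-constant time and leaks less timing
+# information; on Python >= 2.7.7, hmac.compare_digest would be an alternative.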
+
+re_valid_query = re.compile('^[a-zA-Z0-9_, ]+$')
+class req(SocketServer.StreamRequestHandler):
+ def handle(self):
+ try:
+ while self.process_command():
+ pass
+ except KeyboardInterrupt:
+ return
+ except Exception as e:
+ print(e)
+
+ def send_response(self, ret, out, err, exc):
+ if out or err or exc:
+ data = (out, err, exc)
+ data = cPickle.dumps(data, -1)
+ else:
+ data = ''
+
+ params = [RES, str(ret), str(len(data))]
+
+ # no need for the cookie in the response
+ self.wfile.write(make_header(params))
+ if data:
+ self.wfile.write(data)
+ self.wfile.flush()
+
+ def process_command(self):
+ query = self.rfile.read(HEADER_SIZE)
+ if not query:
+ return None
+ #print(len(query))
+ assert(len(query) == HEADER_SIZE)
+ if sys.hexversion > 0x3000000:
+ query = query.decode('iso8859-1')
+
+ # magic cookie
+ key = query[-20:]
+ if not safe_compare(key, SHARED_KEY):
+ print('%r %r' % (key, SHARED_KEY))
+ self.send_response(-1, '', '', 'Invalid key given!')
+ return 'meh'
+
+ query = query[:-20]
+ #print "%r" % query
+ if not re_valid_query.match(query):
+ self.send_response(-1, '', '', 'Invalid query %r' % query)
+ raise ValueError('Invalid query %r' % query)
+
+ query = query.strip().split(',')
+
+ if query[0] == REQ:
+ self.run_command(query[1:])
+ elif query[0] == BYE:
+ raise ValueError('Exit')
+ else:
+ raise ValueError('Invalid query %r' % query)
+ return 'ok'
+
+ def run_command(self, query):
+
+ size = int(query[0])
+ data = self.rfile.read(size)
+ assert(len(data) == size)
+ kw = cPickle.loads(data)
+
+ # run command
+ ret = out = err = exc = None
+ cmd = kw['cmd']
+ del kw['cmd']
+ #print(cmd)
+
+ try:
+ if kw['stdout'] or kw['stderr']:
+ p = subprocess.Popen(cmd, **kw)
+ (out, err) = p.communicate()
+ ret = p.returncode
+ else:
+ ret = subprocess.Popen(cmd, **kw).wait()
+ except KeyboardInterrupt:
+ raise
+ except Exception as e:
+ ret = -1
+ exc = str(e) + traceback.format_exc()
+
+ self.send_response(ret, out, err, exc)
+
+def create_server(conn, cls):
+ # child processes do not need the key, so we remove it from the OS environment
+ global SHARED_KEY
+ SHARED_KEY = os.environ['SHARED_KEY']
+ os.environ['SHARED_KEY'] = ''
+
+ ppid = int(os.environ['PREFORKPID'])
+ def reap():
+ if os.sep != '/':
+ os.waitpid(ppid, 0)
+ else:
+ while 1:
+ try:
+ os.kill(ppid, 0)
+ except OSError:
+ break
+ else:
+ time.sleep(1)
+ os.kill(os.getpid(), signal.SIGKILL)
+ t = threading.Thread(target=reap)
+ t.setDaemon(True)
+ t.start()
+
+ server = SocketServer.TCPServer(conn, req)
+ print(server.server_address[1])
+ sys.stdout.flush()
+ #server.timeout = 6000 # seconds
+ server.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+ try:
+ server.serve_forever(poll_interval=0.001)
+ except KeyboardInterrupt:
+ pass
+
+if __name__ == '__main__':
+ conn = ("127.0.0.1", 0)
+ #print("listening - %r %r\n" % conn)
+ create_server(conn, req)
+else:
+
+ from waflib import Logs, Utils, Runner, Errors, Options
+
+ def init_task_pool(self):
+ # lazy creation, and set a common pool for all task consumers
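+		# each consumer thread is tagged with an index so that exec_command() can
+		# later pick the pre-forked server connection dedicated to it (CONNS[idx])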
+ pool = self.pool = []
+ for i in range(self.numjobs):
+ consumer = Runner.get_pool()
+ pool.append(consumer)
+ consumer.idx = i
+ self.ready = Queue(0)
+ def setq(consumer):
+ consumer.ready = self.ready
+ try:
+ threading.current_thread().idx = consumer.idx
+ except Exception as e:
+ print(e)
+ for x in pool:
+ x.ready.put(setq)
+ return pool
+ Runner.Parallel.init_task_pool = init_task_pool
+
+ def make_server(bld, idx):
+ cmd = [sys.executable, os.path.abspath(__file__)]
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+ return proc
+
+ def make_conn(bld, srv):
+ port = srv.port
+ conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+ conn.connect(('127.0.0.1', port))
+ return conn
+
+
+ SERVERS = []
+ CONNS = []
+ def close_all():
+ global SERVERS, CONNS
+ while CONNS:
+ conn = CONNS.pop()
+ try:
+ conn.close()
+ except:
+ pass
+ while SERVERS:
+ srv = SERVERS.pop()
+ try:
+ srv.kill()
+ except:
+ pass
+ atexit.register(close_all)
+
+ def put_data(conn, data):
+ cnt = 0
+ while cnt < len(data):
+ sent = conn.send(data[cnt:])
+ if sent == 0:
+ raise RuntimeError('connection ended')
+ cnt += sent
+
+ def read_data(conn, siz):
+ cnt = 0
+ buf = []
+ while cnt < siz:
+ data = conn.recv(min(siz - cnt, 1024))
+ if not data:
+ raise RuntimeError('connection ended %r %r' % (cnt, siz))
+ buf.append(data)
+ cnt += len(data)
+ if sys.hexversion > 0x3000000:
+ ret = ''.encode('iso8859-1').join(buf)
+ else:
+ ret = ''.join(buf)
+ return ret
+
+ def exec_command(self, cmd, **kw):
+ if 'stdout' in kw:
+ if kw['stdout'] not in (None, subprocess.PIPE):
+ return self.exec_command_old(cmd, **kw)
+ elif 'stderr' in kw:
+ if kw['stderr'] not in (None, subprocess.PIPE):
+ return self.exec_command_old(cmd, **kw)
+
+ kw['shell'] = isinstance(cmd, str)
+ Logs.debug('runner: %r' % cmd)
+ Logs.debug('runner_env: kw=%s' % kw)
+
+ if self.logger:
+ self.logger.info(cmd)
+
+ if 'stdout' not in kw:
+ kw['stdout'] = subprocess.PIPE
+ if 'stderr' not in kw:
+ kw['stderr'] = subprocess.PIPE
+
+ if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
+ raise Errors.WafError("Program %s not found!" % cmd[0])
+
+ idx = threading.current_thread().idx
+ kw['cmd'] = cmd
+
+ # serialization..
+ #print("sub %r %r" % (idx, cmd))
+ #print("write to %r %r" % (idx, cmd))
+
+ data = cPickle.dumps(kw, -1)
+ params = [REQ, str(len(data))]
+ header = make_header(params, self.SHARED_KEY)
+
+ conn = CONNS[idx]
+
+ put_data(conn, header + data)
+ #put_data(conn, data)
+
+ #print("running %r %r" % (idx, cmd))
+ #print("read from %r %r" % (idx, cmd))
+
+ data = read_data(conn, HEADER_SIZE)
+ if sys.hexversion > 0x3000000:
+ data = data.decode('iso8859-1')
+
+ #print("received %r" % data)
+ lst = data.split(',')
+ ret = int(lst[1])
+ dlen = int(lst[2])
+
+ out = err = None
+ if dlen:
+ data = read_data(conn, dlen)
+ (out, err, exc) = cPickle.loads(data)
+ if exc:
+ raise Errors.WafError('Execution failure: %s' % exc)
+
+ if out:
+ if not isinstance(out, str):
+ out = out.decode(sys.stdout.encoding or 'iso8859-1')
+ if self.logger:
+ self.logger.debug('out: %s' % out)
+ else:
+ Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
+ if err:
+ if not isinstance(err, str):
+ err = err.decode(sys.stdout.encoding or 'iso8859-1')
+ if self.logger:
+ self.logger.error('err: %s' % err)
+ else:
+ Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
+
+ return ret
+
+ def init_key(ctx):
+ try:
+ key = ctx.SHARED_KEY = os.environ['SHARED_KEY']
+ except KeyError:
+ key = "".join([chr(random.SystemRandom().randint(40, 126)) for x in range(20)])
+ os.environ['SHARED_KEY'] = ctx.SHARED_KEY = key
+
+ os.environ['PREFORKPID'] = str(os.getpid())
+ return key
+
+ def init_servers(ctx, maxval):
+ while len(SERVERS) < maxval:
+ i = len(SERVERS)
+ srv = make_server(ctx, i)
+ SERVERS.append(srv)
+ while len(CONNS) < maxval:
+ i = len(CONNS)
+ srv = SERVERS[i]
+
+ # postpone the connection
+ srv.port = int(srv.stdout.readline())
+
+ conn = None
+ for x in range(30):
+ try:
+ conn = make_conn(ctx, srv)
+ break
+ except socket.error:
+ time.sleep(0.01)
+ if not conn:
+ raise ValueError('Could not start the server!')
+ if srv.poll() is not None:
+				Logs.warn('Looks like it is not our server process - concurrent builds are unsupported at this stage')
+ raise ValueError('Could not start the server')
+ CONNS.append(conn)
+
+ def init_smp(self):
+ if not getattr(Options.options, 'smp', getattr(self, 'smp', None)):
+ return
+		cmd = None
+		if Utils.unversioned_sys_platform() in ('freebsd',):
+ pid = os.getpid()
+ cmd = ['cpuset', '-l', '0', '-p', str(pid)]
+ elif Utils.unversioned_sys_platform() in ('linux',):
+ pid = os.getpid()
+ cmd = ['taskset', '-pc', '0', str(pid)]
+ if cmd:
+ self.cmd_and_log(cmd, quiet=0)
+
+ def options(opt):
+ init_key(opt)
+ init_servers(opt, 40)
+ opt.add_option('--pin-process', action='store_true', dest='smp', default=False)
+
+ def build(bld):
+ if bld.cmd == 'clean':
+ return
+
+ init_key(bld)
+ init_servers(bld, bld.jobs)
+ init_smp(bld)
+
+ bld.__class__.exec_command_old = bld.__class__.exec_command
+ bld.__class__.exec_command = exec_command
diff --git a/third_party/waf/waflib/extras/preforkjava.py b/third_party/waf/waflib/extras/preforkjava.py
new file mode 100644
index 00000000000..e93461b4da9
--- /dev/null
+++ b/third_party/waf/waflib/extras/preforkjava.py
@@ -0,0 +1,236 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015 (ita)
+
+# TODO: have the child process terminate if the parent is killed abruptly
+
+import os, socket, threading, sys, subprocess, time, atexit, random
+try:
+ from queue import Queue
+except ImportError:
+ from Queue import Queue
+
+import json as pickle
+
+SHARED_KEY = None
+HEADER_SIZE = 64
+
+REQ = 'REQ'
+RES = 'RES'
+BYE = 'BYE'
+
+def make_header(params, cookie=''):
+ header = ','.join(params)
+ header = header.ljust(HEADER_SIZE - len(cookie))
+ assert(len(header) == HEADER_SIZE - len(cookie))
+ header = header + cookie
+ if sys.hexversion > 0x3000000:
+ header = header.encode('iso8859-1')
+ return header
+
+if 1:
+ from waflib import Logs, Utils, Runner, Errors, Options
+
+ def init_task_pool(self):
+ # lazy creation, and set a common pool for all task consumers
+ pool = self.pool = []
+ for i in range(self.numjobs):
+ consumer = Runner.get_pool()
+ pool.append(consumer)
+ consumer.idx = i
+ self.ready = Queue(0)
+ def setq(consumer):
+ consumer.ready = self.ready
+ try:
+ threading.current_thread().idx = consumer.idx
+ except Exception as e:
+ print(e)
+ for x in pool:
+ x.ready.put(setq)
+ return pool
+ Runner.Parallel.init_task_pool = init_task_pool
+
+ def make_server(bld, idx):
+ top = getattr(bld, 'preforkjava_top', os.path.dirname(os.path.abspath('__file__')))
+ cp = getattr(bld, 'preforkjava_cp', os.path.join(top, 'minimal-json-0.9.3-SNAPSHOT.jar') + os.pathsep + top)
+
+ for x in cp.split(os.pathsep):
+ if x and not os.path.exists(x):
+ Logs.warn('Invalid classpath: %r' % cp)
+ Logs.warn('Set for example bld.preforkjava_cp to /path/to/minimal-json:/path/to/Prefork.class/')
+
+ cwd = getattr(bld, 'preforkjava_cwd', top)
+ port = getattr(bld, 'preforkjava_port', 51200)
+ cmd = getattr(bld, 'preforkjava_cmd', 'java -cp %s%s Prefork %d' % (cp, os.pathsep, port))
+ proc = subprocess.Popen(cmd.split(), shell=False, cwd=cwd)
+ proc.port = port
+ return proc
+
+ def make_conn(bld, srv):
+ #port = PORT + idx
+ port = srv.port
+ conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+ conn.connect(('127.0.0.1', port))
+ return conn
+
+ SERVERS = []
+ CONNS = []
+ def close_all():
+ global SERVERS
+ while SERVERS:
+ srv = SERVERS.pop()
+ #pid = srv.pid
+ try:
+ srv.kill()
+ except Exception:
+ pass
+ atexit.register(close_all)
+
+ def put_data(conn, data):
+ cnt = 0
+ while cnt < len(data):
+ sent = conn.send(data[cnt:])
+ if sent == 0:
+ raise RuntimeError('connection ended')
+ cnt += sent
+
+ def read_data(conn, siz):
+ cnt = 0
+ buf = []
+ while cnt < siz:
+ data = conn.recv(min(siz - cnt, 1024))
+ if not data:
+ raise RuntimeError('connection ended %r %r' % (cnt, siz))
+ buf.append(data)
+ cnt += len(data)
+ if sys.hexversion > 0x3000000:
+ ret = ''.encode('iso8859-1').join(buf)
+ else:
+ ret = ''.join(buf)
+ return ret
+
+ def exec_command(self, cmd, **kw):
+ if 'stdout' in kw:
+ if kw['stdout'] not in (None, subprocess.PIPE):
+ return self.exec_command_old(cmd, **kw)
+ elif 'stderr' in kw:
+ if kw['stderr'] not in (None, subprocess.PIPE):
+ return self.exec_command_old(cmd, **kw)
+
+ kw['shell'] = isinstance(cmd, str)
+ Logs.debug('runner: %r' % cmd)
+ Logs.debug('runner_env: kw=%s' % kw)
+
+ if self.logger:
+ self.logger.info(cmd)
+
+ if 'stdout' not in kw:
+ kw['stdout'] = subprocess.PIPE
+ if 'stderr' not in kw:
+ kw['stderr'] = subprocess.PIPE
+
+ if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
+ raise Errors.WafError("Program %s not found!" % cmd[0])
+
+ idx = threading.current_thread().idx
+ kw['cmd'] = cmd
+
+ data = pickle.dumps(kw)
+ params = [REQ, str(len(data))]
+ header = make_header(params, self.SHARED_KEY)
+
+ conn = CONNS[idx]
+
+ if sys.hexversion > 0x3000000:
+ data = data.encode('iso8859-1')
+ put_data(conn, header + data)
+
+ data = read_data(conn, HEADER_SIZE)
+ if sys.hexversion > 0x3000000:
+ data = data.decode('iso8859-1')
+
+ #print("received %r" % data)
+ lst = data.split(',')
+ ret = int(lst[1])
+ dlen = int(lst[2])
+
+ out = err = None
+ if dlen:
+ data = read_data(conn, dlen)
+ (out, err, exc) = pickle.loads(data)
+ if exc:
+ raise Errors.WafError('Execution failure: %s' % exc)
+
+ if out:
+ if not isinstance(out, str):
+ out = out.decode(sys.stdout.encoding or 'iso8859-1')
+ if self.logger:
+ self.logger.debug('out: %s' % out)
+ else:
+ Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
+ if err:
+ if not isinstance(err, str):
+ err = err.decode(sys.stdout.encoding or 'iso8859-1')
+ if self.logger:
+ self.logger.error('err: %s' % err)
+ else:
+ Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
+
+ return ret
+
+ def init_key(ctx):
+ try:
+ key = ctx.SHARED_KEY = os.environ['SHARED_KEY']
+ except KeyError:
+ key = "".join([chr(random.SystemRandom().randint(40, 126)) for x in range(20)])
+ os.environ['SHARED_KEY'] = ctx.SHARED_KEY = key
+ os.environ['PREFORKPID'] = str(os.getpid())
+ return key
+
+ def init_servers(ctx, maxval):
+ while len(SERVERS) < 1:
+ i = len(SERVERS)
+ srv = make_server(ctx, i)
+ SERVERS.append(srv)
+ while len(CONNS) < maxval:
+ i = len(CONNS)
+ srv = SERVERS[0]
+ conn = None
+ for x in range(30):
+ try:
+ conn = make_conn(ctx, srv)
+ break
+ except socket.error:
+ time.sleep(0.01)
+ if not conn:
+ raise ValueError('Could not start the server!')
+ CONNS.append(conn)
+
+ def init_smp(self):
+ if not getattr(Options.options, 'smp', getattr(self, 'smp', None)):
+ return
+		cmd = None
+		if Utils.unversioned_sys_platform() in ('freebsd',):
+ pid = os.getpid()
+ cmd = ['cpuset', '-l', '0', '-p', str(pid)]
+ elif Utils.unversioned_sys_platform() in ('linux',):
+ pid = os.getpid()
+ cmd = ['taskset', '-pc', '0', str(pid)]
+ if cmd:
+ self.cmd_and_log(cmd, quiet=0)
+
+ def options(opt):
+ opt.add_option('--pin-process', action='store_true', dest='smp', default=False)
+ init_key(opt)
+ init_servers(opt, 40)
+
+ def build(bld):
+ if bld.cmd == 'clean':
+ return
+
+ init_key(bld)
+ init_servers(bld, bld.jobs)
+ init_smp(bld)
+
+ bld.__class__.exec_command_old = bld.__class__.exec_command
+ bld.__class__.exec_command = exec_command
diff --git a/third_party/waf/waflib/extras/preforkunix.py b/third_party/waf/waflib/extras/preforkunix.py
new file mode 100644
index 00000000000..ec9aeeb10e7
--- /dev/null
+++ b/third_party/waf/waflib/extras/preforkunix.py
@@ -0,0 +1,317 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015 (ita)
+
+"""
+A version of prefork.py that uses unix sockets. The advantage is that it does not expose
+connections to the outside. However, it only works on unix-like systems
+and performance can be slightly worse.
+
+To use::
+
+ def options(opt):
+ # recommended, fork new processes before using more memory
+ opt.load('preforkunix')
+
+ def build(bld):
+ bld.load('preforkunix')
+ ...
+ more code
+"""
+
+import os, re, socket, threading, sys, subprocess, atexit, traceback, signal, time
+try:
+ from queue import Queue
+except ImportError:
+ from Queue import Queue
+try:
+ import cPickle
+except ImportError:
+ import pickle as cPickle
+
+HEADER_SIZE = 20
+
+REQ = 'REQ'
+RES = 'RES'
+BYE = 'BYE'
+
+def make_header(params, cookie=''):
+ header = ','.join(params)
+ header = header.ljust(HEADER_SIZE - len(cookie))
+ assert(len(header) == HEADER_SIZE - len(cookie))
+ header = header + cookie
+ if sys.hexversion > 0x3000000:
+ header = header.encode('iso8859-1')
+ return header
+
+re_valid_query = re.compile('^[a-zA-Z0-9_, ]+$')
+if 1:
+ def send_response(conn, ret, out, err, exc):
+ if out or err or exc:
+ data = (out, err, exc)
+ data = cPickle.dumps(data, -1)
+ else:
+ data = ''
+
+ params = [RES, str(ret), str(len(data))]
+
+ # no need for the cookie in the response
+ conn.send(make_header(params))
+ if data:
+ conn.send(data)
+
+ def process_command(conn):
+ query = conn.recv(HEADER_SIZE)
+ if not query:
+ return None
+ #print(len(query))
+ assert(len(query) == HEADER_SIZE)
+ if sys.hexversion > 0x3000000:
+ query = query.decode('iso8859-1')
+
+ #print "%r" % query
+ if not re_valid_query.match(query):
+ send_response(conn, -1, '', '', 'Invalid query %r' % query)
+ raise ValueError('Invalid query %r' % query)
+
+ query = query.strip().split(',')
+
+ if query[0] == REQ:
+ run_command(conn, query[1:])
+ elif query[0] == BYE:
+ raise ValueError('Exit')
+ else:
+ raise ValueError('Invalid query %r' % query)
+ return 'ok'
+
+ def run_command(conn, query):
+
+ size = int(query[0])
+ data = conn.recv(size)
+ assert(len(data) == size)
+ kw = cPickle.loads(data)
+
+ # run command
+ ret = out = err = exc = None
+ cmd = kw['cmd']
+ del kw['cmd']
+ #print(cmd)
+
+ try:
+ if kw['stdout'] or kw['stderr']:
+ p = subprocess.Popen(cmd, **kw)
+ (out, err) = p.communicate()
+ ret = p.returncode
+ else:
+ ret = subprocess.Popen(cmd, **kw).wait()
+ except KeyboardInterrupt:
+ raise
+ except Exception as e:
+ ret = -1
+ exc = str(e) + traceback.format_exc()
+
+ send_response(conn, ret, out, err, exc)
+
+if 1:
+
+ from waflib import Logs, Utils, Runner, Errors, Options
+
+ def init_task_pool(self):
+ # lazy creation, and set a common pool for all task consumers
+ pool = self.pool = []
+ for i in range(self.numjobs):
+ consumer = Runner.get_pool()
+ pool.append(consumer)
+ consumer.idx = i
+ self.ready = Queue(0)
+ def setq(consumer):
+ consumer.ready = self.ready
+ try:
+ threading.current_thread().idx = consumer.idx
+ except Exception as e:
+ print(e)
+ for x in pool:
+ x.ready.put(setq)
+ return pool
+ Runner.Parallel.init_task_pool = init_task_pool
+
+ def make_conn(bld):
+ child_socket, parent_socket = socket.socketpair(socket.AF_UNIX)
+ ppid = os.getpid()
+ pid = os.fork()
+ if pid == 0:
+ parent_socket.close()
+
+ # if the parent crashes, try to exit cleanly
+ def reap():
+ while 1:
+ try:
+ os.kill(ppid, 0)
+ except OSError:
+ break
+ else:
+ time.sleep(1)
+ os.kill(os.getpid(), signal.SIGKILL)
+ t = threading.Thread(target=reap)
+ t.setDaemon(True)
+ t.start()
+
+ # write to child_socket only
+ try:
+ while process_command(child_socket):
+ pass
+ except KeyboardInterrupt:
+ sys.exit(2)
+ else:
+ child_socket.close()
+ return (pid, parent_socket)
+
+ SERVERS = []
+ CONNS = []
+ def close_all():
+		global SERVERS, CONNS
+ while CONNS:
+ conn = CONNS.pop()
+ try:
+ conn.close()
+ except:
+ pass
+ while SERVERS:
+ pid = SERVERS.pop()
+ try:
+ os.kill(pid, 9)
+ except:
+ pass
+ atexit.register(close_all)
+
+ def put_data(conn, data):
+ cnt = 0
+ while cnt < len(data):
+ sent = conn.send(data[cnt:])
+ if sent == 0:
+ raise RuntimeError('connection ended')
+ cnt += sent
+
+ def read_data(conn, siz):
+ cnt = 0
+ buf = []
+ while cnt < siz:
+ data = conn.recv(min(siz - cnt, 1024))
+ if not data:
+ raise RuntimeError('connection ended %r %r' % (cnt, siz))
+ buf.append(data)
+ cnt += len(data)
+ if sys.hexversion > 0x3000000:
+ ret = ''.encode('iso8859-1').join(buf)
+ else:
+ ret = ''.join(buf)
+ return ret
+
+ def exec_command(self, cmd, **kw):
+ if 'stdout' in kw:
+ if kw['stdout'] not in (None, subprocess.PIPE):
+ return self.exec_command_old(cmd, **kw)
+ elif 'stderr' in kw:
+ if kw['stderr'] not in (None, subprocess.PIPE):
+ return self.exec_command_old(cmd, **kw)
+
+ kw['shell'] = isinstance(cmd, str)
+ Logs.debug('runner: %r' % cmd)
+ Logs.debug('runner_env: kw=%s' % kw)
+
+ if self.logger:
+ self.logger.info(cmd)
+
+ if 'stdout' not in kw:
+ kw['stdout'] = subprocess.PIPE
+ if 'stderr' not in kw:
+ kw['stderr'] = subprocess.PIPE
+
+ if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
+ raise Errors.WafError("Program %s not found!" % cmd[0])
+
+ idx = threading.current_thread().idx
+ kw['cmd'] = cmd
+
+ # serialization..
+ #print("sub %r %r" % (idx, cmd))
+ #print("write to %r %r" % (idx, cmd))
+
+ data = cPickle.dumps(kw, -1)
+ params = [REQ, str(len(data))]
+ header = make_header(params)
+
+ conn = CONNS[idx]
+
+ put_data(conn, header + data)
+
+ #print("running %r %r" % (idx, cmd))
+ #print("read from %r %r" % (idx, cmd))
+
+ data = read_data(conn, HEADER_SIZE)
+ if sys.hexversion > 0x3000000:
+ data = data.decode('iso8859-1')
+
+ #print("received %r" % data)
+ lst = data.split(',')
+ ret = int(lst[1])
+ dlen = int(lst[2])
+
+ out = err = None
+ if dlen:
+ data = read_data(conn, dlen)
+ (out, err, exc) = cPickle.loads(data)
+ if exc:
+ raise Errors.WafError('Execution failure: %s' % exc)
+
+ if out:
+ if not isinstance(out, str):
+ out = out.decode(sys.stdout.encoding or 'iso8859-1')
+ if self.logger:
+ self.logger.debug('out: %s' % out)
+ else:
+ Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
+ if err:
+ if not isinstance(err, str):
+ err = err.decode(sys.stdout.encoding or 'iso8859-1')
+ if self.logger:
+ self.logger.error('err: %s' % err)
+ else:
+ Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
+
+ return ret
+
+ def init_smp(self):
+ if not getattr(Options.options, 'smp', getattr(self, 'smp', None)):
+ return
+		cmd = None
+		if Utils.unversioned_sys_platform() in ('freebsd',):
+ pid = os.getpid()
+ cmd = ['cpuset', '-l', '0', '-p', str(pid)]
+ elif Utils.unversioned_sys_platform() in ('linux',):
+ pid = os.getpid()
+ cmd = ['taskset', '-pc', '0', str(pid)]
+ if cmd:
+ self.cmd_and_log(cmd, quiet=0)
+
+ def options(opt):
+ # memory consumption might be at the lowest point while processing options
+ opt.add_option('--pin-process', action='store_true', dest='smp', default=False)
+ if Utils.is_win32 or os.sep != '/':
+ return
+ while len(CONNS) < 30:
+ (pid, conn) = make_conn(opt)
+ SERVERS.append(pid)
+ CONNS.append(conn)
+
+ def build(bld):
+ if Utils.is_win32 or os.sep != '/':
+ return
+ if bld.cmd == 'clean':
+ return
+ while len(CONNS) < bld.jobs:
+ (pid, conn) = make_conn(bld)
+ SERVERS.append(pid)
+ CONNS.append(conn)
+ init_smp(bld)
+ bld.__class__.exec_command_old = bld.__class__.exec_command
+ bld.__class__.exec_command = exec_command
diff --git a/third_party/waf/waflib/extras/print_commands.py b/third_party/waf/waflib/extras/print_commands.py
new file mode 100644
index 00000000000..ada0ee53f8c
--- /dev/null
+++ b/third_party/waf/waflib/extras/print_commands.py
@@ -0,0 +1,84 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+"""
+Illustrate how to override a class method to do something
+
+In this case, print the commands being executed as strings
+(the commands are usually lists, so this can be misleading)
+"""
+
+import sys
+from waflib import Context, Utils, Errors, Logs
+
+def exec_command(self, cmd, **kw):
+	subprocess = Utils.subprocess
+	kw['shell'] = isinstance(cmd, str)
+
+	if kw['shell']:
+		txt = cmd
+	else:
+		txt = ' '.join(repr(x) if ' ' in x else x for x in cmd)
+
+ Logs.debug('runner: %s', txt)
+ Logs.debug('runner_env: kw=%s', kw)
+
+ if self.logger:
+ self.logger.info(cmd)
+
+ if 'stdout' not in kw:
+ kw['stdout'] = subprocess.PIPE
+ if 'stderr' not in kw:
+ kw['stderr'] = subprocess.PIPE
+
+ if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
+ raise Errors.WafError("Program %s not found!" % cmd[0])
+
+ wargs = {}
+ if 'timeout' in kw:
+ if kw['timeout'] is not None:
+ wargs['timeout'] = kw['timeout']
+ del kw['timeout']
+ if 'input' in kw:
+ if kw['input']:
+ wargs['input'] = kw['input']
+ kw['stdin'] = Utils.subprocess.PIPE
+ del kw['input']
+
+ if 'cwd' in kw:
+ if not isinstance(kw['cwd'], str):
+ kw['cwd'] = kw['cwd'].abspath()
+
+ try:
+ if kw['stdout'] or kw['stderr']:
+ p = subprocess.Popen(cmd, **kw)
+ (out, err) = p.communicate(**wargs)
+ ret = p.returncode
+ else:
+ out, err = (None, None)
+ ret = subprocess.Popen(cmd, **kw).wait(**wargs)
+	except Exception as e:
+ raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
+
+ if out:
+ if not isinstance(out, str):
+ out = out.decode(sys.stdout.encoding or 'iso8859-1')
+ if self.logger:
+ self.logger.debug('out: %s' % out)
+ else:
+ Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
+ if err:
+ if not isinstance(err, str):
+ err = err.decode(sys.stdout.encoding or 'iso8859-1')
+ if self.logger:
+ self.logger.error('err: %s' % err)
+ else:
+ Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
+
+ return ret
+
+Context.Context.exec_command = exec_command
diff --git a/third_party/waf/waflib/extras/proc.py b/third_party/waf/waflib/extras/proc.py
new file mode 100644
index 00000000000..fec4c4ccd63
--- /dev/null
+++ b/third_party/waf/waflib/extras/proc.py
@@ -0,0 +1,54 @@
+#! /usr/bin/env python
+# per rosengren 2011
+
+from os import environ, path
+from waflib import TaskGen, Utils
+
+def options(opt):
+ grp = opt.add_option_group('Oracle ProC Options')
+ grp.add_option('--oracle_home', action='store', default=environ.get('PROC_ORACLE'), help='Path to Oracle installation home (has bin/lib)')
+ grp.add_option('--tns_admin', action='store', default=environ.get('TNS_ADMIN'), help='Directory containing server list (TNS_NAMES.ORA)')
+ grp.add_option('--connection', action='store', default='dummy-user/dummy-password@dummy-server', help='Format: user/password@server')
+
+def configure(cnf):
+ env = cnf.env
+ if not env.PROC_ORACLE:
+ env.PROC_ORACLE = cnf.options.oracle_home
+ if not env.PROC_TNS_ADMIN:
+ env.PROC_TNS_ADMIN = cnf.options.tns_admin
+ if not env.PROC_CONNECTION:
+ env.PROC_CONNECTION = cnf.options.connection
+ cnf.find_program('proc', var='PROC', path_list=env.PROC_ORACLE + path.sep + 'bin')
+
+def proc(tsk):
+ env = tsk.env
+ gen = tsk.generator
+ inc_nodes = gen.to_incnodes(Utils.to_list(getattr(gen,'includes',[])) + env['INCLUDES'])
+
+ # FIXME the if-else construct will not work in python 2
+ cmd = (
+ [env.PROC] +
+ ['SQLCHECK=SEMANTICS'] +
+ (['SYS_INCLUDE=(' + ','.join(env.PROC_INCLUDES) + ')']
+ if env.PROC_INCLUDES else []) +
+ ['INCLUDE=(' + ','.join(
+ [i.bldpath() for i in inc_nodes]
+ ) + ')'] +
+ ['userid=' + env.PROC_CONNECTION] +
+ ['INAME=' + tsk.inputs[0].bldpath()] +
+ ['ONAME=' + tsk.outputs[0].bldpath()]
+ )
+ exec_env = {
+ 'ORACLE_HOME': env.PROC_ORACLE,
+ 'LD_LIBRARY_PATH': env.PROC_ORACLE + path.sep + 'lib',
+ }
+ if env.PROC_TNS_ADMIN:
+ exec_env['TNS_ADMIN'] = env.PROC_TNS_ADMIN
+ return tsk.exec_command(cmd, env=exec_env)
+
+TaskGen.declare_chain(
+ name = 'proc',
+ rule = proc,
+ ext_in = '.pc',
+ ext_out = '.c',
+)
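+
+# Usage sketch (assumed target names): after loading this tool, .pc files listed
+# as sources are translated to .c by the chain above before compilation:
+#
+#	def build(bld):
+#		bld.program(source='query.pc main.c', target='app')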
diff --git a/third_party/waf/waflib/extras/protoc.py b/third_party/waf/waflib/extras/protoc.py
new file mode 100644
index 00000000000..97d1ef3c340
--- /dev/null
+++ b/third_party/waf/waflib/extras/protoc.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Philipp Bender, 2012
+# Matt Clarkson, 2012
+
+import re
+from waflib.Task import Task
+from waflib.TaskGen import extension
+
+"""
+A simple tool to integrate protocol buffers into your build system.
+
+Example::
+
+ def configure(conf):
+ conf.load('compiler_cxx cxx protoc')
+
+ def build(bld):
+ bld(
+				features = 'cxx cxxprogram',
+				source = 'main.cpp file1.proto proto/file2.proto',
+				includes = '. proto',
+ target = 'executable')
+
+Notes when using this tool:
+
+- protoc command line parsing is tricky.
+
+ The generated files can be put in subfolders which depend on
+ the order of the include paths.
+
+ Try to be simple when creating task generators
+ containing protoc stuff.
+
+"""
+
+class protoc(Task):
+ # protoc expects the input proto file to be an absolute path.
+ run_str = '${PROTOC} ${PROTOC_FLAGS} ${PROTOC_ST:INCPATHS} ${SRC[0].abspath()}'
+ color = 'BLUE'
+ ext_out = ['.h', 'pb.cc']
+ def scan(self):
+ """
+ Scan .proto dependencies
+ """
+ node = self.inputs[0]
+
+ nodes = []
+ names = []
+ seen = []
+
+ if not node: return (nodes, names)
+
+ def parse_node(node):
+ if node in seen:
+ return
+ seen.append(node)
+ code = node.read().splitlines()
+ for line in code:
+ m = re.search(r'^import\s+"(.*)";.*(//)?.*', line)
+ if m:
+ dep = m.groups()[0]
+ for incpath in self.env.INCPATHS:
+ found = incpath.find_resource(dep)
+ if found:
+ nodes.append(found)
+ parse_node(found)
+ else:
+ names.append(dep)
+
+ parse_node(node)
+ return (nodes, names)
+
+@extension('.proto')
+def process_protoc(self, node):
+ cpp_node = node.change_ext('.pb.cc')
+ hpp_node = node.change_ext('.pb.h')
+ self.create_task('protoc', node, [cpp_node, hpp_node])
+ self.source.append(cpp_node)
+
+ if 'cxx' in self.features and not self.env.PROTOC_FLAGS:
+ #self.env.PROTOC_FLAGS = '--cpp_out=%s' % node.parent.get_bld().abspath() # <- this does not work
+ self.env.PROTOC_FLAGS = '--cpp_out=%s' % node.parent.get_bld().bldpath()
+
+ use = getattr(self, 'use', '')
+ if not 'PROTOBUF' in use:
+ self.use = self.to_list(use) + ['PROTOBUF']
+
+def configure(conf):
+ conf.check_cfg(package="protobuf", uselib_store="PROTOBUF", args=['--cflags', '--libs'])
+ conf.find_program('protoc', var='PROTOC')
+ conf.env.PROTOC_ST = '-I%s'
diff --git a/third_party/waf/waflib/extras/relocation.py b/third_party/waf/waflib/extras/relocation.py
new file mode 100644
index 00000000000..052890b6837
--- /dev/null
+++ b/third_party/waf/waflib/extras/relocation.py
@@ -0,0 +1,83 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+Waf 1.6
+
+Try to detect if the project directory was relocated, and if it was,
+change the node representing the project directory. Just call:
+
+ waf configure build
+
+Note that if the project directory name changes, the signatures for the tasks using
+files in that directory will change, causing a partial build.
+"""
+
+import os
+from waflib import Build, ConfigSet, Task, Utils, Errors
+from waflib.TaskGen import feature, after_method
+
+EXTRA_LOCK = '.old_srcdir'
+
+old1 = Build.BuildContext.store
+def store(self):
+ old1(self)
+ db = os.path.join(self.variant_dir, EXTRA_LOCK)
+ env = ConfigSet.ConfigSet()
+ env.SRCDIR = self.srcnode.abspath()
+ env.store(db)
+Build.BuildContext.store = store
+
+old2 = Build.BuildContext.init_dirs
+def init_dirs(self):
+
+ if not (os.path.isabs(self.top_dir) and os.path.isabs(self.out_dir)):
+ raise Errors.WafError('The project was not configured: run "waf configure" first!')
+
+ srcdir = None
+ db = os.path.join(self.variant_dir, EXTRA_LOCK)
+ env = ConfigSet.ConfigSet()
+ try:
+ env.load(db)
+ srcdir = env.SRCDIR
+ except:
+ pass
+
+ if srcdir:
+ d = self.root.find_node(srcdir)
+ if d and srcdir != self.top_dir and getattr(d, 'children', ''):
+ srcnode = self.root.make_node(self.top_dir)
+ print("relocating the source directory %r -> %r" % (srcdir, self.top_dir))
+ srcnode.children = {}
+
+ for (k, v) in d.children.items():
+ srcnode.children[k] = v
+ v.parent = srcnode
+ d.children = {}
+
+ old2(self)
+
+Build.BuildContext.init_dirs = init_dirs
+
+
+def uid(self):
+ try:
+ return self.uid_
+ except AttributeError:
+ # this is not a real hot zone, but we want to avoid surprises here
+ m = Utils.md5()
+ up = m.update
+ up(self.__class__.__name__.encode())
+ for x in self.inputs + self.outputs:
+ up(x.path_from(x.ctx.srcnode).encode())
+ self.uid_ = m.digest()
+ return self.uid_
+Task.Task.uid = uid
+
+@feature('c', 'cxx', 'd', 'go', 'asm', 'fc', 'includes')
+@after_method('propagate_uselib_vars', 'process_source')
+def apply_incpaths(self):
+ lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
+ self.includes_nodes = lst
+ bld = self.bld
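+	# store build-relative paths for nodes inside the project tree and absolute
+	# paths for the rest; the and/or pair is an old-style conditional expression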
+ self.env['INCPATHS'] = [x.is_child_of(bld.srcnode) and x.path_from(bld.bldnode) or x.abspath() for x in lst]
diff --git a/third_party/waf/waflib/extras/remote.py b/third_party/waf/waflib/extras/remote.py
new file mode 100644
index 00000000000..6aca854f32f
--- /dev/null
+++ b/third_party/waf/waflib/extras/remote.py
@@ -0,0 +1,326 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Remote Builds tool using rsync+ssh
+
+__author__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2013"
+
+"""
+Simple Remote Builds
+********************
+
+This tool is an *experimental* tool (meaning, do not even try to pollute
+the waf bug tracker with bugs in here, contact me directly) providing simple
+remote builds.
+
+It uses rsync and ssh to perform the remote builds.
+It is intended for performing cross-compilation on platforms where
+a cross-compiler is either unavailable (e.g. MacOS, QNX), a specific product
+does not exist (e.g. Windows builds using Visual Studio), or is simply not installed.
+This tool sends the sources and the waf script to the remote host,
+and commands the usual waf execution.
+
+There are alternatives to using this tool, such as setting up shared folders,
+logging on to remote machines, and building on the shared folders.
+Electing one method or another depends on the size of the program.
+
+
+Usage
+=====
+
+1. Set your wscript file so it includes a list of variants,
+ e.g.::
+
+ from waflib import Utils
+ top = '.'
+ out = 'build'
+
+ variants = [
+ 'linux_64_debug',
+ 'linux_64_release',
+ 'linux_32_debug',
+ 'linux_32_release',
+ ]
+
+ from waflib.extras import remote
+
+ def options(opt):
+ # normal stuff from here on
+ opt.load('compiler_c')
+
+ def configure(conf):
+ if not conf.variant:
+ return
+ # normal stuff from here on
+ conf.load('compiler_c')
+
+ def build(bld):
+ if not bld.variant:
+ return
+ # normal stuff from here on
+ bld(features='c cprogram', target='app', source='main.c')
+
+
+2. Build the waf file, so it includes this tool, and put it in the current
+ directory
+
+ .. code:: bash
+
+ ./waf-light --tools=remote
+
+3. Set the host names to access the hosts:
+
+ .. code:: bash
+
+ export REMOTE_QNX=user@kiunix
+
+4. Setup the ssh server and ssh keys
+
+   The ssh key should not be protected by a password, or it will prompt for it every time.
+ Create the key on the client:
+
+ .. code:: bash
+
+ ssh-keygen -t rsa -f foo.rsa
+
+ Then copy foo.rsa.pub to the remote machine (user@kiunix:/home/user/.ssh/authorized_keys),
+ and make sure the permissions are correct (chmod go-w ~ ~/.ssh ~/.ssh/authorized_keys)
+
+ A separate key for the build processes can be set in the environment variable WAF_SSH_KEY.
+   The tool will then use 'ssh-keyscan' to avoid prompting for remote hosts; since
+   this is vulnerable to man-in-the-middle attacks, use it on trusted internal networks only.
+
+ .. code:: bash
+
+ export WAF_SSH_KEY=~/foo.rsa
+
+5. Perform the build:
+
+ .. code:: bash
+
+ waf configure_all build_all --remote
+
+"""
+
+
+import getpass, os, re, sys
+from collections import OrderedDict
+from waflib import Context, Options, Utils, ConfigSet
+
+from waflib.Build import BuildContext, CleanContext, InstallContext, UninstallContext
+from waflib.Configure import ConfigurationContext
+
+
+is_remote = False
+if '--remote' in sys.argv:
+ is_remote = True
+ sys.argv.remove('--remote')
+
+class init(Context.Context):
+ """
+ Generates the *_all commands
+ """
+ cmd = 'init'
+ fun = 'init'
+ def execute(self):
+ for x in list(Context.g_module.variants):
+ self.make_variant(x)
+ lst = ['remote']
+ for k in Options.commands:
+ if k.endswith('_all'):
+ name = k.replace('_all', '')
+ for x in Context.g_module.variants:
+ lst.append('%s_%s' % (name, x))
+ else:
+ lst.append(k)
+ del Options.commands[:]
+ Options.commands += lst
+
+ def make_variant(self, x):
+ for y in (BuildContext, CleanContext, InstallContext, UninstallContext):
+ name = y.__name__.replace('Context','').lower()
+ class tmp(y):
+ cmd = name + '_' + x
+ fun = 'build'
+ variant = x
+ class tmp(ConfigurationContext):
+ cmd = 'configure_' + x
+ fun = 'configure'
+ variant = x
+ def __init__(self, **kw):
+ ConfigurationContext.__init__(self, **kw)
+ self.setenv(x)
+
+class remote(BuildContext):
+ cmd = 'remote'
+ fun = 'build'
+
+ def get_ssh_hosts(self):
+ lst = []
+ for v in Context.g_module.variants:
+ self.env.HOST = self.login_to_host(self.variant_to_login(v))
+ cmd = Utils.subst_vars('${SSH_KEYSCAN} -t rsa,ecdsa ${HOST}', self.env)
+ out, err = self.cmd_and_log(cmd, output=Context.BOTH, quiet=Context.BOTH)
+ lst.append(out.strip())
+ return lst
+
+ def setup_private_ssh_key(self):
+ """
+ When WAF_SSH_KEY points to a private key, a .ssh directory will be created in the build directory
+ Make sure that the ssh key does not prompt for a password
+ """
+ key = os.environ.get('WAF_SSH_KEY', '')
+ if not key:
+ return
+ if not os.path.isfile(key):
+ self.fatal('Key in WAF_SSH_KEY must point to a valid file')
+ self.ssh_dir = os.path.join(self.path.abspath(), 'build', '.ssh')
+ self.ssh_hosts = os.path.join(self.ssh_dir, 'known_hosts')
+ self.ssh_key = os.path.join(self.ssh_dir, os.path.basename(key))
+ self.ssh_config = os.path.join(self.ssh_dir, 'config')
+ for x in self.ssh_hosts, self.ssh_key, self.ssh_config:
+ if not os.path.isfile(x):
+ if not os.path.isdir(self.ssh_dir):
+ os.makedirs(self.ssh_dir)
+ Utils.writef(self.ssh_key, Utils.readf(key), 'wb')
+ os.chmod(self.ssh_key, 0o700)
+
+ Utils.writef(self.ssh_hosts, '\n'.join(self.get_ssh_hosts()))
+ os.chmod(self.ssh_hosts, 0o700)
+
+ Utils.writef(self.ssh_config, 'UserKnownHostsFile %s' % self.ssh_hosts, 'wb')
+ os.chmod(self.ssh_config, 0o700)
+ self.env.SSH_OPTS = ['-F', self.ssh_config, '-i', self.ssh_key]
+ self.env.append_value('RSYNC_SEND_OPTS', '--exclude=build/.ssh')
+
+ def skip_unbuildable_variant(self):
+ # skip variants that cannot be built on this OS
+ # iterate over a copy, as elements may be removed below
+ for k in list(Options.commands):
+ a, _, b = k.partition('_')
+ if b in Context.g_module.variants:
+ c, _, _ = b.partition('_')
+ if c != Utils.unversioned_sys_platform():
+ Options.commands.remove(k)
+
+ def login_to_host(self, login):
+ return re.sub(r'(\w+@)', '', login)
+
+ def variant_to_login(self, variant):
+ """linux_32_debug -> search env.LINUX_32 and then env.LINUX"""
+ x = variant[:variant.rfind('_')]
+ ret = os.environ.get('REMOTE_' + x.upper(), '')
+ if not ret:
+ x = x[:x.find('_')]
+ ret = os.environ.get('REMOTE_' + x.upper(), '')
+ if not ret:
+ ret = '%s@localhost' % getpass.getuser()
+ return ret
+
+ def execute(self):
+ global is_remote
+ if not is_remote:
+ self.skip_unbuildable_variant()
+ else:
+ BuildContext.execute(self)
+
+ def restore(self):
+ self.top_dir = os.path.abspath(Context.g_module.top)
+ self.srcnode = self.root.find_node(self.top_dir)
+ self.path = self.srcnode
+
+ self.out_dir = os.path.join(self.top_dir, Context.g_module.out)
+ self.bldnode = self.root.make_node(self.out_dir)
+ self.bldnode.mkdir()
+
+ self.env = ConfigSet.ConfigSet()
+
+ def extract_groups_of_builds(self):
+ """Return a dict mapping each variants to the commands to build"""
+ self.vgroups = {}
+ for x in reversed(Options.commands):
+ _, _, variant = x.partition('_')
+ if variant in Context.g_module.variants:
+ try:
+ dct = self.vgroups[variant]
+ except KeyError:
+ dct = self.vgroups[variant] = OrderedDict()
+ try:
+ dct[variant].append(x)
+ except KeyError:
+ dct[variant] = [x]
+ Options.commands.remove(x)
+
+ def custom_options(self, login):
+ try:
+ return Context.g_module.host_options[login]
+ except (AttributeError, KeyError):
+ return {}
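+ # host_options is an optional dict in the top-level wscript, mapping a login to
+ # environment overrides applied in recurse() below; a hypothetical example:
+ # host_options = {'user@kiunix': {'RSYNC_SAVE_OPTS': '--exclude=*.o'}}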
+
+ def recurse(self, *k, **kw):
+ self.env.RSYNC = getattr(Context.g_module, 'rsync', 'rsync -a --chmod=u+rwx')
+ self.env.SSH = getattr(Context.g_module, 'ssh', 'ssh')
+ self.env.SSH_KEYSCAN = getattr(Context.g_module, 'ssh_keyscan', 'ssh-keyscan')
+ try:
+ self.env.WAF = getattr(Context.g_module, 'waf')
+ except AttributeError:
+ try:
+ os.stat('waf')
+ except OSError:
+ self.fatal('Put a waf file in the directory (./waf-light --tools=remote)')
+ else:
+ self.env.WAF = './waf'
+
+ self.extract_groups_of_builds()
+ self.setup_private_ssh_key()
+ for k, v in self.vgroups.items():
+ task = self(rule=rsync_and_ssh, always=True)
+ task.env.login = self.variant_to_login(k)
+
+ task.env.commands = []
+ for opt, value in v.items():
+ task.env.commands += value
+ task.env.variant = task.env.commands[0].partition('_')[2]
+ for opt, value in self.custom_options(k).items():
+ task.env[opt] = value
+ self.jobs = len(self.vgroups)
+
+ def make_mkdir_command(self, task):
+ return Utils.subst_vars('${SSH} ${SSH_OPTS} ${login} "rm -fr ${remote_dir} && mkdir -p ${remote_dir}"', task.env)
+
+ def make_send_command(self, task):
+ return Utils.subst_vars('${RSYNC} ${RSYNC_SEND_OPTS} -e "${SSH} ${SSH_OPTS}" ${local_dir} ${login}:${remote_dir}', task.env)
+
+ def make_exec_command(self, task):
+ txt = '''${SSH} ${SSH_OPTS} ${login} "cd ${remote_dir} && ${WAF} ${commands}"'''
+ return Utils.subst_vars(txt, task.env)
+
+ def make_save_command(self, task):
+ return Utils.subst_vars('${RSYNC} ${RSYNC_SAVE_OPTS} -e "${SSH} ${SSH_OPTS}" ${login}:${remote_dir_variant} ${build_dir}', task.env)
+
+def rsync_and_ssh(task):
+
+ # remove a warning
+ task.uid_ = id(task)
+
+ bld = task.generator.bld
+
+ task.env.user, _, _ = task.env.login.partition('@')
+ task.env.hdir = Utils.to_hex(Utils.h_list((task.generator.path.abspath(), task.env.variant)))
+ task.env.remote_dir = '~%s/wafremote/%s' % (task.env.user, task.env.hdir)
+ task.env.local_dir = bld.srcnode.abspath() + '/'
+
+ task.env.remote_dir_variant = '%s/%s/%s' % (task.env.remote_dir, Context.g_module.out, task.env.variant)
+ task.env.build_dir = bld.bldnode.abspath()
+
+ ret = task.exec_command(bld.make_mkdir_command(task))
+ if ret:
+ return ret
+ ret = task.exec_command(bld.make_send_command(task))
+ if ret:
+ return ret
+ ret = task.exec_command(bld.make_exec_command(task))
+ if ret:
+ return ret
+ ret = task.exec_command(bld.make_save_command(task))
+ if ret:
+ return ret
diff --git a/third_party/waf/waflib/extras/review.py b/third_party/waf/waflib/extras/review.py
new file mode 100644
index 00000000000..34796cb6236
--- /dev/null
+++ b/third_party/waf/waflib/extras/review.py
@@ -0,0 +1,321 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Laurent Birtz, 2011
+# moved the code into a separate tool (ita)
+
+"""
+There are several things here:
+- a different command-line option management making options persistent
+- the review command to display the options set
+
+Assumptions:
+- configuration options are not always added to the right group (and do not count on the users to do it...)
+- the options are persistent between the executions (waf options are NOT persistent by design), even for the configuration
+- when the options change, the build is invalidated (forcing a reconfiguration)
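+
+A minimal usage sketch (assuming the tool is loaded from the project wscript):
+
+ def options(opt):
+     opt.load('review')
+
+then:
+
+ $ waf configure --prefix=/opt/app   # the option value is remembered
+ $ waf review                        # display the stored configuration values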
+"""
+
+import os, textwrap, shutil
+from waflib import Logs, Context, ConfigSet, Options, Build, Configure
+
+class Odict(dict):
+ """Ordered dictionary"""
+ def __init__(self, data=None):
+ self._keys = []
+ dict.__init__(self)
+ if data:
+ # we were provided a regular dict
+ if isinstance(data, dict):
+ self.append_from_dict(data)
+
+ # we were provided a tuple list
+ elif isinstance(data, list):
+ self.append_from_plist(data)
+
+ # we were provided invalid input
+ else:
+ raise Exception("expected a dict or a tuple list")
+
+ def append_from_dict(self, data):
+ # use an explicit loop: under Python 3, map() is lazy and would do nothing here
+ for k, v in data.items():
+ self.__setitem__(k, v)
+
+ def append_from_plist(self, plist):
+ for pair in plist:
+ if len(pair) != 2:
+ raise Exception("invalid pairs list")
+ for (k, v) in plist:
+ self.__setitem__(k, v)
+
+ def __delitem__(self, key):
+ if key not in self._keys:
+ raise KeyError(key)
+ dict.__delitem__(self, key)
+ self._keys.remove(key)
+
+ def __setitem__(self, key, item):
+ dict.__setitem__(self, key, item)
+ if key not in self._keys:
+ self._keys.append(key)
+
+ def clear(self):
+ dict.clear(self)
+ self._keys = []
+
+ def copy(self):
+ return Odict(self.plist())
+
+ def items(self):
+ return zip(self._keys, self.values())
+
+ def keys(self):
+ return list(self._keys) # return a copy of the list
+
+ def values(self):
+ return [self.get(k) for k in self._keys] # return a list, like keys()
+
+ def plist(self):
+ p = []
+ for k, v in self.items():
+ p.append( (k, v) )
+ return p
+
+ def __str__(self):
+ buf = []
+ buf.append("{ ")
+ for k, v in self.items():
+ buf.append('%r : %r, ' % (k, v))
+ buf.append("}")
+ return ''.join(buf)
+
+review_options = Odict()
+"""
+Ordered dictionary mapping configuration option names to their optparse option.
+"""
+
+review_defaults = {}
+"""
+Dictionary mapping configuration option names to their default value.
+"""
+
+old_review_set = None
+"""
+Review set containing the configuration values before parsing the command line.
+"""
+
+new_review_set = None
+"""
+Review set containing the configuration values after parsing the command line.
+"""
+
+class OptionsReview(Options.OptionsContext):
+ def __init__(self, **kw):
+ super(OptionsReview, self).__init__(**kw)
+
+ def prepare_config_review(self):
+ """
+ Find the configuration options that are reviewable, detach
+ their default value from their optparse object and store them
+ into the review dictionaries.
+ """
+ gr = self.get_option_group('configure options')
+ for opt in gr.option_list:
+ if opt.action != 'store' or opt.dest in ("out", "top"):
+ continue
+ review_options[opt.dest] = opt
+ review_defaults[opt.dest] = opt.default
+ if opt.dest in gr.defaults:
+ del gr.defaults[opt.dest]
+ opt.default = None
+
+ def parse_args(self):
+ self.prepare_config_review()
+ self.parser.get_option('--prefix').help = 'installation prefix'
+ super(OptionsReview, self).parse_args()
+ Context.create_context('review').refresh_review_set()
+
+class ReviewContext(Context.Context):
+ '''reviews the configuration values'''
+
+ cmd = 'review'
+
+ def __init__(self, **kw):
+ super(ReviewContext, self).__init__(**kw)
+
+ out = Options.options.out
+ if not out:
+ out = getattr(Context.g_module, Context.OUT, None)
+ if not out:
+ out = Options.lockfile.replace('.lock-waf', '')
+ self.build_path = (os.path.isabs(out) and self.root or self.path).make_node(out).abspath()
+ """Path to the build directory"""
+
+ self.cache_path = os.path.join(self.build_path, Build.CACHE_DIR)
+ """Path to the cache directory"""
+
+ self.review_path = os.path.join(self.cache_path, 'review.cache')
+ """Path to the review cache file"""
+
+ def execute(self):
+ """
+ Display and store the review set. Invalidate the cache as required.
+ """
+ if not self.compare_review_set(old_review_set, new_review_set):
+ self.invalidate_cache()
+ self.store_review_set(new_review_set)
+ print(self.display_review_set(new_review_set))
+
+ def invalidate_cache(self):
+ """Invalidate the cache to prevent bad builds."""
+ try:
+ Logs.warn("Removing the cached configuration since the options have changed")
+ shutil.rmtree(self.cache_path)
+ except OSError:
+ pass
+
+ def refresh_review_set(self):
+ """
+ Obtain the old review set and the new review set, and import the new set.
+ """
+ global old_review_set, new_review_set
+ old_review_set = self.load_review_set()
+ new_review_set = self.update_review_set(old_review_set)
+ self.import_review_set(new_review_set)
+
+ def load_review_set(self):
+ """
+ Load and return the review set from the cache if it exists.
+ Otherwise, return an empty set.
+ """
+ if os.path.isfile(self.review_path):
+ return ConfigSet.ConfigSet(self.review_path)
+ return ConfigSet.ConfigSet()
+
+ def store_review_set(self, review_set):
+ """
+ Store the review set specified in the cache.
+ """
+ if not os.path.isdir(self.cache_path):
+ os.makedirs(self.cache_path)
+ review_set.store(self.review_path)
+
+ def update_review_set(self, old_set):
+ """
+ Merge the options passed on the command line with those imported
+ from the previous review set and return the corresponding
+ preview set.
+ """
+
+ # Convert value to string. It's important that 'None' maps to
+ # the empty string.
+ def val_to_str(val):
+ if val is None or val == '':
+ return ''
+ return str(val)
+
+ new_set = ConfigSet.ConfigSet()
+ opt_dict = Options.options.__dict__
+
+ for name in review_options.keys():
+ # the option is specified explicitly on the command line
+ if name in opt_dict:
+ # if the option is the default, pretend it was never specified
+ if val_to_str(opt_dict[name]) != val_to_str(review_defaults[name]):
+ new_set[name] = opt_dict[name]
+ # the option was explicitly specified in a previous command
+ elif name in old_set:
+ new_set[name] = old_set[name]
+
+ return new_set
+
+ def import_review_set(self, review_set):
+ """
+ Import the actual value of the reviewable options in the option
+ dictionary, given the current review set.
+ """
+ for name in review_options.keys():
+ if name in review_set:
+ value = review_set[name]
+ else:
+ value = review_defaults[name]
+ setattr(Options.options, name, value)
+
+ def compare_review_set(self, set1, set2):
+ """
+ Return true if the review sets specified are equal.
+ """
+ if len(set1.keys()) != len(set2.keys()): return False
+ for key in set1.keys():
+ if not key in set2 or set1[key] != set2[key]:
+ return False
+ return True
+
+ def display_review_set(self, review_set):
+ """
+ Return the string representing the review set specified.
+ """
+ term_width = Logs.get_term_cols()
+ lines = []
+ for dest in review_options.keys():
+ opt = review_options[dest]
+ name = ", ".join(opt._short_opts + opt._long_opts)
+ help = opt.help
+ actual = None
+ if dest in review_set: actual = review_set[dest]
+ default = review_defaults[dest]
+ lines.append(self.format_option(name, help, actual, default, term_width))
+ return "Configuration:\n\n" + "\n\n".join(lines) + "\n"
+
+ def format_option(self, name, help, actual, default, term_width):
+ """
+ Return the string representing the option specified.
+ """
+ def val_to_str(val):
+ if val is None or val == '':
+ return "(void)"
+ return str(val)
+
+ max_name_len = 20
+ sep_len = 2
+
+ w = textwrap.TextWrapper()
+ w.width = term_width - 1
+ if w.width < 60: w.width = 60
+
+ out = ""
+
+ # format the help
+ out += w.fill(help) + "\n"
+
+ # format the name
+ name_len = len(name)
+ out += Logs.colors.CYAN + name + Logs.colors.NORMAL
+
+ # set the indentation used when the value wraps to the next line
+ w.subsequent_indent = " ".rjust(max_name_len + sep_len)
+ w.width -= (max_name_len + sep_len)
+
+ # the name string is too long, switch to the next line
+ if name_len > max_name_len:
+ out += "\n" + w.subsequent_indent
+
+ # fill the remaining of the line with spaces
+ else:
+ out += " ".rjust(max_name_len + sep_len - name_len)
+
+ # format the actual value, if there is one
+ if actual is not None:
+ out += Logs.colors.BOLD + w.fill(val_to_str(actual)) + Logs.colors.NORMAL + "\n" + w.subsequent_indent
+
+ # format the default value
+ default_fmt = val_to_str(default)
+ if actual is not None:
+ default_fmt = "default: " + default_fmt
+ out += Logs.colors.NORMAL + w.fill(default_fmt) + Logs.colors.NORMAL
+
+ return out
+
+# Monkey-patch ConfigurationContext.execute() to have it store the review set.
+old_configure_execute = Configure.ConfigurationContext.execute
+def new_configure_execute(self):
+ old_configure_execute(self)
+ Context.create_context('review').store_review_set(new_review_set)
+Configure.ConfigurationContext.execute = new_configure_execute
diff --git a/third_party/waf/waflib/extras/rst.py b/third_party/waf/waflib/extras/rst.py
new file mode 100644
index 00000000000..c8cd7526dcc
--- /dev/null
+++ b/third_party/waf/waflib/extras/rst.py
@@ -0,0 +1,251 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Jérôme Carretero, 2013 (zougloub)
+
+"""
+reStructuredText support (experimental)
+
+Example::
+
+ def configure(conf):
+ conf.load('rst')
+ if not conf.env.RST2HTML:
+ conf.fatal('The program rst2html is required')
+
+ def build(bld):
+ bld(
+ features = 'rst',
+ type = 'rst2html', # rst2html, rst2pdf, ...
+ source = 'index.rst', # mandatory, the source
+ deps = 'image.png', # to give additional non-trivial dependencies
+ )
+
+By default the tool looks for a set of programs in PATH.
+The tools are defined in `rst_progs`.
+To configure with a special program use::
+
+ $ RST2HTML=/path/to/rst2html waf configure
+
+This tool is experimental; don't hesitate to contribute to it.
+
+"""
+
+import re
+from waflib import Node, Utils, Task, Errors, Logs
+from waflib.TaskGen import feature, before_method
+
+rst_progs = "rst2html rst2xetex rst2latex rst2xml rst2pdf rst2s5 rst2man rst2odt rst2rtf".split()
+
+def parse_rst_node(node, nodes, names, seen):
+ # TODO add extensibility, to handle custom rst include tags...
+ if node in seen:
+ return
+ seen.append(node)
+ code = node.read()
+ re_rst = re.compile(r'^\s*\.\. ((?P<subst>\|\S+\|) )?(?P<type>include|image|figure):: (?P<file>.*)$', re.M)
+ for match in re_rst.finditer(code):
+ ipath = match.group('file')
+ itype = match.group('type')
+ Logs.debug("rst: visiting %s: %s" % (itype, ipath))
+ found = node.parent.find_resource(ipath)
+ if found:
+ nodes.append(found)
+ if itype == 'include':
+ parse_rst_node(found, nodes, names, seen)
+ else:
+ names.append(ipath)
+
+class docutils(Task.Task):
+ """
+ Compile a rst file.
+ """
+
+ def scan(self):
+ """
+ A recursive regex-based scanner that finds rst dependencies.
+ """
+
+ nodes = []
+ names = []
+ seen = []
+
+ node = self.inputs[0]
+
+ if not node:
+ return (nodes, names)
+
+ parse_rst_node(node, nodes, names, seen)
+
+ Logs.debug("rst: %s: found the following file deps: %s" % (repr(self), nodes))
+ if names:
+ Logs.warn("rst: %s: could not find the following file deps: %s" % (repr(self), names))
+
+ return (nodes, names)
+
+ def check_status(self, msg, retcode):
+ """
+ Check an exit status and raise an error with a particular message
+
+ :param msg: message to display if the code is non-zero
+ :type msg: string
+ :param retcode: the exit status to check
+ :type retcode: int
+ """
+ if retcode != 0:
+ raise Errors.WafError("%r command exit status %r" % (msg, retcode))
+
+ def run(self):
+ """
+ Runs the rst compilation using docutils
+ """
+ raise NotImplementedError()
+
+class rst2html(docutils):
+ color = 'BLUE'
+
+ def __init__(self, *args, **kw):
+ docutils.__init__(self, *args, **kw)
+ self.command = self.generator.env.RST2HTML
+ self.attributes = ['stylesheet']
+
+ def scan(self):
+ nodes, names = docutils.scan(self)
+
+ for attribute in self.attributes:
+ stylesheet = getattr(self.generator, attribute, None)
+ if stylesheet is not None:
+ ssnode = self.generator.to_nodes(stylesheet)[0]
+ nodes.append(ssnode)
+ Logs.debug("rst: adding dep to %s %s" % (attribute, stylesheet))
+
+ return nodes, names
+
+ def run(self):
+ cwdn = self.outputs[0].parent
+ src = self.inputs[0].path_from(cwdn)
+ dst = self.outputs[0].path_from(cwdn)
+
+ cmd = self.command + [src, dst]
+ cmd += Utils.to_list(getattr(self.generator, 'options', []))
+ for attribute in self.attributes:
+ stylesheet = getattr(self.generator, attribute, None)
+ if stylesheet is not None:
+ stylesheet = self.generator.to_nodes(stylesheet)[0]
+ cmd += ['--%s' % attribute, stylesheet.path_from(cwdn)]
+
+ return self.exec_command(cmd, cwd=cwdn.abspath())
+
+class rst2s5(rst2html):
+ def __init__(self, *args, **kw):
+ rst2html.__init__(self, *args, **kw)
+ self.command = self.generator.env.RST2S5
+ self.attributes = ['stylesheet']
+
+class rst2latex(rst2html):
+ def __init__(self, *args, **kw):
+ rst2html.__init__(self, *args, **kw)
+ self.command = self.generator.env.RST2LATEX
+ self.attributes = ['stylesheet']
+
+class rst2xetex(rst2html):
+ def __init__(self, *args, **kw):
+ rst2html.__init__(self, *args, **kw)
+ self.command = self.generator.env.RST2XETEX
+ self.attributes = ['stylesheet']
+
+class rst2pdf(docutils):
+ color = 'BLUE'
+ def run(self):
+ cwdn = self.outputs[0].parent
+ src = self.inputs[0].path_from(cwdn)
+ dst = self.outputs[0].path_from(cwdn)
+
+ cmd = self.generator.env.RST2PDF + [src, '-o', dst]
+ cmd += Utils.to_list(getattr(self.generator, 'options', []))
+
+ return self.exec_command(cmd, cwd=cwdn.abspath())
+
+
+@feature('rst')
+@before_method('process_source')
+def apply_rst(self):
+ """
+ Create :py:class:`rst` or other rst-related task objects
+ """
+
+ if self.target:
+ if isinstance(self.target, Node.Node):
+ tgt = self.target
+ elif isinstance(self.target, str):
+ tgt = self.path.get_bld().make_node(self.target)
+ else:
+ self.bld.fatal("rst: Don't know how to build target name %s which is not a string or Node for %s" % (self.target, self))
+ else:
+ tgt = None
+
+ tsk_type = getattr(self, 'type', None)
+
+ src = self.to_nodes(self.source)
+ assert len(src) == 1
+ src = src[0]
+
+ if tsk_type is not None and tgt is None:
+ if tsk_type.startswith('rst2'):
+ ext = tsk_type[4:]
+ else:
+ self.bld.fatal("rst: Could not detect the output file extension for %s" % self)
+ tgt = src.change_ext('.%s' % ext)
+ elif tsk_type is None and tgt is not None:
+ out = tgt.name
+ ext = out[out.rfind('.')+1:]
+ self.type = 'rst2' + ext
+ elif tsk_type is not None and tgt is not None:
+ # the user knows what he wants
+ pass
+ else:
+ self.bld.fatal("rst: Need to indicate task type or target name for %s" % self)
+
+ deps_lst = []
+
+ if getattr(self, 'deps', None):
+ deps = self.to_list(self.deps)
+ for filename in deps:
+ n = self.path.find_resource(filename)
+ if not n:
+ self.bld.fatal('Could not find %r for %r' % (filename, self))
+ if n not in deps_lst:
+ deps_lst.append(n)
+
+ try:
+ task = self.create_task(self.type, src, tgt)
+ except KeyError:
+ self.bld.fatal("rst: Task of type %s not implemented (created by %s)" % (self.type, self))
+
+ task.env = self.env
+
+ # add the manual dependencies
+ if deps_lst:
+ try:
+ lst = self.bld.node_deps[task.uid()]
+ for n in deps_lst:
+ if n not in lst:
+ lst.append(n)
+ except KeyError:
+ self.bld.node_deps[task.uid()] = deps_lst
+
+ inst_to = getattr(self, 'install_path', None)
+ if inst_to:
+ self.install_task = self.bld.install_files(inst_to, task.outputs[:], env=self.env)
+
+ self.source = []
+
+def configure(self):
+ """
+ Try to find the rst programs.
+
+ Do not raise any error if they are not found.
+ You'll have to use additional code in configure() to die
+ if programs were not found.
+ """
+ for p in rst_progs:
+ self.find_program(p, mandatory=False)
diff --git a/third_party/waf/waflib/extras/smart_continue.py b/third_party/waf/waflib/extras/smart_continue.py
new file mode 100644
index 00000000000..8c171a8d96c
--- /dev/null
+++ b/third_party/waf/waflib/extras/smart_continue.py
@@ -0,0 +1,80 @@
+#! /usr/bin/env python
+# Thomas Nagy, 2011
+
+# Try to cancel the tasks that cannot run when an error occurs and the option -k is used:
+# 1. direct file dependencies
+# 2. tasks listed in the before/after/ext_in/ext_out attributes
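+#
+# Usage sketch (an assumption: loading the module from the wscript is enough,
+# since it monkey-patches waflib.Runner at import time):
+#
+#   def options(opt):
+#       opt.load('smart_continue')
+#
+# then keep going past errors with: waf build -k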
+
+from waflib import Task, Runner
+
+Task.CANCELED = 4
+
+def cancel_next(self, tsk):
+ if not isinstance(tsk, Task.TaskBase):
+ return
+ if tsk.hasrun >= Task.SKIPPED:
+ # normal execution, no need to do anything here
+ return
+
+ try:
+ canceled_tasks, canceled_nodes = self.canceled_tasks, self.canceled_nodes
+ except AttributeError:
+ canceled_tasks = self.canceled_tasks = set([])
+ canceled_nodes = self.canceled_nodes = set([])
+
+ try:
+ canceled_nodes.update(tsk.outputs)
+ except AttributeError:
+ pass
+
+ try:
+ canceled_tasks.add(tsk)
+ except AttributeError:
+ pass
+
+def get_out(self):
+ tsk = self.out.get()
+ if not self.stop:
+ self.add_more_tasks(tsk)
+ self.count -= 1
+ self.dirty = True
+ self.cancel_next(tsk) # new code
+
+def error_handler(self, tsk):
+ if not self.bld.keep:
+ self.stop = True
+ self.error.append(tsk)
+ self.cancel_next(tsk) # new code
+
+Runner.Parallel.cancel_next = cancel_next
+Runner.Parallel.get_out = get_out
+Runner.Parallel.error_handler = error_handler
+
+def get_next_task(self):
+ tsk = self.get_next_task_smart_continue()
+ if not tsk:
+ return tsk
+
+ try:
+ canceled_tasks, canceled_nodes = self.canceled_tasks, self.canceled_nodes
+ except AttributeError:
+ pass
+ else:
+ # look in the tasks that this one is waiting on
+ # if one of them was canceled, cancel this one too
+ for x in tsk.run_after:
+ if x in canceled_tasks:
+ tsk.hasrun = Task.CANCELED
+ self.cancel_next(tsk)
+ break
+ else:
+ # so far so good, now consider the nodes
+ for x in getattr(tsk, 'inputs', []) + getattr(tsk, 'deps', []):
+ if x in canceled_nodes:
+ tsk.hasrun = Task.CANCELED
+ self.cancel_next(tsk)
+ break
+ return tsk
+
+Runner.Parallel.get_next_task_smart_continue = Runner.Parallel.get_next_task
+Runner.Parallel.get_next_task = get_next_task
diff --git a/third_party/waf/waflib/extras/stale.py b/third_party/waf/waflib/extras/stale.py
new file mode 100644
index 00000000000..a1e63ee58f0
--- /dev/null
+++ b/third_party/waf/waflib/extras/stale.py
@@ -0,0 +1,96 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+# Thomas Nagy, 2006-2015 (ita)
+
+"""
+Add a pre-build hook to remove build files (declared in the system)
+that do not have a corresponding target
+
+This can be used for example to remove targets
+that have been renamed without performing
+a full 'waf clean'
+
+Of course, it will only work if there are no dynamically generated
+nodes/tasks; otherwise the method will have to be modified,
+for example to exclude some folders.
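+
+Usage sketch (an assumption: loading the module from the wscript is enough,
+since it monkey-patches the task scheduler at import time)::
+
+	def options(opt):
+		opt.load('stale')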
+"""
+
+from waflib import Logs, Build
+from waflib.Runner import Parallel
+
+DYNAMIC_EXT = [] # add your non-cleanable files/extensions here
+MOC_H_EXTS = '.cpp .cxx .hpp .hxx .h'.split()
+
+def can_delete(node):
+ """Imperfect moc cleanup which does not look for a Q_OBJECT macro in the files"""
+ if not node.name.endswith('.moc'):
+ return True
+ base = node.name[:-4]
+ p1 = node.parent.get_src()
+ p2 = node.parent.get_bld()
+ for k in MOC_H_EXTS:
+ h_name = base + k
+ n = p1.search_node(h_name)
+ if n:
+ return False
+ n = p2.search_node(h_name)
+ if n:
+ return False
+
+ # foo.cpp.moc, foo.h.moc, etc.
+ if base.endswith(k):
+ return False
+
+ return True
+
+# recursion over the nodes to find the stale files
+def stale_rec(node, nodes):
+ if node.abspath() in node.ctx.env[Build.CFG_FILES]:
+ return
+
+ if getattr(node, 'children', []):
+ for x in node.children.values():
+ if x.name != "c4che":
+ stale_rec(x, nodes)
+ else:
+ for ext in DYNAMIC_EXT:
+ if node.name.endswith(ext):
+ break
+ else:
+ if node not in nodes:
+ if can_delete(node):
+ Logs.warn("Removing stale file -> %s" % node.abspath())
+ node.delete()
+
+old = Parallel.refill_task_list
+def refill_task_list(self):
+ iit = old(self)
+ bld = self.bld
+
+ # execute this operation only once
+ if getattr(self, 'stale_done', False):
+ return iit
+ self.stale_done = True
+
+ # this does not work in partial builds
+ if hasattr(bld, 'options') and bld.options.targets and bld.options.targets != '*':
+ return iit
+
+ # this does not work in dynamic builds
+ if not hasattr(bld, 'post_mode') or bld.post_mode == Build.POST_LAZY:
+ return iit
+
+ # obtain the nodes to use during the build
+ nodes = []
+ for i in range(len(bld.groups)):
+ tasks = bld.get_tasks_group(i)
+ for x in tasks:
+ try:
+ nodes.extend(x.outputs)
+ except AttributeError:
+ pass
+
+ stale_rec(bld.bldnode, nodes)
+ return iit
+
+Parallel.refill_task_list = refill_task_list
diff --git a/third_party/waf/waflib/extras/stracedeps.py b/third_party/waf/waflib/extras/stracedeps.py
new file mode 100644
index 00000000000..f9581a9e908
--- /dev/null
+++ b/third_party/waf/waflib/extras/stracedeps.py
@@ -0,0 +1,173 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015 (ita)
+
+"""
+Execute tasks through strace to obtain dependencies after the process is run. This
+scheme is similar to that of the Fabricate script.
+
+To use::
+
+ def configure(conf):
+ conf.load('stracedeps')
+
+WARNING:
+* This will not work when advanced scanners are needed (qt4/qt5)
+* The overhead of running 'strace' is significant (56s -> 1m29s)
+* It will not work on Windows :-)
+"""
+
+import os, re, threading
+from waflib import Task, Logs, Utils
+
+#TRACECALLS = 'trace=access,chdir,clone,creat,execve,exit_group,fork,lstat,lstat64,mkdir,open,rename,stat,stat64,symlink,vfork'
+TRACECALLS = 'trace=process,file'
+
+BANNED = ('/tmp', '/proc', '/sys', '/dev')
+
+s_process = r'(?:clone|fork|vfork)\(.*?(?P<npid>\d+)'
+s_file = r'(?P<call>\w+)\("(?P<path>([^"\\]|\\.)*)"(.*)'
+re_lines = re.compile(r'^(?P<pid>\d+)\s+(?:(?:%s)|(?:%s))\r*$' % (s_file, s_process), re.IGNORECASE | re.MULTILINE)
+strace_lock = threading.Lock()
+
+def configure(conf):
+ conf.find_program('strace')
+
+def task_method(func):
+ # Decorator function to bind/replace methods on the base Task class
+ #
+ # The methods Task.exec_command and Task.sig_implicit_deps already exist and are rarely overridden
+ # we thus expect that we are the only ones doing this
+ try:
+ setattr(Task.Task, 'nostrace_%s' % func.__name__, getattr(Task.Task, func.__name__))
+ except AttributeError:
+ pass
+ setattr(Task.Task, func.__name__, func)
+ return func
+
+@task_method
+def get_strace_file(self):
+ try:
+ return self.strace_file
+ except AttributeError:
+ pass
+
+ if self.outputs:
+ ret = self.outputs[0].abspath() + '.strace'
+ else:
+ ret = '%s%s%d%s' % (self.generator.bld.bldnode.abspath(), os.sep, id(self), '.strace')
+ self.strace_file = ret
+ return ret
+
+@task_method
+def get_strace_args(self):
+ return (self.env.STRACE or ['strace']) + ['-e', TRACECALLS, '-f', '-o', self.get_strace_file()]
+
+@task_method
+def exec_command(self, cmd, **kw):
+ bld = self.generator.bld
+ try:
+ if not kw.get('cwd', None):
+ kw['cwd'] = bld.cwd
+ except AttributeError:
+ bld.cwd = kw['cwd'] = bld.variant_dir
+
+ args = self.get_strace_args()
+ fname = self.get_strace_file()
+ if isinstance(cmd, list):
+ cmd = args + cmd
+ else:
+ cmd = '%s %s' % (' '.join(args), cmd)
+
+ ret = bld.exec_command(cmd, **kw)
+ if not ret:
+ self.parse_strace_deps(fname, kw['cwd'])
+ return ret
+
+@task_method
+def sig_implicit_deps(self):
+ # bypass the scanner functions
+ return
+
+@task_method
+def parse_strace_deps(self, path, cwd):
+ # uncomment the following line to disable the dependencies and force a file scan
+ # return
+ try:
+ cnt = Utils.readf(path)
+ finally:
+ try:
+ os.remove(path)
+ except OSError:
+ pass
+
+ nodes = []
+ bld = self.generator.bld
+ try:
+ cache = bld.strace_cache
+ except AttributeError:
+ cache = bld.strace_cache = {}
+
+ # chdir and relative paths
+ pid_to_cwd = {}
+
+ global BANNED
+ done = set([])
+ for m in re.finditer(re_lines, cnt):
+ # scraping the output of strace
+ pid = m.group('pid')
+ if m.group('npid'):
+ npid = m.group('npid')
+ pid_to_cwd[npid] = pid_to_cwd.get(pid, cwd)
+ continue
+
+ p = m.group('path').replace('\\"', '"')
+
+ if p == '.' or m.group().find('= -1 ENOENT') > -1:
+ # just to speed it up a bit
+ continue
+
+ if not os.path.isabs(p):
+ p = os.path.join(pid_to_cwd.get(pid, cwd), p)
+
+ call = m.group('call')
+ if call == 'chdir':
+ pid_to_cwd[pid] = p
+ continue
+
+ if p in done:
+ continue
+ done.add(p)
+
+ for x in BANNED:
+ if p.startswith(x):
+ break
+ else:
+ if p.endswith('/') or os.path.isdir(p):
+ continue
+
+ try:
+ node = cache[p]
+ except KeyError:
+ strace_lock.acquire()
+ try:
+ cache[p] = node = bld.root.find_node(p)
+ finally:
+ strace_lock.release()
+ if not node:
+ # either outside of the project tree, or cached as None by a previous lookup
+ continue
+ nodes.append(node)
+
+ # record the dependencies then force the task signature recalculation for next time
+ if Logs.verbose:
+ Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))
+ bld = self.generator.bld
+ bld.node_deps[self.uid()] = nodes
+ bld.raw_deps[self.uid()] = []
+ try:
+ del self.cache_sig
+ except AttributeError:
+ pass
+ self.signature()
diff --git a/third_party/waf/waflib/extras/swig.py b/third_party/waf/waflib/extras/swig.py
new file mode 100644
index 00000000000..b654db7f211
--- /dev/null
+++ b/third_party/waf/waflib/extras/swig.py
@@ -0,0 +1,178 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+# Petar Forai
+# Thomas Nagy 2008-2010 (ita)
+
+import re
+from waflib import Task, Logs
+from waflib.TaskGen import extension
+from waflib.Configure import conf
+from waflib.Tools import c_preproc
+
+"""
+tasks have to be added dynamically:
+- swig interface files may be created at runtime
+- the module name may be unknown in advance
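+
+A hedged usage sketch for building a python extension (assumptions: the standard
+waf python tool provides the 'pyext' feature, and foo.i declares '%module foo')::
+
+	def configure(conf):
+		conf.load('compiler_cxx python swig')
+		conf.check_python_headers()
+
+	def build(bld):
+		bld(features='cxx cxxshlib pyext', source='foo.i',
+			swig_flags='-c++ -python', target='_foo')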
+"""
+
+SWIG_EXTS = ['.swig', '.i']
+
+re_module = re.compile(r'%module(?:\s*\(.*\))?\s+(.+)', re.M)
+
+re_1 = re.compile(r'^%module.*?\s+([\w]+)\s*?$', re.M)
+re_2 = re.compile('[#%]include [<"](.*)[">]', re.M)
+
+class swig(Task.Task):
+ color = 'BLUE'
+ run_str = '${SWIG} ${SWIGFLAGS} ${SWIGPATH_ST:INCPATHS} ${SWIGDEF_ST:DEFINES} ${SRC}'
+ ext_out = ['.h'] # might produce .h files although it is not mandatory
+ vars = ['SWIG_VERSION', 'SWIGDEPS']
+
+ def runnable_status(self):
+ for t in self.run_after:
+ if not t.hasrun:
+ return Task.ASK_LATER
+
+ if not getattr(self, 'init_outputs', None):
+ self.init_outputs = True
+ if not getattr(self, 'module', None):
+ # search the module name
+ txt = self.inputs[0].read()
+ m = re_module.search(txt)
+ if not m:
+ raise ValueError("could not find the swig module name")
+ self.module = m.group(1)
+
+ swig_c(self)
+
+ # add the language-specific output files as nodes
+ # call funs in the dict swig_langs
+ for x in self.env['SWIGFLAGS']:
+ # obtain the language
+ x = x[1:]
+ try:
+ fun = swig_langs[x]
+ except KeyError:
+ pass
+ else:
+ fun(self)
+
+ return super(swig, self).runnable_status()
+
+ def scan(self):
+ "scan for swig dependencies, climb the .i files"
+ lst_src = []
+
+ seen = []
+ to_see = [self.inputs[0]]
+
+ while to_see:
+ node = to_see.pop(0)
+ if node in seen:
+ continue
+ seen.append(node)
+ lst_src.append(node)
+
+ # read the file
+ code = node.read()
+ code = c_preproc.re_nl.sub('', code)
+ code = c_preproc.re_cpp.sub(c_preproc.repl, code)
+
+ # find .i files and project headers
+ names = re_2.findall(code)
+ for n in names:
+ for d in self.generator.includes_nodes + [node.parent]:
+ u = d.find_resource(n)
+ if u:
+ to_see.append(u)
+ break
+ else:
+ Logs.warn('could not find %r' % n)
+
+ return (lst_src, [])
+
+# provide additional language processing
+swig_langs = {}
+def swigf(fun):
+ swig_langs[fun.__name__.replace('swig_', '')] = fun
+swig.swigf = swigf
+
+def swig_c(self):
+ ext = '.swigwrap_%d.c' % self.generator.idx
+ flags = self.env['SWIGFLAGS']
+ if '-c++' in flags:
+ ext += 'xx'
+ out_node = self.inputs[0].parent.find_or_declare(self.module + ext)
+
+ if '-c++' in flags:
+ c_tsk = self.generator.cxx_hook(out_node)
+ else:
+ c_tsk = self.generator.c_hook(out_node)
+
+ c_tsk.set_run_after(self)
+
+ ge = self.generator.bld.producer
+ ge.outstanding.insert(0, c_tsk)
+ ge.total += 1
+
+ try:
+ ltask = self.generator.link_task
+ except AttributeError:
+ pass
+ else:
+ ltask.set_run_after(c_tsk)
+ ltask.inputs.append(c_tsk.outputs[0])
+
+ self.outputs.append(out_node)
+
+ if '-o' not in self.env['SWIGFLAGS']:
+ self.env.append_value('SWIGFLAGS', ['-o', self.outputs[0].abspath()])
+
+@swigf
+def swig_python(tsk):
+ node = tsk.inputs[0].parent
+ if tsk.outdir:
+ node = tsk.outdir
+ tsk.set_outputs(node.find_or_declare(tsk.module+'.py'))
+
+@swigf
+def swig_ocaml(tsk):
+ node = tsk.inputs[0].parent
+ if tsk.outdir:
+ node = tsk.outdir
+ tsk.set_outputs(node.find_or_declare(tsk.module+'.ml'))
+ tsk.set_outputs(node.find_or_declare(tsk.module+'.mli'))
+
+@extension(*SWIG_EXTS)
+def i_file(self, node):
+ # the task instance
+ tsk = self.create_task('swig')
+ tsk.set_inputs(node)
+ tsk.module = getattr(self, 'swig_module', None)
+
+ flags = self.to_list(getattr(self, 'swig_flags', []))
+ tsk.env.append_value('SWIGFLAGS', flags)
+
+ tsk.outdir = None
+ if '-outdir' in flags:
+ outdir = flags[flags.index('-outdir')+1]
+ outdir = tsk.generator.bld.bldnode.make_node(outdir)
+ outdir.mkdir()
+ tsk.outdir = outdir
+
+@conf
+def check_swig_version(self):
+ """Returns a tuple representing the swig version, like (1,3,28)"""
+ reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)
+ swig_out = self.cmd_and_log(self.env.SWIG + ['-version'])
+
+ swigver = tuple([int(s) for s in reg_swig.findall(swig_out)[0].split('.')])
+ self.env['SWIG_VERSION'] = swigver
+ msg = 'Checking for swig version'
+ self.msg(msg, '.'.join(map(str, swigver)))
+ return swigver
+
+def configure(conf):
+ conf.find_program('swig', var='SWIG')
+ conf.env.SWIGPATH_ST = '-I%s'
+ conf.env.SWIGDEF_ST = '-D%s'
diff --git a/third_party/waf/waflib/extras/syms.py b/third_party/waf/waflib/extras/syms.py
new file mode 100644
index 00000000000..d2efd993452
--- /dev/null
+++ b/third_party/waf/waflib/extras/syms.py
@@ -0,0 +1,86 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+This tool supports the export_symbols_regex attribute, used to select the symbols
+exported by a shared library: by default, gcc exports all symbols and msvc exports none.
+To use the tool, do something like::
+
+	def build(ctx):
+		ctx(features='c cshlib syms', source='a.c b.c', export_symbols_regex='mylib_.*', target='testlib')
+
+Only the symbols matching 'mylib_.*' will be exported.
+"""
+
+import os
+import re
+from waflib.Context import STDOUT
+from waflib.Task import Task
+from waflib.Errors import WafError
+from waflib.TaskGen import feature, after_method
+
+class gen_sym(Task):
+ def run(self):
+ obj = self.inputs[0]
+ kw = {}
+ if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
+ re_nm = re.compile(r'External\s+\|\s+_(' + self.generator.export_symbols_regex + r')\b')
+
+ cmd = (self.env.DUMPBIN or ['dumpbin']) + ['/symbols', obj.abspath()]
+
+ # Dumpbin requires custom environment sniffed out by msvc.py earlier
+ if self.env['PATH']:
+ env = dict(self.env.env or os.environ)
+ env.update(PATH = os.pathsep.join(self.env['PATH']))
+ kw['env'] = env
+
+ else:
+ if self.env.DEST_BINFMT == 'pe': #gcc uses nm, and has a preceding _ on windows
+ re_nm = re.compile(r'T\s+_(' + self.generator.export_symbols_regex + r')\b')
+ elif self.env.DEST_BINFMT == 'mac-o':
+ re_nm = re.compile(r'T\s+(_?' + self.generator.export_symbols_regex + r')\b')
+ else:
+ re_nm = re.compile(r'T\s+(' + self.generator.export_symbols_regex + r')\b')
+ cmd = [self.env.NM[0] or 'nm', '-g', obj.abspath()]
+ syms = re_nm.findall(self.generator.bld.cmd_and_log(cmd, quiet=STDOUT, **kw))
+ self.outputs[0].write('%r' % syms)
+
+class compile_sym(Task):
+ def run(self):
+ syms = {}
+ for x in self.inputs:
+ slist = eval(x.read())
+ for s in slist:
+ syms[s] = 1
+ lsyms = list(syms.keys())
+ lsyms.sort()
+ if self.env.DEST_BINFMT == 'pe':
+ self.outputs[0].write('EXPORTS\n' + '\n'.join(lsyms))
+ elif self.env.DEST_BINFMT == 'elf':
+ self.outputs[0].write('{ global:\n' + ';\n'.join(lsyms) + ";\nlocal: *; };\n")
+ elif self.env.DEST_BINFMT == 'mac-o':
+ self.outputs[0].write('\n'.join(lsyms) + '\n')
+ else:
+ raise WafError('NotImplemented')
+
+@feature('syms')
+@after_method('process_source', 'process_use', 'apply_link', 'process_uselib_local')
+def do_the_symbol_stuff(self):
+ ins = [x.outputs[0] for x in self.compiled_tasks]
+ self.gen_sym_tasks = [self.create_task('gen_sym', x, x.change_ext('.%d.sym' % self.idx)) for x in ins]
+
+ tsk = self.create_task('compile_sym',
+ [x.outputs[0] for x in self.gen_sym_tasks],
+ self.path.find_or_declare(getattr(self, 'sym_filename', self.target + '.def')))
+ self.link_task.set_run_after(tsk)
+ self.link_task.dep_nodes.append(tsk.outputs[0])
+ if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
+ self.link_task.env.append_value('LINKFLAGS', ['/def:' + tsk.outputs[0].bldpath()])
+ elif self.env.DEST_BINFMT == 'pe': #gcc on windows takes *.def as an additional input
+ self.link_task.inputs.append(tsk.outputs[0])
+ elif self.env.DEST_BINFMT == 'elf':
+ self.link_task.env.append_value('LINKFLAGS', ['-Wl,-version-script', '-Wl,' + tsk.outputs[0].bldpath()])
+ elif self.env.DEST_BINFMT == 'mac-o':
+ self.link_task.env.append_value('LINKFLAGS', ['-Wl,-exported_symbols_list,' + tsk.outputs[0].bldpath()])
+ else:
+ raise WafError('NotImplemented')
diff --git a/third_party/waf/waflib/extras/sync_exec.py b/third_party/waf/waflib/extras/sync_exec.py
new file mode 100644
index 00000000000..ba241fc69b4
--- /dev/null
+++ b/third_party/waf/waflib/extras/sync_exec.py
@@ -0,0 +1,8 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+This tool is obsolete, the sync_exec feature is now the default
+"""
+
+pass
diff --git a/third_party/waf/waflib/extras/unc.py b/third_party/waf/waflib/extras/unc.py
new file mode 100644
index 00000000000..e630c2a7d05
--- /dev/null
+++ b/third_party/waf/waflib/extras/unc.py
@@ -0,0 +1,110 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2014 (ita)
+
+"""
+This module enables automatic handling of network paths of the form \\server\share for both input
+and output files. While a typical script may require the following::
+
+ import os
+ def build(bld):
+
+ node = bld.root.make_node('\\\\COMPUTER\\share\\test.txt')
+
+ # mark the server/share levels as folders
+ k = node.parent
+ while k:
+ k.cache_isdir = True
+ k = k.parent
+
+ # invalidate the signature if the file was removed
+ if not os.path.isfile(node.abspath()):
+ node.sig = None
+
+ # create the folder structure
+ if node.parent.height() > 2:
+ node.parent.mkdir()
+
+ # then the task generator
+ def myfun(tsk):
+ tsk.outputs[0].write("data")
+ bld(rule=myfun, source='wscript', target=[node])
+
+this tool will make the process much easier, for example::
+
+ def configure(conf):
+ conf.load('unc') # do not import the module directly
+
+ def build(bld):
+ def myfun(tsk):
+ tsk.outputs[0].write("data")
+ bld(rule=myfun, update_outputs=True,
+ source='wscript',
+ target='\\\\COMPUTER\\share\\test.txt')
+ bld(rule=myfun, update_outputs=True,
+ source='\\\\COMPUTER\\share\\test.txt',
+ target='\\\\COMPUTER\\share\\test2.txt')
+"""
+
+import os
+from waflib import Node, Utils, Context
+
+def find_resource(self, lst):
+ if isinstance(lst, str):
+ lst = [x for x in Node.split_path(lst) if x and x != '.']
+
+ if lst[0].startswith('\\\\'):
+ if len(lst) < 3:
+ return None
+ node = self.ctx.root.make_node(lst[0]).make_node(lst[1])
+ node.cache_isdir = True
+ node.parent.cache_isdir = True
+
+ ret = node.search_node(lst[2:])
+ if not ret:
+ ret = node.find_node(lst[2:])
+ if ret and os.path.isdir(ret.abspath()):
+ return None
+ return ret
+
+ return self.find_resource_orig(lst)
+
+def find_or_declare(self, lst):
+ if isinstance(lst, str):
+ lst = [x for x in Node.split_path(lst) if x and x != '.']
+
+ if lst[0].startswith('\\\\'):
+ if len(lst) < 3:
+ return None
+ node = self.ctx.root.make_node(lst[0]).make_node(lst[1])
+ node.cache_isdir = True
+ node.parent.cache_isdir = True
+ ret = node.find_node(lst[2:])
+ if not ret:
+ ret = node.make_node(lst[2:])
+ if not os.path.isfile(ret.abspath()):
+ ret.sig = None
+ ret.parent.mkdir()
+ return ret
+
+ return self.find_or_declare_orig(lst)
+
+def abspath(self):
+ """For MAX_PATH limitations"""
+ ret = self.abspath_orig()
+ if not ret.startswith("\\"):
+ return "\\\\?\\" + ret
+ return ret
+
+if Utils.is_win32:
+ Node.Node.find_resource_orig = Node.Node.find_resource
+ Node.Node.find_resource = find_resource
+
+ Node.Node.find_or_declare_orig = Node.Node.find_or_declare
+ Node.Node.find_or_declare = find_or_declare
+
+ Node.Node.abspath_orig = Node.Node.abspath
+ Node.Node.abspath = abspath
+
+ for k in list(Context.cache_modules.keys()):
+ Context.cache_modules["\\\\?\\" + k] = Context.cache_modules[k]
diff --git a/third_party/waf/waflib/extras/unity.py b/third_party/waf/waflib/extras/unity.py
new file mode 100644
index 00000000000..f30ba50ca45
--- /dev/null
+++ b/third_party/waf/waflib/extras/unity.py
@@ -0,0 +1,67 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+Compile whole groups of C/C++ files at once.
+
+def build(bld):
+ bld.load('compiler_cxx unity')
+"""
+
+import sys
+from waflib import Task, Options
+from waflib.Tools import c_preproc
+from waflib import TaskGen
+
+MAX_BATCH = 50
+
+def options(opt):
+ global MAX_BATCH
+ opt.add_option('--batchsize', action='store', dest='batchsize', type='int', default=MAX_BATCH, help='batch size (0 for no batch)')
+
+class unity(Task.Task):
+ color = 'BLUE'
+ scan = c_preproc.scan
+ def run(self):
+ lst = ['#include "%s"\n' % node.abspath() for node in self.inputs]
+ txt = ''.join(lst)
+ self.outputs[0].write(txt)
+
+@TaskGen.taskgen_method
+def batch_size(self):
+ return getattr(Options.options, 'batchsize', MAX_BATCH)
+
+def make_batch_fun(ext):
+ # this generic code makes this quite unreadable, defining the function two times might have been better
+ def make_batch(self, node):
+ cnt = self.batch_size()
+ if cnt <= 1:
+ return self.create_compiled_task(ext, node)
+ x = getattr(self, 'master_%s' % ext, None)
+ if not x or len(x.inputs) >= cnt:
+ x = self.create_task('unity')
+ setattr(self, 'master_%s' % ext, x)
+
+ cnt_cur = getattr(self, 'cnt_%s' % ext, 0)
+ cxxnode = node.parent.find_or_declare('unity_%s_%d_%d.%s' % (self.idx, cnt_cur, cnt, ext))
+ x.outputs = [cxxnode]
+ setattr(self, 'cnt_%s' % ext, cnt_cur + 1)
+ self.create_compiled_task(ext, cxxnode)
+ x.inputs.append(node)
+ return make_batch
+
+def enable_support(cc, cxx):
+ if cxx or not cc:
+ TaskGen.extension('.cpp', '.cc', '.cxx', '.C', '.c++')(make_batch_fun('cxx'))
+ if cc:
+ TaskGen.extension('.c')(make_batch_fun('c'))
+ else:
+ TaskGen.task_gen.mappings['.c'] = TaskGen.task_gen.mappings['.cpp']
+
+has_c = '.c' in TaskGen.task_gen.mappings or 'waflib.Tools.compiler_c' in sys.modules
+has_cpp = '.cpp' in TaskGen.task_gen.mappings or 'waflib.Tools.compiler_cxx' in sys.modules
+enable_support(has_c, has_cpp) # by default
+
+def build(bld):
+ # it is best to do this
+ enable_support(bld.env.CC_NAME, bld.env.CXX_NAME)
diff --git a/third_party/waf/waflib/extras/use_config.py b/third_party/waf/waflib/extras/use_config.py
new file mode 100644
index 00000000000..ffaafce85d5
--- /dev/null
+++ b/third_party/waf/waflib/extras/use_config.py
@@ -0,0 +1,166 @@
+#!/usr/bin/env python
+# coding=utf-8
+# Mathieu Courtois - EDF R&D, 2013 - http://www.code-aster.org
+
+"""
+When a project has a lot of options, the 'waf configure' command line can become
+very long and error-prone.
+This tool provides a convenient way to load a set of configuration parameters
+from a local file or from a remote url.
+
+The configuration parameters are stored in a Python file that is imported
+the same way an extra waf tool would be.
+
+Example:
+$ waf configure --use-config-dir=http://www.anywhere.org --use-config=myconf1 ...
+
+The file 'myconf1' will be downloaded from 'http://www.anywhere.org'
+(or 'http://www.anywhere.org/wafcfg').
+If the file is available locally, it could be:
+$ waf configure --use-config-dir=/somewhere/myconfigurations --use-config=myconf1 ...
+
+The configuration of 'myconf1.py' is automatically loaded by calling
+its 'configure' function. In this example, it defines environment variables and
+sets options:
+
+def configure(self):
+ self.env['CC'] = 'gcc-4.8'
+ self.env.append_value('LIBPATH', [...])
+ self.options.perlbinary = '/usr/local/bin/perl'
+ self.options.pyc = False
+
+The equivalent command line would have been:
+$ CC=gcc-4.8 LIBPATH=... waf configure --nopyc --with-perl-binary=/usr/local/bin/perl
+
+
+This is an extra tool, not bundled with the default waf binary.
+To add the use_config tool to the waf file:
+$ ./waf-light --tools=use_config
+
+When using this tool, the wscript will look like:
+
+ def options(opt):
+ opt.load('use_config')
+
+ def configure(conf):
+ conf.load('use_config')
+"""
+
+import sys
+import os.path as osp
+import os
+
+try:
+ from urllib import request
+except ImportError:
+ from urllib import urlopen
+else:
+ urlopen = request.urlopen
+
+
+from waflib import Errors, Context, Logs, Utils, Options, Configure
+
+try:
+ from urllib.parse import urlparse
+except ImportError:
+ from urlparse import urlparse
+
+
+
+DEFAULT_DIR = 'wafcfg'
+# add first the current wafcfg subdirectory
+sys.path.append(osp.abspath(DEFAULT_DIR))
+
+def options(self):
+ group = self.add_option_group('configure options')
+ group.add_option('--download', dest='download', default=False, action='store_true', help='try to download the tools if missing')
+
+ group.add_option('--use-config', action='store', default=None,
+ metavar='CFG', dest='use_config',
+ help='force the configuration parameters by importing '
+ 'CFG.py. Several modules may be provided (comma '
+ 'separated).')
+ group.add_option('--use-config-dir', action='store', default=DEFAULT_DIR,
+ metavar='CFG_DIR', dest='use_config_dir',
+ help='path or url where to find the configuration file')
+
+def download_check(node):
+ """
+ Hook to check for the tools which are downloaded. Replace with your function if necessary.
+ """
+ pass
+
+
+def download_tool(tool, force=False, ctx=None):
+ """
+ Download a Waf tool from the remote repository defined in :py:const:`waflib.Context.remote_repo`::
+
+ $ waf configure --download
+ """
+ for x in Utils.to_list(Context.remote_repo):
+ for sub in Utils.to_list(Context.remote_locs):
+ url = '/'.join((x, sub, tool + '.py'))
+ try:
+ web = urlopen(url)
+ try:
+ if web.getcode() != 200:
+ continue
+ except AttributeError:
+ pass
+ except Exception:
+ # on python3 urlopen throws an exception
+ # python 2.3 does not have getcode and throws an exception to fail
+ continue
+ else:
+ tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py')))
+ tmp.write(web.read(), 'wb')
+ Logs.warn('Downloaded %s from %s' % (tool, url))
+ download_check(tmp)
+ try:
+ module = Context.load_tool(tool)
+ except Exception:
+ Logs.warn('The tool %s from %s is unusable' % (tool, url))
+ try:
+ tmp.delete()
+ except Exception:
+ pass
+ continue
+ return module
+
+ raise Errors.WafError('Could not load the Waf tool')
+
+def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
+ try:
+ module = Context.load_tool_default(tool, tooldir, ctx, with_sys_path)
+ except ImportError as e:
+ if Options.options.download:
+ module = download_tool(tool, ctx=ctx)
+ if not module:
+ ctx.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s' % (tool, sys.path, e))
+ else:
+ ctx.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s' % (tool, sys.path, e))
+ return module
+
+Context.load_tool_default = Context.load_tool
+Context.load_tool = load_tool
+Configure.download_tool = download_tool
+
+def configure(self):
+ opts = self.options
+ use_cfg = opts.use_config
+ if use_cfg is None:
+ return
+ url = urlparse(opts.use_config_dir)
+ kwargs = {}
+ if url.scheme:
+ kwargs['download'] = True
+ kwargs['remote_url'] = url.geturl()
+ # search first with the exact url, else try with +'/wafcfg'
+ kwargs['remote_locs'] = ['', DEFAULT_DIR]
+ tooldir = url.geturl() + ' ' + DEFAULT_DIR
+ for cfg in use_cfg.split(','):
+ Logs.pprint('NORMAL', "Searching configuration '%s'..." % cfg)
+ self.load(cfg, tooldir=tooldir, **kwargs)
+ self.start_msg('Checking for configuration')
+ self.end_msg(use_cfg)
diff --git a/third_party/waf/waflib/extras/why.py b/third_party/waf/waflib/extras/why.py
new file mode 100644
index 00000000000..c3875f4e3de
--- /dev/null
+++ b/third_party/waf/waflib/extras/why.py
@@ -0,0 +1,75 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2010 (ita)
+
+"""
+This tool modifies the task signature scheme to store and obtain
+information about the task execution (why it must run, etc)::
+
+ def configure(conf):
+ conf.load('why')
+
+After adding the tool, a full rebuild is necessary::
+
+	waf clean build --zones=task
+"""
+
+from waflib import Task, Utils, Logs, Errors
+
+def signature(self):
+ # compute the result once, and assume that the scanner signature will remain valid
+ try: return self.cache_sig
+ except AttributeError: pass
+
+ self.m = Utils.md5()
+ self.m.update(self.hcode)
+ id_sig = self.m.digest()
+
+ # explicit deps
+ self.m = Utils.md5()
+ self.sig_explicit_deps()
+ exp_sig = self.m.digest()
+
+ # env vars
+ self.m = Utils.md5()
+ self.sig_vars()
+ var_sig = self.m.digest()
+
+ # implicit deps / scanner results
+ self.m = Utils.md5()
+ if self.scan:
+ try:
+ self.sig_implicit_deps()
+ except Errors.TaskRescan:
+ return self.signature()
+ impl_sig = self.m.digest()
+
+ ret = self.cache_sig = impl_sig + id_sig + exp_sig + var_sig
+ return ret
+
+
+Task.Task.signature = signature
+
+old = Task.Task.runnable_status
+def runnable_status(self):
+ ret = old(self)
+ if ret == Task.RUN_ME:
+ try:
+ old_sigs = self.generator.bld.task_sigs[self.uid()]
+ except (KeyError, AttributeError):
+ Logs.debug("task: task must run as no previous signature exists")
+ else:
+ new_sigs = self.cache_sig
+ def v(x):
+ return Utils.to_hex(x)
+
+ Logs.debug("Task %r" % self)
+ msgs = ['* Implicit or scanner dependency', '* Task code', '* Source file, explicit or manual dependency', '* Configuration data variable']
+ tmp = 'task: -> %s: %s %s'
+ for x in range(len(msgs)):
+ l = len(Utils.SIG_NIL)
+ a = new_sigs[x*l : (x+1)*l]
+ b = old_sigs[x*l : (x+1)*l]
+ if (a != b):
+ Logs.debug(tmp % (msgs[x].ljust(35), v(a), v(b)))
+ return ret
+Task.Task.runnable_status = runnable_status
diff --git a/third_party/waf/waflib/extras/win32_opts.py b/third_party/waf/waflib/extras/win32_opts.py
new file mode 100644
index 00000000000..28491cd328f
--- /dev/null
+++ b/third_party/waf/waflib/extras/win32_opts.py
@@ -0,0 +1,175 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+Windows-specific optimizations
+
+This module can help reduce the overhead of listing files on Windows (projects with more than 10000 files).
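+
+Usage sketch (an assumption: loading the module at configure time is enough,
+since it replaces several Node methods at import when running on win32)::
+
+	def options(opt):
+		opt.load('win32_opts')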
+"""
+
+import os
+from waflib import Utils, Build, Node, Logs
+
+try:
+ TP = '%s\\*'.decode('ascii')
+except AttributeError:
+ TP = '%s\\*'
+
+if Utils.is_win32:
+ from waflib.extras import md5_tstamp
+ import ctypes, ctypes.wintypes
+
+ FindFirstFile = ctypes.windll.kernel32.FindFirstFileW
+ FindNextFile = ctypes.windll.kernel32.FindNextFileW
+ FindClose = ctypes.windll.kernel32.FindClose
+ FILE_ATTRIBUTE_DIRECTORY = 0x10
+ INVALID_HANDLE_VALUE = -1
+ UPPER_FOLDERS = ('.', '..')
+ try:
+ UPPER_FOLDERS = [unicode(x) for x in UPPER_FOLDERS]
+ except NameError:
+ pass
+
+ def cached_hash_file(self):
+ try:
+ cache = self.ctx.cache_listdir_cache_hash_file
+ except AttributeError:
+ cache = self.ctx.cache_listdir_cache_hash_file = {}
+
+ if id(self.parent) in cache:
+ try:
+ t = cache[id(self.parent)][self.name]
+ except KeyError:
+ raise IOError('Not a file')
+ else:
+ # an opportunity to list the files and the timestamps at once
+ findData = ctypes.wintypes.WIN32_FIND_DATAW()
+ find = FindFirstFile(TP % self.parent.abspath(), ctypes.byref(findData))
+
+ if find == INVALID_HANDLE_VALUE:
+ cache[id(self.parent)] = {}
+ raise IOError('Not a file')
+
+ cache[id(self.parent)] = lst_files = {}
+ try:
+ while True:
+ if findData.cFileName not in UPPER_FOLDERS:
+ thatsadir = findData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY
+ if not thatsadir:
+ ts = findData.ftLastWriteTime
+ # compose the 64-bit FILETIME value (high DWORD goes in the upper bits)
+ d = (ts.dwHighDateTime << 32) | ts.dwLowDateTime
+ lst_files[str(findData.cFileName)] = d
+ if not FindNextFile(find, ctypes.byref(findData)):
+ break
+ except Exception:
+ cache[id(self.parent)] = {}
+ raise IOError('Not a file')
+ finally:
+ FindClose(find)
+ t = lst_files[self.name]
+
+ fname = self.abspath()
+ if fname in Build.hashes_md5_tstamp:
+ if Build.hashes_md5_tstamp[fname][0] == t:
+ return Build.hashes_md5_tstamp[fname][1]
+
+ try:
+ fd = os.open(fname, os.O_BINARY | os.O_RDONLY | os.O_NOINHERIT)
+ except OSError:
+ raise IOError('Cannot read from %r' % fname)
+ f = os.fdopen(fd, 'rb')
+ m = Utils.md5()
+ rb = 1
+ try:
+ while rb:
+ rb = f.read(200000)
+ m.update(rb)
+ finally:
+ f.close()
+
+ # ensure that the cache is overwritten
+ Build.hashes_md5_tstamp[fname] = (t, m.digest())
+ return m.digest()
+ Node.Node.cached_hash_file = cached_hash_file
+
+ def get_bld_sig_win32(self):
+ try:
+ return self.ctx.hash_cache[id(self)]
+ except KeyError:
+ pass
+ except AttributeError:
+ self.ctx.hash_cache = {}
+
+ if not self.is_bld():
+ if self.is_child_of(self.ctx.srcnode):
+ self.sig = self.cached_hash_file()
+ else:
+ self.sig = Utils.h_file(self.abspath())
+ self.ctx.hash_cache[id(self)] = ret = self.sig
+ return ret
+ Node.Node.get_bld_sig = get_bld_sig_win32
+
+ def isfile_cached(self):
+ # optimize for nt.stat calls, assuming there are many files for few folders
+ try:
+ cache = self.__class__.cache_isfile_cache
+ except AttributeError:
+ cache = self.__class__.cache_isfile_cache = {}
+
+ try:
+ c1 = cache[id(self.parent)]
+ except KeyError:
+ c1 = cache[id(self.parent)] = []
+
+ curpath = self.parent.abspath()
+ findData = ctypes.wintypes.WIN32_FIND_DATAW()
+ find = FindFirstFile(TP % curpath, ctypes.byref(findData))
+
+ if find == INVALID_HANDLE_VALUE:
+ Logs.error("invalid win32 handle isfile_cached %r" % self.abspath())
+ return os.path.isfile(self.abspath())
+
+ try:
+ while True:
+ if findData.cFileName not in UPPER_FOLDERS:
+ thatsadir = findData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY
+ if not thatsadir:
+ c1.append(str(findData.cFileName))
+ if not FindNextFile(find, ctypes.byref(findData)):
+ break
+ except Exception as e:
+ Logs.error('exception while listing a folder %r %r' % (self.abspath(), e))
+ return os.path.isfile(self.abspath())
+ finally:
+ FindClose(find)
+ return self.name in c1
+ Node.Node.isfile_cached = isfile_cached
+
+ def find_or_declare_win32(self, lst):
+ # assuming that "find_or_declare" is called before the build starts, remove the calls to os.path.isfile
+ if isinstance(lst, str):
+ lst = [x for x in Node.split_path(lst) if x and x != '.']
+
+ node = self.get_bld().search(lst)
+ if node:
+ if not node.isfile_cached():
+ node.sig = None
+ try:
+ node.parent.mkdir()
+ except OSError:
+ pass
+ return node
+ self = self.get_src()
+ node = self.find_node(lst)
+ if node:
+ if not node.isfile_cached():
+ node.sig = None
+ try:
+ node.parent.mkdir()
+ except OSError:
+ pass
+ return node
+ node = self.get_bld().make_node(lst)
+ node.parent.mkdir()
+ return node
+ Node.Node.find_or_declare = find_or_declare_win32