summaryrefslogtreecommitdiff
path: root/waflib/Tools
diff options
context:
space:
mode:
authorKarl Linden <karl.j.linden@gmail.com>2018-10-06 10:51:14 +0200
committerKarl Linden <karl.j.linden@gmail.com>2018-10-06 13:16:24 +0200
commiteeef49954a0cc4bc5f120c12fe9eb988bd93ccd8 (patch)
treeacc5261a87172e240c77217eaaec0eb586aa9dfa /waflib/Tools
parentf5f22c6befc1b4bc5807de00496c087125f92adf (diff)
downloadjack2-eeef49954a0cc4bc5f120c12fe9eb988bd93ccd8.tar.gz
Revert "Stupid attempt at updating waf"
This reverts commit cf3f8205c4509966f04e6b77dad7c002db16d9d8. It was a good initiative, but waf 2.0 introces backward incompatible changes that break the pkg-config checks. The config checks will be updated before migrating to waf 2.0.
Diffstat (limited to 'waflib/Tools')
-rw-r--r--waflib/Tools/__init__.py2
-rw-r--r--waflib/Tools/ar.py4
-rw-r--r--waflib/Tools/asm.py73
-rw-r--r--waflib/Tools/bison.py49
-rw-r--r--waflib/Tools/c.py14
-rw-r--r--waflib/Tools/c_aliases.py33
-rw-r--r--waflib/Tools/c_config.py688
-rw-r--r--waflib/Tools/c_osx.py54
-rw-r--r--waflib/Tools/c_preproc.py470
-rw-r--r--waflib/Tools/c_tests.py25
-rw-r--r--waflib/Tools/ccroot.py140
-rw-r--r--waflib/Tools/clang.py2
-rw-r--r--waflib/Tools/clangxx.py4
-rw-r--r--waflib/Tools/compiler_c.py24
-rw-r--r--waflib/Tools/compiler_cxx.py22
-rw-r--r--waflib/Tools/compiler_d.py85
-rw-r--r--waflib/Tools/compiler_fc.py73
-rw-r--r--waflib/Tools/cs.py211
-rw-r--r--waflib/Tools/cxx.py14
-rw-r--r--waflib/Tools/d.py97
-rw-r--r--waflib/Tools/d_config.py64
-rw-r--r--waflib/Tools/d_scan.py211
-rw-r--r--waflib/Tools/dbus.py70
-rw-r--r--waflib/Tools/dmd.py80
-rw-r--r--waflib/Tools/errcheck.py88
-rw-r--r--waflib/Tools/fc.py189
-rw-r--r--waflib/Tools/fc_config.py488
-rw-r--r--waflib/Tools/fc_scan.py114
-rw-r--r--waflib/Tools/flex.py62
-rw-r--r--waflib/Tools/g95.py66
-rw-r--r--waflib/Tools/gas.py18
-rw-r--r--waflib/Tools/gcc.py113
-rw-r--r--waflib/Tools/gdc.py55
-rw-r--r--waflib/Tools/gfortran.py93
-rw-r--r--waflib/Tools/glib2.py489
-rw-r--r--waflib/Tools/gnu_dirs.py131
-rw-r--r--waflib/Tools/gxx.py114
-rw-r--r--waflib/Tools/icc.py9
-rw-r--r--waflib/Tools/icpc.py9
-rw-r--r--waflib/Tools/ifort.py413
-rw-r--r--waflib/Tools/intltool.py231
-rw-r--r--waflib/Tools/irixcc.py54
-rw-r--r--waflib/Tools/javaw.py464
-rw-r--r--waflib/Tools/ldc2.py56
-rw-r--r--waflib/Tools/lua.py38
-rw-r--r--waflib/Tools/md5_tstamp.py39
-rw-r--r--waflib/Tools/msvc.py869
-rw-r--r--waflib/Tools/nasm.py26
-rw-r--r--waflib/Tools/nobuild.py24
-rw-r--r--waflib/Tools/perl.py156
-rw-r--r--waflib/Tools/python.py627
-rw-r--r--waflib/Tools/qt5.py796
-rw-r--r--waflib/Tools/ruby.py186
-rw-r--r--waflib/Tools/suncc.py56
-rw-r--r--waflib/Tools/suncxx.py53
-rw-r--r--waflib/Tools/tex.py543
-rw-r--r--waflib/Tools/vala.py355
-rw-r--r--waflib/Tools/waf_unit_test.py211
-rw-r--r--waflib/Tools/winres.py78
-rw-r--r--waflib/Tools/xlc.py54
-rw-r--r--waflib/Tools/xlcxx.py54
61 files changed, 1508 insertions, 8422 deletions
diff --git a/waflib/Tools/__init__.py b/waflib/Tools/__init__.py
index 079df358..c8a3c349 100644
--- a/waflib/Tools/__init__.py
+++ b/waflib/Tools/__init__.py
@@ -1,3 +1,3 @@
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2005-2018 (ita)
+# Thomas Nagy, 2005-2010 (ita)
diff --git a/waflib/Tools/ar.py b/waflib/Tools/ar.py
index b39b6459..aac39c0c 100644
--- a/waflib/Tools/ar.py
+++ b/waflib/Tools/ar.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
+# Thomas Nagy, 2006-2010 (ita)
# Ralf Habacker, 2006 (rh)
"""
@@ -16,7 +16,7 @@ def find_ar(conf):
conf.load('ar')
def configure(conf):
- """Finds the ar program and sets the default flags in ``conf.env.ARFLAGS``"""
+ """Find the ar program and set the default flags in ``conf.env.ARFLAGS``"""
conf.find_program('ar', var='AR')
conf.add_os_flags('ARFLAGS')
if not conf.env.ARFLAGS:
diff --git a/waflib/Tools/asm.py b/waflib/Tools/asm.py
deleted file mode 100644
index b6f26fb3..00000000
--- a/waflib/Tools/asm.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2008-2018 (ita)
-
-"""
-Assembly support, used by tools such as gas and nasm
-
-To declare targets using assembly::
-
- def configure(conf):
- conf.load('gcc gas')
-
- def build(bld):
- bld(
- features='c cstlib asm',
- source = 'test.S',
- target = 'asmtest')
-
- bld(
- features='asm asmprogram',
- source = 'test.S',
- target = 'asmtest')
-
-Support for pure asm programs and libraries should also work::
-
- def configure(conf):
- conf.load('nasm')
- conf.find_program('ld', 'ASLINK')
-
- def build(bld):
- bld(
- features='asm asmprogram',
- source = 'test.S',
- target = 'asmtest')
-"""
-
-from waflib import Task
-from waflib.Tools.ccroot import link_task, stlink_task
-from waflib.TaskGen import extension
-
-class asm(Task.Task):
- """
- Compiles asm files by gas/nasm/yasm/...
- """
- color = 'BLUE'
- run_str = '${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'
-
-@extension('.s', '.S', '.asm', '.ASM', '.spp', '.SPP')
-def asm_hook(self, node):
- """
- Binds the asm extension to the asm task
-
- :param node: input file
- :type node: :py:class:`waflib.Node.Node`
- """
- return self.create_compiled_task('asm', node)
-
-class asmprogram(link_task):
- "Links object files into a c program"
- run_str = '${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}'
- ext_out = ['.bin']
- inst_to = '${BINDIR}'
-
-class asmshlib(asmprogram):
- "Links object files into a c shared library"
- inst_to = '${LIBDIR}'
-
-class asmstlib(stlink_task):
- "Links object files into a c static library"
- pass # do not remove
-
-def configure(conf):
- conf.env.ASMPATH_ST = '-I%s'
diff --git a/waflib/Tools/bison.py b/waflib/Tools/bison.py
deleted file mode 100644
index eef56dcd..00000000
--- a/waflib/Tools/bison.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# John O'Meara, 2006
-# Thomas Nagy 2009-2018 (ita)
-
-"""
-The **bison** program is a code generator which creates C or C++ files.
-The generated files are compiled into object files.
-"""
-
-from waflib import Task
-from waflib.TaskGen import extension
-
-class bison(Task.Task):
- """Compiles bison files"""
- color = 'BLUE'
- run_str = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
- ext_out = ['.h'] # just to make sure
-
-@extension('.y', '.yc', '.yy')
-def big_bison(self, node):
- """
- Creates a bison task, which must be executed from the directory of the output file.
- """
- has_h = '-d' in self.env.BISONFLAGS
-
- outs = []
- if node.name.endswith('.yc'):
- outs.append(node.change_ext('.tab.cc'))
- if has_h:
- outs.append(node.change_ext('.tab.hh'))
- else:
- outs.append(node.change_ext('.tab.c'))
- if has_h:
- outs.append(node.change_ext('.tab.h'))
-
- tsk = self.create_task('bison', node, outs)
- tsk.cwd = node.parent.get_bld()
-
- # and the c/cxx file must be compiled too
- self.source.append(outs[0])
-
-def configure(conf):
- """
- Detects the *bison* program
- """
- conf.find_program('bison', var='BISON')
- conf.env.BISONFLAGS = ['-d']
-
diff --git a/waflib/Tools/c.py b/waflib/Tools/c.py
index effd6b6e..0b06a7fa 100644
--- a/waflib/Tools/c.py
+++ b/waflib/Tools/c.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
+# Thomas Nagy, 2006-2010 (ita)
"Base for c programs/libraries"
@@ -10,30 +10,30 @@ from waflib.Tools.ccroot import link_task, stlink_task
@TaskGen.extension('.c')
def c_hook(self, node):
- "Binds the c file extensions create :py:class:`waflib.Tools.c.c` instances"
+ "Bind the c file extension to the creation of a :py:class:`waflib.Tools.c.c` instance"
if not self.env.CC and self.env.CXX:
return self.create_compiled_task('cxx', node)
return self.create_compiled_task('c', node)
class c(Task.Task):
- "Compiles C files into object files"
- run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
+ "Compile C files into object files"
+ run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()}'
vars = ['CCDEPS'] # unused variable to depend on, just in case
ext_in = ['.h'] # set the build order easily by using ext_out=['.h']
scan = c_preproc.scan
class cprogram(link_task):
- "Links object files into c programs"
+ "Link object files into a c program"
run_str = '${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
ext_out = ['.bin']
vars = ['LINKDEPS']
inst_to = '${BINDIR}'
class cshlib(cprogram):
- "Links object files into c shared libraries"
+ "Link object files into a c shared library"
inst_to = '${LIBDIR}'
class cstlib(stlink_task):
- "Links object files into a c static libraries"
+ "Link object files into a c static library"
pass # do not remove
diff --git a/waflib/Tools/c_aliases.py b/waflib/Tools/c_aliases.py
index c9d53692..0747abf5 100644
--- a/waflib/Tools/c_aliases.py
+++ b/waflib/Tools/c_aliases.py
@@ -9,8 +9,6 @@ from waflib.Configure import conf
def get_extensions(lst):
"""
- Returns the file extensions for the list of files given as input
-
:param lst: files to process
:list lst: list of string or :py:class:`waflib.Node.Node`
:return: list of file extensions
@@ -18,15 +16,17 @@ def get_extensions(lst):
"""
ret = []
for x in Utils.to_list(lst):
- if not isinstance(x, str):
- x = x.name
- ret.append(x[x.rfind('.') + 1:])
+ try:
+ if not isinstance(x, str):
+ x = x.name
+ ret.append(x[x.rfind('.') + 1:])
+ except Exception:
+ pass
return ret
def sniff_features(**kw):
"""
- Computes and returns the features required for a task generator by
- looking at the file extensions. This aimed for C/C++ mainly::
+ Look at the source files and return the features for a task generator (mainly cc and cxx)::
snif_features(source=['foo.c', 'foo.cxx'], type='shlib')
# returns ['cxx', 'c', 'cxxshlib', 'cshlib']
@@ -39,7 +39,7 @@ def sniff_features(**kw):
:rtype: list of string
"""
exts = get_extensions(kw['source'])
- typ = kw['typ']
+ type = kw['_type']
feats = []
# watch the order, cxx will have the precedence
@@ -63,27 +63,18 @@ def sniff_features(**kw):
feats.append('java')
return 'java'
- if typ in ('program', 'shlib', 'stlib'):
+ if type in ('program', 'shlib', 'stlib'):
will_link = False
for x in feats:
if x in ('cxx', 'd', 'fc', 'c'):
- feats.append(x + typ)
+ feats.append(x + type)
will_link = True
if not will_link and not kw.get('features', []):
raise Errors.WafError('Cannot link from %r, try passing eg: features="c cprogram"?' % kw)
return feats
-def set_features(kw, typ):
- """
- Inserts data in the input dict *kw* based on existing data and on the type of target
- required (typ).
-
- :param kw: task generator parameters
- :type kw: dict
- :param typ: type of target
- :type typ: string
- """
- kw['typ'] = typ
+def set_features(kw, _type):
+ kw['_type'] = _type
kw['features'] = Utils.to_list(kw.get('features', [])) + Utils.to_list(sniff_features(**kw))
@conf
diff --git a/waflib/Tools/c_config.py b/waflib/Tools/c_config.py
index d2b3c0d8..f114dffb 100644
--- a/waflib/Tools/c_config.py
+++ b/waflib/Tools/c_config.py
@@ -1,13 +1,11 @@
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2005-2018 (ita)
+# Thomas Nagy, 2005-2010 (ita)
"""
C/C++/D configuration helpers
"""
-from __future__ import with_statement
-
import os, re, shlex
from waflib import Build, Utils, Task, Options, Logs, Errors, Runner
from waflib.TaskGen import after_method, feature
@@ -19,6 +17,32 @@ WAF_CONFIG_H = 'config.h'
DEFKEYS = 'define_key'
INCKEYS = 'include_key'
+cfg_ver = {
+ 'atleast-version': '>=',
+ 'exact-version': '==',
+ 'max-version': '<=',
+}
+
+SNIP_FUNCTION = '''
+int main(int argc, char **argv) {
+ void (*p)();
+ (void)argc; (void)argv;
+ p=(void(*)())(%s);
+ return !p;
+}
+'''
+"""Code template for checking for functions"""
+
+SNIP_TYPE = '''
+int main(int argc, char **argv) {
+ (void)argc; (void)argv;
+ if ((%(type_name)s *) 0) return 0;
+ if (sizeof (%(type_name)s)) return 0;
+ return 1;
+}
+'''
+"""Code template for checking for types"""
+
SNIP_EMPTY_PROGRAM = '''
int main(int argc, char **argv) {
(void)argc; (void)argv;
@@ -26,6 +50,15 @@ int main(int argc, char **argv) {
}
'''
+SNIP_FIELD = '''
+int main(int argc, char **argv) {
+ char *off;
+ (void)argc; (void)argv;
+ off = (char*) &((%(type_name)s*)0)->%(field_name)s;
+ return (size_t) off < sizeof(%(type_name)s);
+}
+'''
+
MACRO_TO_DESTOS = {
'__linux__' : 'linux',
'__GNU__' : 'gnu', # hurd
@@ -42,7 +75,7 @@ MACRO_TO_DESTOS = {
'_WIN64' : 'win32',
'_WIN32' : 'win32',
# Note about darwin: this is also tested with 'defined __APPLE__ && defined __MACH__' somewhere below in this file.
-'__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__' : 'darwin',
+'__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__' : 'darwin',
'__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__' : 'darwin', # iphone
'__QNX__' : 'qnx',
'__native_client__' : 'nacl' # google native client platform
@@ -67,13 +100,12 @@ MACRO_TO_DEST_CPU = {
'__s390x__' : 's390x',
'__s390__' : 's390',
'__sh__' : 'sh',
-'__xtensa__' : 'xtensa',
}
@conf
def parse_flags(self, line, uselib_store, env=None, force_static=False, posix=None):
"""
- Parses flags from the input lines, and adds them to the relevant use variables::
+ Parse the flags from the input lines, and add them to the relevant use variables::
def configure(conf):
conf.parse_flags('-O3', 'FOO')
@@ -105,11 +137,9 @@ def parse_flags(self, line, uselib_store, env=None, force_static=False, posix=No
# append_unique is not always possible
# for example, apple flags may require both -arch i386 and -arch ppc
+ app = env.append_value
+ appu = env.append_unique
uselib = uselib_store
- def app(var, val):
- env.append_value('%s_%s' % (var, uselib), val)
- def appu(var, val):
- env.append_unique('%s_%s' % (var, uselib), val)
static = False
while lst:
x = lst.pop(0)
@@ -117,79 +147,69 @@ def parse_flags(self, line, uselib_store, env=None, force_static=False, posix=No
ot = x[2:]
if st == '-I' or st == '/I':
- if not ot:
- ot = lst.pop(0)
- appu('INCLUDES', ot)
+ if not ot: ot = lst.pop(0)
+ appu('INCLUDES_' + uselib, [ot])
elif st == '-i':
tmp = [x, lst.pop(0)]
app('CFLAGS', tmp)
app('CXXFLAGS', tmp)
elif st == '-D' or (env.CXX_NAME == 'msvc' and st == '/D'): # not perfect but..
- if not ot:
- ot = lst.pop(0)
- app('DEFINES', ot)
+ if not ot: ot = lst.pop(0)
+ app('DEFINES_' + uselib, [ot])
elif st == '-l':
- if not ot:
- ot = lst.pop(0)
- prefix = 'STLIB' if (force_static or static) else 'LIB'
- app(prefix, ot)
+ if not ot: ot = lst.pop(0)
+ prefix = (force_static or static) and 'STLIB_' or 'LIB_'
+ appu(prefix + uselib, [ot])
elif st == '-L':
- if not ot:
- ot = lst.pop(0)
- prefix = 'STLIBPATH' if (force_static or static) else 'LIBPATH'
- appu(prefix, ot)
+ if not ot: ot = lst.pop(0)
+ prefix = (force_static or static) and 'STLIBPATH_' or 'LIBPATH_'
+ appu(prefix + uselib, [ot])
elif x.startswith('/LIBPATH:'):
- prefix = 'STLIBPATH' if (force_static or static) else 'LIBPATH'
- appu(prefix, x.replace('/LIBPATH:', ''))
- elif x.startswith('-std='):
- prefix = 'CXXFLAGS' if '++' in x else 'CFLAGS'
- app(prefix, x)
- elif x.startswith('+') or x in ('-pthread', '-fPIC', '-fpic', '-fPIE', '-fpie'):
- app('CFLAGS', x)
- app('CXXFLAGS', x)
- app('LINKFLAGS', x)
+ prefix = (force_static or static) and 'STLIBPATH_' or 'LIBPATH_'
+ appu(prefix + uselib, [x.replace('/LIBPATH:', '')])
+ elif x == '-pthread' or x.startswith('+') or x.startswith('-std'):
+ app('CFLAGS_' + uselib, [x])
+ app('CXXFLAGS_' + uselib, [x])
+ app('LINKFLAGS_' + uselib, [x])
elif x == '-framework':
- appu('FRAMEWORK', lst.pop(0))
+ appu('FRAMEWORK_' + uselib, [lst.pop(0)])
elif x.startswith('-F'):
- appu('FRAMEWORKPATH', x[2:])
+ appu('FRAMEWORKPATH_' + uselib, [x[2:]])
elif x == '-Wl,-rpath' or x == '-Wl,-R':
- app('RPATH', lst.pop(0).lstrip('-Wl,'))
+ app('RPATH_' + uselib, lst.pop(0).lstrip('-Wl,'))
elif x.startswith('-Wl,-R,'):
- app('RPATH', x[7:])
+ app('RPATH_' + uselib, x[7:])
elif x.startswith('-Wl,-R'):
- app('RPATH', x[6:])
+ app('RPATH_' + uselib, x[6:])
elif x.startswith('-Wl,-rpath,'):
- app('RPATH', x[11:])
+ app('RPATH_' + uselib, x[11:])
elif x == '-Wl,-Bstatic' or x == '-Bstatic':
static = True
elif x == '-Wl,-Bdynamic' or x == '-Bdynamic':
static = False
- elif x.startswith('-Wl') or x in ('-rdynamic', '-pie'):
- app('LINKFLAGS', x)
- elif x.startswith(('-m', '-f', '-dynamic', '-O', '-g')):
- # Adding the -W option breaks python builds on Openindiana
- app('CFLAGS', x)
- app('CXXFLAGS', x)
+ elif x.startswith('-Wl'):
+ app('LINKFLAGS_' + uselib, [x])
+ elif x.startswith('-m') or x.startswith('-f') or x.startswith('-dynamic'):
+ app('CFLAGS_' + uselib, [x])
+ app('CXXFLAGS_' + uselib, [x])
elif x.startswith('-bundle'):
- app('LINKFLAGS', x)
- elif x.startswith(('-undefined', '-Xlinker')):
+ app('LINKFLAGS_' + uselib, [x])
+ elif x.startswith('-undefined') or x.startswith('-Xlinker'):
arg = lst.pop(0)
- app('LINKFLAGS', [x, arg])
- elif x.startswith(('-arch', '-isysroot')):
+ app('LINKFLAGS_' + uselib, [x, arg])
+ elif x.startswith('-arch') or x.startswith('-isysroot'):
tmp = [x, lst.pop(0)]
- app('CFLAGS', tmp)
- app('CXXFLAGS', tmp)
- app('LINKFLAGS', tmp)
- elif x.endswith(('.a', '.so', '.dylib', '.lib')):
- appu('LINKFLAGS', x) # not cool, #762
- else:
- self.to_log('Unhandled flag %r' % x)
+ app('CFLAGS_' + uselib, tmp)
+ app('CXXFLAGS_' + uselib, tmp)
+ app('LINKFLAGS_' + uselib, tmp)
+ elif x.endswith('.a') or x.endswith('.so') or x.endswith('.dylib') or x.endswith('.lib'):
+ appu('LINKFLAGS_' + uselib, [x]) # not cool, #762
@conf
def validate_cfg(self, kw):
"""
- Searches for the program *pkg-config* if missing, and validates the
- parameters to pass to :py:func:`waflib.Tools.c_config.exec_cfg`.
+ Search for the program *pkg-config* if missing, and validate the parameters to pass to
+ :py:func:`waflib.Tools.c_config.exec_cfg`.
:param path: the **-config program to use** (default is *pkg-config*)
:type path: list of string
@@ -205,42 +225,47 @@ def validate_cfg(self, kw):
self.find_program('pkg-config', var='PKGCONFIG')
kw['path'] = self.env.PKGCONFIG
- # verify that exactly one action is requested
- s = ('atleast_pkgconfig_version' in kw) + ('modversion' in kw) + ('package' in kw)
- if s != 1:
- raise ValueError('exactly one of atleast_pkgconfig_version, modversion and package must be set')
- if not 'msg' in kw:
- if 'atleast_pkgconfig_version' in kw:
+ # pkg-config version
+ if 'atleast_pkgconfig_version' in kw:
+ if not 'msg' in kw:
kw['msg'] = 'Checking for pkg-config version >= %r' % kw['atleast_pkgconfig_version']
- elif 'modversion' in kw:
- kw['msg'] = 'Checking for %r version' % kw['modversion']
- else:
- kw['msg'] = 'Checking for %r' %(kw['package'])
+ return
- # let the modversion check set the okmsg to the detected version
- if not 'okmsg' in kw and not 'modversion' in kw:
+ if not 'okmsg' in kw:
kw['okmsg'] = 'yes'
if not 'errmsg' in kw:
kw['errmsg'] = 'not found'
- # pkg-config version
- if 'atleast_pkgconfig_version' in kw:
- pass
- elif 'modversion' in kw:
- if not 'uselib_store' in kw:
- kw['uselib_store'] = kw['modversion']
- if not 'define_name' in kw:
- kw['define_name'] = '%s_VERSION' % Utils.quote_define_name(kw['uselib_store'])
- else:
- if not 'uselib_store' in kw:
- kw['uselib_store'] = Utils.to_list(kw['package'])[0].upper()
- if not 'define_name' in kw:
- kw['define_name'] = self.have_define(kw['uselib_store'])
+ if 'modversion' in kw:
+ if not 'msg' in kw:
+ kw['msg'] = 'Checking for %r version' % kw['modversion']
+ return
+
+ # checking for the version of a module, for the moment, one thing at a time
+ for x in cfg_ver.keys():
+ y = x.replace('-', '_')
+ if y in kw:
+ if not 'package' in kw:
+ raise ValueError('%s requires a package' % x)
+
+ if not 'msg' in kw:
+ kw['msg'] = 'Checking for %r %s %s' % (kw['package'], cfg_ver[x], kw[y])
+ return
+
+ if not 'define_name' in kw:
+ pkgname = kw.get('uselib_store', kw['package'].upper())
+ kw['define_name'] = self.have_define(pkgname)
+
+ if not 'uselib_store' in kw:
+ self.undefine(kw['define_name'])
+
+ if not 'msg' in kw:
+ kw['msg'] = 'Checking for %r' % (kw['package'] or kw['path'])
@conf
def exec_cfg(self, kw):
"""
- Executes ``pkg-config`` or other ``-config`` applications to collect configuration flags:
+ Execute the program *pkg-config*:
* if atleast_pkgconfig_version is given, check that pkg-config has the version n and return
* if modversion is given, then return the module version
@@ -264,39 +289,42 @@ def exec_cfg(self, kw):
path = Utils.to_list(kw['path'])
env = self.env.env or None
- if kw.get('pkg_config_path'):
- if not env:
- env = dict(self.environ)
- env['PKG_CONFIG_PATH'] = kw['pkg_config_path']
-
def define_it():
- define_name = kw['define_name']
- # by default, add HAVE_X to the config.h, else provide DEFINES_X for use=X
- if kw.get('global_define', 1):
- self.define(define_name, 1, False)
+ pkgname = kw.get('uselib_store', kw['package'].upper())
+ if kw.get('global_define'):
+ # compatibility, replace by pkgname in WAF 1.9?
+ self.define(self.have_define(kw['package']), 1, False)
else:
- self.env.append_unique('DEFINES_%s' % kw['uselib_store'], "%s=1" % define_name)
-
- if kw.get('add_have_to_env', 1):
- self.env[define_name] = 1
+ self.env.append_unique('DEFINES_%s' % pkgname, "%s=1" % self.have_define(pkgname))
+ self.env[self.have_define(pkgname)] = 1
# pkg-config version
if 'atleast_pkgconfig_version' in kw:
cmd = path + ['--atleast-pkgconfig-version=%s' % kw['atleast_pkgconfig_version']]
self.cmd_and_log(cmd, env=env)
+ if not 'okmsg' in kw:
+ kw['okmsg'] = 'yes'
return
- # single version for a module
+ # checking for the version of a module
+ for x in cfg_ver:
+ y = x.replace('-', '_')
+ if y in kw:
+ self.cmd_and_log(path + ['--%s=%s' % (x, kw[y]), kw['package']], env=env)
+ if not 'okmsg' in kw:
+ kw['okmsg'] = 'yes'
+ define_it()
+ break
+
+ # retrieving the version of a module
if 'modversion' in kw:
version = self.cmd_and_log(path + ['--modversion', kw['modversion']], env=env).strip()
- if not 'okmsg' in kw:
- kw['okmsg'] = version
- self.define(kw['define_name'], version)
+ self.define('%s_VERSION' % Utils.quote_define_name(kw.get('uselib_store', kw['modversion'])), version)
return version
lst = [] + path
- defi = kw.get('define_variable')
+ defi = kw.get('define_variable', None)
if not defi:
defi = self.env.PKG_CONFIG_DEFINES or {}
for key, val in defi.items():
@@ -314,32 +342,39 @@ def exec_cfg(self, kw):
# retrieving variables of a module
if 'variables' in kw:
- v_env = kw.get('env', self.env)
+ v = kw.get('env', self.env)
+ uselib = kw.get('uselib_store', kw['package'].upper())
vars = Utils.to_list(kw['variables'])
for v in vars:
val = self.cmd_and_log(lst + ['--variable=' + v], env=env).strip()
- var = '%s_%s' % (kw['uselib_store'], v)
- v_env[var] = val
+ var = '%s_%s' % (uselib, v)
+ v[var] = val
+ if not 'okmsg' in kw:
+ kw['okmsg'] = 'yes'
return
# so we assume the command-line will output flags to be parsed afterwards
ret = self.cmd_and_log(lst, env=env)
+ if not 'okmsg' in kw:
+ kw['okmsg'] = 'yes'
define_it()
- self.parse_flags(ret, kw['uselib_store'], kw.get('env', self.env), force_static=static, posix=kw.get('posix'))
+ self.parse_flags(ret, kw.get('uselib_store', kw['package'].upper()), kw.get('env', self.env), force_static=static, posix=kw.get('posix', None))
return ret
@conf
def check_cfg(self, *k, **kw):
"""
- Checks for configuration flags using a **-config**-like program (pkg-config, sdl-config, etc).
- This wraps internal calls to :py:func:`waflib.Tools.c_config.validate_cfg` and :py:func:`waflib.Tools.c_config.exec_cfg`
+ Check for configuration flags using a **-config**-like program (pkg-config, sdl-config, etc).
+ Encapsulate the calls to :py:func:`waflib.Tools.c_config.validate_cfg` and :py:func:`waflib.Tools.c_config.exec_cfg`
A few examples::
def configure(conf):
conf.load('compiler_c')
conf.check_cfg(package='glib-2.0', args='--libs --cflags')
+ conf.check_cfg(package='glib-2.0', uselib_store='GLIB', atleast_version='2.10.0',
+ args='--cflags --libs')
conf.check_cfg(package='pango')
conf.check_cfg(package='pango', uselib_store='MYPANGO', args=['--cflags', '--libs'])
conf.check_cfg(package='pango',
@@ -352,18 +387,24 @@ def check_cfg(self, *k, **kw):
conf.check_cfg(package='gtk+-2.0', variables=['includedir', 'prefix'], uselib_store='FOO')
print(conf.env.FOO_includedir)
"""
+ if k:
+ lst = k[0].split()
+ kw['package'] = lst[0]
+ kw['args'] = ' '.join(lst[1:])
+
self.validate_cfg(kw)
if 'msg' in kw:
self.start_msg(kw['msg'], **kw)
ret = None
try:
ret = self.exec_cfg(kw)
- except self.errors.WafError as e:
+ except self.errors.WafError:
if 'errmsg' in kw:
self.end_msg(kw['errmsg'], 'YELLOW', **kw)
if Logs.verbose > 1:
- self.to_log('Command failure: %s' % e)
- self.fatal('The configuration failed')
+ raise
+ else:
+ self.fatal('The configuration failed')
else:
if not ret:
ret = True
@@ -374,9 +415,6 @@ def check_cfg(self, *k, **kw):
return ret
def build_fun(bld):
- """
- Build function that is used for running configuration tests with ``conf.check()``
- """
if bld.kw['compile_filename']:
node = bld.srcnode.make_node(bld.kw['compile_filename'])
node.write(bld.kw['code'])
@@ -386,13 +424,13 @@ def build_fun(bld):
for k, v in bld.kw.items():
setattr(o, k, v)
- if not bld.kw.get('quiet'):
+ if not bld.kw.get('quiet', None):
bld.conf.to_log("==>\n%s\n<==" % bld.kw['code'])
@conf
def validate_c(self, kw):
"""
- Pre-checks the parameters that will be given to :py:func:`waflib.Configure.run_build`
+ pre-check the parameters that will be given to :py:func:`waflib.Configure.run_build`
:param compiler: c or cxx (tries to guess what is best)
:type compiler: string
@@ -417,9 +455,6 @@ def validate_c(self, kw):
:param auto_add_header_name: if header_name was set, add the headers in env.INCKEYS so the next tests will include these headers
:type auto_add_header_name: bool
"""
- for x in ('type_name', 'field_name', 'function_name'):
- if x in kw:
- Logs.warn('Invalid argument %r in test' % x)
if not 'build_fun' in kw:
kw['build_fun'] = build_fun
@@ -430,17 +465,17 @@ def validate_c(self, kw):
if not 'compiler' in kw and not 'features' in kw:
kw['compiler'] = 'c'
- if env.CXX_NAME and Task.classes.get('cxx'):
+ if env['CXX_NAME'] and Task.classes.get('cxx', None):
kw['compiler'] = 'cxx'
- if not self.env.CXX:
+ if not self.env['CXX']:
self.fatal('a c++ compiler is required')
else:
- if not self.env.CC:
+ if not self.env['CC']:
self.fatal('a c compiler is required')
if not 'compile_mode' in kw:
kw['compile_mode'] = 'c'
- if 'cxx' in Utils.to_list(kw.get('features', [])) or kw.get('compiler') == 'cxx':
+ if 'cxx' in Utils.to_list(kw.get('features',[])) or kw.get('compiler', '') == 'cxx':
kw['compile_mode'] = 'cxx'
if not 'type' in kw:
@@ -457,36 +492,71 @@ def validate_c(self, kw):
if not 'compile_filename' in kw:
kw['compile_filename'] = 'test.c' + ((kw['compile_mode'] == 'cxx') and 'pp' or '')
+
def to_header(dct):
if 'header_name' in dct:
dct = Utils.to_list(dct['header_name'])
return ''.join(['#include <%s>\n' % x for x in dct])
return ''
+ #OSX
if 'framework_name' in kw:
- # OSX, not sure this is used anywhere
fwkname = kw['framework_name']
if not 'uselib_store' in kw:
kw['uselib_store'] = fwkname.upper()
- if not kw.get('no_header'):
+
+ if not kw.get('no_header', False):
+ if not 'header_name' in kw:
+ kw['header_name'] = []
fwk = '%s/%s.h' % (fwkname, fwkname)
- if kw.get('remove_dot_h'):
+ if kw.get('remove_dot_h', None):
fwk = fwk[:-2]
- val = kw.get('header_name', [])
- kw['header_name'] = Utils.to_list(val) + [fwk]
+ kw['header_name'] = Utils.to_list(kw['header_name']) + [fwk]
+
kw['msg'] = 'Checking for framework %s' % fwkname
kw['framework'] = fwkname
+ #kw['frameworkpath'] = set it yourself
+
+ if 'function_name' in kw:
+ fu = kw['function_name']
+ if not 'msg' in kw:
+ kw['msg'] = 'Checking for function %s' % fu
+ kw['code'] = to_header(kw) + SNIP_FUNCTION % fu
+ if not 'uselib_store' in kw:
+ kw['uselib_store'] = fu.upper()
+ if not 'define_name' in kw:
+ kw['define_name'] = self.have_define(fu)
+
+ elif 'type_name' in kw:
+ tu = kw['type_name']
+ if not 'header_name' in kw:
+ kw['header_name'] = 'stdint.h'
+ if 'field_name' in kw:
+ field = kw['field_name']
+ kw['code'] = to_header(kw) + SNIP_FIELD % {'type_name' : tu, 'field_name' : field}
+ if not 'msg' in kw:
+ kw['msg'] = 'Checking for field %s in %s' % (field, tu)
+ if not 'define_name' in kw:
+ kw['define_name'] = self.have_define((tu + '_' + field).upper())
+ else:
+ kw['code'] = to_header(kw) + SNIP_TYPE % {'type_name' : tu}
+ if not 'msg' in kw:
+ kw['msg'] = 'Checking for type %s' % tu
+ if not 'define_name' in kw:
+ kw['define_name'] = self.have_define(tu.upper())
elif 'header_name' in kw:
if not 'msg' in kw:
kw['msg'] = 'Checking for header %s' % kw['header_name']
l = Utils.to_list(kw['header_name'])
- assert len(l), 'list of headers in header_name is empty'
+ assert len(l)>0, 'list of headers in header_name is empty'
kw['code'] = to_header(kw) + SNIP_EMPTY_PROGRAM
+
if not 'uselib_store' in kw:
kw['uselib_store'] = l[0].upper()
+
if not 'define_name' in kw:
kw['define_name'] = self.have_define(l[0])
@@ -522,7 +592,7 @@ def validate_c(self, kw):
kw['execute'] = False
if kw['execute']:
kw['features'].append('test_exec')
- kw['chmod'] = Utils.O755
+ kw['chmod'] = 493
if not 'errmsg' in kw:
kw['errmsg'] = 'not found'
@@ -538,12 +608,11 @@ def validate_c(self, kw):
kw['code'] = '\n'.join(['#include <%s>' % x for x in self.env[INCKEYS]]) + '\n' + kw['code']
# in case defines lead to very long command-lines
- if kw.get('merge_config_header') or env.merge_config_header:
+ if kw.get('merge_config_header', False) or env.merge_config_header:
kw['code'] = '%s\n\n%s' % (self.get_config_header(), kw['code'])
env.DEFINES = [] # modify the copy
- if not kw.get('success'):
- kw['success'] = None
+ if not kw.get('success'): kw['success'] = None
if 'define_name' in kw:
self.undefine(kw['define_name'])
@@ -552,76 +621,63 @@ def validate_c(self, kw):
@conf
def post_check(self, *k, **kw):
- """
- Sets the variables after a test executed in
- :py:func:`waflib.Tools.c_config.check` was run successfully
- """
+ "Set the variables after a test executed in :py:func:`waflib.Tools.c_config.check` was run successfully"
+
is_success = 0
if kw['execute']:
if kw['success'] is not None:
- if kw.get('define_ret'):
+ if kw.get('define_ret', False):
is_success = kw['success']
else:
is_success = (kw['success'] == 0)
else:
is_success = (kw['success'] == 0)
- if kw.get('define_name'):
- comment = kw.get('comment', '')
- define_name = kw['define_name']
- if kw['execute'] and kw.get('define_ret') and isinstance(is_success, str):
- if kw.get('global_define', 1):
- self.define(define_name, is_success, quote=kw.get('quote', 1), comment=comment)
+ if 'define_name' in kw:
+ # TODO simplify!
+ if 'header_name' in kw or 'function_name' in kw or 'type_name' in kw or 'fragment' in kw:
+ if kw['execute'] and kw.get('define_ret', None) and isinstance(is_success, str):
+ self.define(kw['define_name'], is_success, quote=kw.get('quote', 1))
else:
- if kw.get('quote', 1):
- succ = '"%s"' % is_success
- else:
- succ = int(is_success)
- val = '%s=%s' % (define_name, succ)
- var = 'DEFINES_%s' % kw['uselib_store']
- self.env.append_value(var, val)
+ self.define_cond(kw['define_name'], is_success)
else:
- if kw.get('global_define', 1):
- self.define_cond(define_name, is_success, comment=comment)
- else:
- var = 'DEFINES_%s' % kw['uselib_store']
- self.env.append_value(var, '%s=%s' % (define_name, int(is_success)))
-
- # define conf.env.HAVE_X to 1
- if kw.get('add_have_to_env', 1):
- if kw.get('uselib_store'):
- self.env[self.have_define(kw['uselib_store'])] = 1
- elif kw['execute'] and kw.get('define_ret'):
- self.env[define_name] = is_success
- else:
- self.env[define_name] = int(is_success)
+ self.define_cond(kw['define_name'], is_success)
+
+ # consistency with check_cfg
+ if kw.get('global_define', None):
+ self.env[kw['define_name']] = is_success
if 'header_name' in kw:
- if kw.get('auto_add_header_name'):
+ if kw.get('auto_add_header_name', False):
self.env.append_value(INCKEYS, Utils.to_list(kw['header_name']))
if is_success and 'uselib_store' in kw:
from waflib.Tools import ccroot
- # See get_uselib_vars in ccroot.py
- _vars = set()
+
+ # TODO see get_uselib_vars from ccroot.py
+ _vars = set([])
for x in kw['features']:
if x in ccroot.USELIB_VARS:
_vars |= ccroot.USELIB_VARS[x]
for k in _vars:
- x = k.lower()
- if x in kw:
- self.env.append_value(k + '_' + kw['uselib_store'], kw[x])
+ lk = k.lower()
+ if lk in kw:
+ val = kw[lk]
+ # remove trailing slash
+ if isinstance(val, str):
+ val = val.rstrip(os.path.sep)
+ self.env.append_unique(k + '_' + kw['uselib_store'], Utils.to_list(val))
return is_success
@conf
def check(self, *k, **kw):
"""
- Performs a configuration test by calling :py:func:`waflib.Configure.run_build`.
+ Perform a configuration test by calling :py:func:`waflib.Configure.run_build`.
For the complete list of parameters, see :py:func:`waflib.Tools.c_config.validate_c`.
- To force a specific compiler, pass ``compiler='c'`` or ``compiler='cxx'`` to the list of arguments
+ To force a specific compiler, pass "compiler='c'" or "compiler='cxx'" in the arguments
- Besides build targets, complete builds can be given through a build function. All files will
+ Besides build targets, complete builds can be given though a build function. All files will
be written to a temporary directory::
def build(bld):
@@ -655,7 +711,7 @@ def check(self, *k, **kw):
class test_exec(Task.Task):
"""
- A task that runs programs after they are built. See :py:func:`waflib.Tools.c_config.test_exec_fun`.
+ A task for executing a programs after they are built. See :py:func:`waflib.Tools.c_config.test_exec_fun`.
"""
color = 'PINK'
def run(self):
@@ -690,51 +746,20 @@ def test_exec_fun(self):
@conf
def check_cxx(self, *k, **kw):
- """
- Runs a test with a task generator of the form::
-
- conf.check(features='cxx cxxprogram', ...)
- """
+ # DO NOT USE
kw['compiler'] = 'cxx'
return self.check(*k, **kw)
@conf
def check_cc(self, *k, **kw):
- """
- Runs a test with a task generator of the form::
-
- conf.check(features='c cprogram', ...)
- """
+ # DO NOT USE
kw['compiler'] = 'c'
return self.check(*k, **kw)
@conf
-def set_define_comment(self, key, comment):
+def define(self, key, val, quote=True):
"""
- Sets a comment that will appear in the configuration header
-
- :type key: string
- :type comment: string
- """
- coms = self.env.DEFINE_COMMENTS
- if not coms:
- coms = self.env.DEFINE_COMMENTS = {}
- coms[key] = comment or ''
-
-@conf
-def get_define_comment(self, key):
- """
- Returns the comment associated to a define
-
- :type key: string
- """
- coms = self.env.DEFINE_COMMENTS or {}
- return coms.get(key, '')
-
-@conf
-def define(self, key, val, quote=True, comment=''):
- """
- Stores a single define and its state into ``conf.env.DEFINES``. The value is cast to an integer (0/1).
+ Store a single define and its state into conf.env.DEFINES. If the value is True, False or None it is cast to 1 or 0.
:param key: define name
:type key: string
@@ -743,9 +768,8 @@ def define(self, key, val, quote=True, comment=''):
:param quote: enclose strings in quotes (yes by default)
:type quote: bool
"""
- assert isinstance(key, str)
- if not key:
- return
+ assert key and isinstance(key, str)
+
if val is True:
val = 1
elif val in (False, None):
@@ -758,7 +782,7 @@ def define(self, key, val, quote=True, comment=''):
app = s % (key, str(val))
ban = key + '='
- lst = self.env.DEFINES
+ lst = self.env['DEFINES']
for x in lst:
if x.startswith(ban):
lst[lst.index(x)] = app
@@ -767,29 +791,26 @@ def define(self, key, val, quote=True, comment=''):
self.env.append_value('DEFINES', app)
self.env.append_unique(DEFKEYS, key)
- self.set_define_comment(key, comment)
@conf
-def undefine(self, key, comment=''):
+def undefine(self, key):
"""
- Removes a global define from ``conf.env.DEFINES``
+ Remove a define from conf.env.DEFINES
:param key: define name
:type key: string
"""
- assert isinstance(key, str)
- if not key:
- return
+ assert key and isinstance(key, str)
+
ban = key + '='
- lst = [x for x in self.env.DEFINES if not x.startswith(ban)]
- self.env.DEFINES = lst
+ lst = [x for x in self.env['DEFINES'] if not x.startswith(ban)]
+ self.env['DEFINES'] = lst
self.env.append_unique(DEFKEYS, key)
- self.set_define_comment(key, comment)
@conf
-def define_cond(self, key, val, comment=''):
+def define_cond(self, key, val):
"""
- Conditionally defines a name::
+ Conditionally define a name::
def configure(conf):
conf.define_cond('A', True)
@@ -802,19 +823,16 @@ def define_cond(self, key, val, comment=''):
:param val: value
:type val: int or string
"""
- assert isinstance(key, str)
- if not key:
- return
+ assert key and isinstance(key, str)
+
if val:
- self.define(key, 1, comment=comment)
+ self.define(key, 1)
else:
- self.undefine(key, comment=comment)
+ self.undefine(key)
@conf
def is_defined(self, key):
"""
- Indicates whether a particular define is globally set in ``conf.env.DEFINES``.
-
:param key: define name
:type key: string
:return: True if the define is set
@@ -823,7 +841,7 @@ def is_defined(self, key):
assert key and isinstance(key, str)
ban = key + '='
- for x in self.env.DEFINES:
+ for x in self.env['DEFINES']:
if x.startswith(ban):
return True
return False
@@ -831,16 +849,14 @@ def is_defined(self, key):
@conf
def get_define(self, key):
"""
- Returns the value of an existing define, or None if not found
-
:param key: define name
:type key: string
- :rtype: string
+ :return: the value of a previously stored define or None if it is not set
"""
assert key and isinstance(key, str)
ban = key + '='
- for x in self.env.DEFINES:
+ for x in self.env['DEFINES']:
if x.startswith(ban):
return x[len(ban):]
return None
@@ -848,9 +864,6 @@ def get_define(self, key):
@conf
def have_define(self, key):
"""
- Returns a variable suitable for command-line or header use by removing invalid characters
- and prefixing it with ``HAVE_``
-
:param key: define name
:type key: string
:return: the input key prefixed by *HAVE_* and substitute any invalid characters.
@@ -861,7 +874,7 @@ def have_define(self, key):
@conf
def write_config_header(self, configfile='', guard='', top=False, defines=True, headers=False, remove=True, define_prefix=''):
"""
- Writes a configuration header containing defines and includes::
+ Write a configuration header containing defines and includes::
def configure(cnf):
cnf.define('A', 1)
@@ -885,8 +898,7 @@ def write_config_header(self, configfile='', guard='', top=False, defines=True,
:type define_prefix: string
:param define_prefix: prefix all the defines in the file with a particular prefix
"""
- if not configfile:
- configfile = WAF_CONFIG_H
+ if not configfile: configfile = WAF_CONFIG_H
waf_guard = guard or 'W_%s_WAF' % Utils.quote_define_name(configfile)
node = top and self.bldnode or self.path.get_bld()
@@ -911,7 +923,7 @@ def write_config_header(self, configfile='', guard='', top=False, defines=True,
@conf
def get_config_header(self, defines=True, headers=False, define_prefix=''):
"""
- Creates the contents of a ``config.h`` file from the defines and includes
+ Create the contents of a ``config.h`` file from the defines and includes
set in conf.env.define_key / conf.env.include_key. No include guards are added.
A prelude will be added from the variable env.WAF_CONFIG_H_PRELUDE if provided. This
@@ -941,25 +953,22 @@ def get_config_header(self, defines=True, headers=False, define_prefix=''):
if defines:
tbl = {}
- for k in self.env.DEFINES:
+ for k in self.env['DEFINES']:
a, _, b = k.partition('=')
tbl[a] = b
for k in self.env[DEFKEYS]:
- caption = self.get_define_comment(k)
- if caption:
- caption = ' /* %s */' % caption
try:
- txt = '#define %s%s %s%s' % (define_prefix, k, tbl[k], caption)
+ txt = '#define %s%s %s' % (define_prefix, k, tbl[k])
except KeyError:
- txt = '/* #undef %s%s */%s' % (define_prefix, k, caption)
+ txt = '/* #undef %s%s */' % (define_prefix, k)
lst.append(txt)
return "\n".join(lst)
@conf
def cc_add_flags(conf):
"""
- Adds CFLAGS / CPPFLAGS from os.environ to conf.env
+ Add CFLAGS / CPPFLAGS from os.environ to conf.env
"""
conf.add_os_flags('CPPFLAGS', dup=False)
conf.add_os_flags('CFLAGS', dup=False)
@@ -967,7 +976,7 @@ def cc_add_flags(conf):
@conf
def cxx_add_flags(conf):
"""
- Adds CXXFLAGS / CPPFLAGS from os.environ to conf.env
+ Add CXXFLAGS / CPPFLAGS from os.environ to conf.env
"""
conf.add_os_flags('CPPFLAGS', dup=False)
conf.add_os_flags('CXXFLAGS', dup=False)
@@ -975,7 +984,7 @@ def cxx_add_flags(conf):
@conf
def link_add_flags(conf):
"""
- Adds LINKFLAGS / LDFLAGS from os.environ to conf.env
+ Add LINKFLAGS / LDFLAGS from os.environ to conf.env
"""
conf.add_os_flags('LINKFLAGS', dup=False)
conf.add_os_flags('LDFLAGS', dup=False)
@@ -983,7 +992,7 @@ def link_add_flags(conf):
@conf
def cc_load_tools(conf):
"""
- Loads the Waf c extensions
+ Load the c tool
"""
if not conf.env.DEST_OS:
conf.env.DEST_OS = Utils.unversioned_sys_platform()
@@ -992,7 +1001,7 @@ def cc_load_tools(conf):
@conf
def cxx_load_tools(conf):
"""
- Loads the Waf c++ extensions
+ Load the cxx tool
"""
if not conf.env.DEST_OS:
conf.env.DEST_OS = Utils.unversioned_sys_platform()
@@ -1001,17 +1010,15 @@ def cxx_load_tools(conf):
@conf
def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
"""
- Runs the preprocessor to determine the gcc/icc/clang version
+ Run the preprocessor to determine the compiler version
The variables CC_VERSION, DEST_OS, DEST_BINFMT and DEST_CPU will be set in *conf.env*
-
- :raise: :py:class:`waflib.Errors.ConfigurationError`
"""
cmd = cc + ['-dM', '-E', '-']
env = conf.env.env or None
try:
out, err = conf.cmd_and_log(cmd, output=0, input='\n'.encode(), env=env)
- except Errors.WafError:
+ except Exception:
conf.fatal('Could not determine the compiler version %r' % cmd)
if gcc:
@@ -1059,8 +1066,6 @@ def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
conf.env.DEST_BINFMT = 'elf'
elif isD('__WINNT__') or isD('__CYGWIN__') or isD('_WIN32'):
conf.env.DEST_BINFMT = 'pe'
- if not conf.env.IMPLIBDIR:
- conf.env.IMPLIBDIR = conf.env.LIBDIR # for .lib or .dll.a files
conf.env.LIBDIR = conf.env.BINDIR
elif isD('__APPLE__'):
conf.env.DEST_BINFMT = 'mac-o'
@@ -1077,22 +1082,19 @@ def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
Logs.debug('ccroot: dest platform: ' + ' '.join([conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')]))
if icc:
ver = k['__INTEL_COMPILER']
- conf.env.CC_VERSION = (ver[:-2], ver[-2], ver[-1])
+ conf.env['CC_VERSION'] = (ver[:-2], ver[-2], ver[-1])
else:
if isD('__clang__') and isD('__clang_major__'):
- conf.env.CC_VERSION = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
+ conf.env['CC_VERSION'] = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
else:
# older clang versions and gcc
- conf.env.CC_VERSION = (k['__GNUC__'], k['__GNUC_MINOR__'], k.get('__GNUC_PATCHLEVEL__', '0'))
+ conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k.get('__GNUC_PATCHLEVEL__', '0'))
return k
@conf
def get_xlc_version(conf, cc):
- """
- Returns the Aix compiler version
+ """Get the compiler version"""
- :raise: :py:class:`waflib.Errors.ConfigurationError`
- """
cmd = cc + ['-qversion']
try:
out, err = conf.cmd_and_log(cmd, output=0)
@@ -1105,18 +1107,15 @@ def get_xlc_version(conf, cc):
match = version_re(out or err)
if match:
k = match.groupdict()
- conf.env.CC_VERSION = (k['major'], k['minor'])
+ conf.env['CC_VERSION'] = (k['major'], k['minor'])
break
else:
conf.fatal('Could not determine the XLC version.')
@conf
def get_suncc_version(conf, cc):
- """
- Returns the Sun compiler version
+ """Get the compiler version"""
- :raise: :py:class:`waflib.Errors.ConfigurationError`
- """
cmd = cc + ['-V']
try:
out, err = conf.cmd_and_log(cmd, output=0)
@@ -1130,14 +1129,11 @@ def get_suncc_version(conf, cc):
version = (out or err)
version = version.splitlines()[0]
- # cc: Sun C 5.10 SunOS_i386 2009/06/03
- # cc: Studio 12.5 Sun C++ 5.14 SunOS_sparc Beta 2015/11/17
- # cc: WorkShop Compilers 5.0 98/12/15 C 5.0
- version_re = re.compile(r'cc: (studio.*?|\s+)?(sun\s+(c\+\+|c)|(WorkShop\s+Compilers))?\s+(?P<major>\d*)\.(?P<minor>\d*)', re.I).search
+ version_re = re.compile(r'cc:\s+sun\s+(c\+\+|c)\s+(?P<major>\d*)\.(?P<minor>\d*)', re.I).search
match = version_re(version)
if match:
k = match.groupdict()
- conf.env.CC_VERSION = (k['major'], k['minor'])
+ conf.env['CC_VERSION'] = (k['major'], k['minor'])
else:
conf.fatal('Could not determine the suncc version.')
@@ -1146,7 +1142,7 @@ def get_suncc_version(conf, cc):
@conf
def add_as_needed(self):
"""
- Adds ``--as-needed`` to the *LINKFLAGS*
+ Add ``--as-needed`` to the *LINKFLAGS*
On some platforms, it is a default flag. In some cases (e.g., in NS-3) it is necessary to explicitly disable this feature with `-Wl,--no-as-needed` flag.
"""
if self.env.DEST_BINFMT == 'elf' and 'gcc' in (self.env.CXX_NAME, self.env.CC_NAME):
@@ -1154,31 +1150,22 @@ def add_as_needed(self):
# ============ parallel configuration
-class cfgtask(Task.Task):
+class cfgtask(Task.TaskBase):
"""
- A task that executes build configuration tests (calls conf.check)
+ A task that executes configuration tests
+ make sure that the checks write to conf.env in a thread-safe manner
- Make sure to use locks if concurrent access to the same conf.env data is necessary.
+ for the moment it only executes conf.check
"""
- def __init__(self, *k, **kw):
- Task.Task.__init__(self, *k, **kw)
- self.run_after = set()
-
def display(self):
return ''
def runnable_status(self):
- for x in self.run_after:
- if not x.hasrun:
- return Task.ASK_LATER
return Task.RUN_ME
def uid(self):
return Utils.SIG_NIL
- def signature(self):
- return Utils.SIG_NIL
-
def run(self):
conf = self.conf
bld = Build.BuildContext(top_dir=conf.srcnode.abspath(), out_dir=conf.bldnode.abspath())
@@ -1186,73 +1173,22 @@ class cfgtask(Task.Task):
bld.init_dirs()
bld.in_msg = 1 # suppress top-level start_msg
bld.logger = self.logger
- bld.multicheck_task = self
- args = self.args
try:
- if 'func' in args:
- bld.test(build_fun=args['func'],
- msg=args.get('msg', ''),
- okmsg=args.get('okmsg', ''),
- errmsg=args.get('errmsg', ''),
- )
- else:
- args['multicheck_mandatory'] = args.get('mandatory', True)
- args['mandatory'] = True
- try:
- bld.check(**args)
- finally:
- args['mandatory'] = args['multicheck_mandatory']
+ bld.check(**self.args)
except Exception:
return 1
- def process(self):
- Task.Task.process(self)
- if 'msg' in self.args:
- with self.generator.bld.multicheck_lock:
- self.conf.start_msg(self.args['msg'])
- if self.hasrun == Task.NOT_RUN:
- self.conf.end_msg('test cancelled', 'YELLOW')
- elif self.hasrun != Task.SUCCESS:
- self.conf.end_msg(self.args.get('errmsg', 'no'), 'YELLOW')
- else:
- self.conf.end_msg(self.args.get('okmsg', 'yes'), 'GREEN')
-
@conf
def multicheck(self, *k, **kw):
"""
- Runs configuration tests in parallel; results are printed sequentially at the end of the build
- but each test must provide its own msg value to display a line::
-
- def test_build(ctx):
- ctx.in_msg = True # suppress console outputs
- ctx.check_large_file(mandatory=False)
-
- conf.multicheck(
- {'header_name':'stdio.h', 'msg':'... stdio', 'uselib_store':'STDIO', 'global_define':False},
- {'header_name':'xyztabcd.h', 'msg':'... optional xyztabcd.h', 'mandatory': False},
- {'header_name':'stdlib.h', 'msg':'... stdlib', 'okmsg': 'aye', 'errmsg': 'nope'},
- {'func': test_build, 'msg':'... testing an arbitrary build function', 'okmsg':'ok'},
- msg = 'Checking for headers in parallel',
- mandatory = True, # mandatory tests raise an error at the end
- run_all_tests = True, # try running all tests
- )
-
- The configuration tests may modify the values in conf.env in any order, and the define
- values can affect configuration tests being executed. It is hence recommended
- to provide `uselib_store` values with `global_define=False` to prevent such issues.
+ Use tuples to perform parallel configuration tests
"""
self.start_msg(kw.get('msg', 'Executing %d configuration tests' % len(k)), **kw)
- # Force a copy so that threads append to the same list at least
- # no order is guaranteed, but the values should not disappear at least
- for var in ('DEFINES', DEFKEYS):
- self.env.append_value(var, [])
- self.env.DEFINE_COMMENTS = self.env.DEFINE_COMMENTS or {}
-
- # define a task object that will execute our tests
class par(object):
def __init__(self):
self.keep = False
+ self.returned_tasks = []
self.task_sigs = {}
self.progress_bar = 0
def total(self):
@@ -1261,13 +1197,9 @@ def multicheck(self, *k, **kw):
return
bld = par()
- bld.keep = kw.get('run_all_tests', True)
- bld.imp_sigs = {}
tasks = []
-
- id_to_task = {}
for dct in k:
- x = Task.classes['cfgtask'](bld=bld, env=None)
+ x = cfgtask(bld=bld)
tasks.append(x)
x.args = dct
x.bld = bld
@@ -1277,38 +1209,18 @@ def multicheck(self, *k, **kw):
# bind a logger that will keep the info in memory
x.logger = Logs.make_mem_logger(str(id(x)), self.logger)
- if 'id' in dct:
- id_to_task[dct['id']] = x
-
- # second pass to set dependencies with after_test/before_test
- for x in tasks:
- for key in Utils.to_list(x.args.get('before_tests', [])):
- tsk = id_to_task[key]
- if not tsk:
- raise ValueError('No test named %r' % key)
- tsk.run_after.add(x)
- for key in Utils.to_list(x.args.get('after_tests', [])):
- tsk = id_to_task[key]
- if not tsk:
- raise ValueError('No test named %r' % key)
- x.run_after.add(tsk)
-
def it():
yield tasks
while 1:
yield []
- bld.producer = p = Runner.Parallel(bld, Options.options.jobs)
- bld.multicheck_lock = Utils.threading.Lock()
+ p = Runner.Parallel(bld, Options.options.jobs)
p.biter = it()
-
- self.end_msg('started')
p.start()
# flush the logs in order into the config.log
for x in tasks:
x.logger.memhandler.flush()
- self.start_msg('-> processing test results')
if p.error:
for x in p.error:
if getattr(x, 'err_msg', None):
@@ -1316,36 +1228,10 @@ def multicheck(self, *k, **kw):
self.end_msg('fail', color='RED')
raise Errors.WafError('There is an error in the library, read config.log for more information')
- failure_count = 0
- for x in tasks:
- if x.hasrun not in (Task.SUCCESS, Task.NOT_RUN):
- failure_count += 1
-
- if failure_count:
- self.end_msg(kw.get('errmsg', '%s test failed' % failure_count), color='YELLOW', **kw)
- else:
- self.end_msg('all ok', **kw)
-
for x in tasks:
if x.hasrun != Task.SUCCESS:
- if x.args.get('mandatory', True):
- self.fatal(kw.get('fatalmsg') or 'One of the tests has failed, read config.log for more information')
+ self.end_msg(kw.get('errmsg', 'no'), color='YELLOW', **kw)
+ self.fatal(kw.get('fatalmsg', None) or 'One of the tests has failed, read config.log for more information')
-@conf
-def check_gcc_o_space(self, mode='c'):
- if int(self.env.CC_VERSION[0]) > 4:
- # this is for old compilers
- return
- self.env.stash()
- if mode == 'c':
- self.env.CCLNK_TGT_F = ['-o', '']
- elif mode == 'cxx':
- self.env.CXXLNK_TGT_F = ['-o', '']
- features = '%s %sshlib' % (mode, mode)
- try:
- self.check(msg='Checking if the -o link must be split from arguments', fragment=SNIP_EMPTY_PROGRAM, features=features)
- except self.errors.ConfigurationError:
- self.env.revert()
- else:
- self.env.commit()
+ self.end_msg('ok', **kw)
diff --git a/waflib/Tools/c_osx.py b/waflib/Tools/c_osx.py
index f70b128b..4337cc68 100644
--- a/waflib/Tools/c_osx.py
+++ b/waflib/Tools/c_osx.py
@@ -1,13 +1,13 @@
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy 2008-2018 (ita)
+# Thomas Nagy 2008-2010
"""
MacOSX related tools
"""
import os, shutil, platform
-from waflib import Task, Utils
+from waflib import Task, Utils, Errors
from waflib.TaskGen import taskgen_method, feature, after_method, before_method
app_info = '''
@@ -37,8 +37,8 @@ def set_macosx_deployment_target(self):
"""
see WAF issue 285 and also and also http://trac.macports.org/ticket/17059
"""
- if self.env.MACOSX_DEPLOYMENT_TARGET:
- os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env.MACOSX_DEPLOYMENT_TARGET
+ if self.env['MACOSX_DEPLOYMENT_TARGET']:
+ os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env['MACOSX_DEPLOYMENT_TARGET']
elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ:
if Utils.unversioned_sys_platform() == 'darwin':
os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2])
@@ -46,7 +46,7 @@ def set_macosx_deployment_target(self):
@taskgen_method
def create_bundle_dirs(self, name, out):
"""
- Creates bundle folders, used by :py:func:`create_task_macplist` and :py:func:`create_task_macapp`
+ Create bundle folders, used by :py:func:`create_task_macplist` and :py:func:`create_task_macapp`
"""
dir = out.parent.find_or_declare(name)
dir.mkdir()
@@ -78,7 +78,7 @@ def create_task_macapp(self):
bld.env.MACAPP = True
bld.shlib(source='a.c', target='foo')
"""
- if self.env.MACAPP or getattr(self, 'mac_app', False):
+ if self.env['MACAPP'] or getattr(self, 'mac_app', False):
out = self.link_task.outputs[0]
name = bundle_name_for_output(out)
@@ -88,7 +88,7 @@ def create_task_macapp(self):
self.apptask = self.create_task('macapp', self.link_task.outputs, n1)
inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/MacOS/' % name
- self.add_install_files(install_to=inst_to, install_from=n1, chmod=Utils.O755)
+ self.bld.install_files(inst_to, n1, chmod=Utils.O755)
if getattr(self, 'mac_files', None):
# this only accepts files; they will be installed as seen from mac_files_root
@@ -102,19 +102,38 @@ def create_task_macapp(self):
for node in self.to_nodes(self.mac_files):
relpath = node.path_from(mac_files_root or node.parent)
self.create_task('macapp', node, res_dir.make_node(relpath))
- self.add_install_as(install_to=os.path.join(inst_to, relpath), install_from=node)
+ self.bld.install_as(os.path.join(inst_to, relpath), node)
+
+ if getattr(self, 'mac_resources', None):
+ # TODO remove in waf 1.9
+ res_dir = n1.parent.parent.make_node('Resources')
+ inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Resources' % name
+ for x in self.to_list(self.mac_resources):
+ node = self.path.find_node(x)
+ if not node:
+ raise Errors.WafError('Missing mac_resource %r in %r' % (x, self))
+
+ parent = node.parent
+ if os.path.isdir(node.abspath()):
+ nodes = node.ant_glob('**')
+ else:
+ nodes = [node]
+ for node in nodes:
+ rel = node.path_from(parent)
+ self.create_task('macapp', node, res_dir.make_node(rel))
+ self.bld.install_as(inst_to + '/%s' % rel, node)
if getattr(self.bld, 'is_install', None):
- # disable regular binary installation
+ # disable the normal binary installation
self.install_task.hasrun = Task.SKIP_ME
@feature('cprogram', 'cxxprogram')
@after_method('apply_link')
def create_task_macplist(self):
"""
- Creates a :py:class:`waflib.Tools.c_osx.macplist` instance.
+ Create a :py:class:`waflib.Tools.c_osx.macplist` instance.
"""
- if self.env.MACAPP or getattr(self, 'mac_app', False):
+ if self.env['MACAPP'] or getattr(self, 'mac_app', False):
out = self.link_task.outputs[0]
name = bundle_name_for_output(out)
@@ -141,7 +160,7 @@ def create_task_macplist(self):
plisttask.code = app_info
inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/' % name
- self.add_install_files(install_to=inst_to, install_from=n1)
+ self.bld.install_files(inst_to, n1)
@feature('cshlib', 'cxxshlib')
@before_method('apply_link', 'propagate_uselib_vars')
@@ -158,9 +177,9 @@ def apply_bundle(self):
bld.env.MACBUNDLE = True
bld.shlib(source='a.c', target='foo')
"""
- if self.env.MACBUNDLE or getattr(self, 'mac_bundle', False):
- self.env.LINKFLAGS_cshlib = self.env.LINKFLAGS_cxxshlib = [] # disable the '-dynamiclib' flag
- self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN
+ if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
+ self.env['LINKFLAGS_cshlib'] = self.env['LINKFLAGS_cxxshlib'] = [] # disable the '-dynamiclib' flag
+ self.env['cshlib_PATTERN'] = self.env['cxxshlib_PATTERN'] = self.env['macbundle_PATTERN']
use = self.use = self.to_list(getattr(self, 'use', []))
if not 'MACBUNDLE' in use:
use.append('MACBUNDLE')
@@ -169,7 +188,7 @@ app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources']
class macapp(Task.Task):
"""
- Creates mac applications
+ Create mac applications
"""
color = 'PINK'
def run(self):
@@ -178,7 +197,7 @@ class macapp(Task.Task):
class macplist(Task.Task):
"""
- Creates plist files
+ Create plist files
"""
color = 'PINK'
ext_in = ['.bin']
@@ -190,4 +209,3 @@ class macplist(Task.Task):
context = getattr(self, 'context', {})
txt = txt.format(**context)
self.outputs[0].write(txt)
-
diff --git a/waflib/Tools/c_preproc.py b/waflib/Tools/c_preproc.py
index 7e04b4a7..c244f642 100644
--- a/waflib/Tools/c_preproc.py
+++ b/waflib/Tools/c_preproc.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
+# Thomas Nagy, 2006-2010 (ita)
"""
C/C++ preprocessor for finding dependencies
@@ -28,13 +28,11 @@ A dumb preprocessor is also available in the tool *c_dumbpreproc*
import re, string, traceback
from waflib import Logs, Utils, Errors
+from waflib.Logs import debug, error
class PreprocError(Errors.WafError):
pass
-FILE_CACHE_SIZE = 100000
-LINE_CACHE_SIZE = 100000
-
POPFILE = '-'
"Constant representing a special token used in :py:meth:`waflib.Tools.c_preproc.c_parser.start` iteration to switch to a header read previously"
@@ -44,15 +42,15 @@ recursion_limit = 150
go_absolute = False
"Set to True to track headers on files in /usr/include, else absolute paths are ignored (but it becomes very slow)"
-standard_includes = ['/usr/local/include', '/usr/include']
+standard_includes = ['/usr/include']
if Utils.is_win32:
standard_includes = []
use_trigraphs = 0
"""Apply trigraph rules (False by default)"""
-# obsolete, do not use
strict_quotes = 0
+"""Reserve the "#include <>" quotes for system includes (do not search for those includes). False by default."""
g_optrans = {
'not':'!',
@@ -71,7 +69,7 @@ g_optrans = {
# ignore #warning and #error
re_lines = re.compile(
- '^[ \t]*(?:#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
+ '^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
re.IGNORECASE | re.MULTILINE)
"""Match #include lines"""
@@ -139,22 +137,54 @@ skipped = 's'
def repl(m):
"""Replace function used with :py:attr:`waflib.Tools.c_preproc.re_cpp`"""
- s = m.group()
- if s[0] == '/':
+ s = m.group(0)
+ if s.startswith('/'):
return ' '
return s
+def filter_comments(filename):
+ """
+ Filter the comments from a c/h file, and return the preprocessor lines.
+ The regexps :py:attr:`waflib.Tools.c_preproc.re_cpp`, :py:attr:`waflib.Tools.c_preproc.re_nl` and :py:attr:`waflib.Tools.c_preproc.re_lines` are used internally.
+
+ :return: the preprocessor directives as a list of (keyword, line)
+ :rtype: a list of string pairs
+ """
+ # return a list of tuples : keyword, line
+ code = Utils.readf(filename)
+ if use_trigraphs:
+ for (a, b) in trig_def: code = code.split(a).join(b)
+ code = re_nl.sub('', code)
+ code = re_cpp.sub(repl, code)
+ return [(m.group(2), m.group(3)) for m in re.finditer(re_lines, code)]
+
prec = {}
"""
-Operator precedence rules required for parsing expressions of the form::
+Operator precendence rules required for parsing expressions of the form::
#if 1 && 2 != 0
"""
ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ',']
-for x, syms in enumerate(ops):
+for x in range(len(ops)):
+ syms = ops[x]
for u in syms.split():
prec[u] = x
+def trimquotes(s):
+ """
+ Remove the single quotes around an expression::
+
+ trimquotes("'test'") == "test"
+
+ :param s: expression to transform
+ :type s: string
+ :rtype: string
+ """
+ if not s: return ''
+ s = s.rstrip()
+ if s[0] == "'" and s[-1] == "'": return s[1:-1]
+ return s
+
def reduce_nums(val_1, val_2, val_op):
"""
Apply arithmetic rules to compute a result
@@ -170,56 +200,32 @@ def reduce_nums(val_1, val_2, val_op):
#print val_1, val_2, val_op
# now perform the operation, make certain a and b are numeric
- try:
- a = 0 + val_1
- except TypeError:
- a = int(val_1)
- try:
- b = 0 + val_2
- except TypeError:
- b = int(val_2)
+ try: a = 0 + val_1
+ except TypeError: a = int(val_1)
+ try: b = 0 + val_2
+ except TypeError: b = int(val_2)
d = val_op
- if d == '%':
- c = a % b
- elif d=='+':
- c = a + b
- elif d=='-':
- c = a - b
- elif d=='*':
- c = a * b
- elif d=='/':
- c = a / b
- elif d=='^':
- c = a ^ b
- elif d=='==':
- c = int(a == b)
- elif d=='|' or d == 'bitor':
- c = a | b
- elif d=='||' or d == 'or' :
- c = int(a or b)
- elif d=='&' or d == 'bitand':
- c = a & b
- elif d=='&&' or d == 'and':
- c = int(a and b)
- elif d=='!=' or d == 'not_eq':
- c = int(a != b)
- elif d=='^' or d == 'xor':
- c = int(a^b)
- elif d=='<=':
- c = int(a <= b)
- elif d=='<':
- c = int(a < b)
- elif d=='>':
- c = int(a > b)
- elif d=='>=':
- c = int(a >= b)
- elif d=='<<':
- c = a << b
- elif d=='>>':
- c = a >> b
- else:
- c = 0
+ if d == '%': c = a%b
+ elif d=='+': c = a+b
+ elif d=='-': c = a-b
+ elif d=='*': c = a*b
+ elif d=='/': c = a/b
+ elif d=='^': c = a^b
+ elif d=='==': c = int(a == b)
+ elif d=='|' or d == 'bitor': c = a|b
+ elif d=='||' or d == 'or' : c = int(a or b)
+ elif d=='&' or d == 'bitand': c = a&b
+ elif d=='&&' or d == 'and': c = int(a and b)
+ elif d=='!=' or d == 'not_eq': c = int(a != b)
+ elif d=='^' or d == 'xor': c = int(a^b)
+ elif d=='<=': c = int(a <= b)
+ elif d=='<': c = int(a < b)
+ elif d=='>': c = int(a > b)
+ elif d=='>=': c = int(a >= b)
+ elif d=='<<': c = a<<b
+ elif d=='>>': c = a>>b
+ else: c = 0
return c
def get_num(lst):
@@ -231,8 +237,7 @@ def get_num(lst):
:return: a pair containing the number and the rest of the list
:rtype: tuple(value, list)
"""
- if not lst:
- raise PreprocError('empty list for get_num')
+ if not lst: raise PreprocError("empty list for get_num")
(p, v) = lst[0]
if p == OP:
if v == '(':
@@ -250,7 +255,7 @@ def get_num(lst):
count_par += 1
i += 1
else:
- raise PreprocError('rparen expected %r' % lst)
+ raise PreprocError("rparen expected %r" % lst)
(num, _) = get_term(lst[1:i])
return (num, lst[i+1:])
@@ -267,14 +272,14 @@ def get_num(lst):
num, lst = get_num(lst[1:])
return (~ int(num), lst)
else:
- raise PreprocError('Invalid op token %r for get_num' % lst)
+ raise PreprocError("Invalid op token %r for get_num" % lst)
elif p == NUM:
return v, lst[1:]
elif p == IDENT:
# all macros should have been replaced, remaining identifiers eval to 0
return 0, lst[1:]
else:
- raise PreprocError('Invalid token %r for get_num' % lst)
+ raise PreprocError("Invalid token %r for get_num" % lst)
def get_term(lst):
"""
@@ -288,8 +293,7 @@ def get_term(lst):
:rtype: value, list
"""
- if not lst:
- raise PreprocError('empty list for get_term')
+ if not lst: raise PreprocError("empty list for get_term")
num, lst = get_num(lst)
if not lst:
return (num, [])
@@ -314,7 +318,7 @@ def get_term(lst):
break
i += 1
else:
- raise PreprocError('rparen expected %r' % lst)
+ raise PreprocError("rparen expected %r" % lst)
if int(num):
return get_term(lst[1:i])
@@ -332,7 +336,7 @@ def get_term(lst):
# operator precedence
p2, v2 = lst[0]
if p2 != OP:
- raise PreprocError('op expected %r' % lst)
+ raise PreprocError("op expected %r" % lst)
if prec[v2] >= prec[v]:
num2 = reduce_nums(num, num2, v)
@@ -343,7 +347,7 @@ def get_term(lst):
return get_term([(NUM, num), (p, v), (NUM, num3)] + lst)
- raise PreprocError('cannot reduce %r' % lst)
+ raise PreprocError("cannot reduce %r" % lst)
def reduce_eval(lst):
"""
@@ -428,7 +432,7 @@ def reduce_tokens(lst, defs, ban=[]):
else:
lst[i] = (NUM, 0)
else:
- raise PreprocError('Invalid define expression %r' % lst)
+ raise PreprocError("Invalid define expression %r" % lst)
elif p == IDENT and v in defs:
@@ -443,8 +447,8 @@ def reduce_tokens(lst, defs, ban=[]):
del lst[i]
accu = to_add[:]
reduce_tokens(accu, defs, ban+[v])
- for tmp in accu:
- lst.insert(i, tmp)
+ for x in range(len(accu)):
+ lst.insert(i, accu[x])
i += 1
else:
# collect the arguments for the funcall
@@ -453,11 +457,11 @@ def reduce_tokens(lst, defs, ban=[]):
del lst[i]
if i >= len(lst):
- raise PreprocError('expected ( after %r (got nothing)' % v)
+ raise PreprocError("expected '(' after %r (got nothing)" % v)
(p2, v2) = lst[i]
if p2 != OP or v2 != '(':
- raise PreprocError('expected ( after %r' % v)
+ raise PreprocError("expected '(' after %r" % v)
del lst[i]
@@ -472,22 +476,18 @@ def reduce_tokens(lst, defs, ban=[]):
one_param.append((p2, v2))
count_paren += 1
elif v2 == ')':
- if one_param:
- args.append(one_param)
+ if one_param: args.append(one_param)
break
elif v2 == ',':
- if not one_param:
- raise PreprocError('empty param in funcall %r' % v)
+ if not one_param: raise PreprocError("empty param in funcall %s" % v)
args.append(one_param)
one_param = []
else:
one_param.append((p2, v2))
else:
one_param.append((p2, v2))
- if v2 == '(':
- count_paren += 1
- elif v2 == ')':
- count_paren -= 1
+ if v2 == '(': count_paren += 1
+ elif v2 == ')': count_paren -= 1
else:
raise PreprocError('malformed macro')
@@ -524,6 +524,7 @@ def reduce_tokens(lst, defs, ban=[]):
accu.append((p2, v2))
accu.extend(toks)
elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__':
+ # TODO not sure
# first collect the tokens
va_toks = []
st = len(macro_def[0])
@@ -531,8 +532,7 @@ def reduce_tokens(lst, defs, ban=[]):
for x in args[pt-st+1:]:
va_toks.extend(x)
va_toks.append((OP, ','))
- if va_toks:
- va_toks.pop() # extra comma
+ if va_toks: va_toks.pop() # extra comma
if len(accu)>1:
(p3, v3) = accu[-1]
(p4, v4) = accu[-2]
@@ -580,15 +580,8 @@ def eval_macro(lst, defs):
:rtype: int
"""
reduce_tokens(lst, defs, [])
- if not lst:
- raise PreprocError('missing tokens to evaluate')
-
- if lst:
- p, v = lst[0]
- if p == IDENT and v not in defs:
- raise PreprocError('missing macro %r' % lst)
-
- p, v = reduce_eval(lst)
+ if not lst: raise PreprocError("missing tokens to evaluate")
+ (p, v) = reduce_eval(lst)
return int(v) != 0
def extract_macro(txt):
@@ -608,8 +601,7 @@ def extract_macro(txt):
p, name = t[0]
p, v = t[1]
- if p != OP:
- raise PreprocError('expected (')
+ if p != OP: raise PreprocError("expected open parenthesis")
i = 1
pindex = 0
@@ -628,27 +620,27 @@ def extract_macro(txt):
elif p == OP and v == ')':
break
else:
- raise PreprocError('unexpected token (3)')
+ raise PreprocError("unexpected token (3)")
elif prev == IDENT:
if p == OP and v == ',':
prev = v
elif p == OP and v == ')':
break
else:
- raise PreprocError('comma or ... expected')
+ raise PreprocError("comma or ... expected")
elif prev == ',':
if p == IDENT:
params[v] = pindex
pindex += 1
prev = p
elif p == OP and v == '...':
- raise PreprocError('not implemented (1)')
+ raise PreprocError("not implemented (1)")
else:
- raise PreprocError('comma or ... expected (2)')
+ raise PreprocError("comma or ... expected (2)")
elif prev == '...':
- raise PreprocError('not implemented (2)')
+ raise PreprocError("not implemented (2)")
else:
- raise PreprocError('unexpected else')
+ raise PreprocError("unexpected else")
#~ print (name, [params, t[i+1:]])
return (name, [params, t[i+1:]])
@@ -660,7 +652,7 @@ def extract_macro(txt):
# empty define, assign an empty token
return (v, [[], [('T','')]])
-re_include = re.compile('^\s*(<(?:.*)>|"(?:.*)")')
+re_include = re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
def extract_include(txt, defs):
"""
Process a line in the form::
@@ -676,15 +668,15 @@ def extract_include(txt, defs):
"""
m = re_include.search(txt)
if m:
- txt = m.group(1)
- return txt[0], txt[1:-1]
+ if m.group('a'): return '<', m.group('a')
+ if m.group('b'): return '"', m.group('b')
# perform preprocessing and look at the result, it must match an include
toks = tokenize(txt)
reduce_tokens(toks, defs, ['waf_include'])
if not toks:
- raise PreprocError('could not parse include %r' % txt)
+ raise PreprocError("could not parse include %s" % txt)
if len(toks) == 1:
if toks[0][0] == STR:
@@ -694,7 +686,7 @@ def extract_include(txt, defs):
ret = '<', stringize(toks).lstrip('<').rstrip('>')
return ret
- raise PreprocError('could not parse include %r' % txt)
+ raise PreprocError("could not parse include %s." % txt)
def parse_char(txt):
"""
@@ -706,26 +698,21 @@ def parse_char(txt):
:rtype: string
"""
- if not txt:
- raise PreprocError('attempted to parse a null char')
+ if not txt: raise PreprocError("attempted to parse a null char")
if txt[0] != '\\':
return ord(txt)
c = txt[1]
if c == 'x':
- if len(txt) == 4 and txt[3] in string.hexdigits:
- return int(txt[2:], 16)
+ if len(txt) == 4 and txt[3] in string.hexdigits: return int(txt[2:], 16)
return int(txt[2:], 16)
elif c.isdigit():
- if c == '0' and len(txt)==2:
- return 0
+ if c == '0' and len(txt)==2: return 0
for i in 3, 2, 1:
if len(txt) > i and txt[1:1+i].isdigit():
return (1+i, int(txt[1:1+i], 8))
else:
- try:
- return chr_esc[c]
- except KeyError:
- raise PreprocError('could not parse char literal %r' % txt)
+ try: return chr_esc[c]
+ except KeyError: raise PreprocError("could not parse char literal '%s'" % txt)
def tokenize(s):
"""
@@ -738,6 +725,7 @@ def tokenize(s):
"""
return tokenize_private(s)[:] # force a copy of the results
+@Utils.run_once
def tokenize_private(s):
ret = []
for match in re_clexer.finditer(s):
@@ -746,32 +734,28 @@ def tokenize_private(s):
v = m(name)
if v:
if name == IDENT:
- if v in g_optrans:
+ try:
+ g_optrans[v];
name = OP
- elif v.lower() == "true":
- v = 1
- name = NUM
- elif v.lower() == "false":
- v = 0
- name = NUM
+ except KeyError:
+ # c++ specific
+ if v.lower() == "true":
+ v = 1
+ name = NUM
+ elif v.lower() == "false":
+ v = 0
+ name = NUM
elif name == NUM:
- if m('oct'):
- v = int(v, 8)
- elif m('hex'):
- v = int(m('hex'), 16)
- elif m('n0'):
- v = m('n0')
+ if m('oct'): v = int(v, 8)
+ elif m('hex'): v = int(m('hex'), 16)
+ elif m('n0'): v = m('n0')
else:
v = m('char')
- if v:
- v = parse_char(v)
- else:
- v = m('n2') or m('n4')
+ if v: v = parse_char(v)
+ else: v = m('n2') or m('n4')
elif name == OP:
- if v == '%:':
- v = '#'
- elif v == '%:%:':
- v = '##'
+ if v == '%:': v = '#'
+ elif v == '%:%:': v = '##'
elif name == STR:
# remove the quotes around the string
v = v[1:-1]
@@ -779,20 +763,15 @@ def tokenize_private(s):
break
return ret
-def format_defines(lst):
- ret = []
- for y in lst:
- if y:
- pos = y.find('=')
- if pos == -1:
- # "-DFOO" should give "#define FOO 1"
- ret.append(y)
- elif pos > 0:
- # all others are assumed to be -DX=Y
- ret.append('%s %s' % (y[:pos], y[pos+1:]))
- else:
- raise ValueError('Invalid define expression %r' % y)
- return ret
+@Utils.run_once
+def define_name(line):
+ """
+ :param line: define line
+ :type line: string
+ :rtype: string
+ :return: the define name
+ """
+ return re_mac.match(line).group(0)
class c_parser(object):
"""
@@ -824,12 +803,9 @@ class c_parser(object):
self.curfile = ''
"""Current file"""
- self.ban_includes = set()
+ self.ban_includes = set([])
"""Includes that must not be read (#pragma once)"""
- self.listed = set()
- """Include nodes/names already listed to avoid duplicates in self.nodes/self.names"""
-
def cached_find_resource(self, node, filename):
"""
Find a file from the input directory
@@ -842,13 +818,13 @@ class c_parser(object):
:rtype: :py:class:`waflib.Node.Node`
"""
try:
- cache = node.ctx.preproc_cache_node
+ nd = node.ctx.cache_nd
except AttributeError:
- cache = node.ctx.preproc_cache_node = Utils.lru_cache(FILE_CACHE_SIZE)
+ nd = node.ctx.cache_nd = {}
- key = (node, filename)
+ tup = (node, filename)
try:
- return cache[key]
+ return nd[tup]
except KeyError:
ret = node.find_resource(filename)
if ret:
@@ -858,10 +834,10 @@ class c_parser(object):
tmp = node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode))
if tmp and getattr(tmp, 'children', None):
ret = None
- cache[key] = ret
+ nd[tup] = ret
return ret
- def tryfind(self, filename, kind='"', env=None):
+ def tryfind(self, filename):
"""
Try to obtain a node from the filename based from the include paths. Will add
the node found to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes` or the file name to
@@ -875,70 +851,29 @@ class c_parser(object):
"""
if filename.endswith('.moc'):
# we could let the qt4 module use a subclass, but then the function "scan" below must be duplicated
- # in the qt4 and in the qt5 classes. So we have two lines here and it is sufficient.
+ # in the qt4 and in the qt5 classes. So we have two lines here and it is sufficient. TODO waf 1.9
self.names.append(filename)
return None
self.curfile = filename
- found = None
- if kind == '"':
- if env.MSVC_VERSION:
- for n in reversed(self.currentnode_stack):
- found = self.cached_find_resource(n, filename)
- if found:
- break
- else:
- found = self.cached_find_resource(self.currentnode_stack[-1], filename)
+ # for msvc it should be a for loop over the whole stack
+ found = self.cached_find_resource(self.currentnode_stack[-1], filename)
- if not found:
- for n in self.nodepaths:
- found = self.cached_find_resource(n, filename)
- if found:
- break
+ for n in self.nodepaths:
+ if found:
+ break
+ found = self.cached_find_resource(n, filename)
- listed = self.listed
if found and not found in self.ban_includes:
- if found not in listed:
- listed.add(found)
- self.nodes.append(found)
+ # TODO duplicates do not increase the no-op build times too much, but they may be worth removing
+ self.nodes.append(found)
self.addlines(found)
else:
- if filename not in listed:
- listed.add(filename)
+ if not filename in self.names:
self.names.append(filename)
return found
- def filter_comments(self, node):
- """
- Filter the comments from a c/h file, and return the preprocessor lines.
- The regexps :py:attr:`waflib.Tools.c_preproc.re_cpp`, :py:attr:`waflib.Tools.c_preproc.re_nl` and :py:attr:`waflib.Tools.c_preproc.re_lines` are used internally.
-
- :return: the preprocessor directives as a list of (keyword, line)
- :rtype: a list of string pairs
- """
- # return a list of tuples : keyword, line
- code = node.read()
- if use_trigraphs:
- for (a, b) in trig_def:
- code = code.split(a).join(b)
- code = re_nl.sub('', code)
- code = re_cpp.sub(repl, code)
- return re_lines.findall(code)
-
- def parse_lines(self, node):
- try:
- cache = node.ctx.preproc_cache_lines
- except AttributeError:
- cache = node.ctx.preproc_cache_lines = Utils.lru_cache(LINE_CACHE_SIZE)
- try:
- return cache[node]
- except KeyError:
- cache[node] = lines = self.filter_comments(node)
- lines.append((POPFILE, ''))
- lines.reverse()
- return lines
-
def addlines(self, node):
"""
Add the lines from a header in the list of preprocessor lines to parse
@@ -948,23 +883,34 @@ class c_parser(object):
"""
self.currentnode_stack.append(node.parent)
+ filepath = node.abspath()
self.count_files += 1
if self.count_files > recursion_limit:
# issue #812
- raise PreprocError('recursion limit exceeded')
+ raise PreprocError("recursion limit exceeded")
+ pc = self.parse_cache
+ debug('preproc: reading file %r', filepath)
+ try:
+ lns = pc[filepath]
+ except KeyError:
+ pass
+ else:
+ self.lines.extend(lns)
+ return
- if Logs.verbose:
- Logs.debug('preproc: reading file %r', node)
try:
- lines = self.parse_lines(node)
- except EnvironmentError:
- raise PreprocError('could not read the file %r' % node)
+ lines = filter_comments(filepath)
+ lines.append((POPFILE, ''))
+ lines.reverse()
+ pc[filepath] = lines # cache the lines filtered
+ self.lines.extend(lines)
+ except IOError:
+ raise PreprocError("could not read the file %s" % filepath)
except Exception:
if Logs.verbose > 0:
- Logs.error('parsing %r failed %s', node, traceback.format_exc())
- else:
- self.lines.extend(lines)
+ error("parsing %s failed" % filepath)
+ traceback.print_exc()
def start(self, node, env):
"""
@@ -976,16 +922,27 @@ class c_parser(object):
:param env: config set containing additional defines to take into account
:type env: :py:class:`waflib.ConfigSet.ConfigSet`
"""
- Logs.debug('preproc: scanning %s (in %s)', node.name, node.parent.name)
+
+ debug('preproc: scanning %s (in %s)', node.name, node.parent.name)
+
+ bld = node.ctx
+ try:
+ self.parse_cache = bld.parse_cache
+ except AttributeError:
+ self.parse_cache = bld.parse_cache = {}
self.current_file = node
self.addlines(node)
# macros may be defined on the command-line, so they must be parsed as if they were part of the file
- if env.DEFINES:
- lst = format_defines(env.DEFINES)
- lst.reverse()
- self.lines.extend([('define', x) for x in lst])
+ if env['DEFINES']:
+ try:
+ lst = ['%s %s' % (x[0], trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in env['DEFINES']]]
+ lst.reverse()
+ self.lines.extend([('define', x) for x in lst])
+ except AttributeError:
+ # if the defines are invalid the compiler will tell the user
+ pass
while self.lines:
(token, line) = self.lines.pop()
@@ -995,6 +952,8 @@ class c_parser(object):
continue
try:
+ ve = Logs.verbose
+ if ve: debug('preproc: line is %s - %s state is %s', token, line, self.state)
state = self.state
# make certain we define the state if we are about to enter in an if block
@@ -1010,27 +969,23 @@ class c_parser(object):
if token == 'if':
ret = eval_macro(tokenize(line), self.defs)
- if ret:
- state[-1] = accepted
- else:
- state[-1] = ignored
+ if ret: state[-1] = accepted
+ else: state[-1] = ignored
elif token == 'ifdef':
m = re_mac.match(line)
- if m and m.group() in self.defs:
- state[-1] = accepted
- else:
- state[-1] = ignored
+ if m and m.group(0) in self.defs: state[-1] = accepted
+ else: state[-1] = ignored
elif token == 'ifndef':
m = re_mac.match(line)
- if m and m.group() in self.defs:
- state[-1] = ignored
- else:
- state[-1] = accepted
+ if m and m.group(0) in self.defs: state[-1] = ignored
+ else: state[-1] = accepted
elif token == 'include' or token == 'import':
(kind, inc) = extract_include(line, self.defs)
- self.current_file = self.tryfind(inc, kind, env)
- if token == 'import':
- self.ban_includes.add(self.current_file)
+ if ve: debug('preproc: include found %s (%s) ', inc, kind)
+ if kind == '"' or not strict_quotes:
+ self.current_file = self.tryfind(inc)
+ if token == 'import':
+ self.ban_includes.add(self.current_file)
elif token == 'elif':
if state[-1] == accepted:
state[-1] = skipped
@@ -1038,35 +993,24 @@ class c_parser(object):
if eval_macro(tokenize(line), self.defs):
state[-1] = accepted
elif token == 'else':
- if state[-1] == accepted:
- state[-1] = skipped
- elif state[-1] == ignored:
- state[-1] = accepted
+ if state[-1] == accepted: state[-1] = skipped
+ elif state[-1] == ignored: state[-1] = accepted
elif token == 'define':
try:
- self.defs[self.define_name(line)] = line
- except AttributeError:
- raise PreprocError('Invalid define line %r' % line)
+ self.defs[define_name(line)] = line
+ except Exception:
+ raise PreprocError("Invalid define line %s" % line)
elif token == 'undef':
m = re_mac.match(line)
- if m and m.group() in self.defs:
- self.defs.__delitem__(m.group())
+ if m and m.group(0) in self.defs:
+ self.defs.__delitem__(m.group(0))
#print "undef %s" % name
elif token == 'pragma':
if re_pragma_once.match(line.lower()):
self.ban_includes.add(self.current_file)
except Exception as e:
if Logs.verbose:
- Logs.debug('preproc: line parsing failed (%s): %s %s', e, line, traceback.format_exc())
-
- def define_name(self, line):
- """
- :param line: define line
- :type line: string
- :rtype: string
- :return: the define name
- """
- return re_mac.match(line).group()
+ debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack())
def scan(task):
"""
@@ -1076,6 +1020,9 @@ def scan(task):
This function is bound as a task method on :py:class:`waflib.Tools.c.c` and :py:class:`waflib.Tools.cxx.cxx` for example
"""
+
+ global go_absolute
+
try:
incn = task.generator.includes_nodes
except AttributeError:
@@ -1088,4 +1035,7 @@ def scan(task):
tmp = c_parser(nodepaths)
tmp.start(task.inputs[0], task.env)
+ if Logs.verbose:
+ debug('deps: deps for %r: %r; unresolved %r' % (task.inputs, tmp.nodes, tmp.names))
return (tmp.nodes, tmp.names)
+
diff --git a/waflib/Tools/c_tests.py b/waflib/Tools/c_tests.py
index f858df57..3b37f54c 100644
--- a/waflib/Tools/c_tests.py
+++ b/waflib/Tools/c_tests.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2016-2018 (ita)
+# Thomas Nagy, 2010 (ita)
"""
Various configuration tests.
@@ -58,7 +58,7 @@ def link_lib_test_fun(self):
@conf
def check_library(self, mode=None, test_exec=True):
"""
- Checks if libraries can be linked with the current linker. Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`.
+ Check if libraries can be linked with the current linker. Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`.
:param mode: c or cxx or d
:type mode: string
@@ -72,7 +72,8 @@ def check_library(self, mode=None, test_exec=True):
features = 'link_lib_test',
msg = 'Checking for libraries',
mode = mode,
- test_exec = test_exec)
+ test_exec = test_exec,
+ )
########################################################################################
@@ -88,7 +89,7 @@ INLINE_VALUES = ['inline', '__inline__', '__inline']
@conf
def check_inline(self, **kw):
"""
- Checks for the right value for inline macro.
+ Check for the right value for inline macro.
Define INLINE_MACRO to 1 if the define is found.
If the inline macro is not 'inline', add a define to the ``config.h`` (#define inline __inline__)
@@ -97,6 +98,7 @@ def check_inline(self, **kw):
:param features: by default *c* or *cxx* depending on the compiler present
:type features: list of string
"""
+
self.start_msg('Checking for inline')
if not 'define_name' in kw:
@@ -133,7 +135,7 @@ int main(int argc, char **argv) {
@conf
def check_large_file(self, **kw):
"""
- Checks for large file support and define the macro HAVE_LARGEFILE
+ Check for large file support and define the macro HAVE_LARGEFILE
The test is skipped on win32 systems (DEST_BINFMT == pe).
:param define_name: define to set, by default *HAVE_LARGEFILE*
@@ -141,6 +143,7 @@ def check_large_file(self, **kw):
:param execute: execute the test (yes by default)
:type execute: bool
"""
+
if not 'define_name' in kw:
kw['define_name'] = 'HAVE_LARGEFILE'
if not 'execute' in kw:
@@ -194,12 +197,9 @@ extern int foo;
'''
class grep_for_endianness(Task.Task):
- """
- Task that reads a binary and tries to determine the endianness
- """
color = 'PINK'
def run(self):
- txt = self.inputs[0].read(flags='rb').decode('latin-1')
+ txt = self.inputs[0].read(flags='rb').decode('iso8859-1')
if txt.find('LiTTleEnDian') > -1:
self.generator.tmp.append('little')
elif txt.find('BIGenDianSyS') > -1:
@@ -211,19 +211,18 @@ class grep_for_endianness(Task.Task):
@after_method('process_source')
def grep_for_endianness_fun(self):
"""
- Used by the endianness configuration test
+ Used by the endiannes configuration test
"""
self.create_task('grep_for_endianness', self.compiled_tasks[0].outputs[0])
@conf
def check_endianness(self):
"""
- Executes a configuration test to determine the endianness
+ Execute a configuration test to determine the endianness
"""
tmp = []
def check_msg(self):
return tmp[0]
- self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness',
- msg='Checking for endianness', define='ENDIANNESS', tmp=tmp, okmsg=check_msg)
+ self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness', msg="Checking for endianness", define='ENDIANNESS', tmp=tmp, okmsg=check_msg)
return tmp[0]
diff --git a/waflib/Tools/ccroot.py b/waflib/Tools/ccroot.py
index cfef8bf5..7fb53bba 100644
--- a/waflib/Tools/ccroot.py
+++ b/waflib/Tools/ccroot.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2005-2018 (ita)
+# Thomas Nagy, 2005-2010 (ita)
"""
Classes and methods shared by tools providing support for C-like language such
@@ -8,7 +8,7 @@ as C/C++/D/Assembly/Go (this support module is almost never used alone).
"""
import os, re
-from waflib import Task, Utils, Node, Errors, Logs
+from waflib import Task, Utils, Node, Errors
from waflib.TaskGen import after_method, before_method, feature, taskgen_method, extension
from waflib.Tools import c_aliases, c_preproc, c_config, c_osx, c_tests
from waflib.Configure import conf
@@ -77,7 +77,7 @@ def to_incnodes(self, inlst):
:return: list of include folders as nodes
"""
lst = []
- seen = set()
+ seen = set([])
for x in self.to_list(inlst):
if x in seen or not x:
continue
@@ -118,10 +118,9 @@ def apply_incpaths(self):
and the list of include paths in ``tg.env.INCLUDES``.
"""
- lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env.INCLUDES)
+ lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
self.includes_nodes = lst
- cwd = self.get_cwd()
- self.env.INCPATHS = [x.path_from(cwd) for x in lst]
+ self.env['INCPATHS'] = [x.abspath() for x in lst]
class link_task(Task.Task):
"""
@@ -131,9 +130,6 @@ class link_task(Task.Task):
"""
color = 'YELLOW'
- weight = 3
- """Try to process link tasks as early as possible"""
-
inst_to = None
"""Default installation path for the link task outputs, or None to disable"""
@@ -146,12 +142,6 @@ class link_task(Task.Task):
The settings are retrieved from ``env.clsname_PATTERN``
"""
if isinstance(target, str):
- base = self.generator.path
- if target.startswith('#'):
- # for those who like flat structures
- target = target[1:]
- base = self.generator.bld.bldnode
-
pattern = self.env[self.__class__.__name__ + '_PATTERN']
if not pattern:
pattern = '%s'
@@ -161,7 +151,7 @@ class link_task(Task.Task):
nums = self.generator.vnum.split('.')
if self.env.DEST_BINFMT == 'pe':
# include the version in the dll file name,
- # the import lib file name stays unversioned.
+ # the import lib file name stays unversionned.
name = name + '-' + nums[0]
elif self.env.DEST_OS == 'openbsd':
pattern = '%s.%s' % (pattern, nums[0])
@@ -172,51 +162,9 @@ class link_task(Task.Task):
tmp = folder + os.sep + pattern % name
else:
tmp = pattern % name
- target = base.find_or_declare(tmp)
+ target = self.generator.path.find_or_declare(tmp)
self.set_outputs(target)
- def exec_command(self, *k, **kw):
- ret = super(link_task, self).exec_command(*k, **kw)
- if not ret and self.env.DO_MANIFEST:
- ret = self.exec_mf()
- return ret
-
- def exec_mf(self):
- """
- Create manifest files for VS-like compilers (msvc, ifort, ...)
- """
- if not self.env.MT:
- return 0
-
- manifest = None
- for out_node in self.outputs:
- if out_node.name.endswith('.manifest'):
- manifest = out_node.abspath()
- break
- else:
- # Should never get here. If we do, it means the manifest file was
- # never added to the outputs list, thus we don't have a manifest file
- # to embed, so we just return.
- return 0
-
- # embedding mode. Different for EXE's and DLL's.
- # see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
- mode = ''
- for x in Utils.to_list(self.generator.features):
- if x in ('cprogram', 'cxxprogram', 'fcprogram', 'fcprogram_test'):
- mode = 1
- elif x in ('cshlib', 'cxxshlib', 'fcshlib'):
- mode = 2
-
- Logs.debug('msvc: embedding manifest in mode %r', mode)
-
- lst = [] + self.env.MT
- lst.extend(Utils.to_list(self.env.MTFLAGS))
- lst.extend(['-manifest', manifest])
- lst.append('-outputresource:%s;%s' % (self.outputs[0].abspath(), mode))
-
- return super(link_task, self).exec_command(lst)
-
class stlink_task(link_task):
"""
Base for static link tasks, which use *ar* most of the time.
@@ -230,10 +178,8 @@ class stlink_task(link_task):
def rm_tgt(cls):
old = cls.run
def wrap(self):
- try:
- os.remove(self.outputs[0].abspath())
- except OSError:
- pass
+ try: os.remove(self.outputs[0].abspath())
+ except OSError: pass
return old(self)
setattr(cls, 'run', wrap)
rm_tgt(stlink_task)
@@ -273,12 +219,10 @@ def apply_link(self):
try:
inst_to = self.install_path
except AttributeError:
- inst_to = self.link_task.inst_to
+ inst_to = self.link_task.__class__.inst_to
if inst_to:
# install a copy of the node list we have at this moment (implib not added)
- self.install_task = self.add_install_files(
- install_to=inst_to, install_from=self.link_task.outputs[:],
- chmod=self.link_task.chmod, task=self.link_task)
+ self.install_task = self.bld.install_files(inst_to, self.link_task.outputs[:], env=self.env, chmod=self.link_task.chmod, task=self.link_task)
@taskgen_method
def use_rec(self, name, **kw):
@@ -338,7 +282,7 @@ def process_use(self):
See :py:func:`waflib.Tools.ccroot.use_rec`.
"""
- use_not = self.tmp_use_not = set()
+ use_not = self.tmp_use_not = set([])
self.tmp_use_seen = [] # we would like an ordered set
use_prec = self.tmp_use_prec = {}
self.uselib = self.to_list(getattr(self, 'uselib', []))
@@ -353,7 +297,7 @@ def process_use(self):
del use_prec[x]
# topological sort
- out = self.tmp_use_sorted = []
+ out = []
tmp = []
for x in self.tmp_use_seen:
for k in use_prec.values():
@@ -389,15 +333,14 @@ def process_use(self):
if var == 'LIB' or y.tmp_use_stlib or x in names:
self.env.append_value(var, [y.target[y.target.rfind(os.sep) + 1:]])
self.link_task.dep_nodes.extend(y.link_task.outputs)
- tmp_path = y.link_task.outputs[0].parent.path_from(self.get_cwd())
+ tmp_path = y.link_task.outputs[0].parent.path_from(self.bld.bldnode)
self.env.append_unique(var + 'PATH', [tmp_path])
else:
if y.tmp_use_objects:
self.add_objects_from_tgen(y)
if getattr(y, 'export_includes', None):
- # self.includes may come from a global variable #2035
- self.includes = self.includes + y.to_incnodes(y.export_includes)
+ self.includes.extend(y.to_incnodes(y.export_includes))
if getattr(y, 'export_defines', None):
self.env.append_value('DEFINES', self.to_list(y.export_defines))
@@ -447,7 +390,7 @@ def get_uselib_vars(self):
:return: the *uselib* variables associated to the *features* attribute (see :py:attr:`waflib.Tools.ccroot.USELIB_VARS`)
:rtype: list of string
"""
- _vars = set()
+ _vars = set([])
for x in self.features:
if x in USELIB_VARS:
_vars |= USELIB_VARS[x]
@@ -462,7 +405,7 @@ def propagate_uselib_vars(self):
def build(bld):
bld.env.AFLAGS_aaa = ['bar']
from waflib.Tools.ccroot import USELIB_VARS
- USELIB_VARS['aaa'] = ['AFLAGS']
+ USELIB_VARS['aaa'] = set('AFLAGS')
tg = bld(features='aaa', aflags='test')
@@ -504,20 +447,20 @@ def apply_implib(self):
name = self.target.name
else:
name = os.path.split(self.target)[1]
- implib = self.env.implib_PATTERN % name
+ implib = self.env['implib_PATTERN'] % name
implib = dll.parent.find_or_declare(implib)
- self.env.append_value('LINKFLAGS', self.env.IMPLIB_ST % implib.bldpath())
+ self.env.append_value('LINKFLAGS', self.env['IMPLIB_ST'] % implib.bldpath())
self.link_task.outputs.append(implib)
if getattr(self, 'defs', None) and self.env.DEST_BINFMT == 'pe':
node = self.path.find_resource(self.defs)
if not node:
raise Errors.WafError('invalid def file %r' % self.defs)
- if self.env.def_PATTERN:
- self.env.append_value('LINKFLAGS', self.env.def_PATTERN % node.path_from(self.get_cwd()))
+ if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
+ self.env.append_value('LINKFLAGS', '/def:%s' % node.path_from(self.bld.bldnode))
self.link_task.dep_nodes.append(node)
else:
- # gcc for windows takes *.def file as input without any special flag
+ #gcc for windows takes *.def file a an input without any special flag
self.link_task.inputs.append(node)
# where to put the import library
@@ -532,11 +475,10 @@ def apply_implib(self):
except AttributeError:
# else, put the library in BINDIR and the import library in LIBDIR
inst_to = '${IMPLIBDIR}'
- self.install_task.install_to = '${BINDIR}'
+ self.install_task.dest = '${BINDIR}'
if not self.env.IMPLIBDIR:
self.env.IMPLIBDIR = self.env.LIBDIR
- self.implib_install_task = self.add_install_files(install_to=inst_to, install_from=implib,
- chmod=self.link_task.chmod, task=self.link_task)
+ self.implib_install_task = self.bld.install_files(inst_to, implib, env=self.env, chmod=self.link_task.chmod, task=self.link_task)
# ============ the code above must not know anything about vnum processing on unix platforms =========
@@ -593,34 +535,34 @@ def apply_vnum(self):
# the following task is just to enable execution from the build dir :-/
if self.env.DEST_OS != 'openbsd':
- outs = [node.parent.make_node(name3)]
+ outs = [node.parent.find_or_declare(name3)]
if name2 != name3:
- outs.append(node.parent.make_node(name2))
+ outs.append(node.parent.find_or_declare(name2))
self.create_task('vnum', node, outs)
if getattr(self, 'install_task', None):
- self.install_task.hasrun = Task.SKIPPED
- self.install_task.no_errcheck_out = True
- path = self.install_task.install_to
+ self.install_task.hasrun = Task.SKIP_ME
+ bld = self.bld
+ path = self.install_task.dest
if self.env.DEST_OS == 'openbsd':
libname = self.link_task.outputs[0].name
- t1 = self.add_install_as(install_to='%s/%s' % (path, libname), install_from=node, chmod=self.link_task.chmod)
+ t1 = bld.install_as('%s%s%s' % (path, os.sep, libname), node, env=self.env, chmod=self.link_task.chmod)
self.vnum_install_task = (t1,)
else:
- t1 = self.add_install_as(install_to=path + os.sep + name3, install_from=node, chmod=self.link_task.chmod)
- t3 = self.add_symlink_as(install_to=path + os.sep + libname, install_from=name3)
+ t1 = bld.install_as(path + os.sep + name3, node, env=self.env, chmod=self.link_task.chmod)
+ t3 = bld.symlink_as(path + os.sep + libname, name3)
if name2 != name3:
- t2 = self.add_symlink_as(install_to=path + os.sep + name2, install_from=name3)
+ t2 = bld.symlink_as(path + os.sep + name2, name3)
self.vnum_install_task = (t1, t2, t3)
else:
self.vnum_install_task = (t1, t3)
- if '-dynamiclib' in self.env.LINKFLAGS:
+ if '-dynamiclib' in self.env['LINKFLAGS']:
# this requires after(propagate_uselib_vars)
try:
inst_to = self.install_path
except AttributeError:
- inst_to = self.link_task.inst_to
+ inst_to = self.link_task.__class__.inst_to
if inst_to:
p = Utils.subst_vars(inst_to, self.env)
path = os.path.join(p, name2)
@@ -633,6 +575,7 @@ class vnum(Task.Task):
Create the symbolic links for a versioned shared library. Instances are created by :py:func:`waflib.Tools.ccroot.apply_vnum`
"""
color = 'CYAN'
+ quient = True
ext_in = ['.bin']
def keyword(self):
return 'Symlinking'
@@ -657,6 +600,9 @@ class fake_shlib(link_task):
for t in self.run_after:
if not t.hasrun:
return Task.ASK_LATER
+
+ for x in self.outputs:
+ x.sig = Utils.h_file(x.abspath())
return Task.SKIP_ME
class fake_stlib(stlink_task):
@@ -667,6 +613,9 @@ class fake_stlib(stlink_task):
for t in self.run_after:
if not t.hasrun:
return Task.ASK_LATER
+
+ for x in self.outputs:
+ x.sig = Utils.h_file(x.abspath())
return Task.SKIP_ME
@conf
@@ -709,10 +658,7 @@ def process_lib(self):
for y in names:
node = x.find_node(y)
if node:
- try:
- Utils.h_file(node.abspath())
- except EnvironmentError:
- raise ValueError('Could not read %r' % y)
+ node.sig = Utils.h_file(node.abspath())
break
else:
continue
diff --git a/waflib/Tools/clang.py b/waflib/Tools/clang.py
index 3828e391..2259c5df 100644
--- a/waflib/Tools/clang.py
+++ b/waflib/Tools/clang.py
@@ -12,7 +12,7 @@ from waflib.Configure import conf
@conf
def find_clang(conf):
"""
- Finds the program clang and executes it to ensure it really is clang
+ Find the program clang and execute it to ensure it really is clang
"""
cc = conf.find_program('clang', var='CC')
conf.get_cc_version(cc, clang=True)
diff --git a/waflib/Tools/clangxx.py b/waflib/Tools/clangxx.py
index 152013ce..b539b287 100644
--- a/waflib/Tools/clangxx.py
+++ b/waflib/Tools/clangxx.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy 2009-2018 (ita)
+# Thomas Nagy 2009-2010 (ita)
"""
Detect the Clang++ C++ compiler
@@ -12,7 +12,7 @@ from waflib.Configure import conf
@conf
def find_clangxx(conf):
"""
- Finds the program clang++, and executes it to ensure it really is clang++
+ Find the program clang++, and execute it to ensure it really is clang++
"""
cxx = conf.find_program('clang++', var='CXX')
conf.get_cc_version(cxx, clang=True)
diff --git a/waflib/Tools/compiler_c.py b/waflib/Tools/compiler_c.py
index 2dba3f82..7d4e22ca 100644
--- a/waflib/Tools/compiler_c.py
+++ b/waflib/Tools/compiler_c.py
@@ -47,10 +47,10 @@ c_compiler = {
'osf1V': ['gcc'],
'gnu': ['gcc', 'clang'],
'java': ['gcc', 'msvc', 'clang', 'icc'],
-'default':['clang', 'gcc'],
+'default':['gcc', 'clang'],
}
"""
-Dict mapping platform names to Waf tools finding specific C compilers::
+Dict mapping the platform names to Waf tools finding specific C compilers::
from waflib.Tools.compiler_c import c_compiler
c_compiler['linux'] = ['gcc', 'icc', 'suncc']
@@ -63,14 +63,10 @@ def default_compilers():
def configure(conf):
"""
- Detects a suitable C compiler
-
- :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
+ Try to find a suitable C compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.
"""
- try:
- test_for_compiler = conf.options.check_c_compiler or default_compilers()
- except AttributeError:
- conf.fatal("Add options(opt): opt.load('compiler_c')")
+ try: test_for_compiler = conf.options.check_c_compiler or default_compilers()
+ except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_c')")
for compiler in re.split('[ ,]+', test_for_compiler):
conf.env.stash()
@@ -80,21 +76,19 @@ def configure(conf):
except conf.errors.ConfigurationError as e:
conf.env.revert()
conf.end_msg(False)
- debug('compiler_c: %r', e)
+ debug('compiler_c: %r' % e)
else:
- if conf.env.CC:
+ if conf.env['CC']:
conf.end_msg(conf.env.get_flat('CC'))
- conf.env.COMPILER_CC = compiler
- conf.env.commit()
+ conf.env['COMPILER_CC'] = compiler
break
- conf.env.revert()
conf.end_msg(False)
else:
conf.fatal('could not configure a C compiler!')
def options(opt):
"""
- This is how to provide compiler preferences on the command-line::
+ Restrict the compiler detection from the command-line::
$ waf configure --check-c-compiler=gcc
"""
diff --git a/waflib/Tools/compiler_cxx.py b/waflib/Tools/compiler_cxx.py
index 1af65a22..6f0ea9ea 100644
--- a/waflib/Tools/compiler_cxx.py
+++ b/waflib/Tools/compiler_cxx.py
@@ -48,7 +48,7 @@ cxx_compiler = {
'osf1V': ['g++'],
'gnu': ['g++', 'clang++'],
'java': ['g++', 'msvc', 'clang++', 'icpc'],
-'default': ['clang++', 'g++']
+'default': ['g++', 'clang++']
}
"""
Dict mapping the platform names to Waf tools finding specific C++ compilers::
@@ -64,14 +64,10 @@ def default_compilers():
def configure(conf):
"""
- Detects a suitable C++ compiler
-
- :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
+ Try to find a suitable C++ compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.
"""
- try:
- test_for_compiler = conf.options.check_cxx_compiler or default_compilers()
- except AttributeError:
- conf.fatal("Add options(opt): opt.load('compiler_cxx')")
+ try: test_for_compiler = conf.options.check_cxx_compiler or default_compilers()
+ except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_cxx')")
for compiler in re.split('[ ,]+', test_for_compiler):
conf.env.stash()
@@ -81,21 +77,19 @@ def configure(conf):
except conf.errors.ConfigurationError as e:
conf.env.revert()
conf.end_msg(False)
- debug('compiler_cxx: %r', e)
+ debug('compiler_cxx: %r' % e)
else:
- if conf.env.CXX:
+ if conf.env['CXX']:
conf.end_msg(conf.env.get_flat('CXX'))
- conf.env.COMPILER_CXX = compiler
- conf.env.commit()
+ conf.env['COMPILER_CXX'] = compiler
break
- conf.env.revert()
conf.end_msg(False)
else:
conf.fatal('could not configure a C++ compiler!')
def options(opt):
"""
- This is how to provide compiler preferences on the command-line::
+ Restrict the compiler detection from the command-line::
$ waf configure --check-cxx-compiler=gxx
"""
diff --git a/waflib/Tools/compiler_d.py b/waflib/Tools/compiler_d.py
deleted file mode 100644
index 43bb1f64..00000000
--- a/waflib/Tools/compiler_d.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Carlos Rafael Giani, 2007 (dv)
-# Thomas Nagy, 2016-2018 (ita)
-
-"""
-Try to detect a D compiler from the list of supported compilers::
-
- def options(opt):
- opt.load('compiler_d')
- def configure(cnf):
- cnf.load('compiler_d')
- def build(bld):
- bld.program(source='main.d', target='app')
-
-Only three D compilers are really present at the moment:
-
-* gdc
-* dmd, the ldc compiler having a very similar command-line interface
-* ldc2
-"""
-
-import re
-from waflib import Utils, Logs
-
-d_compiler = {
-'default' : ['gdc', 'dmd', 'ldc2']
-}
-"""
-Dict mapping the platform names to lists of names of D compilers to try, in order of preference::
-
- from waflib.Tools.compiler_d import d_compiler
- d_compiler['default'] = ['gdc', 'dmd', 'ldc2']
-"""
-
-def default_compilers():
- build_platform = Utils.unversioned_sys_platform()
- possible_compiler_list = d_compiler.get(build_platform, d_compiler['default'])
- return ' '.join(possible_compiler_list)
-
-def configure(conf):
- """
- Detects a suitable D compiler
-
- :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
- """
- try:
- test_for_compiler = conf.options.check_d_compiler or default_compilers()
- except AttributeError:
- conf.fatal("Add options(opt): opt.load('compiler_d')")
-
- for compiler in re.split('[ ,]+', test_for_compiler):
- conf.env.stash()
- conf.start_msg('Checking for %r (D compiler)' % compiler)
- try:
- conf.load(compiler)
- except conf.errors.ConfigurationError as e:
- conf.env.revert()
- conf.end_msg(False)
- Logs.debug('compiler_d: %r', e)
- else:
- if conf.env.D:
- conf.end_msg(conf.env.get_flat('D'))
- conf.env.COMPILER_D = compiler
- conf.env.commit()
- break
- conf.env.revert()
- conf.end_msg(False)
- else:
- conf.fatal('could not configure a D compiler!')
-
-def options(opt):
- """
- This is how to provide compiler preferences on the command-line::
-
- $ waf configure --check-d-compiler=dmd
- """
- test_for_compiler = default_compilers()
- d_compiler_opts = opt.add_option_group('Configuration options')
- d_compiler_opts.add_option('--check-d-compiler', default=None,
- help='list of D compilers to try [%s]' % test_for_compiler, dest='check_d_compiler')
-
- for x in test_for_compiler.split():
- opt.load('%s' % x)
-
diff --git a/waflib/Tools/compiler_fc.py b/waflib/Tools/compiler_fc.py
deleted file mode 100644
index 96b58e70..00000000
--- a/waflib/Tools/compiler_fc.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-
-import re
-from waflib import Utils, Logs
-from waflib.Tools import fc
-
-fc_compiler = {
- 'win32' : ['gfortran','ifort'],
- 'darwin' : ['gfortran', 'g95', 'ifort'],
- 'linux' : ['gfortran', 'g95', 'ifort'],
- 'java' : ['gfortran', 'g95', 'ifort'],
- 'default': ['gfortran'],
- 'aix' : ['gfortran']
-}
-"""
-Dict mapping the platform names to lists of names of Fortran compilers to try, in order of preference::
-
- from waflib.Tools.compiler_c import c_compiler
- c_compiler['linux'] = ['gfortran', 'g95', 'ifort']
-"""
-
-def default_compilers():
- build_platform = Utils.unversioned_sys_platform()
- possible_compiler_list = fc_compiler.get(build_platform, fc_compiler['default'])
- return ' '.join(possible_compiler_list)
-
-def configure(conf):
- """
- Detects a suitable Fortran compiler
-
- :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
- """
- try:
- test_for_compiler = conf.options.check_fortran_compiler or default_compilers()
- except AttributeError:
- conf.fatal("Add options(opt): opt.load('compiler_fc')")
- for compiler in re.split('[ ,]+', test_for_compiler):
- conf.env.stash()
- conf.start_msg('Checking for %r (Fortran compiler)' % compiler)
- try:
- conf.load(compiler)
- except conf.errors.ConfigurationError as e:
- conf.env.revert()
- conf.end_msg(False)
- Logs.debug('compiler_fortran: %r', e)
- else:
- if conf.env.FC:
- conf.end_msg(conf.env.get_flat('FC'))
- conf.env.COMPILER_FORTRAN = compiler
- conf.env.commit()
- break
- conf.env.revert()
- conf.end_msg(False)
- else:
- conf.fatal('could not configure a Fortran compiler!')
-
-def options(opt):
- """
- This is how to provide compiler preferences on the command-line::
-
- $ waf configure --check-fortran-compiler=ifort
- """
- test_for_compiler = default_compilers()
- opt.load_special_tools('fc_*.py')
- fortran_compiler_opts = opt.add_option_group('Configuration options')
- fortran_compiler_opts.add_option('--check-fortran-compiler', default=None,
- help='list of Fortran compiler to try [%s]' % test_for_compiler,
- dest="check_fortran_compiler")
-
- for x in test_for_compiler.split():
- opt.load('%s' % x)
-
diff --git a/waflib/Tools/cs.py b/waflib/Tools/cs.py
deleted file mode 100644
index aecca6da..00000000
--- a/waflib/Tools/cs.py
+++ /dev/null
@@ -1,211 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-
-"""
-C# support. A simple example::
-
- def configure(conf):
- conf.load('cs')
- def build(bld):
- bld(features='cs', source='main.cs', gen='foo')
-
-Note that the configuration may compile C# snippets::
-
- FRAG = '''
- namespace Moo {
- public class Test { public static int Main(string[] args) { return 0; } }
- }'''
- def configure(conf):
- conf.check(features='cs', fragment=FRAG, compile_filename='test.cs', gen='test.exe',
- bintype='exe', csflags=['-pkg:gtk-sharp-2.0'], msg='Checking for Gtksharp support')
-"""
-
-from waflib import Utils, Task, Options, Errors
-from waflib.TaskGen import before_method, after_method, feature
-from waflib.Tools import ccroot
-from waflib.Configure import conf
-
-ccroot.USELIB_VARS['cs'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES'])
-ccroot.lib_patterns['csshlib'] = ['%s']
-
-@feature('cs')
-@before_method('process_source')
-def apply_cs(self):
- """
- Create a C# task bound to the attribute *cs_task*. There can be only one C# task by task generator.
- """
- cs_nodes = []
- no_nodes = []
- for x in self.to_nodes(self.source):
- if x.name.endswith('.cs'):
- cs_nodes.append(x)
- else:
- no_nodes.append(x)
- self.source = no_nodes
-
- bintype = getattr(self, 'bintype', self.gen.endswith('.dll') and 'library' or 'exe')
- self.cs_task = tsk = self.create_task('mcs', cs_nodes, self.path.find_or_declare(self.gen))
- tsk.env.CSTYPE = '/target:%s' % bintype
- tsk.env.OUT = '/out:%s' % tsk.outputs[0].abspath()
- self.env.append_value('CSFLAGS', '/platform:%s' % getattr(self, 'platform', 'anycpu'))
-
- inst_to = getattr(self, 'install_path', bintype=='exe' and '${BINDIR}' or '${LIBDIR}')
- if inst_to:
- # note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically
- mod = getattr(self, 'chmod', bintype=='exe' and Utils.O755 or Utils.O644)
- self.install_task = self.add_install_files(install_to=inst_to, install_from=self.cs_task.outputs[:], chmod=mod)
-
-@feature('cs')
-@after_method('apply_cs')
-def use_cs(self):
- """
- C# applications honor the **use** keyword::
-
- def build(bld):
- bld(features='cs', source='My.cs', bintype='library', gen='my.dll', name='mylib')
- bld(features='cs', source='Hi.cs', includes='.', bintype='exe', gen='hi.exe', use='mylib', name='hi')
- """
- names = self.to_list(getattr(self, 'use', []))
- get = self.bld.get_tgen_by_name
- for x in names:
- try:
- y = get(x)
- except Errors.WafError:
- self.env.append_value('CSFLAGS', '/reference:%s' % x)
- continue
- y.post()
-
- tsk = getattr(y, 'cs_task', None) or getattr(y, 'link_task', None)
- if not tsk:
- self.bld.fatal('cs task has no link task for use %r' % self)
- self.cs_task.dep_nodes.extend(tsk.outputs) # dependency
- self.cs_task.set_run_after(tsk) # order (redundant, the order is inferred from the nodes inputs/outputs)
- self.env.append_value('CSFLAGS', '/reference:%s' % tsk.outputs[0].abspath())
-
-@feature('cs')
-@after_method('apply_cs', 'use_cs')
-def debug_cs(self):
- """
- The C# targets may create .mdb or .pdb files::
-
- def build(bld):
- bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdebug='full')
- # csdebug is a value in (True, 'full', 'pdbonly')
- """
- csdebug = getattr(self, 'csdebug', self.env.CSDEBUG)
- if not csdebug:
- return
-
- node = self.cs_task.outputs[0]
- if self.env.CS_NAME == 'mono':
- out = node.parent.find_or_declare(node.name + '.mdb')
- else:
- out = node.change_ext('.pdb')
- self.cs_task.outputs.append(out)
-
- if getattr(self, 'install_task', None):
- self.pdb_install_task = self.add_install_files(
- install_to=self.install_task.install_to, install_from=out)
-
- if csdebug == 'pdbonly':
- val = ['/debug+', '/debug:pdbonly']
- elif csdebug == 'full':
- val = ['/debug+', '/debug:full']
- else:
- val = ['/debug-']
- self.env.append_value('CSFLAGS', val)
-
-@feature('cs')
-@after_method('debug_cs')
-def doc_cs(self):
- """
- The C# targets may create .xml documentation files::
-
- def build(bld):
- bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdoc=True)
- # csdoc is a boolean value
- """
- csdoc = getattr(self, 'csdoc', self.env.CSDOC)
- if not csdoc:
- return
-
- node = self.cs_task.outputs[0]
- out = node.change_ext('.xml')
- self.cs_task.outputs.append(out)
-
- if getattr(self, 'install_task', None):
- self.doc_install_task = self.add_install_files(
- install_to=self.install_task.install_to, install_from=out)
-
- self.env.append_value('CSFLAGS', '/doc:%s' % out.abspath())
-
-class mcs(Task.Task):
- """
- Compile C# files
- """
- color = 'YELLOW'
- run_str = '${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'
-
- def split_argfile(self, cmd):
- inline = [cmd[0]]
- infile = []
- for x in cmd[1:]:
- # csc doesn't want /noconfig in @file
- if x.lower() == '/noconfig':
- inline.append(x)
- else:
- infile.append(self.quote_flag(x))
- return (inline, infile)
-
-def configure(conf):
- """
- Find a C# compiler, set the variable MCS for the compiler and CS_NAME (mono or csc)
- """
- csc = getattr(Options.options, 'cscbinary', None)
- if csc:
- conf.env.MCS = csc
- conf.find_program(['csc', 'mcs', 'gmcs'], var='MCS')
- conf.env.ASS_ST = '/r:%s'
- conf.env.RES_ST = '/resource:%s'
-
- conf.env.CS_NAME = 'csc'
- if str(conf.env.MCS).lower().find('mcs') > -1:
- conf.env.CS_NAME = 'mono'
-
-def options(opt):
- """
- Add a command-line option for the configuration::
-
- $ waf configure --with-csc-binary=/foo/bar/mcs
- """
- opt.add_option('--with-csc-binary', type='string', dest='cscbinary')
-
-class fake_csshlib(Task.Task):
- """
- Task used for reading a foreign .net assembly and adding the dependency on it
- """
- color = 'YELLOW'
- inst_to = None
-
- def runnable_status(self):
- return Task.SKIP_ME
-
-@conf
-def read_csshlib(self, name, paths=[]):
- """
- Read a foreign .net assembly for the *use* system::
-
- def build(bld):
- bld.read_csshlib('ManagedLibrary.dll', paths=[bld.env.mylibrarypath])
- bld(features='cs', source='Hi.cs', bintype='exe', gen='hi.exe', use='ManagedLibrary.dll')
-
- :param name: Name of the library
- :type name: string
- :param paths: Folders in which the library may be found
- :type paths: list of string
- :return: A task generator having the feature *fake_lib* which will call :py:func:`waflib.Tools.ccroot.process_lib`
- :rtype: :py:class:`waflib.TaskGen.task_gen`
- """
- return self(name=name, features='fake_lib', lib_paths=paths, lib_type='csshlib')
-
diff --git a/waflib/Tools/cxx.py b/waflib/Tools/cxx.py
index 194fad74..311c1e52 100644
--- a/waflib/Tools/cxx.py
+++ b/waflib/Tools/cxx.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2005-2018 (ita)
+# Thomas Nagy, 2005-2010 (ita)
"Base for c++ programs and libraries"
@@ -10,31 +10,31 @@ from waflib.Tools.ccroot import link_task, stlink_task
@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++')
def cxx_hook(self, node):
- "Binds c++ file extensions to create :py:class:`waflib.Tools.cxx.cxx` instances"
+ "Bind the c++ file extensions to the creation of a :py:class:`waflib.Tools.cxx.cxx` instance"
return self.create_compiled_task('cxx', node)
if not '.c' in TaskGen.task_gen.mappings:
TaskGen.task_gen.mappings['.c'] = TaskGen.task_gen.mappings['.cpp']
class cxx(Task.Task):
- "Compiles C++ files into object files"
- run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
+ "Compile C++ files into object files"
+ run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()}'
vars = ['CXXDEPS'] # unused variable to depend on, just in case
ext_in = ['.h'] # set the build order easily by using ext_out=['.h']
scan = c_preproc.scan
class cxxprogram(link_task):
- "Links object files into c++ programs"
+ "Link object files into a c++ program"
run_str = '${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
vars = ['LINKDEPS']
ext_out = ['.bin']
inst_to = '${BINDIR}'
class cxxshlib(cxxprogram):
- "Links object files into c++ shared libraries"
+ "Link object files into a c++ shared library"
inst_to = '${LIBDIR}'
class cxxstlib(stlink_task):
- "Links object files into c++ static libraries"
+ "Link object files into a c++ static library"
pass # do not remove
diff --git a/waflib/Tools/d.py b/waflib/Tools/d.py
deleted file mode 100644
index e4cf73bb..00000000
--- a/waflib/Tools/d.py
+++ /dev/null
@@ -1,97 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Carlos Rafael Giani, 2007 (dv)
-# Thomas Nagy, 2007-2018 (ita)
-
-from waflib import Utils, Task, Errors
-from waflib.TaskGen import taskgen_method, feature, extension
-from waflib.Tools import d_scan, d_config
-from waflib.Tools.ccroot import link_task, stlink_task
-
-class d(Task.Task):
- "Compile a d file into an object file"
- color = 'GREEN'
- run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_SRC_F:SRC} ${D_TGT_F:TGT}'
- scan = d_scan.scan
-
-class d_with_header(d):
- "Compile a d file and generate a header"
- run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_HDR_F:tgt.outputs[1].bldpath()} ${D_SRC_F:SRC} ${D_TGT_F:tgt.outputs[0].bldpath()}'
-
-class d_header(Task.Task):
- "Compile d headers"
- color = 'BLUE'
- run_str = '${D} ${D_HEADER} ${SRC}'
-
-class dprogram(link_task):
- "Link object files into a d program"
- run_str = '${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}'
- inst_to = '${BINDIR}'
-
-class dshlib(dprogram):
- "Link object files into a d shared library"
- inst_to = '${LIBDIR}'
-
-class dstlib(stlink_task):
- "Link object files into a d static library"
- pass # do not remove
-
-@extension('.d', '.di', '.D')
-def d_hook(self, node):
- """
- Compile *D* files. To get .di files as well as .o files, set the following::
-
- def build(bld):
- bld.program(source='foo.d', target='app', generate_headers=True)
-
- """
- ext = Utils.destos_to_binfmt(self.env.DEST_OS) == 'pe' and 'obj' or 'o'
- out = '%s.%d.%s' % (node.name, self.idx, ext)
- def create_compiled_task(self, name, node):
- task = self.create_task(name, node, node.parent.find_or_declare(out))
- try:
- self.compiled_tasks.append(task)
- except AttributeError:
- self.compiled_tasks = [task]
- return task
-
- if getattr(self, 'generate_headers', None):
- tsk = create_compiled_task(self, 'd_with_header', node)
- tsk.outputs.append(node.change_ext(self.env.DHEADER_ext))
- else:
- tsk = create_compiled_task(self, 'd', node)
- return tsk
-
-@taskgen_method
-def generate_header(self, filename):
- """
- See feature request #104::
-
- def build(bld):
- tg = bld.program(source='foo.d', target='app')
- tg.generate_header('blah.d')
- # is equivalent to:
- #tg = bld.program(source='foo.d', target='app', header_lst='blah.d')
-
- :param filename: header to create
- :type filename: string
- """
- try:
- self.header_lst.append([filename, self.install_path])
- except AttributeError:
- self.header_lst = [[filename, self.install_path]]
-
-@feature('d')
-def process_header(self):
- """
- Process the attribute 'header_lst' to create the d header compilation tasks::
-
- def build(bld):
- bld.program(source='foo.d', target='app', header_lst='blah.d')
- """
- for i in getattr(self, 'header_lst', []):
- node = self.path.find_resource(i[0])
- if not node:
- raise Errors.WafError('file %r not found on d obj' % i[0])
- self.create_task('d_header', node, node.change_ext('.di'))
-
diff --git a/waflib/Tools/d_config.py b/waflib/Tools/d_config.py
deleted file mode 100644
index 66375565..00000000
--- a/waflib/Tools/d_config.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2016-2018 (ita)
-
-from waflib import Utils
-from waflib.Configure import conf
-
-@conf
-def d_platform_flags(self):
- """
- Sets the extensions dll/so for d programs and libraries
- """
- v = self.env
- if not v.DEST_OS:
- v.DEST_OS = Utils.unversioned_sys_platform()
- binfmt = Utils.destos_to_binfmt(self.env.DEST_OS)
- if binfmt == 'pe':
- v.dprogram_PATTERN = '%s.exe'
- v.dshlib_PATTERN = 'lib%s.dll'
- v.dstlib_PATTERN = 'lib%s.a'
- elif binfmt == 'mac-o':
- v.dprogram_PATTERN = '%s'
- v.dshlib_PATTERN = 'lib%s.dylib'
- v.dstlib_PATTERN = 'lib%s.a'
- else:
- v.dprogram_PATTERN = '%s'
- v.dshlib_PATTERN = 'lib%s.so'
- v.dstlib_PATTERN = 'lib%s.a'
-
-DLIB = '''
-version(D_Version2) {
- import std.stdio;
- int main() {
- writefln("phobos2");
- return 0;
- }
-} else {
- version(Tango) {
- import tango.stdc.stdio;
- int main() {
- printf("tango");
- return 0;
- }
- } else {
- import std.stdio;
- int main() {
- writefln("phobos1");
- return 0;
- }
- }
-}
-'''
-"""Detection string for the D standard library"""
-
-@conf
-def check_dlibrary(self, execute=True):
- """
- Detects the kind of standard library that comes with the compiler,
- and sets conf.env.DLIBRARY to tango, phobos1 or phobos2
- """
- ret = self.check_cc(features='d dprogram', fragment=DLIB, compile_filename='test.d', execute=execute, define_ret=True)
- if execute:
- self.env.DLIBRARY = ret.strip()
-
diff --git a/waflib/Tools/d_scan.py b/waflib/Tools/d_scan.py
deleted file mode 100644
index 14c6c313..00000000
--- a/waflib/Tools/d_scan.py
+++ /dev/null
@@ -1,211 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2016-2018 (ita)
-
-"""
-Provide a scanner for finding dependencies on d files
-"""
-
-import re
-from waflib import Utils
-
-def filter_comments(filename):
- """
- :param filename: d file name
- :type filename: string
- :rtype: list
- :return: a list of characters
- """
- txt = Utils.readf(filename)
- i = 0
- buf = []
- max = len(txt)
- begin = 0
- while i < max:
- c = txt[i]
- if c == '"' or c == "'": # skip a string or character literal
- buf.append(txt[begin:i])
- delim = c
- i += 1
- while i < max:
- c = txt[i]
- if c == delim:
- break
- elif c == '\\': # skip the character following backslash
- i += 1
- i += 1
- i += 1
- begin = i
- elif c == '/': # try to replace a comment with whitespace
- buf.append(txt[begin:i])
- i += 1
- if i == max:
- break
- c = txt[i]
- if c == '+': # eat nesting /+ +/ comment
- i += 1
- nesting = 1
- c = None
- while i < max:
- prev = c
- c = txt[i]
- if prev == '/' and c == '+':
- nesting += 1
- c = None
- elif prev == '+' and c == '/':
- nesting -= 1
- if nesting == 0:
- break
- c = None
- i += 1
- elif c == '*': # eat /* */ comment
- i += 1
- c = None
- while i < max:
- prev = c
- c = txt[i]
- if prev == '*' and c == '/':
- break
- i += 1
- elif c == '/': # eat // comment
- i += 1
- while i < max and txt[i] != '\n':
- i += 1
- else: # no comment
- begin = i - 1
- continue
- i += 1
- begin = i
- buf.append(' ')
- else:
- i += 1
- buf.append(txt[begin:])
- return buf
-
-class d_parser(object):
- """
- Parser for d files
- """
- def __init__(self, env, incpaths):
- #self.code = ''
- #self.module = ''
- #self.imports = []
-
- self.allnames = []
-
- self.re_module = re.compile("module\s+([^;]+)")
- self.re_import = re.compile("import\s+([^;]+)")
- self.re_import_bindings = re.compile("([^:]+):(.*)")
- self.re_import_alias = re.compile("[^=]+=(.+)")
-
- self.env = env
-
- self.nodes = []
- self.names = []
-
- self.incpaths = incpaths
-
- def tryfind(self, filename):
- """
- Search file a file matching an module/import directive
-
- :param filename: file to read
- :type filename: string
- """
- found = 0
- for n in self.incpaths:
- found = n.find_resource(filename.replace('.', '/') + '.d')
- if found:
- self.nodes.append(found)
- self.waiting.append(found)
- break
- if not found:
- if not filename in self.names:
- self.names.append(filename)
-
- def get_strings(self, code):
- """
- :param code: d code to parse
- :type code: string
- :return: the modules that the code uses
- :rtype: a list of match objects
- """
- #self.imports = []
- self.module = ''
- lst = []
-
- # get the module name (if present)
-
- mod_name = self.re_module.search(code)
- if mod_name:
- self.module = re.sub('\s+', '', mod_name.group(1)) # strip all whitespaces
-
- # go through the code, have a look at all import occurrences
-
- # first, lets look at anything beginning with "import" and ending with ";"
- import_iterator = self.re_import.finditer(code)
- if import_iterator:
- for import_match in import_iterator:
- import_match_str = re.sub('\s+', '', import_match.group(1)) # strip all whitespaces
-
- # does this end with an import bindings declaration?
- # (import bindings always terminate the list of imports)
- bindings_match = self.re_import_bindings.match(import_match_str)
- if bindings_match:
- import_match_str = bindings_match.group(1)
- # if so, extract the part before the ":" (since the module declaration(s) is/are located there)
-
- # split the matching string into a bunch of strings, separated by a comma
- matches = import_match_str.split(',')
-
- for match in matches:
- alias_match = self.re_import_alias.match(match)
- if alias_match:
- # is this an alias declaration? (alias = module name) if so, extract the module name
- match = alias_match.group(1)
-
- lst.append(match)
- return lst
-
- def start(self, node):
- """
- The parsing starts here
-
- :param node: input file
- :type node: :py:class:`waflib.Node.Node`
- """
- self.waiting = [node]
- # while the stack is not empty, add the dependencies
- while self.waiting:
- nd = self.waiting.pop(0)
- self.iter(nd)
-
- def iter(self, node):
- """
- Find all the modules that a file depends on, uses :py:meth:`waflib.Tools.d_scan.d_parser.tryfind` to process dependent files
-
- :param node: input file
- :type node: :py:class:`waflib.Node.Node`
- """
- path = node.abspath() # obtain the absolute path
- code = "".join(filter_comments(path)) # read the file and filter the comments
- names = self.get_strings(code) # obtain the import strings
- for x in names:
- # optimization
- if x in self.allnames:
- continue
- self.allnames.append(x)
-
- # for each name, see if it is like a node or not
- self.tryfind(x)
-
-def scan(self):
- "look for .d/.di used by a d file"
- env = self.env
- gruik = d_parser(env, self.generator.includes_nodes)
- node = self.inputs[0]
- gruik.start(node)
- nodes = gruik.nodes
- names = gruik.names
- return (nodes, names)
-
diff --git a/waflib/Tools/dbus.py b/waflib/Tools/dbus.py
deleted file mode 100644
index d520f1c0..00000000
--- a/waflib/Tools/dbus.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Ali Sabil, 2007
-
-"""
-Compiles dbus files with **dbus-binding-tool**
-
-Typical usage::
-
- def options(opt):
- opt.load('compiler_c dbus')
- def configure(conf):
- conf.load('compiler_c dbus')
- def build(bld):
- tg = bld.program(
- includes = '.',
- source = bld.path.ant_glob('*.c'),
- target = 'gnome-hello')
- tg.add_dbus_file('test.xml', 'test_prefix', 'glib-server')
-"""
-
-from waflib import Task, Errors
-from waflib.TaskGen import taskgen_method, before_method
-
-@taskgen_method
-def add_dbus_file(self, filename, prefix, mode):
- """
- Adds a dbus file to the list of dbus files to process. Store them in the attribute *dbus_lst*.
-
- :param filename: xml file to compile
- :type filename: string
- :param prefix: dbus binding tool prefix (--prefix=prefix)
- :type prefix: string
- :param mode: dbus binding tool mode (--mode=mode)
- :type mode: string
- """
- if not hasattr(self, 'dbus_lst'):
- self.dbus_lst = []
- if not 'process_dbus' in self.meths:
- self.meths.append('process_dbus')
- self.dbus_lst.append([filename, prefix, mode])
-
-@before_method('process_source')
-def process_dbus(self):
- """
- Processes the dbus files stored in the attribute *dbus_lst* to create :py:class:`waflib.Tools.dbus.dbus_binding_tool` instances.
- """
- for filename, prefix, mode in getattr(self, 'dbus_lst', []):
- node = self.path.find_resource(filename)
- if not node:
- raise Errors.WafError('file not found ' + filename)
- tsk = self.create_task('dbus_binding_tool', node, node.change_ext('.h'))
- tsk.env.DBUS_BINDING_TOOL_PREFIX = prefix
- tsk.env.DBUS_BINDING_TOOL_MODE = mode
-
-class dbus_binding_tool(Task.Task):
- """
- Compiles a dbus file
- """
- color = 'BLUE'
- ext_out = ['.h']
- run_str = '${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}'
- shell = True # temporary workaround for #795
-
-def configure(conf):
- """
- Detects the program dbus-binding-tool and sets ``conf.env.DBUS_BINDING_TOOL``
- """
- conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')
-
diff --git a/waflib/Tools/dmd.py b/waflib/Tools/dmd.py
deleted file mode 100644
index 8917ca1b..00000000
--- a/waflib/Tools/dmd.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Carlos Rafael Giani, 2007 (dv)
-# Thomas Nagy, 2008-2018 (ita)
-
-import sys
-from waflib.Tools import ar, d
-from waflib.Configure import conf
-
-@conf
-def find_dmd(conf):
- """
- Finds the program *dmd*, *dmd2*, or *ldc* and set the variable *D*
- """
- conf.find_program(['dmd', 'dmd2', 'ldc'], var='D')
-
- # make sure that we're dealing with dmd1, dmd2, or ldc(1)
- out = conf.cmd_and_log(conf.env.D + ['--help'])
- if out.find("D Compiler v") == -1:
- out = conf.cmd_and_log(conf.env.D + ['-version'])
- if out.find("based on DMD v1.") == -1:
- conf.fatal("detected compiler is not dmd/ldc")
-
-@conf
-def common_flags_ldc(conf):
- """
- Sets the D flags required by *ldc*
- """
- v = conf.env
- v.DFLAGS = ['-d-version=Posix']
- v.LINKFLAGS = []
- v.DFLAGS_dshlib = ['-relocation-model=pic']
-
-@conf
-def common_flags_dmd(conf):
- """
- Set the flags required by *dmd* or *dmd2*
- """
- v = conf.env
-
- v.D_SRC_F = ['-c']
- v.D_TGT_F = '-of%s'
-
- v.D_LINKER = v.D
- v.DLNK_SRC_F = ''
- v.DLNK_TGT_F = '-of%s'
- v.DINC_ST = '-I%s'
-
- v.DSHLIB_MARKER = v.DSTLIB_MARKER = ''
- v.DSTLIB_ST = v.DSHLIB_ST = '-L-l%s'
- v.DSTLIBPATH_ST = v.DLIBPATH_ST = '-L-L%s'
-
- v.LINKFLAGS_dprogram= ['-quiet']
-
- v.DFLAGS_dshlib = ['-fPIC']
- v.LINKFLAGS_dshlib = ['-L-shared']
-
- v.DHEADER_ext = '.di'
- v.DFLAGS_d_with_header = ['-H', '-Hf']
- v.D_HDR_F = '%s'
-
-def configure(conf):
- """
- Configuration for *dmd*, *dmd2*, and *ldc*
- """
- conf.find_dmd()
-
- if sys.platform == 'win32':
- out = conf.cmd_and_log(conf.env.D + ['--help'])
- if out.find('D Compiler v2.') > -1:
- conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')
-
- conf.load('ar')
- conf.load('d')
- conf.common_flags_dmd()
- conf.d_platform_flags()
-
- if str(conf.env.D).find('ldc') > -1:
- conf.common_flags_ldc()
-
diff --git a/waflib/Tools/errcheck.py b/waflib/Tools/errcheck.py
index de8d75a4..1f548aca 100644
--- a/waflib/Tools/errcheck.py
+++ b/waflib/Tools/errcheck.py
@@ -3,9 +3,9 @@
# Thomas Nagy, 2011 (ita)
"""
-Common mistakes highlighting.
+errcheck: highlight common mistakes
-There is a performance impact, so this tool is only loaded when running ``waf -v``
+There is a performance hit, so this tool is only loaded when running "waf -v"
"""
typos = {
@@ -18,14 +18,13 @@ typos = {
'importpath':'includes',
'installpath':'install_path',
'iscopy':'is_copy',
-'uses':'use',
}
meths_typos = ['__call__', 'program', 'shlib', 'stlib', 'objects']
import sys
from waflib import Logs, Build, Node, Task, TaskGen, ConfigSet, Errors, Utils
-from waflib.Tools import ccroot
+import waflib.Tools.ccroot
def check_same_targets(self):
mp = Utils.defaultdict(list)
@@ -34,8 +33,6 @@ def check_same_targets(self):
def check_task(tsk):
if not isinstance(tsk, Task.Task):
return
- if hasattr(tsk, 'no_errcheck_out'):
- return
for node in tsk.outputs:
mp[node].append(tsk)
@@ -61,34 +58,30 @@ def check_same_targets(self):
Logs.error(msg)
for x in v:
if Logs.verbose > 1:
- Logs.error(' %d. %r', 1 + v.index(x), x.generator)
+ Logs.error(' %d. %r' % (1 + v.index(x), x.generator))
else:
- Logs.error(' %d. %r in %r', 1 + v.index(x), x.generator.name, getattr(x.generator, 'path', None))
- Logs.error('If you think that this is an error, set no_errcheck_out on the task instance')
+ Logs.error(' %d. %r in %r' % (1 + v.index(x), x.generator.name, getattr(x.generator, 'path', None)))
if not dupe:
for (k, v) in uids.items():
if len(v) > 1:
Logs.error('* Several tasks use the same identifier. Please check the information on\n https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid')
- tg_details = tsk.generator.name
- if Logs.verbose > 2:
- tg_details = tsk.generator
for tsk in v:
- Logs.error(' - object %r (%r) defined in %r', tsk.__class__.__name__, tsk, tg_details)
+ Logs.error(' - object %r (%r) defined in %r' % (tsk.__class__.__name__, tsk, tsk.generator))
def check_invalid_constraints(self):
- feat = set()
+ feat = set([])
for x in list(TaskGen.feats.values()):
feat.union(set(x))
for (x, y) in TaskGen.task_gen.prec.items():
feat.add(x)
feat.union(set(y))
- ext = set()
+ ext = set([])
for x in TaskGen.task_gen.mappings.values():
ext.add(x.__name__)
invalid = ext & feat
if invalid:
- Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method', list(invalid))
+ Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method' % list(invalid))
# the build scripts have been read, so we can check for invalid after/before attributes on task classes
for cls in list(Task.classes.values()):
@@ -97,15 +90,15 @@ def check_invalid_constraints(self):
for x in ('before', 'after'):
for y in Utils.to_list(getattr(cls, x, [])):
- if not Task.classes.get(y):
- Logs.error('Erroneous order constraint %r=%r on task class %r', x, y, cls.__name__)
+ if not Task.classes.get(y, None):
+ Logs.error('Erroneous order constraint %r=%r on task class %r' % (x, y, cls.__name__))
if getattr(cls, 'rule', None):
- Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")', cls.__name__)
+ Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")' % cls.__name__)
def replace(m):
"""
- Replaces existing BuildContext methods to verify parameter names,
- for example ``bld(source=)`` has no ending *s*
+ We could add properties, but they would not work in some cases:
+ bld.program(...) requires 'source' in the attributes
"""
oldcall = getattr(Build.BuildContext, m)
def call(self, *k, **kw):
@@ -114,13 +107,13 @@ def replace(m):
if x in kw:
if x == 'iscopy' and 'subst' in getattr(self, 'features', ''):
continue
- Logs.error('Fix the typo %r -> %r on %r', x, typos[x], ret)
+ Logs.error('Fix the typo %r -> %r on %r' % (x, typos[x], ret))
return ret
setattr(Build.BuildContext, m, call)
def enhance_lib():
"""
- Modifies existing classes and methods to enable error verification
+ modify existing classes and methods
"""
for m in meths_typos:
replace(m)
@@ -128,36 +121,26 @@ def enhance_lib():
# catch '..' in ant_glob patterns
def ant_glob(self, *k, **kw):
if k:
- lst = Utils.to_list(k[0])
+ lst=Utils.to_list(k[0])
for pat in lst:
- sp = pat.split('/')
- if '..' in sp:
- Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'", k[0])
- if '.' in sp:
- Logs.error("In ant_glob pattern %r: '.' means 'one dot', not 'current directory'", k[0])
- return self.old_ant_glob(*k, **kw)
- Node.Node.old_ant_glob = Node.Node.ant_glob
- Node.Node.ant_glob = ant_glob
-
- # catch ant_glob on build folders
- def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True, quiet=False):
- if remove:
+ if '..' in pat.split('/'):
+ Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'" % k[0])
+ if kw.get('remove', True):
try:
- if self.is_child_of(self.ctx.bldnode) and not quiet:
- quiet = True
- Logs.error('Calling ant_glob on build folders (%r) is dangerous: add quiet=True / remove=False', self)
+ if self.is_child_of(self.ctx.bldnode) and not kw.get('quiet', False):
+ Logs.error('Using ant_glob on the build folder (%r) is dangerous (quiet=True to disable this warning)' % self)
except AttributeError:
pass
- return self.old_ant_iter(accept, maxdepth, pats, dir, src, remove, quiet)
- Node.Node.old_ant_iter = Node.Node.ant_iter
- Node.Node.ant_iter = ant_iter
+ return self.old_ant_glob(*k, **kw)
+ Node.Node.old_ant_glob = Node.Node.ant_glob
+ Node.Node.ant_glob = ant_glob
# catch conflicting ext_in/ext_out/before/after declarations
old = Task.is_before
def is_before(t1, t2):
ret = old(t1, t2)
if ret and old(t2, t1):
- Logs.error('Contradictory order constraints in classes %r %r', t1, t2)
+ Logs.error('Contradictory order constraints in classes %r %r' % (t1, t2))
return ret
Task.is_before = is_before
@@ -169,7 +152,7 @@ def enhance_lib():
Logs.error('feature shlib -> cshlib, dshlib or cxxshlib')
for x in ('c', 'cxx', 'd', 'fc'):
if not x in lst and lst and lst[0] in [x+y for y in ('program', 'shlib', 'stlib')]:
- Logs.error('%r features is probably missing %r', self, x)
+ Logs.error('%r features is probably missing %r' % (self, x))
TaskGen.feature('*')(check_err_features)
# check for erroneous order constraints
@@ -177,12 +160,12 @@ def enhance_lib():
if not hasattr(self, 'rule') and not 'subst' in Utils.to_list(self.features):
for x in ('before', 'after', 'ext_in', 'ext_out'):
if hasattr(self, x):
- Logs.warn('Erroneous order constraint %r on non-rule based task generator %r', x, self)
+ Logs.warn('Erroneous order constraint %r on non-rule based task generator %r' % (x, self))
else:
for x in ('before', 'after'):
for y in self.to_list(getattr(self, x, [])):
- if not Task.classes.get(y):
- Logs.error('Erroneous order constraint %s=%r on %r (no such class)', x, y, self)
+ if not Task.classes.get(y, None):
+ Logs.error('Erroneous order constraint %s=%r on %r (no such class)' % (x, y, self))
TaskGen.feature('*')(check_err_order)
# check for @extension used with @feature/@before_method/@after_method
@@ -217,21 +200,24 @@ def enhance_lib():
TaskGen.task_gen.use_rec = use_rec
# check for env.append
- def _getattr(self, name, default=None):
+ def getattri(self, name, default=None):
if name == 'append' or name == 'add':
raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique')
elif name == 'prepend':
raise Errors.WafError('env.prepend does not exist: use env.prepend_value')
if name in self.__slots__:
- return super(ConfigSet.ConfigSet, self).__getattr__(name, default)
+ return object.__getattr__(self, name, default)
else:
return self[name]
- ConfigSet.ConfigSet.__getattr__ = _getattr
+ ConfigSet.ConfigSet.__getattr__ = getattri
def options(opt):
"""
- Error verification can be enabled by default (not just on ``waf -v``) by adding to the user script options
+ Add a few methods
"""
enhance_lib()
+def configure(conf):
+ pass
+
diff --git a/waflib/Tools/fc.py b/waflib/Tools/fc.py
deleted file mode 100644
index d9e8d8c4..00000000
--- a/waflib/Tools/fc.py
+++ /dev/null
@@ -1,189 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# DC 2008
-# Thomas Nagy 2016-2018 (ita)
-
-"""
-Fortran support
-"""
-
-from waflib import Utils, Task, Errors
-from waflib.Tools import ccroot, fc_config, fc_scan
-from waflib.TaskGen import extension
-from waflib.Configure import conf
-
-ccroot.USELIB_VARS['fc'] = set(['FCFLAGS', 'DEFINES', 'INCLUDES', 'FCPPFLAGS'])
-ccroot.USELIB_VARS['fcprogram_test'] = ccroot.USELIB_VARS['fcprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
-ccroot.USELIB_VARS['fcshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
-ccroot.USELIB_VARS['fcstlib'] = set(['ARFLAGS', 'LINKDEPS'])
-
-@extension('.f','.F','.f90','.F90','.for','.FOR','.f95','.F95','.f03','.F03','.f08','.F08')
-def fc_hook(self, node):
- "Binds the Fortran file extensions create :py:class:`waflib.Tools.fc.fc` instances"
- return self.create_compiled_task('fc', node)
-
-@conf
-def modfile(conf, name):
- """
- Turns a module name into the right module file name.
- Defaults to all lower case.
- """
- return {'lower' :name.lower() + '.mod',
- 'lower.MOD' :name.lower() + '.MOD',
- 'UPPER.mod' :name.upper() + '.mod',
- 'UPPER' :name.upper() + '.MOD'}[conf.env.FC_MOD_CAPITALIZATION or 'lower']
-
-def get_fortran_tasks(tsk):
- """
- Obtains all fortran tasks from the same build group. Those tasks must not have
- the attribute 'nomod' or 'mod_fortran_done'
-
- :return: a list of :py:class:`waflib.Tools.fc.fc` instances
- """
- bld = tsk.generator.bld
- tasks = bld.get_tasks_group(bld.get_group_idx(tsk.generator))
- return [x for x in tasks if isinstance(x, fc) and not getattr(x, 'nomod', None) and not getattr(x, 'mod_fortran_done', None)]
-
-class fc(Task.Task):
- """
- Fortran tasks can only run when all fortran tasks in a current task group are ready to be executed
- This may cause a deadlock if some fortran task is waiting for something that cannot happen (circular dependency)
- Should this ever happen, set the 'nomod=True' on those tasks instances to break the loop
- """
- color = 'GREEN'
- run_str = '${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()} ${FCPPFLAGS}'
- vars = ["FORTRANMODPATHFLAG"]
-
- def scan(self):
- """Fortran dependency scanner"""
- tmp = fc_scan.fortran_parser(self.generator.includes_nodes)
- tmp.task = self
- tmp.start(self.inputs[0])
- return (tmp.nodes, tmp.names)
-
- def runnable_status(self):
- """
- Sets the mod file outputs and the dependencies on the mod files over all Fortran tasks
- executed by the main thread so there are no concurrency issues
- """
- if getattr(self, 'mod_fortran_done', None):
- return super(fc, self).runnable_status()
-
- # now, if we reach this part it is because this fortran task is the first in the list
- bld = self.generator.bld
-
- # obtain the fortran tasks
- lst = get_fortran_tasks(self)
-
- # disable this method for other tasks
- for tsk in lst:
- tsk.mod_fortran_done = True
-
- # wait for all the .f tasks to be ready for execution
- # and ensure that the scanners are called at least once
- for tsk in lst:
- ret = tsk.runnable_status()
- if ret == Task.ASK_LATER:
- # we have to wait for one of the other fortran tasks to be ready
- # this may deadlock if there are dependencies between fortran tasks
- # but this should not happen (we are setting them here!)
- for x in lst:
- x.mod_fortran_done = None
-
- return Task.ASK_LATER
-
- ins = Utils.defaultdict(set)
- outs = Utils.defaultdict(set)
-
- # the .mod files to create
- for tsk in lst:
- key = tsk.uid()
- for x in bld.raw_deps[key]:
- if x.startswith('MOD@'):
- name = bld.modfile(x.replace('MOD@', ''))
- node = bld.srcnode.find_or_declare(name)
- tsk.set_outputs(node)
- outs[node].add(tsk)
-
- # the .mod files to use
- for tsk in lst:
- key = tsk.uid()
- for x in bld.raw_deps[key]:
- if x.startswith('USE@'):
- name = bld.modfile(x.replace('USE@', ''))
- node = bld.srcnode.find_resource(name)
- if node and node not in tsk.outputs:
- if not node in bld.node_deps[key]:
- bld.node_deps[key].append(node)
- ins[node].add(tsk)
-
- # if the intersection matches, set the order
- for k in ins.keys():
- for a in ins[k]:
- a.run_after.update(outs[k])
- for x in outs[k]:
- self.generator.bld.producer.revdeps[x].add(a)
-
- # the scanner cannot output nodes, so we have to set them
- # ourselves as task.dep_nodes (additional input nodes)
- tmp = []
- for t in outs[k]:
- tmp.extend(t.outputs)
- a.dep_nodes.extend(tmp)
- a.dep_nodes.sort(key=lambda x: x.abspath())
-
- # the task objects have changed: clear the signature cache
- for tsk in lst:
- try:
- delattr(tsk, 'cache_sig')
- except AttributeError:
- pass
-
- return super(fc, self).runnable_status()
-
-class fcprogram(ccroot.link_task):
- """Links Fortran programs"""
- color = 'YELLOW'
- run_str = '${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB} ${LDFLAGS}'
- inst_to = '${BINDIR}'
-
-class fcshlib(fcprogram):
- """Links Fortran libraries"""
- inst_to = '${LIBDIR}'
-
-class fcstlib(ccroot.stlink_task):
- """Links Fortran static libraries (uses ar by default)"""
- pass # do not remove the pass statement
-
-class fcprogram_test(fcprogram):
- """Custom link task to obtain compiler outputs for Fortran configuration tests"""
-
- def runnable_status(self):
- """This task is always executed"""
- ret = super(fcprogram_test, self).runnable_status()
- if ret == Task.SKIP_ME:
- ret = Task.RUN_ME
- return ret
-
- def exec_command(self, cmd, **kw):
- """Stores the compiler std our/err onto the build context, to bld.out + bld.err"""
- bld = self.generator.bld
-
- kw['shell'] = isinstance(cmd, str)
- kw['stdout'] = kw['stderr'] = Utils.subprocess.PIPE
- kw['cwd'] = self.get_cwd()
- bld.out = bld.err = ''
-
- bld.to_log('command: %s\n' % cmd)
-
- kw['output'] = 0
- try:
- (bld.out, bld.err) = bld.cmd_and_log(cmd, **kw)
- except Errors.WafError:
- return -1
-
- if bld.out:
- bld.to_log('out: %s\n' % bld.out)
- if bld.err:
- bld.to_log('err: %s\n' % bld.err)
-
diff --git a/waflib/Tools/fc_config.py b/waflib/Tools/fc_config.py
deleted file mode 100644
index 222f3a55..00000000
--- a/waflib/Tools/fc_config.py
+++ /dev/null
@@ -1,488 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# DC 2008
-# Thomas Nagy 2016-2018 (ita)
-
-"""
-Fortran configuration helpers
-"""
-
-import re, os, sys, shlex
-from waflib.Configure import conf
-from waflib.TaskGen import feature, before_method
-
-FC_FRAGMENT = ' program main\n end program main\n'
-FC_FRAGMENT2 = ' PROGRAM MAIN\n END\n' # what's the actual difference between these?
-
-@conf
-def fc_flags(conf):
- """
- Defines common fortran configuration flags and file extensions
- """
- v = conf.env
-
- v.FC_SRC_F = []
- v.FC_TGT_F = ['-c', '-o']
- v.FCINCPATH_ST = '-I%s'
- v.FCDEFINES_ST = '-D%s'
-
- if not v.LINK_FC:
- v.LINK_FC = v.FC
-
- v.FCLNK_SRC_F = []
- v.FCLNK_TGT_F = ['-o']
-
- v.FCFLAGS_fcshlib = ['-fpic']
- v.LINKFLAGS_fcshlib = ['-shared']
- v.fcshlib_PATTERN = 'lib%s.so'
-
- v.fcstlib_PATTERN = 'lib%s.a'
-
- v.FCLIB_ST = '-l%s'
- v.FCLIBPATH_ST = '-L%s'
- v.FCSTLIB_ST = '-l%s'
- v.FCSTLIBPATH_ST = '-L%s'
- v.FCSTLIB_MARKER = '-Wl,-Bstatic'
- v.FCSHLIB_MARKER = '-Wl,-Bdynamic'
-
- v.SONAME_ST = '-Wl,-h,%s'
-
-@conf
-def fc_add_flags(conf):
- """
- Adds FCFLAGS / LDFLAGS / LINKFLAGS from os.environ to conf.env
- """
- conf.add_os_flags('FCPPFLAGS', dup=False)
- conf.add_os_flags('FCFLAGS', dup=False)
- conf.add_os_flags('LINKFLAGS', dup=False)
- conf.add_os_flags('LDFLAGS', dup=False)
-
-@conf
-def check_fortran(self, *k, **kw):
- """
- Compiles a Fortran program to ensure that the settings are correct
- """
- self.check_cc(
- fragment = FC_FRAGMENT,
- compile_filename = 'test.f',
- features = 'fc fcprogram',
- msg = 'Compiling a simple fortran app')
-
-@conf
-def check_fc(self, *k, **kw):
- """
- Same as :py:func:`waflib.Tools.c_config.check` but defaults to the *Fortran* programming language
- (this overrides the C defaults in :py:func:`waflib.Tools.c_config.validate_c`)
- """
- kw['compiler'] = 'fc'
- if not 'compile_mode' in kw:
- kw['compile_mode'] = 'fc'
- if not 'type' in kw:
- kw['type'] = 'fcprogram'
- if not 'compile_filename' in kw:
- kw['compile_filename'] = 'test.f90'
- if not 'code' in kw:
- kw['code'] = FC_FRAGMENT
- return self.check(*k, **kw)
-
-# ------------------------------------------------------------------------
-# --- These are the default platform modifiers, refactored here for
-# convenience. gfortran and g95 have much overlap.
-# ------------------------------------------------------------------------
-
-@conf
-def fortran_modifier_darwin(conf):
- """
- Defines Fortran flags and extensions for OSX systems
- """
- v = conf.env
- v.FCFLAGS_fcshlib = ['-fPIC']
- v.LINKFLAGS_fcshlib = ['-dynamiclib']
- v.fcshlib_PATTERN = 'lib%s.dylib'
- v.FRAMEWORKPATH_ST = '-F%s'
- v.FRAMEWORK_ST = ['-framework']
-
- v.LINKFLAGS_fcstlib = []
-
- v.FCSHLIB_MARKER = ''
- v.FCSTLIB_MARKER = ''
- v.SONAME_ST = ''
-
-@conf
-def fortran_modifier_win32(conf):
- """
- Defines Fortran flags for Windows platforms
- """
- v = conf.env
- v.fcprogram_PATTERN = v.fcprogram_test_PATTERN = '%s.exe'
-
- v.fcshlib_PATTERN = '%s.dll'
- v.implib_PATTERN = '%s.dll.a'
- v.IMPLIB_ST = '-Wl,--out-implib,%s'
-
- v.FCFLAGS_fcshlib = []
-
- # Auto-import is enabled by default even without this option,
- # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
- # that the linker emits otherwise.
- v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import'])
-
-@conf
-def fortran_modifier_cygwin(conf):
- """
- Defines Fortran flags for use on cygwin
- """
- fortran_modifier_win32(conf)
- v = conf.env
- v.fcshlib_PATTERN = 'cyg%s.dll'
- v.append_value('LINKFLAGS_fcshlib', ['-Wl,--enable-auto-image-base'])
- v.FCFLAGS_fcshlib = []
-
-# ------------------------------------------------------------------------
-
-@conf
-def check_fortran_dummy_main(self, *k, **kw):
- """
- Determines if a main function is needed by compiling a code snippet with
- the C compiler and linking it with the Fortran compiler (useful on unix-like systems)
- """
- if not self.env.CC:
- self.fatal('A c compiler is required for check_fortran_dummy_main')
-
- lst = ['MAIN__', '__MAIN', '_MAIN', 'MAIN_', 'MAIN']
- lst.extend([m.lower() for m in lst])
- lst.append('')
-
- self.start_msg('Detecting whether we need a dummy main')
- for main in lst:
- kw['fortran_main'] = main
- try:
- self.check_cc(
- fragment = 'int %s() { return 0; }\n' % (main or 'test'),
- features = 'c fcprogram',
- mandatory = True
- )
- if not main:
- self.env.FC_MAIN = -1
- self.end_msg('no')
- else:
- self.env.FC_MAIN = main
- self.end_msg('yes %s' % main)
- break
- except self.errors.ConfigurationError:
- pass
- else:
- self.end_msg('not found')
- self.fatal('could not detect whether fortran requires a dummy main, see the config.log')
-
-# ------------------------------------------------------------------------
-
-GCC_DRIVER_LINE = re.compile('^Driving:')
-POSIX_STATIC_EXT = re.compile('\S+\.a')
-POSIX_LIB_FLAGS = re.compile('-l\S+')
-
-@conf
-def is_link_verbose(self, txt):
- """Returns True if 'useful' link options can be found in txt"""
- assert isinstance(txt, str)
- for line in txt.splitlines():
- if not GCC_DRIVER_LINE.search(line):
- if POSIX_STATIC_EXT.search(line) or POSIX_LIB_FLAGS.search(line):
- return True
- return False
-
-@conf
-def check_fortran_verbose_flag(self, *k, **kw):
- """
- Checks what kind of verbose (-v) flag works, then sets it to env.FC_VERBOSE_FLAG
- """
- self.start_msg('fortran link verbose flag')
- for x in ('-v', '--verbose', '-verbose', '-V'):
- try:
- self.check_cc(
- features = 'fc fcprogram_test',
- fragment = FC_FRAGMENT2,
- compile_filename = 'test.f',
- linkflags = [x],
- mandatory=True)
- except self.errors.ConfigurationError:
- pass
- else:
- # output is on stderr or stdout (for xlf)
- if self.is_link_verbose(self.test_bld.err) or self.is_link_verbose(self.test_bld.out):
- self.end_msg(x)
- break
- else:
- self.end_msg('failure')
- self.fatal('Could not obtain the fortran link verbose flag (see config.log)')
-
- self.env.FC_VERBOSE_FLAG = x
- return x
-
-# ------------------------------------------------------------------------
-
-# linkflags which match those are ignored
-LINKFLAGS_IGNORED = [r'-lang*', r'-lcrt[a-zA-Z0-9\.]*\.o', r'-lc$', r'-lSystem', r'-libmil', r'-LIST:*', r'-LNO:*']
-if os.name == 'nt':
- LINKFLAGS_IGNORED.extend([r'-lfrt*', r'-luser32', r'-lkernel32', r'-ladvapi32', r'-lmsvcrt', r'-lshell32', r'-lmingw', r'-lmoldname'])
-else:
- LINKFLAGS_IGNORED.append(r'-lgcc*')
-RLINKFLAGS_IGNORED = [re.compile(f) for f in LINKFLAGS_IGNORED]
-
-def _match_ignore(line):
- """Returns True if the line should be ignored (Fortran verbose flag test)"""
- for i in RLINKFLAGS_IGNORED:
- if i.match(line):
- return True
- return False
-
-def parse_fortran_link(lines):
- """Given the output of verbose link of Fortran compiler, this returns a
- list of flags necessary for linking using the standard linker."""
- final_flags = []
- for line in lines:
- if not GCC_DRIVER_LINE.match(line):
- _parse_flink_line(line, final_flags)
- return final_flags
-
-SPACE_OPTS = re.compile('^-[LRuYz]$')
-NOSPACE_OPTS = re.compile('^-[RL]')
-
-def _parse_flink_token(lexer, token, tmp_flags):
- # Here we go (convention for wildcard is shell, not regex !)
- # 1 TODO: we first get some root .a libraries
- # 2 TODO: take everything starting by -bI:*
- # 3 Ignore the following flags: -lang* | -lcrt*.o | -lc |
- # -lgcc* | -lSystem | -libmil | -LANG:=* | -LIST:* | -LNO:*)
- # 4 take into account -lkernel32
- # 5 For options of the kind -[[LRuYz]], as they take one argument
- # after, the actual option is the next token
- # 6 For -YP,*: take and replace by -Larg where arg is the old
- # argument
- # 7 For -[lLR]*: take
-
- # step 3
- if _match_ignore(token):
- pass
- # step 4
- elif token.startswith('-lkernel32') and sys.platform == 'cygwin':
- tmp_flags.append(token)
- # step 5
- elif SPACE_OPTS.match(token):
- t = lexer.get_token()
- if t.startswith('P,'):
- t = t[2:]
- for opt in t.split(os.pathsep):
- tmp_flags.append('-L%s' % opt)
- # step 6
- elif NOSPACE_OPTS.match(token):
- tmp_flags.append(token)
- # step 7
- elif POSIX_LIB_FLAGS.match(token):
- tmp_flags.append(token)
- else:
- # ignore anything not explicitly taken into account
- pass
-
- t = lexer.get_token()
- return t
-
-def _parse_flink_line(line, final_flags):
- """private"""
- lexer = shlex.shlex(line, posix = True)
- lexer.whitespace_split = True
-
- t = lexer.get_token()
- tmp_flags = []
- while t:
- t = _parse_flink_token(lexer, t, tmp_flags)
-
- final_flags.extend(tmp_flags)
- return final_flags
-
-@conf
-def check_fortran_clib(self, autoadd=True, *k, **kw):
- """
- Obtains the flags for linking with the C library
- if this check works, add uselib='CLIB' to your task generators
- """
- if not self.env.FC_VERBOSE_FLAG:
- self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?')
-
- self.start_msg('Getting fortran runtime link flags')
- try:
- self.check_cc(
- fragment = FC_FRAGMENT2,
- compile_filename = 'test.f',
- features = 'fc fcprogram_test',
- linkflags = [self.env.FC_VERBOSE_FLAG]
- )
- except Exception:
- self.end_msg(False)
- if kw.get('mandatory', True):
- conf.fatal('Could not find the c library flags')
- else:
- out = self.test_bld.err
- flags = parse_fortran_link(out.splitlines())
- self.end_msg('ok (%s)' % ' '.join(flags))
- self.env.LINKFLAGS_CLIB = flags
- return flags
- return []
-
-def getoutput(conf, cmd, stdin=False):
- """
- Obtains Fortran command outputs
- """
- from waflib import Errors
- if conf.env.env:
- env = conf.env.env
- else:
- env = dict(os.environ)
- env['LANG'] = 'C'
- input = stdin and '\n'.encode() or None
- try:
- out, err = conf.cmd_and_log(cmd, env=env, output=0, input=input)
- except Errors.WafError as e:
- # An WafError might indicate an error code during the command
- # execution, in this case we still obtain the stderr and stdout,
- # which we can use to find the version string.
- if not (hasattr(e, 'stderr') and hasattr(e, 'stdout')):
- raise e
- else:
- # Ignore the return code and return the original
- # stdout and stderr.
- out = e.stdout
- err = e.stderr
- except Exception:
- conf.fatal('could not determine the compiler version %r' % cmd)
- return (out, err)
-
-# ------------------------------------------------------------------------
-
-ROUTINES_CODE = """\
- subroutine foobar()
- return
- end
- subroutine foo_bar()
- return
- end
-"""
-
-MAIN_CODE = """
-void %(dummy_func_nounder)s(void);
-void %(dummy_func_under)s(void);
-int %(main_func_name)s() {
- %(dummy_func_nounder)s();
- %(dummy_func_under)s();
- return 0;
-}
-"""
-
-@feature('link_main_routines_func')
-@before_method('process_source')
-def link_main_routines_tg_method(self):
- """
- The configuration test declares a unique task generator,
- so we create other task generators from there for fortran link tests
- """
- def write_test_file(task):
- task.outputs[0].write(task.generator.code)
- bld = self.bld
- bld(rule=write_test_file, target='main.c', code=MAIN_CODE % self.__dict__)
- bld(rule=write_test_file, target='test.f', code=ROUTINES_CODE)
- bld(features='fc fcstlib', source='test.f', target='test')
- bld(features='c fcprogram', source='main.c', target='app', use='test')
-
-def mangling_schemes():
- """
- Generate triplets for use with mangle_name
- (used in check_fortran_mangling)
- the order is tuned for gfortan
- """
- for u in ('_', ''):
- for du in ('', '_'):
- for c in ("lower", "upper"):
- yield (u, du, c)
-
-def mangle_name(u, du, c, name):
- """Mangle a name from a triplet (used in check_fortran_mangling)"""
- return getattr(name, c)() + u + (name.find('_') != -1 and du or '')
-
-@conf
-def check_fortran_mangling(self, *k, **kw):
- """
- Detect the mangling scheme, sets FORTRAN_MANGLING to the triplet found
-
- This test will compile a fortran static library, then link a c app against it
- """
- if not self.env.CC:
- self.fatal('A c compiler is required for link_main_routines')
- if not self.env.FC:
- self.fatal('A fortran compiler is required for link_main_routines')
- if not self.env.FC_MAIN:
- self.fatal('Checking for mangling requires self.env.FC_MAIN (execute "check_fortran_dummy_main" first?)')
-
- self.start_msg('Getting fortran mangling scheme')
- for (u, du, c) in mangling_schemes():
- try:
- self.check_cc(
- compile_filename = [],
- features = 'link_main_routines_func',
- msg = 'nomsg',
- errmsg = 'nomsg',
- dummy_func_nounder = mangle_name(u, du, c, 'foobar'),
- dummy_func_under = mangle_name(u, du, c, 'foo_bar'),
- main_func_name = self.env.FC_MAIN
- )
- except self.errors.ConfigurationError:
- pass
- else:
- self.end_msg("ok ('%s', '%s', '%s-case')" % (u, du, c))
- self.env.FORTRAN_MANGLING = (u, du, c)
- break
- else:
- self.end_msg(False)
- self.fatal('mangler not found')
- return (u, du, c)
-
-@feature('pyext')
-@before_method('propagate_uselib_vars', 'apply_link')
-def set_lib_pat(self):
- """Sets the Fortran flags for linking with Python"""
- self.env.fcshlib_PATTERN = self.env.pyext_PATTERN
-
-@conf
-def detect_openmp(self):
- """
- Detects openmp flags and sets the OPENMP ``FCFLAGS``/``LINKFLAGS``
- """
- for x in ('-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'):
- try:
- self.check_fc(
- msg = 'Checking for OpenMP flag %s' % x,
- fragment = 'program main\n call omp_get_num_threads()\nend program main',
- fcflags = x,
- linkflags = x,
- uselib_store = 'OPENMP'
- )
- except self.errors.ConfigurationError:
- pass
- else:
- break
- else:
- self.fatal('Could not find OpenMP')
-
-@conf
-def check_gfortran_o_space(self):
- if self.env.FC_NAME != 'GFORTRAN' or int(self.env.FC_VERSION[0]) > 4:
- # This is for old compilers and only for gfortran.
- # No idea how other implementations handle this. Be safe and bail out.
- return
- self.env.stash()
- self.env.FCLNK_TGT_F = ['-o', '']
- try:
- self.check_fc(msg='Checking if the -o link must be split from arguments', fragment=FC_FRAGMENT, features='fc fcshlib')
- except self.errors.ConfigurationError:
- self.env.revert()
- else:
- self.env.commit()
diff --git a/waflib/Tools/fc_scan.py b/waflib/Tools/fc_scan.py
deleted file mode 100644
index 12cb0fc0..00000000
--- a/waflib/Tools/fc_scan.py
+++ /dev/null
@@ -1,114 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# DC 2008
-# Thomas Nagy 2016-2018 (ita)
-
-import re
-
-INC_REGEX = """(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
-USE_REGEX = """(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
-MOD_REGEX = """(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
-
-re_inc = re.compile(INC_REGEX, re.I)
-re_use = re.compile(USE_REGEX, re.I)
-re_mod = re.compile(MOD_REGEX, re.I)
-
-class fortran_parser(object):
- """
- This parser returns:
-
- * the nodes corresponding to the module names to produce
- * the nodes corresponding to the include files used
- * the module names used by the fortran files
- """
- def __init__(self, incpaths):
- self.seen = []
- """Files already parsed"""
-
- self.nodes = []
- """List of :py:class:`waflib.Node.Node` representing the dependencies to return"""
-
- self.names = []
- """List of module names to return"""
-
- self.incpaths = incpaths
- """List of :py:class:`waflib.Node.Node` representing the include paths"""
-
- def find_deps(self, node):
- """
- Parses a Fortran file to obtain the dependencies used/provided
-
- :param node: fortran file to read
- :type node: :py:class:`waflib.Node.Node`
- :return: lists representing the includes, the modules used, and the modules created by a fortran file
- :rtype: tuple of list of strings
- """
- txt = node.read()
- incs = []
- uses = []
- mods = []
- for line in txt.splitlines():
- # line by line regexp search? optimize?
- m = re_inc.search(line)
- if m:
- incs.append(m.group(1))
- m = re_use.search(line)
- if m:
- uses.append(m.group(1))
- m = re_mod.search(line)
- if m:
- mods.append(m.group(1))
- return (incs, uses, mods)
-
- def start(self, node):
- """
- Start parsing. Use the stack ``self.waiting`` to hold nodes to iterate on
-
- :param node: fortran file
- :type node: :py:class:`waflib.Node.Node`
- """
- self.waiting = [node]
- while self.waiting:
- nd = self.waiting.pop(0)
- self.iter(nd)
-
- def iter(self, node):
- """
- Processes a single file during dependency parsing. Extracts files used
- modules used and modules provided.
- """
- incs, uses, mods = self.find_deps(node)
- for x in incs:
- if x in self.seen:
- continue
- self.seen.append(x)
- self.tryfind_header(x)
-
- for x in uses:
- name = "USE@%s" % x
- if not name in self.names:
- self.names.append(name)
-
- for x in mods:
- name = "MOD@%s" % x
- if not name in self.names:
- self.names.append(name)
-
- def tryfind_header(self, filename):
- """
- Adds an include file to the list of nodes to process
-
- :param filename: file name
- :type filename: string
- """
- found = None
- for n in self.incpaths:
- found = n.find_resource(filename)
- if found:
- self.nodes.append(found)
- self.waiting.append(found)
- break
- if not found:
- if not filename in self.names:
- self.names.append(filename)
-
diff --git a/waflib/Tools/flex.py b/waflib/Tools/flex.py
deleted file mode 100644
index 2256657b..00000000
--- a/waflib/Tools/flex.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# John O'Meara, 2006
-# Thomas Nagy, 2006-2018 (ita)
-
-"""
-The **flex** program is a code generator which creates C or C++ files.
-The generated files are compiled into object files.
-"""
-
-import os, re
-from waflib import Task, TaskGen
-from waflib.Tools import ccroot
-
-def decide_ext(self, node):
- if 'cxx' in self.features:
- return ['.lex.cc']
- return ['.lex.c']
-
-def flexfun(tsk):
- env = tsk.env
- bld = tsk.generator.bld
- wd = bld.variant_dir
- def to_list(xx):
- if isinstance(xx, str):
- return [xx]
- return xx
- tsk.last_cmd = lst = []
- lst.extend(to_list(env.FLEX))
- lst.extend(to_list(env.FLEXFLAGS))
- inputs = [a.path_from(tsk.get_cwd()) for a in tsk.inputs]
- if env.FLEX_MSYS:
- inputs = [x.replace(os.sep, '/') for x in inputs]
- lst.extend(inputs)
- lst = [x for x in lst if x]
- txt = bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0)
- tsk.outputs[0].write(txt.replace('\r\n', '\n').replace('\r', '\n')) # issue #1207
-
-TaskGen.declare_chain(
- name = 'flex',
- rule = flexfun, # issue #854
- ext_in = '.l',
- decider = decide_ext,
-)
-
-# To support the following:
-# bld(features='c', flexflags='-P/foo')
-Task.classes['flex'].vars = ['FLEXFLAGS', 'FLEX']
-ccroot.USELIB_VARS['c'].add('FLEXFLAGS')
-ccroot.USELIB_VARS['cxx'].add('FLEXFLAGS')
-
-def configure(conf):
- """
- Detect the *flex* program
- """
- conf.find_program('flex', var='FLEX')
- conf.env.FLEXFLAGS = ['-t']
-
- if re.search (r"\\msys\\[0-9.]+\\bin\\flex.exe$", conf.env.FLEX[0]):
- # this is the flex shipped with MSYS
- conf.env.FLEX_MSYS = True
-
diff --git a/waflib/Tools/g95.py b/waflib/Tools/g95.py
deleted file mode 100644
index f69ba4f3..00000000
--- a/waflib/Tools/g95.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# KWS 2010
-# Thomas Nagy 2016-2018 (ita)
-
-import re
-from waflib import Utils
-from waflib.Tools import fc, fc_config, fc_scan, ar
-from waflib.Configure import conf
-
-@conf
-def find_g95(conf):
- fc = conf.find_program('g95', var='FC')
- conf.get_g95_version(fc)
- conf.env.FC_NAME = 'G95'
-
-@conf
-def g95_flags(conf):
- v = conf.env
- v.FCFLAGS_fcshlib = ['-fPIC']
- v.FORTRANMODFLAG = ['-fmod=', ''] # template for module path
- v.FCFLAGS_DEBUG = ['-Werror'] # why not
-
-@conf
-def g95_modifier_win32(conf):
- fc_config.fortran_modifier_win32(conf)
-
-@conf
-def g95_modifier_cygwin(conf):
- fc_config.fortran_modifier_cygwin(conf)
-
-@conf
-def g95_modifier_darwin(conf):
- fc_config.fortran_modifier_darwin(conf)
-
-@conf
-def g95_modifier_platform(conf):
- dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform()
- g95_modifier_func = getattr(conf, 'g95_modifier_' + dest_os, None)
- if g95_modifier_func:
- g95_modifier_func()
-
-@conf
-def get_g95_version(conf, fc):
- """get the compiler version"""
-
- version_re = re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search
- cmd = fc + ['--version']
- out, err = fc_config.getoutput(conf, cmd, stdin=False)
- if out:
- match = version_re(out)
- else:
- match = version_re(err)
- if not match:
- conf.fatal('cannot determine g95 version')
- k = match.groupdict()
- conf.env.FC_VERSION = (k['major'], k['minor'])
-
-def configure(conf):
- conf.find_g95()
- conf.find_ar()
- conf.fc_flags()
- conf.fc_add_flags()
- conf.g95_flags()
- conf.g95_modifier_platform()
-
diff --git a/waflib/Tools/gas.py b/waflib/Tools/gas.py
deleted file mode 100644
index 77afed70..00000000
--- a/waflib/Tools/gas.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2008-2018 (ita)
-
-"Detect as/gas/gcc for compiling assembly files"
-
-import waflib.Tools.asm # - leave this
-from waflib.Tools import ar
-
-def configure(conf):
- """
- Find the programs gas/as/gcc and set the variable *AS*
- """
- conf.find_program(['gas', 'gcc'], var='AS')
- conf.env.AS_TGT_F = ['-c', '-o']
- conf.env.ASLNK_TGT_F = ['-o']
- conf.find_ar()
- conf.load('asm')
diff --git a/waflib/Tools/gcc.py b/waflib/Tools/gcc.py
index acdd473a..0b897c7d 100644
--- a/waflib/Tools/gcc.py
+++ b/waflib/Tools/gcc.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
+# Thomas Nagy, 2006-2010 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
@@ -27,51 +27,54 @@ def gcc_common_flags(conf):
"""
v = conf.env
- v.CC_SRC_F = []
- v.CC_TGT_F = ['-c', '-o']
+ v['CC_SRC_F'] = []
+ v['CC_TGT_F'] = ['-c', '-o']
- if not v.LINK_CC:
- v.LINK_CC = v.CC
+ # linker
+ if not v['LINK_CC']: v['LINK_CC'] = v['CC']
+ v['CCLNK_SRC_F'] = []
+ v['CCLNK_TGT_F'] = ['-o']
+ v['CPPPATH_ST'] = '-I%s'
+ v['DEFINES_ST'] = '-D%s'
- v.CCLNK_SRC_F = []
- v.CCLNK_TGT_F = ['-o']
- v.CPPPATH_ST = '-I%s'
- v.DEFINES_ST = '-D%s'
+ v['LIB_ST'] = '-l%s' # template for adding libs
+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
+ v['STLIB_ST'] = '-l%s'
+ v['STLIBPATH_ST'] = '-L%s'
+ v['RPATH_ST'] = '-Wl,-rpath,%s'
- v.LIB_ST = '-l%s' # template for adding libs
- v.LIBPATH_ST = '-L%s' # template for adding libpaths
- v.STLIB_ST = '-l%s'
- v.STLIBPATH_ST = '-L%s'
- v.RPATH_ST = '-Wl,-rpath,%s'
+ v['SONAME_ST'] = '-Wl,-h,%s'
+ v['SHLIB_MARKER'] = '-Wl,-Bdynamic'
+ v['STLIB_MARKER'] = '-Wl,-Bstatic'
- v.SONAME_ST = '-Wl,-h,%s'
- v.SHLIB_MARKER = '-Wl,-Bdynamic'
- v.STLIB_MARKER = '-Wl,-Bstatic'
+ # program
+ v['cprogram_PATTERN'] = '%s'
- v.cprogram_PATTERN = '%s'
+ # shared librar
+ v['CFLAGS_cshlib'] = ['-fPIC']
+ v['LINKFLAGS_cshlib'] = ['-shared']
+ v['cshlib_PATTERN'] = 'lib%s.so'
- v.CFLAGS_cshlib = ['-fPIC']
- v.LINKFLAGS_cshlib = ['-shared']
- v.cshlib_PATTERN = 'lib%s.so'
+ # static lib
+ v['LINKFLAGS_cstlib'] = ['-Wl,-Bstatic']
+ v['cstlib_PATTERN'] = 'lib%s.a'
- v.LINKFLAGS_cstlib = ['-Wl,-Bstatic']
- v.cstlib_PATTERN = 'lib%s.a'
-
- v.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
- v.CFLAGS_MACBUNDLE = ['-fPIC']
- v.macbundle_PATTERN = '%s.bundle'
+ # osx stuff
+ v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
+ v['CFLAGS_MACBUNDLE'] = ['-fPIC']
+ v['macbundle_PATTERN'] = '%s.bundle'
@conf
def gcc_modifier_win32(conf):
"""Configuration flags for executing gcc on Windows"""
v = conf.env
- v.cprogram_PATTERN = '%s.exe'
+ v['cprogram_PATTERN'] = '%s.exe'
- v.cshlib_PATTERN = '%s.dll'
- v.implib_PATTERN = '%s.dll.a'
- v.IMPLIB_ST = '-Wl,--out-implib,%s'
+ v['cshlib_PATTERN'] = '%s.dll'
+ v['implib_PATTERN'] = 'lib%s.dll.a'
+ v['IMPLIB_ST'] = '-Wl,--out-implib,%s'
- v.CFLAGS_cshlib = []
+ v['CFLAGS_cshlib'] = []
# Auto-import is enabled by default even without this option,
# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
@@ -83,42 +86,42 @@ def gcc_modifier_cygwin(conf):
"""Configuration flags for executing gcc on Cygwin"""
gcc_modifier_win32(conf)
v = conf.env
- v.cshlib_PATTERN = 'cyg%s.dll'
+ v['cshlib_PATTERN'] = 'cyg%s.dll'
v.append_value('LINKFLAGS_cshlib', ['-Wl,--enable-auto-image-base'])
- v.CFLAGS_cshlib = []
+ v['CFLAGS_cshlib'] = []
@conf
def gcc_modifier_darwin(conf):
"""Configuration flags for executing gcc on MacOS"""
v = conf.env
- v.CFLAGS_cshlib = ['-fPIC']
- v.LINKFLAGS_cshlib = ['-dynamiclib']
- v.cshlib_PATTERN = 'lib%s.dylib'
- v.FRAMEWORKPATH_ST = '-F%s'
- v.FRAMEWORK_ST = ['-framework']
- v.ARCH_ST = ['-arch']
+ v['CFLAGS_cshlib'] = ['-fPIC']
+ v['LINKFLAGS_cshlib'] = ['-dynamiclib']
+ v['cshlib_PATTERN'] = 'lib%s.dylib'
+ v['FRAMEWORKPATH_ST'] = '-F%s'
+ v['FRAMEWORK_ST'] = ['-framework']
+ v['ARCH_ST'] = ['-arch']
- v.LINKFLAGS_cstlib = []
+ v['LINKFLAGS_cstlib'] = []
- v.SHLIB_MARKER = []
- v.STLIB_MARKER = []
- v.SONAME_ST = []
+ v['SHLIB_MARKER'] = []
+ v['STLIB_MARKER'] = []
+ v['SONAME_ST'] = []
@conf
def gcc_modifier_aix(conf):
"""Configuration flags for executing gcc on AIX"""
v = conf.env
- v.LINKFLAGS_cprogram = ['-Wl,-brtl']
- v.LINKFLAGS_cshlib = ['-shared','-Wl,-brtl,-bexpfull']
- v.SHLIB_MARKER = []
+ v['LINKFLAGS_cprogram'] = ['-Wl,-brtl']
+ v['LINKFLAGS_cshlib'] = ['-shared','-Wl,-brtl,-bexpfull']
+ v['SHLIB_MARKER'] = []
@conf
def gcc_modifier_hpux(conf):
v = conf.env
- v.SHLIB_MARKER = []
- v.STLIB_MARKER = []
- v.CFLAGS_cshlib = ['-fPIC','-DPIC']
- v.cshlib_PATTERN = 'lib%s.sl'
+ v['SHLIB_MARKER'] = []
+ v['STLIB_MARKER'] = []
+ v['CFLAGS_cshlib'] = ['-fPIC','-DPIC']
+ v['cshlib_PATTERN'] = 'lib%s.sl'
@conf
def gcc_modifier_openbsd(conf):
@@ -127,9 +130,9 @@ def gcc_modifier_openbsd(conf):
@conf
def gcc_modifier_osf1V(conf):
v = conf.env
- v.SHLIB_MARKER = []
- v.STLIB_MARKER = []
- v.SONAME_ST = []
+ v['SHLIB_MARKER'] = []
+ v['STLIB_MARKER'] = []
+ v['SONAME_ST'] = []
@conf
def gcc_modifier_platform(conf):
@@ -152,5 +155,5 @@ def configure(conf):
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
- conf.check_gcc_o_space()
+
diff --git a/waflib/Tools/gdc.py b/waflib/Tools/gdc.py
deleted file mode 100644
index d89a66d3..00000000
--- a/waflib/Tools/gdc.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Carlos Rafael Giani, 2007 (dv)
-
-from waflib.Tools import ar, d
-from waflib.Configure import conf
-
-@conf
-def find_gdc(conf):
- """
- Finds the program gdc and set the variable *D*
- """
- conf.find_program('gdc', var='D')
-
- out = conf.cmd_and_log(conf.env.D + ['--version'])
- if out.find("gdc") == -1:
- conf.fatal("detected compiler is not gdc")
-
-@conf
-def common_flags_gdc(conf):
- """
- Sets the flags required by *gdc*
- """
- v = conf.env
-
- v.DFLAGS = []
-
- v.D_SRC_F = ['-c']
- v.D_TGT_F = '-o%s'
-
- v.D_LINKER = v.D
- v.DLNK_SRC_F = ''
- v.DLNK_TGT_F = '-o%s'
- v.DINC_ST = '-I%s'
-
- v.DSHLIB_MARKER = v.DSTLIB_MARKER = ''
- v.DSTLIB_ST = v.DSHLIB_ST = '-l%s'
- v.DSTLIBPATH_ST = v.DLIBPATH_ST = '-L%s'
-
- v.LINKFLAGS_dshlib = ['-shared']
-
- v.DHEADER_ext = '.di'
- v.DFLAGS_d_with_header = '-fintfc'
- v.D_HDR_F = '-fintfc-file=%s'
-
-def configure(conf):
- """
- Configuration for gdc
- """
- conf.find_gdc()
- conf.load('ar')
- conf.load('d')
- conf.common_flags_gdc()
- conf.d_platform_flags()
-
diff --git a/waflib/Tools/gfortran.py b/waflib/Tools/gfortran.py
deleted file mode 100644
index 10506673..00000000
--- a/waflib/Tools/gfortran.py
+++ /dev/null
@@ -1,93 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# DC 2008
-# Thomas Nagy 2016-2018 (ita)
-
-import re
-from waflib import Utils
-from waflib.Tools import fc, fc_config, fc_scan, ar
-from waflib.Configure import conf
-
-@conf
-def find_gfortran(conf):
- """Find the gfortran program (will look in the environment variable 'FC')"""
- fc = conf.find_program(['gfortran','g77'], var='FC')
- # (fallback to g77 for systems, where no gfortran is available)
- conf.get_gfortran_version(fc)
- conf.env.FC_NAME = 'GFORTRAN'
-
-@conf
-def gfortran_flags(conf):
- v = conf.env
- v.FCFLAGS_fcshlib = ['-fPIC']
- v.FORTRANMODFLAG = ['-J', ''] # template for module path
- v.FCFLAGS_DEBUG = ['-Werror'] # why not
-
-@conf
-def gfortran_modifier_win32(conf):
- fc_config.fortran_modifier_win32(conf)
-
-@conf
-def gfortran_modifier_cygwin(conf):
- fc_config.fortran_modifier_cygwin(conf)
-
-@conf
-def gfortran_modifier_darwin(conf):
- fc_config.fortran_modifier_darwin(conf)
-
-@conf
-def gfortran_modifier_platform(conf):
- dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform()
- gfortran_modifier_func = getattr(conf, 'gfortran_modifier_' + dest_os, None)
- if gfortran_modifier_func:
- gfortran_modifier_func()
-
-@conf
-def get_gfortran_version(conf, fc):
- """Get the compiler version"""
-
- # ensure this is actually gfortran, not an imposter.
- version_re = re.compile(r"GNU\s*Fortran", re.I).search
- cmd = fc + ['--version']
- out, err = fc_config.getoutput(conf, cmd, stdin=False)
- if out:
- match = version_re(out)
- else:
- match = version_re(err)
- if not match:
- conf.fatal('Could not determine the compiler type')
-
- # --- now get more detailed info -- see c_config.get_cc_version
- cmd = fc + ['-dM', '-E', '-']
- out, err = fc_config.getoutput(conf, cmd, stdin=True)
-
- if out.find('__GNUC__') < 0:
- conf.fatal('Could not determine the compiler type')
-
- k = {}
- out = out.splitlines()
- import shlex
-
- for line in out:
- lst = shlex.split(line)
- if len(lst)>2:
- key = lst[1]
- val = lst[2]
- k[key] = val
-
- def isD(var):
- return var in k
-
- def isT(var):
- return var in k and k[var] != '0'
-
- conf.env.FC_VERSION = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])
-
-def configure(conf):
- conf.find_gfortran()
- conf.find_ar()
- conf.fc_flags()
- conf.fc_add_flags()
- conf.gfortran_flags()
- conf.gfortran_modifier_platform()
- conf.check_gfortran_o_space()
diff --git a/waflib/Tools/glib2.py b/waflib/Tools/glib2.py
deleted file mode 100644
index 949fe37c..00000000
--- a/waflib/Tools/glib2.py
+++ /dev/null
@@ -1,489 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-
-"""
-Support for GLib2 tools:
-
-* marshal
-* enums
-* gsettings
-* gresource
-"""
-
-import os
-import functools
-from waflib import Context, Task, Utils, Options, Errors, Logs
-from waflib.TaskGen import taskgen_method, before_method, feature, extension
-from waflib.Configure import conf
-
-################## marshal files
-
-@taskgen_method
-def add_marshal_file(self, filename, prefix):
- """
- Adds a file to the list of marshal files to process. Store them in the attribute *marshal_list*.
-
- :param filename: xml file to compile
- :type filename: string
- :param prefix: marshal prefix (--prefix=prefix)
- :type prefix: string
- """
- if not hasattr(self, 'marshal_list'):
- self.marshal_list = []
- self.meths.append('process_marshal')
- self.marshal_list.append((filename, prefix))
-
-@before_method('process_source')
-def process_marshal(self):
- """
- Processes the marshal files stored in the attribute *marshal_list* to create :py:class:`waflib.Tools.glib2.glib_genmarshal` instances.
- Adds the c file created to the list of source to process.
- """
- for f, prefix in getattr(self, 'marshal_list', []):
- node = self.path.find_resource(f)
-
- if not node:
- raise Errors.WafError('file not found %r' % f)
-
- h_node = node.change_ext('.h')
- c_node = node.change_ext('.c')
-
- task = self.create_task('glib_genmarshal', node, [h_node, c_node])
- task.env.GLIB_GENMARSHAL_PREFIX = prefix
- self.source = self.to_nodes(getattr(self, 'source', []))
- self.source.append(c_node)
-
-class glib_genmarshal(Task.Task):
- vars = ['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL']
- color = 'BLUE'
- ext_out = ['.h']
- def run(self):
- bld = self.generator.bld
-
- get = self.env.get_flat
- cmd1 = "%s %s --prefix=%s --header > %s" % (
- get('GLIB_GENMARSHAL'),
- self.inputs[0].srcpath(),
- get('GLIB_GENMARSHAL_PREFIX'),
- self.outputs[0].abspath()
- )
-
- ret = bld.exec_command(cmd1)
- if ret:
- return ret
-
- #print self.outputs[1].abspath()
- c = '''#include "%s"\n''' % self.outputs[0].name
- self.outputs[1].write(c)
-
- cmd2 = "%s %s --prefix=%s --body >> %s" % (
- get('GLIB_GENMARSHAL'),
- self.inputs[0].srcpath(),
- get('GLIB_GENMARSHAL_PREFIX'),
- self.outputs[1].abspath()
- )
- return bld.exec_command(cmd2)
-
-########################## glib-mkenums
-
-@taskgen_method
-def add_enums_from_template(self, source='', target='', template='', comments=''):
- """
- Adds a file to the list of enum files to process. Stores them in the attribute *enums_list*.
-
- :param source: enum file to process
- :type source: string
- :param target: target file
- :type target: string
- :param template: template file
- :type template: string
- :param comments: comments
- :type comments: string
- """
- if not hasattr(self, 'enums_list'):
- self.enums_list = []
- self.meths.append('process_enums')
- self.enums_list.append({'source': source,
- 'target': target,
- 'template': template,
- 'file-head': '',
- 'file-prod': '',
- 'file-tail': '',
- 'enum-prod': '',
- 'value-head': '',
- 'value-prod': '',
- 'value-tail': '',
- 'comments': comments})
-
-@taskgen_method
-def add_enums(self, source='', target='',
- file_head='', file_prod='', file_tail='', enum_prod='',
- value_head='', value_prod='', value_tail='', comments=''):
- """
- Adds a file to the list of enum files to process. Stores them in the attribute *enums_list*.
-
- :param source: enum file to process
- :type source: string
- :param target: target file
- :type target: string
- :param file_head: unused
- :param file_prod: unused
- :param file_tail: unused
- :param enum_prod: unused
- :param value_head: unused
- :param value_prod: unused
- :param value_tail: unused
- :param comments: comments
- :type comments: string
- """
- if not hasattr(self, 'enums_list'):
- self.enums_list = []
- self.meths.append('process_enums')
- self.enums_list.append({'source': source,
- 'template': '',
- 'target': target,
- 'file-head': file_head,
- 'file-prod': file_prod,
- 'file-tail': file_tail,
- 'enum-prod': enum_prod,
- 'value-head': value_head,
- 'value-prod': value_prod,
- 'value-tail': value_tail,
- 'comments': comments})
-
-@before_method('process_source')
-def process_enums(self):
- """
- Processes the enum files stored in the attribute *enum_list* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances.
- """
- for enum in getattr(self, 'enums_list', []):
- task = self.create_task('glib_mkenums')
- env = task.env
-
- inputs = []
-
- # process the source
- source_list = self.to_list(enum['source'])
- if not source_list:
- raise Errors.WafError('missing source ' + str(enum))
- source_list = [self.path.find_resource(k) for k in source_list]
- inputs += source_list
- env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list]
-
- # find the target
- if not enum['target']:
- raise Errors.WafError('missing target ' + str(enum))
- tgt_node = self.path.find_or_declare(enum['target'])
- if tgt_node.name.endswith('.c'):
- self.source.append(tgt_node)
- env.GLIB_MKENUMS_TARGET = tgt_node.abspath()
-
-
- options = []
-
- if enum['template']: # template, if provided
- template_node = self.path.find_resource(enum['template'])
- options.append('--template %s' % (template_node.abspath()))
- inputs.append(template_node)
- params = {'file-head' : '--fhead',
- 'file-prod' : '--fprod',
- 'file-tail' : '--ftail',
- 'enum-prod' : '--eprod',
- 'value-head' : '--vhead',
- 'value-prod' : '--vprod',
- 'value-tail' : '--vtail',
- 'comments': '--comments'}
- for param, option in params.items():
- if enum[param]:
- options.append('%s %r' % (option, enum[param]))
-
- env.GLIB_MKENUMS_OPTIONS = ' '.join(options)
-
- # update the task instance
- task.set_inputs(inputs)
- task.set_outputs(tgt_node)
-
-class glib_mkenums(Task.Task):
- """
- Processes enum files
- """
- run_str = '${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}'
- color = 'PINK'
- ext_out = ['.h']
-
-######################################### gsettings
-
-@taskgen_method
-def add_settings_schemas(self, filename_list):
- """
- Adds settings files to process to *settings_schema_files*
-
- :param filename_list: files
- :type filename_list: list of string
- """
- if not hasattr(self, 'settings_schema_files'):
- self.settings_schema_files = []
-
- if not isinstance(filename_list, list):
- filename_list = [filename_list]
-
- self.settings_schema_files.extend(filename_list)
-
-@taskgen_method
-def add_settings_enums(self, namespace, filename_list):
- """
- Called only once by task generator to set the enums namespace.
-
- :param namespace: namespace
- :type namespace: string
- :param filename_list: enum files to process
- :type filename_list: file list
- """
- if hasattr(self, 'settings_enum_namespace'):
- raise Errors.WafError("Tried to add gsettings enums to %r more than once" % self.name)
- self.settings_enum_namespace = namespace
-
- if not isinstance(filename_list, list):
- filename_list = [filename_list]
- self.settings_enum_files = filename_list
-
-@feature('glib2')
-def process_settings(self):
- """
- Processes the schema files in *settings_schema_files* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances. The
- same files are validated through :py:class:`waflib.Tools.glib2.glib_validate_schema` tasks.
-
- """
- enums_tgt_node = []
- install_files = []
-
- settings_schema_files = getattr(self, 'settings_schema_files', [])
- if settings_schema_files and not self.env.GLIB_COMPILE_SCHEMAS:
- raise Errors.WafError ("Unable to process GSettings schemas - glib-compile-schemas was not found during configure")
-
- # 1. process gsettings_enum_files (generate .enums.xml)
- #
- if hasattr(self, 'settings_enum_files'):
- enums_task = self.create_task('glib_mkenums')
-
- source_list = self.settings_enum_files
- source_list = [self.path.find_resource(k) for k in source_list]
- enums_task.set_inputs(source_list)
- enums_task.env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list]
-
- target = self.settings_enum_namespace + '.enums.xml'
- tgt_node = self.path.find_or_declare(target)
- enums_task.set_outputs(tgt_node)
- enums_task.env.GLIB_MKENUMS_TARGET = tgt_node.abspath()
- enums_tgt_node = [tgt_node]
-
- install_files.append(tgt_node)
-
- options = '--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead " <@type@ id=\\"%s.@EnumName@\\">" --vprod " <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail " </@type@>" --ftail "</schemalist>" ' % (self.settings_enum_namespace)
- enums_task.env.GLIB_MKENUMS_OPTIONS = options
-
- # 2. process gsettings_schema_files (validate .gschema.xml files)
- #
- for schema in settings_schema_files:
- schema_task = self.create_task ('glib_validate_schema')
-
- schema_node = self.path.find_resource(schema)
- if not schema_node:
- raise Errors.WafError("Cannot find the schema file %r" % schema)
- install_files.append(schema_node)
- source_list = enums_tgt_node + [schema_node]
-
- schema_task.set_inputs (source_list)
- schema_task.env.GLIB_COMPILE_SCHEMAS_OPTIONS = [("--schema-file=" + k.abspath()) for k in source_list]
-
- target_node = schema_node.change_ext('.xml.valid')
- schema_task.set_outputs (target_node)
- schema_task.env.GLIB_VALIDATE_SCHEMA_OUTPUT = target_node.abspath()
-
- # 3. schemas install task
- def compile_schemas_callback(bld):
- if not bld.is_install:
- return
- compile_schemas = Utils.to_list(bld.env.GLIB_COMPILE_SCHEMAS)
- destdir = Options.options.destdir
- paths = bld._compile_schemas_registered
- if destdir:
- paths = (os.path.join(destdir, path.lstrip(os.sep)) for path in paths)
- for path in paths:
- Logs.pprint('YELLOW', 'Updating GSettings schema cache %r' % path)
- if self.bld.exec_command(compile_schemas + [path]):
- Logs.warn('Could not update GSettings schema cache %r' % path)
-
- if self.bld.is_install:
- schemadir = self.env.GSETTINGSSCHEMADIR
- if not schemadir:
- raise Errors.WafError ('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)')
-
- if install_files:
- self.add_install_files(install_to=schemadir, install_from=install_files)
- registered_schemas = getattr(self.bld, '_compile_schemas_registered', None)
- if not registered_schemas:
- registered_schemas = self.bld._compile_schemas_registered = set()
- self.bld.add_post_fun(compile_schemas_callback)
- registered_schemas.add(schemadir)
-
-class glib_validate_schema(Task.Task):
- """
- Validates schema files
- """
- run_str = 'rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}'
- color = 'PINK'
-
-################## gresource
-
-@extension('.gresource.xml')
-def process_gresource_source(self, node):
- """
- Creates tasks that turn ``.gresource.xml`` files to C code
- """
- if not self.env.GLIB_COMPILE_RESOURCES:
- raise Errors.WafError ("Unable to process GResource file - glib-compile-resources was not found during configure")
-
- if 'gresource' in self.features:
- return
-
- h_node = node.change_ext('_xml.h')
- c_node = node.change_ext('_xml.c')
- self.create_task('glib_gresource_source', node, [h_node, c_node])
- self.source.append(c_node)
-
-@feature('gresource')
-def process_gresource_bundle(self):
- """
- Creates tasks to turn ``.gresource`` files from ``.gresource.xml`` files::
-
- def build(bld):
- bld(
- features='gresource',
- source=['resources1.gresource.xml', 'resources2.gresource.xml'],
- install_path='${LIBDIR}/${PACKAGE}'
- )
-
- :param source: XML files to process
- :type source: list of string
- :param install_path: installation path
- :type install_path: string
- """
- for i in self.to_list(self.source):
- node = self.path.find_resource(i)
-
- task = self.create_task('glib_gresource_bundle', node, node.change_ext(''))
- inst_to = getattr(self, 'install_path', None)
- if inst_to:
- self.add_install_files(install_to=inst_to, install_from=task.outputs)
-
-class glib_gresource_base(Task.Task):
- """
- Base class for gresource based tasks
- """
- color = 'BLUE'
- base_cmd = '${GLIB_COMPILE_RESOURCES} --sourcedir=${SRC[0].parent.srcpath()} --sourcedir=${SRC[0].bld_dir()}'
-
- def scan(self):
- """
- Scans gresource dependencies through ``glib-compile-resources --generate-dependencies command``
- """
- bld = self.generator.bld
- kw = {}
- kw['cwd'] = self.get_cwd()
- kw['quiet'] = Context.BOTH
-
- cmd = Utils.subst_vars('${GLIB_COMPILE_RESOURCES} --sourcedir=%s --sourcedir=%s --generate-dependencies %s' % (
- self.inputs[0].parent.srcpath(),
- self.inputs[0].bld_dir(),
- self.inputs[0].bldpath()
- ), self.env)
-
- output = bld.cmd_and_log(cmd, **kw)
-
- nodes = []
- names = []
- for dep in output.splitlines():
- if dep:
- node = bld.bldnode.find_node(dep)
- if node:
- nodes.append(node)
- else:
- names.append(dep)
-
- return (nodes, names)
-
-class glib_gresource_source(glib_gresource_base):
- """
- Task to generate C source code (.h and .c files) from a gresource.xml file
- """
- vars = ['GLIB_COMPILE_RESOURCES']
- fun_h = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[0].abspath()} --generate-header ${SRC}')
- fun_c = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[1].abspath()} --generate-source ${SRC}')
- ext_out = ['.h']
-
- def run(self):
- return self.fun_h[0](self) or self.fun_c[0](self)
-
-class glib_gresource_bundle(glib_gresource_base):
- """
- Task to generate a .gresource binary file from a gresource.xml file
- """
- run_str = glib_gresource_base.base_cmd + ' --target=${TGT} ${SRC}'
- shell = True # temporary workaround for #795
-
-@conf
-def find_glib_genmarshal(conf):
- conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL')
-
-@conf
-def find_glib_mkenums(conf):
- if not conf.env.PERL:
- conf.find_program('perl', var='PERL')
- conf.find_program('glib-mkenums', interpreter='PERL', var='GLIB_MKENUMS')
-
-@conf
-def find_glib_compile_schemas(conf):
- # when cross-compiling, gsettings.m4 locates the program with the following:
- # pkg-config --variable glib_compile_schemas gio-2.0
- conf.find_program('glib-compile-schemas', var='GLIB_COMPILE_SCHEMAS')
-
- def getstr(varname):
- return getattr(Options.options, varname, getattr(conf.env,varname, ''))
-
- gsettingsschemadir = getstr('GSETTINGSSCHEMADIR')
- if not gsettingsschemadir:
- datadir = getstr('DATADIR')
- if not datadir:
- prefix = conf.env.PREFIX
- datadir = os.path.join(prefix, 'share')
- gsettingsschemadir = os.path.join(datadir, 'glib-2.0', 'schemas')
-
- conf.env.GSETTINGSSCHEMADIR = gsettingsschemadir
-
-@conf
-def find_glib_compile_resources(conf):
- conf.find_program('glib-compile-resources', var='GLIB_COMPILE_RESOURCES')
-
-def configure(conf):
- """
- Finds the following programs:
-
- * *glib-genmarshal* and set *GLIB_GENMARSHAL*
- * *glib-mkenums* and set *GLIB_MKENUMS*
- * *glib-compile-schemas* and set *GLIB_COMPILE_SCHEMAS* (not mandatory)
- * *glib-compile-resources* and set *GLIB_COMPILE_RESOURCES* (not mandatory)
- """
- conf.find_glib_genmarshal()
- conf.find_glib_mkenums()
- conf.find_glib_compile_schemas(mandatory=False)
- conf.find_glib_compile_resources(mandatory=False)
-
-def options(opt):
- """
- Adds the ``--gsettingsschemadir`` command-line option
- """
- gr = opt.add_option_group('Installation directories')
- gr.add_option('--gsettingsschemadir', help='GSettings schema location [DATADIR/glib-2.0/schemas]', default='', dest='GSETTINGSSCHEMADIR')
-
diff --git a/waflib/Tools/gnu_dirs.py b/waflib/Tools/gnu_dirs.py
deleted file mode 100644
index 2847071d..00000000
--- a/waflib/Tools/gnu_dirs.py
+++ /dev/null
@@ -1,131 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Ali Sabil, 2007
-
-"""
-Sets various standard variables such as INCLUDEDIR. SBINDIR and others. To use this module just call::
-
- opt.load('gnu_dirs')
-
-and::
-
- conf.load('gnu_dirs')
-
-Add options for the standard GNU directories, this tool will add the options
-found in autotools, and will update the environment with the following
-installation variables:
-
-============== ========================================= =======================
-Variable Description Default Value
-============== ========================================= =======================
-PREFIX installation prefix /usr/local
-EXEC_PREFIX installation prefix for binaries PREFIX
-BINDIR user commands EXEC_PREFIX/bin
-SBINDIR system binaries EXEC_PREFIX/sbin
-LIBEXECDIR program-specific binaries EXEC_PREFIX/libexec
-SYSCONFDIR host-specific configuration PREFIX/etc
-SHAREDSTATEDIR architecture-independent variable data PREFIX/com
-LOCALSTATEDIR variable data PREFIX/var
-LIBDIR object code libraries EXEC_PREFIX/lib
-INCLUDEDIR header files PREFIX/include
-OLDINCLUDEDIR header files for non-GCC compilers /usr/include
-DATAROOTDIR architecture-independent data root PREFIX/share
-DATADIR architecture-independent data DATAROOTDIR
-INFODIR GNU "info" documentation DATAROOTDIR/info
-LOCALEDIR locale-dependent data DATAROOTDIR/locale
-MANDIR manual pages DATAROOTDIR/man
-DOCDIR documentation root DATAROOTDIR/doc/APPNAME
-HTMLDIR HTML documentation DOCDIR
-DVIDIR DVI documentation DOCDIR
-PDFDIR PDF documentation DOCDIR
-PSDIR PostScript documentation DOCDIR
-============== ========================================= =======================
-"""
-
-import os, re
-from waflib import Utils, Options, Context
-
-gnuopts = '''
-bindir, user commands, ${EXEC_PREFIX}/bin
-sbindir, system binaries, ${EXEC_PREFIX}/sbin
-libexecdir, program-specific binaries, ${EXEC_PREFIX}/libexec
-sysconfdir, host-specific configuration, ${PREFIX}/etc
-sharedstatedir, architecture-independent variable data, ${PREFIX}/com
-localstatedir, variable data, ${PREFIX}/var
-libdir, object code libraries, ${EXEC_PREFIX}/lib%s
-includedir, header files, ${PREFIX}/include
-oldincludedir, header files for non-GCC compilers, /usr/include
-datarootdir, architecture-independent data root, ${PREFIX}/share
-datadir, architecture-independent data, ${DATAROOTDIR}
-infodir, GNU "info" documentation, ${DATAROOTDIR}/info
-localedir, locale-dependent data, ${DATAROOTDIR}/locale
-mandir, manual pages, ${DATAROOTDIR}/man
-docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
-htmldir, HTML documentation, ${DOCDIR}
-dvidir, DVI documentation, ${DOCDIR}
-pdfdir, PDF documentation, ${DOCDIR}
-psdir, PostScript documentation, ${DOCDIR}
-''' % Utils.lib64()
-
-_options = [x.split(', ') for x in gnuopts.splitlines() if x]
-
-def configure(conf):
- """
- Reads the command-line options to set lots of variables in *conf.env*. The variables
- BINDIR and LIBDIR will be overwritten.
- """
- def get_param(varname, default):
- return getattr(Options.options, varname, '') or default
-
- env = conf.env
- env.LIBDIR = env.BINDIR = []
- env.EXEC_PREFIX = get_param('EXEC_PREFIX', env.PREFIX)
- env.PACKAGE = getattr(Context.g_module, 'APPNAME', None) or env.PACKAGE
-
- complete = False
- iter = 0
- while not complete and iter < len(_options) + 1:
- iter += 1
- complete = True
- for name, help, default in _options:
- name = name.upper()
- if not env[name]:
- try:
- env[name] = Utils.subst_vars(get_param(name, default).replace('/', os.sep), env)
- except TypeError:
- complete = False
-
- if not complete:
- lst = [x for x, _, _ in _options if not env[x.upper()]]
- raise conf.errors.WafError('Variable substitution failure %r' % lst)
-
-def options(opt):
- """
- Adds lots of command-line options, for example::
-
- --exec-prefix: EXEC_PREFIX
- """
- inst_dir = opt.add_option_group('Installation prefix',
-'By default, "waf install" will put the files in\
- "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
- than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
-
- for k in ('--prefix', '--destdir'):
- option = opt.parser.get_option(k)
- if option:
- opt.parser.remove_option(k)
- inst_dir.add_option(option)
-
- inst_dir.add_option('--exec-prefix',
- help = 'installation prefix for binaries [PREFIX]',
- default = '',
- dest = 'EXEC_PREFIX')
-
- dirs_options = opt.add_option_group('Installation directories')
-
- for name, help, default in _options:
- option_name = '--' + name
- str_default = default
- str_help = '%s [%s]' % (help, re.sub(r'\$\{([^}]+)\}', r'\1', str_default))
- dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper())
-
diff --git a/waflib/Tools/gxx.py b/waflib/Tools/gxx.py
index 22c5d26f..9cf52070 100644
--- a/waflib/Tools/gxx.py
+++ b/waflib/Tools/gxx.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
+# Thomas Nagy, 2006-2010 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
@@ -14,7 +14,7 @@ from waflib.Configure import conf
@conf
def find_gxx(conf):
"""
- Finds the program g++, and if present, try to detect its version number
+ Find the program g++, and if present, try to detect its version number
"""
cxx = conf.find_program(['g++', 'c++'], var='CXX')
conf.get_cc_version(cxx, gcc=True)
@@ -27,51 +27,54 @@ def gxx_common_flags(conf):
"""
v = conf.env
- v.CXX_SRC_F = []
- v.CXX_TGT_F = ['-c', '-o']
+ v['CXX_SRC_F'] = []
+ v['CXX_TGT_F'] = ['-c', '-o']
- if not v.LINK_CXX:
- v.LINK_CXX = v.CXX
+ # linker
+ if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
+ v['CXXLNK_SRC_F'] = []
+ v['CXXLNK_TGT_F'] = ['-o']
+ v['CPPPATH_ST'] = '-I%s'
+ v['DEFINES_ST'] = '-D%s'
- v.CXXLNK_SRC_F = []
- v.CXXLNK_TGT_F = ['-o']
- v.CPPPATH_ST = '-I%s'
- v.DEFINES_ST = '-D%s'
+ v['LIB_ST'] = '-l%s' # template for adding libs
+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
+ v['STLIB_ST'] = '-l%s'
+ v['STLIBPATH_ST'] = '-L%s'
+ v['RPATH_ST'] = '-Wl,-rpath,%s'
- v.LIB_ST = '-l%s' # template for adding libs
- v.LIBPATH_ST = '-L%s' # template for adding libpaths
- v.STLIB_ST = '-l%s'
- v.STLIBPATH_ST = '-L%s'
- v.RPATH_ST = '-Wl,-rpath,%s'
+ v['SONAME_ST'] = '-Wl,-h,%s'
+ v['SHLIB_MARKER'] = '-Wl,-Bdynamic'
+ v['STLIB_MARKER'] = '-Wl,-Bstatic'
- v.SONAME_ST = '-Wl,-h,%s'
- v.SHLIB_MARKER = '-Wl,-Bdynamic'
- v.STLIB_MARKER = '-Wl,-Bstatic'
+ # program
+ v['cxxprogram_PATTERN'] = '%s'
- v.cxxprogram_PATTERN = '%s'
+ # shared library
+ v['CXXFLAGS_cxxshlib'] = ['-fPIC']
+ v['LINKFLAGS_cxxshlib'] = ['-shared']
+ v['cxxshlib_PATTERN'] = 'lib%s.so'
- v.CXXFLAGS_cxxshlib = ['-fPIC']
- v.LINKFLAGS_cxxshlib = ['-shared']
- v.cxxshlib_PATTERN = 'lib%s.so'
+ # static lib
+ v['LINKFLAGS_cxxstlib'] = ['-Wl,-Bstatic']
+ v['cxxstlib_PATTERN'] = 'lib%s.a'
- v.LINKFLAGS_cxxstlib = ['-Wl,-Bstatic']
- v.cxxstlib_PATTERN = 'lib%s.a'
-
- v.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
- v.CXXFLAGS_MACBUNDLE = ['-fPIC']
- v.macbundle_PATTERN = '%s.bundle'
+ # osx stuff
+ v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
+ v['CXXFLAGS_MACBUNDLE'] = ['-fPIC']
+ v['macbundle_PATTERN'] = '%s.bundle'
@conf
def gxx_modifier_win32(conf):
"""Configuration flags for executing gcc on Windows"""
v = conf.env
- v.cxxprogram_PATTERN = '%s.exe'
+ v['cxxprogram_PATTERN'] = '%s.exe'
- v.cxxshlib_PATTERN = '%s.dll'
- v.implib_PATTERN = '%s.dll.a'
- v.IMPLIB_ST = '-Wl,--out-implib,%s'
+ v['cxxshlib_PATTERN'] = '%s.dll'
+ v['implib_PATTERN'] = 'lib%s.dll.a'
+ v['IMPLIB_ST'] = '-Wl,--out-implib,%s'
- v.CXXFLAGS_cxxshlib = []
+ v['CXXFLAGS_cxxshlib'] = []
# Auto-import is enabled by default even without this option,
# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
@@ -83,43 +86,43 @@ def gxx_modifier_cygwin(conf):
"""Configuration flags for executing g++ on Cygwin"""
gxx_modifier_win32(conf)
v = conf.env
- v.cxxshlib_PATTERN = 'cyg%s.dll'
+ v['cxxshlib_PATTERN'] = 'cyg%s.dll'
v.append_value('LINKFLAGS_cxxshlib', ['-Wl,--enable-auto-image-base'])
- v.CXXFLAGS_cxxshlib = []
+ v['CXXFLAGS_cxxshlib'] = []
@conf
def gxx_modifier_darwin(conf):
"""Configuration flags for executing g++ on MacOS"""
v = conf.env
- v.CXXFLAGS_cxxshlib = ['-fPIC']
- v.LINKFLAGS_cxxshlib = ['-dynamiclib']
- v.cxxshlib_PATTERN = 'lib%s.dylib'
- v.FRAMEWORKPATH_ST = '-F%s'
- v.FRAMEWORK_ST = ['-framework']
- v.ARCH_ST = ['-arch']
+ v['CXXFLAGS_cxxshlib'] = ['-fPIC']
+ v['LINKFLAGS_cxxshlib'] = ['-dynamiclib']
+ v['cxxshlib_PATTERN'] = 'lib%s.dylib'
+ v['FRAMEWORKPATH_ST'] = '-F%s'
+ v['FRAMEWORK_ST'] = ['-framework']
+ v['ARCH_ST'] = ['-arch']
- v.LINKFLAGS_cxxstlib = []
+ v['LINKFLAGS_cxxstlib'] = []
- v.SHLIB_MARKER = []
- v.STLIB_MARKER = []
- v.SONAME_ST = []
+ v['SHLIB_MARKER'] = []
+ v['STLIB_MARKER'] = []
+ v['SONAME_ST'] = []
@conf
def gxx_modifier_aix(conf):
"""Configuration flags for executing g++ on AIX"""
v = conf.env
- v.LINKFLAGS_cxxprogram= ['-Wl,-brtl']
+ v['LINKFLAGS_cxxprogram']= ['-Wl,-brtl']
- v.LINKFLAGS_cxxshlib = ['-shared', '-Wl,-brtl,-bexpfull']
- v.SHLIB_MARKER = []
+ v['LINKFLAGS_cxxshlib'] = ['-shared', '-Wl,-brtl,-bexpfull']
+ v['SHLIB_MARKER'] = []
@conf
def gxx_modifier_hpux(conf):
v = conf.env
- v.SHLIB_MARKER = []
- v.STLIB_MARKER = []
- v.CFLAGS_cxxshlib = ['-fPIC','-DPIC']
- v.cxxshlib_PATTERN = 'lib%s.sl'
+ v['SHLIB_MARKER'] = []
+ v['STLIB_MARKER'] = []
+ v['CFLAGS_cxxshlib'] = ['-fPIC','-DPIC']
+ v['cxxshlib_PATTERN'] = 'lib%s.sl'
@conf
def gxx_modifier_openbsd(conf):
@@ -128,9 +131,9 @@ def gxx_modifier_openbsd(conf):
@conf
def gcc_modifier_osf1V(conf):
v = conf.env
- v.SHLIB_MARKER = []
- v.STLIB_MARKER = []
- v.SONAME_ST = []
+ v['SHLIB_MARKER'] = []
+ v['STLIB_MARKER'] = []
+ v['SONAME_ST'] = []
@conf
def gxx_modifier_platform(conf):
@@ -153,5 +156,4 @@ def configure(conf):
conf.cxx_load_tools()
conf.cxx_add_flags()
conf.link_add_flags()
- conf.check_gcc_o_space('cxx')
diff --git a/waflib/Tools/icc.py b/waflib/Tools/icc.py
index b6492c8e..f3395030 100644
--- a/waflib/Tools/icc.py
+++ b/waflib/Tools/icc.py
@@ -1,10 +1,10 @@
#!/usr/bin/env python
# encoding: utf-8
# Stian Selnes 2008
-# Thomas Nagy 2009-2018 (ita)
+# Thomas Nagy 2009-2010 (ita)
"""
-Detects the Intel C compiler
+Detect the Intel C compiler
"""
import sys
@@ -14,8 +14,11 @@ from waflib.Configure import conf
@conf
def find_icc(conf):
"""
- Finds the program icc and execute it to ensure it really is icc
+ Find the program icc and execute it to ensure it really is icc
"""
+ if sys.platform == 'cygwin':
+ conf.fatal('The Intel compiler does not work on Cygwin')
+
cc = conf.find_program(['icc', 'ICL'], var='CC')
conf.get_cc_version(cc, icc=True)
conf.env.CC_NAME = 'icc'
diff --git a/waflib/Tools/icpc.py b/waflib/Tools/icpc.py
index 8a6cc6c4..03603d44 100644
--- a/waflib/Tools/icpc.py
+++ b/waflib/Tools/icpc.py
@@ -1,9 +1,9 @@
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy 2009-2018 (ita)
+# Thomas Nagy 2009-2010 (ita)
"""
-Detects the Intel C++ compiler
+Detect the Intel C++ compiler
"""
import sys
@@ -13,8 +13,11 @@ from waflib.Configure import conf
@conf
def find_icpc(conf):
"""
- Finds the program icpc, and execute it to ensure it really is icpc
+ Find the program icpc, and execute it to ensure it really is icpc
"""
+ if sys.platform == 'cygwin':
+ conf.fatal('The Intel compiler does not work on Cygwin')
+
cxx = conf.find_program('icpc', var='CXX')
conf.get_cc_version(cxx, icc=True)
conf.env.CXX_NAME = 'icc'
diff --git a/waflib/Tools/ifort.py b/waflib/Tools/ifort.py
deleted file mode 100644
index 74934f3f..00000000
--- a/waflib/Tools/ifort.py
+++ /dev/null
@@ -1,413 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# DC 2008
-# Thomas Nagy 2016-2018 (ita)
-
-import os, re, traceback
-from waflib import Utils, Logs, Errors
-from waflib.Tools import fc, fc_config, fc_scan, ar, ccroot
-from waflib.Configure import conf
-from waflib.TaskGen import after_method, feature
-
-@conf
-def find_ifort(conf):
- fc = conf.find_program('ifort', var='FC')
- conf.get_ifort_version(fc)
- conf.env.FC_NAME = 'IFORT'
-
-@conf
-def ifort_modifier_win32(self):
- v = self.env
- v.IFORT_WIN32 = True
- v.FCSTLIB_MARKER = ''
- v.FCSHLIB_MARKER = ''
-
- v.FCLIB_ST = v.FCSTLIB_ST = '%s.lib'
- v.FCLIBPATH_ST = v.STLIBPATH_ST = '/LIBPATH:%s'
- v.FCINCPATH_ST = '/I%s'
- v.FCDEFINES_ST = '/D%s'
-
- v.fcprogram_PATTERN = v.fcprogram_test_PATTERN = '%s.exe'
- v.fcshlib_PATTERN = '%s.dll'
- v.fcstlib_PATTERN = v.implib_PATTERN = '%s.lib'
-
- v.FCLNK_TGT_F = '/out:'
- v.FC_TGT_F = ['/c', '/o', '']
- v.FCFLAGS_fcshlib = ''
- v.LINKFLAGS_fcshlib = '/DLL'
- v.AR_TGT_F = '/out:'
- v.IMPLIB_ST = '/IMPLIB:%s'
-
- v.append_value('LINKFLAGS', '/subsystem:console')
- if v.IFORT_MANIFEST:
- v.append_value('LINKFLAGS', ['/MANIFEST'])
-
-@conf
-def ifort_modifier_darwin(conf):
- fc_config.fortran_modifier_darwin(conf)
-
-@conf
-def ifort_modifier_platform(conf):
- dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform()
- ifort_modifier_func = getattr(conf, 'ifort_modifier_' + dest_os, None)
- if ifort_modifier_func:
- ifort_modifier_func()
-
-@conf
-def get_ifort_version(conf, fc):
- """
- Detects the compiler version and sets ``conf.env.FC_VERSION``
- """
- version_re = re.compile(r"\bIntel\b.*\bVersion\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search
- if Utils.is_win32:
- cmd = fc
- else:
- cmd = fc + ['-logo']
-
- out, err = fc_config.getoutput(conf, cmd, stdin=False)
- match = version_re(out) or version_re(err)
- if not match:
- conf.fatal('cannot determine ifort version.')
- k = match.groupdict()
- conf.env.FC_VERSION = (k['major'], k['minor'])
-
-def configure(conf):
- """
- Detects the Intel Fortran compilers
- """
- if Utils.is_win32:
- compiler, version, path, includes, libdirs, arch = conf.detect_ifort()
- v = conf.env
- v.DEST_CPU = arch
- v.PATH = path
- v.INCLUDES = includes
- v.LIBPATH = libdirs
- v.MSVC_COMPILER = compiler
- try:
- v.MSVC_VERSION = float(version)
- except ValueError:
- v.MSVC_VERSION = float(version[:-3])
-
- conf.find_ifort_win32()
- conf.ifort_modifier_win32()
- else:
- conf.find_ifort()
- conf.find_program('xiar', var='AR')
- conf.find_ar()
- conf.fc_flags()
- conf.fc_add_flags()
- conf.ifort_modifier_platform()
-
-
-all_ifort_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')]
-"""List of icl platforms"""
-
-@conf
-def gather_ifort_versions(conf, versions):
- """
- List compiler versions by looking up registry keys
- """
- version_pattern = re.compile('^...?.?\....?.?')
- try:
- all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\Fortran')
- except OSError:
- try:
- all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\Fortran')
- except OSError:
- return
- index = 0
- while 1:
- try:
- version = Utils.winreg.EnumKey(all_versions, index)
- except OSError:
- break
- index += 1
- if not version_pattern.match(version):
- continue
- targets = {}
- for target,arch in all_ifort_platforms:
- if target=='intel64':
- targetDir='EM64T_NATIVE'
- else:
- targetDir=target
- try:
- Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
- icl_version=Utils.winreg.OpenKey(all_versions,version)
- path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
- except OSError:
- pass
- else:
- batch_file=os.path.join(path,'bin','ifortvars.bat')
- if os.path.isfile(batch_file):
- targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
-
- for target,arch in all_ifort_platforms:
- try:
- icl_version = Utils.winreg.OpenKey(all_versions, version+'\\'+target)
- path,type = Utils.winreg.QueryValueEx(icl_version,'ProductDir')
- except OSError:
- continue
- else:
- batch_file=os.path.join(path,'bin','ifortvars.bat')
- if os.path.isfile(batch_file):
- targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
- major = version[0:2]
- versions['intel ' + major] = targets
-
-@conf
-def setup_ifort(conf, versiondict):
- """
- Checks installed compilers and targets and returns the first combination from the user's
- options, env, or the global supported lists that checks.
-
- :param versiondict: dict(platform -> dict(architecture -> configuration))
- :type versiondict: dict(string -> dict(string -> target_compiler)
- :return: the compiler, revision, path, include dirs, library paths and target architecture
- :rtype: tuple of strings
- """
- platforms = Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_ifort_platforms]
- desired_versions = conf.env.MSVC_VERSIONS or list(reversed(list(versiondict.keys())))
- for version in desired_versions:
- try:
- targets = versiondict[version]
- except KeyError:
- continue
- for arch in platforms:
- try:
- cfg = targets[arch]
- except KeyError:
- continue
- cfg.evaluate()
- if cfg.is_valid:
- compiler,revision = version.rsplit(' ', 1)
- return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu
- conf.fatal('ifort: Impossible to find a valid architecture for building %r - %r' % (desired_versions, list(versiondict.keys())))
-
-@conf
-def get_ifort_version_win32(conf, compiler, version, target, vcvars):
- # FIXME hack
- try:
- conf.msvc_cnt += 1
- except AttributeError:
- conf.msvc_cnt = 1
- batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt)
- batfile.write("""@echo off
-set INCLUDE=
-set LIB=
-call "%s" %s
-echo PATH=%%PATH%%
-echo INCLUDE=%%INCLUDE%%
-echo LIB=%%LIB%%;%%LIBPATH%%
-""" % (vcvars,target))
- sout = conf.cmd_and_log(['cmd.exe', '/E:on', '/V:on', '/C', batfile.abspath()])
- batfile.delete()
- lines = sout.splitlines()
-
- if not lines[0]:
- lines.pop(0)
-
- MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None
- for line in lines:
- if line.startswith('PATH='):
- path = line[5:]
- MSVC_PATH = path.split(';')
- elif line.startswith('INCLUDE='):
- MSVC_INCDIR = [i for i in line[8:].split(';') if i]
- elif line.startswith('LIB='):
- MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
- if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR):
- conf.fatal('ifort: Could not find a valid architecture for building (get_ifort_version_win32)')
-
- # Check if the compiler is usable at all.
- # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
- env = dict(os.environ)
- env.update(PATH = path)
- compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
- fc = conf.find_program(compiler_name, path_list=MSVC_PATH)
-
- # delete CL if exists. because it could contain parameters which can change cl's behaviour rather catastrophically.
- if 'CL' in env:
- del(env['CL'])
-
- try:
- conf.cmd_and_log(fc + ['/help'], env=env)
- except UnicodeError:
- st = traceback.format_exc()
- if conf.logger:
- conf.logger.error(st)
- conf.fatal('ifort: Unicode error - check the code page?')
- except Exception as e:
- Logs.debug('ifort: get_ifort_version: %r %r %r -> failure %s', compiler, version, target, str(e))
- conf.fatal('ifort: cannot run the compiler in get_ifort_version (run with -v to display errors)')
- else:
- Logs.debug('ifort: get_ifort_version: %r %r %r -> OK', compiler, version, target)
- finally:
- conf.env[compiler_name] = ''
-
- return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
-
-class target_compiler(object):
- """
- Wraps a compiler configuration; call evaluate() to determine
- whether the configuration is usable.
- """
- def __init__(self, ctx, compiler, cpu, version, bat_target, bat, callback=None):
- """
- :param ctx: configuration context to use to eventually get the version environment
- :param compiler: compiler name
- :param cpu: target cpu
- :param version: compiler version number
- :param bat_target: ?
- :param bat: path to the batch file to run
- :param callback: optional function to take the realized environment variables tup and map it (e.g. to combine other constant paths)
- """
- self.conf = ctx
- self.name = None
- self.is_valid = False
- self.is_done = False
-
- self.compiler = compiler
- self.cpu = cpu
- self.version = version
- self.bat_target = bat_target
- self.bat = bat
- self.callback = callback
-
- def evaluate(self):
- if self.is_done:
- return
- self.is_done = True
- try:
- vs = self.conf.get_ifort_version_win32(self.compiler, self.version, self.bat_target, self.bat)
- except Errors.ConfigurationError:
- self.is_valid = False
- return
- if self.callback:
- vs = self.callback(self, vs)
- self.is_valid = True
- (self.bindirs, self.incdirs, self.libdirs) = vs
-
- def __str__(self):
- return str((self.bindirs, self.incdirs, self.libdirs))
-
- def __repr__(self):
- return repr((self.bindirs, self.incdirs, self.libdirs))
-
-@conf
-def detect_ifort(self):
- return self.setup_ifort(self.get_ifort_versions(False))
-
-@conf
-def get_ifort_versions(self, eval_and_save=True):
- """
- :return: platforms to compiler configurations
- :rtype: dict
- """
- dct = {}
- self.gather_ifort_versions(dct)
- return dct
-
-def _get_prog_names(self, compiler):
- if compiler=='intel':
- compiler_name = 'ifort'
- linker_name = 'XILINK'
- lib_name = 'XILIB'
- else:
- # assumes CL.exe
- compiler_name = 'CL'
- linker_name = 'LINK'
- lib_name = 'LIB'
- return compiler_name, linker_name, lib_name
-
-@conf
-def find_ifort_win32(conf):
- # the autodetection is supposed to be performed before entering in this method
- v = conf.env
- path = v.PATH
- compiler = v.MSVC_COMPILER
- version = v.MSVC_VERSION
-
- compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
- v.IFORT_MANIFEST = (compiler == 'intel' and version >= 11)
-
- # compiler
- fc = conf.find_program(compiler_name, var='FC', path_list=path)
-
- # before setting anything, check if the compiler is really intel fortran
- env = dict(conf.environ)
- if path:
- env.update(PATH = ';'.join(path))
- if not conf.cmd_and_log(fc + ['/nologo', '/help'], env=env):
- conf.fatal('not intel fortran compiler could not be identified')
-
- v.FC_NAME = 'IFORT'
-
- if not v.LINK_FC:
- conf.find_program(linker_name, var='LINK_FC', path_list=path, mandatory=True)
-
- if not v.AR:
- conf.find_program(lib_name, path_list=path, var='AR', mandatory=True)
- v.ARFLAGS = ['/nologo']
-
- # manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later
- if v.IFORT_MANIFEST:
- conf.find_program('MT', path_list=path, var='MT')
- v.MTFLAGS = ['/nologo']
-
- try:
- conf.load('winres')
- except Errors.WafError:
- Logs.warn('Resource compiler not found. Compiling resource file is disabled')
-
-#######################################################################################################
-##### conf above, build below
-
-@after_method('apply_link')
-@feature('fc')
-def apply_flags_ifort(self):
- """
- Adds additional flags implied by msvc, such as subsystems and pdb files::
-
- def build(bld):
- bld.stlib(source='main.c', target='bar', subsystem='gruik')
- """
- if not self.env.IFORT_WIN32 or not getattr(self, 'link_task', None):
- return
-
- is_static = isinstance(self.link_task, ccroot.stlink_task)
-
- subsystem = getattr(self, 'subsystem', '')
- if subsystem:
- subsystem = '/subsystem:%s' % subsystem
- flags = is_static and 'ARFLAGS' or 'LINKFLAGS'
- self.env.append_value(flags, subsystem)
-
- if not is_static:
- for f in self.env.LINKFLAGS:
- d = f.lower()
- if d[1:] == 'debug':
- pdbnode = self.link_task.outputs[0].change_ext('.pdb')
- self.link_task.outputs.append(pdbnode)
-
- if getattr(self, 'install_task', None):
- self.pdb_install_task = self.add_install_files(install_to=self.install_task.install_to, install_from=pdbnode)
-
- break
-
-@feature('fcprogram', 'fcshlib', 'fcprogram_test')
-@after_method('apply_link')
-def apply_manifest_ifort(self):
- """
- Enables manifest embedding in Fortran DLLs when using ifort on Windows
- See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx
- """
- if self.env.IFORT_WIN32 and getattr(self, 'link_task', None):
- # it seems ifort.exe cannot be called for linking
- self.link_task.env.FC = self.env.LINK_FC
-
- if self.env.IFORT_WIN32 and self.env.IFORT_MANIFEST and getattr(self, 'link_task', None):
- out_node = self.link_task.outputs[0]
- man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
- self.link_task.outputs.append(man_node)
- self.env.DO_MANIFEST = True
-
diff --git a/waflib/Tools/intltool.py b/waflib/Tools/intltool.py
deleted file mode 100644
index af95ba80..00000000
--- a/waflib/Tools/intltool.py
+++ /dev/null
@@ -1,231 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-
-"""
-Support for translation tools such as msgfmt and intltool
-
-Usage::
-
- def configure(conf):
- conf.load('gnu_dirs intltool')
-
- def build(bld):
- # process the .po files into .gmo files, and install them in LOCALEDIR
- bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}")
-
- # process an input file, substituting the translations from the po dir
- bld(
- features = "intltool_in",
- podir = "../po",
- style = "desktop",
- flags = ["-u"],
- source = 'kupfer.desktop.in',
- install_path = "${DATADIR}/applications",
- )
-
-Usage of the :py:mod:`waflib.Tools.gnu_dirs` is recommended, but not obligatory.
-"""
-
-from __future__ import with_statement
-
-import os, re
-from waflib import Context, Task, Utils, Logs
-import waflib.Tools.ccroot
-from waflib.TaskGen import feature, before_method, taskgen_method
-from waflib.Logs import error
-from waflib.Configure import conf
-
-_style_flags = {
- 'ba': '-b',
- 'desktop': '-d',
- 'keys': '-k',
- 'quoted': '--quoted-style',
- 'quotedxml': '--quotedxml-style',
- 'rfc822deb': '-r',
- 'schemas': '-s',
- 'xml': '-x',
-}
-
-@taskgen_method
-def ensure_localedir(self):
- """
- Expands LOCALEDIR from DATAROOTDIR/locale if possible, or falls back to PREFIX/share/locale
- """
- # use the tool gnu_dirs to provide options to define this
- if not self.env.LOCALEDIR:
- if self.env.DATAROOTDIR:
- self.env.LOCALEDIR = os.path.join(self.env.DATAROOTDIR, 'locale')
- else:
- self.env.LOCALEDIR = os.path.join(self.env.PREFIX, 'share', 'locale')
-
-@before_method('process_source')
-@feature('intltool_in')
-def apply_intltool_in_f(self):
- """
- Creates tasks to translate files by intltool-merge::
-
- def build(bld):
- bld(
- features = "intltool_in",
- podir = "../po",
- style = "desktop",
- flags = ["-u"],
- source = 'kupfer.desktop.in',
- install_path = "${DATADIR}/applications",
- )
-
- :param podir: location of the .po files
- :type podir: string
- :param source: source files to process
- :type source: list of string
- :param style: the intltool-merge mode of operation, can be one of the following values:
- ``ba``, ``desktop``, ``keys``, ``quoted``, ``quotedxml``, ``rfc822deb``, ``schemas`` and ``xml``.
- See the ``intltool-merge`` man page for more information about supported modes of operation.
- :type style: string
- :param flags: compilation flags ("-quc" by default)
- :type flags: list of string
- :param install_path: installation path
- :type install_path: string
- """
- try:
- self.meths.remove('process_source')
- except ValueError:
- pass
-
- self.ensure_localedir()
-
- podir = getattr(self, 'podir', '.')
- podirnode = self.path.find_dir(podir)
- if not podirnode:
- error("could not find the podir %r" % podir)
- return
-
- cache = getattr(self, 'intlcache', '.intlcache')
- self.env.INTLCACHE = [os.path.join(str(self.path.get_bld()), podir, cache)]
- self.env.INTLPODIR = podirnode.bldpath()
- self.env.append_value('INTLFLAGS', getattr(self, 'flags', self.env.INTLFLAGS_DEFAULT))
-
- if '-c' in self.env.INTLFLAGS:
- self.bld.fatal('Redundant -c flag in intltool task %r' % self)
-
- style = getattr(self, 'style', None)
- if style:
- try:
- style_flag = _style_flags[style]
- except KeyError:
- self.bld.fatal('intltool_in style "%s" is not valid' % style)
-
- self.env.append_unique('INTLFLAGS', [style_flag])
-
- for i in self.to_list(self.source):
- node = self.path.find_resource(i)
-
- task = self.create_task('intltool', node, node.change_ext(''))
- inst = getattr(self, 'install_path', None)
- if inst:
- self.add_install_files(install_to=inst, install_from=task.outputs)
-
-@feature('intltool_po')
-def apply_intltool_po(self):
- """
- Creates tasks to process po files::
-
- def build(bld):
- bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}")
-
- The relevant task generator arguments are:
-
- :param podir: directory of the .po files
- :type podir: string
- :param appname: name of the application
- :type appname: string
- :param install_path: installation directory
- :type install_path: string
-
- The file LINGUAS must be present in the directory pointed by *podir* and list the translation files to process.
- """
- try:
- self.meths.remove('process_source')
- except ValueError:
- pass
-
- self.ensure_localedir()
-
- appname = getattr(self, 'appname', getattr(Context.g_module, Context.APPNAME, 'set_your_app_name'))
- podir = getattr(self, 'podir', '.')
- inst = getattr(self, 'install_path', '${LOCALEDIR}')
-
- linguas = self.path.find_node(os.path.join(podir, 'LINGUAS'))
- if linguas:
- # scan LINGUAS file for locales to process
- with open(linguas.abspath()) as f:
- langs = []
- for line in f.readlines():
- # ignore lines containing comments
- if not line.startswith('#'):
- langs += line.split()
- re_linguas = re.compile('[-a-zA-Z_@.]+')
- for lang in langs:
- # Make sure that we only process lines which contain locales
- if re_linguas.match(lang):
- node = self.path.find_resource(os.path.join(podir, re_linguas.match(lang).group() + '.po'))
- task = self.create_task('po', node, node.change_ext('.mo'))
-
- if inst:
- filename = task.outputs[0].name
- (langname, ext) = os.path.splitext(filename)
- inst_file = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo'
- self.add_install_as(install_to=inst_file, install_from=task.outputs[0],
- chmod=getattr(self, 'chmod', Utils.O644))
-
- else:
- Logs.pprint('RED', "Error no LINGUAS file found in po directory")
-
-class po(Task.Task):
- """
- Compiles .po files into .gmo files
- """
- run_str = '${MSGFMT} -o ${TGT} ${SRC}'
- color = 'BLUE'
-
-class intltool(Task.Task):
- """
- Calls intltool-merge to update translation files
- """
- run_str = '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE_ST:INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}'
- color = 'BLUE'
-
-@conf
-def find_msgfmt(conf):
- """
- Detects msgfmt and sets the ``MSGFMT`` variable
- """
- conf.find_program('msgfmt', var='MSGFMT')
-
-@conf
-def find_intltool_merge(conf):
- """
- Detects intltool-merge
- """
- if not conf.env.PERL:
- conf.find_program('perl', var='PERL')
- conf.env.INTLCACHE_ST = '--cache=%s'
- conf.env.INTLFLAGS_DEFAULT = ['-q', '-u']
- conf.find_program('intltool-merge', interpreter='PERL', var='INTLTOOL')
-
-def configure(conf):
- """
- Detects the program *msgfmt* and set *conf.env.MSGFMT*.
- Detects the program *intltool-merge* and set *conf.env.INTLTOOL*.
- It is possible to set INTLTOOL in the environment, but it must not have spaces in it::
-
- $ INTLTOOL="/path/to/the program/intltool" waf configure
-
- If a C/C++ compiler is present, execute a compilation test to find the header *locale.h*.
- """
- conf.find_msgfmt()
- conf.find_intltool_merge()
- if conf.env.CC or conf.env.CXX:
- conf.check(header_name='locale.h')
-
diff --git a/waflib/Tools/irixcc.py b/waflib/Tools/irixcc.py
index c3ae1ac9..55eb70be 100644
--- a/waflib/Tools/irixcc.py
+++ b/waflib/Tools/irixcc.py
@@ -1,12 +1,11 @@
#! /usr/bin/env python
-# encoding: utf-8
# imported from samba
"""
-Compiler definition for irix/MIPSpro cc compiler
+compiler definition for irix/MIPSpro cc compiler
+based on suncc.py from waf
"""
-from waflib import Errors
from waflib.Tools import ccroot, ar
from waflib.Configure import conf
@@ -14,46 +13,41 @@ from waflib.Configure import conf
def find_irixcc(conf):
v = conf.env
cc = None
- if v.CC:
- cc = v.CC
- elif 'CC' in conf.environ:
- cc = conf.environ['CC']
- if not cc:
- cc = conf.find_program('cc', var='CC')
- if not cc:
- conf.fatal('irixcc was not found')
+ if v['CC']: cc = v['CC']
+ elif 'CC' in conf.environ: cc = conf.environ['CC']
+ if not cc: cc = conf.find_program('cc', var='CC')
+ if not cc: conf.fatal('irixcc was not found')
try:
conf.cmd_and_log(cc + ['-version'])
- except Errors.WafError:
+ except Exception:
conf.fatal('%r -version could not be executed' % cc)
- v.CC = cc
- v.CC_NAME = 'irix'
+ v['CC'] = cc
+ v['CC_NAME'] = 'irix'
@conf
def irixcc_common_flags(conf):
v = conf.env
- v.CC_SRC_F = ''
- v.CC_TGT_F = ['-c', '-o']
- v.CPPPATH_ST = '-I%s'
- v.DEFINES_ST = '-D%s'
+ v['CC_SRC_F'] = ''
+ v['CC_TGT_F'] = ['-c', '-o']
+ v['CPPPATH_ST'] = '-I%s'
+ v['DEFINES_ST'] = '-D%s'
- if not v.LINK_CC:
- v.LINK_CC = v.CC
+ # linker
+ if not v['LINK_CC']: v['LINK_CC'] = v['CC']
+ v['CCLNK_SRC_F'] = ''
+ v['CCLNK_TGT_F'] = ['-o']
- v.CCLNK_SRC_F = ''
- v.CCLNK_TGT_F = ['-o']
+ v['LIB_ST'] = '-l%s' # template for adding libs
+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
+ v['STLIB_ST'] = '-l%s'
+ v['STLIBPATH_ST'] = '-L%s'
- v.LIB_ST = '-l%s' # template for adding libs
- v.LIBPATH_ST = '-L%s' # template for adding libpaths
- v.STLIB_ST = '-l%s'
- v.STLIBPATH_ST = '-L%s'
-
- v.cprogram_PATTERN = '%s'
- v.cshlib_PATTERN = 'lib%s.so'
- v.cstlib_PATTERN = 'lib%s.a'
+ v['cprogram_PATTERN'] = '%s'
+ v['cshlib_PATTERN'] = 'lib%s.so'
+ v['cstlib_PATTERN'] = 'lib%s.a'
def configure(conf):
conf.find_irixcc()
diff --git a/waflib/Tools/javaw.py b/waflib/Tools/javaw.py
deleted file mode 100644
index f6fd20cc..00000000
--- a/waflib/Tools/javaw.py
+++ /dev/null
@@ -1,464 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-
-"""
-Java support
-
-Javac is one of the few compilers that behaves very badly:
-
-#. it outputs files where it wants to (-d is only for the package root)
-
-#. it recompiles files silently behind your back
-
-#. it outputs an undefined amount of files (inner classes)
-
-Remember that the compilation can be performed using Jython[1] rather than regular Python. Instead of
-running one of the following commands::
-
- ./waf configure
- python waf configure
-
-You would have to run::
-
- java -jar /path/to/jython.jar waf configure
-
-[1] http://www.jython.org/
-"""
-
-import os, shutil
-from waflib import Task, Utils, Errors, Node
-from waflib.Configure import conf
-from waflib.TaskGen import feature, before_method, after_method
-
-from waflib.Tools import ccroot
-ccroot.USELIB_VARS['javac'] = set(['CLASSPATH', 'JAVACFLAGS'])
-
-SOURCE_RE = '**/*.java'
-JAR_RE = '**/*'
-
-class_check_source = '''
-public class Test {
- public static void main(String[] argv) {
- Class lib;
- if (argv.length < 1) {
- System.err.println("Missing argument");
- System.exit(77);
- }
- try {
- lib = Class.forName(argv[0]);
- } catch (ClassNotFoundException e) {
- System.err.println("ClassNotFoundException");
- System.exit(1);
- }
- lib = null;
- System.exit(0);
- }
-}
-'''
-
-@feature('javac')
-@before_method('process_source')
-def apply_java(self):
- """
- Create a javac task for compiling *.java files*. There can be
- only one javac task by task generator.
- """
- Utils.def_attrs(self, jarname='', classpath='',
- sourcepath='.', srcdir='.',
- jar_mf_attributes={}, jar_mf_classpath=[])
-
- outdir = getattr(self, 'outdir', None)
- if outdir:
- if not isinstance(outdir, Node.Node):
- outdir = self.path.get_bld().make_node(self.outdir)
- else:
- outdir = self.path.get_bld()
- outdir.mkdir()
- self.outdir = outdir
- self.env.OUTDIR = outdir.abspath()
-
- self.javac_task = tsk = self.create_task('javac')
- tmp = []
-
- srcdir = getattr(self, 'srcdir', '')
- if isinstance(srcdir, Node.Node):
- srcdir = [srcdir]
- for x in Utils.to_list(srcdir):
- if isinstance(x, Node.Node):
- y = x
- else:
- y = self.path.find_dir(x)
- if not y:
- self.bld.fatal('Could not find the folder %s from %s' % (x, self.path))
- tmp.append(y)
-
- tsk.srcdir = tmp
-
- if getattr(self, 'compat', None):
- tsk.env.append_value('JAVACFLAGS', ['-source', str(self.compat)])
-
- if hasattr(self, 'sourcepath'):
- fold = [isinstance(x, Node.Node) and x or self.path.find_dir(x) for x in self.to_list(self.sourcepath)]
- names = os.pathsep.join([x.srcpath() for x in fold])
- else:
- names = [x.srcpath() for x in tsk.srcdir]
-
- if names:
- tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])
-
-@feature('javac')
-@before_method('propagate_uselib_vars')
-@after_method('apply_java')
-def use_javac_files(self):
- """
- Processes the *use* attribute referring to other java compilations
- """
- lst = []
- self.uselib = self.to_list(getattr(self, 'uselib', []))
- names = self.to_list(getattr(self, 'use', []))
- get = self.bld.get_tgen_by_name
- for x in names:
- try:
- y = get(x)
- except Errors.WafError:
- self.uselib.append(x)
- else:
- y.post()
- if hasattr(y, 'jar_task'):
- lst.append(y.jar_task.outputs[0].abspath())
- self.javac_task.set_run_after(y.jar_task)
- else:
- for tsk in y.tasks:
- self.javac_task.set_run_after(tsk)
- self.env.append_value('CLASSPATH', lst)
-
-@feature('javac')
-@after_method('apply_java', 'propagate_uselib_vars', 'use_javac_files')
-def set_classpath(self):
- """
- Sets the CLASSPATH value on the *javac* task previously created.
- """
- if getattr(self, 'classpath', None):
- self.env.append_unique('CLASSPATH', getattr(self, 'classpath', []))
- for x in self.tasks:
- x.env.CLASSPATH = os.pathsep.join(self.env.CLASSPATH) + os.pathsep
-
-@feature('jar')
-@after_method('apply_java', 'use_javac_files')
-@before_method('process_source')
-def jar_files(self):
- """
- Creates a jar task (one maximum per task generator)
- """
- destfile = getattr(self, 'destfile', 'test.jar')
- jaropts = getattr(self, 'jaropts', [])
- manifest = getattr(self, 'manifest', None)
-
- basedir = getattr(self, 'basedir', None)
- if basedir:
- if not isinstance(self.basedir, Node.Node):
- basedir = self.path.get_bld().make_node(basedir)
- else:
- basedir = self.path.get_bld()
- if not basedir:
- self.bld.fatal('Could not find the basedir %r for %r' % (self.basedir, self))
-
- self.jar_task = tsk = self.create_task('jar_create')
- if manifest:
- jarcreate = getattr(self, 'jarcreate', 'cfm')
- if not isinstance(manifest,Node.Node):
- node = self.path.find_resource(manifest)
- else:
- node = manifest
- if not node:
- self.bld.fatal('invalid manifest file %r for %r' % (manifest, self))
- tsk.dep_nodes.append(node)
- jaropts.insert(0, node.abspath())
- else:
- jarcreate = getattr(self, 'jarcreate', 'cf')
- if not isinstance(destfile, Node.Node):
- destfile = self.path.find_or_declare(destfile)
- if not destfile:
- self.bld.fatal('invalid destfile %r for %r' % (destfile, self))
- tsk.set_outputs(destfile)
- tsk.basedir = basedir
-
- jaropts.append('-C')
- jaropts.append(basedir.bldpath())
- jaropts.append('.')
-
- tsk.env.JAROPTS = jaropts
- tsk.env.JARCREATE = jarcreate
-
- if getattr(self, 'javac_task', None):
- tsk.set_run_after(self.javac_task)
-
-@feature('jar')
-@after_method('jar_files')
-def use_jar_files(self):
- """
- Processes the *use* attribute to set the build order on the
- tasks created by another task generator.
- """
- self.uselib = self.to_list(getattr(self, 'uselib', []))
- names = self.to_list(getattr(self, 'use', []))
- get = self.bld.get_tgen_by_name
- for x in names:
- try:
- y = get(x)
- except Errors.WafError:
- self.uselib.append(x)
- else:
- y.post()
- self.jar_task.run_after.update(y.tasks)
-
-class JTask(Task.Task):
- """
- Base class for java and jar tasks; provides functionality to run long commands
- """
- def split_argfile(self, cmd):
- inline = [cmd[0]]
- infile = []
- for x in cmd[1:]:
- # jar and javac do not want -J flags in @file
- if x.startswith('-J'):
- inline.append(x)
- else:
- infile.append(self.quote_flag(x))
- return (inline, infile)
-
-class jar_create(JTask):
- """
- Creates a jar file
- """
- color = 'GREEN'
- run_str = '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}'
-
- def runnable_status(self):
- """
- Wait for dependent tasks to be executed, then read the
- files to update the list of inputs.
- """
- for t in self.run_after:
- if not t.hasrun:
- return Task.ASK_LATER
- if not self.inputs:
- try:
- self.inputs = [x for x in self.basedir.ant_glob(JAR_RE, remove=False) if id(x) != id(self.outputs[0])]
- except Exception:
- raise Errors.WafError('Could not find the basedir %r for %r' % (self.basedir, self))
- return super(jar_create, self).runnable_status()
-
-class javac(JTask):
- """
- Compiles java files
- """
- color = 'BLUE'
- run_str = '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}'
- vars = ['CLASSPATH', 'JAVACFLAGS', 'JAVAC', 'OUTDIR']
- """
- The javac task will be executed again if the variables CLASSPATH, JAVACFLAGS, JAVAC or OUTDIR change.
- """
- def uid(self):
- """Identify java tasks by input&output folder"""
- lst = [self.__class__.__name__, self.generator.outdir.abspath()]
- for x in self.srcdir:
- lst.append(x.abspath())
- return Utils.h_list(lst)
-
- def runnable_status(self):
- """
- Waits for dependent tasks to be complete, then read the file system to find the input nodes.
- """
- for t in self.run_after:
- if not t.hasrun:
- return Task.ASK_LATER
-
- if not self.inputs:
- self.inputs = []
- for x in self.srcdir:
- if x.exists():
- self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False))
- return super(javac, self).runnable_status()
-
- def post_run(self):
- """
- List class files created
- """
- for node in self.generator.outdir.ant_glob('**/*.class'):
- self.generator.bld.node_sigs[node] = self.uid()
- self.generator.bld.task_sigs[self.uid()] = self.cache_sig
-
-@feature('javadoc')
-@after_method('process_rule')
-def create_javadoc(self):
- """
- Creates a javadoc task (feature 'javadoc')
- """
- tsk = self.create_task('javadoc')
- tsk.classpath = getattr(self, 'classpath', [])
- self.javadoc_package = Utils.to_list(self.javadoc_package)
- if not isinstance(self.javadoc_output, Node.Node):
- self.javadoc_output = self.bld.path.find_or_declare(self.javadoc_output)
-
-class javadoc(Task.Task):
- """
- Builds java documentation
- """
- color = 'BLUE'
-
- def __str__(self):
- return '%s: %s -> %s\n' % (self.__class__.__name__, self.generator.srcdir, self.generator.javadoc_output)
-
- def run(self):
- env = self.env
- bld = self.generator.bld
- wd = bld.bldnode
-
- #add src node + bld node (for generated java code)
- srcpath = self.generator.path.abspath() + os.sep + self.generator.srcdir
- srcpath += os.pathsep
- srcpath += self.generator.path.get_bld().abspath() + os.sep + self.generator.srcdir
-
- classpath = env.CLASSPATH
- classpath += os.pathsep
- classpath += os.pathsep.join(self.classpath)
- classpath = "".join(classpath)
-
- self.last_cmd = lst = []
- lst.extend(Utils.to_list(env.JAVADOC))
- lst.extend(['-d', self.generator.javadoc_output.abspath()])
- lst.extend(['-sourcepath', srcpath])
- lst.extend(['-classpath', classpath])
- lst.extend(['-subpackages'])
- lst.extend(self.generator.javadoc_package)
- lst = [x for x in lst if x]
-
- self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0)
-
- def post_run(self):
- nodes = self.generator.javadoc_output.ant_glob('**')
- for node in nodes:
- self.generator.bld.node_sigs[node] = self.uid()
- self.generator.bld.task_sigs[self.uid()] = self.cache_sig
-
-def configure(self):
- """
- Detects the javac, java and jar programs
- """
- # If JAVA_PATH is set, we prepend it to the path list
- java_path = self.environ['PATH'].split(os.pathsep)
- v = self.env
-
- if 'JAVA_HOME' in self.environ:
- java_path = [os.path.join(self.environ['JAVA_HOME'], 'bin')] + java_path
- self.env.JAVA_HOME = [self.environ['JAVA_HOME']]
-
- for x in 'javac java jar javadoc'.split():
- self.find_program(x, var=x.upper(), path_list=java_path)
-
- if 'CLASSPATH' in self.environ:
- v.CLASSPATH = self.environ['CLASSPATH']
-
- if not v.JAR:
- self.fatal('jar is required for making java packages')
- if not v.JAVAC:
- self.fatal('javac is required for compiling java classes')
-
- v.JARCREATE = 'cf' # can use cvf
- v.JAVACFLAGS = []
-
-@conf
-def check_java_class(self, classname, with_classpath=None):
- """
- Checks if the specified java class exists
-
- :param classname: class to check, like java.util.HashMap
- :type classname: string
- :param with_classpath: additional classpath to give
- :type with_classpath: string
- """
- javatestdir = '.waf-javatest'
-
- classpath = javatestdir
- if self.env.CLASSPATH:
- classpath += os.pathsep + self.env.CLASSPATH
- if isinstance(with_classpath, str):
- classpath += os.pathsep + with_classpath
-
- shutil.rmtree(javatestdir, True)
- os.mkdir(javatestdir)
-
- Utils.writef(os.path.join(javatestdir, 'Test.java'), class_check_source)
-
- # Compile the source
- self.exec_command(self.env.JAVAC + [os.path.join(javatestdir, 'Test.java')], shell=False)
-
- # Try to run the app
- cmd = self.env.JAVA + ['-cp', classpath, 'Test', classname]
- self.to_log("%s\n" % str(cmd))
- found = self.exec_command(cmd, shell=False)
-
- self.msg('Checking for java class %s' % classname, not found)
-
- shutil.rmtree(javatestdir, True)
-
- return found
-
-@conf
-def check_jni_headers(conf):
- """
- Checks for jni headers and libraries. On success the conf.env variables xxx_JAVA are added for use in C/C++ targets::
-
- def options(opt):
- opt.load('compiler_c')
-
- def configure(conf):
- conf.load('compiler_c java')
- conf.check_jni_headers()
-
- def build(bld):
- bld.shlib(source='a.c', target='app', use='JAVA')
- """
- if not conf.env.CC_NAME and not conf.env.CXX_NAME:
- conf.fatal('load a compiler first (gcc, g++, ..)')
-
- if not conf.env.JAVA_HOME:
- conf.fatal('set JAVA_HOME in the system environment')
-
- # jni requires the jvm
- javaHome = conf.env.JAVA_HOME[0]
-
- dir = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/include')
- if dir is None:
- dir = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/../Headers') # think different?!
- if dir is None:
- conf.fatal('JAVA_HOME does not seem to be set properly')
-
- f = dir.ant_glob('**/(jni|jni_md).h')
- incDirs = [x.parent.abspath() for x in f]
-
- dir = conf.root.find_dir(conf.env.JAVA_HOME[0])
- f = dir.ant_glob('**/*jvm.(so|dll|dylib)')
- libDirs = [x.parent.abspath() for x in f] or [javaHome]
-
- # On windows, we need both the .dll and .lib to link. On my JDK, they are
- # in different directories...
- f = dir.ant_glob('**/*jvm.(lib)')
- if f:
- libDirs = [[x, y.parent.abspath()] for x in libDirs for y in f]
-
- if conf.env.DEST_OS == 'freebsd':
- conf.env.append_unique('LINKFLAGS_JAVA', '-pthread')
- for d in libDirs:
- try:
- conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm',
- libpath=d, includes=incDirs, uselib_store='JAVA', uselib='JAVA')
- except Exception:
- pass
- else:
- break
- else:
- conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs)
-
diff --git a/waflib/Tools/ldc2.py b/waflib/Tools/ldc2.py
deleted file mode 100644
index a51c344b..00000000
--- a/waflib/Tools/ldc2.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Alex Rønne Petersen, 2012 (alexrp/Zor)
-
-from waflib.Tools import ar, d
-from waflib.Configure import conf
-
-@conf
-def find_ldc2(conf):
- """
- Finds the program *ldc2* and set the variable *D*
- """
- conf.find_program(['ldc2'], var='D')
-
- out = conf.cmd_and_log(conf.env.D + ['-version'])
- if out.find("based on DMD v2.") == -1:
- conf.fatal("detected compiler is not ldc2")
-
-@conf
-def common_flags_ldc2(conf):
- """
- Sets the D flags required by *ldc2*
- """
- v = conf.env
-
- v.D_SRC_F = ['-c']
- v.D_TGT_F = '-of%s'
-
- v.D_LINKER = v.D
- v.DLNK_SRC_F = ''
- v.DLNK_TGT_F = '-of%s'
- v.DINC_ST = '-I%s'
-
- v.DSHLIB_MARKER = v.DSTLIB_MARKER = ''
- v.DSTLIB_ST = v.DSHLIB_ST = '-L-l%s'
- v.DSTLIBPATH_ST = v.DLIBPATH_ST = '-L-L%s'
-
- v.LINKFLAGS_dshlib = ['-L-shared']
-
- v.DHEADER_ext = '.di'
- v.DFLAGS_d_with_header = ['-H', '-Hf']
- v.D_HDR_F = '%s'
-
- v.LINKFLAGS = []
- v.DFLAGS_dshlib = ['-relocation-model=pic']
-
-def configure(conf):
- """
- Configuration for *ldc2*
- """
- conf.find_ldc2()
- conf.load('ar')
- conf.load('d')
- conf.common_flags_ldc2()
- conf.d_platform_flags()
-
diff --git a/waflib/Tools/lua.py b/waflib/Tools/lua.py
deleted file mode 100644
index 15a333a9..00000000
--- a/waflib/Tools/lua.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Sebastian Schlingmann, 2008
-# Thomas Nagy, 2008-2018 (ita)
-
-"""
-Lua support.
-
-Compile *.lua* files into *.luac*::
-
- def configure(conf):
- conf.load('lua')
- conf.env.LUADIR = '/usr/local/share/myapp/scripts/'
- def build(bld):
- bld(source='foo.lua')
-"""
-
-from waflib.TaskGen import extension
-from waflib import Task
-
-@extension('.lua')
-def add_lua(self, node):
- tsk = self.create_task('luac', node, node.change_ext('.luac'))
- inst_to = getattr(self, 'install_path', self.env.LUADIR and '${LUADIR}' or None)
- if inst_to:
- self.add_install_files(install_to=inst_to, install_from=tsk.outputs)
- return tsk
-
-class luac(Task.Task):
- run_str = '${LUAC} -s -o ${TGT} ${SRC}'
- color = 'PINK'
-
-def configure(conf):
- """
- Detect the luac compiler and set *conf.env.LUAC*
- """
- conf.find_program('luac', var='LUAC')
-
diff --git a/waflib/Tools/md5_tstamp.py b/waflib/Tools/md5_tstamp.py
deleted file mode 100644
index 6428e460..00000000
--- a/waflib/Tools/md5_tstamp.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-"""
-Re-calculate md5 hashes of files only when the file times or the file
-size have changed.
-
-The hashes can also reflect either the file contents (STRONGEST=True) or the
-file time and file size.
-
-The performance benefits of this module are usually insignificant.
-"""
-
-import os, stat
-from waflib import Utils, Build, Node
-
-STRONGEST = True
-
-Build.SAVED_ATTRS.append('hashes_md5_tstamp')
-def h_file(self):
- filename = self.abspath()
- st = os.stat(filename)
-
- cache = self.ctx.hashes_md5_tstamp
- if filename in cache and cache[filename][0] == st.st_mtime:
- return cache[filename][1]
-
- if STRONGEST:
- ret = Utils.h_file(filename)
- else:
- if stat.S_ISDIR(st[stat.ST_MODE]):
- raise IOError('Not a file')
- ret = Utils.md5(str((st.st_mtime, st.st_size)).encode()).digest()
-
- cache[filename] = (st.st_mtime, ret)
- return ret
-h_file.__doc__ = Node.Node.h_file.__doc__
-Node.Node.h_file = h_file
-
diff --git a/waflib/Tools/msvc.py b/waflib/Tools/msvc.py
index 17b347d4..d98432e0 100644
--- a/waflib/Tools/msvc.py
+++ b/waflib/Tools/msvc.py
@@ -8,12 +8,6 @@
"""
Microsoft Visual C++/Intel C++ compiler support
-If you get detection problems, first try any of the following::
-
- chcp 65001
- set PYTHONIOENCODING=...
- set PYTHONLEGACYWINDOWSSTDIO=1
-
Usage::
$ waf configure --msvc_version="msvc 10.0,msvc 9.0" --msvc_target="x64"
@@ -21,8 +15,8 @@ Usage::
or::
def configure(conf):
- conf.env.MSVC_VERSIONS = ['msvc 10.0', 'msvc 9.0', 'msvc 8.0', 'msvc 7.1', 'msvc 7.0', 'msvc 6.0', 'wsdk 7.0', 'intel 11', 'PocketPC 9.0', 'Smartphone 8.0']
- conf.env.MSVC_TARGETS = ['x64']
+ conf.env['MSVC_VERSIONS'] = ['msvc 10.0', 'msvc 9.0', 'msvc 8.0', 'msvc 7.1', 'msvc 7.0', 'msvc 6.0', 'wsdk 7.0', 'intel 11', 'PocketPC 9.0', 'Smartphone 8.0']
+ conf.env['MSVC_TARGETS'] = ['x64']
conf.load('msvc')
or::
@@ -37,14 +31,14 @@ or::
Platforms and targets will be tested in the order they appear;
the first good configuration will be used.
-To force testing all the configurations that are not used, use the ``--no-msvc-lazy`` option
-or set ``conf.env.MSVC_LAZY_AUTODETECT=False``.
+To skip testing all the configurations that are not used, use the ``--msvc_lazy_autodetect`` option
+or set ``conf.env['MSVC_LAZY_AUTODETECT']=True``.
Supported platforms: ia64, x64, x86, x86_amd64, x86_ia64, x86_arm, amd64_x86, amd64_arm
Compilers supported:
-* msvc => Visual Studio, versions 6.0 (VC 98, VC .NET 2002) to 15 (Visual Studio 2017)
+* msvc => Visual Studio, versions 6.0 (VC 98, VC .NET 2002) to 12.0 (Visual Studio 2013)
* wsdk => Windows SDK, versions 6.0, 6.1, 7.0, 7.1, 8.0
* icl => Intel compiler, versions 9, 10, 11, 13
* winphone => Visual Studio to target Windows Phone 8 native (version 8.0 for now)
@@ -58,12 +52,13 @@ cmd.exe /C "chcp 1252 & set PYTHONUNBUFFERED=true && set && waf configure"
Setting PYTHONUNBUFFERED gives the unbuffered output.
"""
-import os, sys, re, traceback
-from waflib import Utils, Logs, Options, Errors
+import os, sys, re, tempfile
+from waflib import Utils, Task, Logs, Options, Errors
+from waflib.Logs import debug, warn
from waflib.TaskGen import after_method, feature
from waflib.Configure import conf
-from waflib.Tools import ccroot, c, cxx, ar
+from waflib.Tools import ccroot, c, cxx, ar, winres
g_msvc_systemlibs = '''
aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
@@ -87,9 +82,7 @@ wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xol
'''.split()
"""importlibs provided by MSVC/Platform SDK. Do NOT search them"""
-all_msvc_platforms = [ ('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'),
- ('x86_amd64', 'amd64'), ('x86_ia64', 'ia64'), ('x86_arm', 'arm'), ('x86_arm64', 'arm64'),
- ('amd64_x86', 'x86'), ('amd64_arm', 'arm'), ('amd64_arm64', 'arm64') ]
+all_msvc_platforms = [ ('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'), ('x86_amd64', 'amd64'), ('x86_ia64', 'ia64'), ('x86_arm', 'arm'), ('amd64_x86', 'x86'), ('amd64_arm', 'arm') ]
"""List of msvc platforms"""
all_wince_platforms = [ ('armv4', 'arm'), ('armv4i', 'arm'), ('mipsii', 'mips'), ('mipsii_fp', 'mips'), ('mipsiv', 'mips'), ('mipsiv_fp', 'mips'), ('sh4', 'sh'), ('x86', 'cex86') ]
@@ -101,63 +94,45 @@ all_icl_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'),
def options(opt):
opt.add_option('--msvc_version', type='string', help = 'msvc version, eg: "msvc 10.0,msvc 9.0"', default='')
opt.add_option('--msvc_targets', type='string', help = 'msvc targets, eg: "x64,arm"', default='')
- opt.add_option('--no-msvc-lazy', action='store_false', help = 'lazily check msvc target environments', default=True, dest='msvc_lazy')
+ opt.add_option('--msvc_lazy_autodetect', action='store_true', help = 'lazily check msvc target environments')
-@conf
-def setup_msvc(conf, versiondict):
+def setup_msvc(conf, versions, arch = False):
"""
Checks installed compilers and targets and returns the first combination from the user's
options, env, or the global supported lists that checks.
- :param versiondict: dict(platform -> dict(architecture -> configuration))
- :type versiondict: dict(string -> dict(string -> target_compiler)
- :return: the compiler, revision, path, include dirs, library paths and target architecture
+ :param versions: A list of tuples of all installed compilers and available targets.
+ :param arch: Whether to return the target architecture.
+ :return: the compiler, revision, path, include dirs, library paths, and (optionally) target architecture
:rtype: tuple of strings
"""
platforms = getattr(Options.options, 'msvc_targets', '').split(',')
if platforms == ['']:
- platforms=Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
+ platforms=Utils.to_list(conf.env['MSVC_TARGETS']) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
desired_versions = getattr(Options.options, 'msvc_version', '').split(',')
if desired_versions == ['']:
- desired_versions = conf.env.MSVC_VERSIONS or list(reversed(sorted(versiondict.keys())))
-
- # Override lazy detection by evaluating after the fact.
- lazy_detect = getattr(Options.options, 'msvc_lazy', True)
- if conf.env.MSVC_LAZY_AUTODETECT is False:
- lazy_detect = False
-
- if not lazy_detect:
- for val in versiondict.values():
- for arch in list(val.keys()):
- cfg = val[arch]
- cfg.evaluate()
- if not cfg.is_valid:
- del val[arch]
- conf.env.MSVC_INSTALLED_VERSIONS = versiondict
+ desired_versions = conf.env['MSVC_VERSIONS'] or [v for v,_ in versions][::-1]
+ versiondict = dict(versions)
for version in desired_versions:
- Logs.debug('msvc: detecting %r - %r', version, desired_versions)
try:
- targets = versiondict[version]
- except KeyError:
- continue
-
- seen = set()
- for arch in platforms:
- if arch in seen:
- continue
- else:
- seen.add(arch)
- try:
- cfg = targets[arch]
- except KeyError:
- continue
-
- cfg.evaluate()
- if cfg.is_valid:
- compiler,revision = version.rsplit(' ', 1)
- return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu
- conf.fatal('msvc: Impossible to find a valid architecture for building %r - %r' % (desired_versions, list(versiondict.keys())))
+ targets = dict(versiondict[version])
+ for target in platforms:
+ try:
+ try:
+ realtarget,(p1,p2,p3) = targets[target]
+ except conf.errors.ConfigurationError:
+ # lazytup target evaluation errors
+ del(targets[target])
+ else:
+ compiler,revision = version.rsplit(' ', 1)
+ if arch:
+ return compiler,revision,p1,p2,p3,realtarget
+ else:
+ return compiler,revision,p1,p2,p3
+ except KeyError: continue
+ except KeyError: continue
+ conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
@conf
def get_msvc_version(conf, compiler, version, target, vcvars):
@@ -172,7 +147,7 @@ def get_msvc_version(conf, compiler, version, target, vcvars):
:return: the location of the compiler executable, the location of include dirs, and the library paths
:rtype: tuple of strings
"""
- Logs.debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)
+ debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)
try:
conf.msvc_cnt += 1
@@ -212,27 +187,67 @@ echo LIB=%%LIB%%;%%LIBPATH%%
compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)
- # delete CL if exists. because it could contain parameters which can change cl's behaviour rather catastrophically.
+	# delete CL if exists. because it could contain parameters which can change cl's behaviour rather catastrophically.
if 'CL' in env:
del(env['CL'])
try:
- conf.cmd_and_log(cxx + ['/help'], env=env)
- except UnicodeError:
- st = traceback.format_exc()
- if conf.logger:
- conf.logger.error(st)
- conf.fatal('msvc: Unicode error - check the code page?')
- except Exception as e:
- Logs.debug('msvc: get_msvc_version: %r %r %r -> failure %s', compiler, version, target, str(e))
- conf.fatal('msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)')
- else:
- Logs.debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target)
+ try:
+ conf.cmd_and_log(cxx + ['/help'], env=env)
+ except UnicodeError:
+ st = Utils.ex_stack()
+ if conf.logger:
+ conf.logger.error(st)
+ conf.fatal('msvc: Unicode error - check the code page?')
+ except Exception as e:
+ debug('msvc: get_msvc_version: %r %r %r -> failure %s' % (compiler, version, target, str(e)))
+ conf.fatal('msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)')
+ else:
+ debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target)
finally:
conf.env[compiler_name] = ''
return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
+@conf
+def gather_wsdk_versions(conf, versions):
+ """
+ Use winreg to add the msvc versions to the input list
+
+ :param versions: list to modify
+ :type versions: list
+ """
+ version_pattern = re.compile('^v..?.?\...?.?')
+ try:
+ all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
+ except WindowsError:
+ try:
+ all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
+ except WindowsError:
+ return
+ index = 0
+ while 1:
+ try:
+ version = Utils.winreg.EnumKey(all_versions, index)
+ except WindowsError:
+ break
+ index = index + 1
+ if not version_pattern.match(version):
+ continue
+ try:
+ msvc_version = Utils.winreg.OpenKey(all_versions, version)
+ path,type = Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder')
+ except WindowsError:
+ continue
+ if path and os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')):
+ targets = []
+ for target,arch in all_msvc_platforms:
+ try:
+ targets.append((target, (arch, get_compiler_env(conf, 'wsdk', version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd')))))
+ except conf.errors.ConfigurationError:
+ pass
+ versions.append(('wsdk ' + version[1:], targets))
+
def gather_wince_supported_platforms():
"""
Checks SmartPhones SDKs
@@ -243,31 +258,31 @@ def gather_wince_supported_platforms():
supported_wince_platforms = []
try:
ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
- except OSError:
+ except WindowsError:
try:
ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
- except OSError:
+ except WindowsError:
ce_sdk = ''
if not ce_sdk:
return supported_wince_platforms
- index = 0
+ ce_index = 0
while 1:
try:
- sdk_device = Utils.winreg.EnumKey(ce_sdk, index)
- sdk = Utils.winreg.OpenKey(ce_sdk, sdk_device)
- except OSError:
+ sdk_device = Utils.winreg.EnumKey(ce_sdk, ce_index)
+ except WindowsError:
break
- index += 1
+ ce_index = ce_index + 1
+ sdk = Utils.winreg.OpenKey(ce_sdk, sdk_device)
try:
path,type = Utils.winreg.QueryValueEx(sdk, 'SDKRootDir')
- except OSError:
+ except WindowsError:
try:
path,type = Utils.winreg.QueryValueEx(sdk,'SDKInformation')
- except OSError:
+ path,xml = os.path.split(path)
+ except WindowsError:
continue
- path,xml = os.path.split(path)
- path = str(path)
+ path=str(path)
path,device = os.path.split(path)
if not device:
path,device = os.path.split(path)
@@ -284,140 +299,122 @@ def gather_msvc_detected_versions():
version_pattern = re.compile('^(\d\d?\.\d\d?)(Exp)?$')
detected_versions = []
for vcver,vcvar in (('VCExpress','Exp'), ('VisualStudio','')):
- prefix = 'SOFTWARE\\Wow6432node\\Microsoft\\' + vcver
try:
+ prefix = 'SOFTWARE\\Wow6432node\\Microsoft\\'+vcver
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix)
- except OSError:
- prefix = 'SOFTWARE\\Microsoft\\' + vcver
+ except WindowsError:
try:
+ prefix = 'SOFTWARE\\Microsoft\\'+vcver
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix)
- except OSError:
+ except WindowsError:
continue
index = 0
while 1:
try:
version = Utils.winreg.EnumKey(all_versions, index)
- except OSError:
+ except WindowsError:
break
- index += 1
+ index = index + 1
match = version_pattern.match(version)
- if match:
- versionnumber = float(match.group(1))
- else:
+ if not match:
continue
- detected_versions.append((versionnumber, version+vcvar, prefix+'\\'+version))
+ else:
+ versionnumber = float(match.group(1))
+ detected_versions.append((versionnumber, version+vcvar, prefix+"\\"+version))
def fun(tup):
return tup[0]
detected_versions.sort(key = fun)
return detected_versions
-class target_compiler(object):
- """
- Wrap a compiler configuration; call evaluate() to determine
- whether the configuration is usable.
- """
- def __init__(self, ctx, compiler, cpu, version, bat_target, bat, callback=None):
- """
- :param ctx: configuration context to use to eventually get the version environment
- :param compiler: compiler name
- :param cpu: target cpu
- :param version: compiler version number
- :param bat_target: ?
- :param bat: path to the batch file to run
- """
- self.conf = ctx
- self.name = None
- self.is_valid = False
- self.is_done = False
-
- self.compiler = compiler
- self.cpu = cpu
- self.version = version
- self.bat_target = bat_target
- self.bat = bat
- self.callback = callback
-
- def evaluate(self):
- if self.is_done:
- return
- self.is_done = True
- try:
- vs = self.conf.get_msvc_version(self.compiler, self.version, self.bat_target, self.bat)
- except Errors.ConfigurationError:
- self.is_valid = False
- return
- if self.callback:
- vs = self.callback(self, vs)
- self.is_valid = True
- (self.bindirs, self.incdirs, self.libdirs) = vs
-
- def __str__(self):
- return str((self.compiler, self.cpu, self.version, self.bat_target, self.bat))
+def get_compiler_env(conf, compiler, version, bat_target, bat, select=None):
+ """
+ Gets the compiler environment variables as a tuple. Evaluation is eager by default.
+ If set to lazy with ``--msvc_lazy_autodetect`` or ``env.MSVC_LAZY_AUTODETECT``
+ the environment is evaluated when the tuple is destructured or iterated. This means
+ destructuring can throw :py:class:`conf.errors.ConfigurationError`.
+
+ :param conf: configuration context to use to eventually get the version environment
+ :param compiler: compiler name
+ :param version: compiler version number
+ :param bat: path to the batch file to run
+ :param select: optional function to take the realized environment variables tup and map it (e.g. to combine other constant paths)
+ """
+ lazy = getattr(Options.options, 'msvc_lazy_autodetect', False) or conf.env['MSVC_LAZY_AUTODETECT']
- def __repr__(self):
- return repr((self.compiler, self.cpu, self.version, self.bat_target, self.bat))
+ def msvc_thunk():
+ vs = conf.get_msvc_version(compiler, version, bat_target, bat)
+ if select:
+ return select(vs)
+ else:
+ return vs
+ return lazytup(msvc_thunk, lazy, ([], [], []))
-@conf
-def gather_wsdk_versions(conf, versions):
+class lazytup(object):
"""
- Use winreg to add the msvc versions to the input list
+ A tuple that evaluates its elements from a function when iterated or destructured.
- :param versions: list to modify
- :type versions: list
+ :param fn: thunk to evaluate the tuple on demand
+ :param lazy: whether to delay evaluation or evaluate in the constructor
+ :param default: optional default for :py:func:`repr` if it should not evaluate
"""
- version_pattern = re.compile('^v..?.?\...?.?')
- try:
- all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
- except OSError:
- try:
- all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
- except OSError:
+ def __init__(self, fn, lazy=True, default=None):
+ self.fn = fn
+ self.default = default
+ if not lazy:
+ self.evaluate()
+ def __len__(self):
+ self.evaluate()
+ return len(self.value)
+ def __iter__(self):
+ self.evaluate()
+ for i, v in enumerate(self.value):
+ yield v
+ def __getitem__(self, i):
+ self.evaluate()
+ return self.value[i]
+ def __repr__(self):
+ if hasattr(self, 'value'):
+ return repr(self.value)
+ elif self.default:
+ return repr(self.default)
+ else:
+ self.evaluate()
+ return repr(self.value)
+ def evaluate(self):
+ if hasattr(self, 'value'):
return
- index = 0
- while 1:
- try:
- version = Utils.winreg.EnumKey(all_versions, index)
- except OSError:
- break
- index += 1
- if not version_pattern.match(version):
- continue
- try:
- msvc_version = Utils.winreg.OpenKey(all_versions, version)
- path,type = Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder')
- except OSError:
- continue
- if path and os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')):
- targets = {}
- for target,arch in all_msvc_platforms:
- targets[target] = target_compiler(conf, 'wsdk', arch, version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd'))
- versions['wsdk ' + version[1:]] = targets
+ self.value = self.fn()
@conf
def gather_msvc_targets(conf, versions, version, vc_path):
#Looking for normal MSVC compilers!
- targets = {}
-
- if os.path.isfile(os.path.join(vc_path, 'VC', 'Auxiliary', 'Build', 'vcvarsall.bat')):
- for target,realtarget in all_msvc_platforms[::-1]:
- targets[target] = target_compiler(conf, 'msvc', realtarget, version, target, os.path.join(vc_path, 'VC', 'Auxiliary', 'Build', 'vcvarsall.bat'))
- elif os.path.isfile(os.path.join(vc_path, 'vcvarsall.bat')):
+ targets = []
+ if os.path.isfile(os.path.join(vc_path, 'vcvarsall.bat')):
for target,realtarget in all_msvc_platforms[::-1]:
- targets[target] = target_compiler(conf, 'msvc', realtarget, version, target, os.path.join(vc_path, 'vcvarsall.bat'))
+ try:
+ targets.append((target, (realtarget, get_compiler_env(conf, 'msvc', version, target, os.path.join(vc_path, 'vcvarsall.bat')))))
+ except conf.errors.ConfigurationError:
+ pass
elif os.path.isfile(os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat')):
- targets['x86'] = target_compiler(conf, 'msvc', 'x86', version, 'x86', os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat'))
+ try:
+ targets.append(('x86', ('x86', get_compiler_env(conf, 'msvc', version, 'x86', os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat')))))
+ except conf.errors.ConfigurationError:
+ pass
elif os.path.isfile(os.path.join(vc_path, 'Bin', 'vcvars32.bat')):
- targets['x86'] = target_compiler(conf, 'msvc', 'x86', version, '', os.path.join(vc_path, 'Bin', 'vcvars32.bat'))
+ try:
+ targets.append(('x86', ('x86', get_compiler_env(conf, 'msvc', version, '', os.path.join(vc_path, 'Bin', 'vcvars32.bat')))))
+ except conf.errors.ConfigurationError:
+ pass
if targets:
- versions['msvc %s' % version] = targets
+ versions.append(('msvc '+ version, targets))
@conf
def gather_wince_targets(conf, versions, version, vc_path, vsvars, supported_platforms):
#Looking for Win CE compilers!
for device,platforms in supported_platforms:
- targets = {}
+ cetargets = []
for platform,compiler,include,lib in platforms:
winCEpath = os.path.join(vc_path, 'ce')
if not os.path.isdir(winCEpath):
@@ -427,52 +424,27 @@ def gather_wince_targets(conf, versions, version, vc_path, vsvars, supported_pla
bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)]
incdirs = [os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include'), include]
libdirs = [os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform), lib]
- def combine_common(obj, compiler_env):
- # TODO this is likely broken, remove in waf 2.1
+ def combine_common(compiler_env):
(common_bindirs,_1,_2) = compiler_env
return (bindirs + common_bindirs, incdirs, libdirs)
- targets[platform] = target_compiler(conf, 'msvc', platform, version, 'x86', vsvars, combine_common)
- if targets:
- versions[device + ' ' + version] = targets
+ try:
+ cetargets.append((platform, (platform, get_compiler_env(conf, 'msvc', version, 'x86', vsvars, combine_common))))
+ except conf.errors.ConfigurationError:
+ continue
+ if cetargets:
+ versions.append((device + ' ' + version, cetargets))
@conf
def gather_winphone_targets(conf, versions, version, vc_path, vsvars):
#Looking for WinPhone compilers
- targets = {}
+ targets = []
for target,realtarget in all_msvc_platforms[::-1]:
- targets[target] = target_compiler(conf, 'winphone', realtarget, version, target, vsvars)
+ try:
+ targets.append((target, (realtarget, get_compiler_env(conf, 'winphone', version, target, vsvars))))
+ except conf.errors.ConfigurationError:
+ pass
if targets:
- versions['winphone ' + version] = targets
-
-@conf
-def gather_vswhere_versions(conf, versions):
- try:
- import json
- except ImportError:
- Logs.error('Visual Studio 2017 detection requires Python 2.6')
- return
-
- prg_path = os.environ.get('ProgramFiles(x86)', os.environ.get('ProgramFiles', 'C:\\Program Files (x86)'))
-
- vswhere = os.path.join(prg_path, 'Microsoft Visual Studio', 'Installer', 'vswhere.exe')
- args = [vswhere, '-products', '*', '-legacy', '-format', 'json']
- try:
- txt = conf.cmd_and_log(args)
- except Errors.WafError as e:
- Logs.debug('msvc: vswhere.exe failed %s', e)
- return
-
- if sys.version_info[0] < 3:
- txt = txt.decode(Utils.console_encoding())
-
- arr = json.loads(txt)
- arr.sort(key=lambda x: x['installationVersion'])
- for entry in arr:
- ver = entry['installationVersion']
- ver = str('.'.join(ver.split('.')[:2]))
- path = str(os.path.abspath(entry['installationPath']))
- if os.path.exists(path) and ('msvc %s' % ver) not in versions:
- conf.gather_msvc_targets(versions, ver, path)
+ versions.append(('winphone '+ version, targets))
@conf
def gather_msvc_versions(conf, versions):
@@ -481,20 +453,12 @@ def gather_msvc_versions(conf, versions):
try:
try:
msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\VC")
- except OSError:
+ except WindowsError:
msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\Microsoft Visual C++")
path,type = Utils.winreg.QueryValueEx(msvc_version, 'ProductDir')
- except OSError:
- try:
- msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Wow6432node\\Microsoft\\VisualStudio\\SxS\\VS7")
- path,type = Utils.winreg.QueryValueEx(msvc_version, version)
- except OSError:
- continue
- else:
- vc_paths.append((version, os.path.abspath(str(path))))
- continue
- else:
vc_paths.append((version, os.path.abspath(str(path))))
+ except WindowsError:
+ continue
wince_supported_platforms = gather_wince_supported_platforms()
@@ -528,48 +492,50 @@ def gather_icl_versions(conf, versions):
version_pattern = re.compile('^...?.?\....?.?')
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
- except OSError:
+ except WindowsError:
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\C++')
- except OSError:
+ except WindowsError:
return
index = 0
while 1:
try:
version = Utils.winreg.EnumKey(all_versions, index)
- except OSError:
+ except WindowsError:
break
- index += 1
+ index = index + 1
if not version_pattern.match(version):
continue
- targets = {}
+ targets = []
for target,arch in all_icl_platforms:
- if target=='intel64':
- targetDir='EM64T_NATIVE'
- else:
- targetDir=target
try:
+ if target=='intel64': targetDir='EM64T_NATIVE'
+ else: targetDir=target
Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
icl_version=Utils.winreg.OpenKey(all_versions,version)
path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
- except OSError:
- pass
- else:
batch_file=os.path.join(path,'bin','iclvars.bat')
if os.path.isfile(batch_file):
- targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
+ try:
+ targets.append((target,(arch,get_compiler_env(conf,'intel',version,target,batch_file))))
+ except conf.errors.ConfigurationError:
+ pass
+ except WindowsError:
+ pass
for target,arch in all_icl_platforms:
try:
icl_version = Utils.winreg.OpenKey(all_versions, version+'\\'+target)
path,type = Utils.winreg.QueryValueEx(icl_version,'ProductDir')
- except OSError:
- continue
- else:
batch_file=os.path.join(path,'bin','iclvars.bat')
if os.path.isfile(batch_file):
- targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
+ try:
+ targets.append((target, (arch, get_compiler_env(conf, 'intel', version, target, batch_file))))
+ except conf.errors.ConfigurationError:
+ pass
+ except WindowsError:
+ continue
major = version[0:2]
- versions['intel ' + major] = targets
+ versions.append(('intel ' + major, targets))
@conf
def gather_intel_composer_versions(conf, versions):
@@ -582,44 +548,42 @@ def gather_intel_composer_versions(conf, versions):
version_pattern = re.compile('^...?.?\...?.?.?')
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Suites')
- except OSError:
+ except WindowsError:
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Suites')
- except OSError:
+ except WindowsError:
return
index = 0
while 1:
try:
version = Utils.winreg.EnumKey(all_versions, index)
- except OSError:
+ except WindowsError:
break
- index += 1
+ index = index + 1
if not version_pattern.match(version):
continue
- targets = {}
+ targets = []
for target,arch in all_icl_platforms:
- if target=='intel64':
- targetDir='EM64T_NATIVE'
- else:
- targetDir=target
try:
+ if target=='intel64': targetDir='EM64T_NATIVE'
+ else: targetDir=target
try:
defaults = Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir)
- except OSError:
- if targetDir == 'EM64T_NATIVE':
+ except WindowsError:
+ if targetDir=='EM64T_NATIVE':
defaults = Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T')
else:
- raise
+ raise WindowsError
uid,type = Utils.winreg.QueryValueEx(defaults, 'SubKey')
Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir)
icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++')
path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
- except OSError:
- pass
- else:
batch_file=os.path.join(path,'bin','iclvars.bat')
if os.path.isfile(batch_file):
- targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
+ try:
+ targets.append((target,(arch,get_compiler_env(conf,'intel',version,target,batch_file))))
+ except conf.errors.ConfigurationError:
+ pass
# The intel compilervar_arch.bat is broken when used with Visual Studio Express 2012
# http://software.intel.com/en-us/forums/topic/328487
compilervars_warning_attr = '_compilervars_warning_key'
@@ -637,41 +601,72 @@ def gather_intel_composer_versions(conf, versions):
'(VSWinExpress.exe) but it does not seem to be installed at %r. '
'The intel command line set up will fail to configure unless the file %r'
'is patched. See: %s') % (vs_express_path, compilervars_arch, patch_url))
+ except WindowsError:
+ pass
major = version[0:2]
- versions['intel ' + major] = targets
+ versions.append(('intel ' + major, targets))
@conf
-def detect_msvc(self):
- return self.setup_msvc(self.get_msvc_versions())
+def get_msvc_versions(conf, eval_and_save=True):
+ """
+ :return: list of compilers installed
+ :rtype: list of string
+ """
+ if conf.env['MSVC_INSTALLED_VERSIONS']:
+ return conf.env['MSVC_INSTALLED_VERSIONS']
+
+ # Gather all the compiler versions and targets. This phase can be lazy
+ # per lazy detection settings.
+ lst = []
+ conf.gather_icl_versions(lst)
+ conf.gather_intel_composer_versions(lst)
+ conf.gather_wsdk_versions(lst)
+ conf.gather_msvc_versions(lst)
+
+ # Override lazy detection by evaluating after the fact.
+ if eval_and_save:
+ def checked_target(t):
+ target,(arch,paths) = t
+ try:
+ paths.evaluate()
+ except conf.errors.ConfigurationError:
+ return None
+ else:
+ return t
+ lst = [(version, list(filter(checked_target, targets))) for version, targets in lst]
+ conf.env['MSVC_INSTALLED_VERSIONS'] = lst
+
+ return lst
@conf
-def get_msvc_versions(self):
+def print_all_msvc_detected(conf):
"""
- :return: platform to compiler configurations
- :rtype: dict
+ Print the contents of *conf.env.MSVC_INSTALLED_VERSIONS*
"""
- dct = Utils.ordered_iter_dict()
- self.gather_icl_versions(dct)
- self.gather_intel_composer_versions(dct)
- self.gather_wsdk_versions(dct)
- self.gather_msvc_versions(dct)
- self.gather_vswhere_versions(dct)
- Logs.debug('msvc: detected versions %r', list(dct.keys()))
- return dct
+ for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']:
+ Logs.info(version)
+ for target,l in targets:
+ Logs.info("\t"+target)
+
+@conf
+def detect_msvc(conf, arch = False):
+ # Save installed versions only if lazy detection is disabled.
+ lazy_detect = getattr(Options.options, 'msvc_lazy_autodetect', False) or conf.env['MSVC_LAZY_AUTODETECT']
+ versions = get_msvc_versions(conf, not lazy_detect)
+ return setup_msvc(conf, versions, arch)
@conf
def find_lt_names_msvc(self, libname, is_static=False):
"""
Win32/MSVC specific code to glean out information from libtool la files.
- this function is not attached to the task_gen class. Returns a triplet:
- (library absolute path, library name without extension, whether the library is static)
+ this function is not attached to the task_gen class
"""
lt_names=[
'lib%s.la' % libname,
'%s.la' % libname,
]
- for path in self.env.LIBPATH:
+ for path in self.env['LIBPATH']:
for la in lt_names:
laf=os.path.join(path,la)
dll=None
@@ -713,14 +708,14 @@ def libname_msvc(self, libname, is_static=False):
(lt_path, lt_libname, lt_static) = self.find_lt_names_msvc(lib, is_static)
if lt_path != None and lt_libname != None:
- if lt_static:
- # file existence check has been made by find_lt_names
+ if lt_static == True:
+ # file existance check has been made by find_lt_names
return os.path.join(lt_path,lt_libname)
if lt_path != None:
- _libpaths = [lt_path] + self.env.LIBPATH
+ _libpaths=[lt_path] + self.env['LIBPATH']
else:
- _libpaths = self.env.LIBPATH
+ _libpaths=self.env['LIBPATH']
static_libs=[
'lib%ss.lib' % lib,
@@ -746,11 +741,11 @@ def libname_msvc(self, libname, is_static=False):
for path in _libpaths:
for libn in libnames:
if os.path.exists(os.path.join(path, libn)):
- Logs.debug('msvc: lib found: %s', os.path.join(path,libn))
+ debug('msvc: lib found: %s' % os.path.join(path,libn))
return re.sub('\.lib$', '',libn)
#if no lib can be found, just return the libname as msvc expects it
- self.fatal('The library %r could not be found' % libname)
+ self.fatal("The library %r could not be found" % libname)
return re.sub('\.lib$', '', libname)
@conf
@@ -758,7 +753,7 @@ def check_lib_msvc(self, libname, is_static=False, uselib_store=None):
"""
Ideally we should be able to place the lib in the right env var, either STLIB or LIB,
but we don't distinguish static libs from shared libs.
- This is ok since msvc doesn't have any special linker flag to select static libs (no env.STLIB_MARKER)
+ This is ok since msvc doesn't have any special linker flag to select static libs (no env['STLIB_MARKER'])
"""
libn = self.libname_msvc(libname, is_static)
@@ -795,26 +790,27 @@ def no_autodetect(conf):
configure(conf)
@conf
-def autodetect(conf, arch=False):
+def autodetect(conf, arch = False):
v = conf.env
if v.NO_MSVC_DETECT:
return
-
- compiler, version, path, includes, libdirs, cpu = conf.detect_msvc()
if arch:
- v.DEST_CPU = cpu
+ compiler, version, path, includes, libdirs, arch = conf.detect_msvc(True)
+ v['DEST_CPU'] = arch
+ else:
+ compiler, version, path, includes, libdirs = conf.detect_msvc()
- v.PATH = path
- v.INCLUDES = includes
- v.LIBPATH = libdirs
- v.MSVC_COMPILER = compiler
+ v['PATH'] = path
+ v['INCLUDES'] = includes
+ v['LIBPATH'] = libdirs
+ v['MSVC_COMPILER'] = compiler
try:
- v.MSVC_VERSION = float(version)
- except ValueError:
- v.MSVC_VERSION = float(version[:-3])
+ v['MSVC_VERSION'] = float(version)
+ except Exception:
+ v['MSVC_VERSION'] = float(version[:-3])
def _get_prog_names(conf, compiler):
- if compiler == 'intel':
+ if compiler=='intel':
compiler_name = 'ICL'
linker_name = 'XILINK'
lib_name = 'XILIB'
@@ -833,9 +829,9 @@ def find_msvc(conf):
# the autodetection is supposed to be performed before entering in this method
v = conf.env
- path = v.PATH
- compiler = v.MSVC_COMPILER
- version = v.MSVC_VERSION
+ path = v['PATH']
+ compiler = v['MSVC_COMPILER']
+ version = v['MSVC_VERSION']
compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
v.MSVC_MANIFEST = (compiler == 'msvc' and version >= 8) or (compiler == 'wsdk' and version >= 6) or (compiler == 'intel' and version >= 11)
@@ -845,47 +841,48 @@ def find_msvc(conf):
# before setting anything, check if the compiler is really msvc
env = dict(conf.environ)
- if path:
- env.update(PATH = ';'.join(path))
+ if path: env.update(PATH = ';'.join(path))
if not conf.cmd_and_log(cxx + ['/nologo', '/help'], env=env):
conf.fatal('the msvc compiler could not be identified')
# c/c++ compiler
- v.CC = v.CXX = cxx
- v.CC_NAME = v.CXX_NAME = 'msvc'
+ v['CC'] = v['CXX'] = cxx
+ v['CC_NAME'] = v['CXX_NAME'] = 'msvc'
# linker
- if not v.LINK_CXX:
- conf.find_program(linker_name, path_list=path, errmsg='%s was not found (linker)' % linker_name, var='LINK_CXX')
+ if not v['LINK_CXX']:
+ link = conf.find_program(linker_name, path_list=path)
+ if link: v['LINK_CXX'] = link
+ else: conf.fatal('%s was not found (linker)' % linker_name)
+ v['LINK'] = link
- if not v.LINK_CC:
- v.LINK_CC = v.LINK_CXX
+ if not v['LINK_CC']:
+ v['LINK_CC'] = v['LINK_CXX']
# staticlib linker
- if not v.AR:
+ if not v['AR']:
stliblink = conf.find_program(lib_name, path_list=path, var='AR')
- if not stliblink:
- return
- v.ARFLAGS = ['/nologo']
+ if not stliblink: return
+ v['ARFLAGS'] = ['/NOLOGO']
# manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later
if v.MSVC_MANIFEST:
conf.find_program('MT', path_list=path, var='MT')
- v.MTFLAGS = ['/nologo']
+ v['MTFLAGS'] = ['/NOLOGO']
try:
conf.load('winres')
- except Errors.ConfigurationError:
- Logs.warn('Resource compiler not found. Compiling resource file is disabled')
+ except Errors.WafError:
+ warn('Resource compiler not found. Compiling resource file is disabled')
@conf
def visual_studio_add_flags(self):
"""visual studio flags found in the system environment"""
v = self.env
- if self.environ.get('INCLUDE'):
- v.prepend_value('INCLUDES', [x for x in self.environ['INCLUDE'].split(';') if x]) # notice the 'S'
- if self.environ.get('LIB'):
- v.prepend_value('LIBPATH', [x for x in self.environ['LIB'].split(';') if x])
+ try: v.prepend_value('INCLUDES', [x for x in self.environ['INCLUDE'].split(';') if x]) # notice the 'S'
+ except Exception: pass
+ try: v.prepend_value('LIBPATH', [x for x in self.environ['LIB'].split(';') if x])
+ except Exception: pass
@conf
def msvc_common_flags(conf):
@@ -894,53 +891,62 @@ def msvc_common_flags(conf):
"""
v = conf.env
- v.DEST_BINFMT = 'pe'
+ v['DEST_BINFMT'] = 'pe'
v.append_value('CFLAGS', ['/nologo'])
v.append_value('CXXFLAGS', ['/nologo'])
- v.append_value('LINKFLAGS', ['/nologo'])
- v.DEFINES_ST = '/D%s'
+ v['DEFINES_ST'] = '/D%s'
- v.CC_SRC_F = ''
- v.CC_TGT_F = ['/c', '/Fo']
- v.CXX_SRC_F = ''
- v.CXX_TGT_F = ['/c', '/Fo']
+ v['CC_SRC_F'] = ''
+ v['CC_TGT_F'] = ['/c', '/Fo']
+ v['CXX_SRC_F'] = ''
+ v['CXX_TGT_F'] = ['/c', '/Fo']
if (v.MSVC_COMPILER == 'msvc' and v.MSVC_VERSION >= 8) or (v.MSVC_COMPILER == 'wsdk' and v.MSVC_VERSION >= 6):
- v.CC_TGT_F = ['/FC'] + v.CC_TGT_F
- v.CXX_TGT_F = ['/FC'] + v.CXX_TGT_F
+ v['CC_TGT_F']= ['/FC'] + v['CC_TGT_F']
+ v['CXX_TGT_F']= ['/FC'] + v['CXX_TGT_F']
+
+ v['CPPPATH_ST'] = '/I%s' # template for adding include paths
- v.CPPPATH_ST = '/I%s' # template for adding include paths
+ v['AR_TGT_F'] = v['CCLNK_TGT_F'] = v['CXXLNK_TGT_F'] = '/OUT:'
- v.AR_TGT_F = v.CCLNK_TGT_F = v.CXXLNK_TGT_F = '/OUT:'
+ # Subsystem specific flags
+ v['CFLAGS_CONSOLE'] = v['CXXFLAGS_CONSOLE'] = ['/SUBSYSTEM:CONSOLE']
+ v['CFLAGS_NATIVE'] = v['CXXFLAGS_NATIVE'] = ['/SUBSYSTEM:NATIVE']
+ v['CFLAGS_POSIX'] = v['CXXFLAGS_POSIX'] = ['/SUBSYSTEM:POSIX']
+ v['CFLAGS_WINDOWS'] = v['CXXFLAGS_WINDOWS'] = ['/SUBSYSTEM:WINDOWS']
+ v['CFLAGS_WINDOWSCE'] = v['CXXFLAGS_WINDOWSCE'] = ['/SUBSYSTEM:WINDOWSCE']
# CRT specific flags
- v.CFLAGS_CRT_MULTITHREADED = v.CXXFLAGS_CRT_MULTITHREADED = ['/MT']
- v.CFLAGS_CRT_MULTITHREADED_DLL = v.CXXFLAGS_CRT_MULTITHREADED_DLL = ['/MD']
+ v['CFLAGS_CRT_MULTITHREADED'] = v['CXXFLAGS_CRT_MULTITHREADED'] = ['/MT']
+ v['CFLAGS_CRT_MULTITHREADED_DLL'] = v['CXXFLAGS_CRT_MULTITHREADED_DLL'] = ['/MD']
- v.CFLAGS_CRT_MULTITHREADED_DBG = v.CXXFLAGS_CRT_MULTITHREADED_DBG = ['/MTd']
- v.CFLAGS_CRT_MULTITHREADED_DLL_DBG = v.CXXFLAGS_CRT_MULTITHREADED_DLL_DBG = ['/MDd']
+ v['CFLAGS_CRT_MULTITHREADED_DBG'] = v['CXXFLAGS_CRT_MULTITHREADED_DBG'] = ['/MTd']
+ v['CFLAGS_CRT_MULTITHREADED_DLL_DBG'] = v['CXXFLAGS_CRT_MULTITHREADED_DLL_DBG'] = ['/MDd']
- v.LIB_ST = '%s.lib'
- v.LIBPATH_ST = '/LIBPATH:%s'
- v.STLIB_ST = '%s.lib'
- v.STLIBPATH_ST = '/LIBPATH:%s'
+ # linker
+ v['LIB_ST'] = '%s.lib' # template for adding shared libs
+ v['LIBPATH_ST'] = '/LIBPATH:%s' # template for adding libpaths
+ v['STLIB_ST'] = '%s.lib'
+ v['STLIBPATH_ST'] = '/LIBPATH:%s'
- if v.MSVC_MANIFEST:
+ v.append_value('LINKFLAGS', ['/NOLOGO'])
+ if v['MSVC_MANIFEST']:
v.append_value('LINKFLAGS', ['/MANIFEST'])
- v.CFLAGS_cshlib = []
- v.CXXFLAGS_cxxshlib = []
- v.LINKFLAGS_cshlib = v.LINKFLAGS_cxxshlib = ['/DLL']
- v.cshlib_PATTERN = v.cxxshlib_PATTERN = '%s.dll'
- v.implib_PATTERN = '%s.lib'
- v.IMPLIB_ST = '/IMPLIB:%s'
-
- v.LINKFLAGS_cstlib = []
- v.cstlib_PATTERN = v.cxxstlib_PATTERN = '%s.lib'
+ # shared library
+ v['CFLAGS_cshlib'] = []
+ v['CXXFLAGS_cxxshlib'] = []
+ v['LINKFLAGS_cshlib'] = v['LINKFLAGS_cxxshlib'] = ['/DLL']
+ v['cshlib_PATTERN'] = v['cxxshlib_PATTERN'] = '%s.dll'
+ v['implib_PATTERN'] = '%s.lib'
+ v['IMPLIB_ST'] = '/IMPLIB:%s'
- v.cprogram_PATTERN = v.cxxprogram_PATTERN = '%s.exe'
+ # static library
+ v['LINKFLAGS_cstlib'] = []
+ v['cstlib_PATTERN'] = v['cxxstlib_PATTERN'] = '%s.lib'
- v.def_PATTERN = '/def:%s'
+ # program
+ v['cprogram_PATTERN'] = v['cxxprogram_PATTERN'] = '%s.exe'
#######################################################################################################
@@ -974,10 +980,12 @@ def apply_flags_msvc(self):
self.link_task.outputs.append(pdbnode)
if getattr(self, 'install_task', None):
- self.pdb_install_task = self.add_install_files(
- install_to=self.install_task.install_to, install_from=pdbnode)
+ self.pdb_install_task = self.bld.install_files(self.install_task.dest, pdbnode, env=self.env)
+
break
+# split the manifest file processing from the link task, like for the rc processing
+
@feature('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib')
@after_method('apply_link')
def apply_manifest(self):
@@ -987,16 +995,161 @@ def apply_manifest(self):
the manifest file, the binaries are unusable.
See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx
"""
+
if self.env.CC_NAME == 'msvc' and self.env.MSVC_MANIFEST and getattr(self, 'link_task', None):
out_node = self.link_task.outputs[0]
man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
self.link_task.outputs.append(man_node)
- self.env.DO_MANIFEST = True
+ self.link_task.do_manifest = True
+
+def exec_mf(self):
+ """
+ Create the manifest file
+ """
+ env = self.env
+ mtool = env['MT']
+ if not mtool:
+ return 0
+
+ self.do_manifest = False
+
+ outfile = self.outputs[0].abspath()
+
+ manifest = None
+ for out_node in self.outputs:
+ if out_node.name.endswith('.manifest'):
+ manifest = out_node.abspath()
+ break
+ if manifest is None:
+ # Should never get here. If we do, it means the manifest file was
+ # never added to the outputs list, thus we don't have a manifest file
+ # to embed, so we just return.
+ return 0
+
+ # embedding mode. Different for EXE's and DLL's.
+ # see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
+ mode = ''
+ if 'cprogram' in self.generator.features or 'cxxprogram' in self.generator.features:
+ mode = '1'
+ elif 'cshlib' in self.generator.features or 'cxxshlib' in self.generator.features:
+ mode = '2'
+
+ debug('msvc: embedding manifest in mode %r' % mode)
+
+ lst = [] + mtool
+ lst.extend(Utils.to_list(env['MTFLAGS']))
+ lst.extend(['-manifest', manifest])
+ lst.append('-outputresource:%s;%s' % (outfile, mode))
+
+ return self.exec_command(lst)
+
+def quote_response_command(self, flag):
+ if flag.find(' ') > -1:
+ for x in ('/LIBPATH:', '/IMPLIB:', '/OUT:', '/I'):
+ if flag.startswith(x):
+ flag = '%s"%s"' % (x, flag[len(x):])
+ break
+ else:
+ flag = '"%s"' % flag
+ return flag
+
+def exec_response_command(self, cmd, **kw):
+ # not public yet
+ try:
+ tmp = None
+ if sys.platform.startswith('win') and isinstance(cmd, list) and len(' '.join(cmd)) >= 8192:
+ program = cmd[0] #unquoted program name, otherwise exec_command will fail
+ cmd = [self.quote_response_command(x) for x in cmd]
+ (fd, tmp) = tempfile.mkstemp()
+ os.write(fd, '\r\n'.join(i.replace('\\', '\\\\') for i in cmd[1:]).encode())
+ os.close(fd)
+ cmd = [program, '@' + tmp]
+ # no return here, that's on purpose
+ ret = self.generator.bld.exec_command(cmd, **kw)
+ finally:
+ if tmp:
+ try:
+ os.remove(tmp)
+ except OSError:
+ pass # anti-virus and indexers can keep the files open -_-
+ return ret
+
+########## stupid evil command modification: concatenate the tokens /Fx, /doc, and /x: with the next token
+
+def exec_command_msvc(self, *k, **kw):
+ """
+ Change the command-line execution for msvc programs.
+ Instead of quoting all the paths and keep using the shell, we can just join the options msvc is interested in
+ """
+ if isinstance(k[0], list):
+ lst = []
+ carry = ''
+ for a in k[0]:
+ if a == '/Fo' or a == '/doc' or a[-1] == ':':
+ carry = a
+ else:
+ lst.append(carry + a)
+ carry = ''
+ k = [lst]
+
+ if self.env['PATH']:
+ env = dict(self.env.env or os.environ)
+ env.update(PATH = ';'.join(self.env['PATH']))
+ kw['env'] = env
+
+ bld = self.generator.bld
+ try:
+ if not kw.get('cwd', None):
+ kw['cwd'] = bld.cwd
+ except AttributeError:
+ bld.cwd = kw['cwd'] = bld.variant_dir
+
+ ret = self.exec_response_command(k[0], **kw)
+ if not ret and getattr(self, 'do_manifest', None):
+ ret = self.exec_mf()
+ return ret
+
+def wrap_class(class_name):
+ """
+ Manifest file processing and @response file workaround for command-line length limits on Windows systems
+ The indicated task class is replaced by a subclass to prevent conflicts in case the class is wrapped more than once
+ """
+ cls = Task.classes.get(class_name, None)
+
+ if not cls:
+ return None
+
+ derived_class = type(class_name, (cls,), {})
+
+ def exec_command(self, *k, **kw):
+ if self.env['CC_NAME'] == 'msvc':
+ return self.exec_command_msvc(*k, **kw)
+ else:
+ return super(derived_class, self).exec_command(*k, **kw)
+
+ # Chain-up monkeypatch needed since exec_command() is in base class API
+ derived_class.exec_command = exec_command
+
+ # No chain-up behavior needed since the following methods aren't in
+ # base class API
+ derived_class.exec_response_command = exec_response_command
+ derived_class.quote_response_command = quote_response_command
+ derived_class.exec_command_msvc = exec_command_msvc
+ derived_class.exec_mf = exec_mf
+
+ if hasattr(cls, 'hcode'):
+ derived_class.hcode = cls.hcode
+
+ return derived_class
+
+for k in 'c cxx cprogram cxxprogram cshlib cxxshlib cstlib cxxstlib'.split():
+ wrap_class(k)
def make_winapp(self, family):
append = self.env.append_unique
append('DEFINES', 'WINAPI_FAMILY=%s' % family)
- append('CXXFLAGS', ['/ZW', '/TP'])
+ append('CXXFLAGS', '/ZW')
+ append('CXXFLAGS', '/TP')
for lib_path in self.env.LIBPATH:
append('CXXFLAGS','/AI%s'%lib_path)
@@ -1008,7 +1161,9 @@ def make_winphone_app(self):
Insert configuration flags for windows phone applications (adds /ZW, /TP...)
"""
make_winapp(self, 'WINAPI_FAMILY_PHONE_APP')
- self.env.append_unique('LINKFLAGS', ['/NODEFAULTLIB:ole32.lib', 'PhoneAppModelHost.lib'])
+ conf.env.append_unique('LINKFLAGS', '/NODEFAULTLIB:ole32.lib')
+ conf.env.append_unique('LINKFLAGS', 'PhoneAppModelHost.lib')
+
@feature('winapp')
@after_method('process_use')
diff --git a/waflib/Tools/nasm.py b/waflib/Tools/nasm.py
deleted file mode 100644
index 411d5826..00000000
--- a/waflib/Tools/nasm.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2008-2018 (ita)
-
-"""
-Nasm tool (asm processing)
-"""
-
-import os
-import waflib.Tools.asm # leave this
-from waflib.TaskGen import feature
-
-@feature('asm')
-def apply_nasm_vars(self):
- """provided for compatibility"""
- self.env.append_value('ASFLAGS', self.to_list(getattr(self, 'nasm_flags', [])))
-
-def configure(conf):
- """
- Detect nasm/yasm and set the variable *AS*
- """
- conf.find_program(['nasm', 'yasm'], var='AS')
- conf.env.AS_TGT_F = ['-o']
- conf.env.ASLNK_TGT_F = ['-o']
- conf.load('asm')
- conf.env.ASMPATH_ST = '-I%s' + os.sep
diff --git a/waflib/Tools/nobuild.py b/waflib/Tools/nobuild.py
deleted file mode 100644
index 2e4b055e..00000000
--- a/waflib/Tools/nobuild.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2015 (ita)
-
-"""
-Override the build commands to write empty files.
-This is useful for profiling and evaluating the Python overhead.
-
-To use::
-
- def build(bld):
- ...
- bld.load('nobuild')
-
-"""
-
-from waflib import Task
-def build(bld):
- def run(self):
- for x in self.outputs:
- x.write('')
- for (name, cls) in Task.classes.items():
- cls.run = run
-
diff --git a/waflib/Tools/perl.py b/waflib/Tools/perl.py
deleted file mode 100644
index 32b03fba..00000000
--- a/waflib/Tools/perl.py
+++ /dev/null
@@ -1,156 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# andersg at 0x63.nu 2007
-# Thomas Nagy 2016-2018 (ita)
-
-"""
-Support for Perl extensions. A C/C++ compiler is required::
-
- def options(opt):
- opt.load('compiler_c perl')
- def configure(conf):
- conf.load('compiler_c perl')
- conf.check_perl_version((5,6,0))
- conf.check_perl_ext_devel()
- conf.check_perl_module('Cairo')
- conf.check_perl_module('Devel::PPPort 4.89')
- def build(bld):
- bld(
- features = 'c cshlib perlext',
- source = 'Mytest.xs',
- target = 'Mytest',
- install_path = '${ARCHDIR_PERL}/auto')
- bld.install_files('${ARCHDIR_PERL}', 'Mytest.pm')
-"""
-
-import os
-from waflib import Task, Options, Utils, Errors
-from waflib.Configure import conf
-from waflib.TaskGen import extension, feature, before_method
-
-@before_method('apply_incpaths', 'apply_link', 'propagate_uselib_vars')
-@feature('perlext')
-def init_perlext(self):
- """
- Change the values of *cshlib_PATTERN* and *cxxshlib_PATTERN* to remove the
- *lib* prefix from library names.
- """
- self.uselib = self.to_list(getattr(self, 'uselib', []))
- if not 'PERLEXT' in self.uselib:
- self.uselib.append('PERLEXT')
- self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.perlext_PATTERN
-
-@extension('.xs')
-def xsubpp_file(self, node):
- """
- Create :py:class:`waflib.Tools.perl.xsubpp` tasks to process *.xs* files
- """
- outnode = node.change_ext('.c')
- self.create_task('xsubpp', node, outnode)
- self.source.append(outnode)
-
-class xsubpp(Task.Task):
- """
- Process *.xs* files
- """
- run_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
- color = 'BLUE'
- ext_out = ['.h']
-
-@conf
-def check_perl_version(self, minver=None):
- """
- Check if Perl is installed, and set the variable PERL.
- minver is supposed to be a tuple
- """
- res = True
- if minver:
- cver = '.'.join(map(str,minver))
- else:
- cver = ''
-
- self.start_msg('Checking for minimum perl version %s' % cver)
-
- perl = self.find_program('perl', var='PERL', value=getattr(Options.options, 'perlbinary', None))
- version = self.cmd_and_log(perl + ["-e", 'printf \"%vd\", $^V'])
- if not version:
- res = False
- version = "Unknown"
- elif not minver is None:
- ver = tuple(map(int, version.split(".")))
- if ver < minver:
- res = False
-
- self.end_msg(version, color=res and 'GREEN' or 'YELLOW')
- return res
-
-@conf
-def check_perl_module(self, module):
- """
- Check if specified perlmodule is installed.
-
- The minimum version can be specified by specifying it after modulename
- like this::
-
- def configure(conf):
- conf.check_perl_module("Some::Module 2.92")
- """
- cmd = self.env.PERL + ['-e', 'use %s' % module]
- self.start_msg('perl module %s' % module)
- try:
- r = self.cmd_and_log(cmd)
- except Errors.WafError:
- self.end_msg(False)
- return None
- self.end_msg(r or True)
- return r
-
-@conf
-def check_perl_ext_devel(self):
- """
- Check for configuration needed to build perl extensions.
-
- Sets different xxx_PERLEXT variables in the environment.
-
- Also sets the ARCHDIR_PERL variable useful as installation path,
- which can be overridden by ``--with-perl-archdir`` option.
- """
-
- env = self.env
- perl = env.PERL
- if not perl:
- self.fatal('find perl first')
-
- def cmd_perl_config(s):
- return perl + ['-MConfig', '-e', 'print \"%s\"' % s]
- def cfg_str(cfg):
- return self.cmd_and_log(cmd_perl_config(cfg))
- def cfg_lst(cfg):
- return Utils.to_list(cfg_str(cfg))
- def find_xsubpp():
- for var in ('privlib', 'vendorlib'):
- xsubpp = cfg_lst('$Config{%s}/ExtUtils/xsubpp$Config{exe_ext}' % var)
- if xsubpp and os.path.isfile(xsubpp[0]):
- return xsubpp
- return self.find_program('xsubpp')
-
- env.LINKFLAGS_PERLEXT = cfg_lst('$Config{lddlflags}')
- env.INCLUDES_PERLEXT = cfg_lst('$Config{archlib}/CORE')
- env.CFLAGS_PERLEXT = cfg_lst('$Config{ccflags} $Config{cccdlflags}')
- env.EXTUTILS_TYPEMAP = cfg_lst('$Config{privlib}/ExtUtils/typemap')
- env.XSUBPP = find_xsubpp()
-
- if not getattr(Options.options, 'perlarchdir', None):
- env.ARCHDIR_PERL = cfg_str('$Config{sitearch}')
- else:
- env.ARCHDIR_PERL = getattr(Options.options, 'perlarchdir')
-
- env.perlext_PATTERN = '%s.' + cfg_str('$Config{dlext}')
-
-def options(opt):
- """
- Add the ``--with-perl-archdir`` and ``--with-perl-binary`` command-line options.
- """
- opt.add_option('--with-perl-binary', type='string', dest='perlbinary', help = 'Specify alternate perl binary', default=None)
- opt.add_option('--with-perl-archdir', type='string', dest='perlarchdir', help = 'Specify directory where to install arch specific files', default=None)
-
diff --git a/waflib/Tools/python.py b/waflib/Tools/python.py
deleted file mode 100644
index 25841d03..00000000
--- a/waflib/Tools/python.py
+++ /dev/null
@@ -1,627 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2007-2015 (ita)
-# Gustavo Carneiro (gjc), 2007
-
-"""
-Support for Python, detect the headers and libraries and provide
-*use* variables to link C/C++ programs against them::
-
- def options(opt):
- opt.load('compiler_c python')
- def configure(conf):
- conf.load('compiler_c python')
- conf.check_python_version((2,4,2))
- conf.check_python_headers()
- def build(bld):
- bld.program(features='pyembed', source='a.c', target='myprog')
- bld.shlib(features='pyext', source='b.c', target='mylib')
-"""
-
-import os, sys
-from waflib import Errors, Logs, Node, Options, Task, Utils
-from waflib.TaskGen import extension, before_method, after_method, feature
-from waflib.Configure import conf
-
-FRAG = '''
-#include <Python.h>
-#ifdef __cplusplus
-extern "C" {
-#endif
- void Py_Initialize(void);
- void Py_Finalize(void);
-#ifdef __cplusplus
-}
-#endif
-int main(int argc, char **argv)
-{
- (void)argc; (void)argv;
- Py_Initialize();
- Py_Finalize();
- return 0;
-}
-'''
-"""
-Piece of C/C++ code used in :py:func:`waflib.Tools.python.check_python_headers`
-"""
-
-INST = '''
-import sys, py_compile
-py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True)
-'''
-"""
-Piece of Python code used in :py:class:`waflib.Tools.python.pyo` and :py:class:`waflib.Tools.python.pyc` for byte-compiling python files
-"""
-
-DISTUTILS_IMP = ['from distutils.sysconfig import get_config_var, get_python_lib']
-
-@before_method('process_source')
-@feature('py')
-def feature_py(self):
- """
- Create tasks to byte-compile .py files and install them, if requested
- """
- self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
- install_from = getattr(self, 'install_from', None)
- if install_from and not isinstance(install_from, Node.Node):
- install_from = self.path.find_dir(install_from)
- self.install_from = install_from
-
- ver = self.env.PYTHON_VERSION
- if not ver:
- self.bld.fatal('Installing python files requires PYTHON_VERSION, try conf.check_python_version')
-
- if int(ver.replace('.', '')) > 31:
- self.install_32 = True
-
-@extension('.py')
-def process_py(self, node):
- """
- Add signature of .py file, so it will be byte-compiled when necessary
- """
- assert(hasattr(self, 'install_path')), 'add features="py"'
-
- # where to install the python file
- if self.install_path:
- if self.install_from:
- self.add_install_files(install_to=self.install_path, install_from=node, cwd=self.install_from, relative_trick=True)
- else:
- self.add_install_files(install_to=self.install_path, install_from=node, relative_trick=True)
-
- lst = []
- if self.env.PYC:
- lst.append('pyc')
- if self.env.PYO:
- lst.append('pyo')
-
- if self.install_path:
- if self.install_from:
- pyd = Utils.subst_vars("%s/%s" % (self.install_path, node.path_from(self.install_from)), self.env)
- else:
- pyd = Utils.subst_vars("%s/%s" % (self.install_path, node.path_from(self.path)), self.env)
- else:
- pyd = node.abspath()
-
- for ext in lst:
- if self.env.PYTAG and not self.env.NOPYCACHE:
- # __pycache__ installation for python 3.2 - PEP 3147
- name = node.name[:-3]
- pyobj = node.parent.get_bld().make_node('__pycache__').make_node("%s.%s.%s" % (name, self.env.PYTAG, ext))
- pyobj.parent.mkdir()
- else:
- pyobj = node.change_ext(".%s" % ext)
-
- tsk = self.create_task(ext, node, pyobj)
- tsk.pyd = pyd
-
- if self.install_path:
- self.add_install_files(install_to=os.path.dirname(pyd), install_from=pyobj, cwd=node.parent.get_bld(), relative_trick=True)
-
-class pyc(Task.Task):
- """
- Byte-compiling python files
- """
- color = 'PINK'
- def __str__(self):
- node = self.outputs[0]
- return node.path_from(node.ctx.launch_node())
- def run(self):
- cmd = [Utils.subst_vars('${PYTHON}', self.env), '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd]
- ret = self.generator.bld.exec_command(cmd)
- return ret
-
-class pyo(Task.Task):
- """
- Byte-compiling python files
- """
- color = 'PINK'
- def __str__(self):
- node = self.outputs[0]
- return node.path_from(node.ctx.launch_node())
- def run(self):
- cmd = [Utils.subst_vars('${PYTHON}', self.env), Utils.subst_vars('${PYFLAGS_OPT}', self.env), '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd]
- ret = self.generator.bld.exec_command(cmd)
- return ret
-
-@feature('pyext')
-@before_method('propagate_uselib_vars', 'apply_link')
-@after_method('apply_bundle')
-def init_pyext(self):
- """
- Change the values of *cshlib_PATTERN* and *cxxshlib_PATTERN* to remove the
- *lib* prefix from library names.
- """
- self.uselib = self.to_list(getattr(self, 'uselib', []))
- if not 'PYEXT' in self.uselib:
- self.uselib.append('PYEXT')
- # override shlib_PATTERN set by the osx module
- self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN = self.env.pyext_PATTERN
- self.env.fcshlib_PATTERN = self.env.dshlib_PATTERN = self.env.pyext_PATTERN
-
- try:
- if not self.install_path:
- return
- except AttributeError:
- self.install_path = '${PYTHONARCHDIR}'
-
-@feature('pyext')
-@before_method('apply_link', 'apply_bundle')
-def set_bundle(self):
- """Mac-specific pyext extension that enables bundles from c_osx.py"""
- if Utils.unversioned_sys_platform() == 'darwin':
- self.mac_bundle = True
-
-@before_method('propagate_uselib_vars')
-@feature('pyembed')
-def init_pyembed(self):
- """
- Add the PYEMBED variable.
- """
- self.uselib = self.to_list(getattr(self, 'uselib', []))
- if not 'PYEMBED' in self.uselib:
- self.uselib.append('PYEMBED')
-
-@conf
-def get_python_variables(self, variables, imports=None):
- """
- Spawn a new python process to dump configuration variables
-
- :param variables: variables to print
- :type variables: list of string
- :param imports: one import by element
- :type imports: list of string
- :return: the variable values
- :rtype: list of string
- """
- if not imports:
- try:
- imports = self.python_imports
- except AttributeError:
- imports = DISTUTILS_IMP
-
- program = list(imports) # copy
- program.append('')
- for v in variables:
- program.append("print(repr(%s))" % v)
- os_env = dict(os.environ)
- try:
- del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
- except KeyError:
- pass
-
- try:
- out = self.cmd_and_log(self.env.PYTHON + ['-c', '\n'.join(program)], env=os_env)
- except Errors.WafError:
- self.fatal('The distutils module is unusable: install "python-devel"?')
- self.to_log(out)
- return_values = []
- for s in out.splitlines():
- s = s.strip()
- if not s:
- continue
- if s == 'None':
- return_values.append(None)
- elif (s[0] == "'" and s[-1] == "'") or (s[0] == '"' and s[-1] == '"'):
- return_values.append(eval(s))
- elif s[0].isdigit():
- return_values.append(int(s))
- else: break
- return return_values
-
-@conf
-def test_pyembed(self, mode, msg='Testing pyembed configuration'):
- self.check(header_name='Python.h', define_name='HAVE_PYEMBED', msg=msg,
- fragment=FRAG, errmsg='Could not build a python embedded interpreter',
- features='%s %sprogram pyembed' % (mode, mode))
-
-@conf
-def test_pyext(self, mode, msg='Testing pyext configuration'):
- self.check(header_name='Python.h', define_name='HAVE_PYEXT', msg=msg,
- fragment=FRAG, errmsg='Could not build python extensions',
- features='%s %sshlib pyext' % (mode, mode))
-
-@conf
-def python_cross_compile(self, features='pyembed pyext'):
- """
- For cross-compilation purposes, it is possible to bypass the normal detection and set the flags that you want:
- PYTHON_VERSION='3.4' PYTAG='cpython34' pyext_PATTERN="%s.so" PYTHON_LDFLAGS='-lpthread -ldl' waf configure
-
- The following variables are used:
- PYTHON_VERSION required
- PYTAG required
- PYTHON_LDFLAGS required
- pyext_PATTERN required
- PYTHON_PYEXT_LDFLAGS
- PYTHON_PYEMBED_LDFLAGS
- """
- features = Utils.to_list(features)
- if not ('PYTHON_LDFLAGS' in self.environ or 'PYTHON_PYEXT_LDFLAGS' in self.environ or 'PYTHON_PYEMBED_LDFLAGS' in self.environ):
- return False
-
- for x in 'PYTHON_VERSION PYTAG pyext_PATTERN'.split():
- if not x in self.environ:
- self.fatal('Please set %s in the os environment' % x)
- else:
- self.env[x] = self.environ[x]
-
- xx = self.env.CXX_NAME and 'cxx' or 'c'
- if 'pyext' in features:
- flags = self.environ.get('PYTHON_PYEXT_LDFLAGS', self.environ.get('PYTHON_LDFLAGS'))
- if flags is None:
- self.fatal('No flags provided through PYTHON_PYEXT_LDFLAGS as required')
- else:
- self.parse_flags(flags, 'PYEXT')
- self.test_pyext(xx)
- if 'pyembed' in features:
- flags = self.environ.get('PYTHON_PYEMBED_LDFLAGS', self.environ.get('PYTHON_LDFLAGS'))
- if flags is None:
- self.fatal('No flags provided through PYTHON_PYEMBED_LDFLAGS as required')
- else:
- self.parse_flags(flags, 'PYEMBED')
- self.test_pyembed(xx)
- return True
-
-@conf
-def check_python_headers(conf, features='pyembed pyext'):
- """
- Check for headers and libraries necessary to extend or embed python by using the module *distutils*.
- On success the environment variables xxx_PYEXT and xxx_PYEMBED are added:
-
- * PYEXT: for compiling python extensions
- * PYEMBED: for embedding a python interpreter
- """
- features = Utils.to_list(features)
- assert ('pyembed' in features) or ('pyext' in features), "check_python_headers features must include 'pyembed' and/or 'pyext'"
- env = conf.env
- if not env.CC_NAME and not env.CXX_NAME:
- conf.fatal('load a compiler first (gcc, g++, ..)')
-
- # bypass all the code below for cross-compilation
- if conf.python_cross_compile(features):
- return
-
- if not env.PYTHON_VERSION:
- conf.check_python_version()
-
- pybin = env.PYTHON
- if not pybin:
- conf.fatal('Could not find the python executable')
-
- # so we actually do all this for compatibility reasons and for obtaining pyext_PATTERN below
- v = 'prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split()
- try:
- lst = conf.get_python_variables(["get_config_var('%s') or ''" % x for x in v])
- except RuntimeError:
- conf.fatal("Python development headers not found (-v for details).")
-
- vals = ['%s = %r' % (x, y) for (x, y) in zip(v, lst)]
- conf.to_log("Configuration returned from %r:\n%s\n" % (pybin, '\n'.join(vals)))
-
- dct = dict(zip(v, lst))
- x = 'MACOSX_DEPLOYMENT_TARGET'
- if dct[x]:
- env[x] = conf.environ[x] = dct[x]
- env.pyext_PATTERN = '%s' + dct['SO'] # not a mistake
-
-
- # Try to get pythonX.Y-config
- num = '.'.join(env.PYTHON_VERSION.split('.')[:2])
- conf.find_program([''.join(pybin) + '-config', 'python%s-config' % num, 'python-config-%s' % num, 'python%sm-config' % num], var='PYTHON_CONFIG', msg="python-config", mandatory=False)
-
- if env.PYTHON_CONFIG:
- # python2.6-config requires 3 runs
- all_flags = [['--cflags', '--libs', '--ldflags']]
- if sys.hexversion < 0x2070000:
- all_flags = [[k] for k in all_flags[0]]
-
- xx = env.CXX_NAME and 'cxx' or 'c'
-
- if 'pyembed' in features:
- for flags in all_flags:
- conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=flags)
-
- try:
- conf.test_pyembed(xx)
- except conf.errors.ConfigurationError:
- # python bug 7352
- if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
- env.append_unique('LIBPATH_PYEMBED', [dct['LIBDIR']])
- conf.test_pyembed(xx)
- else:
- raise
-
- if 'pyext' in features:
- for flags in all_flags:
- conf.check_cfg(msg='Asking python-config for pyext %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEXT', args=flags)
-
- try:
- conf.test_pyext(xx)
- except conf.errors.ConfigurationError:
- # python bug 7352
- if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
- env.append_unique('LIBPATH_PYEXT', [dct['LIBDIR']])
- conf.test_pyext(xx)
- else:
- raise
-
- conf.define('HAVE_PYTHON_H', 1)
- return
-
- # No python-config, do something else on windows systems
- all_flags = dct['LDFLAGS'] + ' ' + dct['CFLAGS']
- conf.parse_flags(all_flags, 'PYEMBED')
-
- all_flags = dct['LDFLAGS'] + ' ' + dct['LDSHARED'] + ' ' + dct['CFLAGS']
- conf.parse_flags(all_flags, 'PYEXT')
-
- result = None
- if not dct["LDVERSION"]:
- dct["LDVERSION"] = env.PYTHON_VERSION
-
- # further simplification will be complicated
- for name in ('python' + dct['LDVERSION'], 'python' + env.PYTHON_VERSION + 'm', 'python' + env.PYTHON_VERSION.replace('.', '')):
-
- # LIBPATH_PYEMBED is already set; see if it works.
- if not result and env.LIBPATH_PYEMBED:
- path = env.LIBPATH_PYEMBED
- conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n" % path)
- result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBPATH_PYEMBED' % name)
-
- if not result and dct['LIBDIR']:
- path = [dct['LIBDIR']]
- conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n" % path)
- result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBDIR' % name)
-
- if not result and dct['LIBPL']:
- path = [dct['LIBPL']]
- conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
- result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in python_LIBPL' % name)
-
- if not result:
- path = [os.path.join(dct['prefix'], "libs")]
- conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
- result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $prefix/libs' % name)
-
- if result:
- break # do not forget to set LIBPATH_PYEMBED
-
- if result:
- env.LIBPATH_PYEMBED = path
- env.append_value('LIB_PYEMBED', [name])
- else:
- conf.to_log("\n\n### LIB NOT FOUND\n")
-
- # under certain conditions, python extensions must link to
- # python libraries, not just python embedding programs.
- if Utils.is_win32 or dct['Py_ENABLE_SHARED']:
- env.LIBPATH_PYEXT = env.LIBPATH_PYEMBED
- env.LIB_PYEXT = env.LIB_PYEMBED
-
- conf.to_log("Include path for Python extensions (found via distutils module): %r\n" % (dct['INCLUDEPY'],))
- env.INCLUDES_PYEXT = [dct['INCLUDEPY']]
- env.INCLUDES_PYEMBED = [dct['INCLUDEPY']]
-
- # Code using the Python API needs to be compiled with -fno-strict-aliasing
- if env.CC_NAME == 'gcc':
- env.append_value('CFLAGS_PYEMBED', ['-fno-strict-aliasing'])
- env.append_value('CFLAGS_PYEXT', ['-fno-strict-aliasing'])
- if env.CXX_NAME == 'gcc':
- env.append_value('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing'])
- env.append_value('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])
-
- if env.CC_NAME == "msvc":
- from distutils.msvccompiler import MSVCCompiler
- dist_compiler = MSVCCompiler()
- dist_compiler.initialize()
- env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
- env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
- env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)
-
- # See if it compiles
- conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Distutils not installed? Broken python installation? Get python-config now!')
-
-@conf
-def check_python_version(conf, minver=None):
- """
- Check if the python interpreter is found matching a given minimum version.
- minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver.
-
- If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR' (eg. '2.4')
- of the actual python version found, and PYTHONDIR and PYTHONARCHDIR
- are defined, pointing to the site-packages directories appropriate for
- this python version, where modules/packages/extensions should be
- installed.
-
- :param minver: minimum version
- :type minver: tuple of int
- """
- assert minver is None or isinstance(minver, tuple)
- pybin = conf.env.PYTHON
- if not pybin:
- conf.fatal('could not find the python executable')
-
- # Get python version string
- cmd = pybin + ['-c', 'import sys\nfor x in sys.version_info: print(str(x))']
- Logs.debug('python: Running python command %r', cmd)
- lines = conf.cmd_and_log(cmd).split()
- assert len(lines) == 5, "found %r lines, expected 5: %r" % (len(lines), lines)
- pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))
-
- # Compare python version with the minimum required
- result = (minver is None) or (pyver_tuple >= minver)
-
- if result:
- # define useful environment variables
- pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
- conf.env.PYTHON_VERSION = pyver
-
- if 'PYTHONDIR' in conf.env:
- # Check if --pythondir was specified
- pydir = conf.env.PYTHONDIR
- elif 'PYTHONDIR' in conf.environ:
- # Check environment for PYTHONDIR
- pydir = conf.environ['PYTHONDIR']
- else:
- # Finally, try to guess
- if Utils.is_win32:
- (python_LIBDEST, pydir) = conf.get_python_variables(
- ["get_config_var('LIBDEST') or ''",
- "get_python_lib(standard_lib=0) or ''"])
- else:
- python_LIBDEST = None
- (pydir,) = conf.get_python_variables( ["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
- if python_LIBDEST is None:
- if conf.env.LIBDIR:
- python_LIBDEST = os.path.join(conf.env.LIBDIR, 'python' + pyver)
- else:
- python_LIBDEST = os.path.join(conf.env.PREFIX, 'lib', 'python' + pyver)
-
- if 'PYTHONARCHDIR' in conf.env:
- # Check if --pythonarchdir was specified
- pyarchdir = conf.env.PYTHONARCHDIR
- elif 'PYTHONARCHDIR' in conf.environ:
- # Check environment for PYTHONDIR
- pyarchdir = conf.environ['PYTHONARCHDIR']
- else:
- # Finally, try to guess
- (pyarchdir, ) = conf.get_python_variables( ["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
- if not pyarchdir:
- pyarchdir = pydir
-
- if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist
- conf.define('PYTHONDIR', pydir)
- conf.define('PYTHONARCHDIR', pyarchdir)
-
- conf.env.PYTHONDIR = pydir
- conf.env.PYTHONARCHDIR = pyarchdir
-
- # Feedback
- pyver_full = '.'.join(map(str, pyver_tuple[:3]))
- if minver is None:
- conf.msg('Checking for python version', pyver_full)
- else:
- minver_str = '.'.join(map(str, minver))
- conf.msg('Checking for python version >= %s' % (minver_str,), pyver_full, color=result and 'GREEN' or 'YELLOW')
-
- if not result:
- conf.fatal('The python version is too old, expecting %r' % (minver,))
-
-PYTHON_MODULE_TEMPLATE = '''
-import %s as current_module
-version = getattr(current_module, '__version__', None)
-if version is not None:
- print(str(version))
-else:
- print('unknown version')
-'''
-
-@conf
-def check_python_module(conf, module_name, condition=''):
- """
- Check if the selected python interpreter can import the given python module::
-
- def configure(conf):
- conf.check_python_module('pygccxml')
- conf.check_python_module('re', condition="ver > num(2, 0, 4) and ver <= num(3, 0, 0)")
-
- :param module_name: module
- :type module_name: string
- """
- msg = "Checking for python module %r" % module_name
- if condition:
- msg = '%s (%s)' % (msg, condition)
- conf.start_msg(msg)
- try:
- ret = conf.cmd_and_log(conf.env.PYTHON + ['-c', PYTHON_MODULE_TEMPLATE % module_name])
- except Errors.WafError:
- conf.end_msg(False)
- conf.fatal('Could not find the python module %r' % module_name)
-
- ret = ret.strip()
- if condition:
- conf.end_msg(ret)
- if ret == 'unknown version':
- conf.fatal('Could not check the %s version' % module_name)
-
- from distutils.version import LooseVersion
- def num(*k):
- if isinstance(k[0], int):
- return LooseVersion('.'.join([str(x) for x in k]))
- else:
- return LooseVersion(k[0])
- d = {'num': num, 'ver': LooseVersion(ret)}
- ev = eval(condition, {}, d)
- if not ev:
- conf.fatal('The %s version does not satisfy the requirements' % module_name)
- else:
- if ret == 'unknown version':
- conf.end_msg(True)
- else:
- conf.end_msg(ret)
-
-def configure(conf):
- """
- Detect the python interpreter
- """
- v = conf.env
- if getattr(Options.options, 'pythondir', None):
- v.PYTHONDIR = Options.options.pythondir
- if getattr(Options.options, 'pythonarchdir', None):
- v.PYTHONARCHDIR = Options.options.pythonarchdir
- if getattr(Options.options, 'nopycache', None):
- v.NOPYCACHE=Options.options.nopycache
-
- if not v.PYTHON:
- v.PYTHON = [getattr(Options.options, 'python', None) or sys.executable]
- v.PYTHON = Utils.to_list(v.PYTHON)
- conf.find_program('python', var='PYTHON')
-
- v.PYFLAGS = ''
- v.PYFLAGS_OPT = '-O'
-
- v.PYC = getattr(Options.options, 'pyc', 1)
- v.PYO = getattr(Options.options, 'pyo', 1)
-
- try:
- v.PYTAG = conf.cmd_and_log(conf.env.PYTHON + ['-c', "import imp;print(imp.get_tag())"]).strip()
- except Errors.WafError:
- pass
-
-def options(opt):
- """
- Add python-specific options
- """
- pyopt=opt.add_option_group("Python Options")
- pyopt.add_option('--nopyc', dest = 'pyc', action='store_false', default=1,
- help = 'Do not install bytecode compiled .pyc files (configuration) [Default:install]')
- pyopt.add_option('--nopyo', dest='pyo', action='store_false', default=1,
- help='Do not install optimised compiled .pyo files (configuration) [Default:install]')
- pyopt.add_option('--nopycache',dest='nopycache', action='store_true',
- help='Do not use __pycache__ directory to install objects [Default:auto]')
- pyopt.add_option('--python', dest="python",
- help='python binary to be used [Default: %s]' % sys.executable)
- pyopt.add_option('--pythondir', dest='pythondir',
- help='Installation path for python modules (py, platform-independent .py and .pyc files)')
- pyopt.add_option('--pythonarchdir', dest='pythonarchdir',
- help='Installation path for python extension (pyext, platform-dependent .so or .dylib files)')
-
diff --git a/waflib/Tools/qt5.py b/waflib/Tools/qt5.py
deleted file mode 100644
index 4f9c6908..00000000
--- a/waflib/Tools/qt5.py
+++ /dev/null
@@ -1,796 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-
-"""
-This tool helps with finding Qt5 tools and libraries,
-and also provides syntactic sugar for using Qt5 tools.
-
-The following snippet illustrates the tool usage::
-
- def options(opt):
- opt.load('compiler_cxx qt5')
-
- def configure(conf):
- conf.load('compiler_cxx qt5')
-
- def build(bld):
- bld(
- features = 'qt5 cxx cxxprogram',
- uselib = 'QT5CORE QT5GUI QT5OPENGL QT5SVG',
- source = 'main.cpp textures.qrc aboutDialog.ui',
- target = 'window',
- )
-
-Here, the UI description and resource files will be processed
-to generate code.
-
-Usage
-=====
-
-Load the "qt5" tool.
-
-You also need to edit your sources accordingly:
-
-- the normal way of doing things is to have your C++ files
- include the .moc file.
- This is regarded as the best practice (and provides much faster
- compilations).
- It also implies that the include paths have beenset properly.
-
-- to have the include paths added automatically, use the following::
-
- from waflib.TaskGen import feature, before_method, after_method
- @feature('cxx')
- @after_method('process_source')
- @before_method('apply_incpaths')
- def add_includes_paths(self):
- incs = set(self.to_list(getattr(self, 'includes', '')))
- for x in self.compiled_tasks:
- incs.add(x.inputs[0].parent.path_from(self.path))
- self.includes = sorted(incs)
-
-Note: another tool provides Qt processing that does not require
-.moc includes, see 'playground/slow_qt/'.
-
-A few options (--qt{dir,bin,...}) and environment variables
-(QT5_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool,
-tool path selection, etc; please read the source for more info.
-
-The detection uses pkg-config on Linux by default. To force static library detection use:
-QT5_XCOMPILE=1 QT5_FORCE_STATIC=1 waf configure
-"""
-
-from __future__ import with_statement
-
-try:
- from xml.sax import make_parser
- from xml.sax.handler import ContentHandler
-except ImportError:
- has_xml = False
- ContentHandler = object
-else:
- has_xml = True
-
-import os, sys, re
-from waflib.Tools import cxx
-from waflib import Task, Utils, Options, Errors, Context
-from waflib.TaskGen import feature, after_method, extension, before_method
-from waflib.Configure import conf
-from waflib import Logs
-
-MOC_H = ['.h', '.hpp', '.hxx', '.hh']
-"""
-File extensions associated to .moc files
-"""
-
-EXT_RCC = ['.qrc']
-"""
-File extension for the resource (.qrc) files
-"""
-
-EXT_UI = ['.ui']
-"""
-File extension for the user interface (.ui) files
-"""
-
-EXT_QT5 = ['.cpp', '.cc', '.cxx', '.C']
-"""
-File extensions of C++ files that may require a .moc processing
-"""
-
-class qxx(Task.classes['cxx']):
- """
- Each C++ file can have zero or several .moc files to create.
- They are known only when the files are scanned (preprocessor)
- To avoid scanning the c++ files each time (parsing C/C++), the results
- are retrieved from the task cache (bld.node_deps/bld.raw_deps).
- The moc tasks are also created *dynamically* during the build.
- """
-
- def __init__(self, *k, **kw):
- Task.Task.__init__(self, *k, **kw)
- self.moc_done = 0
-
- def runnable_status(self):
- """
- Compute the task signature to make sure the scanner was executed. Create the
- moc tasks by using :py:meth:`waflib.Tools.qt5.qxx.add_moc_tasks` (if necessary),
- then postpone the task execution (there is no need to recompute the task signature).
- """
- if self.moc_done:
- return Task.Task.runnable_status(self)
- else:
- for t in self.run_after:
- if not t.hasrun:
- return Task.ASK_LATER
- self.add_moc_tasks()
- return Task.Task.runnable_status(self)
-
- def create_moc_task(self, h_node, m_node):
- """
- If several libraries use the same classes, it is possible that moc will run several times (Issue 1318)
- It is not possible to change the file names, but we can assume that the moc transformation will be identical,
- and the moc tasks can be shared in a global cache.
- """
- try:
- moc_cache = self.generator.bld.moc_cache
- except AttributeError:
- moc_cache = self.generator.bld.moc_cache = {}
-
- try:
- return moc_cache[h_node]
- except KeyError:
- tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator)
- tsk.set_inputs(h_node)
- tsk.set_outputs(m_node)
- tsk.env.append_unique('MOC_FLAGS', '-i')
-
- if self.generator:
- self.generator.tasks.append(tsk)
-
- # direct injection in the build phase (safe because called from the main thread)
- gen = self.generator.bld.producer
- gen.outstanding.append(tsk)
- gen.total += 1
-
- return tsk
-
- else:
- # remove the signature, it must be recomputed with the moc task
- delattr(self, 'cache_sig')
-
- def add_moc_tasks(self):
- """
- Creates moc tasks by looking in the list of file dependencies ``bld.raw_deps[self.uid()]``
- """
- node = self.inputs[0]
- bld = self.generator.bld
-
- try:
- # compute the signature once to know if there is a moc file to create
- self.signature()
- except KeyError:
- # the moc file may be referenced somewhere else
- pass
- else:
- # remove the signature, it must be recomputed with the moc task
- delattr(self, 'cache_sig')
-
- include_nodes = [node.parent] + self.generator.includes_nodes
-
- moctasks = []
- mocfiles = set()
- for d in bld.raw_deps.get(self.uid(), []):
- if not d.endswith('.moc'):
- continue
-
- # process that base.moc only once
- if d in mocfiles:
- continue
- mocfiles.add(d)
-
- # find the source associated with the moc file
- h_node = None
- base2 = d[:-4]
-
- # foo.moc from foo.cpp
- prefix = node.name[:node.name.rfind('.')]
- if base2 == prefix:
- h_node = node
- else:
- # this deviates from the standard
- # if bar.cpp includes foo.moc, then assume it is from foo.h
- for x in include_nodes:
- for e in MOC_H:
- h_node = x.find_node(base2 + e)
- if h_node:
- break
- else:
- continue
- break
- if h_node:
- m_node = h_node.change_ext('.moc')
- else:
- raise Errors.WafError('No source found for %r which is a moc file' % d)
-
- # create the moc task
- task = self.create_moc_task(h_node, m_node)
- moctasks.append(task)
-
- # simple scheduler dependency: run the moc task before others
- self.run_after.update(set(moctasks))
- self.moc_done = 1
-
-class trans_update(Task.Task):
- """Updates a .ts files from a list of C++ files"""
- run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
- color = 'BLUE'
-
-class XMLHandler(ContentHandler):
- """
- Parses ``.qrc`` files
- """
- def __init__(self):
- ContentHandler.__init__(self)
- self.buf = []
- self.files = []
- def startElement(self, name, attrs):
- if name == 'file':
- self.buf = []
- def endElement(self, name):
- if name == 'file':
- self.files.append(str(''.join(self.buf)))
- def characters(self, cars):
- self.buf.append(cars)
-
-@extension(*EXT_RCC)
-def create_rcc_task(self, node):
- "Creates rcc and cxx tasks for ``.qrc`` files"
- rcnode = node.change_ext('_rc.%d.cpp' % self.idx)
- self.create_task('rcc', node, rcnode)
- cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
- try:
- self.compiled_tasks.append(cpptask)
- except AttributeError:
- self.compiled_tasks = [cpptask]
- return cpptask
-
-@extension(*EXT_UI)
-def create_uic_task(self, node):
- "Create uic tasks for user interface ``.ui`` definition files"
-
- """
- If UIC file is used in more than one bld, we would have a conflict in parallel execution
- It is not possible to change the file names (like .self.idx. as for objects) as they have
- to be referenced by the source file, but we can assume that the transformation will be identical
- and the tasks can be shared in a global cache.
- """
- try:
- uic_cache = self.bld.uic_cache
- except AttributeError:
- uic_cache = self.bld.uic_cache = {}
-
- if node not in uic_cache:
- uictask = uic_cache[node] = self.create_task('ui5', node)
- uictask.outputs = [node.parent.find_or_declare(self.env.ui_PATTERN % node.name[:-3])]
-
-@extension('.ts')
-def add_lang(self, node):
- """Adds all the .ts file into ``self.lang``"""
- self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
-
-@feature('qt5')
-@before_method('process_source')
-def process_mocs(self):
- """
- Processes MOC files included in headers::
-
- def build(bld):
- bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE', moc='foo.h')
-
- The build will run moc on foo.h to create moc_foo.n.cpp. The number in the file name
- is provided to avoid name clashes when the same headers are used by several targets.
- """
- lst = self.to_nodes(getattr(self, 'moc', []))
- self.source = self.to_list(getattr(self, 'source', []))
- for x in lst:
- prefix = x.name[:x.name.rfind('.')] # foo.h -> foo
- moc_target = 'moc_%s.%d.cpp' % (prefix, self.idx)
- moc_node = x.parent.find_or_declare(moc_target)
- self.source.append(moc_node)
-
- self.create_task('moc', x, moc_node)
-
-@feature('qt5')
-@after_method('apply_link')
-def apply_qt5(self):
- """
- Adds MOC_FLAGS which may be necessary for moc::
-
- def build(bld):
- bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE')
-
- The additional parameters are:
-
- :param lang: list of translation files (\*.ts) to process
- :type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
- :param update: whether to process the C++ files to update the \*.ts files (use **waf --translate**)
- :type update: bool
- :param langname: if given, transform the \*.ts files into a .qrc files to include in the binary file
- :type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
- """
- if getattr(self, 'lang', None):
- qmtasks = []
- for x in self.to_list(self.lang):
- if isinstance(x, str):
- x = self.path.find_resource(x + '.ts')
- qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.%d.qm' % self.idx)))
-
- if getattr(self, 'update', None) and Options.options.trans_qt5:
- cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
- a.inputs[0] for a in self.tasks if a.inputs and a.inputs[0].name.endswith('.ui')]
- for x in qmtasks:
- self.create_task('trans_update', cxxnodes, x.inputs)
-
- if getattr(self, 'langname', None):
- qmnodes = [x.outputs[0] for x in qmtasks]
- rcnode = self.langname
- if isinstance(rcnode, str):
- rcnode = self.path.find_or_declare(rcnode + ('.%d.qrc' % self.idx))
- t = self.create_task('qm2rcc', qmnodes, rcnode)
- k = create_rcc_task(self, t.outputs[0])
- self.link_task.inputs.append(k.outputs[0])
-
- lst = []
- for flag in self.to_list(self.env.CXXFLAGS):
- if len(flag) < 2:
- continue
- f = flag[0:2]
- if f in ('-D', '-I', '/D', '/I'):
- if (f[0] == '/'):
- lst.append('-' + flag[1:])
- else:
- lst.append(flag)
- self.env.append_value('MOC_FLAGS', lst)
-
-@extension(*EXT_QT5)
-def cxx_hook(self, node):
- """
- Re-maps C++ file extensions to the :py:class:`waflib.Tools.qt5.qxx` task.
- """
- return self.create_compiled_task('qxx', node)
-
-class rcc(Task.Task):
- """
- Processes ``.qrc`` files
- """
- color = 'BLUE'
- run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
- ext_out = ['.h']
-
- def rcname(self):
- return os.path.splitext(self.inputs[0].name)[0]
-
- def scan(self):
- """Parse the *.qrc* files"""
- if not has_xml:
- Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
- return ([], [])
-
- parser = make_parser()
- curHandler = XMLHandler()
- parser.setContentHandler(curHandler)
- with open(self.inputs[0].abspath(), 'r') as f:
- parser.parse(f)
-
- nodes = []
- names = []
- root = self.inputs[0].parent
- for x in curHandler.files:
- nd = root.find_resource(x)
- if nd:
- nodes.append(nd)
- else:
- names.append(x)
- return (nodes, names)
-
- def quote_flag(self, x):
- """
- Override Task.quote_flag. QT parses the argument files
- differently than cl.exe and link.exe
-
- :param x: flag
- :type x: string
- :return: quoted flag
- :rtype: string
- """
- return x
-
-
-class moc(Task.Task):
- """
- Creates ``.moc`` files
- """
- color = 'BLUE'
- run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
-
- def quote_flag(self, x):
- """
- Override Task.quote_flag. QT parses the argument files
- differently than cl.exe and link.exe
-
- :param x: flag
- :type x: string
- :return: quoted flag
- :rtype: string
- """
- return x
-
-
-class ui5(Task.Task):
- """
- Processes ``.ui`` files
- """
- color = 'BLUE'
- run_str = '${QT_UIC} ${SRC} -o ${TGT}'
- ext_out = ['.h']
-
-class ts2qm(Task.Task):
- """
- Generates ``.qm`` files from ``.ts`` files
- """
- color = 'BLUE'
- run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
-
-class qm2rcc(Task.Task):
- """
- Generates ``.qrc`` files from ``.qm`` files
- """
- color = 'BLUE'
- after = 'ts2qm'
- def run(self):
- """Create a qrc file including the inputs"""
- txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
- code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
- self.outputs[0].write(code)
-
-def configure(self):
- """
- Besides the configuration options, the environment variable QT5_ROOT may be used
- to give the location of the qt5 libraries (absolute path).
-
- The detection uses the program ``pkg-config`` through :py:func:`waflib.Tools.config_c.check_cfg`
- """
- self.find_qt5_binaries()
- self.set_qt5_libs_dir()
- self.set_qt5_libs_to_check()
- self.set_qt5_defines()
- self.find_qt5_libraries()
- self.add_qt5_rpath()
- self.simplify_qt5_libs()
-
- # warn about this during the configuration too
- if not has_xml:
- Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
-
- if 'COMPILER_CXX' not in self.env:
- self.fatal('No CXX compiler defined: did you forget to configure compiler_cxx first?')
-
- # Qt5 may be compiled with '-reduce-relocations' which requires dependent programs to have -fPIE or -fPIC?
- frag = '#include <QApplication>\nint main(int argc, char **argv) {return 0;}\n'
- uses = 'QT5CORE QT5WIDGETS QT5GUI'
- for flag in [[], '-fPIE', '-fPIC', '-std=c++11' , ['-std=c++11', '-fPIE'], ['-std=c++11', '-fPIC']]:
- msg = 'See if Qt files compile '
- if flag:
- msg += 'with %s' % flag
- try:
- self.check(features='qt5 cxx', use=uses, uselib_store='qt5', cxxflags=flag, fragment=frag, msg=msg)
- except self.errors.ConfigurationError:
- pass
- else:
- break
- else:
- self.fatal('Could not build a simple Qt application')
-
- # FreeBSD does not add /usr/local/lib and the pkg-config files do not provide it either :-/
- if Utils.unversioned_sys_platform() == 'freebsd':
- frag = '#include <QApplication>\nint main(int argc, char **argv) { QApplication app(argc, argv); return NULL != (void*) (&app);}\n'
- try:
- self.check(features='qt5 cxx cxxprogram', use=uses, fragment=frag, msg='Can we link Qt programs on FreeBSD directly?')
- except self.errors.ConfigurationError:
- self.check(features='qt5 cxx cxxprogram', use=uses, uselib_store='qt5', libpath='/usr/local/lib', fragment=frag, msg='Is /usr/local/lib required?')
-
-@conf
-def find_qt5_binaries(self):
- """
- Detects Qt programs such as qmake, moc, uic, lrelease
- """
- env = self.env
- opt = Options.options
-
- qtdir = getattr(opt, 'qtdir', '')
- qtbin = getattr(opt, 'qtbin', '')
-
- paths = []
-
- if qtdir:
- qtbin = os.path.join(qtdir, 'bin')
-
- # the qt directory has been given from QT5_ROOT - deduce the qt binary path
- if not qtdir:
- qtdir = self.environ.get('QT5_ROOT', '')
- qtbin = self.environ.get('QT5_BIN') or os.path.join(qtdir, 'bin')
-
- if qtbin:
- paths = [qtbin]
-
- # no qtdir, look in the path and in /usr/local/Trolltech
- if not qtdir:
- paths = self.environ.get('PATH', '').split(os.pathsep)
- paths.extend(['/usr/share/qt5/bin', '/usr/local/lib/qt5/bin'])
- try:
- lst = Utils.listdir('/usr/local/Trolltech/')
- except OSError:
- pass
- else:
- if lst:
- lst.sort()
- lst.reverse()
-
- # keep the highest version
- qtdir = '/usr/local/Trolltech/%s/' % lst[0]
- qtbin = os.path.join(qtdir, 'bin')
- paths.append(qtbin)
-
- # at the end, try to find qmake in the paths given
- # keep the one with the highest version
- cand = None
- prev_ver = ['5', '0', '0']
- for qmk in ('qmake-qt5', 'qmake5', 'qmake'):
- try:
- qmake = self.find_program(qmk, path_list=paths)
- except self.errors.ConfigurationError:
- pass
- else:
- try:
- version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip()
- except self.errors.WafError:
- pass
- else:
- if version:
- new_ver = version.split('.')
- if new_ver > prev_ver:
- cand = qmake
- prev_ver = new_ver
-
- # qmake could not be found easily, rely on qtchooser
- if not cand:
- try:
- self.find_program('qtchooser')
- except self.errors.ConfigurationError:
- pass
- else:
- cmd = self.env.QTCHOOSER + ['-qt=5', '-run-tool=qmake']
- try:
- version = self.cmd_and_log(cmd + ['-query', 'QT_VERSION'])
- except self.errors.WafError:
- pass
- else:
- cand = cmd
-
- if cand:
- self.env.QMAKE = cand
- else:
- self.fatal('Could not find qmake for qt5')
-
- self.env.QT_HOST_BINS = qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_HOST_BINS']).strip()
- paths.insert(0, qtbin)
-
- def find_bin(lst, var):
- if var in env:
- return
- for f in lst:
- try:
- ret = self.find_program(f, path_list=paths)
- except self.errors.ConfigurationError:
- pass
- else:
- env[var]=ret
- break
-
- find_bin(['uic-qt5', 'uic'], 'QT_UIC')
- if not env.QT_UIC:
- self.fatal('cannot find the uic compiler for qt5')
-
- self.start_msg('Checking for uic version')
- uicver = self.cmd_and_log(env.QT_UIC + ['-version'], output=Context.BOTH)
- uicver = ''.join(uicver).strip()
- uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
- self.end_msg(uicver)
- if uicver.find(' 3.') != -1 or uicver.find(' 4.') != -1:
- self.fatal('this uic compiler is for qt3 or qt4, add uic for qt5 to your path')
-
- find_bin(['moc-qt5', 'moc'], 'QT_MOC')
- find_bin(['rcc-qt5', 'rcc'], 'QT_RCC')
- find_bin(['lrelease-qt5', 'lrelease'], 'QT_LRELEASE')
- find_bin(['lupdate-qt5', 'lupdate'], 'QT_LUPDATE')
-
- env.UIC_ST = '%s -o %s'
- env.MOC_ST = '-o'
- env.ui_PATTERN = 'ui_%s.h'
- env.QT_LRELEASE_FLAGS = ['-silent']
- env.MOCCPPPATH_ST = '-I%s'
- env.MOCDEFINES_ST = '-D%s'
-
-@conf
-def set_qt5_libs_dir(self):
- env = self.env
- qtlibs = getattr(Options.options, 'qtlibs', None) or self.environ.get('QT5_LIBDIR')
- if not qtlibs:
- try:
- qtlibs = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
- except Errors.WafError:
- qtdir = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip()
- qtlibs = os.path.join(qtdir, 'lib')
- self.msg('Found the Qt5 libraries in', qtlibs)
- env.QTLIBS = qtlibs
-
-@conf
-def find_single_qt5_lib(self, name, uselib, qtlibs, qtincludes, force_static):
- env = self.env
- if force_static:
- exts = ('.a', '.lib')
- prefix = 'STLIB'
- else:
- exts = ('.so', '.lib')
- prefix = 'LIB'
-
- def lib_names():
- for x in exts:
- for k in ('', '5') if Utils.is_win32 else ['']:
- for p in ('lib', ''):
- yield (p, name, k, x)
-
- for tup in lib_names():
- k = ''.join(tup)
- path = os.path.join(qtlibs, k)
- if os.path.exists(path):
- if env.DEST_OS == 'win32':
- libval = ''.join(tup[:-1])
- else:
- libval = name
- env.append_unique(prefix + '_' + uselib, libval)
- env.append_unique('%sPATH_%s' % (prefix, uselib), qtlibs)
- env.append_unique('INCLUDES_' + uselib, qtincludes)
- env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, name.replace('Qt5', 'Qt')))
- return k
- return False
-
-@conf
-def find_qt5_libraries(self):
- env = self.env
-
- qtincludes = self.environ.get('QT5_INCLUDES') or self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
- force_static = self.environ.get('QT5_FORCE_STATIC')
- try:
- if self.environ.get('QT5_XCOMPILE'):
- self.fatal('QT5_XCOMPILE Disables pkg-config detection')
- self.check_cfg(atleast_pkgconfig_version='0.1')
- except self.errors.ConfigurationError:
- for i in self.qt5_vars:
- uselib = i.upper()
- if Utils.unversioned_sys_platform() == 'darwin':
- # Since at least qt 4.7.3 each library locates in separate directory
- fwk = i.replace('Qt5', 'Qt')
- frameworkName = fwk + '.framework'
-
- qtDynamicLib = os.path.join(env.QTLIBS, frameworkName, fwk)
- if os.path.exists(qtDynamicLib):
- env.append_unique('FRAMEWORK_' + uselib, fwk)
- env.append_unique('FRAMEWORKPATH_' + uselib, env.QTLIBS)
- self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
- else:
- self.msg('Checking for %s' % i, False, 'YELLOW')
- env.append_unique('INCLUDES_' + uselib, os.path.join(env.QTLIBS, frameworkName, 'Headers'))
- else:
- ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, force_static)
- if not force_static and not ret:
- ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, True)
- self.msg('Checking for %s' % i, ret, 'GREEN' if ret else 'YELLOW')
- else:
- path = '%s:%s:%s/pkgconfig:/usr/lib/qt5/lib/pkgconfig:/opt/qt5/lib/pkgconfig:/usr/lib/qt5/lib:/opt/qt5/lib' % (
- self.environ.get('PKG_CONFIG_PATH', ''), env.QTLIBS, env.QTLIBS)
- for i in self.qt5_vars:
- self.check_cfg(package=i, args='--cflags --libs', mandatory=False, force_static=force_static, pkg_config_path=path)
-
-@conf
-def simplify_qt5_libs(self):
- """
- Since library paths make really long command-lines,
- and since everything depends on qtcore, remove the qtcore ones from qtgui, etc
- """
- env = self.env
- def process_lib(vars_, coreval):
- for d in vars_:
- var = d.upper()
- if var == 'QTCORE':
- continue
-
- value = env['LIBPATH_'+var]
- if value:
- core = env[coreval]
- accu = []
- for lib in value:
- if lib in core:
- continue
- accu.append(lib)
- env['LIBPATH_'+var] = accu
- process_lib(self.qt5_vars, 'LIBPATH_QTCORE')
-
-@conf
-def add_qt5_rpath(self):
- """
- Defines rpath entries for Qt libraries
- """
- env = self.env
- if getattr(Options.options, 'want_rpath', False):
- def process_rpath(vars_, coreval):
- for d in vars_:
- var = d.upper()
- value = env['LIBPATH_' + var]
- if value:
- core = env[coreval]
- accu = []
- for lib in value:
- if var != 'QTCORE':
- if lib in core:
- continue
- accu.append('-Wl,--rpath='+lib)
- env['RPATH_' + var] = accu
- process_rpath(self.qt5_vars, 'LIBPATH_QTCORE')
-
-@conf
-def set_qt5_libs_to_check(self):
- self.qt5_vars = Utils.to_list(getattr(self, 'qt5_vars', []))
- if not self.qt5_vars:
- dirlst = Utils.listdir(self.env.QTLIBS)
-
- pat = self.env.cxxshlib_PATTERN
- if Utils.is_win32:
- pat = pat.replace('.dll', '.lib')
- if self.environ.get('QT5_FORCE_STATIC'):
- pat = self.env.cxxstlib_PATTERN
- if Utils.unversioned_sys_platform() == 'darwin':
- pat = "%s\.framework"
- re_qt = re.compile(pat%'Qt5?(?P<name>.*)'+'$')
- for x in dirlst:
- m = re_qt.match(x)
- if m:
- self.qt5_vars.append("Qt5%s" % m.group('name'))
- if not self.qt5_vars:
- self.fatal('cannot find any Qt5 library (%r)' % self.env.QTLIBS)
-
- qtextralibs = getattr(Options.options, 'qtextralibs', None)
- if qtextralibs:
- self.qt5_vars.extend(qtextralibs.split(','))
-
-@conf
-def set_qt5_defines(self):
- if sys.platform != 'win32':
- return
- for x in self.qt5_vars:
- y=x.replace('Qt5', 'Qt')[2:].upper()
- self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y)
-
-def options(opt):
- """
- Command-line options
- """
- opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')
- for i in 'qtdir qtbin qtlibs'.split():
- opt.add_option('--'+i, type='string', default='', dest=i)
-
- opt.add_option('--translate', action='store_true', help='collect translation strings', dest='trans_qt5', default=False)
- opt.add_option('--qtextralibs', type='string', default='', dest='qtextralibs', help='additional qt libraries on the system to add to default ones, comma separated')
-
diff --git a/waflib/Tools/ruby.py b/waflib/Tools/ruby.py
deleted file mode 100644
index 8d92a79a..00000000
--- a/waflib/Tools/ruby.py
+++ /dev/null
@@ -1,186 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# daniel.svensson at purplescout.se 2008
-# Thomas Nagy 2016-2018 (ita)
-
-"""
-Support for Ruby extensions. A C/C++ compiler is required::
-
- def options(opt):
- opt.load('compiler_c ruby')
- def configure(conf):
- conf.load('compiler_c ruby')
- conf.check_ruby_version((1,8,0))
- conf.check_ruby_ext_devel()
- conf.check_ruby_module('libxml')
- def build(bld):
- bld(
- features = 'c cshlib rubyext',
- source = 'rb_mytest.c',
- target = 'mytest_ext',
- install_path = '${ARCHDIR_RUBY}')
- bld.install_files('${LIBDIR_RUBY}', 'Mytest.rb')
-"""
-
-import os
-from waflib import Errors, Options, Task, Utils
-from waflib.TaskGen import before_method, feature, extension
-from waflib.Configure import conf
-
-@feature('rubyext')
-@before_method('apply_incpaths', 'process_source', 'apply_bundle', 'apply_link')
-def init_rubyext(self):
- """
- Add required variables for ruby extensions
- """
- self.install_path = '${ARCHDIR_RUBY}'
- self.uselib = self.to_list(getattr(self, 'uselib', ''))
- if not 'RUBY' in self.uselib:
- self.uselib.append('RUBY')
- if not 'RUBYEXT' in self.uselib:
- self.uselib.append('RUBYEXT')
-
-@feature('rubyext')
-@before_method('apply_link', 'propagate_uselib_vars')
-def apply_ruby_so_name(self):
- """
- Strip the *lib* prefix from ruby extensions
- """
- self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.rubyext_PATTERN
-
-@conf
-def check_ruby_version(self, minver=()):
- """
- Checks if ruby is installed.
- If installed the variable RUBY will be set in environment.
- The ruby binary can be overridden by ``--with-ruby-binary`` command-line option.
- """
-
- ruby = self.find_program('ruby', var='RUBY', value=Options.options.rubybinary)
-
- try:
- version = self.cmd_and_log(ruby + ['-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
- except Errors.WafError:
- self.fatal('could not determine ruby version')
- self.env.RUBY_VERSION = version
-
- try:
- ver = tuple(map(int, version.split('.')))
- except Errors.WafError:
- self.fatal('unsupported ruby version %r' % version)
-
- cver = ''
- if minver:
- cver = '> ' + '.'.join(str(x) for x in minver)
- if ver < minver:
- self.fatal('ruby is too old %r' % ver)
-
- self.msg('Checking for ruby version %s' % cver, version)
-
-@conf
-def check_ruby_ext_devel(self):
- """
- Check if a ruby extension can be created
- """
- if not self.env.RUBY:
- self.fatal('ruby detection is required first')
-
- if not self.env.CC_NAME and not self.env.CXX_NAME:
- self.fatal('load a c/c++ compiler first')
-
- version = tuple(map(int, self.env.RUBY_VERSION.split(".")))
-
- def read_out(cmd):
- return Utils.to_list(self.cmd_and_log(self.env.RUBY + ['-rrbconfig', '-e', cmd]))
-
- def read_config(key):
- return read_out('puts RbConfig::CONFIG[%r]' % key)
-
- cpppath = archdir = read_config('archdir')
-
- if version >= (1, 9, 0):
- ruby_hdrdir = read_config('rubyhdrdir')
- cpppath += ruby_hdrdir
- if version >= (2, 0, 0):
- cpppath += read_config('rubyarchhdrdir')
- cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])]
-
- self.check(header_name='ruby.h', includes=cpppath, errmsg='could not find ruby header file', link_header_test=False)
-
- self.env.LIBPATH_RUBYEXT = read_config('libdir')
- self.env.LIBPATH_RUBYEXT += archdir
- self.env.INCLUDES_RUBYEXT = cpppath
- self.env.CFLAGS_RUBYEXT = read_config('CCDLFLAGS')
- self.env.rubyext_PATTERN = '%s.' + read_config('DLEXT')[0]
-
- # ok this is really stupid, but the command and flags are combined.
- # so we try to find the first argument...
- flags = read_config('LDSHARED')
- while flags and flags[0][0] != '-':
- flags = flags[1:]
-
- # we also want to strip out the deprecated ppc flags
- if len(flags) > 1 and flags[1] == "ppc":
- flags = flags[2:]
-
- self.env.LINKFLAGS_RUBYEXT = flags
- self.env.LINKFLAGS_RUBYEXT += read_config('LIBS')
- self.env.LINKFLAGS_RUBYEXT += read_config('LIBRUBYARG_SHARED')
-
- if Options.options.rubyarchdir:
- self.env.ARCHDIR_RUBY = Options.options.rubyarchdir
- else:
- self.env.ARCHDIR_RUBY = read_config('sitearchdir')[0]
-
- if Options.options.rubylibdir:
- self.env.LIBDIR_RUBY = Options.options.rubylibdir
- else:
- self.env.LIBDIR_RUBY = read_config('sitelibdir')[0]
-
-@conf
-def check_ruby_module(self, module_name):
- """
- Check if the selected ruby interpreter can require the given ruby module::
-
- def configure(conf):
- conf.check_ruby_module('libxml')
-
- :param module_name: module
- :type module_name: string
- """
- self.start_msg('Ruby module %s' % module_name)
- try:
- self.cmd_and_log(self.env.RUBY + ['-e', 'require \'%s\';puts 1' % module_name])
- except Errors.WafError:
- self.end_msg(False)
- self.fatal('Could not find the ruby module %r' % module_name)
- self.end_msg(True)
-
-@extension('.rb')
-def process(self, node):
- return self.create_task('run_ruby', node)
-
-class run_ruby(Task.Task):
- """
- Task to run ruby files detected by file extension .rb::
-
- def options(opt):
- opt.load('ruby')
-
- def configure(ctx):
- ctx.check_ruby_version()
-
- def build(bld):
- bld.env.RBFLAGS = '-e puts "hello world"'
- bld(source='a_ruby_file.rb')
- """
- run_str = '${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}'
-
-def options(opt):
- """
- Add the ``--with-ruby-archdir``, ``--with-ruby-libdir`` and ``--with-ruby-binary`` options
- """
- opt.add_option('--with-ruby-archdir', type='string', dest='rubyarchdir', help='Specify directory where to install arch specific files')
- opt.add_option('--with-ruby-libdir', type='string', dest='rubylibdir', help='Specify alternate ruby library path')
- opt.add_option('--with-ruby-binary', type='string', dest='rubybinary', help='Specify alternate ruby binary')
-
diff --git a/waflib/Tools/suncc.py b/waflib/Tools/suncc.py
index 33d34fc9..4909854e 100644
--- a/waflib/Tools/suncc.py
+++ b/waflib/Tools/suncc.py
@@ -1,26 +1,27 @@
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
+# Thomas Nagy, 2006-2010 (ita)
# Ralf Habacker, 2006 (rh)
-from waflib import Errors
from waflib.Tools import ccroot, ar
from waflib.Configure import conf
@conf
def find_scc(conf):
"""
- Detects the Sun C compiler
+ Detect the Sun C compiler
"""
v = conf.env
cc = conf.find_program('cc', var='CC')
+
try:
conf.cmd_and_log(cc + ['-flags'])
- except Errors.WafError:
+ except Exception:
conf.fatal('%r is not a Sun compiler' % cc)
v.CC_NAME = 'sun'
conf.get_suncc_version(cc)
+
@conf
def scc_common_flags(conf):
"""
@@ -28,34 +29,36 @@ def scc_common_flags(conf):
"""
v = conf.env
- v.CC_SRC_F = []
- v.CC_TGT_F = ['-c', '-o', '']
+ v['CC_SRC_F'] = []
+ v['CC_TGT_F'] = ['-c', '-o']
- if not v.LINK_CC:
- v.LINK_CC = v.CC
+ # linker
+ if not v['LINK_CC']: v['LINK_CC'] = v['CC']
+ v['CCLNK_SRC_F'] = ''
+ v['CCLNK_TGT_F'] = ['-o']
+ v['CPPPATH_ST'] = '-I%s'
+ v['DEFINES_ST'] = '-D%s'
- v.CCLNK_SRC_F = ''
- v.CCLNK_TGT_F = ['-o', '']
- v.CPPPATH_ST = '-I%s'
- v.DEFINES_ST = '-D%s'
+ v['LIB_ST'] = '-l%s' # template for adding libs
+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
+ v['STLIB_ST'] = '-l%s'
+ v['STLIBPATH_ST'] = '-L%s'
- v.LIB_ST = '-l%s' # template for adding libs
- v.LIBPATH_ST = '-L%s' # template for adding libpaths
- v.STLIB_ST = '-l%s'
- v.STLIBPATH_ST = '-L%s'
+ v['SONAME_ST'] = '-Wl,-h,%s'
+ v['SHLIB_MARKER'] = '-Bdynamic'
+ v['STLIB_MARKER'] = '-Bstatic'
- v.SONAME_ST = '-Wl,-h,%s'
- v.SHLIB_MARKER = '-Bdynamic'
- v.STLIB_MARKER = '-Bstatic'
+ # program
+ v['cprogram_PATTERN'] = '%s'
- v.cprogram_PATTERN = '%s'
+ # shared library
+ v['CFLAGS_cshlib'] = ['-xcode=pic32', '-DPIC']
+ v['LINKFLAGS_cshlib'] = ['-G']
+ v['cshlib_PATTERN'] = 'lib%s.so'
- v.CFLAGS_cshlib = ['-xcode=pic32', '-DPIC']
- v.LINKFLAGS_cshlib = ['-G']
- v.cshlib_PATTERN = 'lib%s.so'
-
- v.LINKFLAGS_cstlib = ['-Bstatic']
- v.cstlib_PATTERN = 'lib%s.a'
+ # static lib
+ v['LINKFLAGS_cstlib'] = ['-Bstatic']
+ v['cstlib_PATTERN'] = 'lib%s.a'
def configure(conf):
conf.find_scc()
@@ -64,4 +67,3 @@ def configure(conf):
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
-
diff --git a/waflib/Tools/suncxx.py b/waflib/Tools/suncxx.py
index 3b384f6f..5a04b483 100644
--- a/waflib/Tools/suncxx.py
+++ b/waflib/Tools/suncxx.py
@@ -1,22 +1,21 @@
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
+# Thomas Nagy, 2006-2010 (ita)
# Ralf Habacker, 2006 (rh)
-from waflib import Errors
from waflib.Tools import ccroot, ar
from waflib.Configure import conf
@conf
def find_sxx(conf):
"""
- Detects the sun C++ compiler
+ Detect the sun C++ compiler
"""
v = conf.env
cc = conf.find_program(['CC', 'c++'], var='CXX')
try:
conf.cmd_and_log(cc + ['-flags'])
- except Errors.WafError:
+ except Exception:
conf.fatal('%r is not a Sun compiler' % cc)
v.CXX_NAME = 'sun'
conf.get_suncc_version(cc)
@@ -28,34 +27,36 @@ def sxx_common_flags(conf):
"""
v = conf.env
- v.CXX_SRC_F = []
- v.CXX_TGT_F = ['-c', '-o', '']
+ v['CXX_SRC_F'] = []
+ v['CXX_TGT_F'] = ['-c', '-o']
- if not v.LINK_CXX:
- v.LINK_CXX = v.CXX
+ # linker
+ if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
+ v['CXXLNK_SRC_F'] = []
+ v['CXXLNK_TGT_F'] = ['-o']
+ v['CPPPATH_ST'] = '-I%s'
+ v['DEFINES_ST'] = '-D%s'
- v.CXXLNK_SRC_F = []
- v.CXXLNK_TGT_F = ['-o', '']
- v.CPPPATH_ST = '-I%s'
- v.DEFINES_ST = '-D%s'
+ v['LIB_ST'] = '-l%s' # template for adding libs
+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
+ v['STLIB_ST'] = '-l%s'
+ v['STLIBPATH_ST'] = '-L%s'
- v.LIB_ST = '-l%s' # template for adding libs
- v.LIBPATH_ST = '-L%s' # template for adding libpaths
- v.STLIB_ST = '-l%s'
- v.STLIBPATH_ST = '-L%s'
+ v['SONAME_ST'] = '-Wl,-h,%s'
+ v['SHLIB_MARKER'] = '-Bdynamic'
+ v['STLIB_MARKER'] = '-Bstatic'
- v.SONAME_ST = '-Wl,-h,%s'
- v.SHLIB_MARKER = '-Bdynamic'
- v.STLIB_MARKER = '-Bstatic'
+ # program
+ v['cxxprogram_PATTERN'] = '%s'
- v.cxxprogram_PATTERN = '%s'
+ # shared library
+ v['CXXFLAGS_cxxshlib'] = ['-xcode=pic32', '-DPIC']
+ v['LINKFLAGS_cxxshlib'] = ['-G']
+ v['cxxshlib_PATTERN'] = 'lib%s.so'
- v.CXXFLAGS_cxxshlib = ['-xcode=pic32', '-DPIC']
- v.LINKFLAGS_cxxshlib = ['-G']
- v.cxxshlib_PATTERN = 'lib%s.so'
-
- v.LINKFLAGS_cxxstlib = ['-Bstatic']
- v.cxxstlib_PATTERN = 'lib%s.a'
+ # static lib
+ v['LINKFLAGS_cxxstlib'] = ['-Bstatic']
+ v['cxxstlib_PATTERN'] = 'lib%s.a'
def configure(conf):
conf.find_sxx()
diff --git a/waflib/Tools/tex.py b/waflib/Tools/tex.py
deleted file mode 100644
index eaf9fdb5..00000000
--- a/waflib/Tools/tex.py
+++ /dev/null
@@ -1,543 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-
-"""
-TeX/LaTeX/PDFLaTeX/XeLaTeX support
-
-Example::
-
- def configure(conf):
- conf.load('tex')
- if not conf.env.LATEX:
- conf.fatal('The program LaTex is required')
-
- def build(bld):
- bld(
- features = 'tex',
- type = 'latex', # pdflatex or xelatex
- source = 'document.ltx', # mandatory, the source
- outs = 'ps', # 'pdf' or 'ps pdf'
- deps = 'crossreferencing.lst', # to give dependencies directly
- prompt = 1, # 0 for the batch mode
- )
-
-Notes:
-
-- To configure with a special program, use::
-
- $ PDFLATEX=luatex waf configure
-
-- This tool does not use the target attribute of the task generator
- (``bld(target=...)``); the target file name is built from the source
- base name and the output type(s)
-"""
-
-import os, re
-from waflib import Utils, Task, Errors, Logs, Node
-from waflib.TaskGen import feature, before_method
-
-re_bibunit = re.compile(r'\\(?P<type>putbib)\[(?P<file>[^\[\]]*)\]',re.M)
-def bibunitscan(self):
- """
- Parses TeX inputs and try to find the *bibunit* file dependencies
-
- :return: list of bibunit files
- :rtype: list of :py:class:`waflib.Node.Node`
- """
- node = self.inputs[0]
-
- nodes = []
- if not node:
- return nodes
-
- code = node.read()
- for match in re_bibunit.finditer(code):
- path = match.group('file')
- if path:
- found = None
- for k in ('', '.bib'):
- # add another loop for the tex include paths?
- Logs.debug('tex: trying %s%s', path, k)
- fi = node.parent.find_resource(path + k)
- if fi:
- found = True
- nodes.append(fi)
- # no break
- if not found:
- Logs.debug('tex: could not find %s', path)
-
- Logs.debug('tex: found the following bibunit files: %s', nodes)
- return nodes
-
-exts_deps_tex = ['', '.ltx', '.tex', '.bib', '.pdf', '.png', '.eps', '.ps', '.sty']
-"""List of typical file extensions included in latex files"""
-
-exts_tex = ['.ltx', '.tex']
-"""List of typical file extensions that contain latex"""
-
-re_tex = re.compile(r'\\(?P<type>usepackage|RequirePackage|include|bibliography([^\[\]{}]*)|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M)
-"""Regexp for expressions that may include latex files"""
-
-g_bibtex_re = re.compile('bibdata', re.M)
-"""Regexp for bibtex files"""
-
-g_glossaries_re = re.compile('\\@newglossary', re.M)
-"""Regexp for expressions that create glossaries"""
-
-class tex(Task.Task):
- """
- Compiles a tex/latex file.
-
- .. inheritance-diagram:: waflib.Tools.tex.latex waflib.Tools.tex.xelatex waflib.Tools.tex.pdflatex
- """
-
- bibtex_fun, _ = Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}', shell=False)
- bibtex_fun.__doc__ = """
- Execute the program **bibtex**
- """
-
- makeindex_fun, _ = Task.compile_fun('${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}', shell=False)
- makeindex_fun.__doc__ = """
- Execute the program **makeindex**
- """
-
- makeglossaries_fun, _ = Task.compile_fun('${MAKEGLOSSARIES} ${SRCFILE}', shell=False)
- makeglossaries_fun.__doc__ = """
- Execute the program **makeglossaries**
- """
-
- def exec_command(self, cmd, **kw):
- """
- Executes TeX commands without buffering (latex may prompt for inputs)
-
- :return: the return code
- :rtype: int
- """
- if self.env.PROMPT_LATEX:
- # capture the outputs in configuration tests
- kw['stdout'] = kw['stderr'] = None
- return super(tex, self).exec_command(cmd, **kw)
-
- def scan_aux(self, node):
- """
- Recursive regex-based scanner that finds included auxiliary files.
- """
- nodes = [node]
- re_aux = re.compile(r'\\@input{(?P<file>[^{}]*)}', re.M)
-
- def parse_node(node):
- code = node.read()
- for match in re_aux.finditer(code):
- path = match.group('file')
- found = node.parent.find_or_declare(path)
- if found and found not in nodes:
- Logs.debug('tex: found aux node %r', found)
- nodes.append(found)
- parse_node(found)
- parse_node(node)
- return nodes
-
- def scan(self):
- """
- Recursive regex-based scanner that finds latex dependencies. It uses :py:attr:`waflib.Tools.tex.re_tex`
-
- Depending on your needs you might want:
-
- * to change re_tex::
-
- from waflib.Tools import tex
- tex.re_tex = myregex
-
- * or to change the method scan from the latex tasks::
-
- from waflib.Task import classes
- classes['latex'].scan = myscanfunction
- """
- node = self.inputs[0]
-
- nodes = []
- names = []
- seen = []
- if not node:
- return (nodes, names)
-
- def parse_node(node):
- if node in seen:
- return
- seen.append(node)
- code = node.read()
- for match in re_tex.finditer(code):
-
- multibib = match.group('type')
- if multibib and multibib.startswith('bibliography'):
- multibib = multibib[len('bibliography'):]
- if multibib.startswith('style'):
- continue
- else:
- multibib = None
-
- for path in match.group('file').split(','):
- if path:
- add_name = True
- found = None
- for k in exts_deps_tex:
-
- # issue 1067, scan in all texinputs folders
- for up in self.texinputs_nodes:
- Logs.debug('tex: trying %s%s', path, k)
- found = up.find_resource(path + k)
- if found:
- break
-
-
- for tsk in self.generator.tasks:
- if not found or found in tsk.outputs:
- break
- else:
- nodes.append(found)
- add_name = False
- for ext in exts_tex:
- if found.name.endswith(ext):
- parse_node(found)
- break
-
- # multibib stuff
- if found and multibib and found.name.endswith('.bib'):
- try:
- self.multibibs.append(found)
- except AttributeError:
- self.multibibs = [found]
-
- # no break, people are crazy
- if add_name:
- names.append(path)
- parse_node(node)
-
- for x in nodes:
- x.parent.get_bld().mkdir()
-
- Logs.debug("tex: found the following : %s and names %s", nodes, names)
- return (nodes, names)
-
- def check_status(self, msg, retcode):
- """
- Checks an exit status and raise an error with a particular message
-
- :param msg: message to display if the code is non-zero
- :type msg: string
- :param retcode: condition
- :type retcode: boolean
- """
- if retcode != 0:
- raise Errors.WafError('%r command exit status %r' % (msg, retcode))
-
- def info(self, *k, **kw):
- try:
- info = self.generator.bld.conf.logger.info
- except AttributeError:
- info = Logs.info
- info(*k, **kw)
-
- def bibfile(self):
- """
- Parses *.aux* files to find bibfiles to process.
- If present, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`
- """
- for aux_node in self.aux_nodes:
- try:
- ct = aux_node.read()
- except EnvironmentError:
- Logs.error('Error reading %s: %r', aux_node.abspath())
- continue
-
- if g_bibtex_re.findall(ct):
- self.info('calling bibtex')
-
- self.env.env = {}
- self.env.env.update(os.environ)
- self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
- self.env.SRCFILE = aux_node.name[:-4]
- self.check_status('error when calling bibtex', self.bibtex_fun())
-
- for node in getattr(self, 'multibibs', []):
- self.env.env = {}
- self.env.env.update(os.environ)
- self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
- self.env.SRCFILE = node.name[:-4]
- self.check_status('error when calling bibtex', self.bibtex_fun())
-
- def bibunits(self):
- """
- Parses *.aux* file to find bibunit files. If there are bibunit files,
- runs :py:meth:`waflib.Tools.tex.tex.bibtex_fun`.
- """
- try:
- bibunits = bibunitscan(self)
- except OSError:
- Logs.error('error bibunitscan')
- else:
- if bibunits:
- fn = ['bu' + str(i) for i in range(1, len(bibunits) + 1)]
- if fn:
- self.info('calling bibtex on bibunits')
-
- for f in fn:
- self.env.env = {'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()}
- self.env.SRCFILE = f
- self.check_status('error when calling bibtex', self.bibtex_fun())
-
- def makeindex(self):
- """
- Searches the filesystem for *.idx* files to process. If present,
- runs :py:meth:`waflib.Tools.tex.tex.makeindex_fun`
- """
- self.idx_node = self.inputs[0].change_ext('.idx')
- try:
- idx_path = self.idx_node.abspath()
- os.stat(idx_path)
- except OSError:
- self.info('index file %s absent, not calling makeindex', idx_path)
- else:
- self.info('calling makeindex')
-
- self.env.SRCFILE = self.idx_node.name
- self.env.env = {}
- self.check_status('error when calling makeindex %s' % idx_path, self.makeindex_fun())
-
- def bibtopic(self):
- """
- Lists additional .aux files from the bibtopic package
- """
- p = self.inputs[0].parent.get_bld()
- if os.path.exists(os.path.join(p.abspath(), 'btaux.aux')):
- self.aux_nodes += p.ant_glob('*[0-9].aux')
-
- def makeglossaries(self):
- """
- Lists additional glossaries from .aux files. If present, runs the makeglossaries program.
- """
- src_file = self.inputs[0].abspath()
- base_file = os.path.basename(src_file)
- base, _ = os.path.splitext(base_file)
- for aux_node in self.aux_nodes:
- try:
- ct = aux_node.read()
- except EnvironmentError:
- Logs.error('Error reading %s: %r', aux_node.abspath())
- continue
-
- if g_glossaries_re.findall(ct):
- if not self.env.MAKEGLOSSARIES:
- raise Errors.WafError("The program 'makeglossaries' is missing!")
- Logs.warn('calling makeglossaries')
- self.env.SRCFILE = base
- self.check_status('error when calling makeglossaries %s' % base, self.makeglossaries_fun())
- return
-
- def texinputs(self):
- """
- Returns the list of texinput nodes as a string suitable for the TEXINPUTS environment variables
-
- :rtype: string
- """
- return os.pathsep.join([k.abspath() for k in self.texinputs_nodes]) + os.pathsep
-
- def run(self):
- """
- Runs the whole TeX build process
-
- Multiple passes are required depending on the usage of cross-references,
- bibliographies, glossaries, indexes and additional contents
- The appropriate TeX compiler is called until the *.aux* files stop changing.
- """
- env = self.env
-
- if not env.PROMPT_LATEX:
- env.append_value('LATEXFLAGS', '-interaction=batchmode')
- env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
- env.append_value('XELATEXFLAGS', '-interaction=batchmode')
-
- # important, set the cwd for everybody
- self.cwd = self.inputs[0].parent.get_bld()
-
- self.info('first pass on %s', self.__class__.__name__)
-
- # Hash .aux files before even calling the LaTeX compiler
- cur_hash = self.hash_aux_nodes()
-
- self.call_latex()
-
- # Find the .aux files again since bibtex processing can require it
- self.hash_aux_nodes()
-
- self.bibtopic()
- self.bibfile()
- self.bibunits()
- self.makeindex()
- self.makeglossaries()
-
- for i in range(10):
- # There is no need to call latex again if the .aux hash value has not changed
- prev_hash = cur_hash
- cur_hash = self.hash_aux_nodes()
- if not cur_hash:
- Logs.error('No aux.h to process')
- if cur_hash and cur_hash == prev_hash:
- break
-
- # run the command
- self.info('calling %s', self.__class__.__name__)
- self.call_latex()
-
- def hash_aux_nodes(self):
- """
- Returns a hash of the .aux file contents
-
- :rtype: string or bytes
- """
- try:
- self.aux_nodes
- except AttributeError:
- try:
- self.aux_nodes = self.scan_aux(self.inputs[0].change_ext('.aux'))
- except IOError:
- return None
- return Utils.h_list([Utils.h_file(x.abspath()) for x in self.aux_nodes])
-
- def call_latex(self):
- """
- Runs the TeX compiler once
- """
- self.env.env = {}
- self.env.env.update(os.environ)
- self.env.env.update({'TEXINPUTS': self.texinputs()})
- self.env.SRCFILE = self.inputs[0].abspath()
- self.check_status('error when calling latex', self.texfun())
-
-class latex(tex):
- "Compiles LaTeX files"
- texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
-
-class pdflatex(tex):
- "Compiles PdfLaTeX files"
- texfun, vars = Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
-
-class xelatex(tex):
- "XeLaTeX files"
- texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)
-
-class dvips(Task.Task):
- "Converts dvi files to postscript"
- run_str = '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}'
- color = 'BLUE'
- after = ['latex', 'pdflatex', 'xelatex']
-
-class dvipdf(Task.Task):
- "Converts dvi files to pdf"
- run_str = '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}'
- color = 'BLUE'
- after = ['latex', 'pdflatex', 'xelatex']
-
-class pdf2ps(Task.Task):
- "Converts pdf files to postscript"
- run_str = '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}'
- color = 'BLUE'
- after = ['latex', 'pdflatex', 'xelatex']
-
-@feature('tex')
-@before_method('process_source')
-def apply_tex(self):
- """
- Creates :py:class:`waflib.Tools.tex.tex` objects, and
- dvips/dvipdf/pdf2ps tasks if necessary (outs='ps', etc).
- """
- if not getattr(self, 'type', None) in ('latex', 'pdflatex', 'xelatex'):
- self.type = 'pdflatex'
-
- outs = Utils.to_list(getattr(self, 'outs', []))
-
- # prompt for incomplete files (else the batchmode is used)
- try:
- self.generator.bld.conf
- except AttributeError:
- default_prompt = False
- else:
- default_prompt = True
- self.env.PROMPT_LATEX = getattr(self, 'prompt', default_prompt)
-
- deps_lst = []
-
- if getattr(self, 'deps', None):
- deps = self.to_list(self.deps)
- for dep in deps:
- if isinstance(dep, str):
- n = self.path.find_resource(dep)
- if not n:
- self.bld.fatal('Could not find %r for %r' % (dep, self))
- if not n in deps_lst:
- deps_lst.append(n)
- elif isinstance(dep, Node.Node):
- deps_lst.append(dep)
-
- for node in self.to_nodes(self.source):
- if self.type == 'latex':
- task = self.create_task('latex', node, node.change_ext('.dvi'))
- elif self.type == 'pdflatex':
- task = self.create_task('pdflatex', node, node.change_ext('.pdf'))
- elif self.type == 'xelatex':
- task = self.create_task('xelatex', node, node.change_ext('.pdf'))
-
- task.env = self.env
-
- # add the manual dependencies
- if deps_lst:
- for n in deps_lst:
- if not n in task.dep_nodes:
- task.dep_nodes.append(n)
-
- # texinputs is a nasty beast
- if hasattr(self, 'texinputs_nodes'):
- task.texinputs_nodes = self.texinputs_nodes
- else:
- task.texinputs_nodes = [node.parent, node.parent.get_bld(), self.path, self.path.get_bld()]
- lst = os.environ.get('TEXINPUTS', '')
- if self.env.TEXINPUTS:
- lst += os.pathsep + self.env.TEXINPUTS
- if lst:
- lst = lst.split(os.pathsep)
- for x in lst:
- if x:
- if os.path.isabs(x):
- p = self.bld.root.find_node(x)
- if p:
- task.texinputs_nodes.append(p)
- else:
- Logs.error('Invalid TEXINPUTS folder %s', x)
- else:
- Logs.error('Cannot resolve relative paths in TEXINPUTS %s', x)
-
- if self.type == 'latex':
- if 'ps' in outs:
- tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps'))
- tsk.env.env = dict(os.environ)
- if 'pdf' in outs:
- tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
- tsk.env.env = dict(os.environ)
- elif self.type == 'pdflatex':
- if 'ps' in outs:
- self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
- self.source = []
-
-def configure(self):
- """
- Find the programs tex, latex and others without raising errors.
- """
- v = self.env
- for p in 'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps makeglossaries'.split():
- try:
- self.find_program(p, var=p.upper())
- except self.errors.ConfigurationError:
- pass
- v.DVIPSFLAGS = '-Ppdf'
-
diff --git a/waflib/Tools/vala.py b/waflib/Tools/vala.py
deleted file mode 100644
index 822ec502..00000000
--- a/waflib/Tools/vala.py
+++ /dev/null
@@ -1,355 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Ali Sabil, 2007
-# Radosław Szkodziński, 2010
-
-"""
-At this point, vala is still unstable, so do not expect
-this tool to be too stable either (apis, etc)
-"""
-
-import re
-from waflib import Build, Context, Errors, Logs, Node, Options, Task, Utils
-from waflib.TaskGen import extension, taskgen_method
-from waflib.Configure import conf
-
-class valac(Task.Task):
- """
- Compiles vala files
- """
- #run_str = "${VALAC} ${VALAFLAGS}" # ideally
- #vars = ['VALAC_VERSION']
- vars = ["VALAC", "VALAC_VERSION", "VALAFLAGS"]
- ext_out = ['.h']
-
- def run(self):
- cmd = self.env.VALAC + self.env.VALAFLAGS
- resources = getattr(self, 'vala_exclude', [])
- cmd.extend([a.abspath() for a in self.inputs if a not in resources])
- ret = self.exec_command(cmd, cwd=self.vala_dir_node.abspath())
-
- if ret:
- return ret
-
- if self.generator.dump_deps_node:
- self.generator.dump_deps_node.write('\n'.join(self.generator.packages))
-
- return ret
-
-@taskgen_method
-def init_vala_task(self):
- """
- Initializes the vala task with the relevant data (acts as a constructor)
- """
- self.profile = getattr(self, 'profile', 'gobject')
-
- self.packages = packages = Utils.to_list(getattr(self, 'packages', []))
- self.use = Utils.to_list(getattr(self, 'use', []))
- if packages and not self.use:
- self.use = packages[:] # copy
-
- if self.profile == 'gobject':
- if not 'GOBJECT' in self.use:
- self.use.append('GOBJECT')
-
- def addflags(flags):
- self.env.append_value('VALAFLAGS', flags)
-
- if self.profile:
- addflags('--profile=%s' % self.profile)
-
- valatask = self.valatask
-
- # output directory
- if hasattr(self, 'vala_dir'):
- if isinstance(self.vala_dir, str):
- valatask.vala_dir_node = self.path.get_bld().make_node(self.vala_dir)
- try:
- valatask.vala_dir_node.mkdir()
- except OSError:
- raise self.bld.fatal('Cannot create the vala dir %r' % valatask.vala_dir_node)
- else:
- valatask.vala_dir_node = self.vala_dir
- else:
- valatask.vala_dir_node = self.path.get_bld()
- addflags('--directory=%s' % valatask.vala_dir_node.abspath())
-
- if hasattr(self, 'thread'):
- if self.profile == 'gobject':
- if not 'GTHREAD' in self.use:
- self.use.append('GTHREAD')
- else:
- #Vala doesn't have threading support for dova nor posix
- Logs.warn('Profile %s means no threading support', self.profile)
- self.thread = False
-
- if self.thread:
- addflags('--thread')
-
- self.is_lib = 'cprogram' not in self.features
- if self.is_lib:
- addflags('--library=%s' % self.target)
-
- h_node = valatask.vala_dir_node.find_or_declare('%s.h' % self.target)
- valatask.outputs.append(h_node)
- addflags('--header=%s' % h_node.name)
-
- valatask.outputs.append(valatask.vala_dir_node.find_or_declare('%s.vapi' % self.target))
-
- if getattr(self, 'gir', None):
- gir_node = valatask.vala_dir_node.find_or_declare('%s.gir' % self.gir)
- addflags('--gir=%s' % gir_node.name)
- valatask.outputs.append(gir_node)
-
- self.vala_target_glib = getattr(self, 'vala_target_glib', getattr(Options.options, 'vala_target_glib', None))
- if self.vala_target_glib:
- addflags('--target-glib=%s' % self.vala_target_glib)
-
- addflags(['--define=%s' % x for x in Utils.to_list(getattr(self, 'vala_defines', []))])
-
- packages_private = Utils.to_list(getattr(self, 'packages_private', []))
- addflags(['--pkg=%s' % x for x in packages_private])
-
- def _get_api_version():
- api_version = '1.0'
- if hasattr(Context.g_module, 'API_VERSION'):
- version = Context.g_module.API_VERSION.split(".")
- if version[0] == "0":
- api_version = "0." + version[1]
- else:
- api_version = version[0] + ".0"
- return api_version
-
- self.includes = Utils.to_list(getattr(self, 'includes', []))
- valatask.install_path = getattr(self, 'install_path', '')
-
- valatask.vapi_path = getattr(self, 'vapi_path', '${DATAROOTDIR}/vala/vapi')
- valatask.pkg_name = getattr(self, 'pkg_name', self.env.PACKAGE)
- valatask.header_path = getattr(self, 'header_path', '${INCLUDEDIR}/%s-%s' % (valatask.pkg_name, _get_api_version()))
- valatask.install_binding = getattr(self, 'install_binding', True)
-
- self.vapi_dirs = vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
- #includes = []
-
- if hasattr(self, 'use'):
- local_packages = Utils.to_list(self.use)[:] # make sure to have a copy
- seen = []
- while len(local_packages) > 0:
- package = local_packages.pop()
- if package in seen:
- continue
- seen.append(package)
-
- # check if the package exists
- try:
- package_obj = self.bld.get_tgen_by_name(package)
- except Errors.WafError:
- continue
-
- # in practice the other task is already processed
- # but this makes it explicit
- package_obj.post()
- package_name = package_obj.target
- task = getattr(package_obj, 'valatask', None)
- if task:
- for output in task.outputs:
- if output.name == package_name + ".vapi":
- valatask.set_run_after(task)
- if package_name not in packages:
- packages.append(package_name)
- if output.parent not in vapi_dirs:
- vapi_dirs.append(output.parent)
- if output.parent not in self.includes:
- self.includes.append(output.parent)
-
- if hasattr(package_obj, 'use'):
- lst = self.to_list(package_obj.use)
- lst.reverse()
- local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages
-
- addflags(['--pkg=%s' % p for p in packages])
-
- for vapi_dir in vapi_dirs:
- if isinstance(vapi_dir, Node.Node):
- v_node = vapi_dir
- else:
- v_node = self.path.find_dir(vapi_dir)
- if not v_node:
- Logs.warn('Unable to locate Vala API directory: %r', vapi_dir)
- else:
- addflags('--vapidir=%s' % v_node.abspath())
-
- self.dump_deps_node = None
- if self.is_lib and self.packages:
- self.dump_deps_node = valatask.vala_dir_node.find_or_declare('%s.deps' % self.target)
- valatask.outputs.append(self.dump_deps_node)
-
- if self.is_lib and valatask.install_binding:
- headers_list = [o for o in valatask.outputs if o.suffix() == ".h"]
- if headers_list:
- self.install_vheader = self.add_install_files(install_to=valatask.header_path, install_from=headers_list)
-
- vapi_list = [o for o in valatask.outputs if (o.suffix() in (".vapi", ".deps"))]
- if vapi_list:
- self.install_vapi = self.add_install_files(install_to=valatask.vapi_path, install_from=vapi_list)
-
- gir_list = [o for o in valatask.outputs if o.suffix() == '.gir']
- if gir_list:
- self.install_gir = self.add_install_files(
- install_to=getattr(self, 'gir_path', '${DATAROOTDIR}/gir-1.0'), install_from=gir_list)
-
- if hasattr(self, 'vala_resources'):
- nodes = self.to_nodes(self.vala_resources)
- valatask.vala_exclude = getattr(valatask, 'vala_exclude', []) + nodes
- valatask.inputs.extend(nodes)
- for x in nodes:
- addflags(['--gresources', x.abspath()])
-
-@extension('.vala', '.gs')
-def vala_file(self, node):
- """
- Compile a vala file and bind the task to *self.valatask*. If an existing vala task is already set, add the node
- to its inputs. The typical example is::
-
- def build(bld):
- bld.program(
- packages = 'gtk+-2.0',
- target = 'vala-gtk-example',
- use = 'GTK GLIB',
- source = 'vala-gtk-example.vala foo.vala',
- vala_defines = ['DEBUG'] # adds --define=<xyz> values to the command-line
-
- # the following arguments are for libraries
- #gir = 'hello-1.0',
- #gir_path = '/tmp',
- #vapi_path = '/tmp',
- #pkg_name = 'hello'
- # disable installing of gir, vapi and header
- #install_binding = False
-
- # profile = 'xyz' # adds --profile=<xyz> to enable profiling
- # thread = True, # adds --thread, except if profile is on or not on 'gobject'
- # vala_target_glib = 'xyz' # adds --target-glib=<xyz>, can be given through the command-line option --vala-target-glib=<xyz>
- )
-
-
- :param node: vala file
- :type node: :py:class:`waflib.Node.Node`
- """
-
- try:
- valatask = self.valatask
- except AttributeError:
- valatask = self.valatask = self.create_task('valac')
- self.init_vala_task()
-
- valatask.inputs.append(node)
- name = node.name[:node.name.rfind('.')] + '.c'
- c_node = valatask.vala_dir_node.find_or_declare(name)
- valatask.outputs.append(c_node)
- self.source.append(c_node)
-
-@extension('.vapi')
-def vapi_file(self, node):
- try:
- valatask = self.valatask
- except AttributeError:
- valatask = self.valatask = self.create_task('valac')
- self.init_vala_task()
- valatask.inputs.append(node)
-
-@conf
-def find_valac(self, valac_name, min_version):
- """
- Find the valac program, and execute it to store the version
- number in *conf.env.VALAC_VERSION*
-
- :param valac_name: program name
- :type valac_name: string or list of string
- :param min_version: minimum version acceptable
- :type min_version: tuple of int
- """
- valac = self.find_program(valac_name, var='VALAC')
- try:
- output = self.cmd_and_log(valac + ['--version'])
- except Errors.WafError:
- valac_version = None
- else:
- ver = re.search(r'\d+.\d+.\d+', output).group().split('.')
- valac_version = tuple([int(x) for x in ver])
-
- self.msg('Checking for %s version >= %r' % (valac_name, min_version),
- valac_version, valac_version and valac_version >= min_version)
- if valac and valac_version < min_version:
- self.fatal("%s version %r is too old, need >= %r" % (valac_name, valac_version, min_version))
-
- self.env.VALAC_VERSION = valac_version
- return valac
-
-@conf
-def check_vala(self, min_version=(0,8,0), branch=None):
- """
- Check if vala compiler from a given branch exists of at least a given
- version.
-
- :param min_version: minimum version acceptable (0.8.0)
- :type min_version: tuple
- :param branch: first part of the version number, in case a snapshot is used (0, 8)
- :type branch: tuple of int
- """
- if self.env.VALA_MINVER:
- min_version = self.env.VALA_MINVER
- if self.env.VALA_MINVER_BRANCH:
- branch = self.env.VALA_MINVER_BRANCH
- if not branch:
- branch = min_version[:2]
- try:
- find_valac(self, 'valac-%d.%d' % (branch[0], branch[1]), min_version)
- except self.errors.ConfigurationError:
- find_valac(self, 'valac', min_version)
-
-@conf
-def check_vala_deps(self):
- """
- Load the gobject and gthread packages if they are missing.
- """
- if not self.env.HAVE_GOBJECT:
- pkg_args = {'package': 'gobject-2.0',
- 'uselib_store': 'GOBJECT',
- 'args': '--cflags --libs'}
- if getattr(Options.options, 'vala_target_glib', None):
- pkg_args['atleast_version'] = Options.options.vala_target_glib
- self.check_cfg(**pkg_args)
-
- if not self.env.HAVE_GTHREAD:
- pkg_args = {'package': 'gthread-2.0',
- 'uselib_store': 'GTHREAD',
- 'args': '--cflags --libs'}
- if getattr(Options.options, 'vala_target_glib', None):
- pkg_args['atleast_version'] = Options.options.vala_target_glib
- self.check_cfg(**pkg_args)
-
-def configure(self):
- """
- Use the following to enforce minimum vala version::
-
- def configure(conf):
- conf.env.VALA_MINVER = (0, 10, 0)
- conf.load('vala')
- """
- self.load('gnu_dirs')
- self.check_vala_deps()
- self.check_vala()
- self.add_os_flags('VALAFLAGS')
- self.env.append_unique('VALAFLAGS', ['-C'])
-
-def options(opt):
- """
- Load the :py:mod:`waflib.Tools.gnu_dirs` tool and add the ``--vala-target-glib`` command-line option
- """
- opt.load('gnu_dirs')
- valaopts = opt.add_option_group('Vala Compiler Options')
- valaopts.add_option('--vala-target-glib', default=None,
- dest='vala_target_glib', metavar='MAJOR.MINOR',
- help='Target version of glib for Vala GObject code generation')
-
diff --git a/waflib/Tools/waf_unit_test.py b/waflib/Tools/waf_unit_test.py
index a71ed1c0..27cd9a40 100644
--- a/waflib/Tools/waf_unit_test.py
+++ b/waflib/Tools/waf_unit_test.py
@@ -1,10 +1,10 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2006
-# Thomas Nagy, 2010-2018 (ita)
+# Thomas Nagy, 2010
"""
-Unit testing system for C/C++/D and interpreted languages providing test execution:
+Unit testing system for C/C++/D providing test execution:
* in parallel, by using ``waf -j``
* partial (only the tests that have changed) or full (by using ``waf --alltests``)
@@ -31,128 +31,31 @@ the predefined callback::
bld(features='cxx cxxprogram test', source='main.c', target='app')
from waflib.Tools import waf_unit_test
bld.add_post_fun(waf_unit_test.summary)
-
-By passing --dump-test-scripts the build outputs corresponding python files
-(with extension _run.py) that are useful for debugging purposes.
"""
-import os, shlex, sys
+import os
from waflib.TaskGen import feature, after_method, taskgen_method
from waflib import Utils, Task, Logs, Options
-from waflib.Tools import ccroot
testlock = Utils.threading.Lock()
-SCRIPT_TEMPLATE = """#! %(python)s
-import subprocess, sys
-cmd = %(cmd)r
-# if you want to debug with gdb:
-#cmd = ['gdb', '-args'] + cmd
-env = %(env)r
-status = subprocess.call(cmd, env=env, cwd=%(cwd)r, shell=isinstance(cmd, str))
-sys.exit(status)
-"""
-
-@taskgen_method
-def handle_ut_cwd(self, key):
- """
- Task generator method, used internally to limit code duplication.
- This method may disappear anytime.
- """
- cwd = getattr(self, key, None)
- if cwd:
- if isinstance(cwd, str):
- # we want a Node instance
- if os.path.isabs(cwd):
- self.ut_cwd = self.bld.root.make_node(cwd)
- else:
- self.ut_cwd = self.path.make_node(cwd)
-
-@feature('test_scripts')
-def make_interpreted_test(self):
- """Create interpreted unit tests."""
- for x in ['test_scripts_source', 'test_scripts_template']:
- if not hasattr(self, x):
- Logs.warn('a test_scripts taskgen i missing %s' % x)
- return
-
- self.ut_run, lst = Task.compile_fun(self.test_scripts_template, shell=getattr(self, 'test_scripts_shell', False))
-
- script_nodes = self.to_nodes(self.test_scripts_source)
- for script_node in script_nodes:
- tsk = self.create_task('utest', [script_node])
- tsk.vars = lst + tsk.vars
- tsk.env['SCRIPT'] = script_node.path_from(tsk.get_cwd())
-
- self.handle_ut_cwd('test_scripts_cwd')
-
- env = getattr(self, 'test_scripts_env', None)
- if env:
- self.ut_env = env
- else:
- self.ut_env = dict(os.environ)
-
- paths = getattr(self, 'test_scripts_paths', {})
- for (k,v) in paths.items():
- p = self.ut_env.get(k, '').split(os.pathsep)
- if isinstance(v, str):
- v = v.split(os.pathsep)
- self.ut_env[k] = os.pathsep.join(p + v)
-
@feature('test')
-@after_method('apply_link', 'process_use')
+@after_method('apply_link')
def make_test(self):
"""Create the unit test task. There can be only one unit test task by task generator."""
- if not getattr(self, 'link_task', None):
- return
-
- tsk = self.create_task('utest', self.link_task.outputs)
- if getattr(self, 'ut_str', None):
- self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
- tsk.vars = lst + tsk.vars
-
- self.handle_ut_cwd('ut_cwd')
-
- if not hasattr(self, 'ut_paths'):
- paths = []
- for x in self.tmp_use_sorted:
- try:
- y = self.bld.get_tgen_by_name(x).link_task
- except AttributeError:
- pass
- else:
- if not isinstance(y, ccroot.stlink_task):
- paths.append(y.outputs[0].parent.abspath())
- self.ut_paths = os.pathsep.join(paths) + os.pathsep
-
- if not hasattr(self, 'ut_env'):
- self.ut_env = dct = dict(os.environ)
- def add_path(var):
- dct[var] = self.ut_paths + dct.get(var,'')
- if Utils.is_win32:
- add_path('PATH')
- elif Utils.unversioned_sys_platform() == 'darwin':
- add_path('DYLD_LIBRARY_PATH')
- add_path('LD_LIBRARY_PATH')
- else:
- add_path('LD_LIBRARY_PATH')
-
- if not hasattr(self, 'ut_cmd'):
- self.ut_cmd = getattr(Options.options, 'testcmd', False)
+ if getattr(self, 'link_task', None):
+ self.create_task('utest', self.link_task.outputs)
+
@taskgen_method
def add_test_results(self, tup):
"""Override and return tup[1] to interrupt the build immediately if a test does not run"""
Logs.debug("ut: %r", tup)
- try:
- self.utest_results.append(tup)
- except AttributeError:
- self.utest_results = [tup]
+ self.utest_result = tup
try:
self.bld.utest_results.append(tup)
except AttributeError:
self.bld.utest_results = [tup]
-@Task.deep_inputs
class utest(Task.Task):
"""
Execute a unit test
@@ -160,7 +63,6 @@ class utest(Task.Task):
color = 'PINK'
after = ['vnum', 'inst']
vars = []
-
def runnable_status(self):
"""
Always execute the task if `waf --alltests` was used or no
@@ -175,17 +77,37 @@ class utest(Task.Task):
return Task.RUN_ME
return ret
+ def add_path(self, dct, path, var):
+ dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])
+
def get_test_env(self):
"""
In general, tests may require any library built anywhere in the project.
Override this method if fewer paths are needed
"""
- return self.generator.ut_env
-
- def post_run(self):
- super(utest, self).post_run()
- if getattr(Options.options, 'clear_failed_tests', False) and self.waf_unit_test_results[1]:
- self.generator.bld.task_sigs[self.uid()] = None
+ try:
+ fu = getattr(self.generator.bld, 'all_test_paths')
+ except AttributeError:
+ # this operation may be performed by at most #maxjobs
+ fu = os.environ.copy()
+
+ lst = []
+ for g in self.generator.bld.groups:
+ for tg in g:
+ if getattr(tg, 'link_task', None):
+ s = tg.link_task.outputs[0].parent.abspath()
+ if s not in lst:
+ lst.append(s)
+
+ if Utils.is_win32:
+ self.add_path(fu, lst, 'PATH')
+ elif Utils.unversioned_sys_platform() == 'darwin':
+ self.add_path(fu, lst, 'DYLD_LIBRARY_PATH')
+ self.add_path(fu, lst, 'LD_LIBRARY_PATH')
+ else:
+ self.add_path(fu, lst, 'LD_LIBRARY_PATH')
+ self.generator.bld.all_test_paths = fu
+ return fu
def run(self):
"""
@@ -194,44 +116,29 @@ class utest(Task.Task):
Override ``add_test_results`` to interrupt the build
"""
- if hasattr(self.generator, 'ut_run'):
- return self.generator.ut_run(self)
-
- self.ut_exec = getattr(self.generator, 'ut_exec', [self.inputs[0].abspath()])
- ut_cmd = getattr(self.generator, 'ut_cmd', False)
- if ut_cmd:
- self.ut_exec = shlex.split(ut_cmd % ' '.join(self.ut_exec))
-
- return self.exec_command(self.ut_exec)
-
- def exec_command(self, cmd, **kw):
- Logs.debug('runner: %r', cmd)
- if getattr(Options.options, 'dump_test_scripts', False):
- script_code = SCRIPT_TEMPLATE % {
- 'python': sys.executable,
- 'env': self.get_test_env(),
- 'cwd': self.get_cwd().abspath(),
- 'cmd': cmd
- }
- script_file = self.inputs[0].abspath() + '_run.py'
- Utils.writef(script_file, script_code)
- os.chmod(script_file, Utils.O755)
- if Logs.verbose > 1:
- Logs.info('Test debug file written as %r' % script_file)
-
- proc = Utils.subprocess.Popen(cmd, cwd=self.get_cwd().abspath(), env=self.get_test_env(),
- stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE, shell=isinstance(cmd,str))
+
+ filename = self.inputs[0].abspath()
+ self.ut_exec = getattr(self.generator, 'ut_exec', [filename])
+ if getattr(self.generator, 'ut_fun', None):
+ self.generator.ut_fun(self)
+
+
+ cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath()
+
+ testcmd = getattr(self.generator, 'ut_cmd', False) or getattr(Options.options, 'testcmd', False)
+ if testcmd:
+ self.ut_exec = (testcmd % self.ut_exec[0]).split(' ')
+
+ proc = Utils.subprocess.Popen(self.ut_exec, cwd=cwd, env=self.get_test_env(), stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE)
(stdout, stderr) = proc.communicate()
- self.waf_unit_test_results = tup = (self.inputs[0].abspath(), proc.returncode, stdout, stderr)
+
+ tup = (filename, proc.returncode, stdout, stderr)
testlock.acquire()
try:
return self.generator.add_test_results(tup)
finally:
testlock.release()
- def get_cwd(self):
- return getattr(self.generator, 'ut_cwd', self.inputs[0].parent)
-
def summary(bld):
"""
Display an execution summary::
@@ -248,15 +155,15 @@ def summary(bld):
total = len(lst)
tfail = len([x for x in lst if x[1]])
- Logs.pprint('GREEN', ' tests that pass %d/%d' % (total-tfail, total))
+ Logs.pprint('CYAN', ' tests that pass %d/%d' % (total-tfail, total))
for (f, code, out, err) in lst:
if not code:
- Logs.pprint('GREEN', ' %s' % f)
+ Logs.pprint('CYAN', ' %s' % f)
- Logs.pprint('GREEN' if tfail == 0 else 'RED', ' tests that fail %d/%d' % (tfail, total))
+ Logs.pprint('CYAN', ' tests that fail %d/%d' % (tfail, total))
for (f, code, out, err) in lst:
if code:
- Logs.pprint('RED', ' %s' % f)
+ Logs.pprint('CYAN', ' %s' % f)
def set_exit_code(bld):
"""
@@ -287,10 +194,8 @@ def options(opt):
"""
opt.add_option('--notests', action='store_true', default=False, help='Exec no unit tests', dest='no_tests')
opt.add_option('--alltests', action='store_true', default=False, help='Exec all unit tests', dest='all_tests')
- opt.add_option('--clear-failed', action='store_true', default=False,
- help='Force failed unit tests to run again next time', dest='clear_failed_tests')
- opt.add_option('--testcmd', action='store', default=False, dest='testcmd',
- help='Run the unit tests using the test-cmd string example "--testcmd="valgrind --error-exitcode=1 %s" to run under valgrind')
- opt.add_option('--dump-test-scripts', action='store_true', default=False,
- help='Create python scripts to help debug tests', dest='dump_test_scripts')
+ opt.add_option('--testcmd', action='store', default=False,
+ help = 'Run the unit tests using the test-cmd string'
+ ' example "--test-cmd="valgrind --error-exitcode=1'
+ ' %s" to run under valgrind', dest='testcmd')
diff --git a/waflib/Tools/winres.py b/waflib/Tools/winres.py
deleted file mode 100644
index 586c596c..00000000
--- a/waflib/Tools/winres.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Brant Young, 2007
-
-"Process *.rc* files for C/C++: X{.rc -> [.res|.rc.o]}"
-
-import re
-from waflib import Task
-from waflib.TaskGen import extension
-from waflib.Tools import c_preproc
-
-@extension('.rc')
-def rc_file(self, node):
- """
- Binds the .rc extension to a winrc task
- """
- obj_ext = '.rc.o'
- if self.env.WINRC_TGT_F == '/fo':
- obj_ext = '.res'
- rctask = self.create_task('winrc', node, node.change_ext(obj_ext))
- try:
- self.compiled_tasks.append(rctask)
- except AttributeError:
- self.compiled_tasks = [rctask]
-
-re_lines = re.compile(
- '(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|'\
- '(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',
- re.IGNORECASE | re.MULTILINE)
-
-class rc_parser(c_preproc.c_parser):
- """
- Calculates dependencies in .rc files
- """
- def filter_comments(self, node):
- """
- Overrides :py:meth:`waflib.Tools.c_preproc.c_parser.filter_comments`
- """
- code = node.read()
- if c_preproc.use_trigraphs:
- for (a, b) in c_preproc.trig_def:
- code = code.split(a).join(b)
- code = c_preproc.re_nl.sub('', code)
- code = c_preproc.re_cpp.sub(c_preproc.repl, code)
- ret = []
- for m in re.finditer(re_lines, code):
- if m.group(2):
- ret.append((m.group(2), m.group(3)))
- else:
- ret.append(('include', m.group(5)))
- return ret
-
-class winrc(Task.Task):
- """
- Compiles resource files
- """
- run_str = '${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
- color = 'BLUE'
- def scan(self):
- tmp = rc_parser(self.generator.includes_nodes)
- tmp.start(self.inputs[0], self.env)
- return (tmp.nodes, tmp.names)
-
-def configure(conf):
- """
- Detects the programs RC or windres, depending on the C/C++ compiler in use
- """
- v = conf.env
- if not v.WINRC:
- if v.CC_NAME == 'msvc':
- conf.find_program('RC', var='WINRC', path_list=v.PATH)
- v.WINRC_TGT_F = '/fo'
- v.WINRC_SRC_F = ''
- else:
- conf.find_program('windres', var='WINRC', path_list=v.PATH)
- v.WINRC_TGT_F = '-o'
- v.WINRC_SRC_F = '-i'
-
diff --git a/waflib/Tools/xlc.py b/waflib/Tools/xlc.py
index 134dd415..3bd8d026 100644
--- a/waflib/Tools/xlc.py
+++ b/waflib/Tools/xlc.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
+# Thomas Nagy, 2006-2010 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
# Michael Kuhn, 2009
@@ -11,7 +11,7 @@ from waflib.Configure import conf
@conf
def find_xlc(conf):
"""
- Detects the Aix C compiler
+ Detect the Aix C compiler
"""
cc = conf.find_program(['xlc_r', 'xlc'], var='CC')
conf.get_xlc_version(cc)
@@ -24,36 +24,38 @@ def xlc_common_flags(conf):
"""
v = conf.env
- v.CC_SRC_F = []
- v.CC_TGT_F = ['-c', '-o']
+ v['CC_SRC_F'] = []
+ v['CC_TGT_F'] = ['-c', '-o']
- if not v.LINK_CC:
- v.LINK_CC = v.CC
+ # linker
+ if not v['LINK_CC']: v['LINK_CC'] = v['CC']
+ v['CCLNK_SRC_F'] = []
+ v['CCLNK_TGT_F'] = ['-o']
+ v['CPPPATH_ST'] = '-I%s'
+ v['DEFINES_ST'] = '-D%s'
- v.CCLNK_SRC_F = []
- v.CCLNK_TGT_F = ['-o']
- v.CPPPATH_ST = '-I%s'
- v.DEFINES_ST = '-D%s'
+ v['LIB_ST'] = '-l%s' # template for adding libs
+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
+ v['STLIB_ST'] = '-l%s'
+ v['STLIBPATH_ST'] = '-L%s'
+ v['RPATH_ST'] = '-Wl,-rpath,%s'
- v.LIB_ST = '-l%s' # template for adding libs
- v.LIBPATH_ST = '-L%s' # template for adding libpaths
- v.STLIB_ST = '-l%s'
- v.STLIBPATH_ST = '-L%s'
- v.RPATH_ST = '-Wl,-rpath,%s'
+ v['SONAME_ST'] = []
+ v['SHLIB_MARKER'] = []
+ v['STLIB_MARKER'] = []
- v.SONAME_ST = []
- v.SHLIB_MARKER = []
- v.STLIB_MARKER = []
+ # program
+ v['LINKFLAGS_cprogram'] = ['-Wl,-brtl']
+ v['cprogram_PATTERN'] = '%s'
- v.LINKFLAGS_cprogram = ['-Wl,-brtl']
- v.cprogram_PATTERN = '%s'
+ # shared library
+ v['CFLAGS_cshlib'] = ['-fPIC']
+ v['LINKFLAGS_cshlib'] = ['-G', '-Wl,-brtl,-bexpfull']
+ v['cshlib_PATTERN'] = 'lib%s.so'
- v.CFLAGS_cshlib = ['-fPIC']
- v.LINKFLAGS_cshlib = ['-G', '-Wl,-brtl,-bexpfull']
- v.cshlib_PATTERN = 'lib%s.so'
-
- v.LINKFLAGS_cstlib = []
- v.cstlib_PATTERN = 'lib%s.a'
+ # static lib
+ v['LINKFLAGS_cstlib'] = []
+ v['cstlib_PATTERN'] = 'lib%s.a'
def configure(conf):
conf.find_xlc()
diff --git a/waflib/Tools/xlcxx.py b/waflib/Tools/xlcxx.py
index 76aa59bc..150aeaa4 100644
--- a/waflib/Tools/xlcxx.py
+++ b/waflib/Tools/xlcxx.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
+# Thomas Nagy, 2006-2010 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
# Michael Kuhn, 2009
@@ -11,7 +11,7 @@ from waflib.Configure import conf
@conf
def find_xlcxx(conf):
"""
- Detects the Aix C++ compiler
+ Detect the Aix C++ compiler
"""
cxx = conf.find_program(['xlc++_r', 'xlc++'], var='CXX')
conf.get_xlc_version(cxx)
@@ -24,36 +24,38 @@ def xlcxx_common_flags(conf):
"""
v = conf.env
- v.CXX_SRC_F = []
- v.CXX_TGT_F = ['-c', '-o']
+ v['CXX_SRC_F'] = []
+ v['CXX_TGT_F'] = ['-c', '-o']
- if not v.LINK_CXX:
- v.LINK_CXX = v.CXX
+ # linker
+ if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
+ v['CXXLNK_SRC_F'] = []
+ v['CXXLNK_TGT_F'] = ['-o']
+ v['CPPPATH_ST'] = '-I%s'
+ v['DEFINES_ST'] = '-D%s'
- v.CXXLNK_SRC_F = []
- v.CXXLNK_TGT_F = ['-o']
- v.CPPPATH_ST = '-I%s'
- v.DEFINES_ST = '-D%s'
+ v['LIB_ST'] = '-l%s' # template for adding libs
+ v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
+ v['STLIB_ST'] = '-l%s'
+ v['STLIBPATH_ST'] = '-L%s'
+ v['RPATH_ST'] = '-Wl,-rpath,%s'
- v.LIB_ST = '-l%s' # template for adding libs
- v.LIBPATH_ST = '-L%s' # template for adding libpaths
- v.STLIB_ST = '-l%s'
- v.STLIBPATH_ST = '-L%s'
- v.RPATH_ST = '-Wl,-rpath,%s'
+ v['SONAME_ST'] = []
+ v['SHLIB_MARKER'] = []
+ v['STLIB_MARKER'] = []
- v.SONAME_ST = []
- v.SHLIB_MARKER = []
- v.STLIB_MARKER = []
+ # program
+ v['LINKFLAGS_cxxprogram']= ['-Wl,-brtl']
+ v['cxxprogram_PATTERN'] = '%s'
- v.LINKFLAGS_cxxprogram= ['-Wl,-brtl']
- v.cxxprogram_PATTERN = '%s'
+ # shared library
+ v['CXXFLAGS_cxxshlib'] = ['-fPIC']
+ v['LINKFLAGS_cxxshlib'] = ['-G', '-Wl,-brtl,-bexpfull']
+ v['cxxshlib_PATTERN'] = 'lib%s.so'
- v.CXXFLAGS_cxxshlib = ['-fPIC']
- v.LINKFLAGS_cxxshlib = ['-G', '-Wl,-brtl,-bexpfull']
- v.cxxshlib_PATTERN = 'lib%s.so'
-
- v.LINKFLAGS_cxxstlib = []
- v.cxxstlib_PATTERN = 'lib%s.a'
+ # static lib
+ v['LINKFLAGS_cxxstlib'] = []
+ v['cxxstlib_PATTERN'] = 'lib%s.a'
def configure(conf):
conf.find_xlcxx()