Diffstat (limited to 'deps/gyp/pylib')
-rw-r--r--  deps/gyp/pylib/gyp/MSVSNew.py                        |    2
-rw-r--r--  deps/gyp/pylib/gyp/MSVSSettings.py                   |   36
-rwxr-xr-x  deps/gyp/pylib/gyp/MSVSSettings_test.py              |    4
-rw-r--r--  deps/gyp/pylib/gyp/MSVSUtil.py                       |   12
-rw-r--r--  deps/gyp/pylib/gyp/MSVSVersion.py                    |   78
-rwxr-xr-x  deps/gyp/pylib/gyp/__init__.py                       |   29
-rw-r--r--  deps/gyp/pylib/gyp/common.py                         |   28
-rwxr-xr-x  deps/gyp/pylib/gyp/flock_tool.py                     |    7
-rw-r--r--  deps/gyp/pylib/gyp/generator/analyzer.py             |  741
-rw-r--r--  deps/gyp/pylib/gyp/generator/android.py              | 1090
-rw-r--r--  deps/gyp/pylib/gyp/generator/cmake.py                |  404
-rw-r--r--  deps/gyp/pylib/gyp/generator/dump_dependency_json.py |   20
-rw-r--r--  deps/gyp/pylib/gyp/generator/eclipse.py              |  129
-rw-r--r--  deps/gyp/pylib/gyp/generator/gypd.py                 |    7
-rw-r--r--  deps/gyp/pylib/gyp/generator/make.py                 |  156
-rw-r--r--  deps/gyp/pylib/gyp/generator/msvs.py                 |  236
-rw-r--r--  deps/gyp/pylib/gyp/generator/ninja.py                |  333
-rw-r--r--  deps/gyp/pylib/gyp/generator/ninja_test.py           |   21
-rw-r--r--  deps/gyp/pylib/gyp/generator/xcode.py                |  106
-rw-r--r--  deps/gyp/pylib/gyp/input.py                          |  364
-rwxr-xr-x  deps/gyp/pylib/gyp/input_test.py                     |   14
-rwxr-xr-x  deps/gyp/pylib/gyp/mac_tool.py                       |  108
-rw-r--r--  deps/gyp/pylib/gyp/msvs_emulation.py                 |  179
-rwxr-xr-x  deps/gyp/pylib/gyp/win_tool.py                       |    9
-rw-r--r--  deps/gyp/pylib/gyp/xcode_emulation.py                |   84
-rw-r--r--  deps/gyp/pylib/gyp/xcode_ninja.py                    |   21
-rw-r--r--  deps/gyp/pylib/gyp/xcodeproj_file.py                 |  167
27 files changed, 2550 insertions, 1835 deletions
diff --git a/deps/gyp/pylib/gyp/MSVSNew.py b/deps/gyp/pylib/gyp/MSVSNew.py
index 845dcb0639..593f0e5b0b 100644
--- a/deps/gyp/pylib/gyp/MSVSNew.py
+++ b/deps/gyp/pylib/gyp/MSVSNew.py
@@ -172,7 +172,7 @@ class MSVSProject(MSVSSolutionEntry):
#------------------------------------------------------------------------------
-class MSVSSolution:
+class MSVSSolution(object):
"""Visual Studio solution."""
def __init__(self, path, version, entries=None, variants=None,
diff --git a/deps/gyp/pylib/gyp/MSVSSettings.py b/deps/gyp/pylib/gyp/MSVSSettings.py
index 205b3b5b9b..4985756bdd 100644
--- a/deps/gyp/pylib/gyp/MSVSSettings.py
+++ b/deps/gyp/pylib/gyp/MSVSSettings.py
@@ -2,7 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-"""Code to validate and convert settings of the Microsoft build tools.
+r"""Code to validate and convert settings of the Microsoft build tools.
This file contains code to validate and convert settings of the Microsoft
build tools. The function ConvertToMSBuildSettings(), ValidateMSVSSettings(),
@@ -314,7 +314,14 @@ def _MSBuildOnly(tool, name, setting_type):
name: the name of the setting.
setting_type: the type of this setting.
"""
+
+ def _Translate(value, msbuild_settings):
+ # Let msbuild-only properties get translated as-is from msvs_settings.
+ tool_settings = msbuild_settings.setdefault(tool.msbuild_name, {})
+ tool_settings[name] = value
+
_msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild
+ _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate
def _ConvertedToAdditionalOption(tool, msvs_name, flag):
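The pass-through converter registered above means MSBuild-only settings are now copied verbatim when converting from msvs_settings rather than being dropped. A minimal standalone sketch of that behavior, using the ClCompile/MultiProcessorCompilation pairing registered later in this file:

    # Standalone sketch of the registered pass-through converter.
    def _Translate(value, msbuild_settings):
      # Copy the value through unchanged, creating the tool dict on demand.
      tool_settings = msbuild_settings.setdefault('ClCompile', {})
      tool_settings['MultiProcessorCompilation'] = value

    msbuild_settings = {}
    _Translate('true', msbuild_settings)
    print(msbuild_settings)  # {'ClCompile': {'MultiProcessorCompilation': 'true'}}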
@@ -417,11 +424,11 @@ def ConvertVCMacrosToMSBuild(s):
if '$' in s:
replace_map = {
'$(ConfigurationName)': '$(Configuration)',
- '$(InputDir)': '%(RootDir)%(Directory)',
+ '$(InputDir)': '%(RelativeDir)',
'$(InputExt)': '%(Extension)',
'$(InputFileName)': '%(Filename)%(Extension)',
'$(InputName)': '%(Filename)',
- '$(InputPath)': '%(FullPath)',
+ '$(InputPath)': '%(Identity)',
'$(ParentName)': '$(ProjectFileName)',
'$(PlatformName)': '$(Platform)',
'$(SafeInputName)': '%(Filename)',
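A quick sketch of what the two remapped macros now expand to; the input string is hypothetical, and only the two updated replace_map entries are applied:

    # Hypothetical command string run through the updated mappings.
    replace_map = {
        '$(InputDir)': '%(RelativeDir)',  # was %(RootDir)%(Directory)
        '$(InputPath)': '%(Identity)',    # was %(FullPath)
    }
    s = 'copy $(InputPath) $(InputDir)generated'
    for old, new in replace_map.items():
      s = s.replace(old, new)
    print(s)  # copy %(Identity) %(RelativeDir)generated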
@@ -531,6 +538,7 @@ _midl = _Tool('VCMIDLTool', 'Midl')
_rc = _Tool('VCResourceCompilerTool', 'ResourceCompile')
_lib = _Tool('VCLibrarianTool', 'Lib')
_manifest = _Tool('VCManifestTool', 'Manifest')
+_masm = _Tool('MASM', 'MASM')
_AddTool(_compile)
@@ -539,6 +547,7 @@ _AddTool(_midl)
_AddTool(_rc)
_AddTool(_lib)
_AddTool(_manifest)
+_AddTool(_masm)
# Add sections only found in the MSBuild settings.
_msbuild_validators[''] = {}
_msbuild_validators['ProjectReference'] = {}
@@ -602,7 +611,8 @@ _Same(_compile, 'BrowseInformation',
_Same(_compile, 'CallingConvention',
_Enumeration(['Cdecl', # /Gd
'FastCall', # /Gr
- 'StdCall'])) # /Gz
+ 'StdCall', # /Gz
+ 'VectorCall'])) # /Gv
_Same(_compile, 'CompileAs',
_Enumeration(['Default',
'CompileAsC', # /TC
@@ -618,7 +628,10 @@ _Same(_compile, 'EnableEnhancedInstructionSet',
'StreamingSIMDExtensions', # /arch:SSE
'StreamingSIMDExtensions2', # /arch:SSE2
'AdvancedVectorExtensions', # /arch:AVX (vs2012+)
- 'NoExtensions',])) # /arch:IA32 (vs2012+)
+ 'NoExtensions', # /arch:IA32 (vs2012+)
+ # This one only exists in the new msbuild format.
+ 'AdvancedVectorExtensions2', # /arch:AVX2 (vs2013r2+)
+ ]))
_Same(_compile, 'ErrorReporting',
_Enumeration(['None', # /errorReport:none
'Prompt', # /errorReport:prompt
@@ -695,10 +708,7 @@ _MSVSOnly(_compile, 'UseUnicodeResponseFiles', _boolean)
_MSBuildOnly(_compile, 'BuildingInIDE', _boolean)
_MSBuildOnly(_compile, 'CompileAsManaged',
_Enumeration([], new=['false',
- 'true', # /clr
- 'Pure', # /clr:pure
- 'Safe', # /clr:safe
- 'OldSyntax'])) # /clr:oldSyntax
+ 'true'])) # /clr
_MSBuildOnly(_compile, 'CreateHotpatchableImage', _boolean) # /hotpatch
_MSBuildOnly(_compile, 'MultiProcessorCompilation', _boolean) # /MP
_MSBuildOnly(_compile, 'PreprocessOutputPath', _string) # /Fi
@@ -1076,3 +1086,11 @@ _MSBuildOnly(_manifest, 'ManifestFromManagedAssembly',
_MSBuildOnly(_manifest, 'OutputResourceManifests', _string) # /outputresource
_MSBuildOnly(_manifest, 'SuppressDependencyElement', _boolean) # /nodependency
_MSBuildOnly(_manifest, 'TrackerLogDirectory', _folder_name)
+
+
+# Directives for MASM.
+# See "$(VCTargetsPath)\BuildCustomizations\masm.xml" for the schema of the
+# MSBuild MASM settings.
+
+# Options that have the same name in MSVS and MSBuild.
+_Same(_masm, 'UseSafeExceptionHandlers', _boolean) # /safeseh
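A hypothetical .gyp fragment that would exercise the new MASM section (target and file names are invented for illustration):

    {
      'targets': [{
        'target_name': 'asm_lib',
        'type': 'static_library',
        'sources': ['handlers.asm'],
        'msvs_settings': {
          'MASM': {'UseSafeExceptionHandlers': 'true'},  # /safeseh
        },
      }],
    }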
diff --git a/deps/gyp/pylib/gyp/MSVSSettings_test.py b/deps/gyp/pylib/gyp/MSVSSettings_test.py
index 9bd37ec348..bf6ea6b802 100755
--- a/deps/gyp/pylib/gyp/MSVSSettings_test.py
+++ b/deps/gyp/pylib/gyp/MSVSSettings_test.py
@@ -267,7 +267,7 @@ class TestSequenceFunctions(unittest.TestCase):
'Warning: for VCCLCompilerTool/BrowseInformation, '
"invalid literal for int() with base 10: 'fdkslj'",
'Warning: for VCCLCompilerTool/CallingConvention, '
- 'index value (-1) not in expected range [0, 3)',
+ 'index value (-1) not in expected range [0, 4)',
'Warning: for VCCLCompilerTool/DebugInformationFormat, '
'converted value for 2 not specified.',
'Warning: unrecognized setting VCCLCompilerTool/Enableprefast',
@@ -296,7 +296,7 @@ class TestSequenceFunctions(unittest.TestCase):
'BuildingInIDE': 'true',
'CallingConvention': 'Cdecl',
'CompileAs': 'CompileAsC',
- 'CompileAsManaged': 'Pure',
+ 'CompileAsManaged': 'true',
'CreateHotpatchableImage': 'true',
'DebugInformationFormat': 'ProgramDatabase',
'DisableLanguageExtensions': 'true',
diff --git a/deps/gyp/pylib/gyp/MSVSUtil.py b/deps/gyp/pylib/gyp/MSVSUtil.py
index fbf3ed2e3c..0b32e91180 100644
--- a/deps/gyp/pylib/gyp/MSVSUtil.py
+++ b/deps/gyp/pylib/gyp/MSVSUtil.py
@@ -8,10 +8,12 @@ import copy
import os
-_TARGET_TYPE_EXT = {
- 'executable': '.exe',
- 'loadable_module': '.dll',
- 'shared_library': '.dll',
+# A dictionary mapping supported target types to extensions.
+TARGET_TYPE_EXT = {
+ 'executable': 'exe',
+ 'loadable_module': 'dll',
+ 'shared_library': 'dll',
+ 'static_library': 'lib',
}
@@ -157,7 +159,7 @@ def _GetPdbPath(target_dict, config_name, vars):
pdb_base = target_dict.get('product_name', target_dict['target_name'])
- pdb_base = '%s%s.pdb' % (pdb_base, _TARGET_TYPE_EXT[target_dict['type']])
+ pdb_base = '%s.%s.pdb' % (pdb_base, TARGET_TYPE_EXT[target_dict['type']])
pdb_path = vars['PRODUCT_DIR'] + '/' + pdb_base
return pdb_path
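The extension table now stores bare extensions and _GetPdbPath joins them with an explicit dot, so the emitted name is unchanged. A quick sketch with a hypothetical target named 'foo':

    TARGET_TYPE_EXT = {'shared_library': 'dll'}
    pdb_base = '%s.%s.pdb' % ('foo', TARGET_TYPE_EXT['shared_library'])
    print(pdb_base)  # foo.dll.pdb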
diff --git a/deps/gyp/pylib/gyp/MSVSVersion.py b/deps/gyp/pylib/gyp/MSVSVersion.py
index bcd6122f2d..d9bfa684fa 100644
--- a/deps/gyp/pylib/gyp/MSVSVersion.py
+++ b/deps/gyp/pylib/gyp/MSVSVersion.py
@@ -84,10 +84,11 @@ class VisualStudioVersion(object):
# vcvars32, which it can only find if VS??COMNTOOLS is set, which it
# isn't always.
if target_arch == 'x86':
- if self.short_name == '2013' and (
+ if self.short_name >= '2013' and self.short_name[-1] != 'e' and (
os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
- # VS2013 non-Express has a x64-x86 cross that we want to prefer.
+        # VS2013 and later non-Express editions have an x64-x86 cross that
+        # we want to prefer.
return [os.path.normpath(
os.path.join(self.path, 'VC/vcvarsall.bat')), 'amd64_x86']
# Otherwise, the standard x86 compiler.
@@ -138,7 +139,7 @@ def _RegistryQueryBase(sysdir, key, value):
def _RegistryQuery(key, value=None):
- """Use reg.exe to read a particular key through _RegistryQueryBase.
+ r"""Use reg.exe to read a particular key through _RegistryQueryBase.
First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
that fails, it falls back to System32. Sysnative is available on Vista and
@@ -165,8 +166,33 @@ def _RegistryQuery(key, value=None):
return text
+def _RegistryGetValueUsingWinReg(key, value):
+ """Use the _winreg module to obtain the value of a registry key.
+
+ Args:
+ key: The registry key.
+ value: The particular registry value to read.
+ Return:
+ contents of the registry key's value, or None on failure. Throws
+ ImportError if _winreg is unavailable.
+ """
+ import _winreg
+ try:
+ root, subkey = key.split('\\', 1)
+ assert root == 'HKLM' # Only need HKLM for now.
+ with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
+ return _winreg.QueryValueEx(hkey, value)[0]
+ except WindowsError:
+ return None
+
+
def _RegistryGetValue(key, value):
- """Use reg.exe to obtain the value of a registry key.
+ """Use _winreg or reg.exe to obtain the value of a registry key.
+
+  Using _winreg is preferable because it solves an issue in some corporate
+  environments where access to reg.exe is locked down. However, we still need
+  to fall back to reg.exe in case the _winreg module is not available (for
+  example in cygwin python).
Args:
key: The registry key.
@@ -174,6 +200,12 @@ def _RegistryGetValue(key, value):
Return:
contents of the registry key's value, or None on failure.
"""
+ try:
+ return _RegistryGetValueUsingWinReg(key, value)
+ except ImportError:
+ pass
+
+  # Fall back to reg.exe if we fail to import _winreg.
text = _RegistryQuery(key, value)
if not text:
return None
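A usage sketch of the resulting lookup order (Windows only; the key path is illustrative):

    # Tries _winreg first, then silently falls back to reg.exe if the
    # import fails (e.g. under cygwin python).
    key = r'HKLM\SOFTWARE\Microsoft\VisualStudio\14.0\Setup\VC'
    product_dir = _RegistryGetValue(key, 'ProductDir')
    print(product_dir)  # install path, or None if the key is absent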
@@ -184,19 +216,6 @@ def _RegistryGetValue(key, value):
return match.group(1)
-def _RegistryKeyExists(key):
- """Use reg.exe to see if a key exists.
-
- Args:
- key: The registry key to check.
- Return:
- True if the key exists
- """
- if not _RegistryQuery(key):
- return False
- return True
-
-
def _CreateVersion(name, path, sdk_based=False):
"""Sets up MSVS project generation.
@@ -207,6 +226,15 @@ def _CreateVersion(name, path, sdk_based=False):
if path:
path = os.path.normpath(path)
versions = {
+ '2015': VisualStudioVersion('2015',
+ 'Visual Studio 2015',
+ solution_version='12.00',
+ project_version='14.0',
+ flat_sln=False,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset='v140'),
'2013': VisualStudioVersion('2013',
'Visual Studio 2013',
solution_version='13.00',
@@ -316,7 +344,8 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
2008(e) - Visual Studio 2008 (9)
2010(e) - Visual Studio 2010 (10)
2012(e) - Visual Studio 2012 (11)
- 2013(e) - Visual Studio 2013 (11)
+ 2013(e) - Visual Studio 2013 (12)
+ 2015 - Visual Studio 2015 (14)
Where (e) is e for express editions of MSVS and blank otherwise.
"""
version_to_year = {
@@ -325,6 +354,7 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
'10.0': '2010',
'11.0': '2012',
'12.0': '2013',
+ '14.0': '2015',
}
versions = []
for version in versions_to_check:
@@ -361,13 +391,14 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
if not path:
continue
path = _ConvertToCygpath(path)
- versions.append(_CreateVersion(version_to_year[version] + 'e',
- os.path.join(path, '..'), sdk_based=True))
+ if version != '14.0': # There is no Express edition for 2015.
+ versions.append(_CreateVersion(version_to_year[version] + 'e',
+ os.path.join(path, '..'), sdk_based=True))
return versions
-def SelectVisualStudioVersion(version='auto'):
+def SelectVisualStudioVersion(version='auto', allow_fallback=True):
"""Select which version of Visual Studio projects to generate.
Arguments:
@@ -379,7 +410,7 @@ def SelectVisualStudioVersion(version='auto'):
if version == 'auto':
version = os.environ.get('GYP_MSVS_VERSION', 'auto')
version_map = {
- 'auto': ('12.0', '10.0', '9.0', '8.0', '11.0'),
+ 'auto': ('14.0', '12.0', '10.0', '9.0', '8.0', '11.0'),
'2005': ('8.0',),
'2005e': ('8.0',),
'2008': ('9.0',),
@@ -390,6 +421,7 @@ def SelectVisualStudioVersion(version='auto'):
'2012e': ('11.0',),
'2013': ('12.0',),
'2013e': ('12.0',),
+ '2015': ('14.0',),
}
override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
if override_path:
@@ -401,6 +433,8 @@ def SelectVisualStudioVersion(version='auto'):
version = str(version)
versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
if not versions:
+ if not allow_fallback:
+ raise ValueError('Could not locate Visual Studio installation.')
if version == 'auto':
# Default to 2005 if we couldn't find anything
return _CreateVersion('2005', None)
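With the new allow_fallback parameter, callers can fail fast instead of silently getting the 2005 default; a minimal sketch:

    try:
      version = SelectVisualStudioVersion('2015', allow_fallback=False)
    except ValueError as e:
      print(e)  # Could not locate Visual Studio installation.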
diff --git a/deps/gyp/pylib/gyp/__init__.py b/deps/gyp/pylib/gyp/__init__.py
index 30edea5675..668f38b60d 100755
--- a/deps/gyp/pylib/gyp/__init__.py
+++ b/deps/gyp/pylib/gyp/__init__.py
@@ -49,7 +49,7 @@ def FindBuildFiles():
def Load(build_files, format, default_variables={},
includes=[], depth='.', params=None, check=False,
- circular_check=True):
+ circular_check=True, duplicate_basename_check=True):
"""
Loads one or more specified build files.
default_variables and includes will be copied before use.
@@ -59,7 +59,6 @@ def Load(build_files, format, default_variables={},
if params is None:
params = {}
- flavor = None
if '-' in format:
format, params['flavor'] = format.split('-', 1)
@@ -69,6 +68,7 @@ def Load(build_files, format, default_variables={},
# named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
# avoiding collisions with user and automatic variables.
default_variables['GENERATOR'] = format
+ default_variables['GENERATOR_FLAVOR'] = params.get('flavor', '')
# Format can be a custom python file, or by default the name of a module
# within gyp.generator.
@@ -126,6 +126,7 @@ def Load(build_files, format, default_variables={},
# Process the input specific to this generator.
result = gyp.input.Load(build_files, default_variables, includes[:],
depth, generator_input_info, check, circular_check,
+ duplicate_basename_check,
params['parallel'], params['root_targets'])
return [generator] + result
@@ -324,6 +325,16 @@ def gyp_main(args):
parser.add_option('--no-circular-check', dest='circular_check',
action='store_false', default=True, regenerate=False,
help="don't check for circular relationships between files")
+ # --no-duplicate-basename-check disables the check for duplicate basenames
+  # in a static_library/shared_library project. The Visual C++ 2008 generator
+  # doesn't support this configuration. Libtool on Mac also generates warnings
+  # when duplicate basenames are passed into the Make generator.
+ # TODO(yukawa): Remove this option when these legacy generators are
+ # deprecated.
+ parser.add_option('--no-duplicate-basename-check',
+ dest='duplicate_basename_check', action='store_false',
+ default=True, regenerate=False,
+ help="don't check for duplicate basenames")
parser.add_option('--no-parallel', action='store_true', default=False,
help='Disable multiprocessing')
parser.add_option('-S', '--suffix', dest='suffix', default='',
@@ -371,7 +382,7 @@ def gyp_main(args):
if options.use_environment:
generate_formats = os.environ.get('GYP_GENERATORS', [])
if generate_formats:
- generate_formats = re.split('[\s,]', generate_formats)
+ generate_formats = re.split(r'[\s,]', generate_formats)
if generate_formats:
options.formats = generate_formats
else:
@@ -493,14 +504,14 @@ def gyp_main(args):
'gyp_binary': sys.argv[0],
'home_dot_gyp': home_dot_gyp,
'parallel': options.parallel,
- 'root_targets': options.root_targets}
+ 'root_targets': options.root_targets,
+ 'target_arch': cmdline_default_variables.get('target_arch', '')}
# Start with the default variables from the command line.
- [generator, flat_list, targets, data] = Load(build_files, format,
- cmdline_default_variables,
- includes, options.depth,
- params, options.check,
- options.circular_check)
+ [generator, flat_list, targets, data] = Load(
+ build_files, format, cmdline_default_variables, includes, options.depth,
+ params, options.check, options.circular_check,
+ options.duplicate_basename_check)
# TODO(mark): Pass |data| for now because the generator needs a list of
# build files that came in. In the future, maybe it should just accept
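An illustrative invocation of the new flag, assuming gyp's usual main() entry point (generator and paths are hypothetical):

    import gyp
    # Skip the duplicate-basename check for a legacy generator run.
    gyp.main(['--no-duplicate-basename-check', '-f', 'msvs', 'build/all.gyp'])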
diff --git a/deps/gyp/pylib/gyp/common.py b/deps/gyp/pylib/gyp/common.py
index df71d973e1..256e3f3a6b 100644
--- a/deps/gyp/pylib/gyp/common.py
+++ b/deps/gyp/pylib/gyp/common.py
@@ -131,13 +131,20 @@ def QualifiedTarget(build_file, target, toolset):
@memoize
-def RelativePath(path, relative_to):
+def RelativePath(path, relative_to, follow_path_symlink=True):
# Assuming both |path| and |relative_to| are relative to the current
# directory, returns a relative path that identifies path relative to
# relative_to.
+  # If |follow_path_symlink| is true (default) and |path| is a symlink, then
+  # this method returns a path to the real file represented by |path|. If it
+  # is false, this method returns a path to the symlink. If |path| is not a
+  # symlink, this option has no effect.
# Convert to normalized (and therefore absolute paths).
- path = os.path.realpath(path)
+ if follow_path_symlink:
+ path = os.path.realpath(path)
+ else:
+ path = os.path.abspath(path)
relative_to = os.path.realpath(relative_to)
# On Windows, we can't create a relative path to a different drive, so just
@@ -329,7 +336,7 @@ def WriteOnDiff(filename):
the target if it differs (on close).
"""
- class Writer:
+ class Writer(object):
"""Wrapper around file which only covers the target if it differs."""
def __init__(self):
# Pick temporary file.
@@ -418,6 +425,8 @@ def GetFlavor(params):
return 'freebsd'
if sys.platform.startswith('openbsd'):
return 'openbsd'
+ if sys.platform.startswith('netbsd'):
+ return 'netbsd'
if sys.platform.startswith('aix'):
return 'aix'
@@ -548,7 +557,7 @@ class CycleError(Exception):
def TopologicallySorted(graph, get_edges):
- """Topologically sort based on a user provided edge definition.
+ r"""Topologically sort based on a user provided edge definition.
Args:
graph: A list of node names.
@@ -586,3 +595,14 @@ def TopologicallySorted(graph, get_edges):
for node in sorted(graph):
Visit(node)
return ordered_nodes
+
+def CrossCompileRequested():
+ # TODO: figure out how to not build extra host objects in the
+ # non-cross-compile case when this is enabled, and enable unconditionally.
+ return (os.environ.get('GYP_CROSSCOMPILE') or
+ os.environ.get('AR_host') or
+ os.environ.get('CC_host') or
+ os.environ.get('CXX_host') or
+ os.environ.get('AR_target') or
+ os.environ.get('CC_target') or
+ os.environ.get('CXX_target'))
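A minimal sketch of the new helper: setting any of the listed environment variables makes it truthy.

    import os
    os.environ['GYP_CROSSCOMPILE'] = '1'
    print(bool(CrossCompileRequested()))  # True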
diff --git a/deps/gyp/pylib/gyp/flock_tool.py b/deps/gyp/pylib/gyp/flock_tool.py
index 3e7efff26e..b38d8660f7 100755
--- a/deps/gyp/pylib/gyp/flock_tool.py
+++ b/deps/gyp/pylib/gyp/flock_tool.py
@@ -40,7 +40,12 @@ class FlockTool(object):
# with EBADF, that's why we use this F_SETLK
# hack instead.
fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666)
- op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
+ if sys.platform.startswith('aix'):
+ # Python on AIX is compiled with LARGEFILE support, which changes the
+ # struct size.
+ op = struct.pack('hhIllqq', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
+ else:
+ op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
fcntl.fcntl(fd, fcntl.F_SETLK, op)
return subprocess.call(cmd_list)
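The AIX branch exists because Python built with LARGEFILE support uses a different struct flock layout; the packed sizes differ, which struct.calcsize makes visible (exact values are platform-dependent):

    import struct
    # The two flock layouts packed above.
    print(struct.calcsize('hhllhhl'), struct.calcsize('hhIllqq'))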
diff --git a/deps/gyp/pylib/gyp/generator/analyzer.py b/deps/gyp/pylib/gyp/generator/analyzer.py
new file mode 100644
index 0000000000..921c1a6b71
--- /dev/null
+++ b/deps/gyp/pylib/gyp/generator/analyzer.py
@@ -0,0 +1,741 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This script is intended for use as a GYP_GENERATOR. It takes as input (by way of
+the generator flag config_path) the path of a json file that dictates the files
+and targets to search for. The following keys are supported:
+files: list of paths (relative) of the files to search for.
+test_targets: unqualified target names to search for. Any target in this list
+that depends upon a file in |files| is output regardless of the type of target
+or chain of dependencies.
+additional_compile_targets: Unqualified targets to search for in addition to
+test_targets. Targets in the combined list that depend upon a file in |files|
+are not necessarily output. For example, if the target is of type none then the
+target is not output (but one of the descendants of the target will be).
+
+The following is output:
+error: only supplied if there is an error.
+compile_targets: minimal set of targets that directly or indirectly (for
+ targets of type none) depend on the files in |files| and is one of the
+ supplied targets or a target that one of the supplied targets depends on.
+ The expectation is this set of targets is passed into a build step. This list
+ always contains the output of test_targets as well.
+test_targets: set of targets from the supplied |test_targets| that either
+  directly or indirectly depend upon a file in |files|. This list is useful
+ if additional processing needs to be done for certain targets after the
+ build, such as running tests.
+status: outputs one of three values: none of the supplied files were found,
+ one of the include files changed so that it should be assumed everything
+ changed (in this case test_targets and compile_targets are not output) or at
+ least one file was found.
+invalid_targets: list of supplied targets that were not found.
+
+Example:
+Consider a graph like the following:
+  A       D
+ / \
+B   C
+A depends upon both B and C, A is of type none and B and C are executables.
+D is an executable, has no dependencies and nothing depends on it.
+If |additional_compile_targets| = ["A"], |test_targets| = ["B", "C"] and
+files = ["b.cc", "d.cc"] (B depends upon b.cc and D depends upon d.cc), then
+the following is output:
+|compile_targets| = ["B"] B must be built as it depends upon the changed file
+b.cc and the supplied target A depends upon it. A is not output as a
+build_target as it is of type none with no rules and actions.
+|test_targets| = ["B"] B directly depends upon the changed file b.cc.
+
+Even though the file d.cc, which D depends upon, has changed, D is not output
+as it was not supplied by way of |additional_compile_targets| or |test_targets|.
+
+If the generator flag analyzer_output_path is specified, output is written
+there. Otherwise output is written to stdout.
+
+In Gyp the "all" target is shorthand for the root targets in the files passed
+to gyp. For example, if file "a.gyp" contains targets "a1" and
+"a2", and file "b.gyp" contains targets "b1" and "b2" and "a2" has a dependency
+on "b2" and gyp is supplied "a.gyp" then "all" consists of "a1" and "a2".
+Notice that "b1" and "b2" are not in the "all" target as "b.gyp" was not
+directly supplied to gyp. On the other hand, if both "a.gyp" and "b.gyp" are
+supplied to gyp then the "all" target includes "b1" and "b2".
+"""
+
+import gyp.common
+import gyp.ninja_syntax as ninja_syntax
+import json
+import os
+import posixpath
+import sys
+
+debug = False
+
+found_dependency_string = 'Found dependency'
+no_dependency_string = 'No dependencies'
+# Status when it should be assumed that everything has changed.
+all_changed_string = 'Found dependency (all)'
+
+# MatchStatus is used to indicate if and how a target depends upon the
+# supplied sources.
+# The target's sources contain one of the supplied paths.
+MATCH_STATUS_MATCHES = 1
+# The target has a dependency on another target that contains one of the
+# supplied paths.
+MATCH_STATUS_MATCHES_BY_DEPENDENCY = 2
+# The target's sources weren't in the supplied paths and none of the target's
+# dependencies depend upon a target that matched.
+MATCH_STATUS_DOESNT_MATCH = 3
+# The target doesn't contain the source, but the dependent targets have not
+# yet been visited to determine a more specific status.
+MATCH_STATUS_TBD = 4
+
+generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
+
+generator_wants_static_library_dependencies_adjusted = False
+
+generator_default_variables = {
+}
+for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
+ 'LIB_DIR', 'SHARED_LIB_DIR']:
+ generator_default_variables[dirname] = '!!!'
+
+for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
+ 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
+ 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
+ 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
+ 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
+ 'CONFIGURATION_NAME']:
+ generator_default_variables[unused] = ''
+
+
+def _ToGypPath(path):
+ """Converts a path to the format used by gyp."""
+ if os.sep == '\\' and os.altsep == '/':
+ return path.replace('\\', '/')
+ return path
+
+
+def _ResolveParent(path, base_path_components):
+ """Resolves |path|, which starts with at least one '../'. Returns an empty
+ string if the path shouldn't be considered. See _AddSources() for a
+ description of |base_path_components|."""
+ depth = 0
+ while path.startswith('../'):
+ depth += 1
+ path = path[3:]
+ # Relative includes may go outside the source tree. For example, an action may
+ # have inputs in /usr/include, which are not in the source tree.
+ if depth > len(base_path_components):
+ return ''
+ if depth == len(base_path_components):
+ return path
+ return '/'.join(base_path_components[0:len(base_path_components) - depth]) + \
+ '/' + path
+
+
+def _AddSources(sources, base_path, base_path_components, result):
+ """Extracts valid sources from |sources| and adds them to |result|. Each
+ source file is relative to |base_path|, but may contain '..'. To make
+ resolving '..' easier |base_path_components| contains each of the
+ directories in |base_path|. Additionally each source may contain variables.
+ Such sources are ignored as it is assumed dependencies on them are expressed
+ and tracked in some other means."""
+ # NOTE: gyp paths are always posix style.
+ for source in sources:
+ if not len(source) or source.startswith('!!!') or source.startswith('$'):
+ continue
+ # variable expansion may lead to //.
+ org_source = source
+ source = source[0] + source[1:].replace('//', '/')
+ if source.startswith('../'):
+ source = _ResolveParent(source, base_path_components)
+ if len(source):
+ result.append(source)
+ continue
+ result.append(base_path + source)
+ if debug:
+ print 'AddSource', org_source, result[len(result) - 1]
+
+
+def _ExtractSourcesFromAction(action, base_path, base_path_components,
+ results):
+ if 'inputs' in action:
+ _AddSources(action['inputs'], base_path, base_path_components, results)
+
+
+def _ToLocalPath(toplevel_dir, path):
+ """Converts |path| to a path relative to |toplevel_dir|."""
+ if path == toplevel_dir:
+ return ''
+ if path.startswith(toplevel_dir + '/'):
+ return path[len(toplevel_dir) + len('/'):]
+ return path
+
+
+def _ExtractSources(target, target_dict, toplevel_dir):
+ # |target| is either absolute or relative and in the format of the OS. Gyp
+ # source paths are always posix. Convert |target| to a posix path relative to
+ # |toplevel_dir_|. This is done to make it easy to build source paths.
+ base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target)))
+ base_path_components = base_path.split('/')
+
+ # Add a trailing '/' so that _AddSources() can easily build paths.
+ if len(base_path):
+ base_path += '/'
+
+ if debug:
+ print 'ExtractSources', target, base_path
+
+ results = []
+ if 'sources' in target_dict:
+ _AddSources(target_dict['sources'], base_path, base_path_components,
+ results)
+ # Include the inputs from any actions. Any changes to these affect the
+ # resulting output.
+ if 'actions' in target_dict:
+ for action in target_dict['actions']:
+ _ExtractSourcesFromAction(action, base_path, base_path_components,
+ results)
+ if 'rules' in target_dict:
+ for rule in target_dict['rules']:
+ _ExtractSourcesFromAction(rule, base_path, base_path_components, results)
+
+ return results
+
+
+class Target(object):
+ """Holds information about a particular target:
+  deps: set of Targets this Target depends upon. This is not recursive, only
+  the direct dependencies.
+ match_status: one of the MatchStatus values.
+ back_deps: set of Targets that have a dependency on this Target.
+ visited: used during iteration to indicate whether we've visited this target.
+ This is used for two iterations, once in building the set of Targets and
+ again in _GetBuildTargets().
+ name: fully qualified name of the target.
+ requires_build: True if the target type is such that it needs to be built.
+ See _DoesTargetTypeRequireBuild for details.
+ added_to_compile_targets: used when determining if the target was added to the
+ set of targets that needs to be built.
+ in_roots: true if this target is a descendant of one of the root nodes.
+ is_executable: true if the type of target is executable.
+ is_static_library: true if the type of target is static_library.
+ is_or_has_linked_ancestor: true if the target does a link (eg executable), or
+ if there is a target in back_deps that does a link."""
+ def __init__(self, name):
+ self.deps = set()
+ self.match_status = MATCH_STATUS_TBD
+ self.back_deps = set()
+ self.name = name
+ # TODO(sky): I don't like hanging this off Target. This state is specific
+ # to certain functions and should be isolated there.
+ self.visited = False
+ self.requires_build = False
+ self.added_to_compile_targets = False
+ self.in_roots = False
+ self.is_executable = False
+ self.is_static_library = False
+ self.is_or_has_linked_ancestor = False
+
+
+class Config(object):
+ """Details what we're looking for
+ files: set of files to search for
+ targets: see file description for details."""
+ def __init__(self):
+ self.files = []
+ self.targets = set()
+ self.additional_compile_target_names = set()
+ self.test_target_names = set()
+
+ def Init(self, params):
+ """Initializes Config. This is a separate method as it raises an exception
+ if there is a parse error."""
+ generator_flags = params.get('generator_flags', {})
+ config_path = generator_flags.get('config_path', None)
+ if not config_path:
+ return
+ try:
+ f = open(config_path, 'r')
+ config = json.load(f)
+ f.close()
+ except IOError:
+ raise Exception('Unable to open file ' + config_path)
+ except ValueError as e:
+ raise Exception('Unable to parse config file ' + config_path + str(e))
+ if not isinstance(config, dict):
+ raise Exception('config_path must be a JSON file containing a dictionary')
+ self.files = config.get('files', [])
+ self.additional_compile_target_names = set(
+ config.get('additional_compile_targets', []))
+ self.test_target_names = set(config.get('test_targets', []))
+
+
+def _WasBuildFileModified(build_file, data, files, toplevel_dir):
+ """Returns true if the build file |build_file| is either in |files| or
+ one of the files included by |build_file| is in |files|. |toplevel_dir| is
+ the root of the source tree."""
+ if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
+ if debug:
+ print 'gyp file modified', build_file
+ return True
+
+ # First element of included_files is the file itself.
+ if len(data[build_file]['included_files']) <= 1:
+ return False
+
+ for include_file in data[build_file]['included_files'][1:]:
+ # |included_files| are relative to the directory of the |build_file|.
+ rel_include_file = \
+ _ToGypPath(gyp.common.UnrelativePath(include_file, build_file))
+ if _ToLocalPath(toplevel_dir, rel_include_file) in files:
+ if debug:
+ print 'included gyp file modified, gyp_file=', build_file, \
+ 'included file=', rel_include_file
+ return True
+ return False
+
+
+def _GetOrCreateTargetByName(targets, target_name):
+ """Creates or returns the Target at targets[target_name]. If there is no
+ Target for |target_name| one is created. Returns a tuple of whether a new
+ Target was created and the Target."""
+ if target_name in targets:
+ return False, targets[target_name]
+ target = Target(target_name)
+ targets[target_name] = target
+ return True, target
+
+
+def _DoesTargetTypeRequireBuild(target_dict):
+ """Returns true if the target type is such that it needs to be built."""
+ # If a 'none' target has rules or actions we assume it requires a build.
+ return bool(target_dict['type'] != 'none' or
+ target_dict.get('actions') or target_dict.get('rules'))
+
+
+def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files,
+ build_files):
+ """Returns a tuple of the following:
+ . A dictionary mapping from fully qualified name to Target.
+ . A list of the targets that have a source file in |files|.
+ . Targets that constitute the 'all' target. See description at top of file
+ for details on the 'all' target.
+ This sets the |match_status| of the targets that contain any of the source
+ files in |files| to MATCH_STATUS_MATCHES.
+ |toplevel_dir| is the root of the source tree."""
+ # Maps from target name to Target.
+ name_to_target = {}
+
+ # Targets that matched.
+ matching_targets = []
+
+ # Queue of targets to visit.
+ targets_to_visit = target_list[:]
+
+ # Maps from build file to a boolean indicating whether the build file is in
+ # |files|.
+ build_file_in_files = {}
+
+ # Root targets across all files.
+ roots = set()
+
+ # Set of Targets in |build_files|.
+ build_file_targets = set()
+
+ while len(targets_to_visit) > 0:
+ target_name = targets_to_visit.pop()
+ created_target, target = _GetOrCreateTargetByName(name_to_target,
+ target_name)
+ if created_target:
+ roots.add(target)
+ elif target.visited:
+ continue
+
+ target.visited = True
+ target.requires_build = _DoesTargetTypeRequireBuild(
+ target_dicts[target_name])
+ target_type = target_dicts[target_name]['type']
+ target.is_executable = target_type == 'executable'
+ target.is_static_library = target_type == 'static_library'
+ target.is_or_has_linked_ancestor = (target_type == 'executable' or
+ target_type == 'shared_library')
+
+ build_file = gyp.common.ParseQualifiedTarget(target_name)[0]
+ if not build_file in build_file_in_files:
+ build_file_in_files[build_file] = \
+ _WasBuildFileModified(build_file, data, files, toplevel_dir)
+
+ if build_file in build_files:
+ build_file_targets.add(target)
+
+ # If a build file (or any of its included files) is modified we assume all
+ # targets in the file are modified.
+ if build_file_in_files[build_file]:
+ print 'matching target from modified build file', target_name
+ target.match_status = MATCH_STATUS_MATCHES
+ matching_targets.append(target)
+ else:
+ sources = _ExtractSources(target_name, target_dicts[target_name],
+ toplevel_dir)
+ for source in sources:
+ if _ToGypPath(os.path.normpath(source)) in files:
+ print 'target', target_name, 'matches', source
+ target.match_status = MATCH_STATUS_MATCHES
+ matching_targets.append(target)
+ break
+
+ # Add dependencies to visit as well as updating back pointers for deps.
+ for dep in target_dicts[target_name].get('dependencies', []):
+ targets_to_visit.append(dep)
+
+ created_dep_target, dep_target = _GetOrCreateTargetByName(name_to_target,
+ dep)
+ if not created_dep_target:
+ roots.discard(dep_target)
+
+ target.deps.add(dep_target)
+ dep_target.back_deps.add(target)
+
+ return name_to_target, matching_targets, roots & build_file_targets
+
+
+def _GetUnqualifiedToTargetMapping(all_targets, to_find):
+ """Returns a tuple of the following:
+ . mapping (dictionary) from unqualified name to Target for all the
+ Targets in |to_find|.
+ . any target names not found. If this is empty all targets were found."""
+ result = {}
+ if not to_find:
+ return {}, []
+ to_find = set(to_find)
+ for target_name in all_targets.keys():
+ extracted = gyp.common.ParseQualifiedTarget(target_name)
+ if len(extracted) > 1 and extracted[1] in to_find:
+ to_find.remove(extracted[1])
+ result[extracted[1]] = all_targets[target_name]
+ if not to_find:
+ return result, []
+ return result, [x for x in to_find]
+
+
+def _DoesTargetDependOnMatchingTargets(target):
+ """Returns true if |target| or any of its dependencies is one of the
+ targets containing the files supplied as input to analyzer. This updates
+ |matches| of the Targets as it recurses.
+ target: the Target to look for."""
+ if target.match_status == MATCH_STATUS_DOESNT_MATCH:
+ return False
+ if target.match_status == MATCH_STATUS_MATCHES or \
+ target.match_status == MATCH_STATUS_MATCHES_BY_DEPENDENCY:
+ return True
+ for dep in target.deps:
+ if _DoesTargetDependOnMatchingTargets(dep):
+ target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
+ print '\t', target.name, 'matches by dep', dep.name
+ return True
+ target.match_status = MATCH_STATUS_DOESNT_MATCH
+ return False
+
+
+def _GetTargetsDependingOnMatchingTargets(possible_targets):
+ """Returns the list of Targets in |possible_targets| that depend (either
+  directly or indirectly) on at least one of the targets containing the files
+ supplied as input to analyzer.
+ possible_targets: targets to search from."""
+ found = []
+ print 'Targets that matched by dependency:'
+ for target in possible_targets:
+ if _DoesTargetDependOnMatchingTargets(target):
+ found.append(target)
+ return found
+
+
+def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
+ """Recurses through all targets that depend on |target|, adding all targets
+ that need to be built (and are in |roots|) to |result|.
+ roots: set of root targets.
+ add_if_no_ancestor: If true and there are no ancestors of |target| then add
+ |target| to |result|. |target| must still be in |roots|.
+ result: targets that need to be built are added here."""
+ if target.visited:
+ return
+
+ target.visited = True
+ target.in_roots = target in roots
+
+ for back_dep_target in target.back_deps:
+ _AddCompileTargets(back_dep_target, roots, False, result)
+ target.added_to_compile_targets |= back_dep_target.added_to_compile_targets
+ target.in_roots |= back_dep_target.in_roots
+ target.is_or_has_linked_ancestor |= (
+ back_dep_target.is_or_has_linked_ancestor)
+
+ # Always add 'executable' targets. Even though they may be built by other
+ # targets that depend upon them it makes detection of what is going to be
+ # built easier.
+ # And always add static_libraries that have no dependencies on them from
+ # linkables. This is necessary as the other dependencies on them may be
+ # static libraries themselves, which are not compile time dependencies.
+ if target.in_roots and \
+ (target.is_executable or
+ (not target.added_to_compile_targets and
+ (add_if_no_ancestor or target.requires_build)) or
+ (target.is_static_library and add_if_no_ancestor and
+ not target.is_or_has_linked_ancestor)):
+ print '\t\tadding to compile targets', target.name, 'executable', \
+ target.is_executable, 'added_to_compile_targets', \
+ target.added_to_compile_targets, 'add_if_no_ancestor', \
+ add_if_no_ancestor, 'requires_build', target.requires_build, \
+ 'is_static_library', target.is_static_library, \
+ 'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor
+ result.add(target)
+ target.added_to_compile_targets = True
+
+
+def _GetCompileTargets(matching_targets, supplied_targets):
+ """Returns the set of Targets that require a build.
+ matching_targets: targets that changed and need to be built.
+ supplied_targets: set of targets supplied to analyzer to search from."""
+ result = set()
+ for target in matching_targets:
+ print 'finding compile targets for match', target.name
+ _AddCompileTargets(target, supplied_targets, True, result)
+ return result
+
+
+def _WriteOutput(params, **values):
+  """Writes the output, either to stdout or to a file if specified."""
+ if 'error' in values:
+ print 'Error:', values['error']
+ if 'status' in values:
+ print values['status']
+ if 'targets' in values:
+ values['targets'].sort()
+ print 'Supplied targets that depend on changed files:'
+ for target in values['targets']:
+ print '\t', target
+ if 'invalid_targets' in values:
+ values['invalid_targets'].sort()
+ print 'The following targets were not found:'
+ for target in values['invalid_targets']:
+ print '\t', target
+ if 'build_targets' in values:
+ values['build_targets'].sort()
+ print 'Targets that require a build:'
+ for target in values['build_targets']:
+ print '\t', target
+ if 'compile_targets' in values:
+ values['compile_targets'].sort()
+ print 'Targets that need to be built:'
+ for target in values['compile_targets']:
+ print '\t', target
+ if 'test_targets' in values:
+ values['test_targets'].sort()
+ print 'Test targets:'
+ for target in values['test_targets']:
+ print '\t', target
+
+ output_path = params.get('generator_flags', {}).get(
+ 'analyzer_output_path', None)
+ if not output_path:
+ print json.dumps(values)
+ return
+ try:
+ f = open(output_path, 'w')
+ f.write(json.dumps(values) + '\n')
+ f.close()
+ except IOError as e:
+ print 'Error writing to output file', output_path, str(e)
+
+
+def _WasGypIncludeFileModified(params, files):
+ """Returns true if one of the files in |files| is in the set of included
+ files."""
+ if params['options'].includes:
+ for include in params['options'].includes:
+ if _ToGypPath(os.path.normpath(include)) in files:
+ print 'Include file modified, assuming all changed', include
+ return True
+ return False
+
+
+def _NamesNotIn(names, mapping):
+ """Returns a list of the values in |names| that are not in |mapping|."""
+ return [name for name in names if name not in mapping]
+
+
+def _LookupTargets(names, mapping):
+ """Returns a list of the mapping[name] for each value in |names| that is in
+ |mapping|."""
+ return [mapping[name] for name in names if name in mapping]
+
+
+def CalculateVariables(default_variables, params):
+ """Calculate additional variables for use in the build (called by gyp)."""
+ flavor = gyp.common.GetFlavor(params)
+ if flavor == 'mac':
+ default_variables.setdefault('OS', 'mac')
+ elif flavor == 'win':
+ default_variables.setdefault('OS', 'win')
+ # Copy additional generator configuration data from VS, which is shared
+ # by the Windows Ninja generator.
+ import gyp.generator.msvs as msvs_generator
+ generator_additional_non_configuration_keys = getattr(msvs_generator,
+ 'generator_additional_non_configuration_keys', [])
+ generator_additional_path_sections = getattr(msvs_generator,
+ 'generator_additional_path_sections', [])
+
+ gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
+ else:
+ operating_system = flavor
+ if flavor == 'android':
+ operating_system = 'linux' # Keep this legacy behavior for now.
+ default_variables.setdefault('OS', operating_system)
+
+
+class TargetCalculator(object):
+ """Calculates the matching test_targets and matching compile_targets."""
+ def __init__(self, files, additional_compile_target_names, test_target_names,
+ data, target_list, target_dicts, toplevel_dir, build_files):
+ self._additional_compile_target_names = set(additional_compile_target_names)
+ self._test_target_names = set(test_target_names)
+ self._name_to_target, self._changed_targets, self._root_targets = (
+ _GenerateTargets(data, target_list, target_dicts, toplevel_dir,
+ frozenset(files), build_files))
+ self._unqualified_mapping, self.invalid_targets = (
+ _GetUnqualifiedToTargetMapping(self._name_to_target,
+ self._supplied_target_names_no_all()))
+
+ def _supplied_target_names(self):
+ return self._additional_compile_target_names | self._test_target_names
+
+ def _supplied_target_names_no_all(self):
+ """Returns the supplied test targets without 'all'."""
+    result = self._supplied_target_names()
+ result.discard('all')
+ return result
+
+ def is_build_impacted(self):
+ """Returns true if the supplied files impact the build at all."""
+ return self._changed_targets
+
+ def find_matching_test_target_names(self):
+ """Returns the set of output test targets."""
+ assert self.is_build_impacted()
+ # Find the test targets first. 'all' is special cased to mean all the
+    # root targets. To deal with this, all the supplied |test_targets| are
+    # expanded to include the root targets during lookup. If any of the root
+    # targets match, we remove them and replace them with 'all'.
+ test_target_names_no_all = set(self._test_target_names)
+ test_target_names_no_all.discard('all')
+ test_targets_no_all = _LookupTargets(test_target_names_no_all,
+ self._unqualified_mapping)
+ test_target_names_contains_all = 'all' in self._test_target_names
+ if test_target_names_contains_all:
+ test_targets = [x for x in (set(test_targets_no_all) |
+ set(self._root_targets))]
+ else:
+ test_targets = [x for x in test_targets_no_all]
+ print 'supplied test_targets'
+ for target_name in self._test_target_names:
+ print '\t', target_name
+ print 'found test_targets'
+ for target in test_targets:
+ print '\t', target.name
+ print 'searching for matching test targets'
+ matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
+ matching_test_targets_contains_all = (test_target_names_contains_all and
+ set(matching_test_targets) &
+ set(self._root_targets))
+ if matching_test_targets_contains_all:
+      # Remove any of the targets for 'all' that were not explicitly supplied;
+      # 'all' is subsequently added to the matching names below.
+ matching_test_targets = [x for x in (set(matching_test_targets) &
+ set(test_targets_no_all))]
+ print 'matched test_targets'
+ for target in matching_test_targets:
+ print '\t', target.name
+ matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1]
+ for target in matching_test_targets]
+ if matching_test_targets_contains_all:
+ matching_target_names.append('all')
+ print '\tall'
+ return matching_target_names
+
+ def find_matching_compile_target_names(self):
+ """Returns the set of output compile targets."""
+    assert self.is_build_impacted()
+ # Compile targets are found by searching up from changed targets.
+ # Reset the visited status for _GetBuildTargets.
+ for target in self._name_to_target.itervalues():
+ target.visited = False
+
+ supplied_targets = _LookupTargets(self._supplied_target_names_no_all(),
+ self._unqualified_mapping)
+ if 'all' in self._supplied_target_names():
+ supplied_targets = [x for x in (set(supplied_targets) |
+ set(self._root_targets))]
+ print 'Supplied test_targets & compile_targets'
+ for target in supplied_targets:
+ print '\t', target.name
+ print 'Finding compile targets'
+ compile_targets = _GetCompileTargets(self._changed_targets,
+ supplied_targets)
+ return [gyp.common.ParseQualifiedTarget(target.name)[1]
+ for target in compile_targets]
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ """Called by gyp as the final stage. Outputs results."""
+ config = Config()
+ try:
+ config.Init(params)
+
+ if not config.files:
+ raise Exception('Must specify files to analyze via config_path generator '
+ 'flag')
+
+ toplevel_dir = _ToGypPath(os.path.abspath(params['options'].toplevel_dir))
+ if debug:
+ print 'toplevel_dir', toplevel_dir
+
+ if _WasGypIncludeFileModified(params, config.files):
+ result_dict = { 'status': all_changed_string,
+ 'test_targets': list(config.test_target_names),
+ 'compile_targets': list(
+ config.additional_compile_target_names |
+ config.test_target_names) }
+ _WriteOutput(params, **result_dict)
+ return
+
+ calculator = TargetCalculator(config.files,
+ config.additional_compile_target_names,
+ config.test_target_names, data,
+ target_list, target_dicts, toplevel_dir,
+ params['build_files'])
+ if not calculator.is_build_impacted():
+ result_dict = { 'status': no_dependency_string,
+ 'test_targets': [],
+ 'compile_targets': [] }
+ if calculator.invalid_targets:
+ result_dict['invalid_targets'] = calculator.invalid_targets
+ _WriteOutput(params, **result_dict)
+ return
+
+ test_target_names = calculator.find_matching_test_target_names()
+ compile_target_names = calculator.find_matching_compile_target_names()
+ found_at_least_one_target = compile_target_names or test_target_names
+ result_dict = { 'test_targets': test_target_names,
+ 'status': found_dependency_string if
+ found_at_least_one_target else no_dependency_string,
+ 'compile_targets': list(
+ set(compile_target_names) |
+ set(test_target_names)) }
+ if calculator.invalid_targets:
+ result_dict['invalid_targets'] = calculator.invalid_targets
+ _WriteOutput(params, **result_dict)
+
+ except Exception as e:
+ _WriteOutput(params, error=str(e))
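A hypothetical end-to-end run of this generator, reusing the example from the module docstring (A depends on B and C; b.cc and d.cc changed):

    import json
    config = {
        'files': ['b.cc', 'd.cc'],
        'additional_compile_targets': ['A'],
        'test_targets': ['B', 'C'],
    }
    with open('analyzer_config.json', 'w') as f:
      json.dump(config, f)
    # Then run, for example:
    #   GYP_GENERATORS=analyzer gyp -G config_path=analyzer_config.json all.gyp
    # Per the docstring, the output is compile_targets=["B"] and
    # test_targets=["B"].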
diff --git a/deps/gyp/pylib/gyp/generator/android.py b/deps/gyp/pylib/gyp/generator/android.py
deleted file mode 100644
index 39884749b1..0000000000
--- a/deps/gyp/pylib/gyp/generator/android.py
+++ /dev/null
@@ -1,1090 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Notes:
-#
-# This generates makefiles suitable for inclusion into the Android build system
-# via an Android.mk file. It is based on make.py, the standard makefile
-# generator.
-#
-# The code below generates a separate .mk file for each target, but
-# all are sourced by the top-level GypAndroid.mk. This means that all
-# variables in .mk-files clobber one another, and furthermore that any
-# variables set potentially clash with other Android build system variables.
-# Try to avoid setting global variables where possible.
-
-import gyp
-import gyp.common
-import gyp.generator.make as make # Reuse global functions from make backend.
-import os
-import re
-import subprocess
-
-generator_default_variables = {
- 'OS': 'android',
- 'EXECUTABLE_PREFIX': '',
- 'EXECUTABLE_SUFFIX': '',
- 'STATIC_LIB_PREFIX': 'lib',
- 'SHARED_LIB_PREFIX': 'lib',
- 'STATIC_LIB_SUFFIX': '.a',
- 'SHARED_LIB_SUFFIX': '.so',
- 'INTERMEDIATE_DIR': '$(gyp_intermediate_dir)',
- 'SHARED_INTERMEDIATE_DIR': '$(gyp_shared_intermediate_dir)',
- 'PRODUCT_DIR': '$(gyp_shared_intermediate_dir)',
- 'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)',
- 'LIB_DIR': '$(obj).$(TOOLSET)',
- 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python.
- 'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python.
- 'RULE_INPUT_PATH': '$(RULE_SOURCES)',
- 'RULE_INPUT_EXT': '$(suffix $<)',
- 'RULE_INPUT_NAME': '$(notdir $<)',
- 'CONFIGURATION_NAME': '$(GYP_CONFIGURATION)',
-}
-
-# Make supports multiple toolsets
-generator_supports_multiple_toolsets = True
-
-
-# Generator-specific gyp specs.
-generator_additional_non_configuration_keys = [
- # Boolean to declare that this target does not want its name mangled.
- 'android_unmangled_name',
-]
-generator_additional_path_sections = []
-generator_extra_sources_for_rules = []
-
-
-ALL_MODULES_FOOTER = """\
-# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from
-# all the included sub-makefiles. This is just here to clarify.
-gyp_all_modules:
-"""
-
-header = """\
-# This file is generated by gyp; do not edit.
-
-"""
-
-android_standard_include_paths = set([
- # JNI_H_INCLUDE in build/core/binary.mk
- 'dalvik/libnativehelper/include/nativehelper',
- # from SRC_HEADERS in build/core/config.mk
- 'system/core/include',
- 'hardware/libhardware/include',
- 'hardware/libhardware_legacy/include',
- 'hardware/ril/include',
- 'dalvik/libnativehelper/include',
- 'frameworks/native/include',
- 'frameworks/native/opengl/include',
- 'frameworks/base/include',
- 'frameworks/base/opengl/include',
- 'frameworks/base/native/include',
- 'external/skia/include',
- # TARGET_C_INCLUDES in build/core/combo/TARGET_linux-arm.mk
- 'bionic/libc/arch-arm/include',
- 'bionic/libc/include',
- 'bionic/libstdc++/include',
- 'bionic/libc/kernel/common',
- 'bionic/libc/kernel/arch-arm',
- 'bionic/libm/include',
- 'bionic/libm/include/arm',
- 'bionic/libthread_db/include',
- ])
-
-
-# Map gyp target types to Android module classes.
-MODULE_CLASSES = {
- 'static_library': 'STATIC_LIBRARIES',
- 'shared_library': 'SHARED_LIBRARIES',
- 'executable': 'EXECUTABLES',
-}
-
-
-def IsCPPExtension(ext):
- return make.COMPILABLE_EXTENSIONS.get(ext) == 'cxx'
-
-
-def Sourceify(path):
- """Convert a path to its source directory form. The Android backend does not
- support options.generator_output, so this function is a noop."""
- return path
-
-
-# Map from qualified target to path to output.
-# For Android, the target of these maps is a tuple ('static', 'modulename'),
-# ('dynamic', 'modulename'), or ('path', 'some/path') instead of a string,
-# since we link by module.
-target_outputs = {}
-# Map from qualified target to any linkable output. A subset
-# of target_outputs. E.g. when mybinary depends on liba, we want to
-# include liba in the linker line; when otherbinary depends on
-# mybinary, we just want to build mybinary first.
-target_link_deps = {}
-
-
-class AndroidMkWriter(object):
- """AndroidMkWriter packages up the writing of one target-specific Android.mk.
-
- Its only real entry point is Write(), and is mostly used for namespacing.
- """
-
- def __init__(self, android_top_dir):
- self.android_top_dir = android_top_dir
-
- def Write(self, qualified_target, relative_target, base_path, output_filename,
- spec, configs, part_of_all, write_alias_target):
- """The main entry point: writes a .mk file for a single target.
-
- Arguments:
- qualified_target: target we're generating
- relative_target: qualified target name relative to the root
- base_path: path relative to source root we're building in, used to resolve
- target-relative paths
- output_filename: output .mk file name to write
- spec, configs: gyp info
- part_of_all: flag indicating this target is part of 'all'
- write_alias_target: flag indicating whether to create short aliases for
- this target
- """
- gyp.common.EnsureDirExists(output_filename)
-
- self.fp = open(output_filename, 'w')
-
- self.fp.write(header)
-
- self.qualified_target = qualified_target
- self.relative_target = relative_target
- self.path = base_path
- self.target = spec['target_name']
- self.type = spec['type']
- self.toolset = spec['toolset']
-
- deps, link_deps = self.ComputeDeps(spec)
-
- # Some of the generation below can add extra output, sources, or
- # link dependencies. All of the out params of the functions that
- # follow use names like extra_foo.
- extra_outputs = []
- extra_sources = []
-
- self.android_class = MODULE_CLASSES.get(self.type, 'GYP')
- self.android_module = self.ComputeAndroidModule(spec)
- (self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec)
- self.output = self.output_binary = self.ComputeOutput(spec)
-
- # Standard header.
- self.WriteLn('include $(CLEAR_VARS)\n')
-
- # Module class and name.
- self.WriteLn('LOCAL_MODULE_CLASS := ' + self.android_class)
- self.WriteLn('LOCAL_MODULE := ' + self.android_module)
- # Only emit LOCAL_MODULE_STEM if it's different to LOCAL_MODULE.
- # The library module classes fail if the stem is set. ComputeOutputParts
- # makes sure that stem == modulename in these cases.
- if self.android_stem != self.android_module:
- self.WriteLn('LOCAL_MODULE_STEM := ' + self.android_stem)
- self.WriteLn('LOCAL_MODULE_SUFFIX := ' + self.android_suffix)
- self.WriteLn('LOCAL_MODULE_TAGS := optional')
- if self.toolset == 'host':
- self.WriteLn('LOCAL_IS_HOST_MODULE := true')
- else:
- self.WriteLn('LOCAL_MODULE_TARGET_ARCH := '
- '$(TARGET_$(GYP_VAR_PREFIX)ARCH)')
-
- # Grab output directories; needed for Actions and Rules.
- if self.toolset == 'host':
- self.WriteLn('gyp_intermediate_dir := '
- '$(call local-intermediates-dir)')
- else:
- self.WriteLn('gyp_intermediate_dir := '
- '$(call local-intermediates-dir,,$(GYP_VAR_PREFIX))')
- self.WriteLn('gyp_shared_intermediate_dir := '
- '$(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))')
- self.WriteLn()
-
- # List files this target depends on so that actions/rules/copies/sources
- # can depend on the list.
- # TODO: doesn't pull in things through transitive link deps; needed?
- target_dependencies = [x[1] for x in deps if x[0] == 'path']
- self.WriteLn('# Make sure our deps are built first.')
- self.WriteList(target_dependencies, 'GYP_TARGET_DEPENDENCIES',
- local_pathify=True)
-
- # Actions must come first, since they can generate more OBJs for use below.
- if 'actions' in spec:
- self.WriteActions(spec['actions'], extra_sources, extra_outputs)
-
- # Rules must be early like actions.
- if 'rules' in spec:
- self.WriteRules(spec['rules'], extra_sources, extra_outputs)
-
- if 'copies' in spec:
- self.WriteCopies(spec['copies'], extra_outputs)
-
- # GYP generated outputs.
- self.WriteList(extra_outputs, 'GYP_GENERATED_OUTPUTS', local_pathify=True)
-
- # Set LOCAL_ADDITIONAL_DEPENDENCIES so that Android's build rules depend
- # on both our dependency targets and our generated files.
- self.WriteLn('# Make sure our deps and generated files are built first.')
- self.WriteLn('LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) '
- '$(GYP_GENERATED_OUTPUTS)')
- self.WriteLn()
-
- # Sources.
- if spec.get('sources', []) or extra_sources:
- self.WriteSources(spec, configs, extra_sources)
-
- self.WriteTarget(spec, configs, deps, link_deps, part_of_all,
- write_alias_target)
-
- # Update global list of target outputs, used in dependency tracking.
- target_outputs[qualified_target] = ('path', self.output_binary)
-
- # Update global list of link dependencies.
- if self.type == 'static_library':
- target_link_deps[qualified_target] = ('static', self.android_module)
- elif self.type == 'shared_library':
- target_link_deps[qualified_target] = ('shared', self.android_module)
-
- self.fp.close()
- return self.android_module
-
-
- def WriteActions(self, actions, extra_sources, extra_outputs):
- """Write Makefile code for any 'actions' from the gyp input.
-
- extra_sources: a list that will be filled in with newly generated source
- files, if any
- extra_outputs: a list that will be filled in with any outputs of these
- actions (used to make other pieces dependent on these
- actions)
- """
- for action in actions:
- name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
- action['action_name']))
- self.WriteLn('### Rules for action "%s":' % action['action_name'])
- inputs = action['inputs']
- outputs = action['outputs']
-
- # Build up a list of outputs.
- # Collect the output dirs we'll need.
- dirs = set()
- for out in outputs:
- if not out.startswith('$'):
- print ('WARNING: Action for target "%s" writes output to local path '
- '"%s".' % (self.target, out))
- dir = os.path.split(out)[0]
- if dir:
- dirs.add(dir)
- if int(action.get('process_outputs_as_sources', False)):
- extra_sources += outputs
-
- # Prepare the actual command.
- command = gyp.common.EncodePOSIXShellList(action['action'])
- if 'message' in action:
- quiet_cmd = 'Gyp action: %s ($@)' % action['message']
- else:
- quiet_cmd = 'Gyp action: %s ($@)' % name
- if len(dirs) > 0:
- command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
-
- cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
- command = cd_action + command
-
- # The makefile rules are all relative to the top dir, but the gyp actions
- # are defined relative to their containing dir. This replaces the gyp_*
- # variables for the action rule with an absolute version so that the
- # output goes in the right place.
- # Only write the gyp_* rules for the "primary" output (:1);
- # it's superfluous for the "extra outputs", and this avoids accidentally
- # writing duplicate dummy rules for those outputs.
- main_output = make.QuoteSpaces(self.LocalPathify(outputs[0]))
- self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
- self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
- self.WriteLn('%s: gyp_intermediate_dir := '
- '$(abspath $(gyp_intermediate_dir))' % main_output)
- self.WriteLn('%s: gyp_shared_intermediate_dir := '
- '$(abspath $(gyp_shared_intermediate_dir))' % main_output)
-
- # Android's envsetup.sh adds a number of directories to the path including
- # the built host binary directory. This causes actions/rules invoked by
- # gyp to sometimes use these instead of system versions, e.g. bison.
- # The built host binaries may not be suitable, and can cause errors.
- # So, we remove them from the PATH using the ANDROID_BUILD_PATHS variable
- # set by envsetup.
- self.WriteLn('%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))'
- % main_output)
-
- for input in inputs:
- assert ' ' not in input, (
- "Spaces in action input filenames not supported (%s)" % input)
- for output in outputs:
- assert ' ' not in output, (
- "Spaces in action output filenames not supported (%s)" % output)
-
- self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
- (main_output, ' '.join(map(self.LocalPathify, inputs))))
- self.WriteLn('\t@echo "%s"' % quiet_cmd)
- self.WriteLn('\t$(hide)%s\n' % command)
- for output in outputs[1:]:
- # Make each output depend on the main output, with an empty command
- # to force make to notice that the mtime has changed.
- self.WriteLn('%s: %s ;' % (self.LocalPathify(output), main_output))
-
- extra_outputs += outputs
- self.WriteLn()
-
- self.WriteLn()
-
-
- def WriteRules(self, rules, extra_sources, extra_outputs):
- """Write Makefile code for any 'rules' from the gyp input.
-
- extra_sources: a list that will be filled in with newly generated source
- files, if any
- extra_outputs: a list that will be filled in with any outputs of these
- rules (used to make other pieces dependent on these rules)
- """
- if len(rules) == 0:
- return
-
- for rule in rules:
- if len(rule.get('rule_sources', [])) == 0:
- continue
- name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
- rule['rule_name']))
- self.WriteLn('\n### Generated for rule "%s":' % name)
- self.WriteLn('# "%s":' % rule)
-
- inputs = rule.get('inputs')
- for rule_source in rule.get('rule_sources', []):
- (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
- (rule_source_root, rule_source_ext) = \
- os.path.splitext(rule_source_basename)
-
- outputs = [self.ExpandInputRoot(out, rule_source_root,
- rule_source_dirname)
- for out in rule['outputs']]
-
- dirs = set()
- for out in outputs:
- if not out.startswith('$'):
- print ('WARNING: Rule for target %s writes output to local path %s'
- % (self.target, out))
- dir = os.path.dirname(out)
- if dir:
- dirs.add(dir)
- extra_outputs += outputs
- if int(rule.get('process_outputs_as_sources', False)):
- extra_sources.extend(outputs)
-
- components = []
- for component in rule['action']:
- component = self.ExpandInputRoot(component, rule_source_root,
- rule_source_dirname)
- if '$(RULE_SOURCES)' in component:
- component = component.replace('$(RULE_SOURCES)',
- rule_source)
- components.append(component)
-
- command = gyp.common.EncodePOSIXShellList(components)
- cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
- command = cd_action + command
- if dirs:
- command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
-
- # We set up a rule to build the first output, and then set up
- # a rule for each additional output to depend on the first.
- outputs = map(self.LocalPathify, outputs)
- main_output = outputs[0]
- self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
- self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
- self.WriteLn('%s: gyp_intermediate_dir := '
- '$(abspath $(gyp_intermediate_dir))' % main_output)
- self.WriteLn('%s: gyp_shared_intermediate_dir := '
- '$(abspath $(gyp_shared_intermediate_dir))' % main_output)
-
- # See explanation in WriteActions.
- self.WriteLn('%s: export PATH := '
- '$(subst $(ANDROID_BUILD_PATHS),,$(PATH))' % main_output)
-
- main_output_deps = self.LocalPathify(rule_source)
- if inputs:
- main_output_deps += ' '
- main_output_deps += ' '.join([self.LocalPathify(f) for f in inputs])
-
- self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
- (main_output, main_output_deps))
- self.WriteLn('\t%s\n' % command)
- for output in outputs[1:]:
- # Make each output depend on the main output, with an empty command
- # to force make to notice that the mtime has changed.
- self.WriteLn('%s: %s ;' % (output, main_output))
- self.WriteLn()
-
- self.WriteLn()
-
-
- def WriteCopies(self, copies, extra_outputs):
- """Write Makefile code for any 'copies' from the gyp input.
-
- extra_outputs: a list that will be filled in with any outputs of this action
- (used to make other pieces dependent on this action)
- """
- self.WriteLn('### Generated for copy rule.')
-
- variable = make.StringToMakefileVariable(self.relative_target + '_copies')
- outputs = []
- for copy in copies:
- for path in copy['files']:
- # The Android build system does not allow generation of files into the
- # source tree. The destination should start with a variable, which will
- # typically be $(gyp_intermediate_dir) or
- # $(gyp_shared_intermediate_dir). Note that we can't use an assertion
- # because some of the gyp tests depend on this.
- if not copy['destination'].startswith('$'):
- print ('WARNING: Copy rule for target %s writes output to '
- 'local path %s' % (self.target, copy['destination']))
-
- # LocalPathify() calls normpath, stripping trailing slashes.
- path = Sourceify(self.LocalPathify(path))
- filename = os.path.split(path)[1]
- output = Sourceify(self.LocalPathify(os.path.join(copy['destination'],
- filename)))
-
- self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES) | $(ACP)' %
- (output, path))
- self.WriteLn('\t@echo Copying: $@')
- self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
- self.WriteLn('\t$(hide) $(ACP) -rpf $< $@')
- self.WriteLn()
- outputs.append(output)
- self.WriteLn('%s = %s' % (variable,
- ' '.join(map(make.QuoteSpaces, outputs))))
- extra_outputs.append('$(%s)' % variable)
- self.WriteLn()
-
-
- def WriteSourceFlags(self, spec, configs):
- """Write out the flags and include paths used to compile source files for
- the current target.
-
- Args:
- spec, configs: input from gyp.
- """
- for configname, config in sorted(configs.iteritems()):
- extracted_includes = []
-
- self.WriteLn('\n# Flags passed to both C and C++ files.')
- cflags, includes_from_cflags = self.ExtractIncludesFromCFlags(
- config.get('cflags', []) + config.get('cflags_c', []))
- extracted_includes.extend(includes_from_cflags)
- self.WriteList(cflags, 'MY_CFLAGS_%s' % configname)
-
- self.WriteList(config.get('defines'), 'MY_DEFS_%s' % configname,
- prefix='-D', quoter=make.EscapeCppDefine)
-
- self.WriteLn('\n# Include paths placed before CFLAGS/CPPFLAGS')
- includes = list(config.get('include_dirs', []))
- includes.extend(extracted_includes)
- includes = map(Sourceify, map(self.LocalPathify, includes))
- includes = self.NormalizeIncludePaths(includes)
- self.WriteList(includes, 'LOCAL_C_INCLUDES_%s' % configname)
-
- self.WriteLn('\n# Flags passed to only C++ (and not C) files.')
- self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS_%s' % configname)
-
- self.WriteLn('\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) '
- '$(MY_DEFS_$(GYP_CONFIGURATION))')
- # Undefine ANDROID for host modules
- # TODO: the source code should not use macro ANDROID to tell if it's host
- # or target module.
- if self.toolset == 'host':
- self.WriteLn('# Undefine ANDROID for host modules')
- self.WriteLn('LOCAL_CFLAGS += -UANDROID')
- self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) '
- '$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))')
- self.WriteLn('LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))')
- # Android uses separate flags for assembly file invocations, but gyp expects
- # the same CFLAGS to be applied:
- self.WriteLn('LOCAL_ASFLAGS := $(LOCAL_CFLAGS)')
-
-
- def WriteSources(self, spec, configs, extra_sources):
- """Write Makefile code for any 'sources' from the gyp input.
- These are source files necessary to build the current target.
- We need to handle shared_intermediate directory source files as
- a special case by copying them to the intermediate directory and
-    treating them as generated sources. Otherwise the Android build
- rules won't pick them up.
-
- Args:
- spec, configs: input from gyp.
- extra_sources: Sources generated from Actions or Rules.
- """
- sources = filter(make.Compilable, spec.get('sources', []))
- generated_not_sources = [x for x in extra_sources if not make.Compilable(x)]
- extra_sources = filter(make.Compilable, extra_sources)
-
- # Determine and output the C++ extension used by these sources.
- # We simply find the first C++ file and use that extension.
- all_sources = sources + extra_sources
- local_cpp_extension = '.cpp'
- for source in all_sources:
- (root, ext) = os.path.splitext(source)
- if IsCPPExtension(ext):
- local_cpp_extension = ext
- break
- if local_cpp_extension != '.cpp':
- self.WriteLn('LOCAL_CPP_EXTENSION := %s' % local_cpp_extension)
-
- # We need to move any non-generated sources that are coming from the
- # shared intermediate directory out of LOCAL_SRC_FILES and put them
- # into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files
- # that don't match our local_cpp_extension, since Android will only
- # generate Makefile rules for a single LOCAL_CPP_EXTENSION.
- local_files = []
- for source in sources:
- (root, ext) = os.path.splitext(source)
- if '$(gyp_shared_intermediate_dir)' in source:
- extra_sources.append(source)
- elif '$(gyp_intermediate_dir)' in source:
- extra_sources.append(source)
- elif IsCPPExtension(ext) and ext != local_cpp_extension:
- extra_sources.append(source)
- else:
- local_files.append(os.path.normpath(os.path.join(self.path, source)))
-
- # For any generated source, if it is coming from the shared intermediate
- # directory then we add a Make rule to copy them to the local intermediate
- # directory first. This is because the Android LOCAL_GENERATED_SOURCES
- # must be in the local module intermediate directory for the compile rules
- # to work properly. If the file has the wrong C++ extension, then we add
- # a rule to copy that to intermediates and use the new version.
- final_generated_sources = []
-    # If a source file gets copied, we still need to add the original source
-    # directory as a header search path, since GCC searches for headers in the
- # directory that contains the source file by default.
- origin_src_dirs = []
- for source in extra_sources:
- local_file = source
- if not '$(gyp_intermediate_dir)/' in local_file:
- basename = os.path.basename(local_file)
- local_file = '$(gyp_intermediate_dir)/' + basename
- (root, ext) = os.path.splitext(local_file)
- if IsCPPExtension(ext) and ext != local_cpp_extension:
- local_file = root + local_cpp_extension
- if local_file != source:
- self.WriteLn('%s: %s' % (local_file, self.LocalPathify(source)))
- self.WriteLn('\tmkdir -p $(@D); cp $< $@')
- origin_src_dirs.append(os.path.dirname(source))
- final_generated_sources.append(local_file)
-
- # We add back in all of the non-compilable stuff to make sure that the
- # make rules have dependencies on them.
- final_generated_sources.extend(generated_not_sources)
- self.WriteList(final_generated_sources, 'LOCAL_GENERATED_SOURCES')
-
- origin_src_dirs = gyp.common.uniquer(origin_src_dirs)
- origin_src_dirs = map(Sourceify, map(self.LocalPathify, origin_src_dirs))
- self.WriteList(origin_src_dirs, 'GYP_COPIED_SOURCE_ORIGIN_DIRS')
-
- self.WriteList(local_files, 'LOCAL_SRC_FILES')
-
- # Write out the flags used to compile the source; this must be done last
- # so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path.
- self.WriteSourceFlags(spec, configs)
-
-
- def ComputeAndroidModule(self, spec):
- """Return the Android module name used for a gyp spec.
-
- We use the complete qualified target name to avoid collisions between
- duplicate targets in different directories. We also add a suffix to
- distinguish gyp-generated module names.
- """
-
- if int(spec.get('android_unmangled_name', 0)):
- assert self.type != 'shared_library' or self.target.startswith('lib')
- return self.target
-
- if self.type == 'shared_library':
- # For reasons of convention, the Android build system requires that all
- # shared library modules are named 'libfoo' when generating -l flags.
- prefix = 'lib_'
- else:
- prefix = ''
-
- if spec['toolset'] == 'host':
- suffix = '_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp'
- else:
- suffix = '_gyp'
-
- if self.path:
- middle = make.StringToMakefileVariable('%s_%s' % (self.path, self.target))
- else:
- middle = make.StringToMakefileVariable(self.target)
-
- return ''.join([prefix, middle, suffix])
-
-
- def ComputeOutputParts(self, spec):
- """Return the 'output basename' of a gyp spec, split into filename + ext.
-
- Android libraries must be named the same thing as their module name,
- otherwise the linker can't find them, so product_name and so on must be
- ignored if we are building a library, and the "lib" prepending is
- not done for Android.
- """
- assert self.type != 'loadable_module' # TODO: not supported?
-
- target = spec['target_name']
- target_prefix = ''
- target_ext = ''
- if self.type == 'static_library':
- target = self.ComputeAndroidModule(spec)
- target_ext = '.a'
- elif self.type == 'shared_library':
- target = self.ComputeAndroidModule(spec)
- target_ext = '.so'
- elif self.type == 'none':
- target_ext = '.stamp'
- elif self.type != 'executable':
- print ("ERROR: What output file should be generated?",
- "type", self.type, "target", target)
-
- if self.type != 'static_library' and self.type != 'shared_library':
- target_prefix = spec.get('product_prefix', target_prefix)
- target = spec.get('product_name', target)
- product_ext = spec.get('product_extension')
- if product_ext:
- target_ext = '.' + product_ext
-
- target_stem = target_prefix + target
- return (target_stem, target_ext)
-
-
- def ComputeOutputBasename(self, spec):
- """Return the 'output basename' of a gyp spec.
-
- E.g., the loadable module 'foobar' in directory 'baz' will produce
- 'libfoobar.so'
- """
- return ''.join(self.ComputeOutputParts(spec))
-
-
- def ComputeOutput(self, spec):
- """Return the 'output' (full output path) of a gyp spec.
-
- E.g., the loadable module 'foobar' in directory 'baz' will produce
- '$(obj)/baz/libfoobar.so'
- """
- if self.type == 'executable' and self.toolset == 'host':
- # We install host executables into shared_intermediate_dir so they can be
- # run by gyp rules that refer to PRODUCT_DIR.
- path = '$(gyp_shared_intermediate_dir)'
- elif self.type == 'shared_library':
- if self.toolset == 'host':
- path = '$(HOST_OUT_INTERMEDIATE_LIBRARIES)'
- else:
- path = '$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)'
- else:
- # Other targets just get built into their intermediate dir.
- if self.toolset == 'host':
- path = '$(call intermediates-dir-for,%s,%s,true)' % (self.android_class,
- self.android_module)
- else:
- path = ('$(call intermediates-dir-for,%s,%s,,,$(GYP_VAR_PREFIX))'
- % (self.android_class, self.android_module))
-
- assert spec.get('product_dir') is None # TODO: not supported?
- return os.path.join(path, self.ComputeOutputBasename(spec))
-
- def NormalizeIncludePaths(self, include_paths):
- """ Normalize include_paths.
- Convert absolute paths to relative to the Android top directory;
- filter out include paths that are already brought in by the Android build
- system.
-
- Args:
- include_paths: A list of unprocessed include paths.
- Returns:
- A list of normalized include paths.
- """
- normalized = []
- for path in include_paths:
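-      # Convert absolute paths, e.g. '/<top>/external/foo' -> 'external/foo'.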
- if path[0] == '/':
- path = gyp.common.RelativePath(path, self.android_top_dir)
-
- # Filter out the Android standard search path.
- if path not in android_standard_include_paths:
- normalized.append(path)
- return normalized
-
- def ExtractIncludesFromCFlags(self, cflags):
- """Extract includes "-I..." out from cflags
-
- Args:
- cflags: A list of compiler flags, which may be mixed with "-I.."
- Returns:
-      A tuple of lists: (clean_cflags, include_paths). The "-I" prefix is trimmed.
- """
- clean_cflags = []
- include_paths = []
- for flag in cflags:
- if flag.startswith('-I'):
- include_paths.append(flag[2:])
- else:
- clean_cflags.append(flag)
-
- return (clean_cflags, include_paths)
-
- def ComputeAndroidLibraryModuleNames(self, libraries):
- """Compute the Android module names from libraries, ie spec.get('libraries')
-
- Args:
- libraries: the value of spec.get('libraries')
- Returns:
- A tuple (static_lib_modules, dynamic_lib_modules)
- """
- static_lib_modules = []
- dynamic_lib_modules = []
- for libs in libraries:
- # Libs can have multiple words.
- for lib in libs.split():
- # Filter the system libraries, which are added by default by the Android
- # build system.
- if (lib == '-lc' or lib == '-lstdc++' or lib == '-lm' or
- lib.endswith('libgcc.a')):
- continue
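-        # A path like 'foo/libbar.a' maps to the static module name 'libbar'.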
- match = re.search(r'([^/]+)\.a$', lib)
- if match:
- static_lib_modules.append(match.group(1))
- continue
- match = re.search(r'([^/]+)\.so$', lib)
- if match:
- dynamic_lib_modules.append(match.group(1))
- continue
- # "-lstlport" -> libstlport
- if lib.startswith('-l'):
- if lib.endswith('_static'):
- static_lib_modules.append('lib' + lib[2:])
- else:
- dynamic_lib_modules.append('lib' + lib[2:])
- return (static_lib_modules, dynamic_lib_modules)
-
-
- def ComputeDeps(self, spec):
- """Compute the dependencies of a gyp spec.
-
- Returns a tuple (deps, link_deps), where each is a list of
- filenames that will need to be put in front of make for either
- building (deps) or linking (link_deps).
- """
- deps = []
- link_deps = []
- if 'dependencies' in spec:
- deps.extend([target_outputs[dep] for dep in spec['dependencies']
- if target_outputs[dep]])
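-      # target_link_deps values are ('static'|'shared', android_module) pairs.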
- for dep in spec['dependencies']:
- if dep in target_link_deps:
- link_deps.append(target_link_deps[dep])
- deps.extend(link_deps)
- return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
-
-
- def WriteTargetFlags(self, spec, configs, link_deps):
- """Write Makefile code to specify the link flags and library dependencies.
-
- spec, configs: input from gyp.
- link_deps: link dependency list; see ComputeDeps()
- """
- for configname, config in sorted(configs.iteritems()):
- ldflags = list(config.get('ldflags', []))
- self.WriteLn('')
- self.WriteList(ldflags, 'LOCAL_LDFLAGS_%s' % configname)
- self.WriteLn('\nLOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION))')
-
- # Libraries (i.e. -lfoo)
- libraries = gyp.common.uniquer(spec.get('libraries', []))
- static_libs, dynamic_libs = self.ComputeAndroidLibraryModuleNames(
- libraries)
-
- # Link dependencies (i.e. libfoo.a, libfoo.so)
- static_link_deps = [x[1] for x in link_deps if x[0] == 'static']
- shared_link_deps = [x[1] for x in link_deps if x[0] == 'shared']
- self.WriteLn('')
- self.WriteList(static_libs + static_link_deps,
- 'LOCAL_STATIC_LIBRARIES')
- self.WriteLn('# Enable grouping to fix circular references')
- self.WriteLn('LOCAL_GROUP_STATIC_LIBRARIES := true')
- self.WriteLn('')
- self.WriteList(dynamic_libs + shared_link_deps,
- 'LOCAL_SHARED_LIBRARIES')
-
-
- def WriteTarget(self, spec, configs, deps, link_deps, part_of_all,
- write_alias_target):
- """Write Makefile code to produce the final target of the gyp spec.
-
- spec, configs: input from gyp.
- deps, link_deps: dependency lists; see ComputeDeps()
- part_of_all: flag indicating this target is part of 'all'
- write_alias_target: flag indicating whether to create short aliases for this
- target
- """
- self.WriteLn('### Rules for final target.')
-
- if self.type != 'none':
- self.WriteTargetFlags(spec, configs, link_deps)
-
- # Add to the set of targets which represent the gyp 'all' target. We use the
- # name 'gyp_all_modules' as the Android build system doesn't allow the use
- # of the Make target 'all' and because 'all_modules' is the equivalent of
- # the Make target 'all' on Android.
- if part_of_all and write_alias_target:
- self.WriteLn('# Add target alias to "gyp_all_modules" target.')
- self.WriteLn('.PHONY: gyp_all_modules')
- self.WriteLn('gyp_all_modules: %s' % self.android_module)
- self.WriteLn('')
-
- # Add an alias from the gyp target name to the Android module name. This
- # simplifies manual builds of the target, and is required by the test
- # framework.
- if self.target != self.android_module and write_alias_target:
- self.WriteLn('# Alias gyp target name.')
- self.WriteLn('.PHONY: %s' % self.target)
- self.WriteLn('%s: %s' % (self.target, self.android_module))
- self.WriteLn('')
-
- # Add the command to trigger build of the target type depending
- # on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY
- # NOTE: This has to come last!
- modifier = ''
- if self.toolset == 'host':
- modifier = 'HOST_'
- if self.type == 'static_library':
- self.WriteLn('include $(BUILD_%sSTATIC_LIBRARY)' % modifier)
- elif self.type == 'shared_library':
- self.WriteLn('LOCAL_PRELINK_MODULE := false')
- self.WriteLn('include $(BUILD_%sSHARED_LIBRARY)' % modifier)
- elif self.type == 'executable':
- if self.toolset == 'host':
- self.WriteLn('LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)')
- else:
- # Don't install target executables for now, as it results in them being
- # included in ROM. This can be revisited if there's a reason to install
- # them later.
- self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true')
- self.WriteLn('include $(BUILD_%sEXECUTABLE)' % modifier)
- else:
- self.WriteLn('LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp')
- self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true')
- if self.toolset == 'target':
- self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_VAR_PREFIX)')
- self.WriteLn()
- self.WriteLn('include $(BUILD_SYSTEM)/base_rules.mk')
- self.WriteLn()
- self.WriteLn('$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)')
- self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"')
- self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
- self.WriteLn('\t$(hide) touch $@')
- if self.toolset == 'target':
- self.WriteLn()
- self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX :=')
-
-
- def WriteList(self, value_list, variable=None, prefix='',
- quoter=make.QuoteIfNecessary, local_pathify=False):
- """Write a variable definition that is a list of values.
-
- E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
- foo = blaha blahb
- but in a pretty-printed style.
- """
- values = ''
- if value_list:
- value_list = [quoter(prefix + l) for l in value_list]
- if local_pathify:
- value_list = [self.LocalPathify(l) for l in value_list]
- values = ' \\\n\t' + ' \\\n\t'.join(value_list)
- self.fp.write('%s :=%s\n\n' % (variable, values))
-
-
- def WriteLn(self, text=''):
- self.fp.write(text + '\n')
-
-
- def LocalPathify(self, path):
- """Convert a subdirectory-relative path into a normalized path which starts
- with the make variable $(LOCAL_PATH) (i.e. the top of the project tree).
- Absolute paths, or paths that contain variables, are just normalized."""
- if '$(' in path or os.path.isabs(path):
- # path is not a file in the project tree in this case, but calling
- # normpath is still important for trimming trailing slashes.
- return os.path.normpath(path)
- local_path = os.path.join('$(LOCAL_PATH)', self.path, path)
- local_path = os.path.normpath(local_path)
- # Check that normalizing the path didn't ../ itself out of $(LOCAL_PATH)
- # - i.e. that the resulting path is still inside the project tree. The
- # path may legitimately have ended up containing just $(LOCAL_PATH), though,
- # so we don't look for a slash.
- assert local_path.startswith('$(LOCAL_PATH)'), (
-        'Path %s attempts to escape from gyp path %s!' % (path, self.path))
- return local_path
-
-
- def ExpandInputRoot(self, template, expansion, dirname):
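-    """Expand %(INPUT_ROOT)s and %(INPUT_DIRNAME)s in a rule template,
-    e.g. 'gen/%(INPUT_ROOT)s.cc' with expansion 'foo' becomes 'gen/foo.cc'."""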
- if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template:
- return template
- path = template % {
- 'INPUT_ROOT': expansion,
- 'INPUT_DIRNAME': dirname,
- }
- return os.path.normpath(path)
-
-
-def PerformBuild(data, configurations, params):
- # The android backend only supports the default configuration.
- options = params['options']
- makefile = os.path.abspath(os.path.join(options.toplevel_dir,
- 'GypAndroid.mk'))
- env = dict(os.environ)
- env['ONE_SHOT_MAKEFILE'] = makefile
- arguments = ['make', '-C', os.environ['ANDROID_BUILD_TOP'], 'gyp_all_modules']
- print 'Building: %s' % arguments
- subprocess.check_call(arguments, env=env)
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- options = params['options']
- generator_flags = params.get('generator_flags', {})
- builddir_name = generator_flags.get('output_dir', 'out')
- limit_to_target_all = generator_flags.get('limit_to_target_all', False)
- write_alias_targets = generator_flags.get('write_alias_targets', True)
- android_top_dir = os.environ.get('ANDROID_BUILD_TOP')
- assert android_top_dir, '$ANDROID_BUILD_TOP not set; you need to run lunch.'
-
- def CalculateMakefilePath(build_file, base_name):
- """Determine where to write a Makefile for a given gyp file."""
- # Paths in gyp files are relative to the .gyp file, but we want
- # paths relative to the source root for the master makefile. Grab
- # the path of the .gyp file as the base to relativize against.
- # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
- base_path = gyp.common.RelativePath(os.path.dirname(build_file),
- options.depth)
- # We write the file in the base_path directory.
- output_file = os.path.join(options.depth, base_path, base_name)
- assert not options.generator_output, (
- 'The Android backend does not support options.generator_output.')
- base_path = gyp.common.RelativePath(os.path.dirname(build_file),
- options.toplevel_dir)
- return base_path, output_file
-
- # TODO: search for the first non-'Default' target. This can go
- # away when we add verification that all targets have the
- # necessary configurations.
- default_configuration = None
- toolsets = set([target_dicts[target]['toolset'] for target in target_list])
- for target in target_list:
- spec = target_dicts[target]
- if spec['default_configuration'] != 'Default':
- default_configuration = spec['default_configuration']
- break
- if not default_configuration:
- default_configuration = 'Default'
-
- srcdir = '.'
- makefile_name = 'GypAndroid' + options.suffix + '.mk'
- makefile_path = os.path.join(options.toplevel_dir, makefile_name)
- assert not options.generator_output, (
- 'The Android backend does not support options.generator_output.')
- gyp.common.EnsureDirExists(makefile_path)
- root_makefile = open(makefile_path, 'w')
-
- root_makefile.write(header)
-
- # We set LOCAL_PATH just once, here, to the top of the project tree. This
- # allows all the other paths we use to be relative to the Android.mk file,
- # as the Android build system expects.
- root_makefile.write('\nLOCAL_PATH := $(call my-dir)\n')
-
- # Find the list of targets that derive from the gyp file(s) being built.
- needed_targets = set()
- for build_file in params['build_files']:
- for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
- needed_targets.add(target)
-
- build_files = set()
- include_list = set()
- android_modules = {}
- for qualified_target in target_list:
- build_file, target, toolset = gyp.common.ParseQualifiedTarget(
- qualified_target)
- relative_build_file = gyp.common.RelativePath(build_file,
- options.toplevel_dir)
- build_files.add(relative_build_file)
- included_files = data[build_file]['included_files']
- for included_file in included_files:
- # The included_files entries are relative to the dir of the build file
- # that included them, so we have to undo that and then make them relative
- # to the root dir.
- relative_include_file = gyp.common.RelativePath(
- gyp.common.UnrelativePath(included_file, build_file),
- options.toplevel_dir)
- abs_include_file = os.path.abspath(relative_include_file)
- # If the include file is from the ~/.gyp dir, we should use absolute path
- # so that relocating the src dir doesn't break the path.
- if (params['home_dot_gyp'] and
- abs_include_file.startswith(params['home_dot_gyp'])):
- build_files.add(abs_include_file)
- else:
- build_files.add(relative_include_file)
-
- base_path, output_file = CalculateMakefilePath(build_file,
- target + '.' + toolset + options.suffix + '.mk')
-
- spec = target_dicts[qualified_target]
- configs = spec['configurations']
-
- part_of_all = (qualified_target in needed_targets and
- not int(spec.get('suppress_wildcard', False)))
- if limit_to_target_all and not part_of_all:
- continue
-
- relative_target = gyp.common.QualifiedTarget(relative_build_file, target,
- toolset)
- writer = AndroidMkWriter(android_top_dir)
- android_module = writer.Write(qualified_target, relative_target, base_path,
- output_file, spec, configs,
- part_of_all=part_of_all,
- write_alias_target=write_alias_targets)
- if android_module in android_modules:
- print ('ERROR: Android module names must be unique. The following '
- 'targets both generate Android module name %s.\n %s\n %s' %
- (android_module, android_modules[android_module],
- qualified_target))
- return
- android_modules[android_module] = qualified_target
-
- # Our root_makefile lives at the source root. Compute the relative path
- # from there to the output_file for including.
- mkfile_rel_path = gyp.common.RelativePath(output_file,
- os.path.dirname(makefile_path))
- include_list.add(mkfile_rel_path)
-
- root_makefile.write('GYP_CONFIGURATION ?= %s\n' % default_configuration)
- root_makefile.write('GYP_VAR_PREFIX ?=\n')
-
- # Write out the sorted list of includes.
- root_makefile.write('\n')
- for include_file in sorted(include_list):
- root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n')
- root_makefile.write('\n')
-
- if write_alias_targets:
- root_makefile.write(ALL_MODULES_FOOTER)
-
- root_makefile.close()
diff --git a/deps/gyp/pylib/gyp/generator/cmake.py b/deps/gyp/pylib/gyp/generator/cmake.py
index 10d015ee83..17f5e6396c 100644
--- a/deps/gyp/pylib/gyp/generator/cmake.py
+++ b/deps/gyp/pylib/gyp/generator/cmake.py
@@ -55,7 +55,7 @@ generator_default_variables = {
'CONFIGURATION_NAME': '${configuration}',
}
-FULL_PATH_VARS = ('${CMAKE_SOURCE_DIR}', '${builddir}', '${obj}')
+FULL_PATH_VARS = ('${CMAKE_CURRENT_LIST_DIR}', '${builddir}', '${obj}')
generator_supports_multiple_toolsets = True
generator_wants_static_library_dependencies_adjusted = True
@@ -103,7 +103,7 @@ def NormjoinPathForceCMakeSource(base_path, rel_path):
if any([rel_path.startswith(var) for var in FULL_PATH_VARS]):
return rel_path
# TODO: do we need to check base_path for absolute variables as well?
- return os.path.join('${CMAKE_SOURCE_DIR}',
+ return os.path.join('${CMAKE_CURRENT_LIST_DIR}',
os.path.normpath(os.path.join(base_path, rel_path)))
@@ -150,20 +150,17 @@ def SetFileProperty(output, source_name, property_name, values, sep):
output.write('")\n')
-def SetFilesProperty(output, source_names, property_name, values, sep):
+def SetFilesProperty(output, variable, property_name, values, sep):
"""Given a set of source files, sets the given property on them."""
- output.write('set_source_files_properties(\n')
- for source_name in source_names:
- output.write(' ')
- output.write(source_name)
- output.write('\n')
- output.write(' PROPERTIES\n ')
+ output.write('set_source_files_properties(')
+ WriteVariable(output, variable)
+ output.write(' PROPERTIES ')
output.write(property_name)
output.write(' "')
for value in values:
output.write(CMakeStringEscape(value))
output.write(sep)
- output.write('"\n)\n')
+ output.write('")\n')
def SetTargetProperty(output, target_name, property_name, values, sep=''):
@@ -216,7 +213,7 @@ def WriteVariable(output, variable_name, prepend=None):
output.write('}')
-class CMakeTargetType:
+class CMakeTargetType(object):
def __init__(self, command, modifier, property_modifier):
self.command = command
self.modifier = modifier
@@ -236,11 +233,11 @@ def StringToCMakeTargetName(a):
"""Converts the given string 'a' to a valid CMake target name.
All invalid characters are replaced by '_'.
- Invalid for cmake: ' ', '/', '(', ')'
+ Invalid for cmake: ' ', '/', '(', ')', '"'
Invalid for make: ':'
Invalid for unknown reasons but cause failures: '.'
"""
- return a.translate(string.maketrans(' /():.', '______'))
+ return a.translate(string.maketrans(' /():."', '_______'))
def WriteActions(target_name, actions, extra_sources, extra_deps,
@@ -296,7 +293,7 @@ def WriteActions(target_name, actions, extra_sources, extra_deps,
WriteVariable(output, inputs_name)
output.write('\n')
- output.write(' WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
+ output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
output.write(path_to_gyp)
output.write('\n')
@@ -401,9 +398,9 @@ def WriteRules(target_name, rules, extra_sources, extra_deps,
output.write(NormjoinPath(path_to_gyp, rule_source))
output.write('\n')
- # CMAKE_SOURCE_DIR is where the CMakeLists.txt lives.
+ # CMAKE_CURRENT_LIST_DIR is where the CMakeLists.txt lives.
# The cwd is the current build directory.
- output.write(' WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
+ output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
output.write(path_to_gyp)
output.write('\n')
@@ -464,7 +461,7 @@ def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
extra_deps.append(copy_name)
return
- class Copy:
+ class Copy(object):
def __init__(self, ext, command):
self.cmake_inputs = []
self.cmake_outputs = []
@@ -488,7 +485,7 @@ def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
copy = file_copy if os.path.basename(src) else dir_copy
- copy.cmake_inputs.append(NormjoinPath(path_to_gyp, src))
+ copy.cmake_inputs.append(NormjoinPathForceCMakeSource(path_to_gyp, src))
copy.cmake_outputs.append(NormjoinPathForceCMakeSource(path_to_gyp, dst))
copy.gyp_inputs.append(src)
copy.gyp_outputs.append(dst)
@@ -525,7 +522,7 @@ def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
WriteVariable(output, copy.inputs_name, ' ')
output.write('\n')
- output.write('WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
+ output.write('WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
output.write(path_to_gyp)
output.write('\n')
@@ -640,6 +637,12 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
target_type = spec.get('type', '<missing target type>')
target_toolset = spec.get('toolset')
+ cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type)
+ if cmake_target_type is None:
+ print ('Target %s has unknown target type %s, skipping.' %
+ ( target_name, target_type ) )
+ return
+
SetVariable(output, 'TARGET', target_name)
SetVariable(output, 'TOOLSET', target_toolset)
@@ -667,27 +670,89 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
srcs = spec.get('sources', [])
# Gyp separates the sheep from the goats based on file extensions.
- def partition(l, p):
- return reduce(lambda x, e: x[not p(e)].append(e) or x, l, ([], []))
- compilable_srcs, other_srcs = partition(srcs, Compilable)
+ # A full separation is done here because of flag handing (see below).
+ s_sources = []
+ c_sources = []
+ cxx_sources = []
+ linkable_sources = []
+ other_sources = []
+ for src in srcs:
+ _, ext = os.path.splitext(src)
+ src_type = COMPILABLE_EXTENSIONS.get(ext, None)
+    src_norm_path = NormjoinPath(path_from_cmakelists_to_gyp, src)
- # CMake gets upset when executable targets provide no sources.
- if target_type == 'executable' and not compilable_srcs and not extra_sources:
-    print ('Executable %s has no compilable sources, treating as "none".' %
- target_name )
- target_type = 'none'
+ if src_type == 's':
+ s_sources.append(src_norm_path)
+ elif src_type == 'cc':
+ c_sources.append(src_norm_path)
+ elif src_type == 'cxx':
+ cxx_sources.append(src_norm_path)
+ elif Linkable(ext):
+ linkable_sources.append(src_norm_path)
+ else:
+ other_sources.append(src_norm_path)
- cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type)
- if cmake_target_type is None:
- print ('Target %s has unknown target type %s, skipping.' %
- ( target_name, target_type ) )
- return
+ for extra_source in extra_sources:
+ src, real_source = extra_source
+ _, ext = os.path.splitext(real_source)
+ src_type = COMPILABLE_EXTENSIONS.get(ext, None)
+
+ if src_type == 's':
+ s_sources.append(src)
+ elif src_type == 'cc':
+ c_sources.append(src)
+ elif src_type == 'cxx':
+ cxx_sources.append(src)
+ elif Linkable(ext):
+ linkable_sources.append(src)
+ else:
+ other_sources.append(src)
+
+ s_sources_name = None
+ if s_sources:
+ s_sources_name = cmake_target_name + '__asm_srcs'
+ SetVariableList(output, s_sources_name, s_sources)
+
+ c_sources_name = None
+ if c_sources:
+ c_sources_name = cmake_target_name + '__c_srcs'
+ SetVariableList(output, c_sources_name, c_sources)
+
+ cxx_sources_name = None
+ if cxx_sources:
+ cxx_sources_name = cmake_target_name + '__cxx_srcs'
+ SetVariableList(output, cxx_sources_name, cxx_sources)
+
+ linkable_sources_name = None
+ if linkable_sources:
+ linkable_sources_name = cmake_target_name + '__linkable_srcs'
+ SetVariableList(output, linkable_sources_name, linkable_sources)
+
+ other_sources_name = None
+ if other_sources:
+ other_sources_name = cmake_target_name + '__other_srcs'
+ SetVariableList(output, other_sources_name, other_sources)
+
+ # CMake gets upset when executable targets provide no sources.
+ # http://www.cmake.org/pipermail/cmake/2010-July/038461.html
+ dummy_sources_name = None
+ has_sources = (s_sources_name or
+ c_sources_name or
+ cxx_sources_name or
+ linkable_sources_name or
+ other_sources_name)
+ if target_type == 'executable' and not has_sources:
+ dummy_sources_name = cmake_target_name + '__dummy_srcs'
+ SetVariable(output, dummy_sources_name,
+ "${obj}.${TOOLSET}/${TARGET}/genc/dummy.c")
+ output.write('if(NOT EXISTS "')
+ WriteVariable(output, dummy_sources_name)
+ output.write('")\n')
+ output.write(' file(WRITE "')
+ WriteVariable(output, dummy_sources_name)
+ output.write('" "")\n')
+ output.write("endif()\n")
- other_srcs_name = None
- if other_srcs:
- other_srcs_name = cmake_target_name + '__other_srcs'
- SetVariableList(output, other_srcs_name,
- [NormjoinPath(path_from_cmakelists_to_gyp, src) for src in other_srcs])
# CMake is opposed to setting linker directories and considers the practice
# of setting linker directories dangerous. Instead, it favors the use of
@@ -713,37 +778,54 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
output.write(' ')
output.write(cmake_target_type.modifier)
- if other_srcs_name:
- WriteVariable(output, other_srcs_name, ' ')
-
- output.write('\n')
-
- for src in compilable_srcs:
- output.write(' ')
- output.write(NormjoinPath(path_from_cmakelists_to_gyp, src))
- output.write('\n')
- for extra_source in extra_sources:
- output.write(' ')
- src, _ = extra_source
- output.write(NormjoinPath(path_from_cmakelists_to_gyp, src))
- output.write('\n')
+ if s_sources_name:
+ WriteVariable(output, s_sources_name, ' ')
+ if c_sources_name:
+ WriteVariable(output, c_sources_name, ' ')
+ if cxx_sources_name:
+ WriteVariable(output, cxx_sources_name, ' ')
+ if linkable_sources_name:
+ WriteVariable(output, linkable_sources_name, ' ')
+ if other_sources_name:
+ WriteVariable(output, other_sources_name, ' ')
+ if dummy_sources_name:
+ WriteVariable(output, dummy_sources_name, ' ')
output.write(')\n')
+ # Let CMake know if the 'all' target should depend on this target.
+ exclude_from_all = ('TRUE' if qualified_target not in all_qualified_targets
+ else 'FALSE')
+ SetTargetProperty(output, cmake_target_name,
+ 'EXCLUDE_FROM_ALL', exclude_from_all)
+ for extra_target_name in extra_deps:
+ SetTargetProperty(output, extra_target_name,
+ 'EXCLUDE_FROM_ALL', exclude_from_all)
+
# Output name and location.
if target_type != 'none':
+ # Link as 'C' if there are no other files
+ if not c_sources and not cxx_sources:
+ SetTargetProperty(output, cmake_target_name, 'LINKER_LANGUAGE', ['C'])
+
# Mark uncompiled sources as uncompiled.
- if other_srcs_name:
+ if other_sources_name:
output.write('set_source_files_properties(')
- WriteVariable(output, other_srcs_name, '')
+ WriteVariable(output, other_sources_name, '')
output.write(' PROPERTIES HEADER_FILE_ONLY "TRUE")\n')
+ # Mark object sources as linkable.
+ if linkable_sources_name:
+ output.write('set_source_files_properties(')
+      WriteVariable(output, linkable_sources_name, '')
+ output.write(' PROPERTIES EXTERNAL_OBJECT "TRUE")\n')
+
# Output directory
target_output_directory = spec.get('product_dir')
if target_output_directory is None:
if target_type in ('executable', 'loadable_module'):
target_output_directory = generator_default_variables['PRODUCT_DIR']
- elif target_type in ('shared_library'):
+ elif target_type == 'shared_library':
target_output_directory = '${builddir}/lib.${TOOLSET}'
elif spec.get('standalone_static_library', False):
target_output_directory = generator_default_variables['PRODUCT_DIR']
@@ -804,122 +886,84 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
cmake_target_output_basename)
SetFileProperty(output, cmake_target_output, 'GENERATED', ['TRUE'], '')
- # Let CMake know if the 'all' target should depend on this target.
- exclude_from_all = ('TRUE' if qualified_target not in all_qualified_targets
- else 'FALSE')
- SetTargetProperty(output, cmake_target_name,
- 'EXCLUDE_FROM_ALL', exclude_from_all)
- for extra_target_name in extra_deps:
- SetTargetProperty(output, extra_target_name,
- 'EXCLUDE_FROM_ALL', exclude_from_all)
-
- # Includes
- includes = config.get('include_dirs')
- if includes:
- # This (target include directories) is what requires CMake 2.8.8
- includes_name = cmake_target_name + '__include_dirs'
- SetVariableList(output, includes_name,
- [NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include)
- for include in includes])
- output.write('set_property(TARGET ')
- output.write(cmake_target_name)
- output.write(' APPEND PROPERTY INCLUDE_DIRECTORIES ')
- WriteVariable(output, includes_name, '')
- output.write(')\n')
-
- # Defines
- defines = config.get('defines')
- if defines is not None:
- SetTargetProperty(output,
- cmake_target_name,
- 'COMPILE_DEFINITIONS',
- defines,
- ';')
-
- # Compile Flags - http://www.cmake.org/Bug/view.php?id=6493
- # CMake currently does not have target C and CXX flags.
- # So, instead of doing...
-
- # cflags_c = config.get('cflags_c')
- # if cflags_c is not None:
- # SetTargetProperty(output, cmake_target_name,
- # 'C_COMPILE_FLAGS', cflags_c, ' ')
-
- # cflags_cc = config.get('cflags_cc')
- # if cflags_cc is not None:
- # SetTargetProperty(output, cmake_target_name,
- # 'CXX_COMPILE_FLAGS', cflags_cc, ' ')
-
- # Instead we must...
- s_sources = []
- c_sources = []
- cxx_sources = []
- for src in srcs:
- _, ext = os.path.splitext(src)
- src_type = COMPILABLE_EXTENSIONS.get(ext, None)
-
- if src_type == 's':
- s_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
-
- if src_type == 'cc':
- c_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
-
- if src_type == 'cxx':
- cxx_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
-
- for extra_source in extra_sources:
- src, real_source = extra_source
- _, ext = os.path.splitext(real_source)
- src_type = COMPILABLE_EXTENSIONS.get(ext, None)
-
- if src_type == 's':
- s_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
-
- if src_type == 'cc':
- c_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
-
- if src_type == 'cxx':
- cxx_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
-
- cflags = config.get('cflags', [])
- cflags_c = config.get('cflags_c', [])
- cflags_cxx = config.get('cflags_cc', [])
- if c_sources and not (s_sources or cxx_sources):
- flags = []
- flags.extend(cflags)
- flags.extend(cflags_c)
- SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
-
- elif cxx_sources and not (s_sources or c_sources):
- flags = []
- flags.extend(cflags)
- flags.extend(cflags_cxx)
- SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
-
- else:
- if s_sources and cflags:
- SetFilesProperty(output, s_sources, 'COMPILE_FLAGS', cflags, ' ')
+ # Includes
+ includes = config.get('include_dirs')
+ if includes:
+ # This (target include directories) is what requires CMake 2.8.8
+ includes_name = cmake_target_name + '__include_dirs'
+ SetVariableList(output, includes_name,
+ [NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include)
+ for include in includes])
+ output.write('set_property(TARGET ')
+ output.write(cmake_target_name)
+ output.write(' APPEND PROPERTY INCLUDE_DIRECTORIES ')
+ WriteVariable(output, includes_name, '')
+ output.write(')\n')
- if c_sources and (cflags or cflags_c):
+ # Defines
+ defines = config.get('defines')
+ if defines is not None:
+ SetTargetProperty(output,
+ cmake_target_name,
+ 'COMPILE_DEFINITIONS',
+ defines,
+ ';')
+
+ # Compile Flags - http://www.cmake.org/Bug/view.php?id=6493
+ # CMake currently does not have target C and CXX flags.
+ # So, instead of doing...
+
+ # cflags_c = config.get('cflags_c')
+ # if cflags_c is not None:
+ # SetTargetProperty(output, cmake_target_name,
+ # 'C_COMPILE_FLAGS', cflags_c, ' ')
+
+ # cflags_cc = config.get('cflags_cc')
+ # if cflags_cc is not None:
+ # SetTargetProperty(output, cmake_target_name,
+ # 'CXX_COMPILE_FLAGS', cflags_cc, ' ')
+
+ # Instead we must...
+ cflags = config.get('cflags', [])
+ cflags_c = config.get('cflags_c', [])
+ cflags_cxx = config.get('cflags_cc', [])
+ if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources):
+ SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', cflags, ' ')
+
+ elif c_sources and not (s_sources or cxx_sources):
flags = []
flags.extend(cflags)
flags.extend(cflags_c)
- SetFilesProperty(output, c_sources, 'COMPILE_FLAGS', flags, ' ')
+ SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
- if cxx_sources and (cflags or cflags_cxx):
+ elif cxx_sources and not (s_sources or c_sources):
flags = []
flags.extend(cflags)
flags.extend(cflags_cxx)
- SetFilesProperty(output, cxx_sources, 'COMPILE_FLAGS', flags, ' ')
+ SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
- # Have assembly link as c if there are no other files
- if not c_sources and not cxx_sources and s_sources:
- SetTargetProperty(output, cmake_target_name, 'LINKER_LANGUAGE', ['C'])
-
- # Linker flags
- ldflags = config.get('ldflags')
- if ldflags is not None:
- SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ')
+ else:
+ # TODO: This is broken, one cannot generally set properties on files,
+ # as other targets may require different properties on the same files.
+ if s_sources and cflags:
+ SetFilesProperty(output, s_sources_name, 'COMPILE_FLAGS', cflags, ' ')
+
+ if c_sources and (cflags or cflags_c):
+ flags = []
+ flags.extend(cflags)
+ flags.extend(cflags_c)
+ SetFilesProperty(output, c_sources_name, 'COMPILE_FLAGS', flags, ' ')
+
+ if cxx_sources and (cflags or cflags_cxx):
+ flags = []
+ flags.extend(cflags)
+ flags.extend(cflags_cxx)
+ SetFilesProperty(output, cxx_sources_name, 'COMPILE_FLAGS', flags, ' ')
+
+ # Linker flags
+ ldflags = config.get('ldflags')
+ if ldflags is not None:
+ SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ')
# Note on Dependencies and Libraries:
# CMake wants to handle link order, resolving the link line up front.
@@ -1040,20 +1084,49 @@ def GenerateOutputForConfig(target_list, target_dicts, data,
output.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
output.write('cmake_policy(VERSION 2.8.8)\n')
- _, project_target, _ = gyp.common.ParseQualifiedTarget(target_list[-1])
+ gyp_file, project_target, _ = gyp.common.ParseQualifiedTarget(target_list[-1])
output.write('project(')
output.write(project_target)
output.write(')\n')
SetVariable(output, 'configuration', config_to_use)
+ ar = None
+ cc = None
+ cxx = None
+
+ make_global_settings = data[gyp_file].get('make_global_settings', [])
+ build_to_top = gyp.common.InvertRelativePath(build_dir,
+ options.toplevel_dir)
+ for key, value in make_global_settings:
+ if key == 'AR':
+ ar = os.path.join(build_to_top, value)
+ if key == 'CC':
+ cc = os.path.join(build_to_top, value)
+ if key == 'CXX':
+ cxx = os.path.join(build_to_top, value)
+
+ ar = gyp.common.GetEnvironFallback(['AR_target', 'AR'], ar)
+ cc = gyp.common.GetEnvironFallback(['CC_target', 'CC'], cc)
+ cxx = gyp.common.GetEnvironFallback(['CXX_target', 'CXX'], cxx)
+
+ if ar:
+ SetVariable(output, 'CMAKE_AR', ar)
+ if cc:
+ SetVariable(output, 'CMAKE_C_COMPILER', cc)
+ if cxx:
+ SetVariable(output, 'CMAKE_CXX_COMPILER', cxx)
+
# The following appears to be as-yet undocumented.
# http://public.kitware.com/Bug/view.php?id=8392
output.write('enable_language(ASM)\n')
# ASM-ATT does not support .S files.
# output.write('enable_language(ASM-ATT)\n')
- SetVariable(output, 'builddir', '${CMAKE_BINARY_DIR}')
+ if cc:
+ SetVariable(output, 'CMAKE_ASM_COMPILER', cc)
+
+ SetVariable(output, 'builddir', '${CMAKE_CURRENT_BINARY_DIR}')
SetVariable(output, 'obj', '${builddir}/obj')
output.write('\n')
@@ -1066,6 +1139,11 @@ def GenerateOutputForConfig(target_list, target_dicts, data,
output.write('set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE 1)\n')
output.write('\n')
+ # Force ninja to use rsp files. Otherwise link and ar lines can get too long,
+ # resulting in 'Argument list too long' errors.
+ output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n')
+ output.write('\n')
+
namer = CMakeNamer(target_list)
# The list of targets upon which the 'all' target should depend.
diff --git a/deps/gyp/pylib/gyp/generator/dump_dependency_json.py b/deps/gyp/pylib/gyp/generator/dump_dependency_json.py
index 927ba6ebad..160eafe2ef 100644
--- a/deps/gyp/pylib/gyp/generator/dump_dependency_json.py
+++ b/deps/gyp/pylib/gyp/generator/dump_dependency_json.py
@@ -14,6 +14,9 @@ generator_supports_multiple_toolsets = True
generator_wants_static_library_dependencies_adjusted = False
+generator_filelist_paths = {
+}
+
generator_default_variables = {
}
for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
@@ -56,6 +59,17 @@ def CalculateGeneratorInputInfo(params):
global generator_wants_static_library_dependencies_adjusted
generator_wants_static_library_dependencies_adjusted = True
+ toplevel = params['options'].toplevel_dir
+ generator_dir = os.path.relpath(params['options'].generator_output or '.')
+ # output_dir: relative path from generator_dir to the build directory.
+ output_dir = generator_flags.get('output_dir', 'out')
+ qualified_out_dir = os.path.normpath(os.path.join(
+ toplevel, generator_dir, output_dir, 'gypfiles'))
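+  # Presumably the directory where gyp writes generated file lists.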
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ 'toplevel': toplevel,
+ 'qualified_out_dir': qualified_out_dir,
+ }
def GenerateOutput(target_list, target_dicts, data, params):
# Map of target -> list of targets it depends on.
@@ -74,7 +88,11 @@ def GenerateOutput(target_list, target_dicts, data, params):
edges[target].append(dep)
targets_to_visit.append(dep)
- filename = 'dump.json'
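+  # Write dump.json under output_dir when given; fall back to the cwd.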
+ try:
+ filepath = params['generator_flags']['output_dir']
+ except KeyError:
+ filepath = '.'
+ filename = os.path.join(filepath, 'dump.json')
f = open(filename, 'w')
json.dump(edges, f)
f.close()
diff --git a/deps/gyp/pylib/gyp/generator/eclipse.py b/deps/gyp/pylib/gyp/generator/eclipse.py
index 718eb5d3db..3544347b3b 100644
--- a/deps/gyp/pylib/gyp/generator/eclipse.py
+++ b/deps/gyp/pylib/gyp/generator/eclipse.py
@@ -24,6 +24,7 @@ import gyp
import gyp.common
import gyp.msvs_emulation
import shlex
+import xml.etree.cElementTree as ET
generator_wants_static_library_dependencies_adjusted = False
@@ -31,8 +32,8 @@ generator_default_variables = {
}
for dirname in ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']:
- # Some gyp steps fail if these are empty(!).
- generator_default_variables[dirname] = 'dir'
+ # Some gyp steps fail if these are empty(!), so we convert them to variables
+ generator_default_variables[dirname] = '$' + dirname
for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
@@ -294,33 +295,123 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
os.path.join(toplevel_build, 'gen')]
- out_name = os.path.join(toplevel_build, 'eclipse-cdt-settings.xml')
+ GenerateCdtSettingsFile(target_list,
+ target_dicts,
+ data,
+ params,
+ config_name,
+ os.path.join(toplevel_build,
+ 'eclipse-cdt-settings.xml'),
+ options,
+ shared_intermediate_dirs)
+ GenerateClasspathFile(target_list,
+ target_dicts,
+ options.toplevel_dir,
+ toplevel_build,
+ os.path.join(toplevel_build,
+ 'eclipse-classpath.xml'))
+
+
+def GenerateCdtSettingsFile(target_list, target_dicts, data, params,
+ config_name, out_name, options,
+ shared_intermediate_dirs):
gyp.common.EnsureDirExists(out_name)
- out = open(out_name, 'w')
+ with open(out_name, 'w') as out:
+ out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
+ out.write('<cdtprojectproperties>\n')
+
+ eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
+ 'GNU C++', 'GNU C', 'Assembly']
+ compiler_path = GetCompilerPath(target_list, data, options)
+ include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
+ shared_intermediate_dirs,
+ config_name, params, compiler_path)
+ WriteIncludePaths(out, eclipse_langs, include_dirs)
+ defines = GetAllDefines(target_list, target_dicts, data, config_name,
+ params, compiler_path)
+ WriteMacros(out, eclipse_langs, defines)
+
+ out.write('</cdtprojectproperties>\n')
+
+
+def GenerateClasspathFile(target_list, target_dicts, toplevel_dir,
+ toplevel_build, out_name):
+ '''Generates a classpath file suitable for symbol navigation and code
+ completion of Java code (such as in Android projects) by finding all
+ .java and .jar files used as action inputs.'''
+ gyp.common.EnsureDirExists(out_name)
+ result = ET.Element('classpath')
+
+ def AddElements(kind, paths):
+ # First, we need to normalize the paths so they are all relative to the
+ # toplevel dir.
+ rel_paths = set()
+ for path in paths:
+ if os.path.isabs(path):
+ rel_paths.add(os.path.relpath(path, toplevel_dir))
+ else:
+ rel_paths.add(path)
+
+ for path in sorted(rel_paths):
+ entry_element = ET.SubElement(result, 'classpathentry')
+ entry_element.set('kind', kind)
+ entry_element.set('path', path)
+
+ AddElements('lib', GetJavaJars(target_list, target_dicts, toplevel_dir))
+ AddElements('src', GetJavaSourceDirs(target_list, target_dicts, toplevel_dir))
+  # Include the standard JRE container.
+ AddElements('con', ['org.eclipse.jdt.launching.JRE_CONTAINER'])
+ # Include a dummy out folder so that Eclipse doesn't use the default /bin
+ # folder in the root of the project.
+ AddElements('output', [os.path.join(toplevel_build, '.eclipse-java-build')])
+
+ ET.ElementTree(result).write(out_name)
- out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
- out.write('<cdtprojectproperties>\n')
- eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
- 'GNU C++', 'GNU C', 'Assembly']
- compiler_path = GetCompilerPath(target_list, data, options)
- include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
- shared_intermediate_dirs, config_name,
- params, compiler_path)
- WriteIncludePaths(out, eclipse_langs, include_dirs)
- defines = GetAllDefines(target_list, target_dicts, data, config_name, params,
- compiler_path)
- WriteMacros(out, eclipse_langs, defines)
+def GetJavaJars(target_list, target_dicts, toplevel_dir):
+ '''Generates a sequence of all .jars used as inputs.'''
+ for target_name in target_list:
+ target = target_dicts[target_name]
+ for action in target.get('actions', []):
+ for input_ in action['inputs']:
+ if os.path.splitext(input_)[1] == '.jar' and not input_.startswith('$'):
+ if os.path.isabs(input_):
+ yield input_
+ else:
+ yield os.path.join(os.path.dirname(target_name), input_)
+
- out.write('</cdtprojectproperties>\n')
- out.close()
+def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir):
+ '''Generates a sequence of all likely java package root directories.'''
+ for target_name in target_list:
+ target = target_dicts[target_name]
+ for action in target.get('actions', []):
+ for input_ in action['inputs']:
+ if (os.path.splitext(input_)[1] == '.java' and
+ not input_.startswith('$')):
+ dir_ = os.path.dirname(os.path.join(os.path.dirname(target_name),
+ input_))
+          # If there is a parent 'src' or 'java' folder, navigate up to it -
+          # these are canonical package root names in Chromium. This will
+          # break if 'src' or 'java' appears as a directory within the
+          # package structure itself. This could be further improved by
+          # inspecting the java file for the package name if this proves
+          # too fragile in practice.
+ parent_search = dir_
+ while os.path.basename(parent_search) not in ['src', 'java']:
+ parent_search, _ = os.path.split(parent_search)
+ if not parent_search or parent_search == toplevel_dir:
+ # Didn't find a known root, just return the original path
+ yield dir_
+ break
+ else:
+ yield parent_search
def GenerateOutput(target_list, target_dicts, data, params):
"""Generate an XML settings file that can be imported into a CDT project."""
if params['options'].generator_output:
- raise NotImplementedError, "--generator_output not implemented for eclipse"
+ raise NotImplementedError("--generator_output not implemented for eclipse")
user_config = params.get('generator_flags', {}).get('config', None)
if user_config:
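
For reference, the ElementTree pattern used by GenerateClasspathFile above
boils down to the following standalone sketch (the entry kinds match the
generator; the paths are illustrative only):

    # Same module the generator imports; Python 2 era. On modern Python,
    # use xml.etree.ElementTree instead.
    import xml.etree.cElementTree as ET

    result = ET.Element('classpath')
    for kind, path in [('lib', 'third_party/example.jar'),
                       ('src', 'java/src'),
                       ('con', 'org.eclipse.jdt.launching.JRE_CONTAINER'),
                       ('output', 'out/.eclipse-java-build')]:
        entry = ET.SubElement(result, 'classpathentry')
        entry.set('kind', kind)
        entry.set('path', path)
    ET.ElementTree(result).write('eclipse-classpath.xml')
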
diff --git a/deps/gyp/pylib/gyp/generator/gypd.py b/deps/gyp/pylib/gyp/generator/gypd.py
index 22ef57f847..3efdb9966a 100644
--- a/deps/gyp/pylib/gyp/generator/gypd.py
+++ b/deps/gyp/pylib/gyp/generator/gypd.py
@@ -39,9 +39,11 @@ import pprint
# These variables should just be spit back out as variable references.
_generator_identity_variables = [
+ 'CONFIGURATION_NAME',
'EXECUTABLE_PREFIX',
'EXECUTABLE_SUFFIX',
'INTERMEDIATE_DIR',
+ 'LIB_DIR',
'PRODUCT_DIR',
'RULE_INPUT_ROOT',
'RULE_INPUT_DIRNAME',
@@ -49,6 +51,11 @@ _generator_identity_variables = [
'RULE_INPUT_NAME',
'RULE_INPUT_PATH',
'SHARED_INTERMEDIATE_DIR',
+ 'SHARED_LIB_DIR',
+ 'SHARED_LIB_PREFIX',
+ 'SHARED_LIB_SUFFIX',
+ 'STATIC_LIB_PREFIX',
+ 'STATIC_LIB_SUFFIX',
]
# gypd doesn't define a default value for OS like many other generator
diff --git a/deps/gyp/pylib/gyp/generator/make.py b/deps/gyp/pylib/gyp/generator/make.py
index 7a8adb5850..b7da768fb3 100644
--- a/deps/gyp/pylib/gyp/generator/make.py
+++ b/deps/gyp/pylib/gyp/generator/make.py
@@ -29,6 +29,7 @@ import gyp
import gyp.common
import gyp.xcode_emulation
from gyp.common import GetEnvironFallback
+from gyp.common import GypError
generator_default_variables = {
'EXECUTABLE_PREFIX': '',
@@ -141,7 +142,7 @@ cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
# special "figure out circular dependencies" flags around the entire
# input list during linking.
quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) -Wl,--start-group $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) -Wl,--end-group $(LIBS)
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
# We support two kinds of shared objects (.so):
# 1) shared_library, which is just bundling together many dependent libraries
@@ -210,10 +211,10 @@ cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(T
LINK_COMMANDS_AIX = """\
quiet_cmd_alink = AR($(TOOLSET)) $@
-cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
+cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)
quiet_cmd_alink_thin = AR($(TOOLSET)) $@
-cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
+cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)
quiet_cmd_link = LINK($(TOOLSET)) $@
cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
@@ -272,30 +273,22 @@ all_deps :=
%(make_global_settings)s
CC.target ?= %(CC.target)s
-CFLAGS.target ?= $(CFLAGS)
+CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
CXX.target ?= %(CXX.target)s
-CXXFLAGS.target ?= $(CXXFLAGS)
+CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
LINK.target ?= %(LINK.target)s
LDFLAGS.target ?= $(LDFLAGS)
AR.target ?= $(AR)
# C++ apps need to be linked with g++.
-#
-# Note: flock is used to seralize linking. Linking is a memory-intensive
-# process so running parallel links can often lead to thrashing. To disable
-# the serialization, override LINK via an envrionment variable as follows:
-#
-# export LINK=g++
-#
-# This will allow make to invoke N linker processes as specified in -jN.
-LINK ?= %(flock)s $(builddir)/linker.lock $(CXX.target)
+LINK ?= $(CXX.target)
# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
# to replicate this environment fallback in make as well.
CC.host ?= %(CC.host)s
-CFLAGS.host ?=
+CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
CXX.host ?= %(CXX.host)s
-CXXFLAGS.host ?=
+CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
LINK.host ?= %(LINK.host)s
LDFLAGS.host ?=
AR.host ?= %(AR.host)s
@@ -372,7 +365,7 @@ cmd_touch = touch $@
quiet_cmd_copy = COPY $@
# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = ln -f "$<" "$@" 2>/dev/null || (rm -rf "$@" && cp -af "$<" "$@")
+cmd_copy = ln -f "$<" "$@" 2>/dev/null || (rm -rf "$@" && cp %(copy_archive_args)s "$<" "$@")
%(link_commands)s
"""
@@ -631,6 +624,38 @@ def QuoteSpaces(s, quote=r'\ '):
return s.replace(' ', quote)
+# TODO: Avoid code duplication with _ValidateSourcesForMSVSProject in msvs.py.
+def _ValidateSourcesForOSX(spec, all_sources):
+ """Makes sure if duplicate basenames are not specified in the source list.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ """
+ if spec.get('type', None) != 'static_library':
+ return
+
+ basenames = {}
+ for source in all_sources:
+ name, ext = os.path.splitext(source)
+ is_compiled_file = ext in [
+ '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
+ if not is_compiled_file:
+ continue
+ basename = os.path.basename(name) # Don't include extension.
+ basenames.setdefault(basename, []).append(source)
+
+ error = ''
+ for basename, files in basenames.iteritems():
+ if len(files) > 1:
+ error += ' %s: %s\n' % (basename, ' '.join(files))
+
+ if error:
+ print('static library %s has several files with the same basename:\n' %
+ spec['target_name'] + error + 'libtool on OS X will generate' +
+ ' warnings for them.')
+ raise GypError('Duplicate basenames in sources section, see list above')
+
+
# Map from qualified target to path to output.
target_outputs = {}
# Map from qualified target to any linkable output. A subset
@@ -640,7 +665,7 @@ target_outputs = {}
target_link_deps = {}
-class MakefileWriter:
+class MakefileWriter(object):
"""MakefileWriter packages up the writing of one target-specific foobar.mk.
Its only real entry point is Write(), and is mostly used for namespacing.
@@ -758,6 +783,10 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# Sources.
all_sources = spec.get('sources', []) + extra_sources
if all_sources:
+ if self.flavor == 'mac':
+ # libtool on OS X generates warnings for duplicate basenames in the same
+ # target.
+ _ValidateSourcesForOSX(spec, all_sources)
self.WriteSources(
configs, deps, all_sources, extra_outputs,
extra_link_deps, part_of_all,
@@ -990,7 +1019,8 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# accidentally writing duplicate dummy rules for those outputs.
self.WriteLn('%s: obj := $(abs_obj)' % outputs[0])
self.WriteLn('%s: builddir := $(abs_builddir)' % outputs[0])
- self.WriteMakeRule(outputs, inputs + ['FORCE_DO_CMD'], actions)
+ self.WriteMakeRule(outputs, inputs, actions,
+ command="%s_%d" % (name, count))
# Spaces in rule filenames are not supported, but rule variables have
# spaces in them (e.g. RULE_INPUT_PATH expands to '$(abspath $<)').
# The spaces within the variables are valid, so remove the variables
@@ -1101,9 +1131,12 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
for output, res in gyp.xcode_emulation.GetMacBundleResources(
generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
map(Sourceify, map(self.Absolutify, resources))):
- self.WriteDoCmd([output], [res], 'mac_tool,,,copy-bundle-resource',
- part_of_all=True)
- bundle_deps.append(output)
+ _, ext = os.path.splitext(output)
+ if ext != '.xcassets':
+          # Make does not support '.xcassets' emulation.
+ self.WriteDoCmd([output], [res], 'mac_tool,,,copy-bundle-resource',
+ part_of_all=True)
+ bundle_deps.append(output)
def WriteMacInfoPlist(self, bundle_deps):
@@ -1447,8 +1480,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
libraries = spec.get('libraries')
if libraries:
# Remove duplicate entries
- # Commented out due to https://code.google.com/p/gyp/issues/detail?id=419
- # libraries = gyp.common.uniquer(libraries)
+ libraries = gyp.common.uniquer(libraries)
if self.flavor == 'mac':
libraries = self.xcode_settings.AdjustLibraries(libraries)
self.WriteList(libraries, 'LIBS')
@@ -1547,7 +1579,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
for link_dep in link_deps:
assert ' ' not in link_dep, (
"Spaces in alink input filenames not supported (%s)" % link_dep)
- if (self.flavor not in ('mac', 'openbsd', 'win') and not
+ if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
self.is_standalone_static_library):
self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin',
part_of_all, postbuilds=postbuilds)
@@ -1657,6 +1689,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
self.WriteMakeRule(outputs, inputs,
actions = ['$(call do_cmd,%s%s)' % (command, suffix)],
comment = comment,
+ command = command,
force = True)
# Add our outputs to the list of targets we read depfiles from.
# all_deps is only used for deps file reading, and for deps files we replace
@@ -1667,7 +1700,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
def WriteMakeRule(self, outputs, inputs, actions=None, comment=None,
- order_only=False, force=False, phony=False):
+ order_only=False, force=False, phony=False, command=None):
"""Write a Makefile rule, with some extra tricks.
outputs: a list of outputs for the rule (note: this is not directly
@@ -1680,6 +1713,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
force: if true, include FORCE_DO_CMD as an order-only dep
phony: if true, the rule does not actually generate the named output, the
output is just a name to run the rule
+ command: (optional) command name to generate unambiguous labels
"""
outputs = map(QuoteSpaces, outputs)
inputs = map(QuoteSpaces, inputs)
@@ -1688,44 +1722,38 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
self.WriteLn('# ' + comment)
if phony:
self.WriteLn('.PHONY: ' + ' '.join(outputs))
- # TODO(evanm): just make order_only a list of deps instead of these hacks.
- if order_only:
- order_insert = '| '
- pick_output = ' '.join(outputs)
- else:
- order_insert = ''
- pick_output = outputs[0]
- if force:
- force_append = ' FORCE_DO_CMD'
- else:
- force_append = ''
if actions:
self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0])
- self.WriteLn('%s: %s%s%s' % (pick_output, order_insert, ' '.join(inputs),
- force_append))
+ force_append = ' FORCE_DO_CMD' if force else ''
+
+ if order_only:
+ # Order only rule: Just write a simple rule.
+ # TODO(evanm): just make order_only a list of deps instead of this hack.
+ self.WriteLn('%s: | %s%s' %
+ (' '.join(outputs), ' '.join(inputs), force_append))
+ elif len(outputs) == 1:
+ # Regular rule, one output: Just write a simple rule.
+ self.WriteLn('%s: %s%s' % (outputs[0], ' '.join(inputs), force_append))
+ else:
+      # Regular rule, more than one output: Multiple outputs are tricky in
+      # make. We will write three rules:
+      # - The multi-output rule: all outputs depend on an intermediate file,
+      #   with a do-nothing recipe.
+      # - A rule making .INTERMEDIATE depend on the intermediate file.
+      # - The intermediate file's rule: it depends on the inputs, runs the
+      #   actual command, and 'touch'es the intermediate file.
+ intermediate = "%s.intermediate" % (command if command else self.target)
+ self.WriteLn('%s: %s' % (' '.join(outputs), intermediate))
+      self.WriteLn('\t@:')
+ self.WriteLn('%s: %s' % ('.INTERMEDIATE', intermediate))
+ self.WriteLn('%s: %s%s' %
+ (intermediate, ' '.join(inputs), force_append))
+ actions.insert(0, '$(call do_cmd,touch)')
+
if actions:
for action in actions:
self.WriteLn('\t%s' % action)
- if not order_only and len(outputs) > 1:
- # If we have more than one output, a rule like
- # foo bar: baz
- # that for *each* output we must run the action, potentially
- # in parallel. That is not what we're trying to write -- what
- # we want is that we run the action once and it generates all
- # the files.
- # http://www.gnu.org/software/hello/manual/automake/Multiple-Outputs.html
- # discusses this problem and has this solution:
- # 1) Write the naive rule that would produce parallel runs of
- # the action.
- # 2) Make the outputs seralized on each other, so we won't start
- # a parallel run until the first run finishes, at which point
- # we'll have generated all the outputs and we're done.
- self.WriteLn('%s: %s' % (' '.join(outputs[1:]), outputs[0]))
- # Add a dummy command to the "extra outputs" rule, otherwise make seems to
- # think these outputs haven't (couldn't have?) changed, and thus doesn't
- # flag them as changed (i.e. include in '$?') when evaluating dependent
- # rules, which in turn causes do_cmd() to skip running dependent commands.
- self.WriteLn('%s: ;' % (' '.join(outputs[1:])))
self.WriteLn()
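
The three-rule scheme above can be hard to visualize from the writer code
alone. This standalone sketch (hypothetical output/input names and command
label, assuming force=True) prints the make text the new branch produces:

    def multi_output_rules(outputs, inputs, command_name, actions):
        # Mirrors the multi-output branch of WriteMakeRule above.
        intermediate = '%s.intermediate' % command_name
        lines = ['%s: %s' % (' '.join(outputs), intermediate),
                 '\t@:',  # do-nothing recipe for the multi-output rule
                 '.INTERMEDIATE: %s' % intermediate,
                 '%s: %s FORCE_DO_CMD' % (intermediate, ' '.join(inputs))]
        lines += ['\t%s' % a for a in ['$(call do_cmd,touch)'] + actions]
        return '\n'.join(lines)

    print(multi_output_rules(['a.h', 'a.cc'], ['a.idl'], 'idl_0',
                             ['$(call do_cmd,idl_0)']))
    # a.h a.cc: idl_0.intermediate
    #         @:
    # .INTERMEDIATE: idl_0.intermediate
    # idl_0.intermediate: a.idl FORCE_DO_CMD
    #         $(call do_cmd,touch)
    #         $(call do_cmd,idl_0)
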
@@ -1982,6 +2010,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
srcdir_prefix = '$(srcdir)/'
flock_command= 'flock'
+ copy_archive_arguments = '-af'
header_params = {
'default_target': default_target,
'builddir': builddir_name,
@@ -1991,6 +2020,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
'link_commands': LINK_COMMANDS_LINUX,
'extra_commands': '',
'srcdir': srcdir,
+ 'copy_archive_args': copy_archive_arguments,
}
if flavor == 'mac':
flock_command = './gyp-mac-tool flock'
@@ -2014,8 +2044,15 @@ def GenerateOutput(target_list, target_dicts, data, params):
header_params.update({
'flock': 'lockf',
})
+ elif flavor == 'openbsd':
+ copy_archive_arguments = '-pPRf'
+ header_params.update({
+ 'copy_archive_args': copy_archive_arguments,
+ })
elif flavor == 'aix':
+ copy_archive_arguments = '-pPRf'
header_params.update({
+ 'copy_archive_args': copy_archive_arguments,
'link_commands': LINK_COMMANDS_AIX,
'flock': './gyp-flock-tool flock',
'flock_index': 2,
@@ -2035,7 +2072,6 @@ def GenerateOutput(target_list, target_dicts, data, params):
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
make_global_settings_array = data[build_file].get('make_global_settings', [])
wrappers = {}
- wrappers['LINK'] = '%s $(builddir)/linker.lock' % flock_command
for key, value in make_global_settings_array:
if key.endswith('_wrapper'):
wrappers[key[:-len('_wrapper')]] = '$(abspath %s)' % value
diff --git a/deps/gyp/pylib/gyp/generator/msvs.py b/deps/gyp/pylib/gyp/generator/msvs.py
index 64991d4248..44cc1304a2 100644
--- a/deps/gyp/pylib/gyp/generator/msvs.py
+++ b/deps/gyp/pylib/gyp/generator/msvs.py
@@ -12,6 +12,7 @@ import sys
import gyp.common
import gyp.easy_xml as easy_xml
+import gyp.generator.ninja as ninja_generator
import gyp.MSVSNew as MSVSNew
import gyp.MSVSProject as MSVSProject
import gyp.MSVSSettings as MSVSSettings
@@ -41,7 +42,7 @@ OrderedDict = _import_OrderedDict()
# if IncrediBuild is executed from inside Visual Studio. This regex
# validates that the string looks like a GUID with all uppercase hex
# letters.
-VALID_MSVS_GUID_CHARS = re.compile('^[A-F0-9\-]+$')
+VALID_MSVS_GUID_CHARS = re.compile(r'^[A-F0-9\-]+$')
generator_default_variables = {
@@ -82,6 +83,12 @@ generator_additional_non_configuration_keys = [
'msvs_external_builder_build_cmd',
'msvs_external_builder_clean_cmd',
'msvs_external_builder_clcompile_cmd',
+ 'msvs_enable_winrt',
+ 'msvs_requires_importlibrary',
+ 'msvs_enable_winphone',
+ 'msvs_application_type_revision',
+ 'msvs_target_platform_version',
+ 'msvs_target_platform_minversion',
]
@@ -114,11 +121,11 @@ def _GetDomainAndUserName():
call = subprocess.Popen(['net', 'config', 'Workstation'],
stdout=subprocess.PIPE)
config = call.communicate()[0]
- username_re = re.compile('^User name\s+(\S+)', re.MULTILINE)
+ username_re = re.compile(r'^User name\s+(\S+)', re.MULTILINE)
username_match = username_re.search(config)
if username_match:
username = username_match.group(1)
- domain_re = re.compile('^Logon domain\s+(\S+)', re.MULTILINE)
+ domain_re = re.compile(r'^Logon domain\s+(\S+)', re.MULTILINE)
domain_match = domain_re.search(config)
if domain_match:
domain = domain_match.group(1)
@@ -284,7 +291,7 @@ def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
if [x for x in cmd if '$(InputDir)' in x]:
input_dir_preamble = (
'set INPUTDIR=$(InputDir)\n'
- 'set INPUTDIR=%INPUTDIR:$(ProjectDir)=%\n'
+ 'if NOT DEFINED INPUTDIR set INPUTDIR=.\\\n'
'set INPUTDIR=%INPUTDIR:~0,-1%\n'
)
else:
@@ -813,10 +820,10 @@ def _GenerateRulesForMSVS(p, output_dir, options, spec,
if rules_external:
_GenerateExternalRules(rules_external, output_dir, spec,
sources, options, actions_to_add)
- _AdjustSourcesForRules(spec, rules, sources, excluded_sources)
+ _AdjustSourcesForRules(rules, sources, excluded_sources, False)
-def _AdjustSourcesForRules(spec, rules, sources, excluded_sources):
+def _AdjustSourcesForRules(rules, sources, excluded_sources, is_msbuild):
# Add outputs generated by each rule (if applicable).
for rule in rules:
# Add in the outputs from this rule.
@@ -833,7 +840,7 @@ def _AdjustSourcesForRules(spec, rules, sources, excluded_sources):
outputs = OrderedSet(_FixPaths(outputs))
inputs.remove(_FixPath(trigger_file))
sources.update(inputs)
- if spec['type'] != 'none' and not spec.get('msvs_external_builder'):
+ if not is_msbuild:
excluded_sources.update(inputs)
sources.update(outputs)
@@ -920,6 +927,42 @@ def _GenerateProject(project, options, version, generator_flags):
return _GenerateMSVSProject(project, options, version, generator_flags)
+# TODO: Avoid code duplication with _ValidateSourcesForOSX in make.py.
+def _ValidateSourcesForMSVSProject(spec, version):
+ """Makes sure if duplicate basenames are not specified in the source list.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ version: The VisualStudioVersion object.
+ """
+ # This validation should not be applied to MSVC2010 and later.
+ assert not version.UsesVcxproj()
+
+ # TODO: Check if MSVC allows this for loadable_module targets.
+ if spec.get('type', None) not in ('static_library', 'shared_library'):
+ return
+ sources = spec.get('sources', [])
+ basenames = {}
+ for source in sources:
+ name, ext = os.path.splitext(source)
+ is_compiled_file = ext in [
+ '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
+ if not is_compiled_file:
+ continue
+ basename = os.path.basename(name) # Don't include extension.
+ basenames.setdefault(basename, []).append(source)
+
+ error = ''
+ for basename, files in basenames.iteritems():
+ if len(files) > 1:
+ error += ' %s: %s\n' % (basename, ' '.join(files))
+
+ if error:
+ print('static library %s has several files with the same basename:\n' %
+ spec['target_name'] + error + 'MSVC08 cannot handle that.')
+ raise GypError('Duplicate basenames in sources section, see list above')
+
+
def _GenerateMSVSProject(project, options, version, generator_flags):
"""Generates a .vcproj file. It may create .rules and .user files too.
@@ -945,6 +988,11 @@ def _GenerateMSVSProject(project, options, version, generator_flags):
for config_name, config in spec['configurations'].iteritems():
_AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)
+  # MSVC08 and prior versions cannot handle duplicate basenames in the same
+ # target.
+ # TODO: Take excluded sources into consideration if possible.
+ _ValidateSourcesForMSVSProject(spec, version)
+
# Prepare list of sources and excluded sources.
gyp_file = os.path.split(project.build_file)[1]
sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
@@ -1064,7 +1112,8 @@ def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
for this configuration.
"""
# Get the information for this configuration
- include_dirs, resource_include_dirs = _GetIncludeDirs(config)
+ include_dirs, midl_include_dirs, resource_include_dirs = \
+ _GetIncludeDirs(config)
libraries = _GetLibraries(spec)
library_dirs = _GetLibraryDirs(config)
out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False)
@@ -1092,6 +1141,8 @@ def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
# Add the information to the appropriate tool
_ToolAppend(tools, 'VCCLCompilerTool',
'AdditionalIncludeDirectories', include_dirs)
+ _ToolAppend(tools, 'VCMIDLTool',
+ 'AdditionalIncludeDirectories', midl_include_dirs)
_ToolAppend(tools, 'VCResourceCompilerTool',
'AdditionalIncludeDirectories', resource_include_dirs)
# Add in libraries.
@@ -1147,10 +1198,14 @@ def _GetIncludeDirs(config):
include_dirs = (
config.get('include_dirs', []) +
config.get('msvs_system_include_dirs', []))
+ midl_include_dirs = (
+ config.get('midl_include_dirs', []) +
+ config.get('msvs_system_include_dirs', []))
resource_include_dirs = config.get('resource_include_dirs', include_dirs)
include_dirs = _FixPaths(include_dirs)
+ midl_include_dirs = _FixPaths(midl_include_dirs)
resource_include_dirs = _FixPaths(resource_include_dirs)
- return include_dirs, resource_include_dirs
+ return include_dirs, midl_include_dirs, resource_include_dirs
def _GetLibraryDirs(config):
@@ -1184,7 +1239,7 @@ def _GetLibraries(spec):
found = OrderedSet()
unique_libraries_list = []
for entry in reversed(libraries):
- library = re.sub('^\-l', '', entry)
+ library = re.sub(r'^\-l', '', entry)
if not os.path.splitext(library)[1]:
library += '.lib'
if library not in found:
@@ -1392,7 +1447,7 @@ def _PrepareListOfSources(spec, generator_flags, gyp_file):
# Add all inputs to sources and excluded sources.
inputs = OrderedSet(inputs)
sources.update(inputs)
- if spec['type'] != 'none' and not spec.get('msvs_external_builder'):
+ if not spec.get('msvs_external_builder'):
excluded_sources.update(inputs)
if int(a.get('process_outputs_as_sources', False)):
_AddNormalizedSources(sources, a.get('outputs', []))
@@ -1787,7 +1842,7 @@ def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
return projects
-def _InitNinjaFlavor(options, target_list, target_dicts):
+def _InitNinjaFlavor(params, target_list, target_dicts):
"""Initialize targets for the ninja flavor.
This sets up the necessary variables in the targets to generate msvs projects
@@ -1795,7 +1850,7 @@ def _InitNinjaFlavor(options, target_list, target_dicts):
if they have not been set. This allows individual specs to override the
default values initialized here.
Arguments:
- options: Options provided to the generator.
+ params: Params provided to the generator.
target_list: List of target pairs: 'base/base.gyp:base'.
target_dicts: Dict of target properties keyed on target pair.
"""
@@ -1809,8 +1864,15 @@ def _InitNinjaFlavor(options, target_list, target_dicts):
spec['msvs_external_builder'] = 'ninja'
if not spec.get('msvs_external_builder_out_dir'):
- spec['msvs_external_builder_out_dir'] = \
- options.depth + '/out/$(Configuration)'
+ gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
+ gyp_dir = os.path.dirname(gyp_file)
+ configuration = '$(Configuration)'
+ if params.get('target_arch') == 'x64':
+ configuration += '_x64'
+ spec['msvs_external_builder_out_dir'] = os.path.join(
+ gyp.common.RelativePath(params['options'].toplevel_dir, gyp_dir),
+ ninja_generator.ComputeOutputDir(params),
+ configuration)
if not spec.get('msvs_external_builder_build_cmd'):
spec['msvs_external_builder_build_cmd'] = [
path_to_ninja,
@@ -1823,8 +1885,7 @@ def _InitNinjaFlavor(options, target_list, target_dicts):
path_to_ninja,
'-C',
'$(OutDir)',
- '-t',
- 'clean',
+ '-tclean',
'$(ProjectName)',
]
@@ -1905,7 +1966,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
# Optionally configure each spec to use ninja as the external builder.
if params.get('flavor') == 'ninja':
- _InitNinjaFlavor(options, target_list, target_dicts)
+ _InitNinjaFlavor(params, target_list, target_dicts)
# Prepare the set of configurations.
configs = set()
@@ -1958,7 +2019,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
def _GenerateMSBuildFiltersFile(filters_path, source_files,
- extension_to_rule_name):
+ rule_dependencies, extension_to_rule_name):
"""Generate the filters file.
This file is used by Visual Studio to organize the presentation of source
@@ -1971,8 +2032,8 @@ def _GenerateMSBuildFiltersFile(filters_path, source_files,
"""
filter_group = []
source_group = []
- _AppendFiltersForMSBuild('', source_files, extension_to_rule_name,
- filter_group, source_group)
+ _AppendFiltersForMSBuild('', source_files, rule_dependencies,
+ extension_to_rule_name, filter_group, source_group)
if filter_group:
content = ['Project',
{'ToolsVersion': '4.0',
@@ -1987,7 +2048,7 @@ def _GenerateMSBuildFiltersFile(filters_path, source_files,
os.unlink(filters_path)
-def _AppendFiltersForMSBuild(parent_filter_name, sources,
+def _AppendFiltersForMSBuild(parent_filter_name, sources, rule_dependencies,
extension_to_rule_name,
filter_group, source_group):
"""Creates the list of filters and sources to be added in the filter file.
@@ -2013,11 +2074,12 @@ def _AppendFiltersForMSBuild(parent_filter_name, sources,
['UniqueIdentifier', MSVSNew.MakeGuid(source.name)]])
# Recurse and add its dependents.
_AppendFiltersForMSBuild(filter_name, source.contents,
- extension_to_rule_name,
+ rule_dependencies, extension_to_rule_name,
filter_group, source_group)
else:
# It's a source. Create a source entry.
- _, element = _MapFileToMsBuildSourceType(source, extension_to_rule_name)
+ _, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
+ extension_to_rule_name)
source_entry = [element, {'Include': source}]
# Specify the filter it is part of, if any.
if parent_filter_name:
@@ -2025,7 +2087,8 @@ def _AppendFiltersForMSBuild(parent_filter_name, sources,
source_group.append(source_entry)
-def _MapFileToMsBuildSourceType(source, extension_to_rule_name):
+def _MapFileToMsBuildSourceType(source, rule_dependencies,
+ extension_to_rule_name):
"""Returns the group and element type of the source file.
Arguments:
@@ -2048,9 +2111,15 @@ def _MapFileToMsBuildSourceType(source, extension_to_rule_name):
elif ext == '.rc':
group = 'resource'
element = 'ResourceCompile'
+ elif ext == '.asm':
+ group = 'masm'
+ element = 'MASM'
elif ext == '.idl':
group = 'midl'
element = 'Midl'
+ elif source in rule_dependencies:
+ group = 'rule_dependency'
+ element = 'CustomBuild'
else:
group = 'none'
element = 'None'
@@ -2060,7 +2129,8 @@ def _MapFileToMsBuildSourceType(source, extension_to_rule_name):
def _GenerateRulesForMSBuild(output_dir, options, spec,
sources, excluded_sources,
props_files_of_rules, targets_files_of_rules,
- actions_to_add, extension_to_rule_name):
+ actions_to_add, rule_dependencies,
+ extension_to_rule_name):
# MSBuild rules are implemented using three files: an XML file, a .targets
# file and a .props file.
# See http://blogs.msdn.com/b/vcblog/archive/2010/04/21/quick-help-on-vs2010-custom-build-rule.aspx
@@ -2076,6 +2146,7 @@ def _GenerateRulesForMSBuild(output_dir, options, spec,
continue
msbuild_rule = MSBuildRule(rule, spec)
msbuild_rules.append(msbuild_rule)
+ rule_dependencies.update(msbuild_rule.additional_dependencies.split(';'))
extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name
if msbuild_rules:
base = spec['target_name'] + options.suffix
@@ -2097,7 +2168,7 @@ def _GenerateRulesForMSBuild(output_dir, options, spec,
if rules_external:
_GenerateExternalRules(rules_external, output_dir, spec,
sources, options, actions_to_add)
- _AdjustSourcesForRules(spec, rules, sources, excluded_sources)
+ _AdjustSourcesForRules(rules, sources, excluded_sources, True)
class MSBuildRule(object):
@@ -2276,6 +2347,9 @@ def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
rule_name,
{'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
"'true'" % (rule_name, rule_name),
+ 'EchoOff': 'true',
+ 'StandardOutputImportance': 'High',
+ 'StandardErrorImportance': 'High',
'CommandLineTemplate': '%%(%s.CommandLineTemplate)' % rule_name,
'AdditionalOptions': '%%(%s.AdditionalOptions)' % rule_name,
'Inputs': rule_inputs
@@ -2550,15 +2624,45 @@ def _GetMSBuildProjectConfigurations(configurations):
def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
namespace = os.path.splitext(gyp_file_name)[0]
- return [
+ properties = [
['PropertyGroup', {'Label': 'Globals'},
- ['ProjectGuid', guid],
- ['Keyword', 'Win32Proj'],
- ['RootNamespace', namespace],
- ['IgnoreWarnCompileDuplicatedFilename', 'true'],
+ ['ProjectGuid', guid],
+ ['Keyword', 'Win32Proj'],
+ ['RootNamespace', namespace],
+ ['IgnoreWarnCompileDuplicatedFilename', 'true'],
]
- ]
+ ]
+
+ if os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or \
+ os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64':
+ properties[0].append(['PreferredToolArchitecture', 'x64'])
+
+ if spec.get('msvs_enable_winrt'):
+ properties[0].append(['DefaultLanguage', 'en-US'])
+ properties[0].append(['AppContainerApplication', 'true'])
+ if spec.get('msvs_application_type_revision'):
+ app_type_revision = spec.get('msvs_application_type_revision')
+ properties[0].append(['ApplicationTypeRevision', app_type_revision])
+ else:
+ properties[0].append(['ApplicationTypeRevision', '8.1'])
+
+ if spec.get('msvs_target_platform_version'):
+ target_platform_version = spec.get('msvs_target_platform_version')
+ properties[0].append(['WindowsTargetPlatformVersion',
+ target_platform_version])
+ if spec.get('msvs_target_platform_minversion'):
+ target_platform_minversion = spec.get('msvs_target_platform_minversion')
+ properties[0].append(['WindowsTargetPlatformMinVersion',
+ target_platform_minversion])
+ else:
+ properties[0].append(['WindowsTargetPlatformMinVersion',
+ target_platform_version])
+ if spec.get('msvs_enable_winphone'):
+ properties[0].append(['ApplicationType', 'Windows Phone'])
+ else:
+ properties[0].append(['ApplicationType', 'Windows Store'])
+ return properties
def _GetMSBuildConfigurationDetails(spec, build_file):
properties = {}
@@ -2569,8 +2673,9 @@ def _GetMSBuildConfigurationDetails(spec, build_file):
_AddConditionalProperty(properties, condition, 'ConfigurationType',
msbuild_attributes['ConfigurationType'])
if character_set:
- _AddConditionalProperty(properties, condition, 'CharacterSet',
- character_set)
+    if 'msvs_enable_winrt' not in spec:
+ _AddConditionalProperty(properties, condition, 'CharacterSet',
+ character_set)
return _GetMSBuildPropertyGroup(spec, 'Configuration', properties)
@@ -2785,7 +2890,7 @@ def _AddConditionalProperty(properties, condition, name, value):
# Regex for msvs variable references ( i.e. $(FOO) ).
-MSVS_VARIABLE_REFERENCE = re.compile('\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)')
+MSVS_VARIABLE_REFERENCE = re.compile(r'\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)')
def _GetMSBuildPropertyGroup(spec, label, properties):
@@ -2869,7 +2974,8 @@ def _FinalizeMSBuildSettings(spec, configuration):
converted = True
msvs_settings = configuration.get('msvs_settings', {})
msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings)
- include_dirs, resource_include_dirs = _GetIncludeDirs(configuration)
+ include_dirs, midl_include_dirs, resource_include_dirs = \
+ _GetIncludeDirs(configuration)
libraries = _GetLibraries(spec)
library_dirs = _GetLibraryDirs(configuration)
out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True)
@@ -2899,6 +3005,8 @@ def _FinalizeMSBuildSettings(spec, configuration):
# if you don't have any resources.
_ToolAppend(msbuild_settings, 'ClCompile',
'AdditionalIncludeDirectories', include_dirs)
+ _ToolAppend(msbuild_settings, 'Midl',
+ 'AdditionalIncludeDirectories', midl_include_dirs)
_ToolAppend(msbuild_settings, 'ResourceCompile',
'AdditionalIncludeDirectories', resource_include_dirs)
# Add in libraries, note that even for empty libraries, we want this
@@ -2929,6 +3037,13 @@ def _FinalizeMSBuildSettings(spec, configuration):
'PrecompiledHeaderFile', precompiled_header)
_ToolAppend(msbuild_settings, 'ClCompile',
'ForcedIncludeFiles', [precompiled_header])
+ else:
+ _ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'NotUsing')
+ # Turn off WinRT compilation
+ _ToolAppend(msbuild_settings, 'ClCompile', 'CompileAsWinRT', 'false')
+ # Turn on import libraries if appropriate
+ if spec.get('msvs_requires_importlibrary'):
+ _ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'false')
# Loadable modules don't generate import libraries;
# tell dependent projects to not expect one.
if spec['type'] == 'loadable_module':
@@ -2996,15 +3111,18 @@ def _VerifySourcesExist(sources, root_dir):
return missing_sources
-def _GetMSBuildSources(spec, sources, exclusions, extension_to_rule_name,
- actions_spec, sources_handled_by_action, list_excluded):
- groups = ['none', 'midl', 'include', 'compile', 'resource', 'rule']
+def _GetMSBuildSources(spec, sources, exclusions, rule_dependencies,
+ extension_to_rule_name, actions_spec,
+ sources_handled_by_action, list_excluded):
+ groups = ['none', 'masm', 'midl', 'include', 'compile', 'resource', 'rule',
+ 'rule_dependency']
grouped_sources = {}
for g in groups:
grouped_sources[g] = []
_AddSources2(spec, sources, exclusions, grouped_sources,
- extension_to_rule_name, sources_handled_by_action, list_excluded)
+ rule_dependencies, extension_to_rule_name,
+ sources_handled_by_action, list_excluded)
sources = []
for g in groups:
if grouped_sources[g]:
@@ -3015,13 +3133,15 @@ def _GetMSBuildSources(spec, sources, exclusions, extension_to_rule_name,
def _AddSources2(spec, sources, exclusions, grouped_sources,
- extension_to_rule_name, sources_handled_by_action,
+ rule_dependencies, extension_to_rule_name,
+ sources_handled_by_action,
list_excluded):
extensions_excluded_from_precompile = []
for source in sources:
if isinstance(source, MSVSProject.Filter):
_AddSources2(spec, source.contents, exclusions, grouped_sources,
- extension_to_rule_name, sources_handled_by_action,
+ rule_dependencies, extension_to_rule_name,
+ sources_handled_by_action,
list_excluded)
else:
if not source in sources_handled_by_action:
@@ -3064,7 +3184,7 @@ def _AddSources2(spec, sources, exclusions, grouped_sources,
detail.append(['PrecompiledHeader', ''])
detail.append(['ForcedIncludeFiles', ''])
- group, element = _MapFileToMsBuildSourceType(source,
+ group, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
extension_to_rule_name)
grouped_sources[group].append([element, {'Include': source}] + detail)
@@ -3108,6 +3228,7 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
actions_to_add = {}
props_files_of_rules = set()
targets_files_of_rules = set()
+ rule_dependencies = set()
extension_to_rule_name = {}
list_excluded = generator_flags.get('msvs_list_excluded_files', True)
@@ -3116,10 +3237,11 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
_GenerateRulesForMSBuild(project_dir, options, spec,
sources, excluded_sources,
props_files_of_rules, targets_files_of_rules,
- actions_to_add, extension_to_rule_name)
+ actions_to_add, rule_dependencies,
+ extension_to_rule_name)
else:
rules = spec.get('rules', [])
- _AdjustSourcesForRules(spec, rules, sources, excluded_sources)
+ _AdjustSourcesForRules(rules, sources, excluded_sources, True)
sources, excluded_sources, excluded_idl = (
_AdjustSourcesAndConvertToFilterHierarchy(spec, options,
@@ -3142,6 +3264,7 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
spec, actions_to_add)
_GenerateMSBuildFiltersFile(project.path + '.filters', sources,
+ rule_dependencies,
extension_to_rule_name)
missing_sources = _VerifySourcesExist(sources, project_dir)
@@ -3156,6 +3279,12 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.props'}]]
import_cpp_targets_section = [
['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.targets'}]]
+ import_masm_props_section = [
+ ['Import',
+ {'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.props'}]]
+ import_masm_targets_section = [
+ ['Import',
+ {'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.targets'}]]
macro_section = [['PropertyGroup', {'Label': 'UserMacros'}]]
content = [
@@ -3169,8 +3298,12 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
content += _GetMSBuildGlobalProperties(spec, project.guid, project_file_name)
content += import_default_section
content += _GetMSBuildConfigurationDetails(spec, project.build_file)
- content += _GetMSBuildLocalProperties(project.msbuild_toolset)
+ if spec.get('msvs_enable_winphone'):
+ content += _GetMSBuildLocalProperties('v120_wp81')
+ else:
+ content += _GetMSBuildLocalProperties(project.msbuild_toolset)
content += import_cpp_props_section
+ content += import_masm_props_section
content += _GetMSBuildExtensions(props_files_of_rules)
content += _GetMSBuildPropertySheets(configurations)
content += macro_section
@@ -3178,10 +3311,11 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
project.build_file)
content += _GetMSBuildToolSettingsSections(spec, configurations)
content += _GetMSBuildSources(
- spec, sources, exclusions, extension_to_rule_name, actions_spec,
- sources_handled_by_action, list_excluded)
+ spec, sources, exclusions, rule_dependencies, extension_to_rule_name,
+ actions_spec, sources_handled_by_action, list_excluded)
content += _GetMSBuildProjectReferences(project)
content += import_cpp_targets_section
+ content += import_masm_targets_section
content += _GetMSBuildExtensionTargets(targets_files_of_rules)
if spec.get('msvs_external_builder'):
@@ -3283,8 +3417,8 @@ def _GenerateActionsForMSBuild(spec, actions_to_add):
# get too long. See also _AddActions: cygwin's setup_env mustn't be called
# for every invocation or the command that sets the PATH will grow too
# long.
- command = (
- '\r\nif %errorlevel% neq 0 exit /b %errorlevel%\r\n'.join(commands))
+ command = '\r\n'.join([c + '\r\nif %errorlevel% neq 0 exit /b %errorlevel%'
+ for c in commands])
_AddMSBuildAction(spec,
primary_input,
inputs,
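
The batch-command change in the last hunk above is subtle: the old form only
placed the errorlevel check between commands, so a failure of the final
command was silently ignored. A standalone sketch (two placeholder commands)
shows the difference:

    commands = ['mkdir gen', 'python gen.py']
    check = '\r\nif %errorlevel% neq 0 exit /b %errorlevel%'

    old = (check + '\r\n').join(commands)            # last command unchecked
    new = '\r\n'.join(c + check for c in commands)   # every command checked

    print(old.count('exit /b'), new.count('exit /b'))  # 1 vs 2
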
diff --git a/deps/gyp/pylib/gyp/generator/ninja.py b/deps/gyp/pylib/gyp/generator/ninja.py
index e3fafb5d43..b13affe0a1 100644
--- a/deps/gyp/pylib/gyp/generator/ninja.py
+++ b/deps/gyp/pylib/gyp/generator/ninja.py
@@ -14,6 +14,7 @@ import subprocess
import sys
import gyp
import gyp.common
+from gyp.common import OrderedSet
import gyp.msvs_emulation
import gyp.MSVSUtil as MSVSUtil
import gyp.xcode_emulation
@@ -61,17 +62,7 @@ generator_additional_path_sections = []
generator_extra_sources_for_rules = []
generator_filelist_paths = None
-# TODO: figure out how to not build extra host objects in the non-cross-compile
-# case when this is enabled, and enable unconditionally.
-generator_supports_multiple_toolsets = (
- os.environ.get('GYP_CROSSCOMPILE') or
- os.environ.get('AR_host') or
- os.environ.get('CC_host') or
- os.environ.get('CXX_host') or
- os.environ.get('AR_target') or
- os.environ.get('CC_target') or
- os.environ.get('CXX_target'))
-
+generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
def StripPrefix(arg, prefix):
if arg.startswith(prefix):
@@ -107,7 +98,7 @@ def AddArch(output, arch):
return '%s.%s%s' % (output, arch, extension)
-class Target:
+class Target(object):
"""Target represents the paths used within a single gyp target.
Conceptually, building a single target A is a series of steps:
@@ -148,8 +139,11 @@ class Target:
self.bundle = None
# On Windows, incremental linking requires linking against all the .objs
# that compose a .lib (rather than the .lib itself). That list is stored
- # here.
+ # here. In this case, we also need to save the compile_deps for the target,
+    # so that the target that directly depends on the .objs can also depend
+ # on those.
self.component_objs = None
+ self.compile_deps = None
# Windows only. The import .lib is the output of a build step, but
# because dependents only link against the lib (not both the lib and the
# dll) we keep track of the import library here.
@@ -211,8 +205,8 @@ class Target:
# an output file; the result can be namespaced such that it is unique
# to the input file name as well as the output target name.
-class NinjaWriter:
- def __init__(self, qualified_target, target_outputs, base_dir, build_dir,
+class NinjaWriter(object):
+ def __init__(self, hash_for_rules, target_outputs, base_dir, build_dir,
output_file, toplevel_build, output_file_name, flavor,
toplevel_dir=None):
"""
@@ -222,7 +216,7 @@ class NinjaWriter:
toplevel_dir: path to the toplevel directory
"""
- self.qualified_target = qualified_target
+ self.hash_for_rules = hash_for_rules
self.target_outputs = target_outputs
self.base_dir = base_dir
self.build_dir = build_dir
@@ -339,6 +333,9 @@ class NinjaWriter:
obj += '.' + self.toolset
path_dir, path_basename = os.path.split(path)
+ assert not os.path.isabs(path_dir), (
+ "'%s' can not be absolute path (see crbug.com/462153)." % path_dir)
+
if qualified:
path_basename = self.name + '.' + path_basename
return os.path.normpath(os.path.join(obj, self.base_dir, path_dir,
@@ -393,6 +390,9 @@ class NinjaWriter:
self.ninja.variable('arch', self.win_env[arch])
self.ninja.variable('cc', '$cl_' + arch)
self.ninja.variable('cxx', '$cl_' + arch)
+ self.ninja.variable('cc_host', '$cl_' + arch)
+ self.ninja.variable('cxx_host', '$cl_' + arch)
+ self.ninja.variable('asm', '$ml_' + arch)
if self.flavor == 'mac':
self.archs = self.xcode_settings.GetActiveArchs(config_name)
@@ -477,16 +477,17 @@ class NinjaWriter:
elif self.flavor == 'mac' and len(self.archs) > 1:
link_deps = collections.defaultdict(list)
-
+ compile_deps = self.target.actions_stamp or actions_depends
if self.flavor == 'win' and self.target.type == 'static_library':
self.target.component_objs = link_deps
+ self.target.compile_deps = compile_deps
# Write out a link step, if needed.
output = None
is_empty_bundle = not link_deps and not mac_bundle_depends
if link_deps or self.target.actions_stamp or actions_depends:
output = self.WriteTarget(spec, config_name, config, link_deps,
- self.target.actions_stamp or actions_depends)
+ compile_deps)
if self.is_mac_bundle:
mac_bundle_depends.append(output)
@@ -527,7 +528,7 @@ class NinjaWriter:
def WriteWinIdlFiles(self, spec, prebuild):
"""Writes rules to match MSVS's implicit idl handling."""
assert self.flavor == 'win'
- if self.msvs_settings.HasExplicitIdlRules(spec):
+ if self.msvs_settings.HasExplicitIdlRulesOrActions(spec):
return []
outputs = []
for source in filter(lambda x: x.endswith('.idl'), spec['sources']):
@@ -561,9 +562,10 @@ class NinjaWriter:
stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)
if self.is_mac_bundle:
- self.WriteMacBundleResources(
+ xcassets = self.WriteMacBundleResources(
extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends)
- self.WriteMacInfoPlist(mac_bundle_depends)
+ partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends)
+ self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends)
return stamp
@@ -588,16 +590,20 @@ class NinjaWriter:
all_outputs = []
for action in actions:
# First write out a rule for the action.
- name = '%s_%s' % (action['action_name'],
- hashlib.md5(self.qualified_target).hexdigest())
+ name = '%s_%s' % (action['action_name'], self.hash_for_rules)
description = self.GenerateDescription('ACTION',
action.get('message', None),
name)
is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action)
if self.flavor == 'win' else False)
args = action['action']
+ depfile = action.get('depfile', None)
+ if depfile:
+ depfile = self.ExpandSpecial(depfile, self.base_to_build)
+ pool = 'console' if int(action.get('ninja_use_console', 0)) else None
rule_name, _ = self.WriteNewNinjaRule(name, args, description,
- is_cygwin, env=env)
+ is_cygwin, env, pool,
+ depfile=depfile)
inputs = [self.GypPathToNinja(i, env) for i in action['inputs']]
if int(action.get('process_outputs_as_sources', False)):
@@ -625,8 +631,7 @@ class NinjaWriter:
continue
# First write out a rule for the rule action.
- name = '%s_%s' % (rule['rule_name'],
- hashlib.md5(self.qualified_target).hexdigest())
+ name = '%s_%s' % (rule['rule_name'], self.hash_for_rules)
args = rule['action']
description = self.GenerateDescription(
@@ -635,8 +640,9 @@ class NinjaWriter:
('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name)
is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule)
if self.flavor == 'win' else False)
+ pool = 'console' if int(rule.get('ninja_use_console', 0)) else None
rule_name, args = self.WriteNewNinjaRule(
- name, args, description, is_cygwin, env=env)
+ name, args, description, is_cygwin, env, pool)
# TODO: if the command references the outputs directly, we should
# simplify it to just use $out.
@@ -648,10 +654,11 @@ class NinjaWriter:
needed_variables = set(['source'])
for argument in args:
for var in special_locals:
- if ('${%s}' % var) in argument:
+ if '${%s}' % var in argument:
needed_variables.add(var)
def cygwin_munge(path):
+ # pylint: disable=cell-var-from-loop
if is_cygwin:
return path.replace('\\', '/')
return path
@@ -757,15 +764,68 @@ class NinjaWriter:
def WriteMacBundleResources(self, resources, bundle_depends):
"""Writes ninja edges for 'mac_bundle_resources'."""
+ xcassets = []
for output, res in gyp.xcode_emulation.GetMacBundleResources(
generator_default_variables['PRODUCT_DIR'],
self.xcode_settings, map(self.GypPathToNinja, resources)):
output = self.ExpandSpecial(output)
- self.ninja.build(output, 'mac_tool', res,
- variables=[('mactool_cmd', 'copy-bundle-resource')])
- bundle_depends.append(output)
+ if os.path.splitext(output)[-1] != '.xcassets':
+ isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
+ self.ninja.build(output, 'mac_tool', res,
+ variables=[('mactool_cmd', 'copy-bundle-resource'), \
+ ('binary', isBinary)])
+ bundle_depends.append(output)
+ else:
+ xcassets.append(res)
+ return xcassets
+
+ def WriteMacXCassets(self, xcassets, bundle_depends):
+ """Writes ninja edges for 'mac_bundle_resources' .xcassets files.
+
+    This adds an invocation of 'actool' via the 'mac_tool.py' helper script.
+    It assumes that the asset catalogs define at least one imageset and
+    thus that an Assets.car file will be generated in the application
+    resources directory. If this is not the case, the build will probably
+    be redone at each invocation of ninja."""
+ if not xcassets:
+ return
+
+ extra_arguments = {}
+ settings_to_arg = {
+ 'XCASSETS_APP_ICON': 'app-icon',
+ 'XCASSETS_LAUNCH_IMAGE': 'launch-image',
+ }
+ settings = self.xcode_settings.xcode_settings[self.config_name]
+ for settings_key, arg_name in settings_to_arg.iteritems():
+ value = settings.get(settings_key)
+ if value:
+ extra_arguments[arg_name] = value
+
+ partial_info_plist = None
+ if extra_arguments:
+ partial_info_plist = self.GypPathToUniqueOutput(
+ 'assetcatalog_generated_info.plist')
+ extra_arguments['output-partial-info-plist'] = partial_info_plist
- def WriteMacInfoPlist(self, bundle_depends):
+ outputs = []
+ outputs.append(
+ os.path.join(
+ self.xcode_settings.GetBundleResourceFolder(),
+ 'Assets.car'))
+ if partial_info_plist:
+ outputs.append(partial_info_plist)
+
+ keys = QuoteShellArgument(json.dumps(extra_arguments), self.flavor)
+ extra_env = self.xcode_settings.GetPerTargetSettings()
+ env = self.GetSortedXcodeEnv(additional_settings=extra_env)
+ env = self.ComputeExportEnvString(env)
+
+ bundle_depends.extend(self.ninja.build(
+ outputs, 'compile_xcassets', xcassets,
+ variables=[('env', env), ('keys', keys)]))
+ return partial_info_plist
+
+ def WriteMacInfoPlist(self, partial_info_plist, bundle_depends):
"""Write build rules for bundle Info.plist files."""
info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
generator_default_variables['PRODUCT_DIR'],
@@ -785,10 +845,18 @@ class NinjaWriter:
env = self.GetSortedXcodeEnv(additional_settings=extra_env)
env = self.ComputeExportEnvString(env)
+ if partial_info_plist:
+ intermediate_plist = self.GypPathToUniqueOutput('merged_info.plist')
+ info_plist = self.ninja.build(
+ intermediate_plist, 'merge_infoplist',
+ [partial_info_plist, info_plist])
+
keys = self.xcode_settings.GetExtraPlistItems(self.config_name)
keys = QuoteShellArgument(json.dumps(keys), self.flavor)
+ isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
self.ninja.build(out, 'copy_infoplist', info_plist,
- variables=[('env', env), ('keys', keys)])
+ variables=[('env', env), ('keys', keys),
+ ('binary', isBinary)])
bundle_depends.append(out)
def WriteSources(self, ninja_file, config_name, config, sources, predepends,
@@ -800,6 +868,8 @@ class NinjaWriter:
self.ninja.variable('cxx', '$cxx_host')
self.ninja.variable('ld', '$ld_host')
self.ninja.variable('ldxx', '$ldxx_host')
+ self.ninja.variable('nm', '$nm_host')
+ self.ninja.variable('readelf', '$readelf_host')
if self.flavor != 'mac' or len(self.archs) == 1:
return self.WriteSourcesForArch(
@@ -855,6 +925,11 @@ class NinjaWriter:
os.environ.get('CFLAGS', '').split() + cflags_c)
cflags_cc = (os.environ.get('CPPFLAGS', '').split() +
os.environ.get('CXXFLAGS', '').split() + cflags_cc)
+ elif self.toolset == 'host':
+ cflags_c = (os.environ.get('CPPFLAGS_host', '').split() +
+ os.environ.get('CFLAGS_host', '').split() + cflags_c)
+ cflags_cc = (os.environ.get('CPPFLAGS_host', '').split() +
+ os.environ.get('CXXFLAGS_host', '').split() + cflags_cc)
defines = config.get('defines', []) + extra_defines
self.WriteVariableList(ninja_file, 'defines',
@@ -877,6 +952,14 @@ class NinjaWriter:
[QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
for i in include_dirs])
+ if self.flavor == 'win':
+ midl_include_dirs = config.get('midl_include_dirs', [])
+ midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs(
+ midl_include_dirs, config_name)
+ self.WriteVariableList(ninja_file, 'midl_includes',
+ [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
+ for i in midl_include_dirs])
+
pch_commands = precompiled_header.GetPchBuildCommands(arch)
if self.flavor == 'mac':
# Most targets use no precompiled headers, so only write these if needed.
@@ -885,6 +968,8 @@ class NinjaWriter:
include = precompiled_header.GetInclude(ext, arch)
if include: ninja_file.variable(var, include)
+ arflags = config.get('arflags', [])
+
self.WriteVariableList(ninja_file, 'cflags',
map(self.ExpandSpecial, cflags))
self.WriteVariableList(ninja_file, 'cflags_c',
@@ -896,6 +981,8 @@ class NinjaWriter:
map(self.ExpandSpecial, cflags_objc))
self.WriteVariableList(ninja_file, 'cflags_objcc',
map(self.ExpandSpecial, cflags_objcc))
+ self.WriteVariableList(ninja_file, 'arflags',
+ map(self.ExpandSpecial, arflags))
ninja_file.newline()
outputs = []
has_rc_source = False
@@ -911,9 +998,7 @@ class NinjaWriter:
elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files.
command = 'cc_s'
elif (self.flavor == 'win' and ext == 'asm' and
- self.msvs_settings.GetArch(config_name) == 'x86' and
not self.msvs_settings.HasExplicitAsmRules(spec)):
- # Asm files only get auto assembled for x86 (not x64).
command = 'asm'
# Add the _asm suffix as msvs is capable of handling .cc and
# .asm files of the same name without collision.
@@ -985,9 +1070,19 @@ class NinjaWriter:
arch=arch)
for arch in self.archs]
extra_bindings = []
+ build_output = output
if not self.is_mac_bundle:
self.AppendPostbuildVariable(extra_bindings, spec, output, output)
- self.ninja.build(output, 'lipo', inputs, variables=extra_bindings)
+
+ # TODO(yyanagisawa): more work needed to fix:
+ # https://code.google.com/p/gyp/issues/detail?id=411
+ if (spec['type'] in ('shared_library', 'loadable_module') and
+ not self.is_mac_bundle):
+ extra_bindings.append(('lib', output))
+ self.ninja.build([output, output + '.TOC'], 'solipo', inputs,
+ variables=extra_bindings)
+ else:
+ self.ninja.build(build_output, 'lipo', inputs, variables=extra_bindings)
return output
def WriteLinkForArch(self, ninja_file, spec, config_name, config,
@@ -1002,6 +1097,7 @@ class NinjaWriter:
implicit_deps = set()
solibs = set()
+ order_deps = set()
if 'dependencies' in spec:
# Two kinds of dependencies:
@@ -1020,6 +1116,8 @@ class NinjaWriter:
target.component_objs and
self.msvs_settings.IsUseLibraryDependencyInputs(config_name)):
new_deps = target.component_objs
+ if target.compile_deps:
+ order_deps.add(target.compile_deps)
elif self.flavor == 'win' and target.import_lib:
new_deps = [target.import_lib]
elif target.UsesToc(self.flavor):
@@ -1080,10 +1178,10 @@ class NinjaWriter:
rpath = 'lib/'
if self.toolset != 'target':
rpath += self.toolset
- ldflags.append('-Wl,-rpath=\$$ORIGIN/%s' % rpath)
+ ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath)
ldflags.append('-Wl,-rpath-link=%s' % rpath)
self.WriteVariableList(ninja_file, 'ldflags',
- gyp.common.uniquer(map(self.ExpandSpecial, ldflags)))
+ map(self.ExpandSpecial, ldflags))
library_dirs = config.get('library_dirs', [])
if self.flavor == 'win':
@@ -1131,7 +1229,8 @@ class NinjaWriter:
gyp.common.EncodePOSIXShellArgument(link_file_list)))
if self.flavor == 'win':
extra_bindings.append(('binary', output))
- if '/NOENTRY' not in ldflags:
+ if ('/NOENTRY' not in ldflags and
+ not self.msvs_settings.GetNoImportLibrary(config_name)):
self.target.import_lib = output + '.lib'
extra_bindings.append(('implibflag',
'/IMPLIB:%s' % self.target.import_lib))
@@ -1157,6 +1256,7 @@ class NinjaWriter:
ninja_file.build(output, command + command_suffix, link_deps,
implicit=list(implicit_deps),
+ order_only=list(order_deps),
variables=extra_bindings)
return linked_binary
@@ -1171,7 +1271,7 @@ class NinjaWriter:
self.target.type = 'none'
elif spec['type'] == 'static_library':
self.target.binary = self.ComputeOutput(spec)
- if (self.flavor not in ('mac', 'openbsd', 'win') and not
+ if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
self.is_standalone_static_library):
self.ninja.build(self.target.binary, 'alink_thin', link_deps,
order_only=compile_deps)
@@ -1424,7 +1524,8 @@ class NinjaWriter:
values = []
ninja_file.variable(var, ' '.join(values))
- def WriteNewNinjaRule(self, name, args, description, is_cygwin, env):
+ def WriteNewNinjaRule(self, name, args, description, is_cygwin, env, pool,
+ depfile=None):
"""Write out a new ninja "rule" statement for a given command.
Returns the name of the new rule, and a copy of |args| with variables
@@ -1482,7 +1583,8 @@ class NinjaWriter:
# GYP rules/actions express being no-ops by not touching their outputs.
# Avoid executing downstream dependencies in this case by specifying
# restat=1 to ninja.
- self.ninja.rule(rule_name, command, description, restat=True,
+ self.ninja.rule(rule_name, command, description, depfile=depfile,
+ restat=True, pool=pool,
rspfile=rspfile, rspfile_content=rspfile_content)
self.ninja.newline()
@@ -1513,12 +1615,13 @@ def CalculateVariables(default_variables, params):
generator_extra_sources_for_rules = getattr(xcode_generator,
'generator_extra_sources_for_rules', [])
elif flavor == 'win':
+ exts = gyp.MSVSUtil.TARGET_TYPE_EXT
default_variables.setdefault('OS', 'win')
- default_variables['EXECUTABLE_SUFFIX'] = '.exe'
+ default_variables['EXECUTABLE_SUFFIX'] = '.' + exts['executable']
default_variables['STATIC_LIB_PREFIX'] = ''
- default_variables['STATIC_LIB_SUFFIX'] = '.lib'
+ default_variables['STATIC_LIB_SUFFIX'] = '.' + exts['static_library']
default_variables['SHARED_LIB_PREFIX'] = ''
- default_variables['SHARED_LIB_SUFFIX'] = '.dll'
+ default_variables['SHARED_LIB_SUFFIX'] = '.' + exts['shared_library']
# Copy additional generator configuration data from VS, which is shared
# by the Windows Ninja generator.
@@ -1582,6 +1685,10 @@ def CommandWithWrapper(cmd, wrappers, prog):
def GetDefaultConcurrentLinks():
"""Returns a best-guess for a number of concurrent links."""
+ pool_size = int(os.environ.get('GYP_LINK_CONCURRENCY', 0))
+ if pool_size:
+ return pool_size
+
if sys.platform in ('win32', 'cygwin'):
import ctypes
@@ -1602,8 +1709,10 @@ def GetDefaultConcurrentLinks():
stat.dwLength = ctypes.sizeof(stat)
ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
- mem_limit = max(1, stat.ullTotalPhys / (4 * (2 ** 30))) # total / 4GB
- hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2**32)))
+ # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM
+ # on a 64 GB machine.
+ mem_limit = max(1, stat.ullTotalPhys / (5 * (2 ** 30))) # total / 5GB
+ hard_cap = max(1, int(os.environ.get('GYP_LINK_CONCURRENCY_MAX', 2**32)))
return min(mem_limit, hard_cap)
elif sys.platform.startswith('linux'):
if os.path.exists("/proc/meminfo"):
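For reference, the Windows branch of this heuristic reduces to the following standalone sketch; total_physical_bytes stands in for the GlobalMemoryStatusEx query made above and is an assumption of this example:

    import os

    def guess_concurrent_links(total_physical_bytes):
        # An explicit pool size from the environment wins outright.
        pool_size = int(os.environ.get('GYP_LINK_CONCURRENCY', 0))
        if pool_size:
            return pool_size
        # Budget roughly 5 GB of RAM per concurrent link (VS 2015 uses ~20%
        # more working set than VS 2013), then apply the optional hard cap.
        mem_limit = max(1, total_physical_bytes // (5 * 2 ** 30))
        hard_cap = max(1, int(os.environ.get('GYP_LINK_CONCURRENCY_MAX',
                                             2 ** 32)))
        return min(mem_limit, hard_cap)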
@@ -1711,14 +1820,15 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
  # 'CC_host'/'CXX_host' environment variable, cc_host/cxx_host should be set
# to cc/cxx.
if flavor == 'win':
- # Overridden by local arch choice in the use_deps case.
- # Chromium's ffmpeg c99conv.py currently looks for a 'cc =' line in
- # build.ninja so needs something valid here. http://crbug.com/233985
- cc = 'cl.exe'
- cxx = 'cl.exe'
+ ar = 'lib.exe'
+ # cc and cxx must be set to the correct architecture by overriding with one
+ # of cl_x86 or cl_x64 below.
+ cc = 'UNSET'
+ cxx = 'UNSET'
ld = 'link.exe'
ld_host = '$ld'
else:
+ ar = 'ar'
cc = 'cc'
cxx = 'c++'
ld = '$cc'
@@ -1726,11 +1836,16 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
ld_host = '$cc_host'
ldxx_host = '$cxx_host'
+ ar_host = ar
cc_host = None
cxx_host = None
cc_host_global_setting = None
cxx_host_global_setting = None
clang_cl = None
+ nm = 'nm'
+ nm_host = 'nm'
+ readelf = 'readelf'
+ readelf_host = 'readelf'
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
make_global_settings = data[build_file].get('make_global_settings', [])
@@ -1738,6 +1853,10 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
options.toplevel_dir)
wrappers = {}
for key, value in make_global_settings:
+ if key == 'AR':
+ ar = os.path.join(build_to_root, value)
+ if key == 'AR.host':
+ ar_host = os.path.join(build_to_root, value)
if key == 'CC':
cc = os.path.join(build_to_root, value)
if cc.endswith('clang-cl'):
@@ -1750,6 +1869,18 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
if key == 'CXX.host':
cxx_host = os.path.join(build_to_root, value)
cxx_host_global_setting = value
+ if key == 'LD':
+ ld = os.path.join(build_to_root, value)
+ if key == 'LD.host':
+ ld_host = os.path.join(build_to_root, value)
+ if key == 'NM':
+ nm = os.path.join(build_to_root, value)
+ if key == 'NM.host':
+ nm_host = os.path.join(build_to_root, value)
+ if key == 'READELF':
+ readelf = os.path.join(build_to_root, value)
+ if key == 'READELF.host':
+ readelf_host = os.path.join(build_to_root, value)
if key.endswith('_wrapper'):
wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)
@@ -1761,8 +1892,15 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
wrappers[key_prefix] = os.path.join(build_to_root, value)
if flavor == 'win':
+ configs = [target_dicts[qualified_target]['configurations'][config_name]
+ for qualified_target in target_list]
+ shared_system_includes = None
+ if not generator_flags.get('ninja_use_custom_environment_files', 0):
+ shared_system_includes = \
+ gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes(
+ configs, generator_flags)
cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
- toplevel_build, generator_flags, OpenOutput)
+ toplevel_build, generator_flags, shared_system_includes, OpenOutput)
for arch, path in cl_paths.iteritems():
if clang_cl:
# If we have selected clang-cl, use that instead.
@@ -1782,14 +1920,22 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
if flavor == 'win':
master_ninja.variable('ld', ld)
master_ninja.variable('idl', 'midl.exe')
- master_ninja.variable('ar', 'lib.exe')
+ master_ninja.variable('ar', ar)
master_ninja.variable('rc', 'rc.exe')
- master_ninja.variable('asm', 'ml.exe')
+ master_ninja.variable('ml_x86', 'ml.exe')
+ master_ninja.variable('ml_x64', 'ml64.exe')
master_ninja.variable('mt', 'mt.exe')
else:
master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld))
master_ninja.variable('ldxx', CommandWithWrapper('LINK', wrappers, ldxx))
- master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], 'ar'))
+ master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], ar))
+ if flavor != 'mac':
+      # Mac does not use readelf/nm for .TOC generation, so avoid polluting
+ # the master ninja with extra unused variables.
+ master_ninja.variable(
+ 'nm', GetEnvironFallback(['NM_target', 'NM'], nm))
+ master_ninja.variable(
+ 'readelf', GetEnvironFallback(['READELF_target', 'READELF'], readelf))
if generator_supports_multiple_toolsets:
if not cc_host:
@@ -1797,7 +1943,10 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
if not cxx_host:
cxx_host = cxx
- master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], 'ar'))
+ master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], ar_host))
+ master_ninja.variable('nm_host', GetEnvironFallback(['NM_host'], nm_host))
+ master_ninja.variable('readelf_host',
+ GetEnvironFallback(['READELF_host'], readelf_host))
cc_host = GetEnvironFallback(['CC_host'], cc_host)
cxx_host = GetEnvironFallback(['CXX_host'], cxx_host)
@@ -1880,7 +2029,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
description='IDL $in',
command=('%s gyp-win-tool midl-wrapper $arch $outdir '
'$tlb $h $dlldata $iid $proxy $in '
- '$idlflags' % sys.executable))
+ '$midl_includes $idlflags' % sys.executable))
master_ninja.rule(
'rc',
description='RC $in',
@@ -1899,11 +2048,11 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
master_ninja.rule(
'alink',
description='AR $out',
- command='rm -f $out && $ar rcs $out $in')
+ command='rm -f $out && $ar rcs $arflags $out $in')
master_ninja.rule(
'alink_thin',
description='AR $out',
- command='rm -f $out && $ar rcsT $out $in')
+ command='rm -f $out && $ar rcsT $arflags $out $in')
# This allows targets that only need to depend on $lib's API to declare an
# order-only dependency on $lib.TOC and avoid relinking such downstream
@@ -1919,8 +2068,8 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
% { 'solink':
'$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s',
'extract_toc':
- ('{ readelf -d $lib | grep SONAME ; '
- 'nm -gD -f p $lib | cut -f1-2 -d\' \'; }')})
+ ('{ $readelf -d $lib | grep SONAME ; '
+ '$nm -gD -f p $lib | cut -f1-2 -d\' \'; }')})
master_ninja.rule(
'solink',
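As the comment above these rules explains, downstream targets depend order-only on $lib.TOC rather than on $lib itself, and the rules are marked restat so ninja prunes relinks when the TOC did not change. A minimal sketch of the update-if-changed idea behind mtime_preserving_solink_base (a hypothetical helper, not the generated shell):

    def update_toc_if_changed(toc_path, new_toc_text):
        # Leave the TOC untouched when the exported interface is the same;
        # the unchanged mtime lets ninja (restat=1) skip dependent links.
        try:
            with open(toc_path) as f:
                if f.read() == new_toc_text:
                    return
        except IOError:
            pass
        with open(toc_path, 'w') as f:
            f.write(new_toc_text)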
@@ -1937,13 +2086,13 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
restat=True,
command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
rspfile='$link_file_list',
- rspfile_content='-Wl,--start-group $in $solibs -Wl,--end-group $libs',
+ rspfile_content='-Wl,--start-group $in -Wl,--end-group $solibs $libs',
pool='link_pool')
master_ninja.rule(
'link',
description='LINK $out',
command=('$ld $ldflags -o $out '
- '-Wl,--start-group $in $solibs -Wl,--end-group $libs'),
+ '-Wl,--start-group $in -Wl,--end-group $solibs $libs'),
pool='link_pool')
elif flavor == 'win':
master_ninja.rule(
@@ -1982,6 +2131,16 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
'lipo',
description='LIPO $out, POSTBUILDS',
command='rm -f $out && lipo -create $in -output $out$postbuilds')
+ master_ninja.rule(
+ 'solipo',
+ description='SOLIPO $out, POSTBUILDS',
+ command=(
+ 'rm -f $lib $lib.TOC && lipo -create $in -output $lib$postbuilds &&'
+ '%(extract_toc)s > $lib.TOC'
+ % { 'extract_toc':
+ '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
+ 'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'}))
+
# Record the public interface of $lib in $lib.TOC. See the corresponding
# comment in the posix section above for details.
@@ -2056,11 +2215,19 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
master_ninja.rule(
'copy_infoplist',
description='COPY INFOPLIST $in',
- command='$env ./gyp-mac-tool copy-info-plist $in $out $keys')
+ command='$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys')
+ master_ninja.rule(
+ 'merge_infoplist',
+ description='MERGE INFOPLISTS $in',
+ command='$env ./gyp-mac-tool merge-info-plist $out $in')
+ master_ninja.rule(
+ 'compile_xcassets',
+ description='COMPILE XCASSETS $in',
+ command='$env ./gyp-mac-tool compile-xcassets $keys $in')
master_ninja.rule(
'mac_tool',
description='MACTOOL $mactool_cmd $in',
- command='$env ./gyp-mac-tool $mactool_cmd $in $out')
+ command='$env ./gyp-mac-tool $mactool_cmd $in $out $binary')
master_ninja.rule(
'package_framework',
description='PACKAGE FRAMEWORK $out, POSTBUILDS',
@@ -2100,6 +2267,15 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
# objects.
target_short_names = {}
+  # Short names of targets that were skipped because they didn't contain
+  # anything interesting.
+  # NOTE: there may be overlap between this and non_empty_target_names.
+ empty_target_names = set()
+
+ # Set of non-empty short target names.
+  # NOTE: there may be overlap between this and empty_target_names.
+ non_empty_target_names = set()
+
for qualified_target in target_list:
# qualified_target is like: third_party/icu/icu.gyp:icui18n#target
build_file, name, toolset = \
@@ -2114,7 +2290,15 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
if flavor == 'mac':
gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
- build_file = gyp.common.RelativePath(build_file, options.toplevel_dir)
+ # If build_file is a symlink, we must not follow it because there's a chance
+ # it could point to a path above toplevel_dir, and we cannot correctly deal
+ # with that case at the moment.
+ build_file = gyp.common.RelativePath(build_file, options.toplevel_dir,
+ False)
+
+ qualified_target_for_hash = gyp.common.QualifiedTarget(build_file, name,
+ toolset)
+ hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest()
base_path = os.path.dirname(build_file)
obj = 'obj'
@@ -2123,7 +2307,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
output_file = os.path.join(obj, base_path, name + '.ninja')
ninja_output = StringIO()
- writer = NinjaWriter(qualified_target, target_outputs, base_path, build_dir,
+ writer = NinjaWriter(hash_for_rules, target_outputs, base_path, build_dir,
ninja_output,
toplevel_build, output_file,
flavor, toplevel_dir=options.toplevel_dir)
@@ -2143,6 +2327,9 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
target_outputs[qualified_target] = target
if qualified_target in all_targets:
all_outputs.add(target.FinalOutput())
+ non_empty_target_names.add(name)
+ else:
+ empty_target_names.add(name)
if target_short_names:
# Write a short name to build this target. This benefits both the
@@ -2154,6 +2341,16 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in
target_short_names[short_name]])
+  # Write phony targets for any empty targets that weren't written yet. As
+  # short names are not necessarily unique, only do this for short names
+  # that haven't already been output for another target.
+ empty_target_names = empty_target_names - non_empty_target_names
+ if empty_target_names:
+ master_ninja.newline()
+ master_ninja.comment('Empty targets (output for completeness).')
+ for name in sorted(empty_target_names):
+ master_ninja.build(name, 'phony')
+
if all_outputs:
master_ninja.newline()
master_ninja.build('all', 'phony', list(all_outputs))
diff --git a/deps/gyp/pylib/gyp/generator/ninja_test.py b/deps/gyp/pylib/gyp/generator/ninja_test.py
index 52661bcdf0..1767b2f45a 100644
--- a/deps/gyp/pylib/gyp/generator/ninja_test.py
+++ b/deps/gyp/pylib/gyp/generator/ninja_test.py
@@ -15,15 +15,18 @@ import TestCommon
class TestPrefixesAndSuffixes(unittest.TestCase):
def test_BinaryNamesWindows(self):
- writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
- 'build.ninja', 'win')
- spec = { 'target_name': 'wee' }
- self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
- endswith('.exe'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
- endswith('.dll'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
- endswith('.lib'))
+ # These cannot run on non-Windows as they require a VS installation to
+ # correctly handle variable expansion.
+ if sys.platform.startswith('win'):
+ writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
+ 'build.ninja', 'win')
+ spec = { 'target_name': 'wee' }
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
+ endswith('.exe'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
+ endswith('.dll'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
+ endswith('.lib'))
def test_BinaryNamesLinux(self):
writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
diff --git a/deps/gyp/pylib/gyp/generator/xcode.py b/deps/gyp/pylib/gyp/generator/xcode.py
index 783dafa0b0..0e3fb9301e 100644
--- a/deps/gyp/pylib/gyp/generator/xcode.py
+++ b/deps/gyp/pylib/gyp/generator/xcode.py
@@ -31,10 +31,6 @@ _intermediate_var = 'INTERMEDIATE_DIR'
# targets that share the same BUILT_PRODUCTS_DIR.
_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR'
-# ONLY_ACTIVE_ARCH means that only the active architecture should be build for
-# Debugging purposes to shorten the build time
-_only_active_arch = 'ONLY_ACTIVE_ARCH'
-
_library_search_paths_var = 'LIBRARY_SEARCH_PATHS'
generator_default_variables = {
@@ -73,6 +69,9 @@ generator_additional_path_sections = [
# The Xcode-specific keys that exist on targets and aren't moved down to
# configurations.
generator_additional_non_configuration_keys = [
+ 'ios_app_extension',
+ 'ios_watch_app',
+ 'ios_watchkit_extension',
'mac_bundle',
'mac_bundle_resources',
'mac_framework_headers',
@@ -88,6 +87,8 @@ generator_extra_sources_for_rules = [
'mac_framework_private_headers',
]
+generator_filelist_paths = None
+
# Xcode's standard set of library directories, which don't need to be duplicated
# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay.
xcode_standard_library_dirs = frozenset([
@@ -172,8 +173,6 @@ class XcodeProject(object):
xccl.SetBuildSetting(_shared_intermediate_var,
'$(SYMROOT)/DerivedSources/$(CONFIGURATION)')
- xccl.ConfigurationNamed('Debug').SetBuildSetting(_only_active_arch, 'YES')
-
# Set user-specified project-wide build settings and config files. This
# is intended to be used very sparingly. Really, almost everything should
# go into target-specific build settings sections. The project-wide
@@ -491,7 +490,7 @@ sys.exit(subprocess.call(sys.argv[1:]))" """
def AddSourceToTarget(source, type, pbxp, xct):
# TODO(mark): Perhaps source_extensions and library_extensions can be made a
# little bit fancier.
- source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's']
+ source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's', 'swift']
# .o is conceptually more of a "source" than a "library," but Xcode thinks
# of "sources" as things to compile and "libraries" (or "frameworks") as
@@ -527,7 +526,7 @@ def AddHeaderToTarget(header, pbxp, xct, is_public):
xct.HeadersPhase().AddFile(header, settings)
-_xcode_variable_re = re.compile('(\$\((.*?)\))')
+_xcode_variable_re = re.compile(r'(\$\((.*?)\))')
def ExpandXcodeVariables(string, expansions):
"""Expands Xcode-style $(VARIABLES) in string per the expansions dict.
@@ -581,6 +580,26 @@ def PerformBuild(data, configurations, params):
subprocess.check_call(arguments)
+def CalculateGeneratorInputInfo(params):
+ toplevel = params['options'].toplevel_dir
+ if params.get('flavor') == 'ninja':
+ generator_dir = os.path.relpath(params['options'].generator_output or '.')
+ output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
+ output_dir = os.path.normpath(os.path.join(generator_dir, output_dir))
+ qualified_out_dir = os.path.normpath(os.path.join(
+ toplevel, output_dir, 'gypfiles-xcode-ninja'))
+ else:
+ output_dir = os.path.normpath(os.path.join(toplevel, 'xcodebuild'))
+ qualified_out_dir = os.path.normpath(os.path.join(
+ toplevel, output_dir, 'gypfiles'))
+
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ 'toplevel': toplevel,
+ 'qualified_out_dir': qualified_out_dir,
+ }
+
+
def GenerateOutput(target_list, target_dicts, data, params):
# Optionally configure each spec to use ninja as the external builder.
ninja_wrapper = params.get('flavor') == 'ninja'
@@ -593,7 +612,15 @@ def GenerateOutput(target_list, target_dicts, data, params):
parallel_builds = generator_flags.get('xcode_parallel_builds', True)
serialize_all_tests = \
generator_flags.get('xcode_serialize_all_test_runs', True)
- project_version = generator_flags.get('xcode_project_version', None)
+ upgrade_check_project_version = \
+ generator_flags.get('xcode_upgrade_check_project_version', None)
+
+ # Format upgrade_check_project_version with leading zeros as needed.
+ if upgrade_check_project_version:
+ upgrade_check_project_version = str(upgrade_check_project_version)
+ while len(upgrade_check_project_version) < 4:
+ upgrade_check_project_version = '0' + upgrade_check_project_version
+
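The padding loop above is equivalent to str.zfill; for illustration:

    # '630' becomes '0630', matching the four-digit LastUpgradeCheck form.
    assert str(630).zfill(4) == '0630'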
skip_excluded_files = \
not generator_flags.get('xcode_list_excluded_files', True)
xcode_projects = {}
@@ -608,15 +635,17 @@ def GenerateOutput(target_list, target_dicts, data, params):
xcode_projects[build_file] = xcp
pbxp = xcp.project
+ # Set project-level attributes from multiple options
+    project_attributes = {}
if parallel_builds:
- pbxp.SetProperty('attributes',
- {
- 'BuildIndependentTargetsInParallel': 'YES',
- 'LastUpgradeCheck': '0500'
- }
- )
- if project_version:
- xcp.project_file.SetXcodeVersion(project_version)
+ project_attributes['BuildIndependentTargetsInParallel'] = 'YES'
+ if upgrade_check_project_version:
+ project_attributes['LastUpgradeCheck'] = upgrade_check_project_version
+ project_attributes['LastTestingUpgradeCheck'] = \
+ upgrade_check_project_version
+ project_attributes['LastSwiftUpdateCheck'] = \
+ upgrade_check_project_version
+ pbxp.SetProperty('attributes', project_attributes)
# Add gyp/gypi files to project
if not generator_flags.get('standalone'):
@@ -654,14 +683,21 @@ def GenerateOutput(target_list, target_dicts, data, params):
# com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets
# to create a single-file mh_bundle.
_types = {
- 'executable': 'com.apple.product-type.tool',
- 'loadable_module': 'com.googlecode.gyp.xcode.bundle',
- 'shared_library': 'com.apple.product-type.library.dynamic',
- 'static_library': 'com.apple.product-type.library.static',
- 'executable+bundle': 'com.apple.product-type.application',
- 'loadable_module+bundle': 'com.apple.product-type.bundle',
- 'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test',
- 'shared_library+bundle': 'com.apple.product-type.framework',
+ 'executable': 'com.apple.product-type.tool',
+ 'loadable_module': 'com.googlecode.gyp.xcode.bundle',
+ 'shared_library': 'com.apple.product-type.library.dynamic',
+ 'static_library': 'com.apple.product-type.library.static',
+ 'mac_kernel_extension': 'com.apple.product-type.kernel-extension',
+ 'executable+bundle': 'com.apple.product-type.application',
+ 'loadable_module+bundle': 'com.apple.product-type.bundle',
+ 'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test',
+ 'shared_library+bundle': 'com.apple.product-type.framework',
+ 'executable+extension+bundle': 'com.apple.product-type.app-extension',
+ 'executable+watch+extension+bundle':
+ 'com.apple.product-type.watchkit-extension',
+ 'executable+watch+bundle':
+ 'com.apple.product-type.application.watchapp',
+ 'mac_kernel_extension+bundle': 'com.apple.product-type.kernel-extension',
}
target_properties = {
@@ -672,6 +708,9 @@ def GenerateOutput(target_list, target_dicts, data, params):
type = spec['type']
is_xctest = int(spec.get('mac_xctest_bundle', 0))
is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest
+ is_app_extension = int(spec.get('ios_app_extension', 0))
+ is_watchkit_extension = int(spec.get('ios_watchkit_extension', 0))
+ is_watch_app = int(spec.get('ios_watch_app', 0))
if type != 'none':
type_bundle_key = type
if is_xctest:
@@ -679,6 +718,18 @@ def GenerateOutput(target_list, target_dicts, data, params):
assert type == 'loadable_module', (
'mac_xctest_bundle targets must have type loadable_module '
'(target %s)' % target_name)
+ elif is_app_extension:
+ assert is_bundle, ('ios_app_extension flag requires mac_bundle '
+ '(target %s)' % target_name)
+ type_bundle_key += '+extension+bundle'
+ elif is_watchkit_extension:
+ assert is_bundle, ('ios_watchkit_extension flag requires mac_bundle '
+ '(target %s)' % target_name)
+ type_bundle_key += '+watch+extension+bundle'
+ elif is_watch_app:
+ assert is_bundle, ('ios_watch_app flag requires mac_bundle '
+ '(target %s)' % target_name)
+ type_bundle_key += '+watch+bundle'
elif is_bundle:
type_bundle_key += '+bundle'
@@ -1118,6 +1169,9 @@ exit 1
# Relative paths are relative to $(SRCROOT).
dest = '$(SRCROOT)/' + dest
+ code_sign = int(copy_group.get('xcode_code_sign', 0))
+ settings = (None, '{ATTRIBUTES = (CodeSignOnCopy, ); }')[code_sign];
+
# Coalesce multiple "copies" sections in the same target with the same
# "destination" property into the same PBXCopyFilesBuildPhase, otherwise
# they'll wind up with ID collisions.
@@ -1136,7 +1190,7 @@ exit 1
pbxcp_dict[dest] = pbxcp
for file in copy_group['files']:
- pbxcp.AddFile(file)
+ pbxcp.AddFile(file, settings)
# Excluded files can also go into the project file.
if not skip_excluded_files:
diff --git a/deps/gyp/pylib/gyp/input.py b/deps/gyp/pylib/gyp/input.py
index dc143d9dfc..20178672b2 100644
--- a/deps/gyp/pylib/gyp/input.py
+++ b/deps/gyp/pylib/gyp/input.py
@@ -28,7 +28,12 @@ from gyp.common import OrderedSet
# A list of types that are treated as linkable.
-linkable_types = ['executable', 'shared_library', 'loadable_module']
+linkable_types = [
+ 'executable',
+ 'shared_library',
+ 'loadable_module',
+ 'mac_kernel_extension',
+]
# A list of sections that contain links to other targets.
dependency_sections = ['dependencies', 'export_dependent_settings']
@@ -48,11 +53,16 @@ base_path_sections = [
]
path_sections = set()
+# These per-process dictionaries are used to cache build file data when loading
+# in parallel mode.
+per_process_data = {}
+per_process_aux_data = {}
+
def IsPathSection(section):
# If section ends in one of the '=+?!' characters, it's applied to a section
# without the trailing characters. '/' is notably absent from this list,
# because there's no way for a regular expression to be treated as a path.
- while section[-1:] in '=+?!':
+ while section and section[-1:] in '=+?!':
section = section[:-1]
if section in path_sections:
@@ -210,8 +220,8 @@ def CheckNode(node, keypath):
elif isinstance(node, Const):
return node.getChildren()[0]
else:
- raise TypeError, "Unknown AST node at key path '" + '.'.join(keypath) + \
- "': " + repr(node)
+ raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) +
+ "': " + repr(node))
def LoadOneBuildFile(build_file_path, data, aux_data, includes,
@@ -341,7 +351,8 @@ def ProcessToolsetsInDict(data):
for condition in data['conditions']:
if type(condition) is list:
for condition_dict in condition[1:]:
- ProcessToolsetsInDict(condition_dict)
+ if type(condition_dict) is dict:
+ ProcessToolsetsInDict(condition_dict)
# TODO(mark): I don't love this name. It just means that it's going to load
@@ -361,10 +372,17 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
else:
variables['DEPTH'] = d.replace('\\', '/')
- if build_file_path in data['target_build_files']:
- # Already loaded.
- return False
- data['target_build_files'].add(build_file_path)
+ # The 'target_build_files' key is only set when loading target build files in
+ # the non-parallel code path, where LoadTargetBuildFile is called
+ # recursively. In the parallel code path, we don't need to check whether the
+ # |build_file_path| has already been loaded, because the 'scheduled' set in
+ # ParallelState guarantees that we never load the same |build_file_path|
+ # twice.
+ if 'target_build_files' in data:
+ if build_file_path in data['target_build_files']:
+ # Already loaded.
+ return False
+ data['target_build_files'].add(build_file_path)
gyp.DebugOutput(gyp.DEBUG_INCLUDES,
"Loading Target Build File '%s'", build_file_path)
@@ -455,10 +473,8 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
else:
return (build_file_path, dependencies)
-
def CallLoadTargetBuildFile(global_flags,
- build_file_path, data,
- aux_data, variables,
+ build_file_path, variables,
includes, depth, check,
generator_input_info):
"""Wrapper around LoadTargetBuildFile for parallel processing.
@@ -474,35 +490,24 @@ def CallLoadTargetBuildFile(global_flags,
for key, value in global_flags.iteritems():
globals()[key] = value
- # Save the keys so we can return data that changed.
- data_keys = set(data)
- aux_data_keys = set(aux_data)
-
SetGeneratorGlobals(generator_input_info)
- result = LoadTargetBuildFile(build_file_path, data,
- aux_data, variables,
+ result = LoadTargetBuildFile(build_file_path, per_process_data,
+ per_process_aux_data, variables,
includes, depth, check, False)
if not result:
return result
(build_file_path, dependencies) = result
- data_out = {}
- for key in data:
- if key == 'target_build_files':
- continue
- if key not in data_keys:
- data_out[key] = data[key]
- aux_data_out = {}
- for key in aux_data:
- if key not in aux_data_keys:
- aux_data_out[key] = aux_data[key]
+ # We can safely pop the build_file_data from per_process_data because it
+ # will never be referenced by this process again, so we don't need to keep
+ # it in the cache.
+ build_file_data = per_process_data.pop(build_file_path)
# This gets serialized and sent back to the main process via a pipe.
# It's handled in LoadTargetBuildFileCallback.
return (build_file_path,
- data_out,
- aux_data_out,
+ build_file_data,
dependencies)
except GypError, e:
sys.stderr.write("gyp: %s\n" % e)
@@ -533,8 +538,6 @@ class ParallelState(object):
self.condition = None
# The "data" dict that was passed to LoadTargetBuildFileParallel
self.data = None
- # The "aux_data" dict that was passed to LoadTargetBuildFileParallel
- self.aux_data = None
# The number of parallel calls outstanding; decremented when a response
# was received.
self.pending = 0
@@ -555,12 +558,9 @@ class ParallelState(object):
self.condition.notify()
self.condition.release()
return
- (build_file_path0, data0, aux_data0, dependencies0) = result
+ (build_file_path0, build_file_data0, dependencies0) = result
+ self.data[build_file_path0] = build_file_data0
self.data['target_build_files'].add(build_file_path0)
- for key in data0:
- self.data[key] = data0[key]
- for key in aux_data0:
- self.aux_data[key] = aux_data0[key]
for new_dependency in dependencies0:
if new_dependency not in self.scheduled:
self.scheduled.add(new_dependency)
@@ -570,9 +570,8 @@ class ParallelState(object):
self.condition.release()
-def LoadTargetBuildFilesParallel(build_files, data, aux_data,
- variables, includes, depth, check,
- generator_input_info):
+def LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
+ check, generator_input_info):
parallel_state = ParallelState()
parallel_state.condition = threading.Condition()
# Make copies of the build_files argument that we can modify while working.
@@ -580,7 +579,6 @@ def LoadTargetBuildFilesParallel(build_files, data, aux_data,
parallel_state.scheduled = set(build_files)
parallel_state.pending = 0
parallel_state.data = data
- parallel_state.aux_data = aux_data
try:
parallel_state.condition.acquire()
@@ -594,20 +592,16 @@ def LoadTargetBuildFilesParallel(build_files, data, aux_data,
dependency = parallel_state.dependencies.pop()
parallel_state.pending += 1
- data_in = {}
- data_in['target_build_files'] = data['target_build_files']
- aux_data_in = {}
global_flags = {
'path_sections': globals()['path_sections'],
'non_configuration_keys': globals()['non_configuration_keys'],
'multiple_toolsets': globals()['multiple_toolsets']}
if not parallel_state.pool:
- parallel_state.pool = multiprocessing.Pool(8)
+ parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count())
parallel_state.pool.apply_async(
CallLoadTargetBuildFile,
args = (global_flags, dependency,
- data_in, aux_data_in,
variables, includes, depth, check, generator_input_info),
callback = parallel_state.LoadTargetBuildFileCallback)
except KeyboardInterrupt, e:
@@ -674,24 +668,24 @@ def IsStrCanonicalInt(string):
# "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())".
# In the last case, the inner "<()" is captured in match['content'].
early_variable_re = re.compile(
- '(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
- '(?P<command_string>[-a-zA-Z0-9_.]+)?'
- '\((?P<is_array>\s*\[?)'
- '(?P<content>.*?)(\]?)\))')
+ r'(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
+ r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
+ r'\((?P<is_array>\s*\[?)'
+ r'(?P<content>.*?)(\]?)\))')
# This matches the same as early_variable_re, but with '>' instead of '<'.
late_variable_re = re.compile(
- '(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
- '(?P<command_string>[-a-zA-Z0-9_.]+)?'
- '\((?P<is_array>\s*\[?)'
- '(?P<content>.*?)(\]?)\))')
+ r'(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
+ r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
+ r'\((?P<is_array>\s*\[?)'
+ r'(?P<content>.*?)(\]?)\))')
# This matches the same as early_variable_re, but with '^' instead of '<'.
latelate_variable_re = re.compile(
- '(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
- '(?P<command_string>[-a-zA-Z0-9_.]+)?'
- '\((?P<is_array>\s*\[?)'
- '(?P<content>.*?)(\]?)\))')
+ r'(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
+ r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
+ r'\((?P<is_array>\s*\[?)'
+ r'(?P<content>.*?)(\]?)\))')
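A quick worked example of what these patterns capture (sample strings are illustrative; the regex is the early_variable_re defined above):

    import re

    early_variable_re = re.compile(
        r'(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
        r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
        r'\((?P<is_array>\s*\[?)'
        r'(?P<content>.*?)(\]?)\))')

    m = early_variable_re.search('<!(echo hi)')
    assert m.group('type') == '<!'           # command expansion
    assert m.group('content') == 'echo hi'

    m = early_variable_re.search('<(DEPTH)')
    assert m.group('type') == '<'            # plain variable reference
    assert m.group('content') == 'DEPTH'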
# Global cache of results from running commands so they don't have to be run
# more then once.
@@ -904,11 +898,15 @@ def ExpandVariables(input, phase, variables, build_file):
else:
# Fix up command with platform specific workarounds.
contents = FixupPlatformCommand(contents)
- p = subprocess.Popen(contents, shell=use_shell,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- stdin=subprocess.PIPE,
- cwd=build_file_dir)
+ try:
+ p = subprocess.Popen(contents, shell=use_shell,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ stdin=subprocess.PIPE,
+ cwd=build_file_dir)
+ except Exception, e:
+ raise GypError("%s while executing command '%s' in %s" %
+ (e, contents, build_file))
p_stdout, p_stderr = p.communicate('')
@@ -916,8 +914,8 @@ def ExpandVariables(input, phase, variables, build_file):
sys.stderr.write(p_stderr)
# Simulate check_call behavior, since check_call only exists
# in python 2.5 and later.
- raise GypError("Call to '%s' returned exit status %d." %
- (contents, p.returncode))
+ raise GypError("Call to '%s' returned exit status %d while in %s." %
+ (contents, p.returncode, build_file))
replacement = p_stdout.rstrip()
cached_command_results[cache_key] = replacement
@@ -994,23 +992,29 @@ def ExpandVariables(input, phase, variables, build_file):
# Prepare for the next match iteration.
input_str = output
- # Look for more matches now that we've replaced some, to deal with
- # expanding local variables (variables defined in the same
- # variables block as this one).
- gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
- if type(output) is list:
- if output and type(output[0]) is list:
- # Leave output alone if it's a list of lists.
- # We don't want such lists to be stringified.
- pass
- else:
- new_output = []
- for item in output:
- new_output.append(
- ExpandVariables(item, phase, variables, build_file))
- output = new_output
+ if output == input:
+ gyp.DebugOutput(gyp.DEBUG_VARIABLES,
+ "Found only identity matches on %r, avoiding infinite "
+ "recursion.",
+ output)
else:
- output = ExpandVariables(output, phase, variables, build_file)
+ # Look for more matches now that we've replaced some, to deal with
+ # expanding local variables (variables defined in the same
+ # variables block as this one).
+ gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
+ if type(output) is list:
+ if output and type(output[0]) is list:
+ # Leave output alone if it's a list of lists.
+ # We don't want such lists to be stringified.
+ pass
+ else:
+ new_output = []
+ for item in output:
+ new_output.append(
+ ExpandVariables(item, phase, variables, build_file))
+ output = new_output
+ else:
+ output = ExpandVariables(output, phase, variables, build_file)
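The guard matters for self-referential definitions such as 'foo': '<(foo)', where a substitution pass reproduces its own input. A toy model of the stopping rule (not the real expander):

    def expand(s, variables):
        # One substitution pass, then recurse only if something changed.
        out = s
        for name, value in variables.items():
            out = out.replace('<(%s)' % name, value)
        if out == s:
            return out  # identity match: avoid infinite recursion
        return expand(out, variables)

    assert expand('<(foo)', {'foo': '<(foo)'}) == '<(foo)'
    assert expand('<(a)', {'a': '<(b)', 'b': 'done'}) == 'done'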
# Convert all strings that are canonically-represented integers into integers.
if type(output) is list:
@@ -1031,17 +1035,40 @@ def EvalCondition(condition, conditions_key, phase, variables, build_file):
that nothing should be used."""
if type(condition) is not list:
raise GypError(conditions_key + ' must be a list')
- if len(condition) != 2 and len(condition) != 3:
+ if len(condition) < 2:
# It's possible that condition[0] won't work in which case this
# attempt will raise its own IndexError. That's probably fine.
raise GypError(conditions_key + ' ' + condition[0] +
- ' must be length 2 or 3, not ' + str(len(condition)))
+ ' must be at least length 2, not ' + str(len(condition)))
+
+ i = 0
+ result = None
+ while i < len(condition):
+ cond_expr = condition[i]
+ true_dict = condition[i + 1]
+ if type(true_dict) is not dict:
+ raise GypError('{} {} must be followed by a dictionary, not {}'.format(
+ conditions_key, cond_expr, type(true_dict)))
+ if len(condition) > i + 2 and type(condition[i + 2]) is dict:
+ false_dict = condition[i + 2]
+ i = i + 3
+ if i != len(condition):
+ raise GypError('{} {} has {} unexpected trailing items'.format(
+ conditions_key, cond_expr, len(condition) - i))
+ else:
+ false_dict = None
+ i = i + 2
+    if result is None:
+ result = EvalSingleCondition(
+ cond_expr, true_dict, false_dict, phase, variables, build_file)
+
+ return result
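With this change a conditions entry may chain cond/dict pairs, optionally ending in a bare dict that acts as the fallback; the first clause whose condition holds wins. An illustrative fragment (the define names are made up):

    'conditions': [
        ['OS=="win"', {'defines': ['OS_WIN']},
         'OS=="mac"', {'defines': ['OS_MAC']},
         # A trailing bare dict is the else-branch when no clause matched.
         {'defines': ['OS_POSIX']}],
    ],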
- [cond_expr, true_dict] = condition[0:2]
- false_dict = None
- if len(condition) == 3:
- false_dict = condition[2]
+def EvalSingleCondition(
+ cond_expr, true_dict, false_dict, phase, variables, build_file):
+ """Returns true_dict if cond_expr evaluates to true, and false_dict
+ otherwise."""
   # Do expansions on the condition itself. Since the condition can naturally
# contain variable references without needing to resort to GYP expansion
# syntax, this is of dubious value for variables, but someone might want to
@@ -1049,9 +1076,9 @@ def EvalCondition(condition, conditions_key, phase, variables, build_file):
cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
build_file)
if type(cond_expr_expanded) not in (str, int):
- raise ValueError, \
+ raise ValueError(
'Variable expansion in this context permits str and int ' + \
- 'only, found ' + cond_expr_expanded.__class__.__name__
+ 'only, found ' + cond_expr_expanded.__class__.__name__)
try:
if cond_expr_expanded in cached_conditions_asts:
@@ -1193,9 +1220,9 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
if key != 'variables' and type(value) is str:
expanded = ExpandVariables(value, phase, variables, build_file)
if type(expanded) not in (str, int):
- raise ValueError, \
+ raise ValueError(
'Variable expansion in this context permits str and int ' + \
- 'only, found ' + expanded.__class__.__name__ + ' for ' + key
+ 'only, found ' + expanded.__class__.__name__ + ' for ' + key)
the_dict[key] = expanded
# Variable expansion may have resulted in changes to automatics. Reload.
@@ -1264,8 +1291,8 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
ProcessVariablesAndConditionsInList(value, phase, variables,
build_file)
elif type(value) is not int:
- raise TypeError, 'Unknown type ' + value.__class__.__name__ + \
- ' for ' + key
+ raise TypeError('Unknown type ' + value.__class__.__name__ + \
+ ' for ' + key)
def ProcessVariablesAndConditionsInList(the_list, phase, variables,
@@ -1292,13 +1319,13 @@ def ProcessVariablesAndConditionsInList(the_list, phase, variables,
# without falling into the index increment below.
continue
else:
- raise ValueError, \
+ raise ValueError(
'Variable expansion in this context permits strings and ' + \
'lists only, found ' + expanded.__class__.__name__ + ' at ' + \
- index
+ index)
elif type(item) is not int:
- raise TypeError, 'Unknown type ' + item.__class__.__name__ + \
- ' at index ' + index
+ raise TypeError('Unknown type ' + item.__class__.__name__ + \
+ ' at index ' + index)
index = index + 1
@@ -1550,26 +1577,25 @@ class DependencyGraphNode(object):
return list(flat_list)
- def FindCycles(self, path=None):
+ def FindCycles(self):
"""
Returns a list of cycles in the graph, where each cycle is its own list.
"""
- if path is None:
- path = [self]
-
results = []
- for node in self.dependents:
- if node in path:
- cycle = [node]
- for part in path:
- cycle.append(part)
- if part == node:
- break
- results.append(tuple(cycle))
- else:
- results.extend(node.FindCycles([node] + path))
+ visited = set()
+
+ def Visit(node, path):
+ for child in node.dependents:
+ if child in path:
+ results.append([child] + path[:path.index(child) + 1])
+        elif child not in visited:
+ visited.add(child)
+ Visit(child, [child] + path)
+
+ visited.add(self)
+ Visit(self, [self])
- return list(set(results))
+ return results
def DirectDependencies(self, dependencies=None):
"""Returns a list of just direct dependencies."""
@@ -1645,8 +1671,8 @@ class DependencyGraphNode(object):
if dependency.ref is None:
continue
if dependency.ref not in dependencies:
- dependencies.add(dependency.ref)
dependency.DeepDependencies(dependencies)
+ dependencies.add(dependency.ref)
return dependencies
@@ -1703,11 +1729,12 @@ class DependencyGraphNode(object):
dependencies.add(self.ref)
return dependencies
- # Executables and loadable modules are already fully and finally linked.
- # Nothing else can be a link dependency of them, there can only be
- # dependencies in the sense that a dependent target might run an
- # executable or load the loadable_module.
- if not initial and target_type in ('executable', 'loadable_module'):
+ # Executables, mac kernel extensions and loadable modules are already fully
+ # and finally linked. Nothing else can be a link dependency of them, there
+ # can only be dependencies in the sense that a dependent target might run
+ # an executable or load the loadable_module.
+ if not initial and target_type in ('executable', 'loadable_module',
+ 'mac_kernel_extension'):
return dependencies
# Shared libraries are already fully linked. They should only be included
@@ -1786,12 +1813,22 @@ def BuildDependencyList(targets):
flat_list = root_node.FlattenToList()
# If there's anything left unvisited, there must be a circular dependency
- # (cycle). If you need to figure out what's wrong, look for elements of
- # targets that are not in flat_list.
+ # (cycle).
if len(flat_list) != len(targets):
+ if not root_node.dependents:
+ # If all targets have dependencies, add the first target as a dependent
+ # of root_node so that the cycle can be discovered from root_node.
+ target = targets.keys()[0]
+ target_node = dependency_nodes[target]
+ target_node.dependencies.append(root_node)
+ root_node.dependents.append(target_node)
+
+ cycles = []
+ for cycle in root_node.FindCycles():
+ paths = [node.ref for node in cycle]
+ cycles.append('Cycle: %s' % ' -> '.join(paths))
raise DependencyGraphNode.CircularException(
- 'Some targets not reachable, cycle in dependency graph detected: ' +
- ' '.join(set(flat_list) ^ set(targets)))
+ 'Cycles in dependency graph detected:\n' + '\n'.join(cycles))
return [dependency_nodes, flat_list]
@@ -1841,20 +1878,18 @@ def VerifyNoGYPFileCircularDependencies(targets):
# If there's anything left unvisited, there must be a circular dependency
# (cycle).
if len(flat_list) != len(dependency_nodes):
- bad_files = []
- for file in dependency_nodes.iterkeys():
- if not file in flat_list:
- bad_files.append(file)
- common_path_prefix = os.path.commonprefix(dependency_nodes)
+ if not root_node.dependents:
+ # If all files have dependencies, add the first file as a dependent
+ # of root_node so that the cycle can be discovered from root_node.
+ file_node = dependency_nodes.values()[0]
+ file_node.dependencies.append(root_node)
+ root_node.dependents.append(file_node)
cycles = []
for cycle in root_node.FindCycles():
- simplified_paths = []
- for node in cycle:
- assert(node.ref.startswith(common_path_prefix))
- simplified_paths.append(node.ref[len(common_path_prefix):])
- cycles.append('Cycle: %s' % ' -> '.join(simplified_paths))
- raise DependencyGraphNode.CircularException, \
- 'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles)
+ paths = [node.ref for node in cycle]
+ cycles.append('Cycle: %s' % ' -> '.join(paths))
+ raise DependencyGraphNode.CircularException(
+ 'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles))
def DoDependentSettings(key, flat_list, targets, dependency_nodes):
@@ -2044,9 +2079,9 @@ def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
to_item = []
MergeLists(to_item, item, to_file, fro_file)
else:
- raise TypeError, \
+ raise TypeError(
'Attempt to merge list item of unsupported type ' + \
- item.__class__.__name__
+ item.__class__.__name__)
if append:
# If appending a singleton that's already in the list, don't append.
@@ -2088,10 +2123,10 @@ def MergeDicts(to, fro, to_file, fro_file):
bad_merge = True
if bad_merge:
- raise TypeError, \
+ raise TypeError(
'Attempt to merge dict value of type ' + v.__class__.__name__ + \
' into incompatible type ' + to[k].__class__.__name__ + \
- ' for key ' + k
+ ' for key ' + k)
if type(v) in (str, int):
# Overwrite the existing value, if any. Cheap and easy.
is_path = IsPathSection(k)
@@ -2150,10 +2185,10 @@ def MergeDicts(to, fro, to_file, fro_file):
elif type(to[list_base]) is not list:
# This may not have been checked above if merging in a list with an
# extension character.
- raise TypeError, \
+ raise TypeError(
'Attempt to merge dict value of type ' + v.__class__.__name__ + \
' into incompatible type ' + to[list_base].__class__.__name__ + \
- ' for key ' + list_base + '(' + k + ')'
+ ' for key ' + list_base + '(' + k + ')')
else:
to[list_base] = []
@@ -2165,9 +2200,9 @@ def MergeDicts(to, fro, to_file, fro_file):
is_paths = IsPathSection(list_base)
MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
else:
- raise TypeError, \
+ raise TypeError(
'Attempt to merge dict value of unsupported type ' + \
- v.__class__.__name__ + ' for key ' + k
+ v.__class__.__name__ + ' for key ' + k)
def MergeConfigWithInheritance(new_configuration_dict, build_file,
@@ -2312,8 +2347,8 @@ def ProcessListFiltersInDict(name, the_dict):
continue
if type(value) is not list:
- raise ValueError, name + ' key ' + key + ' must be list, not ' + \
- value.__class__.__name__
+ raise ValueError(name + ' key ' + key + ' must be list, not ' + \
+ value.__class__.__name__)
list_key = key[:-1]
if list_key not in the_dict:
@@ -2325,10 +2360,10 @@ def ProcessListFiltersInDict(name, the_dict):
if type(the_dict[list_key]) is not list:
value = the_dict[list_key]
- raise ValueError, name + ' key ' + list_key + \
- ' must be list, not ' + \
- value.__class__.__name__ + ' when applying ' + \
- {'!': 'exclusion', '/': 'regex'}[operation]
+ raise ValueError(name + ' key ' + list_key + \
+ ' must be list, not ' + \
+ value.__class__.__name__ + ' when applying ' + \
+ {'!': 'exclusion', '/': 'regex'}[operation])
if not list_key in lists:
lists.append(list_key)
@@ -2377,8 +2412,8 @@ def ProcessListFiltersInDict(name, the_dict):
action_value = 1
else:
# This is an action that doesn't make any sense.
- raise ValueError, 'Unrecognized action ' + action + ' in ' + name + \
- ' key ' + regex_key
+ raise ValueError('Unrecognized action ' + action + ' in ' + name + \
+ ' key ' + regex_key)
for index in xrange(0, len(the_list)):
list_item = the_list[index]
@@ -2450,7 +2485,7 @@ def ValidateTargetType(target, target_dict):
"""
VALID_TARGET_TYPES = ('executable', 'loadable_module',
'static_library', 'shared_library',
- 'none')
+ 'mac_kernel_extension', 'none')
target_type = target_dict.get('type', None)
if target_type not in VALID_TARGET_TYPES:
raise GypError("Target %s has an invalid target type '%s'. "
@@ -2463,9 +2498,11 @@ def ValidateTargetType(target, target_dict):
target_type))
-def ValidateSourcesInTarget(target, target_dict, build_file):
- # TODO: Check if MSVC allows this for loadable_module targets.
- if target_dict.get('type', None) not in ('static_library', 'shared_library'):
+def ValidateSourcesInTarget(target, target_dict, build_file,
+ duplicate_basename_check):
+ if not duplicate_basename_check:
+ return
+ if target_dict.get('type', None) != 'static_library':
return
sources = target_dict.get('sources', [])
basenames = {}
@@ -2485,8 +2522,8 @@ def ValidateSourcesInTarget(target, target_dict, build_file):
if error:
print('static library %s has several files with the same basename:\n' %
- target + error + 'Some build systems, e.g. MSVC08, '
- 'cannot handle that.')
+ target + error + 'libtool on Mac cannot handle that. Use '
+ '--no-duplicate-basename-check to disable this validation.')
raise GypError('Duplicate basenames in sources section, see list above')
@@ -2710,7 +2747,7 @@ def SetGeneratorGlobals(generator_input_info):
def Load(build_files, variables, includes, depth, generator_input_info, check,
- circular_check, parallel, root_targets):
+ circular_check, duplicate_basename_check, parallel, root_targets):
SetGeneratorGlobals(generator_input_info)
# A generator can have other lists (in addition to sources) be processed
# for rules.
@@ -2724,15 +2761,14 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
# well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
# track of the keys corresponding to "target" files.
data = {'target_build_files': set()}
- aux_data = {}
# Normalize paths everywhere. This is important because paths will be
# used as keys to the data dict and for references between input files.
build_files = set(map(os.path.normpath, build_files))
if parallel:
- LoadTargetBuildFilesParallel(build_files, data, aux_data,
- variables, includes, depth, check,
- generator_input_info)
+ LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
+ check, generator_input_info)
else:
+ aux_data = {}
for build_file in build_files:
try:
LoadTargetBuildFile(build_file, data, aux_data,
@@ -2843,10 +2879,8 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
target_dict = targets[target]
build_file = gyp.common.BuildFile(target)
ValidateTargetType(target, target_dict)
- # TODO(thakis): Get vpx_scale/arm/scalesystemdependent.c to be renamed to
- # scalesystemdependent_arm_additions.c or similar.
- if 'arm' not in variables.get('target_arch', ''):
- ValidateSourcesInTarget(target, target_dict, build_file)
+ ValidateSourcesInTarget(target, target_dict, build_file,
+ duplicate_basename_check)
ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
ValidateRunAsInTarget(target, target_dict, build_file)
ValidateActionsInTarget(target, target_dict, build_file)
diff --git a/deps/gyp/pylib/gyp/input_test.py b/deps/gyp/pylib/gyp/input_test.py
index cdbf6b2fad..4234fbb830 100755
--- a/deps/gyp/pylib/gyp/input_test.py
+++ b/deps/gyp/pylib/gyp/input_test.py
@@ -44,16 +44,16 @@ class TestFindCycles(unittest.TestCase):
def test_cycle_self_reference(self):
self._create_dependency(self.nodes['a'], self.nodes['a'])
- self.assertEquals([(self.nodes['a'], self.nodes['a'])],
+ self.assertEquals([[self.nodes['a'], self.nodes['a']]],
self.nodes['a'].FindCycles())
def test_cycle_two_nodes(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['a'])
- self.assertEquals([(self.nodes['a'], self.nodes['b'], self.nodes['a'])],
+ self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['a']]],
self.nodes['a'].FindCycles())
- self.assertEquals([(self.nodes['b'], self.nodes['a'], self.nodes['b'])],
+ self.assertEquals([[self.nodes['b'], self.nodes['a'], self.nodes['b']]],
self.nodes['b'].FindCycles())
def test_two_cycles(self):
@@ -65,9 +65,9 @@ class TestFindCycles(unittest.TestCase):
cycles = self.nodes['a'].FindCycles()
self.assertTrue(
- (self.nodes['a'], self.nodes['b'], self.nodes['a']) in cycles)
+ [self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles)
self.assertTrue(
- (self.nodes['b'], self.nodes['c'], self.nodes['b']) in cycles)
+ [self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles)
self.assertEquals(2, len(cycles))
def test_big_cycle(self):
@@ -77,12 +77,12 @@ class TestFindCycles(unittest.TestCase):
self._create_dependency(self.nodes['d'], self.nodes['e'])
self._create_dependency(self.nodes['e'], self.nodes['a'])
- self.assertEquals([(self.nodes['a'],
+ self.assertEquals([[self.nodes['a'],
self.nodes['b'],
self.nodes['c'],
self.nodes['d'],
self.nodes['e'],
- self.nodes['a'])],
+ self.nodes['a']]],
self.nodes['a'].FindCycles())
diff --git a/deps/gyp/pylib/gyp/mac_tool.py b/deps/gyp/pylib/gyp/mac_tool.py
index 821e291e9f..eeeaceb0c7 100755
--- a/deps/gyp/pylib/gyp/mac_tool.py
+++ b/deps/gyp/pylib/gyp/mac_tool.py
@@ -45,7 +45,7 @@ class MacTool(object):
"""Transforms a tool name like copy-info-plist to CopyInfoPlist"""
return name_string.title().replace('-', '')
- def ExecCopyBundleResource(self, source, dest):
+ def ExecCopyBundleResource(self, source, dest, convert_to_binary):
"""Copies a resource file to the bundle/Resources directory, performing any
necessary compilation on each resource."""
extension = os.path.splitext(source)[1].lower()
@@ -62,7 +62,7 @@ class MacTool(object):
elif extension == '.storyboard':
return self._CopyXIBFile(source, dest)
elif extension == '.strings':
- self._CopyStringsFile(source, dest)
+ self._CopyStringsFile(source, dest, convert_to_binary)
else:
shutil.copy(source, dest)
@@ -92,7 +92,11 @@ class MacTool(object):
sys.stdout.write(line)
return ibtoolout.returncode
- def _CopyStringsFile(self, source, dest):
+ def _ConvertToBinary(self, dest):
+ subprocess.check_call([
+ 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
+
+ def _CopyStringsFile(self, source, dest, convert_to_binary):
"""Copies a .strings file using iconv to reconvert the input into UTF-16."""
input_code = self._DetectInputEncoding(source) or "UTF-8"
@@ -112,6 +116,9 @@ class MacTool(object):
fp.write(s.decode(input_code).encode('UTF-16'))
fp.close()
+ if convert_to_binary == 'True':
+ self._ConvertToBinary(dest)
+
def _DetectInputEncoding(self, file_name):
"""Reads the first few bytes from file_name and tries to guess the text
encoding. Returns None as a guess if it can't detect it."""
@@ -131,7 +138,7 @@ class MacTool(object):
else:
return None
- def ExecCopyInfoPlist(self, source, dest, *keys):
+ def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
"""Copies the |source| Info.plist to the destination directory |dest|."""
# Read the source Info.plist into memory.
fd = open(source, 'r')
@@ -146,7 +153,7 @@ class MacTool(object):
# Go through all the environment variables and replace them as variables in
# the file.
- IDENT_RE = re.compile('[/\s]')
+ IDENT_RE = re.compile(r'[/\s]')
for key in os.environ:
if key.startswith('_'):
continue
@@ -185,6 +192,9 @@ class MacTool(object):
# "compiled".
self._WritePkgInfo(dest)
+ if convert_to_binary == 'True':
+ self._ConvertToBinary(dest)
+
def _WritePkgInfo(self, info_plist):
"""This writes the PkgInfo file from the data stored in Info.plist."""
plist = plistlib.readPlist(info_plist)
@@ -223,11 +233,24 @@ class MacTool(object):
r'^.*libtool: warning for library: ' +
r'.* the table of contents is empty ' +
r'\(no object file members in the library define global symbols\)$')
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE)
+ env = os.environ.copy()
+ # Ref:
+ # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
+    # The problem with this flag is that it resets the output file's mtime to
+    # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
+ env['ZERO_AR_DATE'] = '1'
+ libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
_, err = libtoolout.communicate()
for line in err.splitlines():
if not libtool_re.match(line) and not libtool_re5.match(line):
print >>sys.stderr, line
+ # Unconditionally touch the output .a file on the command line if present
+ # and the command succeeded. A bit hacky.
+ if not libtoolout.returncode:
+ for i in range(len(cmd_list) - 1):
+ if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
+ os.utime(cmd_list[i+1], None)
+ break
return libtoolout.returncode
def ExecPackageFramework(self, framework, version):
@@ -266,6 +289,66 @@ class MacTool(object):
os.remove(link)
os.symlink(dest, link)
+ def ExecCompileXcassets(self, keys, *inputs):
+ """Compiles multiple .xcassets files into a single .car file.
+
+    This invokes 'actool' to compile all the input .xcassets files. The
+    |keys| argument is a JSON-encoded dictionary of extra arguments to
+    pass to 'actool' when the asset catalogs contain an application icon
+    or a launch image.
+
+    Note that 'actool' does not create the Assets.car file if the asset
+    catalogs do not contain any imagesets.
+ """
+ command_line = [
+ 'xcrun', 'actool', '--output-format', 'human-readable-text',
+ '--compress-pngs', '--notices', '--warnings', '--errors',
+ ]
+ is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
+ if is_iphone_target:
+ platform = os.environ['CONFIGURATION'].split('-')[-1]
+ if platform not in ('iphoneos', 'iphonesimulator'):
+ platform = 'iphonesimulator'
+ command_line.extend([
+ '--platform', platform, '--target-device', 'iphone',
+ '--target-device', 'ipad', '--minimum-deployment-target',
+ os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
+ os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
+ ])
+ else:
+ command_line.extend([
+ '--platform', 'macosx', '--target-device', 'mac',
+ '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
+ '--compile',
+ os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
+ ])
+ if keys:
+ keys = json.loads(keys)
+ for key, value in keys.iteritems():
+ arg_name = '--' + key
+ if isinstance(value, bool):
+ if value:
+ command_line.append(arg_name)
+ elif isinstance(value, list):
+ for v in value:
+ command_line.append(arg_name)
+ command_line.append(str(v))
+ else:
+ command_line.append(arg_name)
+ command_line.append(str(value))
+ # Note: actool crashes if input paths are relative, so use os.path.abspath
+ # to get absolute paths for the inputs.
+ command_line.extend(map(os.path.abspath, inputs))
+ subprocess.check_call(command_line)
+
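# An illustration of how the json-encoded |keys| dictionary above expands
# into actool flags; the key names below are hypothetical examples, not a
# schema defined by this file.
import json

def keys_to_flags(keys_json):
  flags = []
  for key, value in json.loads(keys_json).items():
    arg_name = '--' + key
    if isinstance(value, bool):
      if value:
        flags.append(arg_name)
    elif isinstance(value, list):
      for v in value:
        flags.extend([arg_name, str(v)])
    else:
      flags.extend([arg_name, str(value)])
  return flags

# '{"app-icon": "AppIcon"}' -> ['--app-icon', 'AppIcon']; booleans become
# bare flags, and lists repeat the flag once per element.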
+ def ExecMergeInfoPlist(self, output, *inputs):
+ """Merge multiple .plist files into a single .plist file."""
+ merged_plist = {}
+ for path in inputs:
+ plist = self._LoadPlistMaybeBinary(path)
+ self._MergePlist(merged_plist, plist)
+ plistlib.writePlist(merged_plist, output)
+
def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
"""Code sign a bundle.
@@ -402,6 +485,19 @@ class MacTool(object):
'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
return self._LoadPlistMaybeBinary(temp.name)
+ def _MergePlist(self, merged_plist, plist):
+ """Merge |plist| into |merged_plist|."""
+ for key, value in plist.iteritems():
+ if isinstance(value, dict):
+ merged_value = merged_plist.get(key, {})
+ if isinstance(merged_value, dict):
+ self._MergePlist(merged_value, value)
+ merged_plist[key] = merged_value
+ else:
+ merged_plist[key] = value
+ else:
+ merged_plist[key] = value
+
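# A worked example of the recursive merge above, with a standalone copy of
# the algorithm: later plists win on scalar conflicts, while nested dicts
# are merged key by key.
def merge_plist(merged_plist, plist):
  for key, value in plist.items():
    if isinstance(value, dict):
      merged_value = merged_plist.get(key, {})
      if isinstance(merged_value, dict):
        merge_plist(merged_value, value)
        merged_plist[key] = merged_value
      else:
        merged_plist[key] = value
    else:
      merged_plist[key] = value

merged = {}
merge_plist(merged, {'CFBundleName': 'App', 'UIFlags': {'a': 1}})
merge_plist(merged, {'CFBundleName': 'App2', 'UIFlags': {'b': 2}})
assert merged == {'CFBundleName': 'App2', 'UIFlags': {'a': 1, 'b': 2}}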
def _LoadPlistMaybeBinary(self, plist_path):
"""Loads into a memory a plist possibly encoded in binary format.
diff --git a/deps/gyp/pylib/gyp/msvs_emulation.py b/deps/gyp/pylib/gyp/msvs_emulation.py
index 63593a424d..ca67b122f0 100644
--- a/deps/gyp/pylib/gyp/msvs_emulation.py
+++ b/deps/gyp/pylib/gyp/msvs_emulation.py
@@ -12,10 +12,14 @@ import re
import subprocess
import sys
+from gyp.common import OrderedSet
+import gyp.MSVSUtil
import gyp.MSVSVersion
+
windows_quoter_regex = re.compile(r'(\\*)"')
+
def QuoteForRspFile(arg):
"""Quote a command line argument so that it appears as one argument when
processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
@@ -131,6 +135,54 @@ def _FindDirectXInstallation():
return dxsdk_dir
+def GetGlobalVSMacroEnv(vs_version):
+ """Get a dict of variables mapping internal VS macro names to their gyp
+ equivalents. Returns all variables that are independent of the target."""
+ env = {}
+ # '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
+ # Visual Studio is actually installed.
+ if vs_version.Path():
+ env['$(VSInstallDir)'] = vs_version.Path()
+ env['$(VCInstallDir)'] = os.path.join(vs_version.Path(), 'VC') + '\\'
+ # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
+ # set. This happens when the SDK is sync'd via src-internal, rather than
+ # by typical end-user installation of the SDK. If it's not set, we don't
+ # want to leave the unexpanded variable in the path, so simply strip it.
+ dxsdk_dir = _FindDirectXInstallation()
+ env['$(DXSDK_DIR)'] = dxsdk_dir if dxsdk_dir else ''
+ # Try to find an installation location for the Windows DDK by checking
+ # the WDK_DIR environment variable, may be None.
+ env['$(WDK_DIR)'] = os.environ.get('WDK_DIR', '')
+ return env
+
+def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
+ """Finds msvs_system_include_dirs that are common to all targets, removes
+ them from all targets, and returns an OrderedSet containing them."""
+ all_system_includes = OrderedSet(
+ configs[0].get('msvs_system_include_dirs', []))
+ for config in configs[1:]:
+ system_includes = config.get('msvs_system_include_dirs', [])
+ all_system_includes = all_system_includes & OrderedSet(system_includes)
+ if not all_system_includes:
+ return None
+ # Expand macros in all_system_includes.
+ env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags))
+ expanded_system_includes = OrderedSet([ExpandMacros(include, env)
+ for include in all_system_includes])
+ if any(['$' in include for include in expanded_system_includes]):
+ # Some paths rely on target-specific variables; bail.
+ return None
+
+ # Remove system includes shared by all targets from the targets.
+ for config in configs:
+ includes = config.get('msvs_system_include_dirs', [])
+ if includes: # Don't insert a msvs_system_include_dirs key if not needed.
+ # This must check the unexpanded includes list:
+ new_includes = [i for i in includes if i not in all_system_includes]
+ config['msvs_system_include_dirs'] = new_includes
+ return expanded_system_includes
+
+
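# A toy illustration of the intersection step above, with plain sets
# standing in for gyp.common's OrderedSet (which additionally preserves
# first-seen order, important for include search order):
configs = [
    {'msvs_system_include_dirs': ['a', 'b', 'c']},
    {'msvs_system_include_dirs': ['b', 'c', 'd']},
]
shared = set(configs[0]['msvs_system_include_dirs'])
for config in configs[1:]:
  shared &= set(config['msvs_system_include_dirs'])
for config in configs:
  config['msvs_system_include_dirs'] = [
      i for i in config['msvs_system_include_dirs'] if i not in shared]
assert shared == set(['b', 'c'])
assert [c['msvs_system_include_dirs'] for c in configs] == [['a'], ['d']]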
class MsvsSettings(object):
"""A class that understands the gyp 'msvs_...' values (especially the
msvs_settings field). They largely correspond to the VS2008 IDE DOM. This
@@ -139,11 +191,6 @@ class MsvsSettings(object):
def __init__(self, spec, generator_flags):
self.spec = spec
self.vs_version = GetVSVersion(generator_flags)
- self.dxsdk_dir = _FindDirectXInstallation()
-
- # Try to find an installation location for the Windows DDK by checking
- # the WDK_DIR environment variable, may be None.
- self.wdk_dir = os.environ.get('WDK_DIR')
supported_fields = [
('msvs_configuration_attributes', dict),
@@ -176,6 +223,17 @@ class MsvsSettings(object):
if unsupported:
raise Exception('\n'.join(unsupported))
+ def GetExtension(self):
+ """Returns the extension for the target, with no leading dot.
+
+ Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
+ the target type.
+ """
+ ext = self.spec.get('product_extension', None)
+ if ext:
+ return ext
+ return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec['type'], '')
+
def GetVSMacroEnv(self, base_to_build=None, config=None):
"""Get a dict of variables mapping internal VS macro names to their gyp
equivalents."""
@@ -183,29 +241,24 @@ class MsvsSettings(object):
target_name = self.spec.get('product_prefix', '') + \
self.spec.get('product_name', self.spec['target_name'])
target_dir = base_to_build + '\\' if base_to_build else ''
+ target_ext = '.' + self.GetExtension()
+ target_file_name = target_name + target_ext
+
replacements = {
- '$(OutDir)\\': target_dir,
- '$(TargetDir)\\': target_dir,
- '$(IntDir)': '$!INTERMEDIATE_DIR',
- '$(InputPath)': '${source}',
'$(InputName)': '${root}',
- '$(ProjectName)': self.spec['target_name'],
- '$(TargetName)': target_name,
+ '$(InputPath)': '${source}',
+ '$(IntDir)': '$!INTERMEDIATE_DIR',
+ '$(OutDir)\\': target_dir,
'$(PlatformName)': target_platform,
'$(ProjectDir)\\': '',
+ '$(ProjectName)': self.spec['target_name'],
+ '$(TargetDir)\\': target_dir,
+ '$(TargetExt)': target_ext,
+ '$(TargetFileName)': target_file_name,
+ '$(TargetName)': target_name,
+ '$(TargetPath)': os.path.join(target_dir, target_file_name),
}
- # '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
- # Visual Studio is actually installed.
- if self.vs_version.Path():
- replacements['$(VSInstallDir)'] = self.vs_version.Path()
- replacements['$(VCInstallDir)'] = os.path.join(self.vs_version.Path(),
- 'VC') + '\\'
- # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
- # set. This happens when the SDK is sync'd via src-internal, rather than
- # by typical end-user installation of the SDK. If it's not set, we don't
- # want to leave the unexpanded variable in the path, so simply strip it.
- replacements['$(DXSDK_DIR)'] = self.dxsdk_dir if self.dxsdk_dir else ''
- replacements['$(WDK_DIR)'] = self.wdk_dir if self.wdk_dir else ''
+ replacements.update(GetGlobalVSMacroEnv(self.vs_version))
return replacements
def ConvertVSMacros(self, s, base_to_build=None, config=None):
@@ -285,6 +338,15 @@ class MsvsSettings(object):
('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
return [self.ConvertVSMacros(p, config=config) for p in includes]
+ def AdjustMidlIncludeDirs(self, midl_include_dirs, config):
+ """Updates midl_include_dirs to expand VS specific paths, and adds the
+ system include dirs used for platform SDK and similar."""
+ config = self._TargetConfig(config)
+ includes = midl_include_dirs + self.msvs_system_include_dirs[config]
+ includes.extend(self._Setting(
+ ('VCMIDLTool', 'AdditionalIncludeDirectories'), config, default=[]))
+ return [self.ConvertVSMacros(p, config=config) for p in includes]
+
def GetComputedDefines(self, config):
"""Returns the set of defines that are injected to the defines list based
on other VS settings."""
@@ -337,7 +399,7 @@ class MsvsSettings(object):
output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
generate_debug_info = self._Setting(
('VCLinkerTool', 'GenerateDebugInformation'), config)
- if generate_debug_info:
+ if generate_debug_info == 'true':
if output_file:
return expand_special(self.ConvertVSMacros(output_file, config=config))
else:
@@ -345,6 +407,13 @@ class MsvsSettings(object):
else:
return None
+ def GetNoImportLibrary(self, config):
+ """If NoImportLibrary: true, ninja will not expect the output to include
+ an import library."""
+ config = self._TargetConfig(config)
+ noimplib = self._Setting(('NoImportLibrary',), config)
+ return noimplib == 'true'
+
def GetAsmflags(self, config):
"""Returns the flags that need to be added to ml invocations."""
config = self._TargetConfig(config)
@@ -370,9 +439,15 @@ class MsvsSettings(object):
cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
+ cl('FloatingPointModel',
+ map={'0': 'precise', '1': 'strict', '2': 'fast'}, prefix='/fp:',
+ default='0')
+ cl('CompileAsManaged', map={'false': '', 'true': '/clr'})
cl('WholeProgramOptimization', map={'true': '/GL'})
cl('WarningLevel', prefix='/W')
cl('WarnAsError', map={'true': '/WX'})
+ cl('CallingConvention',
+ map={'0': 'd', '1': 'r', '2': 'z', '3': 'v'}, prefix='/G')
cl('DebugInformationFormat',
map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
@@ -389,10 +464,11 @@ class MsvsSettings(object):
cl('EnablePREfast', map={'true': '/analyze'})
cl('AdditionalOptions', prefix='')
cl('EnableEnhancedInstructionSet',
- map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32'}, prefix='/arch:')
+ map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32', '5': 'AVX2'},
+ prefix='/arch:')
cflags.extend(['/FI' + f for f in self._Setting(
('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
- if self.vs_version.short_name in ('2013', '2013e'):
+ if self.vs_version.short_name in ('2013', '2013e', '2015'):
# New flag required in 2013 to maintain previous PDB behavior.
cflags.append('/FS')
# ninja handles parallelism by itself, don't have the compiler do it too.
@@ -443,7 +519,8 @@ class MsvsSettings(object):
libflags.extend(self._GetAdditionalLibraryDirectories(
'VCLibrarianTool', config, gyp_to_build_path))
lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
- lib('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:')
+ lib('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
+ prefix='/MACHINE:')
lib('AdditionalOptions')
return libflags
@@ -486,7 +563,8 @@ class MsvsSettings(object):
'VCLinkerTool', append=ldflags)
self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
ld('GenerateDebugInformation', map={'true': '/DEBUG'})
- ld('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:')
+ ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
+ prefix='/MACHINE:')
ldflags.extend(self._GetAdditionalLibraryDirectories(
'VCLinkerTool', config, gyp_to_build_path))
ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
@@ -516,6 +594,15 @@ class MsvsSettings(object):
'2': 'WINDOWS%s' % minimum_required_version},
prefix='/SUBSYSTEM:')
+ stack_reserve_size = self._Setting(
+ ('VCLinkerTool', 'StackReserveSize'), config, default='')
+ if stack_reserve_size:
+ stack_commit_size = self._Setting(
+ ('VCLinkerTool', 'StackCommitSize'), config, default='')
+ if stack_commit_size:
+ stack_commit_size = ',' + stack_commit_size
+ ldflags.append('/STACK:%s%s' % (stack_reserve_size, stack_commit_size))
+
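# A sketch of the /STACK composition above: the commit size is only
# appended, with a comma, when both settings are present.
def stack_flag(reserve, commit=''):
  if not reserve:
    return None
  return '/STACK:%s%s' % (reserve, ',' + commit if commit else '')

assert stack_flag('2097152') == '/STACK:2097152'
assert stack_flag('2097152', '8192') == '/STACK:2097152,8192'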
ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
ld('BaseAddress', prefix='/BASE:')
@@ -537,10 +624,17 @@ class MsvsSettings(object):
ld('Profile', map={'true': '/PROFILE'})
ld('LargeAddressAware',
map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE')
- ld('ImageHasSafeExceptionHandlers', map={'true': '/SAFESEH'})
# TODO(scottmg): This should sort of be somewhere else (not really a flag).
ld('AdditionalDependencies', prefix='')
+ if self.GetArch(config) == 'x86':
+ safeseh_default = 'true'
+ else:
+ safeseh_default = None
+ ld('ImageHasSafeExceptionHandlers',
+ map={'false': ':NO', 'true': ''}, prefix='/SAFESEH',
+ default=safeseh_default)
+
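# A model of the map/prefix idiom used by the cl()/ld() helpers in this file
# (the helper itself is defined elsewhere; this is an inferred sketch, not
# the upstream implementation): the setting value is looked up in |map| and
# appended to |prefix|, with |default| used when the setting is unset.
def flag_for(value, map=None, prefix='', default=None):
  if value is None:
    value = default
  if value is None:
    return None
  if map is not None:
    if value not in map:
      return None
    value = map[value]
  return prefix + value

# With the SAFESEH mapping above, x86 targets default to '/SAFESEH' and an
# explicit 'false' yields '/SAFESEH:NO':
assert flag_for(None, map={'false': ':NO', 'true': ''},
                prefix='/SAFESEH', default='true') == '/SAFESEH'
assert flag_for('false', map={'false': ':NO', 'true': ''},
                prefix='/SAFESEH') == '/SAFESEH:NO'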
# If the base address is not specifically controlled, DYNAMICBASE should
# be on by default.
base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
@@ -727,10 +821,16 @@ class MsvsSettings(object):
return True
return False
- def HasExplicitIdlRules(self, spec):
- """Determine if there's an explicit rule for idl files. When there isn't we
- need to generate implicit rules to build MIDL .idl files."""
- return self._HasExplicitRuleForExtension(spec, 'idl')
+ def _HasExplicitIdlActions(self, spec):
+ """Determine if an action should not run midl for .idl files."""
+ return any([action.get('explicit_idl_action', 0)
+ for action in spec.get('actions', [])])
+
+ def HasExplicitIdlRulesOrActions(self, spec):
+ """Determine if there's an explicit rule or action for idl files. When
+ there isn't we need to generate implicit rules to build MIDL .idl files."""
+ return (self._HasExplicitRuleForExtension(spec, 'idl') or
+ self._HasExplicitIdlActions(spec))
def HasExplicitAsmRules(self, spec):
"""Determine if there's an explicit rule for asm files. When there isn't we
@@ -829,7 +929,8 @@ def GetVSVersion(generator_flags):
global vs_version
if not vs_version:
vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
- generator_flags.get('msvs_version', 'auto'))
+ generator_flags.get('msvs_version', 'auto'),
+ allow_fallback=False)
return vs_version
def _GetVsvarsSetupArgs(generator_flags, arch):
@@ -897,7 +998,8 @@ def _ExtractCLPath(output_of_where):
if line.startswith('LOC:'):
return line[len('LOC:'):].strip()
-def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out):
+def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags,
+ system_includes, open_out):
"""It's not sufficient to have the absolute path to the compiler, linker,
etc. on Windows, as those tools rely on .dlls being in the PATH. We also
need to support both x86 and x64 compilers within the same build (to support
@@ -928,6 +1030,13 @@ def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out):
args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
variables, _ = popen.communicate()
env = _ExtractImportantEnvironment(variables)
+
+ # Inject system includes from gyp files into INCLUDE.
+ if system_includes:
+ system_includes = system_includes | OrderedSet(
+ env.get('INCLUDE', '').split(';'))
+ env['INCLUDE'] = ';'.join(system_includes)
+
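# A sketch of the INCLUDE merge in isolation: the gyp-provided system
# include dirs are folded into whatever INCLUDE the vcvars script produced,
# deduplicated. Plain lists stand in for gyp.common.OrderedSet here.
def merge_includes(system_includes, existing):
  seen = []
  for d in list(system_includes) + existing.split(';'):
    if d and d not in seen:
      seen.append(d)
  return ';'.join(seen)

assert merge_includes(['C:\\sdk\\include'],
                      'C:\\vc\\include;C:\\sdk\\include') == \
    'C:\\sdk\\include;C:\\vc\\include'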
env_block = _FormatAsEnvironmentBlock(env)
f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
f.write(env_block)
diff --git a/deps/gyp/pylib/gyp/win_tool.py b/deps/gyp/pylib/gyp/win_tool.py
index 44e1b0760b..bb6f1ea436 100755
--- a/deps/gyp/pylib/gyp/win_tool.py
+++ b/deps/gyp/pylib/gyp/win_tool.py
@@ -116,14 +116,16 @@ class WinTool(object):
env = self._GetEnv(arch)
if use_separate_mspdbsrv == 'True':
self._UseSeparateMspdbsrv(env, args)
- link = subprocess.Popen(args,
+ link = subprocess.Popen([args[0].replace('/', '\\')] + list(args[1:]),
shell=True,
env=env,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
out, _ = link.communicate()
for line in out.splitlines():
- if not line.startswith(' Creating library '):
+ if (not line.startswith(' Creating library ') and
+ not line.startswith('Generating code') and
+ not line.startswith('Finished generating code')):
print line
return link.returncode
@@ -259,9 +261,6 @@ class WinTool(object):
def ExecAsmWrapper(self, arch, *args):
"""Filter logo banner from invocations of asm.exe."""
env = self._GetEnv(arch)
- # MSVS doesn't assemble x64 asm files.
- if arch == 'environment.x64':
- return 0
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
diff --git a/deps/gyp/pylib/gyp/xcode_emulation.py b/deps/gyp/pylib/gyp/xcode_emulation.py
index 859cd5a937..ac6852faf9 100644
--- a/deps/gyp/pylib/gyp/xcode_emulation.py
+++ b/deps/gyp/pylib/gyp/xcode_emulation.py
@@ -215,9 +215,24 @@ class XcodeSettings(object):
if test_key in self._Settings():
print 'Warning: Ignoring not yet implemented key "%s".' % test_key
+ def IsBinaryOutputFormat(self, configname):
+ default = "binary" if self.isIOS else "xml"
+ format = self.xcode_settings[configname].get('INFOPLIST_OUTPUT_FORMAT',
+ default)
+ return format == "binary"
+
def _IsBundle(self):
return int(self.spec.get('mac_bundle', 0)) != 0
+ def _IsIosAppExtension(self):
+ return int(self.spec.get('ios_app_extension', 0)) != 0
+
+ def _IsIosWatchKitExtension(self):
+ return int(self.spec.get('ios_watchkit_extension', 0)) != 0
+
+ def _IsIosWatchApp(self):
+ return int(self.spec.get('ios_watch_app', 0)) != 0
+
def GetFrameworkVersion(self):
"""Returns the framework version of the current target. Only valid for
bundles."""
@@ -237,7 +252,10 @@ class XcodeSettings(object):
'WRAPPER_EXTENSION', default=default_wrapper_extension)
return '.' + self.spec.get('product_extension', wrapper_extension)
elif self.spec['type'] == 'executable':
- return '.' + self.spec.get('product_extension', 'app')
+ if self._IsIosAppExtension() or self._IsIosWatchKitExtension():
+ return '.' + self.spec.get('product_extension', 'appex')
+ else:
+ return '.' + self.spec.get('product_extension', 'app')
else:
assert False, "Don't know extension for '%s', target '%s'" % (
self.spec['type'], self.spec['target_name'])
@@ -292,6 +310,18 @@ class XcodeSettings(object):
def GetProductType(self):
"""Returns the PRODUCT_TYPE of this target."""
+ if self._IsIosAppExtension():
+ assert self._IsBundle(), ('ios_app_extension flag requires mac_bundle '
+ '(target %s)' % self.spec['target_name'])
+ return 'com.apple.product-type.app-extension'
+ if self._IsIosWatchKitExtension():
+ assert self._IsBundle(), ('ios_watchkit_extension flag requires '
+ 'mac_bundle (target %s)' % self.spec['target_name'])
+ return 'com.apple.product-type.watchkit-extension'
+ if self._IsIosWatchApp():
+ assert self._IsBundle(), ('ios_watch_app flag requires mac_bundle '
+ '(target %s)' % self.spec['target_name'])
+ return 'com.apple.product-type.application.watchapp'
if self._IsBundle():
return {
'executable': 'com.apple.product-type.application',
@@ -495,6 +525,13 @@ class XcodeSettings(object):
if self._Test('GCC_WARN_ABOUT_MISSING_NEWLINE', 'YES', default='NO'):
cflags.append('-Wnewline-eof')
+ # In Xcode, this is only activated when GCC_COMPILER_VERSION is clang or
+ # llvm-gcc. It also requires a fairly recent libtool, and
+ # if the system clang isn't used, DYLD_LIBRARY_PATH needs to contain the
+ # path to the libLTO.dylib that matches the used clang.
+ if self._Test('LLVM_LTO', 'YES', default='NO'):
+ cflags.append('-flto')
+
self._AppendPlatformVersionMinFlags(cflags)
# TODO:
@@ -703,8 +740,8 @@ class XcodeSettings(object):
# -exported_symbols_list file
# -Wl,exported_symbols_list file
# -Wl,exported_symbols_list,file
- LINKER_FILE = '(\S+)'
- WORD = '\S+'
+ LINKER_FILE = r'(\S+)'
+ WORD = r'\S+'
linker_flags = [
['-exported_symbols_list', LINKER_FILE], # Needed for NaCl.
['-unexported_symbols_list', LINKER_FILE],
@@ -794,6 +831,20 @@ class XcodeSettings(object):
for directory in framework_dirs:
ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
+ is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension()
+ if sdk_root and is_extension:
+ # Add the link flags for extensions. These flags are common to all
+ # extensions and provide the loader and main function. They mirror the
+ # options Xcode uses when compiling extensions.
+ ldflags.append('-lpkstart')
+ if XcodeVersion() < '0900':
+ ldflags.append(sdk_root +
+ '/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit')
+ ldflags.append('-fapplication-extension')
+ ldflags.append('-Xlinker -rpath '
+ '-Xlinker @executable_path/../../Frameworks')
+
self._Appendf(ldflags, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
self.configname = None
@@ -921,7 +972,7 @@ class XcodeSettings(object):
"""Return a shell command to codesign the iOS output binary so it can
be deployed to a device. This should be run as the very last step of the
build."""
- if not (self.isIOS and self.spec['type'] == "executable"):
+ if not (self.isIOS and self.spec['type'] == 'executable'):
return []
settings = self.xcode_settings[configname]
@@ -981,7 +1032,23 @@ class XcodeSettings(object):
sdk_root = self._SdkPath(config_name)
if not sdk_root:
sdk_root = ''
- return l.replace('$(SDKROOT)', sdk_root)
+ # Xcode 7 started shipping with ".tbd" (text based stub) files instead of
+ # ".dylib" without providing real support for them. What it does, for
+ # "/usr/lib" libraries, is pass "-L/usr/lib -lname", which depends on the
+ # library order and causes collisions when building Chrome.
+ #
+ # Instead, substitute ".dylib" with ".tbd" in the generated project when
+ # the following conditions are both true:
+ # - the library is referenced in the gyp file as "$(SDKROOT)/**/*.dylib",
+ # - the ".dylib" file does not exist but a ".tbd" file does.
+ library = l.replace('$(SDKROOT)', sdk_root)
+ if l.startswith('$(SDKROOT)'):
+ basename, ext = os.path.splitext(library)
+ if ext == '.dylib' and not os.path.exists(library):
+ tbd_library = basename + '.tbd'
+ if os.path.exists(tbd_library):
+ library = tbd_library
+ return library
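# A standalone sketch of the .tbd fallback above: given a library path, it
# returns the ".tbd" twin when only the stub exists on disk.
import os

def adjust_library_path(library):
  basename, ext = os.path.splitext(library)
  if ext == '.dylib' and not os.path.exists(library):
    tbd_library = basename + '.tbd'
    if os.path.exists(tbd_library):
      return tbd_library
  return library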
def AdjustLibraries(self, libraries, config_name=None):
"""Transforms entries like 'Cocoa.framework' in libraries into entries like
@@ -1191,13 +1258,13 @@ def XcodeVersion():
# In that case this may be a CLT-only install so fall back to
# checking that version.
if len(version_list) < 2:
- raise GypError, "xcodebuild returned unexpected results"
+ raise GypError("xcodebuild returned unexpected results")
except:
version = CLTVersion()
if version:
- version = re.match('(\d\.\d\.?\d*)', version).groups()[0]
+ version = re.match(r'(\d\.\d\.?\d*)', version).groups()[0]
else:
- raise GypError, "No Xcode or CLT version detected!"
+ raise GypError("No Xcode or CLT version detected!")
# The CLT has no build information, so we return an empty string.
version_list = [version, '']
version = version_list[0]
@@ -1385,6 +1452,7 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
# These are filled in on a as-needed basis.
env = {
+ 'BUILT_FRAMEWORKS_DIR' : built_products_dir,
'BUILT_PRODUCTS_DIR' : built_products_dir,
'CONFIGURATION' : configuration,
'PRODUCT_NAME' : xcode_settings.GetProductName(),
diff --git a/deps/gyp/pylib/gyp/xcode_ninja.py b/deps/gyp/pylib/gyp/xcode_ninja.py
index 0e5a70c714..3820d6bf04 100644
--- a/deps/gyp/pylib/gyp/xcode_ninja.py
+++ b/deps/gyp/pylib/gyp/xcode_ninja.py
@@ -19,10 +19,13 @@ import re
import xml.sax.saxutils
-def _WriteWorkspace(main_gyp, sources_gyp):
+def _WriteWorkspace(main_gyp, sources_gyp, params):
""" Create a workspace to wrap main and sources gyp paths. """
(build_file_root, build_file_ext) = os.path.splitext(main_gyp)
workspace_path = build_file_root + '.xcworkspace'
+ options = params['options']
+ if options.generator_output:
+ workspace_path = os.path.join(options.generator_output, workspace_path)
try:
os.makedirs(workspace_path)
except OSError, e:
@@ -64,10 +67,13 @@ def _TargetFromSpec(old_spec, params):
target_name = old_spec.get('target_name')
product_name = old_spec.get('product_name', target_name)
+ product_extension = old_spec.get('product_extension')
ninja_target = {}
ninja_target['target_name'] = target_name
ninja_target['product_name'] = product_name
+ if product_extension:
+ ninja_target['product_extension'] = product_extension
ninja_target['toolset'] = old_spec.get('toolset')
ninja_target['default_configuration'] = old_spec.get('default_configuration')
ninja_target['configurations'] = {}
@@ -80,7 +86,8 @@ def _TargetFromSpec(old_spec, params):
if 'configurations' in old_spec:
for config in old_spec['configurations'].iterkeys():
- old_xcode_settings = old_spec['configurations'][config]['xcode_settings']
+ old_xcode_settings = \
+ old_spec['configurations'][config].get('xcode_settings', {})
if 'IPHONEOS_DEPLOYMENT_TARGET' in old_xcode_settings:
new_xcode_settings['CODE_SIGNING_REQUIRED'] = "NO"
new_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET'] = \
@@ -90,6 +97,10 @@ def _TargetFromSpec(old_spec, params):
new_xcode_settings
ninja_target['mac_bundle'] = old_spec.get('mac_bundle', 0)
+ ninja_target['ios_app_extension'] = old_spec.get('ios_app_extension', 0)
+ ninja_target['ios_watchkit_extension'] = \
+ old_spec.get('ios_watchkit_extension', 0)
+ ninja_target['ios_watchkit_app'] = old_spec.get('ios_watchkit_app', 0)
ninja_target['type'] = old_spec['type']
if ninja_toplevel:
ninja_target['actions'] = [
@@ -218,9 +229,11 @@ def CreateWrapper(target_list, target_dicts, data, params):
sources = []
for target, target_dict in target_dicts.iteritems():
- base = os.path.dirname(target)
+ base = os.path.dirname(target)
files = target_dict.get('sources', []) + \
target_dict.get('mac_bundle_resources', [])
+ for action in target_dict.get('actions', []):
+ files.extend(action.get('inputs', []))
# Remove files starting with $. These are mostly intermediate files for the
# build system.
files = [ file for file in files if not file.startswith('$')]
@@ -253,5 +266,5 @@ def CreateWrapper(target_list, target_dicts, data, params):
new_data[sources_gyp]['targets'].append(new_data_target)
# Write workspace to file.
- _WriteWorkspace(main_gyp, sources_gyp)
+ _WriteWorkspace(main_gyp, sources_gyp, params)
return (new_target_list, new_target_dicts, new_data)
diff --git a/deps/gyp/pylib/gyp/xcodeproj_file.py b/deps/gyp/pylib/gyp/xcodeproj_file.py
index fc40fcb2db..d08b7f7770 100644
--- a/deps/gyp/pylib/gyp/xcodeproj_file.py
+++ b/deps/gyp/pylib/gyp/xcodeproj_file.py
@@ -173,7 +173,7 @@ _escaped = re.compile('[\\\\"]|[\x00-\x1f]')
# Used by SourceTreeAndPathFromPath
-_path_leading_variable = re.compile('^\$\((.*?)\)(/(.*))?$')
+_path_leading_variable = re.compile(r'^\$\((.*?)\)(/(.*))?$')
def SourceTreeAndPathFromPath(input_path):
"""Given input_path, returns a tuple with sourceTree and path values.
@@ -196,7 +196,7 @@ def SourceTreeAndPathFromPath(input_path):
return (source_tree, output_path)
def ConvertVariablesToShellSyntax(input_string):
- return re.sub('\$\((.*?)\)', '${\\1}', input_string)
+ return re.sub(r'\$\((.*?)\)', '${\\1}', input_string)
class XCObject(object):
"""The abstract base of all class types used in Xcode project files.
@@ -341,13 +341,13 @@ class XCObject(object):
elif isinstance(value, dict):
# dicts are never strong.
if is_strong:
- raise TypeError, 'Strong dict for key ' + key + ' in ' + \
- self.__class__.__name__
+ raise TypeError('Strong dict for key ' + key + ' in ' + \
+ self.__class__.__name__)
else:
that._properties[key] = value.copy()
else:
- raise TypeError, 'Unexpected type ' + value.__class__.__name__ + \
- ' for key ' + key + ' in ' + self.__class__.__name__
+ raise TypeError('Unexpected type ' + value.__class__.__name__ + \
+ ' for key ' + key + ' in ' + self.__class__.__name__)
return that
@@ -366,8 +366,7 @@ class XCObject(object):
('name' in self._schema and self._schema['name'][3]):
return self._properties['name']
- raise NotImplementedError, \
- self.__class__.__name__ + ' must implement Name'
+ raise NotImplementedError(self.__class__.__name__ + ' must implement Name')
def Comment(self):
"""Return a comment string for the object.
@@ -466,10 +465,10 @@ class XCObject(object):
for descendant in descendants:
if descendant.id in ids:
other = ids[descendant.id]
- raise KeyError, \
+ raise KeyError(
'Duplicate ID %s, objects "%s" and "%s" in "%s"' % \
(descendant.id, str(descendant._properties),
- str(other._properties), self._properties['rootObject'].Name())
+ str(other._properties), self._properties['rootObject'].Name()))
ids[descendant.id] = descendant
def Children(self):
@@ -630,7 +629,7 @@ class XCObject(object):
sep
printable += end_tabs + '}'
else:
- raise TypeError, "Can't make " + value.__class__.__name__ + ' printable'
+ raise TypeError("Can't make " + value.__class__.__name__ + ' printable')
if comment != None:
printable += ' ' + self._EncodeComment(comment)
@@ -756,31 +755,31 @@ class XCObject(object):
for property, value in properties.iteritems():
# Make sure the property is in the schema.
if not property in self._schema:
- raise KeyError, property + ' not in ' + self.__class__.__name__
+ raise KeyError(property + ' not in ' + self.__class__.__name__)
# Make sure the property conforms to the schema.
(is_list, property_type, is_strong) = self._schema[property][0:3]
if is_list:
if value.__class__ != list:
- raise TypeError, \
+ raise TypeError(
property + ' of ' + self.__class__.__name__ + \
- ' must be list, not ' + value.__class__.__name__
+ ' must be list, not ' + value.__class__.__name__)
for item in value:
if not isinstance(item, property_type) and \
not (item.__class__ == unicode and property_type == str):
# Accept unicode where str is specified. str is treated as
# UTF-8-encoded.
- raise TypeError, \
+ raise TypeError(
'item of ' + property + ' of ' + self.__class__.__name__ + \
' must be ' + property_type.__name__ + ', not ' + \
- item.__class__.__name__
+ item.__class__.__name__)
elif not isinstance(value, property_type) and \
not (value.__class__ == unicode and property_type == str):
# Accept unicode where str is specified. str is treated as
# UTF-8-encoded.
- raise TypeError, \
+ raise TypeError(
property + ' of ' + self.__class__.__name__ + ' must be ' + \
- property_type.__name__ + ', not ' + value.__class__.__name__
+ property_type.__name__ + ', not ' + value.__class__.__name__)
# Checks passed, perform the assignment.
if do_copy:
@@ -804,9 +803,9 @@ class XCObject(object):
elif isinstance(value, dict):
self._properties[property] = value.copy()
else:
- raise TypeError, "Don't know how to copy a " + \
- value.__class__.__name__ + ' object for ' + \
- property + ' in ' + self.__class__.__name__
+ raise TypeError("Don't know how to copy a " + \
+ value.__class__.__name__ + ' object for ' + \
+ property + ' in ' + self.__class__.__name__)
else:
self._properties[property] = value
@@ -837,15 +836,15 @@ class XCObject(object):
# Schema validation.
if not key in self._schema:
- raise KeyError, key + ' not in ' + self.__class__.__name__
+ raise KeyError(key + ' not in ' + self.__class__.__name__)
(is_list, property_type, is_strong) = self._schema[key][0:3]
if not is_list:
- raise TypeError, key + ' of ' + self.__class__.__name__ + ' must be list'
+ raise TypeError(key + ' of ' + self.__class__.__name__ + ' must be list')
if not isinstance(value, property_type):
- raise TypeError, 'item of ' + key + ' of ' + self.__class__.__name__ + \
- ' must be ' + property_type.__name__ + ', not ' + \
- value.__class__.__name__
+ raise TypeError('item of ' + key + ' of ' + self.__class__.__name__ + \
+ ' must be ' + property_type.__name__ + ', not ' + \
+ value.__class__.__name__)
# If the property doesn't exist yet, create a new empty list to receive the
# item.
@@ -869,7 +868,7 @@ class XCObject(object):
for property, attributes in self._schema.iteritems():
(is_list, property_type, is_strong, is_required) = attributes[0:4]
if is_required and not property in self._properties:
- raise KeyError, self.__class__.__name__ + ' requires ' + property
+ raise KeyError(self.__class__.__name__ + ' requires ' + property)
def _SetDefaultsFromSchema(self):
"""Assign object default values according to the schema. This will not
@@ -1143,16 +1142,16 @@ class PBXGroup(XCHierarchicalElement):
child_path = child.PathFromSourceTreeAndPath()
if child_path:
if child_path in self._children_by_path:
- raise ValueError, 'Found multiple children with path ' + child_path
+ raise ValueError('Found multiple children with path ' + child_path)
self._children_by_path[child_path] = child
if isinstance(child, PBXVariantGroup):
child_name = child._properties.get('name', None)
key = (child_name, child_path)
if key in self._variant_children_by_name_and_path:
- raise ValueError, 'Found multiple PBXVariantGroup children with ' + \
- 'name ' + str(child_name) + ' and path ' + \
- str(child_path)
+ raise ValueError('Found multiple PBXVariantGroup children with ' + \
+ 'name ' + str(child_name) + ' and path ' + \
+ str(child_path))
self._variant_children_by_name_and_path[key] = child
def AppendChild(self, child):
@@ -1493,6 +1492,7 @@ class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
'icns': 'image.icns',
'java': 'sourcecode.java',
'js': 'sourcecode.javascript',
+ 'kext': 'wrapper.kext',
'm': 'sourcecode.c.objc',
'mm': 'sourcecode.cpp.objcpp',
'nib': 'wrapper.nib',
@@ -1508,10 +1508,12 @@ class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
's': 'sourcecode.asm',
'storyboard': 'file.storyboard',
'strings': 'text.plist.strings',
+ 'swift': 'sourcecode.swift',
'ttf': 'file',
'xcassets': 'folder.assetcatalog',
'xcconfig': 'text.xcconfig',
'xcdatamodel': 'wrapper.xcdatamodel',
+ 'xcdatamodeld':'wrapper.xcdatamodeld',
'xib': 'file.xib',
'y': 'sourcecode.yacc',
}
@@ -1606,7 +1608,7 @@ class XCConfigurationList(XCObject):
if configuration._properties['name'] == name:
return configuration
- raise KeyError, name
+ raise KeyError(name)
def DefaultConfiguration(self):
"""Convenience accessor to obtain the default XCBuildConfiguration."""
@@ -1663,7 +1665,7 @@ class XCConfigurationList(XCObject):
value = configuration_value
else:
if value != configuration_value:
- raise ValueError, 'Variant values for ' + key
+ raise ValueError('Variant values for ' + key)
return value
@@ -1770,8 +1772,8 @@ class XCBuildPhase(XCObject):
# added, either as a child or deeper descendant. The second item should
# be a boolean indicating whether files should be added into hierarchical
# groups or one single flat group.
- raise NotImplementedError, \
- self.__class__.__name__ + ' must implement FileGroup'
+ raise NotImplementedError(
+ self.__class__.__name__ + ' must implement FileGroup')
def _AddPathToDict(self, pbxbuildfile, path):
"""Adds path to the dict tracking paths belonging to this build phase.
@@ -1780,7 +1782,7 @@ class XCBuildPhase(XCObject):
"""
if path in self._files_by_path:
- raise ValueError, 'Found multiple build files with path ' + path
+ raise ValueError('Found multiple build files with path ' + path)
self._files_by_path[path] = pbxbuildfile
def _AddBuildFileToDicts(self, pbxbuildfile, path=None):
@@ -1835,8 +1837,8 @@ class XCBuildPhase(XCObject):
# problem.
if xcfilelikeelement in self._files_by_xcfilelikeelement and \
self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile:
- raise ValueError, 'Found multiple build files for ' + \
- xcfilelikeelement.Name()
+ raise ValueError('Found multiple build files for ' + \
+ xcfilelikeelement.Name())
self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile
def AppendBuildFile(self, pbxbuildfile, path=None):
@@ -1950,6 +1952,7 @@ class PBXCopyFilesBuildPhase(XCBuildPhase):
# path_tree_to_subfolder maps names of Xcode variables to the associated
# dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase object.
path_tree_to_subfolder = {
+ 'BUILT_FRAMEWORKS_DIR': 10, # Frameworks Directory
'BUILT_PRODUCTS_DIR': 16, # Products Directory
# Other types that can be chosen via the Xcode UI.
# TODO(mark): Map Xcode variable names to these.
@@ -1957,7 +1960,6 @@ class PBXCopyFilesBuildPhase(XCBuildPhase):
# : 6, # Executables: 6
# : 7, # Resources
# : 15, # Java Resources
- # : 10, # Frameworks
# : 11, # Shared Frameworks
# : 12, # Shared Support
# : 13, # PlugIns
@@ -2000,8 +2002,8 @@ class PBXCopyFilesBuildPhase(XCBuildPhase):
subfolder = 0
relative_path = path[1:]
else:
- raise ValueError, 'Can\'t use path %s in a %s' % \
- (path, self.__class__.__name__)
+ raise ValueError('Can\'t use path %s in a %s' % \
+ (path, self.__class__.__name__))
self._properties['dstPath'] = relative_path
self._properties['dstSubfolderSpec'] = subfolder
@@ -2158,7 +2160,6 @@ class XCTarget(XCRemoteObject):
if configs.HasBuildSetting('PRODUCT_NAME') == 0:
configs.SetBuildSetting('PRODUCT_NAME',
self._properties['productName'])
- configs.SetBuildSetting('COMBINE_HIDPI_IMAGES', 'YES')
def AddDependency(self, other):
pbxproject = self.PBXProjectAncestor()
@@ -2238,10 +2239,16 @@ class PBXNativeTarget(XCTarget):
# Mapping from Xcode product-types to settings. The settings are:
# filetype : used for explicitFileType in the project file
# prefix : the prefix for the file name
- # suffix : the suffix for the filen ame
+ # suffix : the suffix for the file name
_product_filetypes = {
- 'com.apple.product-type.application': ['wrapper.application',
- '', '.app'],
+ 'com.apple.product-type.application': ['wrapper.application',
+ '', '.app'],
+ 'com.apple.product-type.application.watchapp': ['wrapper.application',
+ '', '.app'],
+ 'com.apple.product-type.watchkit-extension': ['wrapper.app-extension',
+ '', '.appex'],
+ 'com.apple.product-type.app-extension': ['wrapper.app-extension',
+ '', '.appex'],
'com.apple.product-type.bundle': ['wrapper.cfbundle',
'', '.bundle'],
'com.apple.product-type.framework': ['wrapper.framework',
@@ -2256,6 +2263,8 @@ class PBXNativeTarget(XCTarget):
'', '.xctest'],
'com.googlecode.gyp.xcode.bundle': ['compiled.mach-o.dylib',
'', '.so'],
+ 'com.apple.product-type.kernel-extension': ['wrapper.kext',
+ '', '.kext'],
}
def __init__(self, properties=None, id=None, parent=None,
@@ -2314,11 +2323,11 @@ class PBXNativeTarget(XCTarget):
if force_extension is not None:
# If it's a wrapper (bundle), set WRAPPER_EXTENSION.
+ # Extension override.
+ suffix = '.' + force_extension
if filetype.startswith('wrapper.'):
self.SetBuildSetting('WRAPPER_EXTENSION', force_extension)
else:
- # Extension override.
- suffix = '.' + force_extension
self.SetBuildSetting('EXECUTABLE_EXTENSION', force_extension)
if filetype.startswith('compiled.mach-o.executable'):
@@ -2734,8 +2743,53 @@ class PBXProject(XCContainerPortal):
self._SetUpProductReferences(other_pbxproject, product_group, project_ref)
+ inherit_unique_symroot = self._AllSymrootsUnique(other_pbxproject, False)
+ targets = other_pbxproject.GetProperty('targets')
+ if all(self._AllSymrootsUnique(t, inherit_unique_symroot) for t in targets):
+ dir_path = project_ref._properties['path']
+ product_group._hashables.extend(dir_path)
+
return [product_group, project_ref]
+ def _AllSymrootsUnique(self, target, inherit_unique_symroot):
+ # Returns True if all configurations have a unique 'SYMROOT' attribute.
+ # The value of inherit_unique_symroot decides whether a configuration that
+ # doesn't define an explicit value for 'SYMROOT' is assumed to inherit a
+ # unique one from its parent.
+ symroots = self._DefinedSymroots(target)
+ for s in self._DefinedSymroots(target):
+ if (s is not None and not self._IsUniqueSymrootForTarget(s) or
+ s is None and not inherit_unique_symroot):
+ return False
+ return True if symroots else inherit_unique_symroot
+
+ def _DefinedSymroots(self, target):
+ # Returns all values for the 'SYMROOT' attribute defined in all
+ # configurations for this target. If a configuration doesn't define the
+ # 'SYMROOT' attribute, None is added to the returned set. If no
+ # configuration defines the 'SYMROOT' attribute, an empty set is
+ # returned.
+ config_list = target.GetProperty('buildConfigurationList')
+ symroots = set()
+ for config in config_list.GetProperty('buildConfigurations'):
+ setting = config.GetProperty('buildSettings')
+ if 'SYMROOT' in setting:
+ symroots.add(setting['SYMROOT'])
+ else:
+ symroots.add(None)
+ if len(symroots) == 1 and None in symroots:
+ return set()
+ return symroots
+
+ def _IsUniqueSymrootForTarget(self, symroot):
+ # This method returns True if all configurations in target contain a
+ # 'SYMROOT' attribute that is unique for the given target. A value is
+ # unique if the Xcode macro '$SRCROOT' appears in it in any form.
+ uniquifier = ['$SRCROOT', '$(SRCROOT)']
+ if any(x in symroot for x in uniquifier):
+ return True
+ return False
+
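# An example of the SYMROOT uniqueness check above, with plain dicts
# standing in for XCBuildConfiguration build settings:
def defined_symroots(build_settings_list):
  symroots = set()
  for settings in build_settings_list:
    symroots.add(settings.get('SYMROOT'))  # None when undefined
  if symroots == set([None]):
    return set()
  return symroots

def is_unique_symroot(symroot):
  return '$SRCROOT' in symroot or '$(SRCROOT)' in symroot

configs = [{'SYMROOT': '$(SRCROOT)/build'}, {'SYMROOT': '$(SRCROOT)/build'}]
assert all(is_unique_symroot(s) for s in defined_symroots(configs))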
def _SetUpProductReferences(self, other_pbxproject, product_group,
project_ref):
# TODO(mark): This only adds references to products in other_pbxproject
@@ -2804,7 +2858,7 @@ class PBXProject(XCContainerPortal):
product_group = ref_dict['ProductGroup']
product_group._properties['children'] = sorted(
product_group._properties['children'],
- cmp=lambda x, y: CompareProducts(x, y, remote_products))
+ cmp=lambda x, y, rp=remote_products: CompareProducts(x, y, rp))
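# On the default-argument binding above: a default is evaluated once when
# the lambda is created, whereas a plain closure looks the name up at call
# time; that is presumably the motivation for threading remote_products
# through rp=. The classic pitfall, distilled:
closures = [lambda: i for i in range(3)]
bound = [lambda i=i: i for i in range(3)]
assert [f() for f in closures] == [2, 2, 2]
assert [f() for f in bound] == [0, 1, 2]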
class XCProjectFile(XCObject):
@@ -2816,23 +2870,6 @@ class XCProjectFile(XCObject):
'rootObject': [0, PBXProject, 1, 1],
})
- def SetXcodeVersion(self, version):
- version_to_object_version = {
- '2.4': 45,
- '3.0': 45,
- '3.1': 45,
- '3.2': 46,
- }
- if not version in version_to_object_version:
- supported_str = ', '.join(sorted(version_to_object_version.keys()))
- raise Exception(
- 'Unsupported Xcode version %s (supported: %s)' %
- ( version, supported_str ) )
- compatibility_version = 'Xcode %s' % version
- self._properties['rootObject'].SetProperty('compatibilityVersion',
- compatibility_version)
- self.SetProperty('objectVersion', version_to_object_version[version]);
-
def ComputeIDs(self, recursive=True, overwrite=True, hash=None):
# Although XCProjectFile is implemented here as an XCObject, it's not a
# proper object in the Xcode sense, and it certainly doesn't have its own