author     Dane Springmeyer <dane@mapbox.com>  2014-05-27 18:55:20 -0700
committer  Dane Springmeyer <dane@mapbox.com>  2014-05-27 18:55:20 -0700
commit     a1835b3fc27b8fecaa947eb578c60450e3063635 (patch)
tree       c75dba0cbc30b2ae8aeda4321aa4e210dd88ef3e /deps
parent     2381fedd3793d9cf34a2d6423645eca5bbafb16e (diff)
download   qtlocation-mapboxgl-a1835b3fc27b8fecaa947eb578c60450e3063635.tar.gz
update gyp to svn r1923
Diffstat (limited to 'deps')
-rw-r--r--  deps/gyp/PRESUBMIT.py                    |   2
-rw-r--r--  deps/gyp/pylib/gyp/MSVSSettings.py       |  73
-rw-r--r--  deps/gyp/pylib/gyp/MSVSUtil.py           |  21
-rw-r--r--  deps/gyp/pylib/gyp/MSVSVersion.py        |   2
-rw-r--r--  deps/gyp/pylib/gyp/common.py             |  67
-rw-r--r--  deps/gyp/pylib/gyp/generator/android.py  |  77
-rw-r--r--  deps/gyp/pylib/gyp/generator/eclipse.py  |   7
-rw-r--r--  deps/gyp/pylib/gyp/generator/make.py     |   3
-rw-r--r--  deps/gyp/pylib/gyp/generator/msvs.py     |  88
-rw-r--r--  deps/gyp/pylib/gyp/generator/ninja.py    | 163
-rw-r--r--  deps/gyp/pylib/gyp/generator/xcode.py    |  16
-rw-r--r--  deps/gyp/pylib/gyp/input.py              | 403
-rwxr-xr-x  deps/gyp/pylib/gyp/mac_tool.py           |   6
-rw-r--r--  deps/gyp/pylib/gyp/msvs_emulation.py     |  20
-rwxr-xr-x  deps/gyp/pylib/gyp/win_tool.py           |  31
-rw-r--r--  deps/gyp/pylib/gyp/xcode_emulation.py    | 343
16 files changed, 839 insertions, 483 deletions
diff --git a/deps/gyp/PRESUBMIT.py b/deps/gyp/PRESUBMIT.py
index 9c474eb2b1..b79316a915 100644
--- a/deps/gyp/PRESUBMIT.py
+++ b/deps/gyp/PRESUBMIT.py
@@ -16,8 +16,6 @@ PYLINT_BLACKLIST = [
'test/lib/TestCmd.py',
'test/lib/TestCommon.py',
'test/lib/TestGyp.py',
- # Needs style fix.
- 'pylib/gyp/generator/xcode.py',
]
diff --git a/deps/gyp/pylib/gyp/MSVSSettings.py b/deps/gyp/pylib/gyp/MSVSSettings.py
index 773b74e984..205b3b5b9b 100644
--- a/deps/gyp/pylib/gyp/MSVSSettings.py
+++ b/deps/gyp/pylib/gyp/MSVSSettings.py
@@ -367,6 +367,35 @@ fix_vc_macro_slashes_regex = re.compile(
r'(\$\((?:%s)\))(?:[\\/]+)' % "|".join(fix_vc_macro_slashes_regex_list)
)
+# Regular expression to detect keys that were generated by exclusion lists
+_EXCLUDED_SUFFIX_RE = re.compile('^(.*)_excluded$')
+
+
+def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
+ """Verify that 'setting' is valid if it is generated from an exclusion list.
+
+ If the setting appears to be generated from an exclusion list, the root name
+ is checked.
+
+ Args:
+ setting: A string that is the setting name to validate
+ settings: A dictionary where the keys are valid settings
+ error_msg: The message to emit in the event of error
+ stderr: The stream receiving the error messages.
+ """
+ # This may be unrecognized because it's an exclusion list. If the
+ # setting name has the _excluded suffix, then check the root name.
+ unrecognized = True
+ m = re.match(_EXCLUDED_SUFFIX_RE, setting)
+ if m:
+ root_setting = m.group(1)
+ unrecognized = root_setting not in settings
+
+ if unrecognized:
+ # We don't know this setting. Give a warning.
+ print >> stderr, error_msg
+
+
def FixVCMacroSlashes(s):
"""Replace macros which have excessive following slashes.
@@ -403,9 +432,6 @@ def ConvertVCMacrosToMSBuild(s):
return s
-_EXCLUDED_SUFFIX_RE = re.compile('^(.*)_excluded$')
-
-
def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
"""Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+).
@@ -432,19 +458,12 @@ def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
print >> stderr, ('Warning: while converting %s/%s to MSBuild, '
'%s' % (msvs_tool_name, msvs_setting, e))
else:
- # This may be unrecognized because it's an exclusion list. If the
- # setting name has the _excluded suffix, then check the root name.
- unrecognized = True
- m = re.match(_EXCLUDED_SUFFIX_RE, msvs_setting)
- if m:
- root_msvs_setting = m.group(1)
- unrecognized = root_msvs_setting not in msvs_tool
-
- if unrecognized:
- # We don't know this setting. Give a warning.
- print >> stderr, ('Warning: unrecognized setting %s/%s '
- 'while converting to MSBuild.' %
- (msvs_tool_name, msvs_setting))
+ _ValidateExclusionSetting(msvs_setting,
+ msvs_tool,
+ ('Warning: unrecognized setting %s/%s '
+ 'while converting to MSBuild.' %
+ (msvs_tool_name, msvs_setting)),
+ stderr)
else:
print >> stderr, ('Warning: unrecognized tool %s while converting to '
'MSBuild.' % msvs_tool_name)
@@ -495,8 +514,12 @@ def _ValidateSettings(validators, settings, stderr):
print >> stderr, ('Warning: for %s/%s, %s' %
(tool_name, setting, e))
else:
- print >> stderr, ('Warning: unrecognized setting %s/%s' %
- (tool_name, setting))
+ _ValidateExclusionSetting(setting,
+ tool_validators,
+ ('Warning: unrecognized setting %s/%s' %
+ (tool_name, setting)),
+ stderr)
+
else:
print >> stderr, ('Warning: unrecognized tool %s' % tool_name)
@@ -593,7 +616,9 @@ _Same(_compile, 'DebugInformationFormat',
_Same(_compile, 'EnableEnhancedInstructionSet',
_Enumeration(['NotSet',
'StreamingSIMDExtensions', # /arch:SSE
- 'StreamingSIMDExtensions2'])) # /arch:SSE2
+ 'StreamingSIMDExtensions2', # /arch:SSE2
+ 'AdvancedVectorExtensions', # /arch:AVX (vs2012+)
+ 'NoExtensions',])) # /arch:IA32 (vs2012+)
_Same(_compile, 'ErrorReporting',
_Enumeration(['None', # /errorReport:none
'Prompt', # /errorReport:prompt
@@ -848,13 +873,6 @@ _Moved(_link, 'UseLibraryDependencyInputs', 'ProjectReference', _boolean)
# MSVS options not found in MSBuild.
_MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean)
_MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean)
-# These settings generate correctly in the MSVS output files when using
-# e.g. DelayLoadDLLs! or AdditionalDependencies! to exclude files from
-# configuration entries, but result in spurious artifacts which can be
-# safely ignored here. See crbug.com/246570
-_MSVSOnly(_link, 'AdditionalLibraryDirectories_excluded', _folder_list)
-_MSVSOnly(_link, 'DelayLoadDLLs_excluded', _file_list)
-_MSVSOnly(_link, 'AdditionalDependencies_excluded', _file_list)
# MSBuild options not found in MSVS.
_MSBuildOnly(_link, 'BuildingInIDE', _boolean)
@@ -1003,9 +1021,6 @@ _Same(_lib, 'TargetMachine', _target_machine_enumeration)
# ProjectReference. We may want to validate that they are consistent.
_Moved(_lib, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
-# TODO(jeanluc) I don't think these are genuine settings but byproducts of Gyp.
-_MSVSOnly(_lib, 'AdditionalLibraryDirectories_excluded', _folder_list)
-
_MSBuildOnly(_lib, 'DisplayLibrary', _string) # /LIST Visible='false'
_MSBuildOnly(_lib, 'ErrorReporting',
_Enumeration([], new=['PromptImmediately', # /ERRORREPORT:PROMPT
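
Note: the new `_ValidateExclusionSetting` helper above deduplicates a suffix
check that previously lived inline in `ConvertToMSBuildSettings`, and it is the
reason the explicit `*_excluded` registrations for `_link` and `_lib` can be
deleted in the hunks above. A minimal standalone sketch of the check (Python 2
to match gyp; the setting names here are only examples):

    import re
    import sys

    _EXCLUDED_SUFFIX_RE = re.compile('^(.*)_excluded$')

    def _ValidateExclusionSetting(setting, settings, error_msg,
                                  stderr=sys.stderr):
      # 'DelayLoadDLLs_excluded' is valid iff the root name
      # 'DelayLoadDLLs' is a known setting.
      m = re.match(_EXCLUDED_SUFFIX_RE, setting)
      if not (m and m.group(1) in settings):
        print >> stderr, error_msg

    _ValidateExclusionSetting('DelayLoadDLLs_excluded', {'DelayLoadDLLs': 1},
                              'unused warning')         # recognized: no output
    _ValidateExclusionSetting('Bogus_excluded', {'DelayLoadDLLs': 1},
                              'Warning: unrecognized')  # warns on stderr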
diff --git a/deps/gyp/pylib/gyp/MSVSUtil.py b/deps/gyp/pylib/gyp/MSVSUtil.py
index 62e8d260d4..fbf3ed2e3c 100644
--- a/deps/gyp/pylib/gyp/MSVSUtil.py
+++ b/deps/gyp/pylib/gyp/MSVSUtil.py
@@ -109,15 +109,16 @@ def ShardTargets(target_list, target_dicts):
new_target_dicts[t] = target_dicts[t]
# Shard dependencies.
for t in new_target_dicts:
- dependencies = copy.copy(new_target_dicts[t].get('dependencies', []))
- new_dependencies = []
- for d in dependencies:
- if d in targets_to_shard:
- for i in range(targets_to_shard[d]):
- new_dependencies.append(_ShardName(d, i))
- else:
- new_dependencies.append(d)
- new_target_dicts[t]['dependencies'] = new_dependencies
+ for deptype in ('dependencies', 'dependencies_original'):
+ dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
+ new_dependencies = []
+ for d in dependencies:
+ if d in targets_to_shard:
+ for i in range(targets_to_shard[d]):
+ new_dependencies.append(_ShardName(d, i))
+ else:
+ new_dependencies.append(d)
+ new_target_dicts[t][deptype] = new_dependencies
return (new_target_list, new_target_dicts)
@@ -264,4 +265,4 @@ def InsertLargePdbShims(target_list, target_dicts, vars):
# Update the original target to depend on the shim target.
target_dict.setdefault('dependencies', []).append(full_shim_target_name)
- return (target_list, target_dicts)
\ No newline at end of file
+ return (target_list, target_dicts)
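
Note: `ShardTargets` now rewrites both `dependencies` and the new
`dependencies_original` key. The expansion itself is unchanged; assuming
`_ShardName` appends `_<n>` as elsewhere in MSVSUtil, the rewrite behaves like
this sketch:

    def _ShardName(name, number):
      # Assumed shape of MSVSUtil's helper: 'big_lib' -> 'big_lib_0', ...
      return '%s_%d' % (name, number)

    targets_to_shard = {'big_lib': 3}  # hypothetical target sharded three ways
    deps = ['small_lib', 'big_lib']
    new_deps = []
    for d in deps:
      if d in targets_to_shard:
        new_deps.extend(_ShardName(d, i) for i in range(targets_to_shard[d]))
      else:
        new_deps.append(d)
    assert new_deps == ['small_lib', 'big_lib_0', 'big_lib_1', 'big_lib_2']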
diff --git a/deps/gyp/pylib/gyp/MSVSVersion.py b/deps/gyp/pylib/gyp/MSVSVersion.py
index 03b6d8ad42..bcd6122f2d 100644
--- a/deps/gyp/pylib/gyp/MSVSVersion.py
+++ b/deps/gyp/pylib/gyp/MSVSVersion.py
@@ -379,7 +379,7 @@ def SelectVisualStudioVersion(version='auto'):
if version == 'auto':
version = os.environ.get('GYP_MSVS_VERSION', 'auto')
version_map = {
- 'auto': ('10.0', '12.0', '9.0', '8.0', '11.0'),
+ 'auto': ('12.0', '10.0', '9.0', '8.0', '11.0'),
'2005': ('8.0',),
'2005e': ('8.0',),
'2008': ('9.0',),
diff --git a/deps/gyp/pylib/gyp/common.py b/deps/gyp/pylib/gyp/common.py
index f9c6c6f3a8..df71d973e1 100644
--- a/deps/gyp/pylib/gyp/common.py
+++ b/deps/gyp/pylib/gyp/common.py
@@ -4,6 +4,7 @@
from __future__ import with_statement
+import collections
import errno
import filecmp
import os.path
@@ -472,6 +473,72 @@ def uniquer(seq, idfun=None):
return result
+# Based on http://code.activestate.com/recipes/576694/.
+class OrderedSet(collections.MutableSet):
+ def __init__(self, iterable=None):
+ self.end = end = []
+ end += [None, end, end] # sentinel node for doubly linked list
+ self.map = {} # key --> [key, prev, next]
+ if iterable is not None:
+ self |= iterable
+
+ def __len__(self):
+ return len(self.map)
+
+ def __contains__(self, key):
+ return key in self.map
+
+ def add(self, key):
+ if key not in self.map:
+ end = self.end
+ curr = end[1]
+ curr[2] = end[1] = self.map[key] = [key, curr, end]
+
+ def discard(self, key):
+ if key in self.map:
+ key, prev_item, next_item = self.map.pop(key)
+ prev_item[2] = next_item
+ next_item[1] = prev_item
+
+ def __iter__(self):
+ end = self.end
+ curr = end[2]
+ while curr is not end:
+ yield curr[0]
+ curr = curr[2]
+
+ def __reversed__(self):
+ end = self.end
+ curr = end[1]
+ while curr is not end:
+ yield curr[0]
+ curr = curr[1]
+
+ # The second argument is an addition that causes a pylint warning.
+ def pop(self, last=True): # pylint: disable=W0221
+ if not self:
+ raise KeyError('set is empty')
+ key = self.end[1][0] if last else self.end[2][0]
+ self.discard(key)
+ return key
+
+ def __repr__(self):
+ if not self:
+ return '%s()' % (self.__class__.__name__,)
+ return '%s(%r)' % (self.__class__.__name__, list(self))
+
+ def __eq__(self, other):
+ if isinstance(other, OrderedSet):
+ return len(self) == len(other) and list(self) == list(other)
+ return set(self) == set(other)
+
+ # Extensions to the recipe.
+ def update(self, iterable):
+ for i in iterable:
+ if i not in self:
+ self.add(i)
+
+
class CycleError(Exception):
"""An exception raised when an unexpected cycle is detected."""
def __init__(self, nodes):
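
Note: `OrderedSet` moves into `gyp.common` (the msvs generator's private copy
is deleted below) so msvs, ninja, and input.py can share one implementation. It
preserves insertion order with a doubly linked list threaded through
`self.map`. A short usage sketch, directly against the class above:

    from gyp.common import OrderedSet

    s = OrderedSet(['b', 'a', 'b', 'c'])
    assert list(s) == ['b', 'a', 'c']  # insertion order, duplicates dropped
    s.discard('a')
    s.update(['d', 'b'])               # extension method added above
    assert list(s) == ['b', 'c', 'd']
    assert s.pop() == 'd'              # pops from the tail by default
    assert s.pop(last=False) == 'b'    # ... or from the head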
diff --git a/deps/gyp/pylib/gyp/generator/android.py b/deps/gyp/pylib/gyp/generator/android.py
index 41346e2b1c..39884749b1 100644
--- a/deps/gyp/pylib/gyp/generator/android.py
+++ b/deps/gyp/pylib/gyp/generator/android.py
@@ -55,7 +55,7 @@ generator_additional_path_sections = []
generator_extra_sources_for_rules = []
-SHARED_FOOTER = """\
+ALL_MODULES_FOOTER = """\
# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from
# all the included sub-makefiles. This is just here to clarify.
gyp_all_modules:
@@ -133,7 +133,7 @@ class AndroidMkWriter(object):
self.android_top_dir = android_top_dir
def Write(self, qualified_target, relative_target, base_path, output_filename,
- spec, configs, part_of_all):
+ spec, configs, part_of_all, write_alias_target):
"""The main entry point: writes a .mk file for a single target.
Arguments:
@@ -144,6 +144,8 @@ class AndroidMkWriter(object):
output_filename: output .mk file name to write
spec, configs: gyp info
part_of_all: flag indicating this target is part of 'all'
+ write_alias_target: flag indicating whether to create short aliases for
+ this target
"""
gyp.common.EnsureDirExists(output_filename)
@@ -186,11 +188,19 @@ class AndroidMkWriter(object):
self.WriteLn('LOCAL_MODULE_TAGS := optional')
if self.toolset == 'host':
self.WriteLn('LOCAL_IS_HOST_MODULE := true')
+ else:
+ self.WriteLn('LOCAL_MODULE_TARGET_ARCH := '
+ '$(TARGET_$(GYP_VAR_PREFIX)ARCH)')
# Grab output directories; needed for Actions and Rules.
- self.WriteLn('gyp_intermediate_dir := $(call local-intermediates-dir)')
+ if self.toolset == 'host':
+ self.WriteLn('gyp_intermediate_dir := '
+ '$(call local-intermediates-dir)')
+ else:
+ self.WriteLn('gyp_intermediate_dir := '
+ '$(call local-intermediates-dir,,$(GYP_VAR_PREFIX))')
self.WriteLn('gyp_shared_intermediate_dir := '
- '$(call intermediates-dir-for,GYP,shared)')
+ '$(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))')
self.WriteLn()
# List files this target depends on so that actions/rules/copies/sources
@@ -226,7 +236,8 @@ class AndroidMkWriter(object):
if spec.get('sources', []) or extra_sources:
self.WriteSources(spec, configs, extra_sources)
- self.WriteTarget(spec, configs, deps, link_deps, part_of_all)
+ self.WriteTarget(spec, configs, deps, link_deps, part_of_all,
+ write_alias_target)
# Update global list of target outputs, used in dependency tracking.
target_outputs[qualified_target] = ('path', self.output_binary)
@@ -291,6 +302,7 @@ class AndroidMkWriter(object):
# writing duplicate dummy rules for those outputs.
main_output = make.QuoteSpaces(self.LocalPathify(outputs[0]))
self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
+ self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
self.WriteLn('%s: gyp_intermediate_dir := '
'$(abspath $(gyp_intermediate_dir))' % main_output)
self.WriteLn('%s: gyp_shared_intermediate_dir := '
@@ -337,13 +349,10 @@ class AndroidMkWriter(object):
"""
if len(rules) == 0:
return
- rule_trigger = '%s_rule_trigger' % self.android_module
- did_write_rule = False
for rule in rules:
if len(rule.get('rule_sources', [])) == 0:
continue
- did_write_rule = True
name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
rule['rule_name']))
self.WriteLn('\n### Generated for rule "%s":' % name)
@@ -391,6 +400,7 @@ class AndroidMkWriter(object):
outputs = map(self.LocalPathify, outputs)
main_output = outputs[0]
self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
+ self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
self.WriteLn('%s: gyp_intermediate_dir := '
'$(abspath $(gyp_intermediate_dir))' % main_output)
self.WriteLn('%s: gyp_shared_intermediate_dir := '
@@ -412,13 +422,9 @@ class AndroidMkWriter(object):
# Make each output depend on the main output, with an empty command
# to force make to notice that the mtime has changed.
self.WriteLn('%s: %s ;' % (output, main_output))
- self.WriteLn('.PHONY: %s' % (rule_trigger))
- self.WriteLn('%s: %s' % (rule_trigger, main_output))
- self.WriteLn('')
- if did_write_rule:
- extra_sources.append(rule_trigger) # Force all rules to run.
- self.WriteLn('### Finished generating for all rules')
- self.WriteLn('')
+ self.WriteLn()
+
+ self.WriteLn()
def WriteCopies(self, copies, extra_outputs):
@@ -501,6 +507,9 @@ class AndroidMkWriter(object):
self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) '
'$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))')
self.WriteLn('LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))')
+ # Android uses separate flags for assembly file invocations, but gyp expects
+ # the same CFLAGS to be applied:
+ self.WriteLn('LOCAL_ASFLAGS := $(LOCAL_CFLAGS)')
def WriteSources(self, spec, configs, extra_sources):
@@ -609,16 +618,16 @@ class AndroidMkWriter(object):
prefix = ''
if spec['toolset'] == 'host':
- suffix = '_host_gyp'
+ suffix = '_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp'
else:
suffix = '_gyp'
if self.path:
- name = '%s%s_%s%s' % (prefix, self.path, self.target, suffix)
+ middle = make.StringToMakefileVariable('%s_%s' % (self.path, self.target))
else:
- name = '%s%s%s' % (prefix, self.target, suffix)
+ middle = make.StringToMakefileVariable(self.target)
- return make.StringToMakefileVariable(name)
+ return ''.join([prefix, middle, suffix])
def ComputeOutputParts(self, spec):
@@ -680,15 +689,15 @@ class AndroidMkWriter(object):
if self.toolset == 'host':
path = '$(HOST_OUT_INTERMEDIATE_LIBRARIES)'
else:
- path = '$(TARGET_OUT_INTERMEDIATE_LIBRARIES)'
+ path = '$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)'
else:
# Other targets just get built into their intermediate dir.
if self.toolset == 'host':
path = '$(call intermediates-dir-for,%s,%s,true)' % (self.android_class,
self.android_module)
else:
- path = '$(call intermediates-dir-for,%s,%s)' % (self.android_class,
- self.android_module)
+ path = ('$(call intermediates-dir-for,%s,%s,,,$(GYP_VAR_PREFIX))'
+ % (self.android_class, self.android_module))
assert spec.get('product_dir') is None # TODO: not supported?
return os.path.join(path, self.ComputeOutputBasename(spec))
@@ -816,12 +825,15 @@ class AndroidMkWriter(object):
'LOCAL_SHARED_LIBRARIES')
- def WriteTarget(self, spec, configs, deps, link_deps, part_of_all):
+ def WriteTarget(self, spec, configs, deps, link_deps, part_of_all,
+ write_alias_target):
"""Write Makefile code to produce the final target of the gyp spec.
spec, configs: input from gyp.
deps, link_deps: dependency lists; see ComputeDeps()
part_of_all: flag indicating this target is part of 'all'
+ write_alias_target: flag indicating whether to create short aliases for this
+ target
"""
self.WriteLn('### Rules for final target.')
@@ -832,7 +844,7 @@ class AndroidMkWriter(object):
# name 'gyp_all_modules' as the Android build system doesn't allow the use
# of the Make target 'all' and because 'all_modules' is the equivalent of
# the Make target 'all' on Android.
- if part_of_all:
+ if part_of_all and write_alias_target:
self.WriteLn('# Add target alias to "gyp_all_modules" target.')
self.WriteLn('.PHONY: gyp_all_modules')
self.WriteLn('gyp_all_modules: %s' % self.android_module)
@@ -841,7 +853,7 @@ class AndroidMkWriter(object):
# Add an alias from the gyp target name to the Android module name. This
# simplifies manual builds of the target, and is required by the test
# framework.
- if self.target != self.android_module:
+ if self.target != self.android_module and write_alias_target:
self.WriteLn('# Alias gyp target name.')
self.WriteLn('.PHONY: %s' % self.target)
self.WriteLn('%s: %s' % (self.target, self.android_module))
@@ -870,6 +882,8 @@ class AndroidMkWriter(object):
else:
self.WriteLn('LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp')
self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true')
+ if self.toolset == 'target':
+ self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_VAR_PREFIX)')
self.WriteLn()
self.WriteLn('include $(BUILD_SYSTEM)/base_rules.mk')
self.WriteLn()
@@ -877,6 +891,9 @@ class AndroidMkWriter(object):
self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"')
self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
self.WriteLn('\t$(hide) touch $@')
+ if self.toolset == 'target':
+ self.WriteLn()
+ self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX :=')
def WriteList(self, value_list, variable=None, prefix='',
@@ -926,7 +943,7 @@ class AndroidMkWriter(object):
'INPUT_ROOT': expansion,
'INPUT_DIRNAME': dirname,
}
- return path
+ return os.path.normpath(path)
def PerformBuild(data, configurations, params):
@@ -946,6 +963,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
generator_flags = params.get('generator_flags', {})
builddir_name = generator_flags.get('output_dir', 'out')
limit_to_target_all = generator_flags.get('limit_to_target_all', False)
+ write_alias_targets = generator_flags.get('write_alias_targets', True)
android_top_dir = os.environ.get('ANDROID_BUILD_TOP')
assert android_top_dir, '$ANDROID_BUILD_TOP not set; you need to run lunch.'
@@ -1041,7 +1059,8 @@ def GenerateOutput(target_list, target_dicts, data, params):
writer = AndroidMkWriter(android_top_dir)
android_module = writer.Write(qualified_target, relative_target, base_path,
output_file, spec, configs,
- part_of_all=part_of_all)
+ part_of_all=part_of_all,
+ write_alias_target=write_alias_targets)
if android_module in android_modules:
print ('ERROR: Android module names must be unique. The following '
'targets both generate Android module name %s.\n %s\n %s' %
@@ -1057,6 +1076,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
include_list.add(mkfile_rel_path)
root_makefile.write('GYP_CONFIGURATION ?= %s\n' % default_configuration)
+ root_makefile.write('GYP_VAR_PREFIX ?=\n')
# Write out the sorted list of includes.
root_makefile.write('\n')
@@ -1064,6 +1084,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n')
root_makefile.write('\n')
- root_makefile.write(SHARED_FOOTER)
+ if write_alias_targets:
+ root_makefile.write(ALL_MODULES_FOOTER)
root_makefile.close()
diff --git a/deps/gyp/pylib/gyp/generator/eclipse.py b/deps/gyp/pylib/gyp/generator/eclipse.py
index 8d08f57eaa..718eb5d3db 100644
--- a/deps/gyp/pylib/gyp/generator/eclipse.py
+++ b/deps/gyp/pylib/gyp/generator/eclipse.py
@@ -165,7 +165,7 @@ def GetAllIncludeDirectories(target_list, target_dicts,
return all_includes_list
-def GetCompilerPath(target_list, data):
+def GetCompilerPath(target_list, data, options):
"""Determine a command that can be used to invoke the compiler.
Returns:
@@ -173,13 +173,12 @@ def GetCompilerPath(target_list, data):
the compiler from that. Otherwise, see if a compiler was specified via the
CC_target environment variable.
"""
-
# First, see if the compiler is configured in make's settings.
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
make_global_settings_dict = data[build_file].get('make_global_settings', {})
for key, value in make_global_settings_dict:
if key in ['CC', 'CXX']:
- return value
+ return os.path.join(options.toplevel_dir, value)
# Check to see if the compiler was specified as an environment variable.
for key in ['CC_target', 'CC', 'CXX']:
@@ -304,7 +303,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
'GNU C++', 'GNU C', 'Assembly']
- compiler_path = GetCompilerPath(target_list, data)
+ compiler_path = GetCompilerPath(target_list, data, options)
include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
shared_intermediate_dirs, config_name,
params, compiler_path)
diff --git a/deps/gyp/pylib/gyp/generator/make.py b/deps/gyp/pylib/gyp/generator/make.py
index b6f766d554..b88a433d3d 100644
--- a/deps/gyp/pylib/gyp/generator/make.py
+++ b/deps/gyp/pylib/gyp/generator/make.py
@@ -1447,8 +1447,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
libraries = spec.get('libraries')
if libraries:
# Remove duplicate entries
- # Commented out due to https://code.google.com/p/gyp/issues/detail?id=419
- # libraries = gyp.common.uniquer(libraries)
+ libraries = gyp.common.uniquer(libraries)
if self.flavor == 'mac':
libraries = self.xcode_settings.AdjustLibraries(libraries)
self.WriteList(libraries, 'LIBS')
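
Note: re-enabling `gyp.common.uniquer` restores order-preserving deduplication
of `libraries` (first occurrence wins), which gyp issue 419 had previously
forced off. A small illustration with made-up flags:

    import gyp.common

    libraries = ['-lpthread', '-lfoo', '-lbar', '-lfoo']
    # uniquer keeps the first occurrence of each entry, so relative link
    # order is preserved while exact duplicates are dropped:
    assert gyp.common.uniquer(libraries) == ['-lpthread', '-lfoo', '-lbar']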
diff --git a/deps/gyp/pylib/gyp/generator/msvs.py b/deps/gyp/pylib/gyp/generator/msvs.py
index 9dcdab6cfa..64991d4248 100644
--- a/deps/gyp/pylib/gyp/generator/msvs.py
+++ b/deps/gyp/pylib/gyp/generator/msvs.py
@@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import collections
import copy
import ntpath
import os
@@ -21,6 +20,7 @@ import gyp.MSVSUserFile as MSVSUserFile
import gyp.MSVSUtil as MSVSUtil
import gyp.MSVSVersion as MSVSVersion
from gyp.common import GypError
+from gyp.common import OrderedSet
# TODO: Remove once bots are on 2.7, http://crbug.com/241769
def _import_OrderedDict():
@@ -81,6 +81,7 @@ generator_additional_non_configuration_keys = [
'msvs_external_builder_out_dir',
'msvs_external_builder_build_cmd',
'msvs_external_builder_clean_cmd',
+ 'msvs_external_builder_clcompile_cmd',
]
@@ -97,46 +98,6 @@ cached_username = None
cached_domain = None
-# Based on http://code.activestate.com/recipes/576694/.
-class OrderedSet(collections.MutableSet):
- def __init__(self, iterable=None):
- self.end = end = []
- end += [None, end, end] # sentinel node for doubly linked list
- self.map = {} # key --> [key, prev, next]
- if iterable is not None:
- self |= iterable
-
- def __len__(self):
- return len(self.map)
-
- def discard(self, key):
- if key in self.map:
- key, prev, next = self.map.pop(key)
- prev[2] = next
- next[1] = prev
-
- def __contains__(self, key):
- return key in self.map
-
- def add(self, key):
- if key not in self.map:
- end = self.end
- curr = end[1]
- curr[2] = end[1] = self.map[key] = [key, curr, end]
-
- def update(self, iterable):
- for i in iterable:
- if i not in self:
- self.add(i)
-
- def __iter__(self):
- end = self.end
- curr = end[2]
- while curr is not end:
- yield curr[0]
- curr = curr[2]
-
-
# TODO(gspencer): Switch the os.environ calls to be
# win32api.GetDomainName() and win32api.GetUserName() once the
# python version in depot_tools has been updated to work on Vista
@@ -858,17 +819,21 @@ def _GenerateRulesForMSVS(p, output_dir, options, spec,
def _AdjustSourcesForRules(spec, rules, sources, excluded_sources):
# Add outputs generated by each rule (if applicable).
for rule in rules:
- # Done if not processing outputs as sources.
- if int(rule.get('process_outputs_as_sources', False)):
- # Add in the outputs from this rule.
- trigger_files = _FindRuleTriggerFiles(rule, sources)
- for trigger_file in trigger_files:
+ # Add in the outputs from this rule.
+ trigger_files = _FindRuleTriggerFiles(rule, sources)
+ for trigger_file in trigger_files:
+ # Remove trigger_file from excluded_sources to let the rule be triggered
+ # (e.g. rule trigger ax_enums.idl is added to excluded_sources
+ # because it's also in an action's inputs in the same project)
+ excluded_sources.discard(_FixPath(trigger_file))
+ # Done if not processing outputs as sources.
+ if int(rule.get('process_outputs_as_sources', False)):
inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
inputs = OrderedSet(_FixPaths(inputs))
outputs = OrderedSet(_FixPaths(outputs))
inputs.remove(_FixPath(trigger_file))
sources.update(inputs)
- if not spec.get('msvs_external_builder'):
+ if spec['type'] != 'none' and not spec.get('msvs_external_builder'):
excluded_sources.update(inputs)
sources.update(outputs)
@@ -1427,7 +1392,7 @@ def _PrepareListOfSources(spec, generator_flags, gyp_file):
# Add all inputs to sources and excluded sources.
inputs = OrderedSet(inputs)
sources.update(inputs)
- if not spec.get('msvs_external_builder'):
+ if spec['type'] != 'none' and not spec.get('msvs_external_builder'):
excluded_sources.update(inputs)
if int(a.get('process_outputs_as_sources', False)):
_AddNormalizedSources(sources, a.get('outputs', []))
@@ -1479,8 +1444,14 @@ def _AdjustSourcesAndConvertToFilterHierarchy(
# Prune filters with a single child to flatten ugly directory structures
# such as ../../src/modules/module1 etc.
- while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter):
- sources = sources[0].contents
+ if version.UsesVcxproj():
+ while all([isinstance(s, MSVSProject.Filter) for s in sources]) \
+ and len(set([s.name for s in sources])) == 1:
+ assert all([len(s.contents) == 1 for s in sources])
+ sources = [s.contents[0] for s in sources]
+ else:
+ while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter):
+ sources = sources[0].contents
return sources, excluded_sources, excluded_idl
@@ -2584,6 +2555,7 @@ def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
['ProjectGuid', guid],
['Keyword', 'Win32Proj'],
['RootNamespace', namespace],
+ ['IgnoreWarnCompileDuplicatedFilename', 'true'],
]
]
@@ -3226,7 +3198,9 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
def _GetMSBuildExternalBuilderTargets(spec):
"""Return a list of MSBuild targets for external builders.
- Right now, only "Build" and "Clean" targets are generated.
+ The "Build" and "Clean" targets are always generated. If the spec contains
+ 'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also
+ be generated, to support building selected C/C++ files.
Arguments:
spec: The gyp target spec.
@@ -3245,7 +3219,17 @@ def _GetMSBuildExternalBuilderTargets(spec):
clean_target = ['Target', {'Name': 'Clean'}]
clean_target.append(['Exec', {'Command': clean_cmd}])
- return [build_target, clean_target]
+ targets = [build_target, clean_target]
+
+ if spec.get('msvs_external_builder_clcompile_cmd'):
+ clcompile_cmd = _BuildCommandLineForRuleRaw(
+ spec, spec['msvs_external_builder_clcompile_cmd'],
+ False, False, False, False)
+ clcompile_target = ['Target', {'Name': 'ClCompile'}]
+ clcompile_target.append(['Exec', {'Command': clcompile_cmd}])
+ targets.append(clcompile_target)
+
+ return targets
def _GetMSBuildExtensions(props_files_of_rules):
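
Note: with `msvs_external_builder_clcompile_cmd` set, the generated .vcxproj
gains a third external-builder target named ClCompile, which supports building
selected C/C++ files from within Visual Studio. In gyp's nested-list XML
specification form the added target is roughly the following (the command
string is hypothetical):

    # Serializes to:
    #   <Target Name="ClCompile">
    #     <Exec Command="ninja -C out/Debug obj/foo.obj"/>
    #   </Target>
    clcompile_target = ['Target', {'Name': 'ClCompile'},
                        ['Exec', {'Command': 'ninja -C out/Debug obj/foo.obj'}]]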
diff --git a/deps/gyp/pylib/gyp/generator/ninja.py b/deps/gyp/pylib/gyp/generator/ninja.py
index 1ed23f64cc..e3fafb5d43 100644
--- a/deps/gyp/pylib/gyp/generator/ninja.py
+++ b/deps/gyp/pylib/gyp/generator/ninja.py
@@ -2,6 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import collections
import copy
import hashlib
import json
@@ -343,7 +344,7 @@ class NinjaWriter:
return os.path.normpath(os.path.join(obj, self.base_dir, path_dir,
path_basename))
- def WriteCollapsedDependencies(self, name, targets):
+ def WriteCollapsedDependencies(self, name, targets, order_only=None):
"""Given a list of targets, return a path for a single file
representing the result of building all the targets or None.
@@ -351,10 +352,11 @@ class NinjaWriter:
assert targets == filter(None, targets), targets
if len(targets) == 0:
+ assert not order_only
return None
- if len(targets) > 1:
+ if len(targets) > 1 or order_only:
stamp = self.GypPathToUniqueOutput(name + '.stamp')
- targets = self.ninja.build(stamp, 'stamp', targets)
+ targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only)
self.ninja.newline()
return targets[0]
@@ -472,6 +474,8 @@ class NinjaWriter:
else:
print "Warning: Actions/rules writing object files don't work with " \
"multiarch targets, dropping. (target %s)" % spec['target_name']
+ elif self.flavor == 'mac' and len(self.archs) > 1:
+ link_deps = collections.defaultdict(list)
if self.flavor == 'win' and self.target.type == 'static_library':
@@ -580,10 +584,7 @@ class NinjaWriter:
def WriteActions(self, actions, extra_sources, prebuild,
extra_mac_bundle_resources):
# Actions cd into the base directory.
- env = self.GetSortedXcodeEnv()
- if self.flavor == 'win':
- env = self.msvs_settings.GetVSMacroEnv(
- '$!PRODUCT_DIR', config=self.config_name)
+ env = self.GetToolchainEnv()
all_outputs = []
for action in actions:
# First write out a rule for the action.
@@ -616,15 +617,17 @@ class NinjaWriter:
def WriteRules(self, rules, extra_sources, prebuild,
mac_bundle_resources, extra_mac_bundle_resources):
- env = self.GetSortedXcodeEnv()
+ env = self.GetToolchainEnv()
all_outputs = []
for rule in rules:
- # First write out a rule for the rule action.
- name = '%s_%s' % (rule['rule_name'],
- hashlib.md5(self.qualified_target).hexdigest())
# Skip a rule with no action and no inputs.
if 'action' not in rule and not rule.get('rule_sources', []):
continue
+
+ # First write out a rule for the rule action.
+ name = '%s_%s' % (rule['rule_name'],
+ hashlib.md5(self.qualified_target).hexdigest())
+
args = rule['action']
description = self.GenerateDescription(
'RULE',
@@ -653,8 +656,22 @@ class NinjaWriter:
return path.replace('\\', '/')
return path
+ inputs = [self.GypPathToNinja(i, env) for i in rule.get('inputs', [])]
+
+ # If there are n source files matching the rule, and m additional rule
+ # inputs, then adding 'inputs' to each build edge written below will
+ # write m * n inputs. Collapsing reduces this to m + n.
+ sources = rule.get('rule_sources', [])
+ num_inputs = len(inputs)
+ if prebuild:
+ num_inputs += 1
+ if num_inputs > 2 and len(sources) > 2:
+ inputs = [self.WriteCollapsedDependencies(
+ rule['rule_name'], inputs, order_only=prebuild)]
+ prebuild = []
+
# For each source file, write an edge that generates all the outputs.
- for source in rule.get('rule_sources', []):
+ for source in sources:
source = os.path.normpath(source)
dirname, basename = os.path.split(source)
root, ext = os.path.splitext(basename)
@@ -663,9 +680,6 @@ class NinjaWriter:
outputs = [self.ExpandRuleVariables(o, root, dirname,
source, ext, basename)
for o in rule['outputs']]
- inputs = [self.ExpandRuleVariables(i, root, dirname,
- source, ext, basename)
- for i in rule.get('inputs', [])]
if int(rule.get('process_outputs_as_sources', False)):
extra_sources += outputs
@@ -703,10 +717,11 @@ class NinjaWriter:
else:
assert var == None, repr(var)
- inputs = [self.GypPathToNinja(i, env) for i in inputs]
outputs = [self.GypPathToNinja(o, env) for o in outputs]
- extra_bindings.append(('unique_name',
- hashlib.md5(outputs[0]).hexdigest()))
+ if self.flavor == 'win':
+ # WriteNewNinjaRule uses unique_name for creating an rsp file on win.
+ extra_bindings.append(('unique_name',
+ hashlib.md5(outputs[0]).hexdigest()))
self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
implicit=inputs,
order_only=prebuild,
@@ -718,7 +733,7 @@ class NinjaWriter:
def WriteCopies(self, copies, prebuild, mac_bundle_depends):
outputs = []
- env = self.GetSortedXcodeEnv()
+ env = self.GetToolchainEnv()
for copy in copies:
for path in copy['files']:
# Normalize the path so trailing slashes don't confuse us.
@@ -810,6 +825,7 @@ class NinjaWriter:
cflags_objcc = ['$cflags_cc'] + \
self.xcode_settings.GetCflagsObjCC(config_name)
elif self.flavor == 'win':
+ asmflags = self.msvs_settings.GetAsmflags(config_name)
cflags = self.msvs_settings.GetCflags(config_name)
cflags_c = self.msvs_settings.GetCflagsC(config_name)
cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
@@ -844,16 +860,17 @@ class NinjaWriter:
self.WriteVariableList(ninja_file, 'defines',
[Define(d, self.flavor) for d in defines])
if self.flavor == 'win':
+ self.WriteVariableList(ninja_file, 'asmflags',
+ map(self.ExpandSpecial, asmflags))
self.WriteVariableList(ninja_file, 'rcflags',
[QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
for f in self.msvs_settings.GetRcflags(config_name,
self.GypPathToNinja)])
include_dirs = config.get('include_dirs', [])
- env = self.GetSortedXcodeEnv()
+
+ env = self.GetToolchainEnv()
if self.flavor == 'win':
- env = self.msvs_settings.GetVSMacroEnv('$!PRODUCT_DIR',
- config=config_name)
include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs,
config_name)
self.WriteVariableList(ninja_file, 'includes',
@@ -1095,6 +1112,23 @@ class NinjaWriter:
extra_bindings.append(('soname', os.path.split(output)[1]))
extra_bindings.append(('lib',
gyp.common.EncodePOSIXShellArgument(output)))
+ if self.flavor != 'win':
+ link_file_list = output
+ if self.is_mac_bundle:
+ # 'Dependency Framework.framework/Versions/A/Dependency Framework' ->
+ # 'Dependency Framework.framework.rsp'
+ link_file_list = self.xcode_settings.GetWrapperName()
+ if arch:
+ link_file_list += '.' + arch
+ link_file_list += '.rsp'
+ # If an rspfile contains spaces, ninja surrounds the filename with
+ # quotes around it and then passes it to open(), creating a file with
+ # quotes in its name (and when looking for the rsp file, the name
+ # makes it through bash which strips the quotes) :-/
+ link_file_list = link_file_list.replace(' ', '_')
+ extra_bindings.append(
+ ('link_file_list',
+ gyp.common.EncodePOSIXShellArgument(link_file_list)))
if self.flavor == 'win':
extra_bindings.append(('binary', output))
if '/NOENTRY' not in ldflags:
@@ -1196,6 +1230,19 @@ class NinjaWriter:
self.target.bundle = output
return output
+ def GetToolchainEnv(self, additional_settings=None):
+ """Returns the variables toolchain would set for build steps."""
+ env = self.GetSortedXcodeEnv(additional_settings=additional_settings)
+ if self.flavor == 'win':
+ env = self.GetMsvsToolchainEnv(
+ additional_settings=additional_settings)
+ return env
+
+ def GetMsvsToolchainEnv(self, additional_settings=None):
+ """Returns the variables Visual Studio would set for build steps."""
+ return self.msvs_settings.GetVSMacroEnv('$!PRODUCT_DIR',
+ config=self.config_name)
+
def GetSortedXcodeEnv(self, additional_settings=None):
"""Returns the variables Xcode would set for build steps."""
assert self.abs_build_dir
@@ -1559,14 +1606,15 @@ def GetDefaultConcurrentLinks():
hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2**32)))
return min(mem_limit, hard_cap)
elif sys.platform.startswith('linux'):
- with open("/proc/meminfo") as meminfo:
- memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
- for line in meminfo:
- match = memtotal_re.match(line)
- if not match:
- continue
- # Allow 8Gb per link on Linux because Gold is quite memory hungry
- return max(1, int(match.group(1)) / (8 * (2 ** 20)))
+ if os.path.exists("/proc/meminfo"):
+ with open("/proc/meminfo") as meminfo:
+ memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
+ for line in meminfo:
+ match = memtotal_re.match(line)
+ if not match:
+ continue
+ # Allow 8Gb per link on Linux because Gold is quite memory hungry
+ return max(1, int(match.group(1)) / (8 * (2 ** 20)))
return 1
elif sys.platform == 'darwin':
try:
@@ -1842,9 +1890,9 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
sys.executable))
master_ninja.rule(
'asm',
- description='ASM $in',
+ description='ASM $out',
command=('%s gyp-win-tool asm-wrapper '
- '$arch $asm $defines $includes /c /Fo $out $in' %
+ '$arch $asm $defines $includes $asmflags /c /Fo $out $in' %
sys.executable))
if flavor != 'mac' and flavor != 'win':
@@ -1863,32 +1911,33 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
# The resulting string leaves an uninterpolated %{suffix} which
# is used in the final substitution below.
mtime_preserving_solink_base = (
- 'if [ ! -e $lib -o ! -e ${lib}.TOC ]; then '
- '%(solink)s && %(extract_toc)s > ${lib}.TOC; else '
- '%(solink)s && %(extract_toc)s > ${lib}.tmp && '
- 'if ! cmp -s ${lib}.tmp ${lib}.TOC; then mv ${lib}.tmp ${lib}.TOC ; '
+ 'if [ ! -e $lib -o ! -e $lib.TOC ]; then '
+ '%(solink)s && %(extract_toc)s > $lib.TOC; else '
+ '%(solink)s && %(extract_toc)s > $lib.tmp && '
+ 'if ! cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; '
'fi; fi'
% { 'solink':
'$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s',
'extract_toc':
- ('{ readelf -d ${lib} | grep SONAME ; '
- 'nm -gD -f p ${lib} | cut -f1-2 -d\' \'; }')})
+ ('{ readelf -d $lib | grep SONAME ; '
+ 'nm -gD -f p $lib | cut -f1-2 -d\' \'; }')})
master_ninja.rule(
'solink',
description='SOLINK $lib',
restat=True,
- command=(mtime_preserving_solink_base % {
- 'suffix': '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive '
- '$libs'}),
+ command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
+ rspfile='$link_file_list',
+ rspfile_content=
+ '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive $libs',
pool='link_pool')
master_ninja.rule(
'solink_module',
description='SOLINK(module) $lib',
restat=True,
- command=(mtime_preserving_solink_base % {
- 'suffix': '-Wl,--start-group $in $solibs -Wl,--end-group '
- '$libs'}),
+ command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
+ rspfile='$link_file_list',
+ rspfile_content='-Wl,--start-group $in $solibs -Wl,--end-group $libs',
pool='link_pool')
master_ninja.rule(
'link',
@@ -1938,16 +1987,16 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
# comment in the posix section above for details.
solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s'
mtime_preserving_solink_base = (
- 'if [ ! -e $lib -o ! -e ${lib}.TOC ] || '
+ 'if [ ! -e $lib -o ! -e $lib.TOC ] || '
# Always force dependent targets to relink if this library
# reexports something. Handling this correctly would require
# recursive TOC dumping but this is rare in practice, so punt.
'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then '
- '%(solink)s && %(extract_toc)s > ${lib}.TOC; '
+ '%(solink)s && %(extract_toc)s > $lib.TOC; '
'else '
- '%(solink)s && %(extract_toc)s > ${lib}.tmp && '
- 'if ! cmp -s ${lib}.tmp ${lib}.TOC; then '
- 'mv ${lib}.tmp ${lib}.TOC ; '
+ '%(solink)s && %(extract_toc)s > $lib.tmp && '
+ 'if ! cmp -s $lib.tmp $lib.TOC; then '
+ 'mv $lib.tmp $lib.TOC ; '
'fi; '
'fi'
% { 'solink': solink_base,
@@ -1955,34 +2004,42 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
'{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})
- solink_suffix = '$in $solibs $libs$postbuilds'
+
+ solink_suffix = '@$link_file_list$postbuilds'
master_ninja.rule(
'solink',
description='SOLINK $lib, POSTBUILDS',
restat=True,
command=mtime_preserving_solink_base % {'suffix': solink_suffix,
'type': '-shared'},
+ rspfile='$link_file_list',
+ rspfile_content='$in $solibs $libs',
pool='link_pool')
master_ninja.rule(
'solink_notoc',
description='SOLINK $lib, POSTBUILDS',
restat=True,
command=solink_base % {'suffix':solink_suffix, 'type': '-shared'},
+ rspfile='$link_file_list',
+ rspfile_content='$in $solibs $libs',
pool='link_pool')
- solink_module_suffix = '$in $solibs $libs$postbuilds'
master_ninja.rule(
'solink_module',
description='SOLINK(module) $lib, POSTBUILDS',
restat=True,
- command=mtime_preserving_solink_base % {'suffix': solink_module_suffix,
+ command=mtime_preserving_solink_base % {'suffix': solink_suffix,
'type': '-bundle'},
+ rspfile='$link_file_list',
+ rspfile_content='$in $solibs $libs',
pool='link_pool')
master_ninja.rule(
'solink_module_notoc',
description='SOLINK(module) $lib, POSTBUILDS',
restat=True,
- command=solink_base % {'suffix': solink_module_suffix, 'type': '-bundle'},
+ command=solink_base % {'suffix': solink_suffix, 'type': '-bundle'},
+ rspfile='$link_file_list',
+ rspfile_content='$in $solibs $libs',
pool='link_pool')
master_ninja.rule(
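
Note: the solink rules now funnel their inputs through a ninja response file
instead of the raw command line, avoiding argv length limits on links with very
many inputs. `$link_file_list` is written by ninja at build time and the linker
reads it back via `@file`; spaces are replaced in the rsp filename because of
the quoting issue described in the comment above. Stripped of the
mtime-preserving TOC wrapper, the pattern is roughly (writer API per gyp's
bundled ninja_syntax):

    import sys
    import ninja_syntax  # gyp's bundled ninja file writer

    w = ninja_syntax.Writer(sys.stdout)
    w.rule('solink',
           description='SOLINK $lib',
           restat=True,
           command='$ld -shared $ldflags -o $lib -Wl,-soname=$soname '
                   '@$link_file_list',
           rspfile='$link_file_list',
           rspfile_content='-Wl,--whole-archive $in $solibs '
                           '-Wl,--no-whole-archive $libs',
           pool='link_pool')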
diff --git a/deps/gyp/pylib/gyp/generator/xcode.py b/deps/gyp/pylib/gyp/generator/xcode.py
index 331e78baaa..7972459363 100644
--- a/deps/gyp/pylib/gyp/generator/xcode.py
+++ b/deps/gyp/pylib/gyp/generator/xcode.py
@@ -5,6 +5,7 @@
import filecmp
import gyp.common
import gyp.xcodeproj_file
+import gyp.xcode_ninja
import errno
import os
import sys
@@ -575,6 +576,12 @@ def PerformBuild(data, configurations, params):
def GenerateOutput(target_list, target_dicts, data, params):
+ # Optionally configure each spec to use ninja as the external builder.
+ ninja_wrapper = params.get('flavor') == 'ninja'
+ if ninja_wrapper:
+ (target_list, target_dicts, data) = \
+ gyp.xcode_ninja.CreateWrapper(target_list, target_dicts, data, params)
+
options = params['options']
generator_flags = params.get('generator_flags', {})
parallel_builds = generator_flags.get('xcode_parallel_builds', True)
@@ -703,11 +710,16 @@ def GenerateOutput(target_list, target_dicts, data, params):
# and is made a dependency of this target. This way the work is done
# before the dependency checks for what should be recompiled.
support_xct = None
- if type != 'none' and (spec_actions or spec_rules):
+ # The Xcode "issues" don't affect xcode-ninja builds, since the dependency
+ # logic all happens in ninja. Don't bother creating the extra targets in
+ # that case.
+ if type != 'none' and (spec_actions or spec_rules) and not ninja_wrapper:
support_xccl = CreateXCConfigurationList(configuration_names);
+ support_target_suffix = generator_flags.get(
+ 'support_target_suffix', ' Support')
support_target_properties = {
'buildConfigurationList': support_xccl,
- 'name': target_name + ' Support',
+ 'name': target_name + support_target_suffix,
}
if target_product_name:
support_target_properties['productName'] = \
diff --git a/deps/gyp/pylib/gyp/input.py b/deps/gyp/pylib/gyp/input.py
index 6472912db8..dc143d9dfc 100644
--- a/deps/gyp/pylib/gyp/input.py
+++ b/deps/gyp/pylib/gyp/input.py
@@ -10,8 +10,8 @@ from compiler.ast import Module
from compiler.ast import Node
from compiler.ast import Stmt
import compiler
-import copy
import gyp.common
+import gyp.simple_copy
import multiprocessing
import optparse
import os.path
@@ -24,6 +24,7 @@ import threading
import time
import traceback
from gyp.common import GypError
+from gyp.common import OrderedSet
# A list of types that are treated as linkable.
@@ -45,18 +46,31 @@ base_path_sections = [
'outputs',
'sources',
]
-path_sections = []
-
-is_path_section_charset = set('=+?!')
-is_path_section_match_re = re.compile('_(dir|file|path)s?$')
+path_sections = set()
def IsPathSection(section):
- # If section ends in one of these characters, it's applied to a section
+ # If section ends in one of the '=+?!' characters, it's applied to a section
# without the trailing characters. '/' is notably absent from this list,
# because there's no way for a regular expression to be treated as a path.
- while section[-1:] in is_path_section_charset:
+ while section[-1:] in '=+?!':
section = section[:-1]
- return section in path_sections or is_path_section_match_re.search(section)
+
+ if section in path_sections:
+ return True
+
+ # Sections matching the regexp '_(dir|file|path)s?$' are also
+ # considered PathSections. Using manual string matching since that
+ # is much faster than the regexp and this can be called hundreds of
+ # thousands of times so micro performance matters.
+ if "_" in section:
+ tail = section[-6:]
+ if tail[-1] == 's':
+ tail = tail[:-1]
+ if tail[-5:] in ('_file', '_path'):
+ return True
+ return tail[-4:] == '_dir'
+
+ return False
# base_non_configuration_keys is a list of key names that belong in the target
# itself and should not be propagated into its configurations. It is merged
@@ -200,7 +214,7 @@ def CheckNode(node, keypath):
"': " + repr(node)
-def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
+def LoadOneBuildFile(build_file_path, data, aux_data, includes,
is_target, check):
if build_file_path in data:
return data[build_file_path]
@@ -224,7 +238,7 @@ def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
raise
- if not isinstance(build_file_data, dict):
+ if type(build_file_data) is not dict:
raise GypError("%s does not evaluate to a dictionary." % build_file_path)
data[build_file_path] = build_file_data
@@ -236,10 +250,10 @@ def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
try:
if is_target:
LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
- aux_data, variables, includes, check)
+ aux_data, includes, check)
else:
LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
- aux_data, variables, None, check)
+ aux_data, None, check)
except Exception, e:
gyp.common.ExceptionAppend(e,
'while reading includes of ' + build_file_path)
@@ -249,7 +263,7 @@ def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
- variables, includes, check):
+ includes, check):
includes_list = []
if includes != None:
includes_list.extend(includes)
@@ -273,30 +287,27 @@ def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)
MergeDicts(subdict,
- LoadOneBuildFile(include, data, aux_data, variables, None,
- False, check),
+ LoadOneBuildFile(include, data, aux_data, None, False, check),
subdict_path, include)
# Recurse into subdictionaries.
for k, v in subdict.iteritems():
- if v.__class__ == dict:
- LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, variables,
+ if type(v) is dict:
+ LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data,
None, check)
- elif v.__class__ == list:
- LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, variables,
+ elif type(v) is list:
+ LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data,
check)
# This recurses into lists so that it can look for dicts.
-def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data,
- variables, check):
+def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check):
for item in sublist:
- if item.__class__ == dict:
+ if type(item) is dict:
LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
- variables, None, check)
- elif item.__class__ == list:
- LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data,
- variables, check)
+ None, check)
+ elif type(item) is list:
+ LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check)
# Processes toolsets in all the targets. This recurses into condition entries
# since they can contain toolsets as well.
@@ -320,7 +331,7 @@ def ProcessToolsetsInDict(data):
if len(toolsets) > 0:
# Optimization: only do copies if more than one toolset is specified.
for build in toolsets[1:]:
- new_target = copy.deepcopy(target)
+ new_target = gyp.simple_copy.deepcopy(target)
new_target['toolset'] = build
new_target_list.append(new_target)
target['toolset'] = toolsets[0]
@@ -328,7 +339,7 @@ def ProcessToolsetsInDict(data):
data['targets'] = new_target_list
if 'conditions' in data:
for condition in data['conditions']:
- if isinstance(condition, list):
+ if type(condition) is list:
for condition_dict in condition[1:]:
ProcessToolsetsInDict(condition_dict)
@@ -358,7 +369,7 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
gyp.DebugOutput(gyp.DEBUG_INCLUDES,
"Loading Target Build File '%s'", build_file_path)
- build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables,
+ build_file_data = LoadOneBuildFile(build_file_path, data, aux_data,
includes, True, check)
# Store DEPTH for later use in generators.
@@ -408,7 +419,8 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
# copy with the target-specific data merged into it as the replacement
# target dict.
old_target_dict = build_file_data['targets'][index]
- new_target_dict = copy.deepcopy(build_file_data['target_defaults'])
+ new_target_dict = gyp.simple_copy.deepcopy(
+ build_file_data['target_defaults'])
MergeDicts(new_target_dict, old_target_dict,
build_file_path, build_file_path)
build_file_data['targets'][index] = new_target_dict
@@ -636,15 +648,26 @@ def FindEnclosingBracketGroup(input_str):
return (-1, -1)
-canonical_int_re = re.compile('(0|-?[1-9][0-9]*)$')
-
-
def IsStrCanonicalInt(string):
"""Returns True if |string| is in its canonical integer form.
The canonical form is such that str(int(string)) == string.
"""
- return isinstance(string, str) and canonical_int_re.match(string)
+ if type(string) is str:
+ # This function is called a lot so for maximum performance, avoid
+ # involving regexps which would otherwise make the code much
+ # shorter. Regexps would need twice the time of this function.
+ if string:
+ if string == "0":
+ return True
+ if string[0] == "-":
+ string = string[1:]
+ if not string:
+ return False
+ if '1' <= string[0] <= '9':
+ return string.isdigit()
+
+ return False
# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
@@ -677,7 +700,7 @@ cached_command_results = {}
def FixupPlatformCommand(cmd):
if sys.platform == 'win32':
- if type(cmd) == list:
+ if type(cmd) is list:
cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
else:
cmd = re.sub('^cat ', 'type ', cmd)
@@ -767,7 +790,7 @@ def ExpandVariables(input, phase, variables, build_file):
# contexts. However, since filtration has no chance to run on <|(),
# this seems like the only obvious way to give them access to filters.
if file_list:
- processed_variables = copy.deepcopy(variables)
+ processed_variables = gyp.simple_copy.deepcopy(variables)
ProcessListFiltersInDict(contents, processed_variables)
# Recurse to expand variables in the contents
contents = ExpandVariables(contents, phase,
@@ -804,7 +827,7 @@ def ExpandVariables(input, phase, variables, build_file):
# This works around actions/rules which have more inputs than will
# fit on the command line.
if file_list:
- if type(contents) == list:
+ if type(contents) is list:
contents_list = contents
else:
contents_list = contents.split(' ')
@@ -837,17 +860,15 @@ def ExpandVariables(input, phase, variables, build_file):
use_shell = False
# Check for a cached value to avoid executing commands, or generating
- # file lists more than once.
- # TODO(http://code.google.com/p/gyp/issues/detail?id=112): It is
- # possible that the command being invoked depends on the current
- # directory. For that case the syntax needs to be extended so that the
- # directory is also used in cache_key (it becomes a tuple).
+ # file lists more than once. The cache key contains the command to be
+ # run as well as the directory to run it from, to account for commands
+ # that depend on their current directory.
# TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
# someone could author a set of GYP files where each time the command
# is invoked it produces different output by design. When the need
# arises, the syntax should be extended to support no caching off a
# command's output so it is run every time.
- cache_key = str(contents)
+ cache_key = (str(contents), build_file_dir)
cached_value = cached_command_results.get(cache_key, None)
if cached_value is None:
gyp.DebugOutput(gyp.DEBUG_VARIABLES,
@@ -925,10 +946,9 @@ def ExpandVariables(input, phase, variables, build_file):
else:
replacement = variables[contents]
- if isinstance(replacement, list):
+ if type(replacement) is list:
for item in replacement:
- if (not contents[-1] == '/' and
- not isinstance(item, str) and not isinstance(item, int)):
+ if not contents[-1] == '/' and type(item) not in (str, int):
raise GypError('Variable ' + contents +
' must expand to a string or list of strings; ' +
'list contains a ' +
@@ -938,8 +958,7 @@ def ExpandVariables(input, phase, variables, build_file):
# with conditions sections.
ProcessVariablesAndConditionsInList(replacement, phase, variables,
build_file)
- elif not isinstance(replacement, str) and \
- not isinstance(replacement, int):
+ elif type(replacement) not in (str, int):
raise GypError('Variable ' + contents +
' must expand to a string or list of strings; ' +
'found a ' + replacement.__class__.__name__)
@@ -948,7 +967,7 @@ def ExpandVariables(input, phase, variables, build_file):
# Expanding in list context. It's guaranteed that there's only one
# replacement to do in |input_str| and that it's this replacement. See
# above.
- if isinstance(replacement, list):
+ if type(replacement) is list:
# If it's already a list, make a copy.
output = replacement[:]
else:
@@ -957,7 +976,7 @@ def ExpandVariables(input, phase, variables, build_file):
else:
# Expanding in string context.
encoded_replacement = ''
- if isinstance(replacement, list):
+ if type(replacement) is list:
# When expanding a list into string context, turn the list items
# into a string in a way that will work with a subprocess call.
#
@@ -979,8 +998,8 @@ def ExpandVariables(input, phase, variables, build_file):
# expanding local variables (variables defined in the same
# variables block as this one).
gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
- if isinstance(output, list):
- if output and isinstance(output[0], list):
+ if type(output) is list:
+ if output and type(output[0]) is list:
# Leave output alone if it's a list of lists.
# We don't want such lists to be stringified.
pass
@@ -994,7 +1013,7 @@ def ExpandVariables(input, phase, variables, build_file):
output = ExpandVariables(output, phase, variables, build_file)
# Convert all strings that are canonically-represented integers into integers.
- if isinstance(output, list):
+ if type(output) is list:
for index in xrange(0, len(output)):
if IsStrCanonicalInt(output[index]):
output[index] = int(output[index])
@@ -1003,6 +1022,57 @@ def ExpandVariables(input, phase, variables, build_file):
return output
+# The same condition is often evaluated over and over again so it
+# makes sense to cache as much as possible between evaluations.
+cached_conditions_asts = {}
+
+def EvalCondition(condition, conditions_key, phase, variables, build_file):
+ """Returns the dict that should be used or None if the result was
+ that nothing should be used."""
+ if type(condition) is not list:
+ raise GypError(conditions_key + ' must be a list')
+ if len(condition) != 2 and len(condition) != 3:
+ # It's possible that condition[0] won't work in which case this
+ # attempt will raise its own IndexError. That's probably fine.
+ raise GypError(conditions_key + ' ' + condition[0] +
+ ' must be length 2 or 3, not ' + str(len(condition)))
+
+ [cond_expr, true_dict] = condition[0:2]
+ false_dict = None
+ if len(condition) == 3:
+ false_dict = condition[2]
+
+ # Do expansions on the condition itself. Since the condition can naturally
+ # contain variable references without needing to resort to GYP expansion
+ # syntax, this is of dubious value for variables, but someone might want to
+ # use a command expansion directly inside a condition.
+ cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
+ build_file)
+ if type(cond_expr_expanded) not in (str, int):
+ raise ValueError, \
+ 'Variable expansion in this context permits str and int ' + \
+ 'only, found ' + cond_expr_expanded.__class__.__name__
+
+ try:
+ if cond_expr_expanded in cached_conditions_asts:
+ ast_code = cached_conditions_asts[cond_expr_expanded]
+ else:
+ ast_code = compile(cond_expr_expanded, '<string>', 'eval')
+ cached_conditions_asts[cond_expr_expanded] = ast_code
+ if eval(ast_code, {'__builtins__': None}, variables):
+ return true_dict
+ return false_dict
+ except SyntaxError, e:
+ syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
+ 'at character %d.' %
+ (str(e.args[0]), e.text, build_file, e.offset),
+ e.filename, e.lineno, e.offset, e.text)
+ raise syntax_error
+ except NameError, e:
+ gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
+ (cond_expr_expanded, build_file))
+ raise GypError(e)
+
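# A minimal standalone sketch of the caching pattern introduced above: compile
# each condition expression once, then reuse the code object on every later
# evaluation. The names below are illustrative and not part of gyp.
_ast_cache = {}

def eval_condition_cached(expr, variables):
  # Reuse the compiled code object when the same expression recurs.
  code = _ast_cache.get(expr)
  if code is None:
    code = compile(expr, '<string>', 'eval')
    _ast_cache[expr] = code
  # Evaluate with builtins disabled, mirroring the gyp code above.
  return eval(code, {'__builtins__': None}, variables)

# eval_condition_cached('OS == "mac"', {'OS': 'mac'})  # -> True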
def ProcessConditionsInDict(the_dict, phase, variables, build_file):
# Process a 'conditions' or 'target_conditions' section in the_dict,
@@ -1038,48 +1108,8 @@ def ProcessConditionsInDict(the_dict, phase, variables, build_file):
del the_dict[conditions_key]
for condition in conditions_list:
- if not isinstance(condition, list):
- raise GypError(conditions_key + ' must be a list')
- if len(condition) != 2 and len(condition) != 3:
- # It's possible that condition[0] won't work in which case this
- # attempt will raise its own IndexError. That's probably fine.
- raise GypError(conditions_key + ' ' + condition[0] +
- ' must be length 2 or 3, not ' + str(len(condition)))
-
- [cond_expr, true_dict] = condition[0:2]
- false_dict = None
- if len(condition) == 3:
- false_dict = condition[2]
-
- # Do expansions on the condition itself. Since the conditon can naturally
- # contain variable references without needing to resort to GYP expansion
- # syntax, this is of dubious value for variables, but someone might want to
- # use a command expansion directly inside a condition.
- cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
- build_file)
- if not isinstance(cond_expr_expanded, str) and \
- not isinstance(cond_expr_expanded, int):
- raise ValueError, \
- 'Variable expansion in this context permits str and int ' + \
- 'only, found ' + expanded.__class__.__name__
-
- try:
- ast_code = compile(cond_expr_expanded, '<string>', 'eval')
-
- if eval(ast_code, {'__builtins__': None}, variables):
- merge_dict = true_dict
- else:
- merge_dict = false_dict
- except SyntaxError, e:
- syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
- 'at character %d.' %
- (str(e.args[0]), e.text, build_file, e.offset),
- e.filename, e.lineno, e.offset, e.text)
- raise syntax_error
- except NameError, e:
- gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
- (cond_expr_expanded, build_file))
- raise GypError(e)
+ merge_dict = EvalCondition(condition, conditions_key, phase, variables,
+ build_file)
if merge_dict != None:
    # Expand variables and nested conditionals in the merge_dict before
@@ -1094,8 +1124,7 @@ def LoadAutomaticVariablesFromDict(variables, the_dict):
# Any keys with plain string values in the_dict become automatic variables.
# The variable name is the key name with a "_" character prepended.
for key, value in the_dict.iteritems():
- if isinstance(value, str) or isinstance(value, int) or \
- isinstance(value, list):
+ if type(value) in (str, int, list):
variables['_' + key] = value
@@ -1108,8 +1137,7 @@ def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
# (it could be a list or it could be parentless because it is a root dict),
# the_dict_key will be None.
for key, value in the_dict.get('variables', {}).iteritems():
- if not isinstance(value, str) and not isinstance(value, int) and \
- not isinstance(value, list):
+ if type(value) not in (str, int, list):
continue
if key.endswith('%'):
@@ -1162,9 +1190,9 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
for key, value in the_dict.iteritems():
# Skip "variables", which was already processed if present.
- if key != 'variables' and isinstance(value, str):
+ if key != 'variables' and type(value) is str:
expanded = ExpandVariables(value, phase, variables, build_file)
- if not isinstance(expanded, str) and not isinstance(expanded, int):
+ if type(expanded) not in (str, int):
raise ValueError, \
'Variable expansion in this context permits str and int ' + \
'only, found ' + expanded.__class__.__name__ + ' for ' + key
@@ -1221,21 +1249,21 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
for key, value in the_dict.iteritems():
# Skip "variables" and string values, which were already processed if
# present.
- if key == 'variables' or isinstance(value, str):
+ if key == 'variables' or type(value) is str:
continue
- if isinstance(value, dict):
+ if type(value) is dict:
# Pass a copy of the variables dict so that subdicts can't influence
# parents.
ProcessVariablesAndConditionsInDict(value, phase, variables,
build_file, key)
- elif isinstance(value, list):
+ elif type(value) is list:
# The list itself can't influence the variables dict, and
# ProcessVariablesAndConditionsInList will make copies of the variables
# dict if it needs to pass it to something that can influence it. No
# copy is necessary here.
ProcessVariablesAndConditionsInList(value, phase, variables,
build_file)
- elif not isinstance(value, int):
+ elif type(value) is not int:
raise TypeError, 'Unknown type ' + value.__class__.__name__ + \
' for ' + key
@@ -1246,17 +1274,17 @@ def ProcessVariablesAndConditionsInList(the_list, phase, variables,
index = 0
while index < len(the_list):
item = the_list[index]
- if isinstance(item, dict):
+ if type(item) is dict:
# Make a copy of the variables dict so that it won't influence anything
# outside of its own scope.
ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
- elif isinstance(item, list):
+ elif type(item) is list:
ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
- elif isinstance(item, str):
+ elif type(item) is str:
expanded = ExpandVariables(item, phase, variables, build_file)
- if isinstance(expanded, str) or isinstance(expanded, int):
+ if type(expanded) in (str, int):
the_list[index] = expanded
- elif isinstance(expanded, list):
+ elif type(expanded) is list:
the_list[index:index+1] = expanded
index += len(expanded)
@@ -1268,7 +1296,7 @@ def ProcessVariablesAndConditionsInList(the_list, phase, variables,
'Variable expansion in this context permits strings and ' + \
'lists only, found ' + expanded.__class__.__name__ + ' at ' + \
index
- elif not isinstance(item, int):
+ elif type(item) is not int:
raise TypeError, 'Unknown type ' + item.__class__.__name__ + \
' at index ' + index
index = index + 1
@@ -1443,6 +1471,20 @@ def RemoveSelfDependencies(targets):
target_dict[dependency_key] = Filter(dependencies, target_name)
+def RemoveLinkDependenciesFromNoneTargets(targets):
+ """Remove dependencies having the 'link_dependency' attribute from the 'none'
+ targets."""
+ for target_name, target_dict in targets.iteritems():
+ for dependency_key in dependency_sections:
+ dependencies = target_dict.get(dependency_key, [])
+ if dependencies:
+ for t in dependencies:
+ if target_dict.get('type', None) == 'none':
+ if targets[t].get('variables', {}).get('link_dependency', 0):
+ target_dict[dependency_key] = \
+ Filter(target_dict[dependency_key], t)
+
+
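# A hedged sketch of the helper above, simplified to a single 'dependencies'
# key; the real code walks every section in dependency_sections. The logic
# mirrors the patch, the names are illustrative.
def remove_link_deps_from_none_targets(targets):
  for target_dict in targets.values():
    if target_dict.get('type') != 'none':
      continue
    deps = target_dict.get('dependencies', [])
    target_dict['dependencies'] = [
        t for t in deps
        if not targets[t].get('variables', {}).get('link_dependency', 0)]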
class DependencyGraphNode(object):
"""
@@ -1468,7 +1510,7 @@ class DependencyGraphNode(object):
# are the "ref" attributes of DependencyGraphNodes. Every target will
# appear in flat_list after all of its dependencies, and before all of its
# dependents.
- flat_list = []
+ flat_list = OrderedSet()
# in_degree_zeros is the list of DependencyGraphNodes that have no
# dependencies not in flat_list. Initially, it is a copy of the children
@@ -1482,12 +1524,15 @@ class DependencyGraphNode(object):
# as work progresses, so that the next node to process from the list can
# always be accessed at a consistent position.
node = in_degree_zeros.pop()
- flat_list.append(node.ref)
+ flat_list.add(node.ref)
# Look at dependents of the node just added to flat_list. Some of them
# may now belong in in_degree_zeros.
for node_dependent in node.dependents:
is_in_degree_zero = True
+ # TODO: We want to check through the
+ # node_dependent.dependencies list but if it's long and we
+ # always start at the beginning, then we get O(n^2) behaviour.
for node_dependent_dependency in node_dependent.dependencies:
if not node_dependent_dependency.ref in flat_list:
          # The dependent has one or more dependencies not in flat_list. There
@@ -1503,7 +1548,7 @@ class DependencyGraphNode(object):
# iteration of the outer loop.
in_degree_zeros.add(node_dependent)
- return flat_list
+ return list(flat_list)
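# The switch from a list to an OrderedSet above matters because of the
# 'ref in flat_list' membership test inside the loop: a list makes that O(n),
# a set makes it O(1). A minimal sketch of such a container (gyp presumably
# provides its own OrderedSet in gyp.common; this one is illustrative):
class MiniOrderedSet(object):
  def __init__(self):
    self._order = []     # preserves insertion order for iteration
    self._seen = set()   # gives O(1) membership tests
  def add(self, item):
    if item not in self._seen:
      self._order.append(item)
      self._seen.add(item)
  def __contains__(self, item):
    return item in self._seen
  def __iter__(self):
    return iter(self._order)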
def FindCycles(self, path=None):
"""
@@ -1589,21 +1634,26 @@ class DependencyGraphNode(object):
return self._AddImportedDependencies(targets, dependencies)
def DeepDependencies(self, dependencies=None):
- """Returns a list of all of a target's dependencies, recursively."""
- if dependencies == None:
- dependencies = []
+ """Returns an OrderedSet of all of a target's dependencies, recursively."""
+ if dependencies is None:
+      # Using an OrderedSet for ordered iteration and fast "is it
+      # already added" membership checks.
+ dependencies = OrderedSet()
for dependency in self.dependencies:
# Check for None, corresponding to the root node.
- if dependency.ref != None and dependency.ref not in dependencies:
- dependencies.append(dependency.ref)
+ if dependency.ref is None:
+ continue
+ if dependency.ref not in dependencies:
+ dependencies.add(dependency.ref)
dependency.DeepDependencies(dependencies)
return dependencies
def _LinkDependenciesInternal(self, targets, include_shared_libraries,
dependencies=None, initial=True):
- """Returns a list of dependency targets that are linked into this target.
+ """Returns an OrderedSet of dependency targets that are linked
+ into this target.
This function has a split personality, depending on the setting of
|initial|. Outside callers should always leave |initial| at its default
@@ -1616,11 +1666,13 @@ class DependencyGraphNode(object):
If |include_shared_libraries| is False, the resulting dependencies will not
include shared_library targets that are linked into this target.
"""
- if dependencies == None:
- dependencies = []
+ if dependencies is None:
+      # Using an OrderedSet for ordered iteration and fast "is it
+      # already added" membership checks.
+ dependencies = OrderedSet()
# Check for None, corresponding to the root node.
- if self.ref == None:
+ if self.ref is None:
return dependencies
# It's kind of sucky that |targets| has to be passed into this function,
@@ -1648,8 +1700,7 @@ class DependencyGraphNode(object):
# Don't traverse 'none' targets if explicitly excluded.
if (target_type == 'none' and
not targets[self.ref].get('dependencies_traverse', True)):
- if self.ref not in dependencies:
- dependencies.append(self.ref)
+ dependencies.add(self.ref)
return dependencies
# Executables and loadable modules are already fully and finally linked.
@@ -1671,7 +1722,7 @@ class DependencyGraphNode(object):
# The target is linkable, add it to the list of link dependencies.
if self.ref not in dependencies:
- dependencies.append(self.ref)
+ dependencies.add(self.ref)
if initial or not is_linkable:
# If this is a subsequent target and it's linkable, don't look any
# further for linkable dependencies, as they'll already be linked into
@@ -1966,25 +2017,25 @@ def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
hashable_to_set = set(x for x in to if is_hashable(x))
for item in fro:
singleton = False
- if isinstance(item, str) or isinstance(item, int):
+ if type(item) in (str, int):
# The cheap and easy case.
if is_paths:
to_item = MakePathRelative(to_file, fro_file, item)
else:
to_item = item
- if not isinstance(item, str) or not item.startswith('-'):
+ if not (type(item) is str and item.startswith('-')):
# Any string that doesn't begin with a "-" is a singleton - it can
# only appear once in a list, to be enforced by the list merge append
# or prepend.
singleton = True
- elif isinstance(item, dict):
+ elif type(item) is dict:
# Make a copy of the dictionary, continuing to look for paths to fix.
# The other intelligent aspects of merge processing won't apply because
# item is being merged into an empty dict.
to_item = {}
MergeDicts(to_item, item, to_file, fro_file)
- elif isinstance(item, list):
+ elif type(item) is list:
# Recurse, making a copy of the list. If the list contains any
# descendant dicts, path fixing will occur. Note that here, custom
# values for is_paths and append are dropped; those are only to be
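# A hedged sketch of the singleton rule discussed above, reduced to plain
# string items (the real MergeLists also handles dicts, lists and path
# fixing). Strings not starting with '-' may appear only once.
def merge_string_lists(to, fro):
  seen = set(x for x in to if isinstance(x, str))
  for item in fro:
    is_singleton = not item.startswith('-')
    if is_singleton and item in seen:
      continue  # e.g. the same source file listed by two .gypi includes
    to.append(item)
    if is_singleton:
      seen.add(item)

# merge_string_lists(['a.cc'], ['a.cc', '-Wall'])  # -> ['a.cc', '-Wall']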
@@ -2030,10 +2081,10 @@ def MergeDicts(to, fro, to_file, fro_file):
# modified.
if k in to:
bad_merge = False
- if isinstance(v, str) or isinstance(v, int):
- if not (isinstance(to[k], str) or isinstance(to[k], int)):
+ if type(v) in (str, int):
+ if type(to[k]) not in (str, int):
bad_merge = True
- elif v.__class__ != to[k].__class__:
+ elif type(v) is not type(to[k]):
bad_merge = True
if bad_merge:
@@ -2041,19 +2092,19 @@ def MergeDicts(to, fro, to_file, fro_file):
'Attempt to merge dict value of type ' + v.__class__.__name__ + \
' into incompatible type ' + to[k].__class__.__name__ + \
' for key ' + k
- if isinstance(v, str) or isinstance(v, int):
+ if type(v) in (str, int):
# Overwrite the existing value, if any. Cheap and easy.
is_path = IsPathSection(k)
if is_path:
to[k] = MakePathRelative(to_file, fro_file, v)
else:
to[k] = v
- elif isinstance(v, dict):
+ elif type(v) is dict:
# Recurse, guaranteeing copies will be made of objects that require it.
if not k in to:
to[k] = {}
MergeDicts(to[k], v, to_file, fro_file)
- elif isinstance(v, list):
+ elif type(v) is list:
# Lists in dicts can be merged with different policies, depending on
# how the key in the "from" dict (k, the from-key) is written.
#
@@ -2096,7 +2147,7 @@ def MergeDicts(to, fro, to_file, fro_file):
# If the key ends in "?", the list will only be merged if it doesn't
# already exist.
continue
- if not isinstance(to[list_base], list):
+ elif type(to[list_base]) is not list:
# This may not have been checked above if merging in a list with an
# extension character.
raise TypeError, \
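# A hedged sketch of the from-key suffixes referenced above, assuming gyp's
# documented behaviour: '=' replaces the destination list, '?' sets it only
# when absent, '+' prepends, and a bare key appends.
def merge_list_key(to, fro_key, fro_value):
  ext = fro_key[-1] if fro_key[-1] in '=?+' else ''
  base = fro_key[:-1] if ext else fro_key
  if ext == '=':
    to[base] = list(fro_value)
  elif ext == '?':
    to.setdefault(base, list(fro_value))
  elif ext == '+':
    to[base] = list(fro_value) + to.get(base, [])
  else:
    to.setdefault(base, []).extend(fro_value)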
@@ -2157,43 +2208,39 @@ def SetUpConfigurations(target, target_dict):
if not 'configurations' in target_dict:
target_dict['configurations'] = {'Default': {}}
if not 'default_configuration' in target_dict:
- concrete = [i for i in target_dict['configurations'].iterkeys()
- if not target_dict['configurations'][i].get('abstract')]
+ concrete = [i for (i, config) in target_dict['configurations'].iteritems()
+ if not config.get('abstract')]
target_dict['default_configuration'] = sorted(concrete)[0]
- for configuration in target_dict['configurations'].keys():
- old_configuration_dict = target_dict['configurations'][configuration]
+ merged_configurations = {}
+ configs = target_dict['configurations']
+ for (configuration, old_configuration_dict) in configs.iteritems():
# Skip abstract configurations (saves work only).
if old_configuration_dict.get('abstract'):
continue
# Configurations inherit (most) settings from the enclosing target scope.
# Get the inheritance relationship right by making a copy of the target
# dict.
- new_configuration_dict = copy.deepcopy(target_dict)
-
- # Take out the bits that don't belong in a "configurations" section.
- # Since configuration setup is done before conditional, exclude, and rules
- # processing, be careful with handling of the suffix characters used in
- # those phases.
- delete_keys = []
- for key in new_configuration_dict:
+ new_configuration_dict = {}
+ for (key, target_val) in target_dict.iteritems():
key_ext = key[-1:]
if key_ext in key_suffixes:
key_base = key[:-1]
else:
key_base = key
- if key_base in non_configuration_keys:
- delete_keys.append(key)
-
- for key in delete_keys:
- del new_configuration_dict[key]
+ if not key_base in non_configuration_keys:
+ new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val)
# Merge in configuration (with all its parents first).
MergeConfigWithInheritance(new_configuration_dict, build_file,
target_dict, configuration, [])
- # Put the new result back into the target dict as a configuration.
- target_dict['configurations'][configuration] = new_configuration_dict
+ merged_configurations[configuration] = new_configuration_dict
+
+  # Put the new configurations back into the target dict.
+ for configuration in merged_configurations.keys():
+ target_dict['configurations'][configuration] = (
+ merged_configurations[configuration])
# Now drop all the abstract ones.
for configuration in target_dict['configurations'].keys():
@@ -2264,7 +2311,7 @@ def ProcessListFiltersInDict(name, the_dict):
if operation != '!' and operation != '/':
continue
- if not isinstance(value, list):
+ if type(value) is not list:
raise ValueError, name + ' key ' + key + ' must be list, not ' + \
value.__class__.__name__
@@ -2276,7 +2323,7 @@ def ProcessListFiltersInDict(name, the_dict):
del_lists.append(key)
continue
- if not isinstance(the_dict[list_key], list):
+ if type(the_dict[list_key]) is not list:
value = the_dict[list_key]
raise ValueError, name + ' key ' + list_key + \
' must be list, not ' + \
@@ -2378,17 +2425,17 @@ def ProcessListFiltersInDict(name, the_dict):
# Now recurse into subdicts and lists that may contain dicts.
for key, value in the_dict.iteritems():
- if isinstance(value, dict):
+ if type(value) is dict:
ProcessListFiltersInDict(key, value)
- elif isinstance(value, list):
+ elif type(value) is list:
ProcessListFiltersInList(key, value)
def ProcessListFiltersInList(name, the_list):
for item in the_list:
- if isinstance(item, dict):
+ if type(item) is dict:
ProcessListFiltersInDict(name, item)
- elif isinstance(item, list):
+ elif type(item) is list:
ProcessListFiltersInList(name, item)
@@ -2506,7 +2553,7 @@ def ValidateRunAsInTarget(target, target_dict, build_file):
run_as = target_dict.get('run_as')
if not run_as:
return
- if not isinstance(run_as, dict):
+ if type(run_as) is not dict:
raise GypError("The 'run_as' in target %s from file %s should be a "
"dictionary." %
(target_name, build_file))
@@ -2515,17 +2562,17 @@ def ValidateRunAsInTarget(target, target_dict, build_file):
raise GypError("The 'run_as' in target %s from file %s must have an "
"'action' section." %
(target_name, build_file))
- if not isinstance(action, list):
+ if type(action) is not list:
raise GypError("The 'action' for 'run_as' in target %s from file %s "
"must be a list." %
(target_name, build_file))
working_directory = run_as.get('working_directory')
- if working_directory and not isinstance(working_directory, str):
+ if working_directory and type(working_directory) is not str:
raise GypError("The 'working_directory' for 'run_as' in target %s "
"in file %s should be a string." %
(target_name, build_file))
environment = run_as.get('environment')
- if environment and not isinstance(environment, dict):
+ if environment and type(environment) is not dict:
raise GypError("The 'environment' for 'run_as' in target %s "
"in file %s should be a dictionary." %
(target_name, build_file))
@@ -2555,17 +2602,17 @@ def TurnIntIntoStrInDict(the_dict):
# Use items instead of iteritems because there's no need to try to look at
# reinserted keys and their associated values.
for k, v in the_dict.items():
- if isinstance(v, int):
+ if type(v) is int:
v = str(v)
the_dict[k] = v
- elif isinstance(v, dict):
+ elif type(v) is dict:
TurnIntIntoStrInDict(v)
- elif isinstance(v, list):
+ elif type(v) is list:
TurnIntIntoStrInList(v)
- if isinstance(k, int):
- the_dict[str(k)] = v
+ if type(k) is int:
del the_dict[k]
+ the_dict[str(k)] = v
def TurnIntIntoStrInList(the_list):
@@ -2573,11 +2620,11 @@ def TurnIntIntoStrInList(the_list):
"""
for index in xrange(0, len(the_list)):
item = the_list[index]
- if isinstance(item, int):
+ if type(item) is int:
the_list[index] = str(item)
- elif isinstance(item, dict):
+ elif type(item) is dict:
TurnIntIntoStrInDict(item)
- elif isinstance(item, list):
+ elif type(item) is list:
TurnIntIntoStrInList(item)
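# An illustrative round trip for the two helpers above:
#   d = {1: 2, 'deps': [3, {'n': 4}]}
#   TurnIntIntoStrInDict(d)
#   d == {'1': '2', 'deps': ['3', {'n': '4'}]}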
@@ -2647,8 +2694,8 @@ def SetGeneratorGlobals(generator_input_info):
# Set up path_sections and non_configuration_keys with the default data plus
# the generator-specific data.
global path_sections
- path_sections = base_path_sections[:]
- path_sections.extend(generator_input_info['path_sections'])
+ path_sections = set(base_path_sections)
+ path_sections.update(generator_input_info['path_sections'])
global non_configuration_keys
non_configuration_keys = base_non_configuration_keys[:]
@@ -2707,6 +2754,10 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
# Expand dependencies specified as build_file:*.
ExpandWildcardDependencies(targets, data)
+ # Remove all dependencies marked as 'link_dependency' from the targets of
+ # type 'none'.
+ RemoveLinkDependenciesFromNoneTargets(targets)
+
# Apply exclude (!) and regex (/) list filters only for dependency_sections.
for target_name, target_dict in targets.iteritems():
tmp_dict = {}
diff --git a/deps/gyp/pylib/gyp/mac_tool.py b/deps/gyp/pylib/gyp/mac_tool.py
index c61a3ef60b..821e291e9f 100755
--- a/deps/gyp/pylib/gyp/mac_tool.py
+++ b/deps/gyp/pylib/gyp/mac_tool.py
@@ -219,10 +219,14 @@ class MacTool(object):
"""Calls libtool and filters out '/path/to/libtool: file: foo.o has no
symbols'."""
libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
+ libtool_re5 = re.compile(
+ r'^.*libtool: warning for library: ' +
+ r'.* the table of contents is empty ' +
+ r'\(no object file members in the library define global symbols\)$')
libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE)
_, err = libtoolout.communicate()
for line in err.splitlines():
- if not libtool_re.match(line):
+ if not libtool_re.match(line) and not libtool_re5.match(line):
print >>sys.stderr, line
return libtoolout.returncode
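# A condensed sketch of the stderr filtering above, under the assumption that
# any line matching one of the noise patterns should be suppressed; the names
# here are illustrative.
import re

NOISE_PATTERNS = [
    re.compile(r'^.*libtool: file: .* has no symbols$'),
    re.compile(r'^.*libtool: warning for library: .* the table of contents is '
               r'empty \(no object file members in the library define global '
               r'symbols\)$'),
]

def filter_libtool_noise(stderr_text):
  return [line for line in stderr_text.splitlines()
          if not any(p.match(line) for p in NOISE_PATTERNS)]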
diff --git a/deps/gyp/pylib/gyp/msvs_emulation.py b/deps/gyp/pylib/gyp/msvs_emulation.py
index 709ba305d2..63593a424d 100644
--- a/deps/gyp/pylib/gyp/msvs_emulation.py
+++ b/deps/gyp/pylib/gyp/msvs_emulation.py
@@ -345,6 +345,15 @@ class MsvsSettings(object):
else:
return None
+ def GetAsmflags(self, config):
+ """Returns the flags that need to be added to ml invocations."""
+ config = self._TargetConfig(config)
+ asmflags = []
+ safeseh = self._Setting(('MASM', 'UseSafeExceptionHandlers'), config)
+ if safeseh == 'true':
+ asmflags.append('/safeseh')
+ return asmflags
+
def GetCflags(self, config):
"""Returns the flags that need to be added to .c and .cc compilations."""
config = self._TargetConfig(config)
@@ -379,6 +388,8 @@ class MsvsSettings(object):
map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
cl('EnablePREfast', map={'true': '/analyze'})
cl('AdditionalOptions', prefix='')
+ cl('EnableEnhancedInstructionSet',
+ map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32'}, prefix='/arch:')
cflags.extend(['/FI' + f for f in self._Setting(
('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
if self.vs_version.short_name in ('2013', '2013e'):
@@ -388,12 +399,6 @@ class MsvsSettings(object):
cflags = filter(lambda x: not x.startswith('/MP'), cflags)
return cflags
- def GetPrecompiledHeader(self, config, gyp_to_build_path):
- """Returns an object that handles the generation of precompiled header
- build steps."""
- config = self._TargetConfig(config)
- return _PchHelper(self, config, gyp_to_build_path)
-
def _GetPchFlags(self, config, extension):
"""Get the flags to be added to the cflags for precompiled header support.
"""
@@ -532,6 +537,7 @@ class MsvsSettings(object):
ld('Profile', map={'true': '/PROFILE'})
ld('LargeAddressAware',
map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE')
+ ld('ImageHasSafeExceptionHandlers', map={'true': '/SAFESEH'})
# TODO(scottmg): This should sort of be somewhere else (not really a flag).
ld('AdditionalDependencies', prefix='')
@@ -787,7 +793,7 @@ class PrecompiledHeader(object):
def GetObjDependencies(self, sources, objs, arch):
"""Given a list of sources files and the corresponding object files,
returns a list of the pch files that should be depended upon. The
- additional wrapping in the return value is for interface compatability
+ additional wrapping in the return value is for interface compatibility
with make.py on Mac, and xcode_emulation.py."""
assert arch is None
if not self._PchHeader():
diff --git a/deps/gyp/pylib/gyp/win_tool.py b/deps/gyp/pylib/gyp/win_tool.py
index 5872f0750c..44e1b0760b 100755
--- a/deps/gyp/pylib/gyp/win_tool.py
+++ b/deps/gyp/pylib/gyp/win_tool.py
@@ -13,6 +13,7 @@ import os
import re
import shutil
import subprocess
+import stat
import string
import sys
@@ -89,9 +90,19 @@ class WinTool(object):
"""Emulation of rm -rf out && cp -af in out."""
if os.path.exists(dest):
if os.path.isdir(dest):
- shutil.rmtree(dest)
+ def _on_error(fn, path, excinfo):
+ # The operation failed, possibly because the file is set to
+ # read-only. If that's why, make it writable and try the op again.
+ if not os.access(path, os.W_OK):
+ os.chmod(path, stat.S_IWRITE)
+ fn(path)
+ shutil.rmtree(dest, onerror=_on_error)
else:
+ if not os.access(dest, os.W_OK):
+ # Attempt to make the file writable before deleting it.
+ os.chmod(dest, stat.S_IWRITE)
os.unlink(dest)
+
if os.path.isdir(source):
shutil.copytree(source, dest)
else:
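# The onerror hook above is the usual recipe for deleting read-only files on
# Windows; a self-contained sketch of the same pattern:
import os, shutil, stat

def rmtree_force(path):
  def _on_error(fn, p, excinfo):
    # Clear the read-only bit and retry the operation that just failed.
    os.chmod(p, stat.S_IWRITE)
    fn(p)
  shutil.rmtree(path, onerror=_on_error)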
@@ -237,10 +248,11 @@ class WinTool(object):
# Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
# objidl.idl
lines = out.splitlines()
- prefix = 'Processing '
- processing = set(os.path.basename(x) for x in lines if x.startswith(prefix))
+ prefixes = ('Processing ', '64 bit Processing ')
+ processing = set(os.path.basename(x)
+ for x in lines if x.startswith(prefixes))
for line in lines:
- if not line.startswith(prefix) and line not in processing:
+ if not line.startswith(prefixes) and line not in processing:
print line
return popen.returncode
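# The change above leans on str.startswith accepting a tuple of prefixes;
# a tiny demonstration with illustrative data:
lines = ['Processing foo.idl', '64 bit Processing foo.idl', 'real warning']
prefixes = ('Processing ', '64 bit Processing ')
noise = set(l.split()[-1] for l in lines if l.startswith(prefixes))
kept = [l for l in lines if not l.startswith(prefixes) and l not in noise]
# kept == ['real warning']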
@@ -288,5 +300,16 @@ class WinTool(object):
dir = dir[0] if dir else None
return subprocess.call(args, shell=True, env=env, cwd=dir)
+ def ExecClCompile(self, project_dir, selected_files):
+ """Executed by msvs-ninja projects when the 'ClCompile' target is used to
+ build selected C/C++ files."""
+ project_dir = os.path.relpath(project_dir, BASE_DIR)
+ selected_files = selected_files.split(';')
+ ninja_targets = [os.path.join(project_dir, filename) + '^^'
+ for filename in selected_files]
+ cmd = ['ninja.exe']
+ cmd.extend(ninja_targets)
+ return subprocess.call(cmd, shell=True, cwd=BASE_DIR)
+
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
diff --git a/deps/gyp/pylib/gyp/xcode_emulation.py b/deps/gyp/pylib/gyp/xcode_emulation.py
index 30f27d5832..859cd5a937 100644
--- a/deps/gyp/pylib/gyp/xcode_emulation.py
+++ b/deps/gyp/pylib/gyp/xcode_emulation.py
@@ -18,6 +18,129 @@ import sys
import tempfile
from gyp.common import GypError
+# Populated lazily by XcodeVersion, for efficiency, and to fix an issue when
+# "xcodebuild" is called too quickly (it has been found to return incorrect
+# version number).
+XCODE_VERSION_CACHE = None
+
+# Populated lazily by GetXcodeArchsDefault, to an |XcodeArchsDefault| instance
+# corresponding to the installed version of Xcode.
+XCODE_ARCHS_DEFAULT_CACHE = None
+
+
+def XcodeArchsVariableMapping(archs, archs_including_64_bit=None):
+ """Constructs a dictionary with expansion for $(ARCHS_STANDARD) variable,
+ and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT)."""
+ mapping = {'$(ARCHS_STANDARD)': archs}
+ if archs_including_64_bit:
+ mapping['$(ARCHS_STANDARD_INCLUDING_64_BIT)'] = archs_including_64_bit
+ return mapping
+
+class XcodeArchsDefault(object):
+ """A class to resolve ARCHS variable from xcode_settings, resolving Xcode
+ macros and implementing filtering by VALID_ARCHS. The expansion of macros
+ depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and
+ on the version of Xcode.
+ """
+
+ # Match variable like $(ARCHS_STANDARD).
+ variable_pattern = re.compile(r'\$\([a-zA-Z_][a-zA-Z0-9_]*\)$')
+
+ def __init__(self, default, mac, iphonesimulator, iphoneos):
+ self._default = (default,)
+ self._archs = {'mac': mac, 'ios': iphoneos, 'iossim': iphonesimulator}
+
+ def _VariableMapping(self, sdkroot):
+ """Returns the dictionary of variable mapping depending on the SDKROOT."""
+ sdkroot = sdkroot.lower()
+ if 'iphoneos' in sdkroot:
+ return self._archs['ios']
+ elif 'iphonesimulator' in sdkroot:
+ return self._archs['iossim']
+ else:
+ return self._archs['mac']
+
+ def _ExpandArchs(self, archs, sdkroot):
+ """Expands variables references in ARCHS, and remove duplicates."""
+ variable_mapping = self._VariableMapping(sdkroot)
+ expanded_archs = []
+ for arch in archs:
+ if self.variable_pattern.match(arch):
+ variable = arch
+ try:
+ variable_expansion = variable_mapping[variable]
+ for arch in variable_expansion:
+ if arch not in expanded_archs:
+ expanded_archs.append(arch)
+ except KeyError as e:
+ print 'Warning: Ignoring unsupported variable "%s".' % variable
+ elif arch not in expanded_archs:
+ expanded_archs.append(arch)
+ return expanded_archs
+
+ def ActiveArchs(self, archs, valid_archs, sdkroot):
+ """Expands variables references in ARCHS, and filter by VALID_ARCHS if it
+ is defined (if not set, Xcode accept any value in ARCHS, otherwise, only
+ values present in VALID_ARCHS are kept)."""
+ expanded_archs = self._ExpandArchs(archs or self._default, sdkroot or '')
+ if valid_archs:
+ filtered_archs = []
+ for arch in expanded_archs:
+ if arch in valid_archs:
+ filtered_archs.append(arch)
+ expanded_archs = filtered_archs
+ return expanded_archs
+
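# A usage sketch for the class above, with values borrowed from the Xcode 5.1
# branch below; the result shown is what the methods derive from these inputs.
resolver = XcodeArchsDefault(
    '$(ARCHS_STANDARD)',
    XcodeArchsVariableMapping(['x86_64']),                    # mac
    XcodeArchsVariableMapping(['i386', 'x86_64']),            # iphonesimulator
    XcodeArchsVariableMapping(['armv7', 'armv7s', 'arm64']))  # iphoneos
# resolver.ActiveArchs(['$(ARCHS_STANDARD)'], ['armv7'], 'iphoneos')
#   expands to ['armv7', 'armv7s', 'arm64'], then VALID_ARCHS filters it
#   down to ['armv7'].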
+
+def GetXcodeArchsDefault():
+ """Returns the |XcodeArchsDefault| object to use to expand ARCHS for the
+ installed version of Xcode. The default values used by Xcode for ARCHS
+ and the expansion of the variables depends on the version of Xcode used.
+
+ For all version anterior to Xcode 5.0 or posterior to Xcode 5.1 included
+ uses $(ARCHS_STANDARD) if ARCHS is unset, while Xcode 5.0 to 5.0.2 uses
+ $(ARCHS_STANDARD_INCLUDING_64_BIT). This variable was added to Xcode 5.0
+ and deprecated with Xcode 5.1.
+
+ For "macosx" SDKROOT, all version starting with Xcode 5.0 includes 64-bit
+ architecture as part of $(ARCHS_STANDARD) and default to only building it.
+
+ For "iphoneos" and "iphonesimulator" SDKROOT, 64-bit architectures are part
+ of $(ARCHS_STANDARD_INCLUDING_64_BIT) from Xcode 5.0. From Xcode 5.1, they
+ are also part of $(ARCHS_STANDARD).
+
+ All thoses rules are coded in the construction of the |XcodeArchsDefault|
+ object to use depending on the version of Xcode detected. The object is
+ for performance reason."""
+ global XCODE_ARCHS_DEFAULT_CACHE
+ if XCODE_ARCHS_DEFAULT_CACHE:
+ return XCODE_ARCHS_DEFAULT_CACHE
+ xcode_version, _ = XcodeVersion()
+ if xcode_version < '0500':
+ XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
+ '$(ARCHS_STANDARD)',
+ XcodeArchsVariableMapping(['i386']),
+ XcodeArchsVariableMapping(['i386']),
+ XcodeArchsVariableMapping(['armv7']))
+ elif xcode_version < '0510':
+ XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
+ '$(ARCHS_STANDARD_INCLUDING_64_BIT)',
+ XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
+ XcodeArchsVariableMapping(['i386'], ['i386', 'x86_64']),
+ XcodeArchsVariableMapping(
+ ['armv7', 'armv7s'],
+ ['armv7', 'armv7s', 'arm64']))
+ else:
+ XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
+ '$(ARCHS_STANDARD)',
+ XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
+ XcodeArchsVariableMapping(['i386', 'x86_64'], ['i386', 'x86_64']),
+ XcodeArchsVariableMapping(
+ ['armv7', 'armv7s', 'arm64'],
+ ['armv7', 'armv7s', 'arm64']))
+ return XCODE_ARCHS_DEFAULT_CACHE
+
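# The version checks above rely on the zero-padded strings produced by
# XcodeVersion() comparing in the right order as plain strings:
#   '0463' < '0500' < '0502' < '0510'   (Xcode 4.6.3 < 5.0 < 5.0.2 < 5.1)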
+
class XcodeSettings(object):
"""A class that understands the gyp 'xcode_settings' object."""
@@ -34,10 +157,6 @@ class XcodeSettings(object):
# cached at class-level for efficiency.
_codesigning_key_cache = {}
- # Populated lazily by _XcodeVersion. Shared by all XcodeSettings, so cached
- # at class-level for efficiency.
- _xcode_version_cache = ()
-
def __init__(self, spec):
self.spec = spec
@@ -267,17 +386,12 @@ class XcodeSettings(object):
def GetActiveArchs(self, configname):
"""Returns the architectures this target should be built for."""
- # TODO: Look at VALID_ARCHS, ONLY_ACTIVE_ARCH; possibly set
- # CURRENT_ARCH / NATIVE_ARCH env vars?
- return self.xcode_settings[configname].get('ARCHS', [self._DefaultArch()])
-
- def _GetStdout(self, cmdlist):
- job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
- out = job.communicate()[0]
- if job.returncode != 0:
- sys.stderr.write(out + '\n')
- raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
- return out.rstrip('\n')
+ config_settings = self.xcode_settings[configname]
+ xcode_archs_default = GetXcodeArchsDefault()
+ return xcode_archs_default.ActiveArchs(
+ config_settings.get('ARCHS'),
+ config_settings.get('VALID_ARCHS'),
+ config_settings.get('SDKROOT'))
def _GetSdkVersionInfoItem(self, sdk, infoitem):
# xcodebuild requires Xcode and can't run on Command Line Tools-only
@@ -285,7 +399,7 @@ class XcodeSettings(object):
# Since the CLT has no SDK paths anyway, returning None is the
# most sensible route and should still do the right thing.
try:
- return self._GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem])
+ return GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem])
except:
pass
@@ -396,7 +510,8 @@ class XcodeSettings(object):
if arch is not None:
archs = [arch]
else:
- archs = self._Settings().get('ARCHS', [self._DefaultArch()])
+ assert self.configname
+ archs = self.GetActiveArchs(self.configname)
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
self._WarnUnimplemented('ARCHS')
@@ -653,7 +768,8 @@ class XcodeSettings(object):
if arch is not None:
archs = [arch]
else:
- archs = self._Settings().get('ARCHS', [self._DefaultArch()])
+ assert self.configname
+ archs = self.GetActiveArchs(self.configname)
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
self._WarnUnimplemented('ARCHS')
@@ -678,6 +794,8 @@ class XcodeSettings(object):
for directory in framework_dirs:
ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
+ self._Appendf(ldflags, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
+
self.configname = None
return ldflags
@@ -874,65 +992,7 @@ class XcodeSettings(object):
return libraries
def _BuildMachineOSBuild(self):
- return self._GetStdout(['sw_vers', '-buildVersion'])
-
- # This method ported from the logic in Homebrew's CLT version check
- def _CLTVersion(self):
- # pkgutil output looks like
- # package-id: com.apple.pkg.CLTools_Executables
- # version: 5.0.1.0.1.1382131676
- # volume: /
- # location: /
- # install-time: 1382544035
- # groups: com.apple.FindSystemFiles.pkg-group com.apple.DevToolsBoth.pkg-group com.apple.DevToolsNonRelocatableShared.pkg-group
- STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo"
- FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
- MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
-
- regex = re.compile('version: (?P<version>.+)')
- for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
- try:
- output = self._GetStdout(['/usr/sbin/pkgutil', '--pkg-info', key])
- return re.search(regex, output).groupdict()['version']
- except:
- continue
-
- def _XcodeVersion(self):
- # `xcodebuild -version` output looks like
- # Xcode 4.6.3
- # Build version 4H1503
- # or like
- # Xcode 3.2.6
- # Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
- # BuildVersion: 10M2518
- # Convert that to '0463', '4H1503'.
- if len(XcodeSettings._xcode_version_cache) == 0:
- try:
- version_list = self._GetStdout(['xcodebuild', '-version']).splitlines()
- # In some circumstances xcodebuild exits 0 but doesn't return
- # the right results; for example, a user on 10.7 or 10.8 with
- # a bogus path set via xcode-select
- # In that case this may be a CLT-only install so fall back to
- # checking that version.
- if len(version_list) < 2:
- raise GypError, "xcodebuild returned unexpected results"
- except:
- version = self._CLTVersion()
- if version:
- version = re.match('(\d\.\d\.?\d*)', version).groups()[0]
- else:
- raise GypError, "No Xcode or CLT version detected!"
- # The CLT has no build information, so we return an empty string.
- version_list = [version, '']
- version = version_list[0]
- build = version_list[-1]
- # Be careful to convert "4.2" to "0420":
- version = version.split()[-1].replace('.', '')
- version = (version + '0' * (3 - len(version))).zfill(4)
- if build:
- build = build.split()[-1]
- XcodeSettings._xcode_version_cache = (version, build)
- return XcodeSettings._xcode_version_cache
+ return GetStdout(['sw_vers', '-buildVersion'])
def _XcodeIOSDeviceFamily(self, configname):
family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1')
@@ -944,7 +1004,7 @@ class XcodeSettings(object):
cache = {}
cache['BuildMachineOSBuild'] = self._BuildMachineOSBuild()
- xcode, xcode_build = self._XcodeVersion()
+ xcode, xcode_build = XcodeVersion()
cache['DTXcode'] = xcode
cache['DTXcodeBuild'] = xcode_build
@@ -982,14 +1042,15 @@ class XcodeSettings(object):
project, then the environment variable was empty. Starting with this
version, Xcode uses the name of the newest SDK installed.
"""
- if self._XcodeVersion() < '0500':
+ xcode_version, xcode_build = XcodeVersion()
+ if xcode_version < '0500':
return ''
default_sdk_path = self._XcodeSdkPath('')
default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
if default_sdk_root:
return default_sdk_root
try:
- all_sdks = self._GetStdout(['xcodebuild', '-showsdks'])
+ all_sdks = GetStdout(['xcodebuild', '-showsdks'])
except:
# If xcodebuild fails, there will be no valid SDKs
return ''
@@ -1002,28 +1063,6 @@ class XcodeSettings(object):
return sdk_root
return ''
- def _DefaultArch(self):
- # For Mac projects, Xcode changed the default value used when ARCHS is not
- # set from "i386" to "x86_64".
- #
- # For iOS projects, if ARCHS is unset, it defaults to "armv7 armv7s" when
- # building for a device, and the simulator binaries are always build for
- # "i386".
- #
- # For new projects, ARCHS is set to $(ARCHS_STANDARD_INCLUDING_64_BIT),
- # which correspond to "armv7 armv7s arm64", and when building the simulator
- # the architecture is either "i386" or "x86_64" depending on the simulated
- # device (respectively 32-bit or 64-bit device).
- #
- # Since the value returned by this function is only used when ARCHS is not
- # set, then on iOS we return "i386", as the default xcode project generator
- # does not set ARCHS if it is not set in the .gyp file.
- if self.isIOS:
- return 'i386'
- version, build = self._XcodeVersion()
- if version >= '0500':
- return 'x86_64'
- return 'i386'
class MacPrefixHeader(object):
"""A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
@@ -1131,6 +1170,81 @@ class MacPrefixHeader(object):
]
+def XcodeVersion():
+ """Returns a tuple of version and build version of installed Xcode."""
+ # `xcodebuild -version` output looks like
+ # Xcode 4.6.3
+ # Build version 4H1503
+ # or like
+ # Xcode 3.2.6
+ # Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
+ # BuildVersion: 10M2518
+ # Convert that to '0463', '4H1503'.
+ global XCODE_VERSION_CACHE
+ if XCODE_VERSION_CACHE:
+ return XCODE_VERSION_CACHE
+ try:
+ version_list = GetStdout(['xcodebuild', '-version']).splitlines()
+ # In some circumstances xcodebuild exits 0 but doesn't return
+ # the right results; for example, a user on 10.7 or 10.8 with
+    # a bogus path set via xcode-select.
+    # In that case this may be a CLT-only install, so fall back to
+    # checking the CLT version.
+ if len(version_list) < 2:
+ raise GypError, "xcodebuild returned unexpected results"
+ except:
+ version = CLTVersion()
+ if version:
+ version = re.match('(\d\.\d\.?\d*)', version).groups()[0]
+ else:
+ raise GypError, "No Xcode or CLT version detected!"
+ # The CLT has no build information, so we return an empty string.
+ version_list = [version, '']
+ version = version_list[0]
+ build = version_list[-1]
+ # Be careful to convert "4.2" to "0420":
+ version = version.split()[-1].replace('.', '')
+ version = (version + '0' * (3 - len(version))).zfill(4)
+ if build:
+ build = build.split()[-1]
+ XCODE_VERSION_CACHE = (version, build)
+ return XCODE_VERSION_CACHE
+
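# A standalone sketch of the "4.2" -> "0420" normalization performed above:
def normalize_xcode_version(version_line):
  version = version_line.split()[-1].replace('.', '')  # 'Xcode 4.6.3' -> '463'
  return (version + '0' * (3 - len(version))).zfill(4)

# normalize_xcode_version('Xcode 4.2')    # -> '0420'
# normalize_xcode_version('Xcode 4.6.3')  # -> '0463'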
+
+# This function is ported from the logic in Homebrew's CLT version check.
+def CLTVersion():
+ """Returns the version of command-line tools from pkgutil."""
+ # pkgutil output looks like
+ # package-id: com.apple.pkg.CLTools_Executables
+ # version: 5.0.1.0.1.1382131676
+ # volume: /
+ # location: /
+ # install-time: 1382544035
+ # groups: com.apple.FindSystemFiles.pkg-group com.apple.DevToolsBoth.pkg-group com.apple.DevToolsNonRelocatableShared.pkg-group
+ STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo"
+ FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
+ MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
+
+ regex = re.compile('version: (?P<version>.+)')
+ for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
+ try:
+ output = GetStdout(['/usr/sbin/pkgutil', '--pkg-info', key])
+ return re.search(regex, output).groupdict()['version']
+ except:
+ continue
+
+
+def GetStdout(cmdlist):
+ """Returns the content of standard output returned by invoking |cmdlist|.
+ Raises |GypError| if the command return with a non-zero return code."""
+ job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
+ out = job.communicate()[0]
+ if job.returncode != 0:
+ sys.stderr.write(out + '\n')
+ raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
+ return out.rstrip('\n')
+
+
def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
"""Merges the global xcode_settings dictionary into each configuration of the
target represented by spec. For keys that are both in the global and the local
@@ -1310,6 +1424,11 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
install_name_base = xcode_settings.GetInstallNameBase()
if install_name_base:
env['DYLIB_INSTALL_NAME_BASE'] = install_name_base
+ if XcodeVersion() >= '0500' and not env.get('SDKROOT'):
+ sdk_root = xcode_settings._SdkRoot(configuration)
+ if not sdk_root:
+ sdk_root = xcode_settings._XcodeSdkPath('')
+ env['SDKROOT'] = sdk_root
if not additional_settings:
additional_settings = {}
@@ -1420,16 +1539,16 @@ def _HasIOSTarget(targets):
def _AddIOSDeviceConfigurations(targets):
"""Clone all targets and append -iphoneos to the name. Configure these targets
- to build for iOS devices."""
- for target_dict in targets.values():
- for config_name in target_dict['configurations'].keys():
- config = target_dict['configurations'][config_name]
- new_config_name = config_name + '-iphoneos'
- new_config_dict = copy.deepcopy(config)
- if target_dict['toolset'] == 'target':
- new_config_dict['xcode_settings']['ARCHS'] = ['armv7']
- new_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
- target_dict['configurations'][new_config_name] = new_config_dict
+ to build for iOS devices and use correct architectures for those builds."""
+ for target_dict in targets.itervalues():
+ toolset = target_dict['toolset']
+ configs = target_dict['configurations']
+ for config_name, config_dict in dict(configs).iteritems():
+ iphoneos_config_dict = copy.deepcopy(config_dict)
+ configs[config_name + '-iphoneos'] = iphoneos_config_dict
+ configs[config_name + '-iphonesimulator'] = config_dict
+ if toolset == 'target':
+ iphoneos_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
return targets
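# A hedged sketch of the cloning performed above: each configuration gains a
# deep-copied '-iphoneos' variant (with SDKROOT forced for the 'target'
# toolset) while the '-iphonesimulator' name aliases the original dict.
import copy

configs = {'Release': {'xcode_settings': {}}}
for name, cfg in dict(configs).items():   # snapshot; we mutate while looping
  ios_cfg = copy.deepcopy(cfg)
  configs[name + '-iphoneos'] = ios_cfg
  configs[name + '-iphonesimulator'] = cfg
  ios_cfg['xcode_settings']['SDKROOT'] = 'iphoneos'
# configs now has keys: Release, Release-iphoneos, Release-iphonesimulator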
def CloneConfigurationForDeviceAndEmulator(target_dicts):