Diffstat (limited to 'deps/gyp/pylib/gyp')
-rw-r--r--  deps/gyp/pylib/gyp/MSVSNew.py                        |  340
-rw-r--r--  deps/gyp/pylib/gyp/MSVSProject.py                    |  208
-rw-r--r--  deps/gyp/pylib/gyp/MSVSSettings.py                   | 1096
-rwxr-xr-x  deps/gyp/pylib/gyp/MSVSSettings_test.py              | 1483
-rw-r--r--  deps/gyp/pylib/gyp/MSVSToolFile.py                   |   58
-rw-r--r--  deps/gyp/pylib/gyp/MSVSUserFile.py                   |  147
-rw-r--r--  deps/gyp/pylib/gyp/MSVSUtil.py                       |  270
-rw-r--r--  deps/gyp/pylib/gyp/MSVSVersion.py                    |  443
-rwxr-xr-x  deps/gyp/pylib/gyp/__init__.py                       |  548
-rw-r--r--  deps/gyp/pylib/gyp/common.py                         |  608
-rwxr-xr-x  deps/gyp/pylib/gyp/common_test.py                    |   72
-rw-r--r--  deps/gyp/pylib/gyp/easy_xml.py                       |  157
-rwxr-xr-x  deps/gyp/pylib/gyp/easy_xml_test.py                  |  103
-rwxr-xr-x  deps/gyp/pylib/gyp/flock_tool.py                     |   54
-rw-r--r--  deps/gyp/pylib/gyp/generator/__init__.py             |    0
-rw-r--r--  deps/gyp/pylib/gyp/generator/analyzer.py             |  741
-rw-r--r--  deps/gyp/pylib/gyp/generator/cmake.py                | 1221
-rw-r--r--  deps/gyp/pylib/gyp/generator/dump_dependency_json.py |   99
-rw-r--r--  deps/gyp/pylib/gyp/generator/eclipse.py              |  425
-rw-r--r--  deps/gyp/pylib/gyp/generator/gypd.py                 |   94
-rw-r--r--  deps/gyp/pylib/gyp/generator/gypsh.py                |   56
-rw-r--r--  deps/gyp/pylib/gyp/generator/make.py                 | 2219
-rw-r--r--  deps/gyp/pylib/gyp/generator/msvs.py                 | 3453
-rwxr-xr-x  deps/gyp/pylib/gyp/generator/msvs_test.py            |   37
-rw-r--r--  deps/gyp/pylib/gyp/generator/ninja.py                | 2410
-rw-r--r--  deps/gyp/pylib/gyp/generator/ninja_test.py           |   47
-rw-r--r--  deps/gyp/pylib/gyp/generator/xcode.py                | 1300
-rw-r--r--  deps/gyp/pylib/gyp/generator/xcode_test.py           |   23
-rw-r--r--  deps/gyp/pylib/gyp/input.py                          | 2894
-rwxr-xr-x  deps/gyp/pylib/gyp/input_test.py                     |   90
-rwxr-xr-x  deps/gyp/pylib/gyp/mac_tool.py                       |  610
-rw-r--r--  deps/gyp/pylib/gyp/msvs_emulation.py                 | 1087
-rw-r--r--  deps/gyp/pylib/gyp/ninja_syntax.py                   |  160
-rw-r--r--  deps/gyp/pylib/gyp/ordered_dict.py                   |  289
-rw-r--r--  deps/gyp/pylib/gyp/simple_copy.py                    |   46
-rwxr-xr-x  deps/gyp/pylib/gyp/win_tool.py                       |  314
-rw-r--r--  deps/gyp/pylib/gyp/xcode_emulation.py                | 1627
-rw-r--r--  deps/gyp/pylib/gyp/xcode_ninja.py                    |  270
-rw-r--r--  deps/gyp/pylib/gyp/xcodeproj_file.py                 | 2927
-rw-r--r--  deps/gyp/pylib/gyp/xml_fix.py                        |   69
40 files changed, 0 insertions, 28095 deletions
diff --git a/deps/gyp/pylib/gyp/MSVSNew.py b/deps/gyp/pylib/gyp/MSVSNew.py
deleted file mode 100644
index 593f0e5b0b..0000000000
--- a/deps/gyp/pylib/gyp/MSVSNew.py
+++ /dev/null
@@ -1,340 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""New implementation of Visual Studio project generation."""
-
-import os
-import random
-
-import gyp.common
-
-# hashlib is supplied as of Python 2.5 as the replacement interface for md5
-# and other secure hashes. In 2.6, md5 is deprecated. Import hashlib if
-# available, avoiding a deprecation warning under 2.6. Import md5 otherwise,
-# preserving 2.4 compatibility.
-try:
- import hashlib
- _new_md5 = hashlib.md5
-except ImportError:
- import md5
- _new_md5 = md5.new
-
-
-# Initialize random number generator
-random.seed()
-
-# GUIDs for project types
-ENTRY_TYPE_GUIDS = {
- 'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}',
- 'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}',
-}
-
-#------------------------------------------------------------------------------
-# Helper functions
-
-
-def MakeGuid(name, seed='msvs_new'):
- """Returns a GUID for the specified target name.
-
- Args:
- name: Target name.
- seed: Seed for MD5 hash.
- Returns:
- A GUID-like string calculated from the name and seed.
-
- This generates something which looks like a GUID, but depends only on the
- name and seed. This means the same name/seed will always generate the same
- GUID, so that projects and solutions which refer to each other can
- determine the GUID to refer to explicitly. It also means that the GUID will
- not change when the project for a target is rebuilt.
- """
- # Calculate an MD5 signature for the seed and name.
- d = _new_md5(str(seed) + str(name)).hexdigest().upper()
- # Convert most of the signature to GUID form (discard the rest)
- guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20]
- + '-' + d[20:32] + '}')
- return guid
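For reference, a minimal sketch of the determinism described above (assumes deps/gyp/pylib is on sys.path; Python 2, like the module itself):

    from gyp.MSVSNew import MakeGuid

    # The same name/seed pair always yields the same GUID-like string...
    assert MakeGuid('my_target') == MakeGuid('my_target')
    # ...while a different seed yields a different (but equally stable) one.
    assert MakeGuid('my_target') != MakeGuid('my_target', seed='other')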
-
-#------------------------------------------------------------------------------
-
-
-class MSVSSolutionEntry(object):
- def __cmp__(self, other):
- # Sort by name then guid (so things are in order on vs2008).
- return cmp((self.name, self.get_guid()), (other.name, other.get_guid()))
-
-
-class MSVSFolder(MSVSSolutionEntry):
- """Folder in a Visual Studio project or solution."""
-
- def __init__(self, path, name = None, entries = None,
- guid = None, items = None):
- """Initializes the folder.
-
- Args:
- path: Full path to the folder.
- name: Name of the folder.
- entries: List of folder entries to nest inside this folder. May contain
- Folder or Project objects. May be None, if the folder is empty.
- guid: GUID to use for folder, if not None.
- items: List of solution items to include in the folder project. May be
- None, if the folder does not directly contain items.
- """
- if name:
- self.name = name
- else:
- # Use last layer.
- self.name = os.path.basename(path)
-
- self.path = path
- self.guid = guid
-
- # Copy passed lists (or set to empty lists)
- self.entries = sorted(list(entries or []))
- self.items = list(items or [])
-
- self.entry_type_guid = ENTRY_TYPE_GUIDS['folder']
-
- def get_guid(self):
- if self.guid is None:
- # Use consistent guids for folders (so things don't regenerate).
- self.guid = MakeGuid(self.path, seed='msvs_folder')
- return self.guid
-
-
-#------------------------------------------------------------------------------
-
-
-class MSVSProject(MSVSSolutionEntry):
- """Visual Studio project."""
-
- def __init__(self, path, name = None, dependencies = None, guid = None,
- spec = None, build_file = None, config_platform_overrides = None,
- fixpath_prefix = None):
- """Initializes the project.
-
- Args:
- path: Absolute path to the project file.
- name: Name of project. If None, the name will be the same as the base
- name of the project file.
- dependencies: List of other Project objects this project is dependent
- upon, if not None.
- guid: GUID to use for project, if not None.
- spec: Dictionary specifying how to build this project.
- build_file: Filename of the .gyp file that the vcproj file comes from.
- config_platform_overrides: optional dict of configuration platforms to
- use in place of the default for this target.
- fixpath_prefix: the path used to adjust the behavior of _fixpath
- """
- self.path = path
- self.guid = guid
- self.spec = spec
- self.build_file = build_file
- # Use project filename if name not specified
- self.name = name or os.path.splitext(os.path.basename(path))[0]
-
- # Copy passed lists (or set to empty lists)
- self.dependencies = list(dependencies or [])
-
- self.entry_type_guid = ENTRY_TYPE_GUIDS['project']
-
- if config_platform_overrides:
- self.config_platform_overrides = config_platform_overrides
- else:
- self.config_platform_overrides = {}
- self.fixpath_prefix = fixpath_prefix
- self.msbuild_toolset = None
-
- def set_dependencies(self, dependencies):
- self.dependencies = list(dependencies or [])
-
- def get_guid(self):
- if self.guid is None:
- # Set GUID from path
- # TODO(rspangler): This is fragile.
- # 1. We can't just use the project filename sans path, since there could
- # be multiple projects with the same base name (for example,
- # foo/unittest.vcproj and bar/unittest.vcproj).
- # 2. The path needs to be relative to $SOURCE_ROOT, so that the project
- # GUID is the same whether it's included from base/base.sln or
- # foo/bar/baz/baz.sln.
- # 3. The GUID needs to be the same each time this builder is invoked, so
- # that we don't need to rebuild the solution when the project changes.
- # 4. We should be able to handle pre-built project files by reading the
- # GUID from the files.
- self.guid = MakeGuid(self.name)
- return self.guid
-
- def set_msbuild_toolset(self, msbuild_toolset):
- self.msbuild_toolset = msbuild_toolset
-
-#------------------------------------------------------------------------------
-
-
-class MSVSSolution(object):
- """Visual Studio solution."""
-
- def __init__(self, path, version, entries=None, variants=None,
- websiteProperties=True):
- """Initializes the solution.
-
- Args:
- path: Path to solution file.
- version: Format version to emit.
- entries: List of entries in solution. May contain Folder or Project
- objects. May be None, if the folder is empty.
- variants: List of build variant strings. If none, a default list will
- be used.
- websiteProperties: Flag to decide if the website properties section
- is generated.
- """
- self.path = path
- self.websiteProperties = websiteProperties
- self.version = version
-
- # Copy passed lists (or set to empty lists)
- self.entries = list(entries or [])
-
- if variants:
- # Copy passed list
- self.variants = variants[:]
- else:
- # Use default
- self.variants = ['Debug|Win32', 'Release|Win32']
- # TODO(rspangler): Need to be able to handle a mapping of solution config
- # to project config. Should we be able to handle variants being a dict,
- # or add a separate variant_map variable? If it's a dict, we can't
- # guarantee the order of variants since dict keys aren't ordered.
-
-
- # TODO(rspangler): Automatically write to disk for now; should delay until
- # node-evaluation time.
- self.Write()
-
-
- def Write(self, writer=gyp.common.WriteOnDiff):
- """Writes the solution file to disk.
-
- Raises:
- IndexError: An entry appears multiple times.
- """
- # Walk the entry tree and collect all the folders and projects.
- all_entries = set()
- entries_to_check = self.entries[:]
- while entries_to_check:
- e = entries_to_check.pop(0)
-
- # If this entry has been visited, nothing to do.
- if e in all_entries:
- continue
-
- all_entries.add(e)
-
- # If this is a folder, check its entries too.
- if isinstance(e, MSVSFolder):
- entries_to_check += e.entries
-
- all_entries = sorted(all_entries)
-
- # Open file and print header
- f = writer(self.path)
- f.write('Microsoft Visual Studio Solution File, '
- 'Format Version %s\r\n' % self.version.SolutionVersion())
- f.write('# %s\r\n' % self.version.Description())
-
- # Project entries
- sln_root = os.path.split(self.path)[0]
- for e in all_entries:
- relative_path = gyp.common.RelativePath(e.path, sln_root)
- # msbuild does not accept an empty folder_name.
- # use '.' in case relative_path is empty.
- folder_name = relative_path.replace('/', '\\') or '.'
- f.write('Project("%s") = "%s", "%s", "%s"\r\n' % (
- e.entry_type_guid, # Entry type GUID
- e.name, # Folder name
- folder_name, # Folder name (again)
- e.get_guid(), # Entry GUID
- ))
-
- # TODO(rspangler): Need a way to configure this stuff
- if self.websiteProperties:
- f.write('\tProjectSection(WebsiteProperties) = preProject\r\n'
- '\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
- '\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
- '\tEndProjectSection\r\n')
-
- if isinstance(e, MSVSFolder):
- if e.items:
- f.write('\tProjectSection(SolutionItems) = preProject\r\n')
- for i in e.items:
- f.write('\t\t%s = %s\r\n' % (i, i))
- f.write('\tEndProjectSection\r\n')
-
- if isinstance(e, MSVSProject):
- if e.dependencies:
- f.write('\tProjectSection(ProjectDependencies) = postProject\r\n')
- for d in e.dependencies:
- f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid()))
- f.write('\tEndProjectSection\r\n')
-
- f.write('EndProject\r\n')
-
- # Global section
- f.write('Global\r\n')
-
- # Configurations (variants)
- f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n')
- for v in self.variants:
- f.write('\t\t%s = %s\r\n' % (v, v))
- f.write('\tEndGlobalSection\r\n')
-
- # Sort config guids for easier diffing of solution changes.
- config_guids = []
- config_guids_overrides = {}
- for e in all_entries:
- if isinstance(e, MSVSProject):
- config_guids.append(e.get_guid())
- config_guids_overrides[e.get_guid()] = e.config_platform_overrides
- config_guids.sort()
-
- f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n')
- for g in config_guids:
- for v in self.variants:
- nv = config_guids_overrides[g].get(v, v)
- # Pick which project configuration to build for this solution
- # configuration.
- f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % (
- g, # Project GUID
- v, # Solution build configuration
- nv, # Project build config for that solution config
- ))
-
- # Enable project in this solution configuration.
- f.write('\t\t%s.%s.Build.0 = %s\r\n' % (
- g, # Project GUID
- v, # Solution build configuration
- nv, # Project build config for that solution config
- ))
- f.write('\tEndGlobalSection\r\n')
-
- # TODO(rspangler): Should be able to configure this stuff too (though I've
- # never seen this be any different)
- f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n')
- f.write('\t\tHideSolutionNode = FALSE\r\n')
- f.write('\tEndGlobalSection\r\n')
-
- # Folder mappings
- # Omit this section if there are no folders
- if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]):
- f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
- for e in all_entries:
- if not isinstance(e, MSVSFolder):
- continue # Does not apply to projects, only folders
- for subentry in e.entries:
- f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid()))
- f.write('\tEndGlobalSection\r\n')
-
- f.write('EndGlobal\r\n')
-
- f.close()
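Taken together, a hedged sketch of how these classes are meant to be driven (StubVersion stands in for a gyp.MSVSVersion object; paths are illustrative):

    from gyp.MSVSNew import MSVSProject, MSVSSolution

    class StubVersion(object):
      # Stand-in providing only the two methods MSVSSolution.Write() calls.
      def SolutionVersion(self):
        return '11.00'
      def Description(self):
        return 'Visual Studio 2010'

    foo = MSVSProject(r'c:\src\foo\foo.vcxproj', name='foo')
    bar = MSVSProject(r'c:\src\bar\bar.vcxproj', name='bar',
                      dependencies=[foo])
    # __init__ calls Write() itself (see the TODO above), so constructing
    # the solution emits foo.sln with both projects and their dependency.
    MSVSSolution(r'c:\src\foo.sln', StubVersion(), entries=[foo, bar],
                 websiteProperties=False)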
diff --git a/deps/gyp/pylib/gyp/MSVSProject.py b/deps/gyp/pylib/gyp/MSVSProject.py
deleted file mode 100644
index db1ceede34..0000000000
--- a/deps/gyp/pylib/gyp/MSVSProject.py
+++ /dev/null
@@ -1,208 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Visual Studio project reader/writer."""
-
-import gyp.common
-import gyp.easy_xml as easy_xml
-
-#------------------------------------------------------------------------------
-
-
-class Tool(object):
- """Visual Studio tool."""
-
- def __init__(self, name, attrs=None):
- """Initializes the tool.
-
- Args:
- name: Tool name.
- attrs: Dict of tool attributes; may be None.
- """
- self._attrs = attrs or {}
- self._attrs['Name'] = name
-
- def _GetSpecification(self):
- """Creates an element for the tool.
-
- Returns:
- A new xml.dom.Element for the tool.
- """
- return ['Tool', self._attrs]
-
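The ['Tool', self._attrs] list returned above is the easy_xml convention used throughout this writer: a tag name, an attribute dict, then any child elements. A small illustration, with made-up attribute values:

    spec = Tool('VCCLCompilerTool', {'Optimization': '2'})._GetSpecification()
    # spec == ['Tool', {'Name': 'VCCLCompilerTool', 'Optimization': '2'}]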
-class Filter(object):
- """Visual Studio filter - that is, a virtual folder."""
-
- def __init__(self, name, contents=None):
- """Initializes the folder.
-
- Args:
- name: Filter (folder) name.
- contents: List of filenames and/or Filter objects contained.
- """
- self.name = name
- self.contents = list(contents or [])
-
-
-#------------------------------------------------------------------------------
-
-
-class Writer(object):
- """Visual Studio XML project writer."""
-
- def __init__(self, project_path, version, name, guid=None, platforms=None):
- """Initializes the project.
-
- Args:
- project_path: Path to the project file.
- version: Format version to emit.
- name: Name of the project.
- guid: GUID to use for project, if not None.
- platforms: Array of strings, the supported platforms. If None, defaults
- to ['Win32'].
- """
- self.project_path = project_path
- self.version = version
- self.name = name
- self.guid = guid
-
- # Default to Win32 for platforms.
- if not platforms:
- platforms = ['Win32']
-
- # Initialize the specifications of the various sections.
- self.platform_section = ['Platforms']
- for platform in platforms:
- self.platform_section.append(['Platform', {'Name': platform}])
- self.tool_files_section = ['ToolFiles']
- self.configurations_section = ['Configurations']
- self.files_section = ['Files']
-
- # Keep a dict keyed on filename to speed up access.
- self.files_dict = dict()
-
- def AddToolFile(self, path):
- """Adds a tool file to the project.
-
- Args:
- path: Relative path from project to tool file.
- """
- self.tool_files_section.append(['ToolFile', {'RelativePath': path}])
-
- def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools):
- """Returns the specification for a configuration.
-
- Args:
- config_type: Type of configuration node.
- config_name: Configuration name.
- attrs: Dict of configuration attributes; may be None.
- tools: List of tools (strings or Tool objects); may be None.
- Returns:
- A list suitable for easy_xml, describing the configuration element.
- """
- # Handle defaults
- if not attrs:
- attrs = {}
- if not tools:
- tools = []
-
- # Add configuration node and its attributes
- node_attrs = attrs.copy()
- node_attrs['Name'] = config_name
- specification = [config_type, node_attrs]
-
- # Add tool nodes and their attributes
- if tools:
- for t in tools:
- if isinstance(t, Tool):
- specification.append(t._GetSpecification())
- else:
- specification.append(Tool(t)._GetSpecification())
- return specification
-
-
- def AddConfig(self, name, attrs=None, tools=None):
- """Adds a configuration to the project.
-
- Args:
- name: Configuration name.
- attrs: Dict of configuration attributes; may be None.
- tools: List of tools (strings or Tool objects); may be None.
- """
- spec = self._GetSpecForConfiguration('Configuration', name, attrs, tools)
- self.configurations_section.append(spec)
-
- def _AddFilesToNode(self, parent, files):
- """Adds files and/or filters to the parent node.
-
- Args:
- parent: Destination node
- files: A list of Filter objects and/or relative paths to files.
-
- Will call itself recursively, if the files list contains Filter objects.
- """
- for f in files:
- if isinstance(f, Filter):
- node = ['Filter', {'Name': f.name}]
- self._AddFilesToNode(node, f.contents)
- else:
- node = ['File', {'RelativePath': f}]
- self.files_dict[f] = node
- parent.append(node)
-
- def AddFiles(self, files):
- """Adds files to the project.
-
- Args:
- files: A list of Filter objects and/or relative paths to files.
-
- This makes a copy of the file/filter tree at the time of this call. If you
- later add files to a Filter object which was passed into a previous call
- to AddFiles(), it will not be reflected in this project.
- """
- self._AddFilesToNode(self.files_section, files)
- # TODO(rspangler) This also doesn't handle adding files to an existing
- # filter. That is, it doesn't merge the trees.
-
- def AddFileConfig(self, path, config, attrs=None, tools=None):
- """Adds a configuration to a file.
-
- Args:
- path: Relative path to the file.
- config: Name of configuration to add.
- attrs: Dict of configuration attributes; may be None.
- tools: List of tools (strings or Tool objects); may be None.
-
- Raises:
- ValueError: Relative path does not match any file added via AddFiles().
- """
- # Find the file node with the right relative path
- parent = self.files_dict.get(path)
- if not parent:
- raise ValueError('AddFileConfig: file "%s" not in project.' % path)
-
- # Add the config to the file node
- spec = self._GetSpecForConfiguration('FileConfiguration', config, attrs,
- tools)
- parent.append(spec)
-
- def WriteIfChanged(self):
- """Writes the project file."""
- # First create XML content definition
- content = [
- 'VisualStudioProject',
- {'ProjectType': 'Visual C++',
- 'Version': self.version.ProjectVersion(),
- 'Name': self.name,
- 'ProjectGUID': self.guid,
- 'RootNamespace': self.name,
- 'Keyword': 'Win32Proj'
- },
- self.platform_section,
- self.tool_files_section,
- self.configurations_section,
- ['References'], # empty section
- self.files_section,
- ['Globals'] # empty section
- ]
- easy_xml.WriteXmlIfChanged(content, self.project_path,
- encoding="Windows-1252")
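A hedged end-to-end sketch of the Writer above (StubVersion again stands in for a gyp.MSVSVersion object; the GUID and file names are placeholders):

    from gyp.MSVSProject import Writer, Tool, Filter

    class StubVersion(object):
      def ProjectVersion(self):
        return '9.00'  # what a real MSVSVersion object would supply

    w = Writer('foo.vcproj', StubVersion(), 'foo',
               guid='{00000000-0000-0000-0000-000000000000}')
    w.AddConfig('Debug|Win32',
                tools=[Tool('VCCLCompilerTool', {'Optimization': '0'})])
    w.AddFiles(['main.cc', Filter('headers', ['foo.h'])])
    w.AddFileConfig('main.cc', 'Debug|Win32',
                    tools=[Tool('VCCLCompilerTool', {'WarnAsError': 'true'})])
    w.WriteIfChanged()  # emits Windows-1252 XML only if the content changed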
diff --git a/deps/gyp/pylib/gyp/MSVSSettings.py b/deps/gyp/pylib/gyp/MSVSSettings.py
deleted file mode 100644
index 4985756bdd..0000000000
--- a/deps/gyp/pylib/gyp/MSVSSettings.py
+++ /dev/null
@@ -1,1096 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-r"""Code to validate and convert settings of the Microsoft build tools.
-
-This file contains code to validate and convert settings of the Microsoft
-build tools. The function ConvertToMSBuildSettings(), ValidateMSVSSettings(),
-and ValidateMSBuildSettings() are the entry points.
-
-This file was created by comparing the projects created by Visual Studio 2008
-and Visual Studio 2010 for all available settings through the user interface.
-The MSBuild schemas were also considered. They are typically found in the
-MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild
-"""
-
-import sys
-import re
-
-# Dictionaries of settings validators. The key is the tool name, the value is
-# a dictionary mapping setting names to validation functions.
-_msvs_validators = {}
-_msbuild_validators = {}
-
-
-# A dictionary of settings converters. The key is the tool name, the value is
-# a dictionary mapping setting names to conversion functions.
-_msvs_to_msbuild_converters = {}
-
-
-# Tool name mapping from MSVS to MSBuild.
-_msbuild_name_of_tool = {}
-
-
-class _Tool(object):
- """Represents a tool used by MSVS or MSBuild.
-
- Attributes:
- msvs_name: The name of the tool in MSVS.
- msbuild_name: The name of the tool in MSBuild.
- """
-
- def __init__(self, msvs_name, msbuild_name):
- self.msvs_name = msvs_name
- self.msbuild_name = msbuild_name
-
-
-def _AddTool(tool):
- """Adds a tool to the four dictionaries used to process settings.
-
- This only defines the tool. Each setting also needs to be added.
-
- Args:
- tool: The _Tool object to be added.
- """
- _msvs_validators[tool.msvs_name] = {}
- _msbuild_validators[tool.msbuild_name] = {}
- _msvs_to_msbuild_converters[tool.msvs_name] = {}
- _msbuild_name_of_tool[tool.msvs_name] = tool.msbuild_name
-
-
-def _GetMSBuildToolSettings(msbuild_settings, tool):
- """Returns an MSBuild tool dictionary. Creates it if needed."""
- return msbuild_settings.setdefault(tool.msbuild_name, {})
-
-
-class _Type(object):
- """Type of settings (Base class)."""
-
- def ValidateMSVS(self, value):
- """Verifies that the value is legal for MSVS.
-
- Args:
- value: the value to check for this type.
-
- Raises:
- ValueError if value is not valid for MSVS.
- """
-
- def ValidateMSBuild(self, value):
- """Verifies that the value is legal for MSBuild.
-
- Args:
- value: the value to check for this type.
-
- Raises:
- ValueError if value is not valid for MSBuild.
- """
-
- def ConvertToMSBuild(self, value):
- """Returns the MSBuild equivalent of the MSVS value given.
-
- Args:
- value: the MSVS value to convert.
-
- Returns:
- the MSBuild equivalent.
-
- Raises:
- ValueError if value is not valid.
- """
- return value
-
-
-class _String(_Type):
- """A setting that's just a string."""
-
- def ValidateMSVS(self, value):
- if not isinstance(value, basestring):
- raise ValueError('expected string; got %r' % value)
-
- def ValidateMSBuild(self, value):
- if not isinstance(value, basestring):
- raise ValueError('expected string; got %r' % value)
-
- def ConvertToMSBuild(self, value):
- # Convert the macros
- return ConvertVCMacrosToMSBuild(value)
-
-
-class _StringList(_Type):
- """A settings that's a list of strings."""
-
- def ValidateMSVS(self, value):
- if not isinstance(value, basestring) and not isinstance(value, list):
- raise ValueError('expected string list; got %r' % value)
-
- def ValidateMSBuild(self, value):
- if not isinstance(value, basestring) and not isinstance(value, list):
- raise ValueError('expected string list; got %r' % value)
-
- def ConvertToMSBuild(self, value):
- # Convert the macros
- if isinstance(value, list):
- return [ConvertVCMacrosToMSBuild(i) for i in value]
- else:
- return ConvertVCMacrosToMSBuild(value)
-
-
-class _Boolean(_Type):
- """Boolean settings, can have the values 'false' or 'true'."""
-
- def _Validate(self, value):
- if value != 'true' and value != 'false':
- raise ValueError('expected bool; got %r' % value)
-
- def ValidateMSVS(self, value):
- self._Validate(value)
-
- def ValidateMSBuild(self, value):
- self._Validate(value)
-
- def ConvertToMSBuild(self, value):
- self._Validate(value)
- return value
-
-
-class _Integer(_Type):
- """Integer settings."""
-
- def __init__(self, msbuild_base=10):
- _Type.__init__(self)
- self._msbuild_base = msbuild_base
-
- def ValidateMSVS(self, value):
- # Try to convert, this will raise ValueError if invalid.
- self.ConvertToMSBuild(value)
-
- def ValidateMSBuild(self, value):
- # Try to convert, this will raise ValueError if invalid.
- int(value, self._msbuild_base)
-
- def ConvertToMSBuild(self, value):
- msbuild_format = (self._msbuild_base == 10) and '%d' or '0x%04x'
- return msbuild_format % int(value)
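For example, the Culture setting registered later in this file uses msbuild_base=16, so its MSBuild form comes out in hex:

    print _Integer(msbuild_base=16).ConvertToMSBuild('1033')  # '0x0409'
    print _Integer().ConvertToMSBuild('42')                   # '42'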
-
-
-class _Enumeration(_Type):
- """Type of settings that is an enumeration.
-
- In MSVS, the values are indexes like '0', '1', and '2'.
- MSBuild uses text labels that are more representative, like 'Win32'.
-
- Constructor args:
- label_list: an array of MSBuild labels that correspond to the MSVS index.
- In the rare cases where MSVS has skipped an index value, None is
- used in the array to indicate the unused spot.
- new: an array of labels that are new to MSBuild.
- """
-
- def __init__(self, label_list, new=None):
- _Type.__init__(self)
- self._label_list = label_list
- self._msbuild_values = set(value for value in label_list
- if value is not None)
- if new is not None:
- self._msbuild_values.update(new)
-
- def ValidateMSVS(self, value):
- # Try to convert. It will raise an exception if not valid.
- self.ConvertToMSBuild(value)
-
- def ValidateMSBuild(self, value):
- if value not in self._msbuild_values:
- raise ValueError('unrecognized enumerated value %s' % value)
-
- def ConvertToMSBuild(self, value):
- index = int(value)
- if index < 0 or index >= len(self._label_list):
- raise ValueError('index value (%d) not in expected range [0, %d)' %
- (index, len(self._label_list)))
- label = self._label_list[index]
- if label is None:
- raise ValueError('converted value for %s not specified.' % value)
- return label
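Concretely, mirroring the RuntimeLibrary definition further down:

    runtime = _Enumeration(['MultiThreaded',          # /MT
                            'MultiThreadedDebug',     # /MTd
                            'MultiThreadedDLL',       # /MD
                            'MultiThreadedDebugDLL']) # /MDd
    print runtime.ConvertToMSBuild('2')  # 'MultiThreadedDLL'
    runtime.ValidateMSBuild('MultiThreadedDLL')  # ok: labels, not indexes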
-
-
-# Instantiate the various generic types.
-_boolean = _Boolean()
-_integer = _Integer()
-# For now, we don't do any special validation on these types:
-_string = _String()
-_file_name = _String()
-_folder_name = _String()
-_file_list = _StringList()
-_folder_list = _StringList()
-_string_list = _StringList()
-# Some boolean settings went from numerical values to boolean. The
-# mapping is 0: default, 1: false, 2: true.
-_newly_boolean = _Enumeration(['', 'false', 'true'])
-
-
-def _Same(tool, name, setting_type):
- """Defines a setting that has the same name in MSVS and MSBuild.
-
- Args:
- tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
- name: the name of the setting.
- setting_type: the type of this setting.
- """
- _Renamed(tool, name, name, setting_type)
-
-
-def _Renamed(tool, msvs_name, msbuild_name, setting_type):
- """Defines a setting for which the name has changed.
-
- Args:
- tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
- msvs_name: the name of the MSVS setting.
- msbuild_name: the name of the MSBuild setting.
- setting_type: the type of this setting.
- """
-
- def _Translate(value, msbuild_settings):
- msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
- msbuild_tool_settings[msbuild_name] = setting_type.ConvertToMSBuild(value)
-
- _msvs_validators[tool.msvs_name][msvs_name] = setting_type.ValidateMSVS
- _msbuild_validators[tool.msbuild_name][msbuild_name] = (
- setting_type.ValidateMSBuild)
- _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
-
-
-def _Moved(tool, settings_name, msbuild_tool_name, setting_type):
- _MovedAndRenamed(tool, settings_name, msbuild_tool_name, settings_name,
- setting_type)
-
-
-def _MovedAndRenamed(tool, msvs_settings_name, msbuild_tool_name,
- msbuild_settings_name, setting_type):
- """Defines a setting that may have moved to a new section.
-
- Args:
- tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
- msvs_settings_name: the MSVS name of the setting.
- msbuild_tool_name: the name of the MSBuild tool to place the setting under.
- msbuild_settings_name: the MSBuild name of the setting.
- setting_type: the type of this setting.
- """
-
- def _Translate(value, msbuild_settings):
- tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {})
- tool_settings[msbuild_settings_name] = setting_type.ConvertToMSBuild(value)
-
- _msvs_validators[tool.msvs_name][msvs_settings_name] = (
- setting_type.ValidateMSVS)
- validator = setting_type.ValidateMSBuild
- _msbuild_validators[msbuild_tool_name][msbuild_settings_name] = validator
- _msvs_to_msbuild_converters[tool.msvs_name][msvs_settings_name] = _Translate
-
-
-def _MSVSOnly(tool, name, setting_type):
- """Defines a setting that is only found in MSVS.
-
- Args:
- tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
- name: the name of the setting.
- setting_type: the type of this setting.
- """
-
- def _Translate(unused_value, unused_msbuild_settings):
- # Since this is for MSVS only settings, no translation will happen.
- pass
-
- _msvs_validators[tool.msvs_name][name] = setting_type.ValidateMSVS
- _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate
-
-
-def _MSBuildOnly(tool, name, setting_type):
- """Defines a setting that is only found in MSBuild.
-
- Args:
- tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
- name: the name of the setting.
- setting_type: the type of this setting.
- """
-
- def _Translate(value, msbuild_settings):
- # Let msbuild-only properties get translated as-is from msvs_settings.
- tool_settings = msbuild_settings.setdefault(tool.msbuild_name, {})
- tool_settings[name] = value
-
- _msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild
- _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate
-
-
-def _ConvertedToAdditionalOption(tool, msvs_name, flag):
- """Defines a setting that's handled via a command line option in MSBuild.
-
- Args:
- tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
- msvs_name: the name of the MSVS setting that if 'true' becomes a flag
- flag: the flag to insert at the end of the AdditionalOptions
- """
-
- def _Translate(value, msbuild_settings):
- if value == 'true':
- tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
- if 'AdditionalOptions' in tool_settings:
- new_flags = '%s %s' % (tool_settings['AdditionalOptions'], flag)
- else:
- new_flags = flag
- tool_settings['AdditionalOptions'] = new_flags
- _msvs_validators[tool.msvs_name][msvs_name] = _boolean.ValidateMSVS
- _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
-
-
-def _CustomGeneratePreprocessedFile(tool, msvs_name):
- def _Translate(value, msbuild_settings):
- tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
- if value == '0':
- tool_settings['PreprocessToFile'] = 'false'
- tool_settings['PreprocessSuppressLineNumbers'] = 'false'
- elif value == '1': # /P
- tool_settings['PreprocessToFile'] = 'true'
- tool_settings['PreprocessSuppressLineNumbers'] = 'false'
- elif value == '2': # /EP /P
- tool_settings['PreprocessToFile'] = 'true'
- tool_settings['PreprocessSuppressLineNumbers'] = 'true'
- else:
- raise ValueError('value must be one of [0, 1, 2]; got %s' % value)
- # Create a bogus validator that looks for '0', '1', or '2'
- msvs_validator = _Enumeration(['a', 'b', 'c']).ValidateMSVS
- _msvs_validators[tool.msvs_name][msvs_name] = msvs_validator
- msbuild_validator = _boolean.ValidateMSBuild
- msbuild_tool_validators = _msbuild_validators[tool.msbuild_name]
- msbuild_tool_validators['PreprocessToFile'] = msbuild_validator
- msbuild_tool_validators['PreprocessSuppressLineNumbers'] = msbuild_validator
- _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
-
-
-fix_vc_macro_slashes_regex_list = ('IntDir', 'OutDir')
-fix_vc_macro_slashes_regex = re.compile(
- r'(\$\((?:%s)\))(?:[\\/]+)' % "|".join(fix_vc_macro_slashes_regex_list)
-)
-
-# Regular expression to detect keys that were generated by exclusion lists
-_EXCLUDED_SUFFIX_RE = re.compile('^(.*)_excluded$')
-
-
-def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
- """Verify that 'setting' is valid if it is generated from an exclusion list.
-
- If the setting appears to be generated from an exclusion list, the root name
- is checked.
-
- Args:
- setting: A string that is the setting name to validate
- settings: A dictionary where the keys are valid settings
- error_msg: The message to emit in the event of error
- stderr: The stream receiving the error messages.
- """
- # This may be unrecognized because it's an exclusion list. If the
- # setting name has the _excluded suffix, then check the root name.
- unrecognized = True
- m = re.match(_EXCLUDED_SUFFIX_RE, setting)
- if m:
- root_setting = m.group(1)
- unrecognized = root_setting not in settings
-
- if unrecognized:
- # We don't know this setting. Give a warning.
- print >> stderr, error_msg
-
-
-def FixVCMacroSlashes(s):
- """Replace macros which have excessive following slashes.
-
- These macros are known to have a built-in trailing slash. Furthermore, many
- scripts hiccup on processing paths with extra slashes in the middle.
-
- This list is probably not exhaustive. Add as needed.
- """
- if '$' in s:
- s = fix_vc_macro_slashes_regex.sub(r'\1', s)
- return s
-
-
-def ConvertVCMacrosToMSBuild(s):
- """Convert the the MSVS macros found in the string to the MSBuild equivalent.
-
- This list is probably not exhaustive. Add as needed.
- """
- if '$' in s:
- replace_map = {
- '$(ConfigurationName)': '$(Configuration)',
- '$(InputDir)': '%(RelativeDir)',
- '$(InputExt)': '%(Extension)',
- '$(InputFileName)': '%(Filename)%(Extension)',
- '$(InputName)': '%(Filename)',
- '$(InputPath)': '%(Identity)',
- '$(ParentName)': '$(ProjectFileName)',
- '$(PlatformName)': '$(Platform)',
- '$(SafeInputName)': '%(Filename)',
- }
- for old, new in replace_map.iteritems():
- s = s.replace(old, new)
- s = FixVCMacroSlashes(s)
- return s
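A quick illustration of the two helpers working together:

    print ConvertVCMacrosToMSBuild(r'$(IntDir)\$(InputName).obj')
    # '$(IntDir)%(Filename).obj' -- the macro is renamed, and the
    # backslash after the slash-terminated $(IntDir) macro is dropped.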
-
-
-def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
- """Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+).
-
- Args:
- msvs_settings: A dictionary. The key is the tool name. The values are
- themselves dictionaries of settings and their values.
- stderr: The stream receiving the error messages.
-
- Returns:
- A dictionary of MSBuild settings. The key is either the MSBuild tool name
- or the empty string (for the global settings). The values are themselves
- dictionaries of settings and their values.
- """
- msbuild_settings = {}
- for msvs_tool_name, msvs_tool_settings in msvs_settings.iteritems():
- if msvs_tool_name in _msvs_to_msbuild_converters:
- msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name]
- for msvs_setting, msvs_value in msvs_tool_settings.iteritems():
- if msvs_setting in msvs_tool:
- # Invoke the translation function.
- try:
- msvs_tool[msvs_setting](msvs_value, msbuild_settings)
- except ValueError, e:
- print >> stderr, ('Warning: while converting %s/%s to MSBuild, '
- '%s' % (msvs_tool_name, msvs_setting, e))
- else:
- _ValidateExclusionSetting(msvs_setting,
- msvs_tool,
- ('Warning: unrecognized setting %s/%s '
- 'while converting to MSBuild.' %
- (msvs_tool_name, msvs_setting)),
- stderr)
- else:
- print >> stderr, ('Warning: unrecognized tool %s while converting to '
- 'MSBuild.' % msvs_tool_name)
- return msbuild_settings
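A minimal sketch of a conversion, using tool and setting names registered later in this file:

    msvs = {'VCCLCompilerTool': {
        'Optimization': '2',                               # enumeration index
        'WarnAsError': 'true',                             # renamed in MSBuild
        'AdditionalIncludeDirectories': ['$(InputDir)']}}  # macro rewritten
    print ConvertToMSBuildSettings(msvs)
    # -> {'ClCompile': {'Optimization': 'MaxSpeed',
    #                   'TreatWarningAsError': 'true',
    #                   'AdditionalIncludeDirectories': ['%(RelativeDir)']}}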
-
-
-def ValidateMSVSSettings(settings, stderr=sys.stderr):
- """Validates that the names of the settings are valid for MSVS.
-
- Args:
- settings: A dictionary. The key is the tool name. The values are
- themselves dictionaries of settings and their values.
- stderr: The stream receiving the error messages.
- """
- _ValidateSettings(_msvs_validators, settings, stderr)
-
-
-def ValidateMSBuildSettings(settings, stderr=sys.stderr):
- """Validates that the names of the settings are valid for MSBuild.
-
- Args:
- settings: A dictionary. The key is the tool name. The values are
- themselves dictionaries of settings and their values.
- stderr: The stream receiving the error messages.
- """
- _ValidateSettings(_msbuild_validators, settings, stderr)
-
-
-def _ValidateSettings(validators, settings, stderr):
- """Validates that the settings are valid for MSBuild or MSVS.
-
- We currently only validate the names of the settings, not their values.
-
- Args:
- validators: A dictionary of tools and their validators.
- settings: A dictionary. The key is the tool name. The values are
- themselves dictionaries of settings and their values.
- stderr: The stream receiving the error messages.
- """
- for tool_name in settings:
- if tool_name in validators:
- tool_validators = validators[tool_name]
- for setting, value in settings[tool_name].iteritems():
- if setting in tool_validators:
- try:
- tool_validators[setting](value)
- except ValueError, e:
- print >> stderr, ('Warning: for %s/%s, %s' %
- (tool_name, setting, e))
- else:
- _ValidateExclusionSetting(setting,
- tool_validators,
- ('Warning: unrecognized setting %s/%s' %
- (tool_name, setting)),
- stderr)
-
- else:
- print >> stderr, ('Warning: unrecognized tool %s' % tool_name)
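For example, an unknown setting produces a warning on the stream rather than an exception (StringIO captures it here; Python 2, like the module):

    import StringIO
    err = StringIO.StringIO()
    ValidateMSVSSettings({'VCCLCompilerTool': {'NoSuchSetting': '1'}}, err)
    print err.getvalue()
    # Warning: unrecognized setting VCCLCompilerTool/NoSuchSetting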
-
-
-# MSVS and MSBuild names of the tools.
-_compile = _Tool('VCCLCompilerTool', 'ClCompile')
-_link = _Tool('VCLinkerTool', 'Link')
-_midl = _Tool('VCMIDLTool', 'Midl')
-_rc = _Tool('VCResourceCompilerTool', 'ResourceCompile')
-_lib = _Tool('VCLibrarianTool', 'Lib')
-_manifest = _Tool('VCManifestTool', 'Manifest')
-_masm = _Tool('MASM', 'MASM')
-
-
-_AddTool(_compile)
-_AddTool(_link)
-_AddTool(_midl)
-_AddTool(_rc)
-_AddTool(_lib)
-_AddTool(_manifest)
-_AddTool(_masm)
-# Add sections only found in the MSBuild settings.
-_msbuild_validators[''] = {}
-_msbuild_validators['ProjectReference'] = {}
-_msbuild_validators['ManifestResourceCompile'] = {}
-
-# Descriptions of the compiler options, i.e. VCCLCompilerTool in MSVS and
-# ClCompile in MSBuild.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\cl.xml" for
-# the schema of the MSBuild ClCompile settings.
-
-# Options that have the same name in MSVS and MSBuild
-_Same(_compile, 'AdditionalIncludeDirectories', _folder_list) # /I
-_Same(_compile, 'AdditionalOptions', _string_list)
-_Same(_compile, 'AdditionalUsingDirectories', _folder_list) # /AI
-_Same(_compile, 'AssemblerListingLocation', _file_name) # /Fa
-_Same(_compile, 'BrowseInformationFile', _file_name)
-_Same(_compile, 'BufferSecurityCheck', _boolean) # /GS
-_Same(_compile, 'DisableLanguageExtensions', _boolean) # /Za
-_Same(_compile, 'DisableSpecificWarnings', _string_list) # /wd
-_Same(_compile, 'EnableFiberSafeOptimizations', _boolean) # /GT
-_Same(_compile, 'EnablePREfast', _boolean) # /analyze Visible='false'
-_Same(_compile, 'ExpandAttributedSource', _boolean) # /Fx
-_Same(_compile, 'FloatingPointExceptions', _boolean) # /fp:except
-_Same(_compile, 'ForceConformanceInForLoopScope', _boolean) # /Zc:forScope
-_Same(_compile, 'ForcedIncludeFiles', _file_list) # /FI
-_Same(_compile, 'ForcedUsingFiles', _file_list) # /FU
-_Same(_compile, 'GenerateXMLDocumentationFiles', _boolean) # /doc
-_Same(_compile, 'IgnoreStandardIncludePath', _boolean) # /X
-_Same(_compile, 'MinimalRebuild', _boolean) # /Gm
-_Same(_compile, 'OmitDefaultLibName', _boolean) # /Zl
-_Same(_compile, 'OmitFramePointers', _boolean) # /Oy
-_Same(_compile, 'PreprocessorDefinitions', _string_list) # /D
-_Same(_compile, 'ProgramDataBaseFileName', _file_name) # /Fd
-_Same(_compile, 'RuntimeTypeInfo', _boolean) # /GR
-_Same(_compile, 'ShowIncludes', _boolean) # /showIncludes
-_Same(_compile, 'SmallerTypeCheck', _boolean) # /RTCc
-_Same(_compile, 'StringPooling', _boolean) # /GF
-_Same(_compile, 'SuppressStartupBanner', _boolean) # /nologo
-_Same(_compile, 'TreatWChar_tAsBuiltInType', _boolean) # /Zc:wchar_t
-_Same(_compile, 'UndefineAllPreprocessorDefinitions', _boolean) # /u
-_Same(_compile, 'UndefinePreprocessorDefinitions', _string_list) # /U
-_Same(_compile, 'UseFullPaths', _boolean) # /FC
-_Same(_compile, 'WholeProgramOptimization', _boolean) # /GL
-_Same(_compile, 'XMLDocumentationFileName', _file_name)
-
-_Same(_compile, 'AssemblerOutput',
- _Enumeration(['NoListing',
- 'AssemblyCode', # /FA
- 'All', # /FAcs
- 'AssemblyAndMachineCode', # /FAc
- 'AssemblyAndSourceCode'])) # /FAs
-_Same(_compile, 'BasicRuntimeChecks',
- _Enumeration(['Default',
- 'StackFrameRuntimeCheck', # /RTCs
- 'UninitializedLocalUsageCheck', # /RTCu
- 'EnableFastChecks'])) # /RTC1
-_Same(_compile, 'BrowseInformation',
- _Enumeration(['false',
- 'true', # /FR
- 'true'])) # /Fr
-_Same(_compile, 'CallingConvention',
- _Enumeration(['Cdecl', # /Gd
- 'FastCall', # /Gr
- 'StdCall', # /Gz
- 'VectorCall'])) # /Gv
-_Same(_compile, 'CompileAs',
- _Enumeration(['Default',
- 'CompileAsC', # /TC
- 'CompileAsCpp'])) # /TP
-_Same(_compile, 'DebugInformationFormat',
- _Enumeration(['', # Disabled
- 'OldStyle', # /Z7
- None,
- 'ProgramDatabase', # /Zi
- 'EditAndContinue'])) # /ZI
-_Same(_compile, 'EnableEnhancedInstructionSet',
- _Enumeration(['NotSet',
- 'StreamingSIMDExtensions', # /arch:SSE
- 'StreamingSIMDExtensions2', # /arch:SSE2
- 'AdvancedVectorExtensions', # /arch:AVX (vs2012+)
- 'NoExtensions', # /arch:IA32 (vs2012+)
- # This one only exists in the new msbuild format.
- 'AdvancedVectorExtensions2', # /arch:AVX2 (vs2013r2+)
- ]))
-_Same(_compile, 'ErrorReporting',
- _Enumeration(['None', # /errorReport:none
- 'Prompt', # /errorReport:prompt
- 'Queue'], # /errorReport:queue
- new=['Send'])) # /errorReport:send
-_Same(_compile, 'ExceptionHandling',
- _Enumeration(['false',
- 'Sync', # /EHsc
- 'Async'], # /EHa
- new=['SyncCThrow'])) # /EHs
-_Same(_compile, 'FavorSizeOrSpeed',
- _Enumeration(['Neither',
- 'Speed', # /Ot
- 'Size'])) # /Os
-_Same(_compile, 'FloatingPointModel',
- _Enumeration(['Precise', # /fp:precise
- 'Strict', # /fp:strict
- 'Fast'])) # /fp:fast
-_Same(_compile, 'InlineFunctionExpansion',
- _Enumeration(['Default',
- 'OnlyExplicitInline', # /Ob1
- 'AnySuitable'], # /Ob2
- new=['Disabled'])) # /Ob0
-_Same(_compile, 'Optimization',
- _Enumeration(['Disabled', # /Od
- 'MinSpace', # /O1
- 'MaxSpeed', # /O2
- 'Full'])) # /Ox
-_Same(_compile, 'RuntimeLibrary',
- _Enumeration(['MultiThreaded', # /MT
- 'MultiThreadedDebug', # /MTd
- 'MultiThreadedDLL', # /MD
- 'MultiThreadedDebugDLL'])) # /MDd
-_Same(_compile, 'StructMemberAlignment',
- _Enumeration(['Default',
- '1Byte', # /Zp1
- '2Bytes', # /Zp2
- '4Bytes', # /Zp4
- '8Bytes', # /Zp8
- '16Bytes'])) # /Zp16
-_Same(_compile, 'WarningLevel',
- _Enumeration(['TurnOffAllWarnings', # /W0
- 'Level1', # /W1
- 'Level2', # /W2
- 'Level3', # /W3
- 'Level4'], # /W4
- new=['EnableAllWarnings'])) # /Wall
-
-# Options found in MSVS that have been renamed in MSBuild.
-_Renamed(_compile, 'EnableFunctionLevelLinking', 'FunctionLevelLinking',
- _boolean) # /Gy
-_Renamed(_compile, 'EnableIntrinsicFunctions', 'IntrinsicFunctions',
- _boolean) # /Oi
-_Renamed(_compile, 'KeepComments', 'PreprocessKeepComments', _boolean) # /C
-_Renamed(_compile, 'ObjectFile', 'ObjectFileName', _file_name) # /Fo
-_Renamed(_compile, 'OpenMP', 'OpenMPSupport', _boolean) # /openmp
-_Renamed(_compile, 'PrecompiledHeaderThrough', 'PrecompiledHeaderFile',
- _file_name) # Used with /Yc and /Yu
-_Renamed(_compile, 'PrecompiledHeaderFile', 'PrecompiledHeaderOutputFile',
- _file_name) # /Fp
-_Renamed(_compile, 'UsePrecompiledHeader', 'PrecompiledHeader',
- _Enumeration(['NotUsing', # VS recognized '' for this value too.
- 'Create', # /Yc
- 'Use'])) # /Yu
-_Renamed(_compile, 'WarnAsError', 'TreatWarningAsError', _boolean) # /WX
-
-_ConvertedToAdditionalOption(_compile, 'DefaultCharIsUnsigned', '/J')
-
-# MSVS options not found in MSBuild.
-_MSVSOnly(_compile, 'Detect64BitPortabilityProblems', _boolean)
-_MSVSOnly(_compile, 'UseUnicodeResponseFiles', _boolean)
-
-# MSBuild options not found in MSVS.
-_MSBuildOnly(_compile, 'BuildingInIDE', _boolean)
-_MSBuildOnly(_compile, 'CompileAsManaged',
- _Enumeration([], new=['false',
- 'true'])) # /clr
-_MSBuildOnly(_compile, 'CreateHotpatchableImage', _boolean) # /hotpatch
-_MSBuildOnly(_compile, 'MultiProcessorCompilation', _boolean) # /MP
-_MSBuildOnly(_compile, 'PreprocessOutputPath', _string) # /Fi
-_MSBuildOnly(_compile, 'ProcessorNumber', _integer) # the number of processors
-_MSBuildOnly(_compile, 'TrackerLogDirectory', _folder_name)
-_MSBuildOnly(_compile, 'TreatSpecificWarningsAsErrors', _string_list) # /we
-_MSBuildOnly(_compile, 'UseUnicodeForAssemblerListing', _boolean) # /FAu
-
-# Defines a setting that needs very customized processing
-_CustomGeneratePreprocessedFile(_compile, 'GeneratePreprocessedFile')
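So, for instance, the MSVS value '2' (/EP /P) fans out into two MSBuild booleans:

    print ConvertToMSBuildSettings(
        {'VCCLCompilerTool': {'GeneratePreprocessedFile': '2'}})
    # -> {'ClCompile': {'PreprocessToFile': 'true',
    #                   'PreprocessSuppressLineNumbers': 'true'}}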
-
-
-# Directives for converting MSVS VCLinkerTool to MSBuild Link.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\link.xml" for
-# the schema of the MSBuild Link settings.
-
-# Options that have the same name in MSVS and MSBuild
-_Same(_link, 'AdditionalDependencies', _file_list)
-_Same(_link, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH
-# /MANIFESTDEPENDENCY:
-_Same(_link, 'AdditionalManifestDependencies', _file_list)
-_Same(_link, 'AdditionalOptions', _string_list)
-_Same(_link, 'AddModuleNamesToAssembly', _file_list) # /ASSEMBLYMODULE
-_Same(_link, 'AllowIsolation', _boolean) # /ALLOWISOLATION
-_Same(_link, 'AssemblyLinkResource', _file_list) # /ASSEMBLYLINKRESOURCE
-_Same(_link, 'BaseAddress', _string) # /BASE
-_Same(_link, 'CLRUnmanagedCodeCheck', _boolean) # /CLRUNMANAGEDCODECHECK
-_Same(_link, 'DelayLoadDLLs', _file_list) # /DELAYLOAD
-_Same(_link, 'DelaySign', _boolean) # /DELAYSIGN
-_Same(_link, 'EmbedManagedResourceFile', _file_list) # /ASSEMBLYRESOURCE
-_Same(_link, 'EnableUAC', _boolean) # /MANIFESTUAC
-_Same(_link, 'EntryPointSymbol', _string) # /ENTRY
-_Same(_link, 'ForceSymbolReferences', _file_list) # /INCLUDE
-_Same(_link, 'FunctionOrder', _file_name) # /ORDER
-_Same(_link, 'GenerateDebugInformation', _boolean) # /DEBUG
-_Same(_link, 'GenerateMapFile', _boolean) # /MAP
-_Same(_link, 'HeapCommitSize', _string)
-_Same(_link, 'HeapReserveSize', _string) # /HEAP
-_Same(_link, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB
-_Same(_link, 'IgnoreEmbeddedIDL', _boolean) # /IGNOREIDL
-_Same(_link, 'ImportLibrary', _file_name) # /IMPLIB
-_Same(_link, 'KeyContainer', _file_name) # /KEYCONTAINER
-_Same(_link, 'KeyFile', _file_name) # /KEYFILE
-_Same(_link, 'ManifestFile', _file_name) # /ManifestFile
-_Same(_link, 'MapExports', _boolean) # /MAPINFO:EXPORTS
-_Same(_link, 'MapFileName', _file_name)
-_Same(_link, 'MergedIDLBaseFileName', _file_name) # /IDLOUT
-_Same(_link, 'MergeSections', _string) # /MERGE
-_Same(_link, 'MidlCommandFile', _file_name) # /MIDL
-_Same(_link, 'ModuleDefinitionFile', _file_name) # /DEF
-_Same(_link, 'OutputFile', _file_name) # /OUT
-_Same(_link, 'PerUserRedirection', _boolean)
-_Same(_link, 'Profile', _boolean) # /PROFILE
-_Same(_link, 'ProfileGuidedDatabase', _file_name) # /PGD
-_Same(_link, 'ProgramDatabaseFile', _file_name) # /PDB
-_Same(_link, 'RegisterOutput', _boolean)
-_Same(_link, 'SetChecksum', _boolean) # /RELEASE
-_Same(_link, 'StackCommitSize', _string)
-_Same(_link, 'StackReserveSize', _string) # /STACK
-_Same(_link, 'StripPrivateSymbols', _file_name) # /PDBSTRIPPED
-_Same(_link, 'SupportUnloadOfDelayLoadedDLL', _boolean) # /DELAY:UNLOAD
-_Same(_link, 'SuppressStartupBanner', _boolean) # /NOLOGO
-_Same(_link, 'SwapRunFromCD', _boolean) # /SWAPRUN:CD
-_Same(_link, 'TurnOffAssemblyGeneration', _boolean) # /NOASSEMBLY
-_Same(_link, 'TypeLibraryFile', _file_name) # /TLBOUT
-_Same(_link, 'TypeLibraryResourceID', _integer) # /TLBID
-_Same(_link, 'UACUIAccess', _boolean) # /uiAccess='true'
-_Same(_link, 'Version', _string) # /VERSION
-
-_Same(_link, 'EnableCOMDATFolding', _newly_boolean) # /OPT:ICF
-_Same(_link, 'FixedBaseAddress', _newly_boolean) # /FIXED
-_Same(_link, 'LargeAddressAware', _newly_boolean) # /LARGEADDRESSAWARE
-_Same(_link, 'OptimizeReferences', _newly_boolean) # /OPT:REF
-_Same(_link, 'RandomizedBaseAddress', _newly_boolean) # /DYNAMICBASE
-_Same(_link, 'TerminalServerAware', _newly_boolean) # /TSAWARE
-
-_subsystem_enumeration = _Enumeration(
- ['NotSet',
- 'Console', # /SUBSYSTEM:CONSOLE
- 'Windows', # /SUBSYSTEM:WINDOWS
- 'Native', # /SUBSYSTEM:NATIVE
- 'EFI Application', # /SUBSYSTEM:EFI_APPLICATION
- 'EFI Boot Service Driver', # /SUBSYSTEM:EFI_BOOT_SERVICE_DRIVER
- 'EFI ROM', # /SUBSYSTEM:EFI_ROM
- 'EFI Runtime', # /SUBSYSTEM:EFI_RUNTIME_DRIVER
- 'WindowsCE'], # /SUBSYSTEM:WINDOWSCE
- new=['POSIX']) # /SUBSYSTEM:POSIX
-
-_target_machine_enumeration = _Enumeration(
- ['NotSet',
- 'MachineX86', # /MACHINE:X86
- None,
- 'MachineARM', # /MACHINE:ARM
- 'MachineEBC', # /MACHINE:EBC
- 'MachineIA64', # /MACHINE:IA64
- None,
- 'MachineMIPS', # /MACHINE:MIPS
- 'MachineMIPS16', # /MACHINE:MIPS16
- 'MachineMIPSFPU', # /MACHINE:MIPSFPU
- 'MachineMIPSFPU16', # /MACHINE:MIPSFPU16
- None,
- None,
- None,
- 'MachineSH4', # /MACHINE:SH4
- None,
- 'MachineTHUMB', # /MACHINE:THUMB
- 'MachineX64']) # /MACHINE:X64
-
-_Same(_link, 'AssemblyDebug',
- _Enumeration(['',
- 'true', # /ASSEMBLYDEBUG
- 'false'])) # /ASSEMBLYDEBUG:DISABLE
-_Same(_link, 'CLRImageType',
- _Enumeration(['Default',
- 'ForceIJWImage', # /CLRIMAGETYPE:IJW
- 'ForcePureILImage', # /CLRIMAGETYPE:PURE
- 'ForceSafeILImage'])) # /CLRIMAGETYPE:SAFE
-_Same(_link, 'CLRThreadAttribute',
- _Enumeration(['DefaultThreadingAttribute', # /CLRTHREADATTRIBUTE:NONE
- 'MTAThreadingAttribute', # /CLRTHREADATTRIBUTE:MTA
- 'STAThreadingAttribute'])) # /CLRTHREADATTRIBUTE:STA
-_Same(_link, 'DataExecutionPrevention',
- _Enumeration(['',
- 'false', # /NXCOMPAT:NO
- 'true'])) # /NXCOMPAT
-_Same(_link, 'Driver',
- _Enumeration(['NotSet',
- 'Driver', # /Driver
- 'UpOnly', # /DRIVER:UPONLY
- 'WDM'])) # /DRIVER:WDM
-_Same(_link, 'LinkTimeCodeGeneration',
- _Enumeration(['Default',
- 'UseLinkTimeCodeGeneration', # /LTCG
- 'PGInstrument', # /LTCG:PGInstrument
- 'PGOptimization', # /LTCG:PGOptimize
- 'PGUpdate'])) # /LTCG:PGUpdate
-_Same(_link, 'ShowProgress',
- _Enumeration(['NotSet',
- 'LinkVerbose', # /VERBOSE
- 'LinkVerboseLib'], # /VERBOSE:Lib
- new=['LinkVerboseICF', # /VERBOSE:ICF
- 'LinkVerboseREF', # /VERBOSE:REF
- 'LinkVerboseSAFESEH', # /VERBOSE:SAFESEH
- 'LinkVerboseCLR'])) # /VERBOSE:CLR
-_Same(_link, 'SubSystem', _subsystem_enumeration)
-_Same(_link, 'TargetMachine', _target_machine_enumeration)
-_Same(_link, 'UACExecutionLevel',
- _Enumeration(['AsInvoker', # /level='asInvoker'
- 'HighestAvailable', # /level='highestAvailable'
- 'RequireAdministrator'])) # /level='requireAdministrator'
-_Same(_link, 'MinimumRequiredVersion', _string)
-_Same(_link, 'TreatLinkerWarningAsErrors', _boolean) # /WX
-
-
-# Options found in MSVS that have been renamed in MSBuild.
-_Renamed(_link, 'ErrorReporting', 'LinkErrorReporting',
- _Enumeration(['NoErrorReport', # /ERRORREPORT:NONE
- 'PromptImmediately', # /ERRORREPORT:PROMPT
- 'QueueForNextLogin'], # /ERRORREPORT:QUEUE
- new=['SendErrorReport'])) # /ERRORREPORT:SEND
-_Renamed(_link, 'IgnoreDefaultLibraryNames', 'IgnoreSpecificDefaultLibraries',
- _file_list) # /NODEFAULTLIB
-_Renamed(_link, 'ResourceOnlyDLL', 'NoEntryPoint', _boolean) # /NOENTRY
-_Renamed(_link, 'SwapRunFromNet', 'SwapRunFromNET', _boolean) # /SWAPRUN:NET
-
-_Moved(_link, 'GenerateManifest', '', _boolean)
-_Moved(_link, 'IgnoreImportLibrary', '', _boolean)
-_Moved(_link, 'LinkIncremental', '', _newly_boolean)
-_Moved(_link, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
-_Moved(_link, 'UseLibraryDependencyInputs', 'ProjectReference', _boolean)
-
-# MSVS options not found in MSBuild.
-_MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean)
-_MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean)
-
-# MSBuild options not found in MSVS.
-_MSBuildOnly(_link, 'BuildingInIDE', _boolean)
-_MSBuildOnly(_link, 'ImageHasSafeExceptionHandlers', _boolean) # /SAFESEH
-_MSBuildOnly(_link, 'LinkDLL', _boolean) # /DLL Visible='false'
-_MSBuildOnly(_link, 'LinkStatus', _boolean) # /LTCG:STATUS
-_MSBuildOnly(_link, 'PreventDllBinding', _boolean) # /ALLOWBIND
-_MSBuildOnly(_link, 'SupportNobindOfDelayLoadedDLL', _boolean) # /DELAY:NOBIND
-_MSBuildOnly(_link, 'TrackerLogDirectory', _folder_name)
-_MSBuildOnly(_link, 'MSDOSStubFileName', _file_name) # /STUB Visible='false'
-_MSBuildOnly(_link, 'SectionAlignment', _integer) # /ALIGN
-_MSBuildOnly(_link, 'SpecifySectionAttributes', _string) # /SECTION
-_MSBuildOnly(_link, 'ForceFileOutput',
- _Enumeration([], new=['Enabled', # /FORCE
- # /FORCE:MULTIPLE
- 'MultiplyDefinedSymbolOnly',
- 'UndefinedSymbolOnly'])) # /FORCE:UNRESOLVED
-_MSBuildOnly(_link, 'CreateHotPatchableImage',
- _Enumeration([], new=['Enabled', # /FUNCTIONPADMIN
- 'X86Image', # /FUNCTIONPADMIN:5
- 'X64Image', # /FUNCTIONPADMIN:6
- 'ItaniumImage'])) # /FUNCTIONPADMIN:16
-_MSBuildOnly(_link, 'CLRSupportLastError',
- _Enumeration([], new=['Enabled', # /CLRSupportLastError
- 'Disabled', # /CLRSupportLastError:NO
- # /CLRSupportLastError:SYSTEMDLL
- 'SystemDlls']))
-
-
-# Directives for converting VCResourceCompilerTool to ResourceCompile.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\rc.xml" for
-# the schema of the MSBuild ResourceCompile settings.
-
-_Same(_rc, 'AdditionalOptions', _string_list)
-_Same(_rc, 'AdditionalIncludeDirectories', _folder_list) # /I
-_Same(_rc, 'Culture', _Integer(msbuild_base=16))
-_Same(_rc, 'IgnoreStandardIncludePath', _boolean) # /X
-_Same(_rc, 'PreprocessorDefinitions', _string_list) # /D
-_Same(_rc, 'ResourceOutputFileName', _string) # /fo
-_Same(_rc, 'ShowProgress', _boolean) # /v
-# There is no UI in Visual Studio 2008 to set the following properties.
-# However they are found in CL and other tools. Include them here for
-# completeness, as they are very likely to have the same usage pattern.
-_Same(_rc, 'SuppressStartupBanner', _boolean) # /nologo
-_Same(_rc, 'UndefinePreprocessorDefinitions', _string_list) # /u
-
-# MSBuild options not found in MSVS.
-_MSBuildOnly(_rc, 'NullTerminateStrings', _boolean) # /n
-_MSBuildOnly(_rc, 'TrackerLogDirectory', _folder_name)
-
-
-# Directives for converting VCMIDLTool to Midl.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\midl.xml" for
-# the schema of the MSBuild Midl settings.
-
-_Same(_midl, 'AdditionalIncludeDirectories', _folder_list) # /I
-_Same(_midl, 'AdditionalOptions', _string_list)
-_Same(_midl, 'CPreprocessOptions', _string) # /cpp_opt
-_Same(_midl, 'ErrorCheckAllocations', _boolean) # /error allocation
-_Same(_midl, 'ErrorCheckBounds', _boolean) # /error bounds_check
-_Same(_midl, 'ErrorCheckEnumRange', _boolean) # /error enum
-_Same(_midl, 'ErrorCheckRefPointers', _boolean) # /error ref
-_Same(_midl, 'ErrorCheckStubData', _boolean) # /error stub_data
-_Same(_midl, 'GenerateStublessProxies', _boolean) # /Oicf
-_Same(_midl, 'GenerateTypeLibrary', _boolean)
-_Same(_midl, 'HeaderFileName', _file_name) # /h
-_Same(_midl, 'IgnoreStandardIncludePath', _boolean) # /no_def_idir
-_Same(_midl, 'InterfaceIdentifierFileName', _file_name) # /iid
-_Same(_midl, 'MkTypLibCompatible', _boolean) # /mktyplib203
-_Same(_midl, 'OutputDirectory', _string) # /out
-_Same(_midl, 'PreprocessorDefinitions', _string_list) # /D
-_Same(_midl, 'ProxyFileName', _file_name) # /proxy
-_Same(_midl, 'RedirectOutputAndErrors', _file_name) # /o
-_Same(_midl, 'SuppressStartupBanner', _boolean) # /nologo
-_Same(_midl, 'TypeLibraryName', _file_name) # /tlb
-_Same(_midl, 'UndefinePreprocessorDefinitions', _string_list) # /U
-_Same(_midl, 'WarnAsError', _boolean) # /WX
-
-_Same(_midl, 'DefaultCharType',
- _Enumeration(['Unsigned', # /char unsigned
- 'Signed', # /char signed
- 'Ascii'])) # /char ascii7
-_Same(_midl, 'TargetEnvironment',
- _Enumeration(['NotSet',
- 'Win32', # /env win32
- 'Itanium', # /env ia64
- 'X64'])) # /env x64
-_Same(_midl, 'EnableErrorChecks',
- _Enumeration(['EnableCustom',
- 'None', # /error none
- 'All'])) # /error all
-_Same(_midl, 'StructMemberAlignment',
- _Enumeration(['NotSet',
- '1', # Zp1
- '2', # Zp2
- '4', # Zp4
- '8'])) # Zp8
-_Same(_midl, 'WarningLevel',
- _Enumeration(['0', # /W0
- '1', # /W1
- '2', # /W2
- '3', # /W3
- '4'])) # /W4
-
-_Renamed(_midl, 'DLLDataFileName', 'DllDataFileName', _file_name) # /dlldata
-_Renamed(_midl, 'ValidateParameters', 'ValidateAllParameters',
- _boolean) # /robust
-
-# MSBuild options not found in MSVS.
-_MSBuildOnly(_midl, 'ApplicationConfigurationMode', _boolean) # /app_config
-_MSBuildOnly(_midl, 'ClientStubFile', _file_name) # /cstub
-_MSBuildOnly(_midl, 'GenerateClientFiles',
- _Enumeration([], new=['Stub', # /client stub
- 'None'])) # /client none
-_MSBuildOnly(_midl, 'GenerateServerFiles',
- _Enumeration([], new=['Stub', # /server stub
- 'None'])) # /server none
-_MSBuildOnly(_midl, 'LocaleID', _integer) # /lcid DECIMAL
-_MSBuildOnly(_midl, 'ServerStubFile', _file_name) # /sstub
-_MSBuildOnly(_midl, 'SuppressCompilerWarnings', _boolean) # /no_warn
-_MSBuildOnly(_midl, 'TrackerLogDirectory', _folder_name)
-_MSBuildOnly(_midl, 'TypeLibFormat',
- _Enumeration([], new=['NewFormat', # /newtlb
- 'OldFormat'])) # /oldtlb
-
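The _Enumeration directives above map MSVS's index-based values onto
MSBuild's named values: the MSVS setting is a decimal index into the listed
values, and the element at that index is the MSBuild name. A minimal sketch
under that reading; the real helper is defined earlier in this file and
reports out-of-range indices as warnings rather than raising:

def _convert_enum(values, msvs_value):
  # E.g. for DefaultCharType, '0' -> 'Unsigned', '1' -> 'Signed',
  # '2' -> 'Ascii'.  The real converter emits a warning such as
  # "index value (5) not in expected range [0, 3)" instead of raising.
  index = int(msvs_value)
  if not 0 <= index < len(values):
    raise ValueError('index value (%d) not in expected range [0, %d)' %
                     (index, len(values)))
  return values[index]

assert _convert_enum(['Unsigned', 'Signed', 'Ascii'], '1') == 'Signed'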
-
-# Directives for converting VCLibrarianTool to Lib.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\lib.xml" for
-# the schema of the MSBuild Lib settings.
-
-_Same(_lib, 'AdditionalDependencies', _file_list)
-_Same(_lib, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH
-_Same(_lib, 'AdditionalOptions', _string_list)
-_Same(_lib, 'ExportNamedFunctions', _string_list) # /EXPORT
-_Same(_lib, 'ForceSymbolReferences', _string) # /INCLUDE
-_Same(_lib, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB
-_Same(_lib, 'IgnoreSpecificDefaultLibraries', _file_list) # /NODEFAULTLIB
-_Same(_lib, 'ModuleDefinitionFile', _file_name) # /DEF
-_Same(_lib, 'OutputFile', _file_name) # /OUT
-_Same(_lib, 'SuppressStartupBanner', _boolean) # /NOLOGO
-_Same(_lib, 'UseUnicodeResponseFiles', _boolean)
-_Same(_lib, 'LinkTimeCodeGeneration', _boolean) # /LTCG
-_Same(_lib, 'TargetMachine', _target_machine_enumeration)
-
-# TODO(jeanluc) _link defines the same value that gets moved to
-# ProjectReference. We may want to validate that they are consistent.
-_Moved(_lib, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
-
-_MSBuildOnly(_lib, 'DisplayLibrary', _string) # /LIST Visible='false'
-_MSBuildOnly(_lib, 'ErrorReporting',
- _Enumeration([], new=['PromptImmediately', # /ERRORREPORT:PROMPT
- 'QueueForNextLogin', # /ERRORREPORT:QUEUE
- 'SendErrorReport', # /ERRORREPORT:SEND
- 'NoErrorReport'])) # /ERRORREPORT:NONE
-_MSBuildOnly(_lib, 'MinimumRequiredVersion', _string)
-_MSBuildOnly(_lib, 'Name', _file_name) # /NAME
-_MSBuildOnly(_lib, 'RemoveObjects', _file_list) # /REMOVE
-_MSBuildOnly(_lib, 'SubSystem', _subsystem_enumeration)
-_MSBuildOnly(_lib, 'TrackerLogDirectory', _folder_name)
-_MSBuildOnly(_lib, 'TreatLibWarningAsErrors', _boolean) # /WX
-_MSBuildOnly(_lib, 'Verbose', _boolean)
-
-
-# Directives for converting VCManifestTool to Mt.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\mt.xml" for
-# the schema of the MSBuild Mt settings.
-
-# Options that have the same name in MSVS and MSBuild
-_Same(_manifest, 'AdditionalManifestFiles', _file_list) # /manifest
-_Same(_manifest, 'AdditionalOptions', _string_list)
-_Same(_manifest, 'AssemblyIdentity', _string) # /identity:
-_Same(_manifest, 'ComponentFileName', _file_name) # /dll
-_Same(_manifest, 'GenerateCatalogFiles', _boolean) # /makecdfs
-_Same(_manifest, 'InputResourceManifests', _string) # /inputresource
-_Same(_manifest, 'OutputManifestFile', _file_name) # /out
-_Same(_manifest, 'RegistrarScriptFile', _file_name) # /rgs
-_Same(_manifest, 'ReplacementsFile', _file_name) # /replacements
-_Same(_manifest, 'SuppressStartupBanner', _boolean) # /nologo
-_Same(_manifest, 'TypeLibraryFile', _file_name) # /tlb:
-_Same(_manifest, 'UpdateFileHashes', _boolean) # /hashupdate
-_Same(_manifest, 'UpdateFileHashesSearchPath', _file_name)
-_Same(_manifest, 'VerboseOutput', _boolean) # /verbose
-
-# Options that have moved location.
-_MovedAndRenamed(_manifest, 'ManifestResourceFile',
- 'ManifestResourceCompile',
- 'ResourceOutputFileName',
- _file_name)
-_Moved(_manifest, 'EmbedManifest', '', _boolean)
-
-# MSVS options not found in MSBuild.
-_MSVSOnly(_manifest, 'DependencyInformationFile', _file_name)
-_MSVSOnly(_manifest, 'UseFAT32Workaround', _boolean)
-_MSVSOnly(_manifest, 'UseUnicodeResponseFiles', _boolean)
-
-# MSBuild options not found in MSVS.
-_MSBuildOnly(_manifest, 'EnableDPIAwareness', _boolean)
-_MSBuildOnly(_manifest, 'GenerateCategoryTags', _boolean) # /category
-_MSBuildOnly(_manifest, 'ManifestFromManagedAssembly',
- _file_name) # /managedassemblyname
-_MSBuildOnly(_manifest, 'OutputResourceManifests', _string) # /outputresource
-_MSBuildOnly(_manifest, 'SuppressDependencyElement', _boolean) # /nodependency
-_MSBuildOnly(_manifest, 'TrackerLogDirectory', _folder_name)
-
-
-# Directives for MASM.
-# See "$(VCTargetsPath)\BuildCustomizations\masm.xml" for the schema of the
-# MSBuild MASM settings.
-
-# Options that have the same name in MSVS and MSBuild.
-_Same(_masm, 'UseSafeExceptionHandlers', _boolean) # /safeseh
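These directive tables drive the module's conversion entry point. As the
unit tests below show, ConvertToMSBuildSettings takes a dictionary of MSVS
tool settings plus a stream for warnings and returns the equivalent MSBuild
dictionary; a small usage sketch, with values chosen to match conversions
the tests verify:

import StringIO

import gyp.MSVSSettings as MSVSSettings

stderr = StringIO.StringIO()
msbuild = MSVSSettings.ConvertToMSBuildSettings(
    {'VCCLCompilerTool': {'WarningLevel': '3'}}, stderr)
# Per the directive tables, the index '3' becomes a named value:
# {'ClCompile': {'WarningLevel': 'Level3'}}
print msbuild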
diff --git a/deps/gyp/pylib/gyp/MSVSSettings_test.py b/deps/gyp/pylib/gyp/MSVSSettings_test.py
deleted file mode 100755
index bf6ea6b802..0000000000
--- a/deps/gyp/pylib/gyp/MSVSSettings_test.py
+++ /dev/null
@@ -1,1483 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for the MSVSSettings.py file."""
-
-import StringIO
-import unittest
-import gyp.MSVSSettings as MSVSSettings
-
-
-class TestSequenceFunctions(unittest.TestCase):
-
- def setUp(self):
- self.stderr = StringIO.StringIO()
-
- def _ExpectedWarnings(self, expected):
- """Compares recorded lines to expected warnings."""
- self.stderr.seek(0)
- actual = self.stderr.read().split('\n')
- actual = [line for line in actual if line]
- self.assertEqual(sorted(expected), sorted(actual))
-
- def testValidateMSVSSettings_tool_names(self):
- """Tests that only MSVS tool names are allowed."""
- MSVSSettings.ValidateMSVSSettings(
- {'VCCLCompilerTool': {},
- 'VCLinkerTool': {},
- 'VCMIDLTool': {},
- 'foo': {},
- 'VCResourceCompilerTool': {},
- 'VCLibrarianTool': {},
- 'VCManifestTool': {},
- 'ClCompile': {}},
- self.stderr)
- self._ExpectedWarnings([
- 'Warning: unrecognized tool foo',
- 'Warning: unrecognized tool ClCompile'])
-
- def testValidateMSVSSettings_settings(self):
- """Tests that for invalid MSVS settings."""
- MSVSSettings.ValidateMSVSSettings(
- {'VCCLCompilerTool': {
- 'AdditionalIncludeDirectories': 'folder1;folder2',
- 'AdditionalOptions': ['string1', 'string2'],
- 'AdditionalUsingDirectories': 'folder1;folder2',
- 'AssemblerListingLocation': 'a_file_name',
- 'AssemblerOutput': '0',
- 'BasicRuntimeChecks': '5',
- 'BrowseInformation': 'fdkslj',
- 'BrowseInformationFile': 'a_file_name',
- 'BufferSecurityCheck': 'true',
- 'CallingConvention': '-1',
- 'CompileAs': '1',
- 'DebugInformationFormat': '2',
- 'DefaultCharIsUnsigned': 'true',
- 'Detect64BitPortabilityProblems': 'true',
- 'DisableLanguageExtensions': 'true',
- 'DisableSpecificWarnings': 'string1;string2',
- 'EnableEnhancedInstructionSet': '1',
- 'EnableFiberSafeOptimizations': 'true',
- 'EnableFunctionLevelLinking': 'true',
- 'EnableIntrinsicFunctions': 'true',
- 'EnablePREfast': 'true',
- 'Enableprefast': 'bogus',
- 'ErrorReporting': '1',
- 'ExceptionHandling': '1',
- 'ExpandAttributedSource': 'true',
- 'FavorSizeOrSpeed': '1',
- 'FloatingPointExceptions': 'true',
- 'FloatingPointModel': '1',
- 'ForceConformanceInForLoopScope': 'true',
- 'ForcedIncludeFiles': 'file1;file2',
- 'ForcedUsingFiles': 'file1;file2',
- 'GeneratePreprocessedFile': '1',
- 'GenerateXMLDocumentationFiles': 'true',
- 'IgnoreStandardIncludePath': 'true',
- 'InlineFunctionExpansion': '1',
- 'KeepComments': 'true',
- 'MinimalRebuild': 'true',
- 'ObjectFile': 'a_file_name',
- 'OmitDefaultLibName': 'true',
- 'OmitFramePointers': 'true',
- 'OpenMP': 'true',
- 'Optimization': '1',
- 'PrecompiledHeaderFile': 'a_file_name',
- 'PrecompiledHeaderThrough': 'a_file_name',
- 'PreprocessorDefinitions': 'string1;string2',
- 'ProgramDataBaseFileName': 'a_file_name',
- 'RuntimeLibrary': '1',
- 'RuntimeTypeInfo': 'true',
- 'ShowIncludes': 'true',
- 'SmallerTypeCheck': 'true',
- 'StringPooling': 'true',
- 'StructMemberAlignment': '1',
- 'SuppressStartupBanner': 'true',
- 'TreatWChar_tAsBuiltInType': 'true',
- 'UndefineAllPreprocessorDefinitions': 'true',
- 'UndefinePreprocessorDefinitions': 'string1;string2',
- 'UseFullPaths': 'true',
- 'UsePrecompiledHeader': '1',
- 'UseUnicodeResponseFiles': 'true',
- 'WarnAsError': 'true',
- 'WarningLevel': '1',
- 'WholeProgramOptimization': 'true',
- 'XMLDocumentationFileName': 'a_file_name',
- 'ZZXYZ': 'bogus'},
- 'VCLinkerTool': {
- 'AdditionalDependencies': 'file1;file2',
- 'AdditionalDependencies_excluded': 'file3',
- 'AdditionalLibraryDirectories': 'folder1;folder2',
- 'AdditionalManifestDependencies': 'file1;file2',
- 'AdditionalOptions': 'a string1',
- 'AddModuleNamesToAssembly': 'file1;file2',
- 'AllowIsolation': 'true',
- 'AssemblyDebug': '2',
- 'AssemblyLinkResource': 'file1;file2',
- 'BaseAddress': 'a string1',
- 'CLRImageType': '2',
- 'CLRThreadAttribute': '2',
- 'CLRUnmanagedCodeCheck': 'true',
- 'DataExecutionPrevention': '2',
- 'DelayLoadDLLs': 'file1;file2',
- 'DelaySign': 'true',
- 'Driver': '2',
- 'EmbedManagedResourceFile': 'file1;file2',
- 'EnableCOMDATFolding': '2',
- 'EnableUAC': 'true',
- 'EntryPointSymbol': 'a string1',
- 'ErrorReporting': '2',
- 'FixedBaseAddress': '2',
- 'ForceSymbolReferences': 'file1;file2',
- 'FunctionOrder': 'a_file_name',
- 'GenerateDebugInformation': 'true',
- 'GenerateManifest': 'true',
- 'GenerateMapFile': 'true',
- 'HeapCommitSize': 'a string1',
- 'HeapReserveSize': 'a string1',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreDefaultLibraryNames': 'file1;file2',
- 'IgnoreEmbeddedIDL': 'true',
- 'IgnoreImportLibrary': 'true',
- 'ImportLibrary': 'a_file_name',
- 'KeyContainer': 'a_file_name',
- 'KeyFile': 'a_file_name',
- 'LargeAddressAware': '2',
- 'LinkIncremental': '2',
- 'LinkLibraryDependencies': 'true',
- 'LinkTimeCodeGeneration': '2',
- 'ManifestFile': 'a_file_name',
- 'MapExports': 'true',
- 'MapFileName': 'a_file_name',
- 'MergedIDLBaseFileName': 'a_file_name',
- 'MergeSections': 'a string1',
- 'MidlCommandFile': 'a_file_name',
- 'ModuleDefinitionFile': 'a_file_name',
- 'OptimizeForWindows98': '1',
- 'OptimizeReferences': '2',
- 'OutputFile': 'a_file_name',
- 'PerUserRedirection': 'true',
- 'Profile': 'true',
- 'ProfileGuidedDatabase': 'a_file_name',
- 'ProgramDatabaseFile': 'a_file_name',
- 'RandomizedBaseAddress': '2',
- 'RegisterOutput': 'true',
- 'ResourceOnlyDLL': 'true',
- 'SetChecksum': 'true',
- 'ShowProgress': '2',
- 'StackCommitSize': 'a string1',
- 'StackReserveSize': 'a string1',
- 'StripPrivateSymbols': 'a_file_name',
- 'SubSystem': '2',
- 'SupportUnloadOfDelayLoadedDLL': 'true',
- 'SuppressStartupBanner': 'true',
- 'SwapRunFromCD': 'true',
- 'SwapRunFromNet': 'true',
- 'TargetMachine': '2',
- 'TerminalServerAware': '2',
- 'TurnOffAssemblyGeneration': 'true',
- 'TypeLibraryFile': 'a_file_name',
- 'TypeLibraryResourceID': '33',
- 'UACExecutionLevel': '2',
- 'UACUIAccess': 'true',
- 'UseLibraryDependencyInputs': 'true',
- 'UseUnicodeResponseFiles': 'true',
- 'Version': 'a string1'},
- 'VCMIDLTool': {
- 'AdditionalIncludeDirectories': 'folder1;folder2',
- 'AdditionalOptions': 'a string1',
- 'CPreprocessOptions': 'a string1',
- 'DefaultCharType': '1',
- 'DLLDataFileName': 'a_file_name',
- 'EnableErrorChecks': '1',
- 'ErrorCheckAllocations': 'true',
- 'ErrorCheckBounds': 'true',
- 'ErrorCheckEnumRange': 'true',
- 'ErrorCheckRefPointers': 'true',
- 'ErrorCheckStubData': 'true',
- 'GenerateStublessProxies': 'true',
- 'GenerateTypeLibrary': 'true',
- 'HeaderFileName': 'a_file_name',
- 'IgnoreStandardIncludePath': 'true',
- 'InterfaceIdentifierFileName': 'a_file_name',
- 'MkTypLibCompatible': 'true',
- 'notgood': 'bogus',
- 'OutputDirectory': 'a string1',
- 'PreprocessorDefinitions': 'string1;string2',
- 'ProxyFileName': 'a_file_name',
- 'RedirectOutputAndErrors': 'a_file_name',
- 'StructMemberAlignment': '1',
- 'SuppressStartupBanner': 'true',
- 'TargetEnvironment': '1',
- 'TypeLibraryName': 'a_file_name',
- 'UndefinePreprocessorDefinitions': 'string1;string2',
- 'ValidateParameters': 'true',
- 'WarnAsError': 'true',
- 'WarningLevel': '1'},
- 'VCResourceCompilerTool': {
- 'AdditionalOptions': 'a string1',
- 'AdditionalIncludeDirectories': 'folder1;folder2',
- 'Culture': '1003',
- 'IgnoreStandardIncludePath': 'true',
- 'notgood2': 'bogus',
- 'PreprocessorDefinitions': 'string1;string2',
- 'ResourceOutputFileName': 'a string1',
- 'ShowProgress': 'true',
- 'SuppressStartupBanner': 'true',
- 'UndefinePreprocessorDefinitions': 'string1;string2'},
- 'VCLibrarianTool': {
- 'AdditionalDependencies': 'file1;file2',
- 'AdditionalLibraryDirectories': 'folder1;folder2',
- 'AdditionalOptions': 'a string1',
- 'ExportNamedFunctions': 'string1;string2',
- 'ForceSymbolReferences': 'a string1',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreSpecificDefaultLibraries': 'file1;file2',
- 'LinkLibraryDependencies': 'true',
- 'ModuleDefinitionFile': 'a_file_name',
- 'OutputFile': 'a_file_name',
- 'SuppressStartupBanner': 'true',
- 'UseUnicodeResponseFiles': 'true'},
- 'VCManifestTool': {
- 'AdditionalManifestFiles': 'file1;file2',
- 'AdditionalOptions': 'a string1',
- 'AssemblyIdentity': 'a string1',
- 'ComponentFileName': 'a_file_name',
- 'DependencyInformationFile': 'a_file_name',
- 'GenerateCatalogFiles': 'true',
- 'InputResourceManifests': 'a string1',
- 'ManifestResourceFile': 'a_file_name',
- 'OutputManifestFile': 'a_file_name',
- 'RegistrarScriptFile': 'a_file_name',
- 'ReplacementsFile': 'a_file_name',
- 'SuppressStartupBanner': 'true',
- 'TypeLibraryFile': 'a_file_name',
- 'UpdateFileHashes': 'truel',
- 'UpdateFileHashesSearchPath': 'a_file_name',
- 'UseFAT32Workaround': 'true',
- 'UseUnicodeResponseFiles': 'true',
- 'VerboseOutput': 'true'}},
- self.stderr)
- self._ExpectedWarnings([
- 'Warning: for VCCLCompilerTool/BasicRuntimeChecks, '
- 'index value (5) not in expected range [0, 4)',
- 'Warning: for VCCLCompilerTool/BrowseInformation, '
- "invalid literal for int() with base 10: 'fdkslj'",
- 'Warning: for VCCLCompilerTool/CallingConvention, '
- 'index value (-1) not in expected range [0, 4)',
- 'Warning: for VCCLCompilerTool/DebugInformationFormat, '
- 'converted value for 2 not specified.',
- 'Warning: unrecognized setting VCCLCompilerTool/Enableprefast',
- 'Warning: unrecognized setting VCCLCompilerTool/ZZXYZ',
- 'Warning: for VCLinkerTool/TargetMachine, '
- 'converted value for 2 not specified.',
- 'Warning: unrecognized setting VCMIDLTool/notgood',
- 'Warning: unrecognized setting VCResourceCompilerTool/notgood2',
- 'Warning: for VCManifestTool/UpdateFileHashes, '
- "expected bool; got 'truel'"
- ''])
-
- def testValidateMSBuildSettings_settings(self):
- """Tests that for invalid MSBuild settings."""
- MSVSSettings.ValidateMSBuildSettings(
- {'ClCompile': {
- 'AdditionalIncludeDirectories': 'folder1;folder2',
- 'AdditionalOptions': ['string1', 'string2'],
- 'AdditionalUsingDirectories': 'folder1;folder2',
- 'AssemblerListingLocation': 'a_file_name',
- 'AssemblerOutput': 'NoListing',
- 'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
- 'BrowseInformation': 'false',
- 'BrowseInformationFile': 'a_file_name',
- 'BufferSecurityCheck': 'true',
- 'BuildingInIDE': 'true',
- 'CallingConvention': 'Cdecl',
- 'CompileAs': 'CompileAsC',
- 'CompileAsManaged': 'true',
- 'CreateHotpatchableImage': 'true',
- 'DebugInformationFormat': 'ProgramDatabase',
- 'DisableLanguageExtensions': 'true',
- 'DisableSpecificWarnings': 'string1;string2',
- 'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
- 'EnableFiberSafeOptimizations': 'true',
- 'EnablePREfast': 'true',
- 'Enableprefast': 'bogus',
- 'ErrorReporting': 'Prompt',
- 'ExceptionHandling': 'SyncCThrow',
- 'ExpandAttributedSource': 'true',
- 'FavorSizeOrSpeed': 'Neither',
- 'FloatingPointExceptions': 'true',
- 'FloatingPointModel': 'Precise',
- 'ForceConformanceInForLoopScope': 'true',
- 'ForcedIncludeFiles': 'file1;file2',
- 'ForcedUsingFiles': 'file1;file2',
- 'FunctionLevelLinking': 'false',
- 'GenerateXMLDocumentationFiles': 'true',
- 'IgnoreStandardIncludePath': 'true',
- 'InlineFunctionExpansion': 'OnlyExplicitInline',
- 'IntrinsicFunctions': 'false',
- 'MinimalRebuild': 'true',
- 'MultiProcessorCompilation': 'true',
- 'ObjectFileName': 'a_file_name',
- 'OmitDefaultLibName': 'true',
- 'OmitFramePointers': 'true',
- 'OpenMPSupport': 'true',
- 'Optimization': 'Disabled',
- 'PrecompiledHeader': 'NotUsing',
- 'PrecompiledHeaderFile': 'a_file_name',
- 'PrecompiledHeaderOutputFile': 'a_file_name',
- 'PreprocessKeepComments': 'true',
- 'PreprocessorDefinitions': 'string1;string2',
- 'PreprocessOutputPath': 'a string1',
- 'PreprocessSuppressLineNumbers': 'false',
- 'PreprocessToFile': 'false',
- 'ProcessorNumber': '33',
- 'ProgramDataBaseFileName': 'a_file_name',
- 'RuntimeLibrary': 'MultiThreaded',
- 'RuntimeTypeInfo': 'true',
- 'ShowIncludes': 'true',
- 'SmallerTypeCheck': 'true',
- 'StringPooling': 'true',
- 'StructMemberAlignment': '1Byte',
- 'SuppressStartupBanner': 'true',
- 'TrackerLogDirectory': 'a_folder',
- 'TreatSpecificWarningsAsErrors': 'string1;string2',
- 'TreatWarningAsError': 'true',
- 'TreatWChar_tAsBuiltInType': 'true',
- 'UndefineAllPreprocessorDefinitions': 'true',
- 'UndefinePreprocessorDefinitions': 'string1;string2',
- 'UseFullPaths': 'true',
- 'UseUnicodeForAssemblerListing': 'true',
- 'WarningLevel': 'TurnOffAllWarnings',
- 'WholeProgramOptimization': 'true',
- 'XMLDocumentationFileName': 'a_file_name',
- 'ZZXYZ': 'bogus'},
- 'Link': {
- 'AdditionalDependencies': 'file1;file2',
- 'AdditionalLibraryDirectories': 'folder1;folder2',
- 'AdditionalManifestDependencies': 'file1;file2',
- 'AdditionalOptions': 'a string1',
- 'AddModuleNamesToAssembly': 'file1;file2',
- 'AllowIsolation': 'true',
- 'AssemblyDebug': '',
- 'AssemblyLinkResource': 'file1;file2',
- 'BaseAddress': 'a string1',
- 'BuildingInIDE': 'true',
- 'CLRImageType': 'ForceIJWImage',
- 'CLRSupportLastError': 'Enabled',
- 'CLRThreadAttribute': 'MTAThreadingAttribute',
- 'CLRUnmanagedCodeCheck': 'true',
- 'CreateHotPatchableImage': 'X86Image',
- 'DataExecutionPrevention': 'false',
- 'DelayLoadDLLs': 'file1;file2',
- 'DelaySign': 'true',
- 'Driver': 'NotSet',
- 'EmbedManagedResourceFile': 'file1;file2',
- 'EnableCOMDATFolding': 'false',
- 'EnableUAC': 'true',
- 'EntryPointSymbol': 'a string1',
- 'FixedBaseAddress': 'false',
- 'ForceFileOutput': 'Enabled',
- 'ForceSymbolReferences': 'file1;file2',
- 'FunctionOrder': 'a_file_name',
- 'GenerateDebugInformation': 'true',
- 'GenerateMapFile': 'true',
- 'HeapCommitSize': 'a string1',
- 'HeapReserveSize': 'a string1',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreEmbeddedIDL': 'true',
- 'IgnoreSpecificDefaultLibraries': 'a_file_list',
- 'ImageHasSafeExceptionHandlers': 'true',
- 'ImportLibrary': 'a_file_name',
- 'KeyContainer': 'a_file_name',
- 'KeyFile': 'a_file_name',
- 'LargeAddressAware': 'false',
- 'LinkDLL': 'true',
- 'LinkErrorReporting': 'SendErrorReport',
- 'LinkStatus': 'true',
- 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
- 'ManifestFile': 'a_file_name',
- 'MapExports': 'true',
- 'MapFileName': 'a_file_name',
- 'MergedIDLBaseFileName': 'a_file_name',
- 'MergeSections': 'a string1',
- 'MidlCommandFile': 'a_file_name',
- 'MinimumRequiredVersion': 'a string1',
- 'ModuleDefinitionFile': 'a_file_name',
- 'MSDOSStubFileName': 'a_file_name',
- 'NoEntryPoint': 'true',
- 'OptimizeReferences': 'false',
- 'OutputFile': 'a_file_name',
- 'PerUserRedirection': 'true',
- 'PreventDllBinding': 'true',
- 'Profile': 'true',
- 'ProfileGuidedDatabase': 'a_file_name',
- 'ProgramDatabaseFile': 'a_file_name',
- 'RandomizedBaseAddress': 'false',
- 'RegisterOutput': 'true',
- 'SectionAlignment': '33',
- 'SetChecksum': 'true',
- 'ShowProgress': 'LinkVerboseREF',
- 'SpecifySectionAttributes': 'a string1',
- 'StackCommitSize': 'a string1',
- 'StackReserveSize': 'a string1',
- 'StripPrivateSymbols': 'a_file_name',
- 'SubSystem': 'Console',
- 'SupportNobindOfDelayLoadedDLL': 'true',
- 'SupportUnloadOfDelayLoadedDLL': 'true',
- 'SuppressStartupBanner': 'true',
- 'SwapRunFromCD': 'true',
- 'SwapRunFromNET': 'true',
- 'TargetMachine': 'MachineX86',
- 'TerminalServerAware': 'false',
- 'TrackerLogDirectory': 'a_folder',
- 'TreatLinkerWarningAsErrors': 'true',
- 'TurnOffAssemblyGeneration': 'true',
- 'TypeLibraryFile': 'a_file_name',
- 'TypeLibraryResourceID': '33',
- 'UACExecutionLevel': 'AsInvoker',
- 'UACUIAccess': 'true',
- 'Version': 'a string1'},
- 'ResourceCompile': {
- 'AdditionalIncludeDirectories': 'folder1;folder2',
- 'AdditionalOptions': 'a string1',
- 'Culture': '0x236',
- 'IgnoreStandardIncludePath': 'true',
- 'NullTerminateStrings': 'true',
- 'PreprocessorDefinitions': 'string1;string2',
- 'ResourceOutputFileName': 'a string1',
- 'ShowProgress': 'true',
- 'SuppressStartupBanner': 'true',
- 'TrackerLogDirectory': 'a_folder',
- 'UndefinePreprocessorDefinitions': 'string1;string2'},
- 'Midl': {
- 'AdditionalIncludeDirectories': 'folder1;folder2',
- 'AdditionalOptions': 'a string1',
- 'ApplicationConfigurationMode': 'true',
- 'ClientStubFile': 'a_file_name',
- 'CPreprocessOptions': 'a string1',
- 'DefaultCharType': 'Signed',
- 'DllDataFileName': 'a_file_name',
- 'EnableErrorChecks': 'EnableCustom',
- 'ErrorCheckAllocations': 'true',
- 'ErrorCheckBounds': 'true',
- 'ErrorCheckEnumRange': 'true',
- 'ErrorCheckRefPointers': 'true',
- 'ErrorCheckStubData': 'true',
- 'GenerateClientFiles': 'Stub',
- 'GenerateServerFiles': 'None',
- 'GenerateStublessProxies': 'true',
- 'GenerateTypeLibrary': 'true',
- 'HeaderFileName': 'a_file_name',
- 'IgnoreStandardIncludePath': 'true',
- 'InterfaceIdentifierFileName': 'a_file_name',
- 'LocaleID': '33',
- 'MkTypLibCompatible': 'true',
- 'OutputDirectory': 'a string1',
- 'PreprocessorDefinitions': 'string1;string2',
- 'ProxyFileName': 'a_file_name',
- 'RedirectOutputAndErrors': 'a_file_name',
- 'ServerStubFile': 'a_file_name',
- 'StructMemberAlignment': 'NotSet',
- 'SuppressCompilerWarnings': 'true',
- 'SuppressStartupBanner': 'true',
- 'TargetEnvironment': 'Itanium',
- 'TrackerLogDirectory': 'a_folder',
- 'TypeLibFormat': 'NewFormat',
- 'TypeLibraryName': 'a_file_name',
- 'UndefinePreprocessorDefinitions': 'string1;string2',
- 'ValidateAllParameters': 'true',
- 'WarnAsError': 'true',
- 'WarningLevel': '1'},
- 'Lib': {
- 'AdditionalDependencies': 'file1;file2',
- 'AdditionalLibraryDirectories': 'folder1;folder2',
- 'AdditionalOptions': 'a string1',
- 'DisplayLibrary': 'a string1',
- 'ErrorReporting': 'PromptImmediately',
- 'ExportNamedFunctions': 'string1;string2',
- 'ForceSymbolReferences': 'a string1',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreSpecificDefaultLibraries': 'file1;file2',
- 'LinkTimeCodeGeneration': 'true',
- 'MinimumRequiredVersion': 'a string1',
- 'ModuleDefinitionFile': 'a_file_name',
- 'Name': 'a_file_name',
- 'OutputFile': 'a_file_name',
- 'RemoveObjects': 'file1;file2',
- 'SubSystem': 'Console',
- 'SuppressStartupBanner': 'true',
- 'TargetMachine': 'MachineX86i',
- 'TrackerLogDirectory': 'a_folder',
- 'TreatLibWarningAsErrors': 'true',
- 'UseUnicodeResponseFiles': 'true',
- 'Verbose': 'true'},
- 'Manifest': {
- 'AdditionalManifestFiles': 'file1;file2',
- 'AdditionalOptions': 'a string1',
- 'AssemblyIdentity': 'a string1',
- 'ComponentFileName': 'a_file_name',
- 'EnableDPIAwareness': 'fal',
- 'GenerateCatalogFiles': 'truel',
- 'GenerateCategoryTags': 'true',
- 'InputResourceManifests': 'a string1',
- 'ManifestFromManagedAssembly': 'a_file_name',
- 'notgood3': 'bogus',
- 'OutputManifestFile': 'a_file_name',
- 'OutputResourceManifests': 'a string1',
- 'RegistrarScriptFile': 'a_file_name',
- 'ReplacementsFile': 'a_file_name',
- 'SuppressDependencyElement': 'true',
- 'SuppressStartupBanner': 'true',
- 'TrackerLogDirectory': 'a_folder',
- 'TypeLibraryFile': 'a_file_name',
- 'UpdateFileHashes': 'true',
- 'UpdateFileHashesSearchPath': 'a_file_name',
- 'VerboseOutput': 'true'},
- 'ProjectReference': {
- 'LinkLibraryDependencies': 'true',
- 'UseLibraryDependencyInputs': 'true'},
- 'ManifestResourceCompile': {
- 'ResourceOutputFileName': 'a_file_name'},
- '': {
- 'EmbedManifest': 'true',
- 'GenerateManifest': 'true',
- 'IgnoreImportLibrary': 'true',
- 'LinkIncremental': 'false'}},
- self.stderr)
- self._ExpectedWarnings([
- 'Warning: unrecognized setting ClCompile/Enableprefast',
- 'Warning: unrecognized setting ClCompile/ZZXYZ',
- 'Warning: unrecognized setting Manifest/notgood3',
- 'Warning: for Manifest/GenerateCatalogFiles, '
- "expected bool; got 'truel'",
- 'Warning: for Lib/TargetMachine, unrecognized enumerated value '
- 'MachineX86i',
- "Warning: for Manifest/EnableDPIAwareness, expected bool; got 'fal'"])
-
- def testConvertToMSBuildSettings_empty(self):
- """Tests an empty conversion."""
- msvs_settings = {}
- expected_msbuild_settings = {}
- actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
- msvs_settings,
- self.stderr)
- self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
- self._ExpectedWarnings([])
-
- def testConvertToMSBuildSettings_minimal(self):
- """Tests a minimal conversion."""
- msvs_settings = {
- 'VCCLCompilerTool': {
- 'AdditionalIncludeDirectories': 'dir1',
- 'AdditionalOptions': '/foo',
- 'BasicRuntimeChecks': '0',
- },
- 'VCLinkerTool': {
- 'LinkTimeCodeGeneration': '1',
- 'ErrorReporting': '1',
- 'DataExecutionPrevention': '2',
- },
- }
- expected_msbuild_settings = {
- 'ClCompile': {
- 'AdditionalIncludeDirectories': 'dir1',
- 'AdditionalOptions': '/foo',
- 'BasicRuntimeChecks': 'Default',
- },
- 'Link': {
- 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
- 'LinkErrorReporting': 'PromptImmediately',
- 'DataExecutionPrevention': 'true',
- },
- }
- actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
- msvs_settings,
- self.stderr)
- self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
- self._ExpectedWarnings([])
-
- def testConvertToMSBuildSettings_warnings(self):
- """Tests conversion that generates warnings."""
- msvs_settings = {
- 'VCCLCompilerTool': {
- 'AdditionalIncludeDirectories': '1',
- 'AdditionalOptions': '2',
- # These are incorrect values:
- 'BasicRuntimeChecks': '12',
- 'BrowseInformation': '21',
- 'UsePrecompiledHeader': '13',
- 'GeneratePreprocessedFile': '14'},
- 'VCLinkerTool': {
- # These are incorrect values:
- 'Driver': '10',
- 'LinkTimeCodeGeneration': '31',
- 'ErrorReporting': '21',
- 'FixedBaseAddress': '6'},
- 'VCResourceCompilerTool': {
- # Custom
- 'Culture': '1003'}}
- expected_msbuild_settings = {
- 'ClCompile': {
- 'AdditionalIncludeDirectories': '1',
- 'AdditionalOptions': '2'},
- 'Link': {},
- 'ResourceCompile': {
- # Custom
- 'Culture': '0x03eb'}}
- actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
- msvs_settings,
- self.stderr)
- self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
- self._ExpectedWarnings([
- 'Warning: while converting VCCLCompilerTool/BasicRuntimeChecks to '
- 'MSBuild, index value (12) not in expected range [0, 4)',
- 'Warning: while converting VCCLCompilerTool/BrowseInformation to '
- 'MSBuild, index value (21) not in expected range [0, 3)',
- 'Warning: while converting VCCLCompilerTool/UsePrecompiledHeader to '
- 'MSBuild, index value (13) not in expected range [0, 3)',
- 'Warning: while converting VCCLCompilerTool/GeneratePreprocessedFile to '
- 'MSBuild, value must be one of [0, 1, 2]; got 14',
-
- 'Warning: while converting VCLinkerTool/Driver to '
- 'MSBuild, index value (10) not in expected range [0, 4)',
- 'Warning: while converting VCLinkerTool/LinkTimeCodeGeneration to '
- 'MSBuild, index value (31) not in expected range [0, 5)',
- 'Warning: while converting VCLinkerTool/ErrorReporting to '
- 'MSBuild, index value (21) not in expected range [0, 3)',
- 'Warning: while converting VCLinkerTool/FixedBaseAddress to '
- 'MSBuild, index value (6) not in expected range [0, 3)',
- ])
-
- def testConvertToMSBuildSettings_full_synthetic(self):
- """Tests conversion of all the MSBuild settings."""
- msvs_settings = {
- 'VCCLCompilerTool': {
- 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
- 'AdditionalOptions': 'a_string',
- 'AdditionalUsingDirectories': 'folder1;folder2;folder3',
- 'AssemblerListingLocation': 'a_file_name',
- 'AssemblerOutput': '0',
- 'BasicRuntimeChecks': '1',
- 'BrowseInformation': '2',
- 'BrowseInformationFile': 'a_file_name',
- 'BufferSecurityCheck': 'true',
- 'CallingConvention': '0',
- 'CompileAs': '1',
- 'DebugInformationFormat': '4',
- 'DefaultCharIsUnsigned': 'true',
- 'Detect64BitPortabilityProblems': 'true',
- 'DisableLanguageExtensions': 'true',
- 'DisableSpecificWarnings': 'd1;d2;d3',
- 'EnableEnhancedInstructionSet': '0',
- 'EnableFiberSafeOptimizations': 'true',
- 'EnableFunctionLevelLinking': 'true',
- 'EnableIntrinsicFunctions': 'true',
- 'EnablePREfast': 'true',
- 'ErrorReporting': '1',
- 'ExceptionHandling': '2',
- 'ExpandAttributedSource': 'true',
- 'FavorSizeOrSpeed': '0',
- 'FloatingPointExceptions': 'true',
- 'FloatingPointModel': '1',
- 'ForceConformanceInForLoopScope': 'true',
- 'ForcedIncludeFiles': 'file1;file2;file3',
- 'ForcedUsingFiles': 'file1;file2;file3',
- 'GeneratePreprocessedFile': '1',
- 'GenerateXMLDocumentationFiles': 'true',
- 'IgnoreStandardIncludePath': 'true',
- 'InlineFunctionExpansion': '2',
- 'KeepComments': 'true',
- 'MinimalRebuild': 'true',
- 'ObjectFile': 'a_file_name',
- 'OmitDefaultLibName': 'true',
- 'OmitFramePointers': 'true',
- 'OpenMP': 'true',
- 'Optimization': '3',
- 'PrecompiledHeaderFile': 'a_file_name',
- 'PrecompiledHeaderThrough': 'a_file_name',
- 'PreprocessorDefinitions': 'd1;d2;d3',
- 'ProgramDataBaseFileName': 'a_file_name',
- 'RuntimeLibrary': '0',
- 'RuntimeTypeInfo': 'true',
- 'ShowIncludes': 'true',
- 'SmallerTypeCheck': 'true',
- 'StringPooling': 'true',
- 'StructMemberAlignment': '1',
- 'SuppressStartupBanner': 'true',
- 'TreatWChar_tAsBuiltInType': 'true',
- 'UndefineAllPreprocessorDefinitions': 'true',
- 'UndefinePreprocessorDefinitions': 'd1;d2;d3',
- 'UseFullPaths': 'true',
- 'UsePrecompiledHeader': '1',
- 'UseUnicodeResponseFiles': 'true',
- 'WarnAsError': 'true',
- 'WarningLevel': '2',
- 'WholeProgramOptimization': 'true',
- 'XMLDocumentationFileName': 'a_file_name'},
- 'VCLinkerTool': {
- 'AdditionalDependencies': 'file1;file2;file3',
- 'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
- 'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
- 'AdditionalManifestDependencies': 'file1;file2;file3',
- 'AdditionalOptions': 'a_string',
- 'AddModuleNamesToAssembly': 'file1;file2;file3',
- 'AllowIsolation': 'true',
- 'AssemblyDebug': '0',
- 'AssemblyLinkResource': 'file1;file2;file3',
- 'BaseAddress': 'a_string',
- 'CLRImageType': '1',
- 'CLRThreadAttribute': '2',
- 'CLRUnmanagedCodeCheck': 'true',
- 'DataExecutionPrevention': '0',
- 'DelayLoadDLLs': 'file1;file2;file3',
- 'DelaySign': 'true',
- 'Driver': '1',
- 'EmbedManagedResourceFile': 'file1;file2;file3',
- 'EnableCOMDATFolding': '0',
- 'EnableUAC': 'true',
- 'EntryPointSymbol': 'a_string',
- 'ErrorReporting': '0',
- 'FixedBaseAddress': '1',
- 'ForceSymbolReferences': 'file1;file2;file3',
- 'FunctionOrder': 'a_file_name',
- 'GenerateDebugInformation': 'true',
- 'GenerateManifest': 'true',
- 'GenerateMapFile': 'true',
- 'HeapCommitSize': 'a_string',
- 'HeapReserveSize': 'a_string',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreDefaultLibraryNames': 'file1;file2;file3',
- 'IgnoreEmbeddedIDL': 'true',
- 'IgnoreImportLibrary': 'true',
- 'ImportLibrary': 'a_file_name',
- 'KeyContainer': 'a_file_name',
- 'KeyFile': 'a_file_name',
- 'LargeAddressAware': '2',
- 'LinkIncremental': '1',
- 'LinkLibraryDependencies': 'true',
- 'LinkTimeCodeGeneration': '2',
- 'ManifestFile': 'a_file_name',
- 'MapExports': 'true',
- 'MapFileName': 'a_file_name',
- 'MergedIDLBaseFileName': 'a_file_name',
- 'MergeSections': 'a_string',
- 'MidlCommandFile': 'a_file_name',
- 'ModuleDefinitionFile': 'a_file_name',
- 'OptimizeForWindows98': '1',
- 'OptimizeReferences': '0',
- 'OutputFile': 'a_file_name',
- 'PerUserRedirection': 'true',
- 'Profile': 'true',
- 'ProfileGuidedDatabase': 'a_file_name',
- 'ProgramDatabaseFile': 'a_file_name',
- 'RandomizedBaseAddress': '1',
- 'RegisterOutput': 'true',
- 'ResourceOnlyDLL': 'true',
- 'SetChecksum': 'true',
- 'ShowProgress': '0',
- 'StackCommitSize': 'a_string',
- 'StackReserveSize': 'a_string',
- 'StripPrivateSymbols': 'a_file_name',
- 'SubSystem': '2',
- 'SupportUnloadOfDelayLoadedDLL': 'true',
- 'SuppressStartupBanner': 'true',
- 'SwapRunFromCD': 'true',
- 'SwapRunFromNet': 'true',
- 'TargetMachine': '3',
- 'TerminalServerAware': '2',
- 'TurnOffAssemblyGeneration': 'true',
- 'TypeLibraryFile': 'a_file_name',
- 'TypeLibraryResourceID': '33',
- 'UACExecutionLevel': '1',
- 'UACUIAccess': 'true',
- 'UseLibraryDependencyInputs': 'false',
- 'UseUnicodeResponseFiles': 'true',
- 'Version': 'a_string'},
- 'VCResourceCompilerTool': {
- 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
- 'AdditionalOptions': 'a_string',
- 'Culture': '1003',
- 'IgnoreStandardIncludePath': 'true',
- 'PreprocessorDefinitions': 'd1;d2;d3',
- 'ResourceOutputFileName': 'a_string',
- 'ShowProgress': 'true',
- 'SuppressStartupBanner': 'true',
- 'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
- 'VCMIDLTool': {
- 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
- 'AdditionalOptions': 'a_string',
- 'CPreprocessOptions': 'a_string',
- 'DefaultCharType': '0',
- 'DLLDataFileName': 'a_file_name',
- 'EnableErrorChecks': '2',
- 'ErrorCheckAllocations': 'true',
- 'ErrorCheckBounds': 'true',
- 'ErrorCheckEnumRange': 'true',
- 'ErrorCheckRefPointers': 'true',
- 'ErrorCheckStubData': 'true',
- 'GenerateStublessProxies': 'true',
- 'GenerateTypeLibrary': 'true',
- 'HeaderFileName': 'a_file_name',
- 'IgnoreStandardIncludePath': 'true',
- 'InterfaceIdentifierFileName': 'a_file_name',
- 'MkTypLibCompatible': 'true',
- 'OutputDirectory': 'a_string',
- 'PreprocessorDefinitions': 'd1;d2;d3',
- 'ProxyFileName': 'a_file_name',
- 'RedirectOutputAndErrors': 'a_file_name',
- 'StructMemberAlignment': '3',
- 'SuppressStartupBanner': 'true',
- 'TargetEnvironment': '1',
- 'TypeLibraryName': 'a_file_name',
- 'UndefinePreprocessorDefinitions': 'd1;d2;d3',
- 'ValidateParameters': 'true',
- 'WarnAsError': 'true',
- 'WarningLevel': '4'},
- 'VCLibrarianTool': {
- 'AdditionalDependencies': 'file1;file2;file3',
- 'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
- 'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
- 'AdditionalOptions': 'a_string',
- 'ExportNamedFunctions': 'd1;d2;d3',
- 'ForceSymbolReferences': 'a_string',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
- 'LinkLibraryDependencies': 'true',
- 'ModuleDefinitionFile': 'a_file_name',
- 'OutputFile': 'a_file_name',
- 'SuppressStartupBanner': 'true',
- 'UseUnicodeResponseFiles': 'true'},
- 'VCManifestTool': {
- 'AdditionalManifestFiles': 'file1;file2;file3',
- 'AdditionalOptions': 'a_string',
- 'AssemblyIdentity': 'a_string',
- 'ComponentFileName': 'a_file_name',
- 'DependencyInformationFile': 'a_file_name',
- 'EmbedManifest': 'true',
- 'GenerateCatalogFiles': 'true',
- 'InputResourceManifests': 'a_string',
- 'ManifestResourceFile': 'my_name',
- 'OutputManifestFile': 'a_file_name',
- 'RegistrarScriptFile': 'a_file_name',
- 'ReplacementsFile': 'a_file_name',
- 'SuppressStartupBanner': 'true',
- 'TypeLibraryFile': 'a_file_name',
- 'UpdateFileHashes': 'true',
- 'UpdateFileHashesSearchPath': 'a_file_name',
- 'UseFAT32Workaround': 'true',
- 'UseUnicodeResponseFiles': 'true',
- 'VerboseOutput': 'true'}}
- expected_msbuild_settings = {
- 'ClCompile': {
- 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
- 'AdditionalOptions': 'a_string /J',
- 'AdditionalUsingDirectories': 'folder1;folder2;folder3',
- 'AssemblerListingLocation': 'a_file_name',
- 'AssemblerOutput': 'NoListing',
- 'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
- 'BrowseInformation': 'true',
- 'BrowseInformationFile': 'a_file_name',
- 'BufferSecurityCheck': 'true',
- 'CallingConvention': 'Cdecl',
- 'CompileAs': 'CompileAsC',
- 'DebugInformationFormat': 'EditAndContinue',
- 'DisableLanguageExtensions': 'true',
- 'DisableSpecificWarnings': 'd1;d2;d3',
- 'EnableEnhancedInstructionSet': 'NotSet',
- 'EnableFiberSafeOptimizations': 'true',
- 'EnablePREfast': 'true',
- 'ErrorReporting': 'Prompt',
- 'ExceptionHandling': 'Async',
- 'ExpandAttributedSource': 'true',
- 'FavorSizeOrSpeed': 'Neither',
- 'FloatingPointExceptions': 'true',
- 'FloatingPointModel': 'Strict',
- 'ForceConformanceInForLoopScope': 'true',
- 'ForcedIncludeFiles': 'file1;file2;file3',
- 'ForcedUsingFiles': 'file1;file2;file3',
- 'FunctionLevelLinking': 'true',
- 'GenerateXMLDocumentationFiles': 'true',
- 'IgnoreStandardIncludePath': 'true',
- 'InlineFunctionExpansion': 'AnySuitable',
- 'IntrinsicFunctions': 'true',
- 'MinimalRebuild': 'true',
- 'ObjectFileName': 'a_file_name',
- 'OmitDefaultLibName': 'true',
- 'OmitFramePointers': 'true',
- 'OpenMPSupport': 'true',
- 'Optimization': 'Full',
- 'PrecompiledHeader': 'Create',
- 'PrecompiledHeaderFile': 'a_file_name',
- 'PrecompiledHeaderOutputFile': 'a_file_name',
- 'PreprocessKeepComments': 'true',
- 'PreprocessorDefinitions': 'd1;d2;d3',
- 'PreprocessSuppressLineNumbers': 'false',
- 'PreprocessToFile': 'true',
- 'ProgramDataBaseFileName': 'a_file_name',
- 'RuntimeLibrary': 'MultiThreaded',
- 'RuntimeTypeInfo': 'true',
- 'ShowIncludes': 'true',
- 'SmallerTypeCheck': 'true',
- 'StringPooling': 'true',
- 'StructMemberAlignment': '1Byte',
- 'SuppressStartupBanner': 'true',
- 'TreatWarningAsError': 'true',
- 'TreatWChar_tAsBuiltInType': 'true',
- 'UndefineAllPreprocessorDefinitions': 'true',
- 'UndefinePreprocessorDefinitions': 'd1;d2;d3',
- 'UseFullPaths': 'true',
- 'WarningLevel': 'Level2',
- 'WholeProgramOptimization': 'true',
- 'XMLDocumentationFileName': 'a_file_name'},
- 'Link': {
- 'AdditionalDependencies': 'file1;file2;file3',
- 'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
- 'AdditionalManifestDependencies': 'file1;file2;file3',
- 'AdditionalOptions': 'a_string',
- 'AddModuleNamesToAssembly': 'file1;file2;file3',
- 'AllowIsolation': 'true',
- 'AssemblyDebug': '',
- 'AssemblyLinkResource': 'file1;file2;file3',
- 'BaseAddress': 'a_string',
- 'CLRImageType': 'ForceIJWImage',
- 'CLRThreadAttribute': 'STAThreadingAttribute',
- 'CLRUnmanagedCodeCheck': 'true',
- 'DataExecutionPrevention': '',
- 'DelayLoadDLLs': 'file1;file2;file3',
- 'DelaySign': 'true',
- 'Driver': 'Driver',
- 'EmbedManagedResourceFile': 'file1;file2;file3',
- 'EnableCOMDATFolding': '',
- 'EnableUAC': 'true',
- 'EntryPointSymbol': 'a_string',
- 'FixedBaseAddress': 'false',
- 'ForceSymbolReferences': 'file1;file2;file3',
- 'FunctionOrder': 'a_file_name',
- 'GenerateDebugInformation': 'true',
- 'GenerateMapFile': 'true',
- 'HeapCommitSize': 'a_string',
- 'HeapReserveSize': 'a_string',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreEmbeddedIDL': 'true',
- 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
- 'ImportLibrary': 'a_file_name',
- 'KeyContainer': 'a_file_name',
- 'KeyFile': 'a_file_name',
- 'LargeAddressAware': 'true',
- 'LinkErrorReporting': 'NoErrorReport',
- 'LinkTimeCodeGeneration': 'PGInstrument',
- 'ManifestFile': 'a_file_name',
- 'MapExports': 'true',
- 'MapFileName': 'a_file_name',
- 'MergedIDLBaseFileName': 'a_file_name',
- 'MergeSections': 'a_string',
- 'MidlCommandFile': 'a_file_name',
- 'ModuleDefinitionFile': 'a_file_name',
- 'NoEntryPoint': 'true',
- 'OptimizeReferences': '',
- 'OutputFile': 'a_file_name',
- 'PerUserRedirection': 'true',
- 'Profile': 'true',
- 'ProfileGuidedDatabase': 'a_file_name',
- 'ProgramDatabaseFile': 'a_file_name',
- 'RandomizedBaseAddress': 'false',
- 'RegisterOutput': 'true',
- 'SetChecksum': 'true',
- 'ShowProgress': 'NotSet',
- 'StackCommitSize': 'a_string',
- 'StackReserveSize': 'a_string',
- 'StripPrivateSymbols': 'a_file_name',
- 'SubSystem': 'Windows',
- 'SupportUnloadOfDelayLoadedDLL': 'true',
- 'SuppressStartupBanner': 'true',
- 'SwapRunFromCD': 'true',
- 'SwapRunFromNET': 'true',
- 'TargetMachine': 'MachineARM',
- 'TerminalServerAware': 'true',
- 'TurnOffAssemblyGeneration': 'true',
- 'TypeLibraryFile': 'a_file_name',
- 'TypeLibraryResourceID': '33',
- 'UACExecutionLevel': 'HighestAvailable',
- 'UACUIAccess': 'true',
- 'Version': 'a_string'},
- 'ResourceCompile': {
- 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
- 'AdditionalOptions': 'a_string',
- 'Culture': '0x03eb',
- 'IgnoreStandardIncludePath': 'true',
- 'PreprocessorDefinitions': 'd1;d2;d3',
- 'ResourceOutputFileName': 'a_string',
- 'ShowProgress': 'true',
- 'SuppressStartupBanner': 'true',
- 'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
- 'Midl': {
- 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
- 'AdditionalOptions': 'a_string',
- 'CPreprocessOptions': 'a_string',
- 'DefaultCharType': 'Unsigned',
- 'DllDataFileName': 'a_file_name',
- 'EnableErrorChecks': 'All',
- 'ErrorCheckAllocations': 'true',
- 'ErrorCheckBounds': 'true',
- 'ErrorCheckEnumRange': 'true',
- 'ErrorCheckRefPointers': 'true',
- 'ErrorCheckStubData': 'true',
- 'GenerateStublessProxies': 'true',
- 'GenerateTypeLibrary': 'true',
- 'HeaderFileName': 'a_file_name',
- 'IgnoreStandardIncludePath': 'true',
- 'InterfaceIdentifierFileName': 'a_file_name',
- 'MkTypLibCompatible': 'true',
- 'OutputDirectory': 'a_string',
- 'PreprocessorDefinitions': 'd1;d2;d3',
- 'ProxyFileName': 'a_file_name',
- 'RedirectOutputAndErrors': 'a_file_name',
- 'StructMemberAlignment': '4',
- 'SuppressStartupBanner': 'true',
- 'TargetEnvironment': 'Win32',
- 'TypeLibraryName': 'a_file_name',
- 'UndefinePreprocessorDefinitions': 'd1;d2;d3',
- 'ValidateAllParameters': 'true',
- 'WarnAsError': 'true',
- 'WarningLevel': '4'},
- 'Lib': {
- 'AdditionalDependencies': 'file1;file2;file3',
- 'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
- 'AdditionalOptions': 'a_string',
- 'ExportNamedFunctions': 'd1;d2;d3',
- 'ForceSymbolReferences': 'a_string',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
- 'ModuleDefinitionFile': 'a_file_name',
- 'OutputFile': 'a_file_name',
- 'SuppressStartupBanner': 'true',
- 'UseUnicodeResponseFiles': 'true'},
- 'Manifest': {
- 'AdditionalManifestFiles': 'file1;file2;file3',
- 'AdditionalOptions': 'a_string',
- 'AssemblyIdentity': 'a_string',
- 'ComponentFileName': 'a_file_name',
- 'GenerateCatalogFiles': 'true',
- 'InputResourceManifests': 'a_string',
- 'OutputManifestFile': 'a_file_name',
- 'RegistrarScriptFile': 'a_file_name',
- 'ReplacementsFile': 'a_file_name',
- 'SuppressStartupBanner': 'true',
- 'TypeLibraryFile': 'a_file_name',
- 'UpdateFileHashes': 'true',
- 'UpdateFileHashesSearchPath': 'a_file_name',
- 'VerboseOutput': 'true'},
- 'ManifestResourceCompile': {
- 'ResourceOutputFileName': 'my_name'},
- 'ProjectReference': {
- 'LinkLibraryDependencies': 'true',
- 'UseLibraryDependencyInputs': 'false'},
- '': {
- 'EmbedManifest': 'true',
- 'GenerateManifest': 'true',
- 'IgnoreImportLibrary': 'true',
- 'LinkIncremental': 'false'}}
- actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
- msvs_settings,
- self.stderr)
- self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
- self._ExpectedWarnings([])
-
- def testConvertToMSBuildSettings_actual(self):
- """Tests the conversion of an actual project.
-
- A VS2008 project with most of the options defined was created through the
- VS2008 IDE. It was then converted to VS2010. The tool settings found in
- the .vcproj and .vcxproj files were converted to the two dictionaries
- msvs_settings and expected_msbuild_settings.
-
- Note that for many settings, the VS2010 converter adds macros like
-  %(AdditionalIncludeDirectories) to make sure that inherited values are
- included. Since the Gyp projects we generate do not use inheritance,
- we removed these macros. They were:
- ClCompile:
- AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)'
- AdditionalOptions: ' %(AdditionalOptions)'
- AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)'
- DisableSpecificWarnings: ';%(DisableSpecificWarnings)',
- ForcedIncludeFiles: ';%(ForcedIncludeFiles)',
- ForcedUsingFiles: ';%(ForcedUsingFiles)',
- PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
- UndefinePreprocessorDefinitions:
- ';%(UndefinePreprocessorDefinitions)',
- Link:
- AdditionalDependencies: ';%(AdditionalDependencies)',
- AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)',
- AdditionalManifestDependencies:
- ';%(AdditionalManifestDependencies)',
- AdditionalOptions: ' %(AdditionalOptions)',
- AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)',
- AssemblyLinkResource: ';%(AssemblyLinkResource)',
- DelayLoadDLLs: ';%(DelayLoadDLLs)',
- EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)',
- ForceSymbolReferences: ';%(ForceSymbolReferences)',
- IgnoreSpecificDefaultLibraries:
- ';%(IgnoreSpecificDefaultLibraries)',
- ResourceCompile:
- AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)',
- AdditionalOptions: ' %(AdditionalOptions)',
- PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
- Manifest:
- AdditionalManifestFiles: ';%(AdditionalManifestFiles)',
- AdditionalOptions: ' %(AdditionalOptions)',
- InputResourceManifests: ';%(InputResourceManifests)',
- """
- msvs_settings = {
- 'VCCLCompilerTool': {
- 'AdditionalIncludeDirectories': 'dir1',
- 'AdditionalOptions': '/more',
- 'AdditionalUsingDirectories': 'test',
- 'AssemblerListingLocation': '$(IntDir)\\a',
- 'AssemblerOutput': '1',
- 'BasicRuntimeChecks': '3',
- 'BrowseInformation': '1',
- 'BrowseInformationFile': '$(IntDir)\\e',
- 'BufferSecurityCheck': 'false',
- 'CallingConvention': '1',
- 'CompileAs': '1',
- 'DebugInformationFormat': '4',
- 'DefaultCharIsUnsigned': 'true',
- 'Detect64BitPortabilityProblems': 'true',
- 'DisableLanguageExtensions': 'true',
- 'DisableSpecificWarnings': 'abc',
- 'EnableEnhancedInstructionSet': '1',
- 'EnableFiberSafeOptimizations': 'true',
- 'EnableFunctionLevelLinking': 'true',
- 'EnableIntrinsicFunctions': 'true',
- 'EnablePREfast': 'true',
- 'ErrorReporting': '2',
- 'ExceptionHandling': '2',
- 'ExpandAttributedSource': 'true',
- 'FavorSizeOrSpeed': '2',
- 'FloatingPointExceptions': 'true',
- 'FloatingPointModel': '1',
- 'ForceConformanceInForLoopScope': 'false',
- 'ForcedIncludeFiles': 'def',
- 'ForcedUsingFiles': 'ge',
- 'GeneratePreprocessedFile': '2',
- 'GenerateXMLDocumentationFiles': 'true',
- 'IgnoreStandardIncludePath': 'true',
- 'InlineFunctionExpansion': '1',
- 'KeepComments': 'true',
- 'MinimalRebuild': 'true',
- 'ObjectFile': '$(IntDir)\\b',
- 'OmitDefaultLibName': 'true',
- 'OmitFramePointers': 'true',
- 'OpenMP': 'true',
- 'Optimization': '3',
- 'PrecompiledHeaderFile': '$(IntDir)\\$(TargetName).pche',
- 'PrecompiledHeaderThrough': 'StdAfx.hd',
- 'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
- 'ProgramDataBaseFileName': '$(IntDir)\\vc90b.pdb',
- 'RuntimeLibrary': '3',
- 'RuntimeTypeInfo': 'false',
- 'ShowIncludes': 'true',
- 'SmallerTypeCheck': 'true',
- 'StringPooling': 'true',
- 'StructMemberAlignment': '3',
- 'SuppressStartupBanner': 'false',
- 'TreatWChar_tAsBuiltInType': 'false',
- 'UndefineAllPreprocessorDefinitions': 'true',
- 'UndefinePreprocessorDefinitions': 'wer',
- 'UseFullPaths': 'true',
- 'UsePrecompiledHeader': '0',
- 'UseUnicodeResponseFiles': 'false',
- 'WarnAsError': 'true',
- 'WarningLevel': '3',
- 'WholeProgramOptimization': 'true',
- 'XMLDocumentationFileName': '$(IntDir)\\c'},
- 'VCLinkerTool': {
- 'AdditionalDependencies': 'zx',
- 'AdditionalLibraryDirectories': 'asd',
- 'AdditionalManifestDependencies': 's2',
- 'AdditionalOptions': '/mor2',
- 'AddModuleNamesToAssembly': 'd1',
- 'AllowIsolation': 'false',
- 'AssemblyDebug': '1',
- 'AssemblyLinkResource': 'd5',
- 'BaseAddress': '23423',
- 'CLRImageType': '3',
- 'CLRThreadAttribute': '1',
- 'CLRUnmanagedCodeCheck': 'true',
- 'DataExecutionPrevention': '0',
- 'DelayLoadDLLs': 'd4',
- 'DelaySign': 'true',
- 'Driver': '2',
- 'EmbedManagedResourceFile': 'd2',
- 'EnableCOMDATFolding': '1',
- 'EnableUAC': 'false',
- 'EntryPointSymbol': 'f5',
- 'ErrorReporting': '2',
- 'FixedBaseAddress': '1',
- 'ForceSymbolReferences': 'd3',
- 'FunctionOrder': 'fssdfsd',
- 'GenerateDebugInformation': 'true',
- 'GenerateManifest': 'false',
- 'GenerateMapFile': 'true',
- 'HeapCommitSize': '13',
- 'HeapReserveSize': '12',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreDefaultLibraryNames': 'flob;flok',
- 'IgnoreEmbeddedIDL': 'true',
- 'IgnoreImportLibrary': 'true',
- 'ImportLibrary': 'f4',
- 'KeyContainer': 'f7',
- 'KeyFile': 'f6',
- 'LargeAddressAware': '2',
- 'LinkIncremental': '0',
- 'LinkLibraryDependencies': 'false',
- 'LinkTimeCodeGeneration': '1',
- 'ManifestFile':
- '$(IntDir)\\$(TargetFileName).2intermediate.manifest',
- 'MapExports': 'true',
- 'MapFileName': 'd5',
- 'MergedIDLBaseFileName': 'f2',
- 'MergeSections': 'f5',
- 'MidlCommandFile': 'f1',
- 'ModuleDefinitionFile': 'sdsd',
- 'OptimizeForWindows98': '2',
- 'OptimizeReferences': '2',
- 'OutputFile': '$(OutDir)\\$(ProjectName)2.exe',
- 'PerUserRedirection': 'true',
- 'Profile': 'true',
- 'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
- 'ProgramDatabaseFile': 'Flob.pdb',
- 'RandomizedBaseAddress': '1',
- 'RegisterOutput': 'true',
- 'ResourceOnlyDLL': 'true',
- 'SetChecksum': 'false',
- 'ShowProgress': '1',
- 'StackCommitSize': '15',
- 'StackReserveSize': '14',
- 'StripPrivateSymbols': 'd3',
- 'SubSystem': '1',
- 'SupportUnloadOfDelayLoadedDLL': 'true',
- 'SuppressStartupBanner': 'false',
- 'SwapRunFromCD': 'true',
- 'SwapRunFromNet': 'true',
- 'TargetMachine': '1',
- 'TerminalServerAware': '1',
- 'TurnOffAssemblyGeneration': 'true',
- 'TypeLibraryFile': 'f3',
- 'TypeLibraryResourceID': '12',
- 'UACExecutionLevel': '2',
- 'UACUIAccess': 'true',
- 'UseLibraryDependencyInputs': 'true',
- 'UseUnicodeResponseFiles': 'false',
- 'Version': '333'},
- 'VCResourceCompilerTool': {
- 'AdditionalIncludeDirectories': 'f3',
- 'AdditionalOptions': '/more3',
- 'Culture': '3084',
- 'IgnoreStandardIncludePath': 'true',
- 'PreprocessorDefinitions': '_UNICODE;UNICODE2',
- 'ResourceOutputFileName': '$(IntDir)/$(InputName)3.res',
- 'ShowProgress': 'true'},
- 'VCManifestTool': {
- 'AdditionalManifestFiles': 'sfsdfsd',
- 'AdditionalOptions': 'afdsdafsd',
- 'AssemblyIdentity': 'sddfdsadfsa',
- 'ComponentFileName': 'fsdfds',
- 'DependencyInformationFile': '$(IntDir)\\mt.depdfd',
- 'EmbedManifest': 'false',
- 'GenerateCatalogFiles': 'true',
- 'InputResourceManifests': 'asfsfdafs',
- 'ManifestResourceFile':
- '$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf',
- 'OutputManifestFile': '$(TargetPath).manifestdfs',
- 'RegistrarScriptFile': 'sdfsfd',
- 'ReplacementsFile': 'sdffsd',
- 'SuppressStartupBanner': 'false',
- 'TypeLibraryFile': 'sfsd',
- 'UpdateFileHashes': 'true',
- 'UpdateFileHashesSearchPath': 'sfsd',
- 'UseFAT32Workaround': 'true',
- 'UseUnicodeResponseFiles': 'false',
- 'VerboseOutput': 'true'}}
- expected_msbuild_settings = {
- 'ClCompile': {
- 'AdditionalIncludeDirectories': 'dir1',
- 'AdditionalOptions': '/more /J',
- 'AdditionalUsingDirectories': 'test',
- 'AssemblerListingLocation': '$(IntDir)a',
- 'AssemblerOutput': 'AssemblyCode',
- 'BasicRuntimeChecks': 'EnableFastChecks',
- 'BrowseInformation': 'true',
- 'BrowseInformationFile': '$(IntDir)e',
- 'BufferSecurityCheck': 'false',
- 'CallingConvention': 'FastCall',
- 'CompileAs': 'CompileAsC',
- 'DebugInformationFormat': 'EditAndContinue',
- 'DisableLanguageExtensions': 'true',
- 'DisableSpecificWarnings': 'abc',
- 'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
- 'EnableFiberSafeOptimizations': 'true',
- 'EnablePREfast': 'true',
- 'ErrorReporting': 'Queue',
- 'ExceptionHandling': 'Async',
- 'ExpandAttributedSource': 'true',
- 'FavorSizeOrSpeed': 'Size',
- 'FloatingPointExceptions': 'true',
- 'FloatingPointModel': 'Strict',
- 'ForceConformanceInForLoopScope': 'false',
- 'ForcedIncludeFiles': 'def',
- 'ForcedUsingFiles': 'ge',
- 'FunctionLevelLinking': 'true',
- 'GenerateXMLDocumentationFiles': 'true',
- 'IgnoreStandardIncludePath': 'true',
- 'InlineFunctionExpansion': 'OnlyExplicitInline',
- 'IntrinsicFunctions': 'true',
- 'MinimalRebuild': 'true',
- 'ObjectFileName': '$(IntDir)b',
- 'OmitDefaultLibName': 'true',
- 'OmitFramePointers': 'true',
- 'OpenMPSupport': 'true',
- 'Optimization': 'Full',
- 'PrecompiledHeader': 'NotUsing', # Actual conversion gives ''
- 'PrecompiledHeaderFile': 'StdAfx.hd',
- 'PrecompiledHeaderOutputFile': '$(IntDir)$(TargetName).pche',
- 'PreprocessKeepComments': 'true',
- 'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
- 'PreprocessSuppressLineNumbers': 'true',
- 'PreprocessToFile': 'true',
- 'ProgramDataBaseFileName': '$(IntDir)vc90b.pdb',
- 'RuntimeLibrary': 'MultiThreadedDebugDLL',
- 'RuntimeTypeInfo': 'false',
- 'ShowIncludes': 'true',
- 'SmallerTypeCheck': 'true',
- 'StringPooling': 'true',
- 'StructMemberAlignment': '4Bytes',
- 'SuppressStartupBanner': 'false',
- 'TreatWarningAsError': 'true',
- 'TreatWChar_tAsBuiltInType': 'false',
- 'UndefineAllPreprocessorDefinitions': 'true',
- 'UndefinePreprocessorDefinitions': 'wer',
- 'UseFullPaths': 'true',
- 'WarningLevel': 'Level3',
- 'WholeProgramOptimization': 'true',
- 'XMLDocumentationFileName': '$(IntDir)c'},
- 'Link': {
- 'AdditionalDependencies': 'zx',
- 'AdditionalLibraryDirectories': 'asd',
- 'AdditionalManifestDependencies': 's2',
- 'AdditionalOptions': '/mor2',
- 'AddModuleNamesToAssembly': 'd1',
- 'AllowIsolation': 'false',
- 'AssemblyDebug': 'true',
- 'AssemblyLinkResource': 'd5',
- 'BaseAddress': '23423',
- 'CLRImageType': 'ForceSafeILImage',
- 'CLRThreadAttribute': 'MTAThreadingAttribute',
- 'CLRUnmanagedCodeCheck': 'true',
- 'DataExecutionPrevention': '',
- 'DelayLoadDLLs': 'd4',
- 'DelaySign': 'true',
- 'Driver': 'UpOnly',
- 'EmbedManagedResourceFile': 'd2',
- 'EnableCOMDATFolding': 'false',
- 'EnableUAC': 'false',
- 'EntryPointSymbol': 'f5',
- 'FixedBaseAddress': 'false',
- 'ForceSymbolReferences': 'd3',
- 'FunctionOrder': 'fssdfsd',
- 'GenerateDebugInformation': 'true',
- 'GenerateMapFile': 'true',
- 'HeapCommitSize': '13',
- 'HeapReserveSize': '12',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreEmbeddedIDL': 'true',
- 'IgnoreSpecificDefaultLibraries': 'flob;flok',
- 'ImportLibrary': 'f4',
- 'KeyContainer': 'f7',
- 'KeyFile': 'f6',
- 'LargeAddressAware': 'true',
- 'LinkErrorReporting': 'QueueForNextLogin',
- 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
- 'ManifestFile': '$(IntDir)$(TargetFileName).2intermediate.manifest',
- 'MapExports': 'true',
- 'MapFileName': 'd5',
- 'MergedIDLBaseFileName': 'f2',
- 'MergeSections': 'f5',
- 'MidlCommandFile': 'f1',
- 'ModuleDefinitionFile': 'sdsd',
- 'NoEntryPoint': 'true',
- 'OptimizeReferences': 'true',
- 'OutputFile': '$(OutDir)$(ProjectName)2.exe',
- 'PerUserRedirection': 'true',
- 'Profile': 'true',
- 'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
- 'ProgramDatabaseFile': 'Flob.pdb',
- 'RandomizedBaseAddress': 'false',
- 'RegisterOutput': 'true',
- 'SetChecksum': 'false',
- 'ShowProgress': 'LinkVerbose',
- 'StackCommitSize': '15',
- 'StackReserveSize': '14',
- 'StripPrivateSymbols': 'd3',
- 'SubSystem': 'Console',
- 'SupportUnloadOfDelayLoadedDLL': 'true',
- 'SuppressStartupBanner': 'false',
- 'SwapRunFromCD': 'true',
- 'SwapRunFromNET': 'true',
- 'TargetMachine': 'MachineX86',
- 'TerminalServerAware': 'false',
- 'TurnOffAssemblyGeneration': 'true',
- 'TypeLibraryFile': 'f3',
- 'TypeLibraryResourceID': '12',
- 'UACExecutionLevel': 'RequireAdministrator',
- 'UACUIAccess': 'true',
- 'Version': '333'},
- 'ResourceCompile': {
- 'AdditionalIncludeDirectories': 'f3',
- 'AdditionalOptions': '/more3',
- 'Culture': '0x0c0c',
- 'IgnoreStandardIncludePath': 'true',
- 'PreprocessorDefinitions': '_UNICODE;UNICODE2',
- 'ResourceOutputFileName': '$(IntDir)%(Filename)3.res',
- 'ShowProgress': 'true'},
- 'Manifest': {
- 'AdditionalManifestFiles': 'sfsdfsd',
- 'AdditionalOptions': 'afdsdafsd',
- 'AssemblyIdentity': 'sddfdsadfsa',
- 'ComponentFileName': 'fsdfds',
- 'GenerateCatalogFiles': 'true',
- 'InputResourceManifests': 'asfsfdafs',
- 'OutputManifestFile': '$(TargetPath).manifestdfs',
- 'RegistrarScriptFile': 'sdfsfd',
- 'ReplacementsFile': 'sdffsd',
- 'SuppressStartupBanner': 'false',
- 'TypeLibraryFile': 'sfsd',
- 'UpdateFileHashes': 'true',
- 'UpdateFileHashesSearchPath': 'sfsd',
- 'VerboseOutput': 'true'},
- 'ProjectReference': {
- 'LinkLibraryDependencies': 'false',
- 'UseLibraryDependencyInputs': 'true'},
- '': {
- 'EmbedManifest': 'false',
- 'GenerateManifest': 'false',
- 'IgnoreImportLibrary': 'true',
- 'LinkIncremental': ''
- },
- 'ManifestResourceCompile': {
- 'ResourceOutputFileName':
- '$(IntDir)$(TargetFileName).embed.manifest.resfdsf'}
- }
- actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
- msvs_settings,
- self.stderr)
- self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
- self._ExpectedWarnings([])
-
-
-if __name__ == '__main__':
- unittest.main()
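The validators exercised by these tests can also be called directly: each
takes the settings dictionary and a writable stream, and writes one warning
line per problem. A small sketch mirroring the harness above; the exact
warning text is an assumption extrapolated from the expectations in the
tests:

import StringIO

import gyp.MSVSSettings as MSVSSettings

stderr = StringIO.StringIO()
MSVSSettings.ValidateMSBuildSettings(
    {'ClCompile': {'Optimization': 'Bogus'}}, stderr)
# Expect something like:
# Warning: for ClCompile/Optimization, unrecognized enumerated value Bogus
print stderr.getvalue()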
diff --git a/deps/gyp/pylib/gyp/MSVSToolFile.py b/deps/gyp/pylib/gyp/MSVSToolFile.py
deleted file mode 100644
index 74e529a17f..0000000000
--- a/deps/gyp/pylib/gyp/MSVSToolFile.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Visual Studio project reader/writer."""
-
-import gyp.common
-import gyp.easy_xml as easy_xml
-
-
-class Writer(object):
- """Visual Studio XML tool file writer."""
-
- def __init__(self, tool_file_path, name):
- """Initializes the tool file.
-
- Args:
- tool_file_path: Path to the tool file.
- name: Name of the tool file.
- """
- self.tool_file_path = tool_file_path
- self.name = name
- self.rules_section = ['Rules']
-
- def AddCustomBuildRule(self, name, cmd, description,
- additional_dependencies,
- outputs, extensions):
- """Adds a rule to the tool file.
-
- Args:
- name: Name of the rule.
- cmd: Command line of the rule.
- description: Description of the rule.
- additional_dependencies: other files which may trigger the rule.
- outputs: outputs of the rule.
- extensions: extensions handled by the rule.
- """
- rule = ['CustomBuildRule',
- {'Name': name,
- 'ExecutionDescription': description,
- 'CommandLine': cmd,
- 'Outputs': ';'.join(outputs),
- 'FileExtensions': ';'.join(extensions),
- 'AdditionalDependencies':
- ';'.join(additional_dependencies)
- }]
- self.rules_section.append(rule)
-
- def WriteIfChanged(self):
- """Writes the tool file."""
- content = ['VisualStudioToolFile',
- {'Version': '8.00',
- 'Name': self.name
- },
- self.rules_section
- ]
- easy_xml.WriteXmlIfChanged(content, self.tool_file_path,
- encoding="Windows-1252")
diff --git a/deps/gyp/pylib/gyp/MSVSUserFile.py b/deps/gyp/pylib/gyp/MSVSUserFile.py
deleted file mode 100644
index 6c07e9a893..0000000000
--- a/deps/gyp/pylib/gyp/MSVSUserFile.py
+++ /dev/null
@@ -1,147 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Visual Studio user preferences file writer."""
-
-import os
-import re
-import socket # for gethostname
-
-import gyp.common
-import gyp.easy_xml as easy_xml
-
-
-#------------------------------------------------------------------------------
-
-def _FindCommandInPath(command):
- """If there are no slashes in the command given, this function
- searches the PATH env to find the given command, and converts it
- to an absolute path. We have to do this because MSVS is looking
- for an actual file to launch a debugger on, not just a command
- line. Note that this happens at GYP time, so anything needing to
- be built needs to have a full path."""
- if '/' in command or '\\' in command:
- # If the command already has path elements (either relative or
- # absolute), then assume it is constructed properly.
- return command
- else:
- # Search through the path list and find an existing file that
- # we can access.
- paths = os.environ.get('PATH','').split(os.pathsep)
- for path in paths:
- item = os.path.join(path, command)
- if os.path.isfile(item) and os.access(item, os.X_OK):
- return item
- return command
-
-def _QuoteWin32CommandLineArgs(args):
- new_args = []
- for arg in args:
- # Replace all double-quotes with double-double-quotes to escape
- # them for cmd shell, and then quote the whole thing if there
- # are any.
- if arg.find('"') != -1:
- arg = '""'.join(arg.split('"'))
- arg = '"%s"' % arg
-
- # Otherwise, if there are any spaces, quote the whole arg.
- elif re.search(r'[ \t\n]', arg):
- arg = '"%s"' % arg
- new_args.append(arg)
- return new_args
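-
-# A minimal illustrative sketch of the quoting rules above (assumed, not
-# from the original file):
-#   _QuoteWin32CommandLineArgs(['a b', 'say "hi"'])
-#   returns ['"a b"', '"say ""hi"""']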
-
-class Writer(object):
- """Visual Studio XML user user file writer."""
-
- def __init__(self, user_file_path, version, name):
- """Initializes the user file.
-
- Args:
- user_file_path: Path to the user file.
- version: Version info.
- name: Name of the user file.
- """
- self.user_file_path = user_file_path
- self.version = version
- self.name = name
- self.configurations = {}
-
- def AddConfig(self, name):
- """Adds a configuration to the project.
-
- Args:
- name: Configuration name.
- """
- self.configurations[name] = ['Configuration', {'Name': name}]
-
- def AddDebugSettings(self, config_name, command, environment = {},
- working_directory=""):
- """Adds a DebugSettings node to the user file for a particular config.
-
- Args:
- config_name: Name of the configuration to receive the settings.
- command: command line to run. First element in the list is the
- executable. All elements of the command will be quoted if
- necessary.
- environment: dictionary of environment variables to set. (optional)
- working_directory: directory in which to run the command. (optional)
- """
- command = _QuoteWin32CommandLineArgs(command)
-
- abs_command = _FindCommandInPath(command[0])
-
- if environment and isinstance(environment, dict):
- env_list = ['%s="%s"' % (key, val)
- for (key,val) in environment.iteritems()]
- environment = ' '.join(env_list)
- else:
- environment = ''
-
- n_cmd = ['DebugSettings',
- {'Command': abs_command,
- 'WorkingDirectory': working_directory,
- 'CommandArguments': " ".join(command[1:]),
- 'RemoteMachine': socket.gethostname(),
- 'Environment': environment,
- 'EnvironmentMerge': 'true',
- # Currently these are all "dummy" values that we're just setting
- # in the default manner that MSVS does it. We could use some of
- # these to add additional capabilities, I suppose, but they might
- # not have parity with other platforms then.
- 'Attach': 'false',
- 'DebuggerType': '3', # 'auto' debugger
- 'Remote': '1',
- 'RemoteCommand': '',
- 'HttpUrl': '',
- 'PDBPath': '',
- 'SQLDebugging': '',
- 'DebuggerFlavor': '0',
- 'MPIRunCommand': '',
- 'MPIRunArguments': '',
- 'MPIRunWorkingDirectory': '',
- 'ApplicationCommand': '',
- 'ApplicationArguments': '',
- 'ShimCommand': '',
- 'MPIAcceptMode': '',
- 'MPIAcceptFilter': ''
- }]
-
- # Find the config, and add it if it doesn't exist.
- if config_name not in self.configurations:
- self.AddConfig(config_name)
-
- # Add the DebugSettings onto the appropriate config.
- self.configurations[config_name].append(n_cmd)
-
- def WriteIfChanged(self):
- """Writes the user file."""
- configs = ['Configurations']
- for config, spec in sorted(self.configurations.iteritems()):
- configs.append(spec)
-
- content = ['VisualStudioUserFile',
- {'Version': self.version.ProjectVersion(),
- 'Name': self.name
- },
- configs]
- easy_xml.WriteXmlIfChanged(content, self.user_file_path,
- encoding="Windows-1252")
diff --git a/deps/gyp/pylib/gyp/MSVSUtil.py b/deps/gyp/pylib/gyp/MSVSUtil.py
deleted file mode 100644
index 0b32e91180..0000000000
--- a/deps/gyp/pylib/gyp/MSVSUtil.py
+++ /dev/null
@@ -1,270 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions shared amongst the Windows generators."""
-
-import copy
-import os
-
-
-# A dictionary mapping supported target types to extensions.
-TARGET_TYPE_EXT = {
- 'executable': 'exe',
- 'loadable_module': 'dll',
- 'shared_library': 'dll',
- 'static_library': 'lib',
-}
-
-
-def _GetLargePdbShimCcPath():
- """Returns the path of the large_pdb_shim.cc file."""
- this_dir = os.path.abspath(os.path.dirname(__file__))
- src_dir = os.path.abspath(os.path.join(this_dir, '..', '..'))
- win_data_dir = os.path.join(src_dir, 'data', 'win')
- large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc')
- return large_pdb_shim_cc
-
-
-def _DeepCopySomeKeys(in_dict, keys):
- """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
-
- Arguments:
- in_dict: The dictionary to copy.
- keys: The keys to be copied. If a key is in this list and doesn't exist in
- |in_dict| this is not an error.
- Returns:
- The partially deep-copied dictionary.
- """
- d = {}
- for key in keys:
- if key not in in_dict:
- continue
- d[key] = copy.deepcopy(in_dict[key])
- return d
-
-
-def _SuffixName(name, suffix):
- """Add a suffix to the end of a target.
-
- Arguments:
- name: name of the target (foo#target)
- suffix: the suffix to be added
- Returns:
- Target name with suffix added (foo_suffix#target)
- """
- parts = name.rsplit('#', 1)
- parts[0] = '%s_%s' % (parts[0], suffix)
- return '#'.join(parts)
-
-
-def _ShardName(name, number):
- """Add a shard number to the end of a target.
-
- Arguments:
- name: name of the target (foo#target)
- number: shard number
- Returns:
- Target name with shard added (foo_1#target)
- """
- return _SuffixName(name, str(number))
-
-
-def ShardTargets(target_list, target_dicts):
- """Shard some targets apart to work around the linkers limits.
-
- Arguments:
- target_list: List of target pairs: 'base/base.gyp:base'.
- target_dicts: Dict of target properties keyed on target pair.
- Returns:
- Tuple of the new sharded versions of the inputs.
- """
- # Gather the targets to shard, and how many pieces.
- targets_to_shard = {}
- for t in target_dicts:
- shards = int(target_dicts[t].get('msvs_shard', 0))
- if shards:
- targets_to_shard[t] = shards
- # Shard target_list.
- new_target_list = []
- for t in target_list:
- if t in targets_to_shard:
- for i in range(targets_to_shard[t]):
- new_target_list.append(_ShardName(t, i))
- else:
- new_target_list.append(t)
- # Shard target_dict.
- new_target_dicts = {}
- for t in target_dicts:
- if t in targets_to_shard:
- for i in range(targets_to_shard[t]):
- name = _ShardName(t, i)
- new_target_dicts[name] = copy.copy(target_dicts[t])
- new_target_dicts[name]['target_name'] = _ShardName(
- new_target_dicts[name]['target_name'], i)
- sources = new_target_dicts[name].get('sources', [])
- new_sources = []
- for pos in range(i, len(sources), targets_to_shard[t]):
- new_sources.append(sources[pos])
- new_target_dicts[name]['sources'] = new_sources
- else:
- new_target_dicts[t] = target_dicts[t]
- # Shard dependencies.
- for t in new_target_dicts:
- for deptype in ('dependencies', 'dependencies_original'):
- dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
- new_dependencies = []
- for d in dependencies:
- if d in targets_to_shard:
- for i in range(targets_to_shard[d]):
- new_dependencies.append(_ShardName(d, i))
- else:
- new_dependencies.append(d)
- new_target_dicts[t][deptype] = new_dependencies
-
- return (new_target_list, new_target_dicts)
-
-
-def _GetPdbPath(target_dict, config_name, vars):
- """Returns the path to the PDB file that will be generated by a given
- configuration.
-
- The lookup proceeds as follows:
- - Look for an explicit path in the VCLinkerTool configuration block.
- - Look for an 'msvs_large_pdb_path' variable.
- - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
- specified.
- - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
-
- Arguments:
- target_dict: The target dictionary to be searched.
- config_name: The name of the configuration of interest.
- vars: A dictionary of common GYP variables with generator-specific values.
- Returns:
- The path of the corresponding PDB file.
- """
- config = target_dict['configurations'][config_name]
- msvs = config.setdefault('msvs_settings', {})
-
- linker = msvs.get('VCLinkerTool', {})
-
- pdb_path = linker.get('ProgramDatabaseFile')
- if pdb_path:
- return pdb_path
-
- variables = target_dict.get('variables', {})
- pdb_path = variables.get('msvs_large_pdb_path', None)
- if pdb_path:
- return pdb_path
-
- pdb_base = target_dict.get('product_name', target_dict['target_name'])
- pdb_base = '%s.%s.pdb' % (pdb_base, TARGET_TYPE_EXT[target_dict['type']])
- pdb_path = vars['PRODUCT_DIR'] + '/' + pdb_base
-
- return pdb_path
-
-
-def InsertLargePdbShims(target_list, target_dicts, vars):
- """Insert a shim target that forces the linker to use 4KB pagesize PDBs.
-
- This is a workaround for targets with PDBs greater than 1GB in size, the
- limit for the 1KB pagesize PDBs created by the linker by default.
-
- Arguments:
- target_list: List of target pairs: 'base/base.gyp:base'.
- target_dicts: Dict of target properties keyed on target pair.
- vars: A dictionary of common GYP variables with generator-specific values.
- Returns:
- Tuple of the shimmed version of the inputs.
- """
- # Determine which targets need shimming.
- targets_to_shim = []
- for t in target_dicts:
- target_dict = target_dicts[t]
-
- # We only want to shim targets that have msvs_large_pdb enabled.
- if not int(target_dict.get('msvs_large_pdb', 0)):
- continue
- # This is intended for executable, shared_library and loadable_module
- # targets where every configuration is set up to produce a PDB output.
- # If any of these conditions is not true then the shim logic will fail
- # below.
- targets_to_shim.append(t)
-
- large_pdb_shim_cc = _GetLargePdbShimCcPath()
-
- for t in targets_to_shim:
- target_dict = target_dicts[t]
- target_name = target_dict.get('target_name')
-
- base_dict = _DeepCopySomeKeys(target_dict,
- ['configurations', 'default_configuration', 'toolset'])
-
- # This is the dict for copying the source file (part of the GYP tree)
- # to the intermediate directory of the project. This is necessary because
- # we can't always build a relative path to the shim source file (on Windows
- # GYP and the project may be on different drives), and Ninja hates absolute
- # paths (it ends up generating the .obj and .obj.d alongside the source
- # file, polluting GYP's tree).
- copy_suffix = 'large_pdb_copy'
- copy_target_name = target_name + '_' + copy_suffix
- full_copy_target_name = _SuffixName(t, copy_suffix)
- shim_cc_basename = os.path.basename(large_pdb_shim_cc)
- shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name
- shim_cc_path = shim_cc_dir + '/' + shim_cc_basename
- copy_dict = copy.deepcopy(base_dict)
- copy_dict['target_name'] = copy_target_name
- copy_dict['type'] = 'none'
- copy_dict['sources'] = [ large_pdb_shim_cc ]
- copy_dict['copies'] = [{
- 'destination': shim_cc_dir,
- 'files': [ large_pdb_shim_cc ]
- }]
-
- # This is the dict for the PDB generating shim target. It depends on the
- # copy target.
- shim_suffix = 'large_pdb_shim'
- shim_target_name = target_name + '_' + shim_suffix
- full_shim_target_name = _SuffixName(t, shim_suffix)
- shim_dict = copy.deepcopy(base_dict)
- shim_dict['target_name'] = shim_target_name
- shim_dict['type'] = 'static_library'
- shim_dict['sources'] = [ shim_cc_path ]
- shim_dict['dependencies'] = [ full_copy_target_name ]
-
- # Set up the shim to output its PDB to the same location as the final linker
- # target.
- for config_name, config in shim_dict.get('configurations').iteritems():
- pdb_path = _GetPdbPath(target_dict, config_name, vars)
-
- # A few keys that we don't want to propagate.
- for key in ['msvs_precompiled_header', 'msvs_precompiled_source', 'test']:
- config.pop(key, None)
-
- msvs = config.setdefault('msvs_settings', {})
-
- # Update the compiler directives in the shim target.
- compiler = msvs.setdefault('VCCLCompilerTool', {})
- compiler['DebugInformationFormat'] = '3'
- compiler['ProgramDataBaseFileName'] = pdb_path
-
- # Set the explicit PDB path in the appropriate configuration of the
- # original target.
- config = target_dict['configurations'][config_name]
- msvs = config.setdefault('msvs_settings', {})
- linker = msvs.setdefault('VCLinkerTool', {})
- linker['GenerateDebugInformation'] = 'true'
- linker['ProgramDatabaseFile'] = pdb_path
-
- # Add the new targets. They must go to the beginning of the list so that
- # the dependency generation works as expected in ninja.
- target_list.insert(0, full_copy_target_name)
- target_list.insert(0, full_shim_target_name)
- target_dicts[full_copy_target_name] = copy_dict
- target_dicts[full_shim_target_name] = shim_dict
-
- # Update the original target to depend on the shim target.
- target_dict.setdefault('dependencies', []).append(full_shim_target_name)
-
- return (target_list, target_dicts)
diff --git a/deps/gyp/pylib/gyp/MSVSVersion.py b/deps/gyp/pylib/gyp/MSVSVersion.py
deleted file mode 100644
index d9bfa684fa..0000000000
--- a/deps/gyp/pylib/gyp/MSVSVersion.py
+++ /dev/null
@@ -1,443 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Handle version information related to Visual Stuio."""
-
-import errno
-import os
-import re
-import subprocess
-import sys
-import gyp
-import glob
-
-
-class VisualStudioVersion(object):
- """Information regarding a version of Visual Studio."""
-
- def __init__(self, short_name, description,
- solution_version, project_version, flat_sln, uses_vcxproj,
- path, sdk_based, default_toolset=None):
- self.short_name = short_name
- self.description = description
- self.solution_version = solution_version
- self.project_version = project_version
- self.flat_sln = flat_sln
- self.uses_vcxproj = uses_vcxproj
- self.path = path
- self.sdk_based = sdk_based
- self.default_toolset = default_toolset
-
- def ShortName(self):
- return self.short_name
-
- def Description(self):
- """Get the full description of the version."""
- return self.description
-
- def SolutionVersion(self):
- """Get the version number of the sln files."""
- return self.solution_version
-
- def ProjectVersion(self):
- """Get the version number of the vcproj or vcxproj files."""
- return self.project_version
-
- def FlatSolution(self):
- return self.flat_sln
-
- def UsesVcxproj(self):
- """Returns true if this version uses a vcxproj file."""
- return self.uses_vcxproj
-
- def ProjectExtension(self):
- """Returns the file extension for the project."""
- return self.uses_vcxproj and '.vcxproj' or '.vcproj'
-
- def Path(self):
- """Returns the path to Visual Studio installation."""
- return self.path
-
- def ToolPath(self, tool):
- """Returns the path to a given compiler tool. """
- return os.path.normpath(os.path.join(self.path, "VC/bin", tool))
-
- def DefaultToolset(self):
- """Returns the msbuild toolset version that will be used in the absence
- of a user override."""
- return self.default_toolset
-
- def SetupScript(self, target_arch):
- """Returns a command (with arguments) to be used to set up the
- environment."""
- # Check if we are running in the SDK command line environment and use
- # the setup script from the SDK if so. |target_arch| should be either
- # 'x86' or 'x64'.
- assert target_arch in ('x86', 'x64')
- sdk_dir = os.environ.get('WindowsSDKDir')
- if self.sdk_based and sdk_dir:
- return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')),
- '/' + target_arch]
- else:
- # We don't use VC/vcvarsall.bat for x86 because vcvarsall calls
- # vcvars32, which it can only find if VS??COMNTOOLS is set, which it
- # isn't always.
- if target_arch == 'x86':
- if self.short_name >= '2013' and self.short_name[-1] != 'e' and (
- os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
- os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
- # VS2013 and later, non-Express have a x64-x86 cross that we want
- # to prefer.
- return [os.path.normpath(
- os.path.join(self.path, 'VC/vcvarsall.bat')), 'amd64_x86']
- # Otherwise, the standard x86 compiler.
- return [os.path.normpath(
- os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))]
- else:
- assert target_arch == 'x64'
- arg = 'x86_amd64'
- # Use the 64-on-64 compiler if we're not using an express
- # edition and we're running on a 64bit OS.
- if self.short_name[-1] != 'e' and (
- os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
- os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
- arg = 'amd64'
- return [os.path.normpath(
- os.path.join(self.path, 'VC/vcvarsall.bat')), arg]
-
-
-def _RegistryQueryBase(sysdir, key, value):
- """Use reg.exe to read a particular key.
-
- Ideally we might use the win32 module, but we would like gyp to work with
- any Python build; cygwin python, for instance, lacks this module.
-
- Arguments:
- sysdir: The system subdirectory to attempt to launch reg.exe from.
- key: The registry key to read from.
- value: The particular value to read.
- Return:
- stdout from reg.exe, or None for failure.
- """
- # Skip if not on Windows or Python Win32 setup issue
- if sys.platform not in ('win32', 'cygwin'):
- return None
- # Setup params to pass to and attempt to launch reg.exe
- cmd = [os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe'),
- 'query', key]
- if value:
- cmd.extend(['/v', value])
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- # Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
- # Note that the error text may be in [1] in some cases
- text = p.communicate()[0]
- # Check return code from reg.exe; officially 0==success and 1==error
- if p.returncode:
- return None
- return text
-
-
-def _RegistryQuery(key, value=None):
- r"""Use reg.exe to read a particular key through _RegistryQueryBase.
-
- First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
- that fails, it falls back to System32. Sysnative is available on Vista and
- up, and on Windows Server 2003 and XP through KB patch 942589. Note that
- Sysnative always fails under 64-bit python because it is a virtual
- directory; in that case System32 works correctly in the first place.
-
- KB 942589 - http://support.microsoft.com/kb/942589/en-us.
-
- Arguments:
- key: The registry key.
- value: The particular registry value to read (optional).
- Return:
- stdout from reg.exe, or None for failure.
- """
- text = None
- try:
- text = _RegistryQueryBase('Sysnative', key, value)
- except OSError, e:
- if e.errno == errno.ENOENT:
- text = _RegistryQueryBase('System32', key, value)
- else:
- raise
- return text
-
-
-def _RegistryGetValueUsingWinReg(key, value):
- """Use the _winreg module to obtain the value of a registry key.
-
- Args:
- key: The registry key.
- value: The particular registry value to read.
- Return:
- contents of the registry key's value, or None on failure. Throws
- ImportError if _winreg is unavailable.
- """
- import _winreg
- try:
- root, subkey = key.split('\\', 1)
- assert root == 'HKLM' # Only need HKLM for now.
- with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
- return _winreg.QueryValueEx(hkey, value)[0]
- except WindowsError:
- return None
-
-
-def _RegistryGetValue(key, value):
- """Use _winreg or reg.exe to obtain the value of a registry key.
-
- Using _winreg is preferable because it solves an issue in some corporate
- environments where access to reg.exe is locked down. However, we still need
- to fall back to reg.exe for the case where the _winreg module is not
- available (for example in cygwin python).
-
- Args:
- key: The registry key.
- value: The particular registry value to read.
- Return:
- contents of the registry key's value, or None on failure.
- """
- try:
- return _RegistryGetValueUsingWinReg(key, value)
- except ImportError:
- pass
-
- # Fall back to reg.exe if we fail to import _winreg.
- text = _RegistryQuery(key, value)
- if not text:
- return None
- # Extract value.
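- # reg.exe output looks roughly like (an assumed illustration):
- # '    InstallDir    REG_SZ    C:\Program Files\...'
- # and the group below captures the value text after the REG_* type.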
- match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text)
- if not match:
- return None
- return match.group(1)
-
-
-def _CreateVersion(name, path, sdk_based=False):
- """Sets up MSVS project generation.
-
- Setup is based on the GYP_MSVS_VERSION environment variable or whatever is
- autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version
- is passed in that doesn't match a value in versions, python will throw an
- error.
- """
- if path:
- path = os.path.normpath(path)
- versions = {
- '2015': VisualStudioVersion('2015',
- 'Visual Studio 2015',
- solution_version='12.00',
- project_version='14.0',
- flat_sln=False,
- uses_vcxproj=True,
- path=path,
- sdk_based=sdk_based,
- default_toolset='v140'),
- '2013': VisualStudioVersion('2013',
- 'Visual Studio 2013',
- solution_version='13.00',
- project_version='12.0',
- flat_sln=False,
- uses_vcxproj=True,
- path=path,
- sdk_based=sdk_based,
- default_toolset='v120'),
- '2013e': VisualStudioVersion('2013e',
- 'Visual Studio 2013',
- solution_version='13.00',
- project_version='12.0',
- flat_sln=True,
- uses_vcxproj=True,
- path=path,
- sdk_based=sdk_based,
- default_toolset='v120'),
- '2012': VisualStudioVersion('2012',
- 'Visual Studio 2012',
- solution_version='12.00',
- project_version='4.0',
- flat_sln=False,
- uses_vcxproj=True,
- path=path,
- sdk_based=sdk_based,
- default_toolset='v110'),
- '2012e': VisualStudioVersion('2012e',
- 'Visual Studio 2012',
- solution_version='12.00',
- project_version='4.0',
- flat_sln=True,
- uses_vcxproj=True,
- path=path,
- sdk_based=sdk_based,
- default_toolset='v110'),
- '2010': VisualStudioVersion('2010',
- 'Visual Studio 2010',
- solution_version='11.00',
- project_version='4.0',
- flat_sln=False,
- uses_vcxproj=True,
- path=path,
- sdk_based=sdk_based),
- '2010e': VisualStudioVersion('2010e',
- 'Visual C++ Express 2010',
- solution_version='11.00',
- project_version='4.0',
- flat_sln=True,
- uses_vcxproj=True,
- path=path,
- sdk_based=sdk_based),
- '2008': VisualStudioVersion('2008',
- 'Visual Studio 2008',
- solution_version='10.00',
- project_version='9.00',
- flat_sln=False,
- uses_vcxproj=False,
- path=path,
- sdk_based=sdk_based),
- '2008e': VisualStudioVersion('2008e',
- 'Visual Studio 2008',
- solution_version='10.00',
- project_version='9.00',
- flat_sln=True,
- uses_vcxproj=False,
- path=path,
- sdk_based=sdk_based),
- '2005': VisualStudioVersion('2005',
- 'Visual Studio 2005',
- solution_version='9.00',
- project_version='8.00',
- flat_sln=False,
- uses_vcxproj=False,
- path=path,
- sdk_based=sdk_based),
- '2005e': VisualStudioVersion('2005e',
- 'Visual Studio 2005',
- solution_version='9.00',
- project_version='8.00',
- flat_sln=True,
- uses_vcxproj=False,
- path=path,
- sdk_based=sdk_based),
- }
- return versions[str(name)]
-
-
-def _ConvertToCygpath(path):
- """Convert to cygwin path if we are using cygwin."""
- if sys.platform == 'cygwin':
- p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
- path = p.communicate()[0].strip()
- return path
-
-
-def _DetectVisualStudioVersions(versions_to_check, force_express):
- """Collect the list of installed visual studio versions.
-
- Returns:
- A list of visual studio versions installed in descending order of
- usage preference.
- Base this on the registry and a quick check if devenv.exe exists.
- Only versions 8-14 are considered.
- Possibilities are:
- 2005(e) - Visual Studio 2005 (8)
- 2008(e) - Visual Studio 2008 (9)
- 2010(e) - Visual Studio 2010 (10)
- 2012(e) - Visual Studio 2012 (11)
- 2013(e) - Visual Studio 2013 (12)
- 2015 - Visual Studio 2015 (14)
- Where (e) is e for express editions of MSVS and blank otherwise.
- """
- version_to_year = {
- '8.0': '2005',
- '9.0': '2008',
- '10.0': '2010',
- '11.0': '2012',
- '12.0': '2013',
- '14.0': '2015',
- }
- versions = []
- for version in versions_to_check:
- # Old method of searching for which VS version is installed
- # We don't use the way encouraged by VS2010 and later because we also want
- # to get the path to the binaries, which it doesn't offer.
- keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version,
- r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version,
- r'HKLM\Software\Microsoft\VCExpress\%s' % version,
- r'HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s' % version]
- for index in range(len(keys)):
- path = _RegistryGetValue(keys[index], 'InstallDir')
- if not path:
- continue
- path = _ConvertToCygpath(path)
- # Check for full.
- full_path = os.path.join(path, 'devenv.exe')
- express_path = os.path.join(path, '*express.exe')
- if not force_express and os.path.exists(full_path):
- # Add this one.
- versions.append(_CreateVersion(version_to_year[version],
- os.path.join(path, '..', '..')))
- # Check for express.
- elif glob.glob(express_path):
- # Add this one.
- versions.append(_CreateVersion(version_to_year[version] + 'e',
- os.path.join(path, '..', '..')))
-
- # The old method above does not work when only SDK is installed.
- keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7',
- r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7']
- for index in range(len(keys)):
- path = _RegistryGetValue(keys[index], version)
- if not path:
- continue
- path = _ConvertToCygpath(path)
- if version != '14.0': # There is no Express edition for 2015.
- versions.append(_CreateVersion(version_to_year[version] + 'e',
- os.path.join(path, '..'), sdk_based=True))
-
- return versions
-
-
-def SelectVisualStudioVersion(version='auto', allow_fallback=True):
- """Select which version of Visual Studio projects to generate.
-
- Arguments:
- version: Hook to allow caller to force a particular version (vs auto).
- allow_fallback: Whether to fall back to a default version if no installed
- version is detected.
- Returns:
- An object representing a visual studio project format version.
- """
- # In auto mode, check environment variable for override.
- if version == 'auto':
- version = os.environ.get('GYP_MSVS_VERSION', 'auto')
- version_map = {
- 'auto': ('14.0', '12.0', '10.0', '9.0', '8.0', '11.0'),
- '2005': ('8.0',),
- '2005e': ('8.0',),
- '2008': ('9.0',),
- '2008e': ('9.0',),
- '2010': ('10.0',),
- '2010e': ('10.0',),
- '2012': ('11.0',),
- '2012e': ('11.0',),
- '2013': ('12.0',),
- '2013e': ('12.0',),
- '2015': ('14.0',),
- }
- override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
- if override_path:
- msvs_version = os.environ.get('GYP_MSVS_VERSION')
- if not msvs_version:
- raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
- 'set to a particular version (e.g. 2010e).')
- return _CreateVersion(msvs_version, override_path, sdk_based=True)
- version = str(version)
- versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
- if not versions:
- if not allow_fallback:
- raise ValueError('Could not locate Visual Studio installation.')
- if version == 'auto':
- # Default to 2005 if we couldn't find anything
- return _CreateVersion('2005', None)
- else:
- return _CreateVersion(version, None)
- return versions[0]
diff --git a/deps/gyp/pylib/gyp/__init__.py b/deps/gyp/pylib/gyp/__init__.py
deleted file mode 100755
index 668f38b60d..0000000000
--- a/deps/gyp/pylib/gyp/__init__.py
+++ /dev/null
@@ -1,548 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import copy
-import gyp.input
-import optparse
-import os.path
-import re
-import shlex
-import sys
-import traceback
-from gyp.common import GypError
-
-# Default debug modes for GYP
-debug = {}
-
-# List of "official" debug modes, but you can use anything you like.
-DEBUG_GENERAL = 'general'
-DEBUG_VARIABLES = 'variables'
-DEBUG_INCLUDES = 'includes'
-
-
-def DebugOutput(mode, message, *args):
- if 'all' in gyp.debug or mode in gyp.debug:
- ctx = ('unknown', 0, 'unknown')
- try:
- f = traceback.extract_stack(limit=2)
- if f:
- ctx = f[0][:3]
- except:
- pass
- if args:
- message %= args
- print '%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]),
- ctx[1], ctx[2], message)
-
-def FindBuildFiles():
- extension = '.gyp'
- files = os.listdir(os.getcwd())
- build_files = []
- for file in files:
- if file.endswith(extension):
- build_files.append(file)
- return build_files
-
-
-def Load(build_files, format, default_variables={},
- includes=[], depth='.', params=None, check=False,
- circular_check=True, duplicate_basename_check=True):
- """
- Loads one or more specified build files.
- default_variables and includes will be copied before use.
- Returns the generator for the specified format and the
- data returned by loading the specified build files.
- """
- if params is None:
- params = {}
-
- if '-' in format:
- format, params['flavor'] = format.split('-', 1)
-
- default_variables = copy.copy(default_variables)
-
- # Default variables provided by this program and its modules should be
- # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
- # avoiding collisions with user and automatic variables.
- default_variables['GENERATOR'] = format
- default_variables['GENERATOR_FLAVOR'] = params.get('flavor', '')
-
- # Format can be a custom python file, or by default the name of a module
- # within gyp.generator.
- if format.endswith('.py'):
- generator_name = os.path.splitext(format)[0]
- path, generator_name = os.path.split(generator_name)
-
- # Make sure the path to the custom generator is in sys.path
- # Don't worry about removing it once we are done. Keeping the path
- # to each generator that is used in sys.path is likely harmless and
- # arguably a good idea.
- path = os.path.abspath(path)
- if path not in sys.path:
- sys.path.insert(0, path)
- else:
- generator_name = 'gyp.generator.' + format
-
- # These parameters are passed in order (as opposed to by key)
- # because ActivePython cannot handle key parameters to __import__.
- generator = __import__(generator_name, globals(), locals(), generator_name)
- for (key, val) in generator.generator_default_variables.items():
- default_variables.setdefault(key, val)
-
- # Give the generator the opportunity to set additional variables based on
- # the params it will receive in the output phase.
- if getattr(generator, 'CalculateVariables', None):
- generator.CalculateVariables(default_variables, params)
-
- # Give the generator the opportunity to set generator_input_info based on
- # the params it will receive in the output phase.
- if getattr(generator, 'CalculateGeneratorInputInfo', None):
- generator.CalculateGeneratorInputInfo(params)
-
- # Fetch the generator specific info that gets fed to input, we use getattr
- # so we can default things and the generators only have to provide what
- # they need.
- generator_input_info = {
- 'non_configuration_keys':
- getattr(generator, 'generator_additional_non_configuration_keys', []),
- 'path_sections':
- getattr(generator, 'generator_additional_path_sections', []),
- 'extra_sources_for_rules':
- getattr(generator, 'generator_extra_sources_for_rules', []),
- 'generator_supports_multiple_toolsets':
- getattr(generator, 'generator_supports_multiple_toolsets', False),
- 'generator_wants_static_library_dependencies_adjusted':
- getattr(generator,
- 'generator_wants_static_library_dependencies_adjusted', True),
- 'generator_wants_sorted_dependencies':
- getattr(generator, 'generator_wants_sorted_dependencies', False),
- 'generator_filelist_paths':
- getattr(generator, 'generator_filelist_paths', None),
- }
-
- # Process the input specific to this generator.
- result = gyp.input.Load(build_files, default_variables, includes[:],
- depth, generator_input_info, check, circular_check,
- duplicate_basename_check,
- params['parallel'], params['root_targets'])
- return [generator] + result
-
-def NameValueListToDict(name_value_list):
- """
- Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
- of the pairs. If a string is simply NAME, then the value in the dictionary
- is set to True. If VALUE can be converted to an integer, it is.
- """
- result = { }
- for item in name_value_list:
- tokens = item.split('=', 1)
- if len(tokens) == 2:
- # If we can make it an int, use that, otherwise, use the string.
- try:
- token_value = int(tokens[1])
- except ValueError:
- token_value = tokens[1]
- # Set the variable to the supplied value.
- result[tokens[0]] = token_value
- else:
- # No value supplied, treat it as a boolean and set it.
- result[tokens[0]] = True
- return result
-
-def ShlexEnv(env_name):
- flags = os.environ.get(env_name, [])
- if flags:
- flags = shlex.split(flags)
- return flags
-
-def FormatOpt(opt, value):
- if opt.startswith('--'):
- return '%s=%s' % (opt, value)
- return opt + value
-
-def RegenerateAppendFlag(flag, values, predicate, env_name, options):
- """Regenerate a list of command line flags, for an option of action='append'.
-
- The |env_name|, if given, is checked in the environment and used to generate
- an initial list of options, then the options that were specified on the
- command line (given in |values|) are appended. This matches the handling of
- environment variables and command line flags where command line flags override
- the environment, while not requiring the environment to be set when the flags
- are used again.
- """
- flags = []
- if options.use_environment and env_name:
- for flag_value in ShlexEnv(env_name):
- value = FormatOpt(flag, predicate(flag_value))
- if value in flags:
- flags.remove(value)
- flags.append(value)
- if values:
- for flag_value in values:
- flags.append(FormatOpt(flag, predicate(flag_value)))
- return flags
-
-def RegenerateFlags(options):
- """Given a parsed options object, and taking the environment variables into
- account, returns a list of flags that should regenerate an equivalent options
- object (even in the absence of the environment variables).
-
- Any path options will be normalized relative to depth.
-
- The format flag is not included, as it is assumed the calling generator will
- set that as appropriate.
- """
- def FixPath(path):
- path = gyp.common.FixIfRelativePath(path, options.depth)
- if not path:
- return os.path.curdir
- return path
-
- def Noop(value):
- return value
-
- # We always want to ignore the environment when regenerating, to avoid
- # duplicate or changed flags in the environment at the time of regeneration.
- flags = ['--ignore-environment']
- for name, metadata in options._regeneration_metadata.iteritems():
- opt = metadata['opt']
- value = getattr(options, name)
- value_predicate = metadata['type'] == 'path' and FixPath or Noop
- action = metadata['action']
- env_name = metadata['env_name']
- if action == 'append':
- flags.extend(RegenerateAppendFlag(opt, value, value_predicate,
- env_name, options))
- elif action in ('store', None): # None is a synonym for 'store'.
- if value:
- flags.append(FormatOpt(opt, value_predicate(value)))
- elif options.use_environment and env_name and os.environ.get(env_name):
- flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
- elif action in ('store_true', 'store_false'):
- if ((action == 'store_true' and value) or
- (action == 'store_false' and not value)):
- flags.append(opt)
- elif options.use_environment and env_name:
- print >>sys.stderr, ('Warning: environment regeneration unimplemented '
- 'for %s flag %r env_name %r' % (action, opt,
- env_name))
- else:
- print >>sys.stderr, ('Warning: regeneration unimplemented for action %r '
- 'flag %r' % (action, opt))
-
- return flags
-
-class RegeneratableOptionParser(optparse.OptionParser):
- def __init__(self):
- self.__regeneratable_options = {}
- optparse.OptionParser.__init__(self)
-
- def add_option(self, *args, **kw):
- """Add an option to the parser.
-
- This accepts the same arguments as OptionParser.add_option, plus the
- following:
- regenerate: can be set to False to prevent this option from being included
- in regeneration.
- env_name: name of environment variable that additional values for this
- option come from.
- type: adds type='path', to tell the regenerator that the values of
- this option need to be made relative to options.depth
- """
- env_name = kw.pop('env_name', None)
- if 'dest' in kw and kw.pop('regenerate', True):
- dest = kw['dest']
-
- # The path type is needed for regenerating, for optparse we can just treat
- # it as a string.
- type = kw.get('type')
- if type == 'path':
- kw['type'] = 'string'
-
- self.__regeneratable_options[dest] = {
- 'action': kw.get('action'),
- 'type': type,
- 'env_name': env_name,
- 'opt': args[0],
- }
-
- optparse.OptionParser.add_option(self, *args, **kw)
-
- def parse_args(self, *args):
- values, args = optparse.OptionParser.parse_args(self, *args)
- values._regeneration_metadata = self.__regeneratable_options
- return values, args
-
-def gyp_main(args):
- my_name = os.path.basename(sys.argv[0])
-
- parser = RegeneratableOptionParser()
- usage = 'usage: %s [options ...] [build_file ...]'
- parser.set_usage(usage.replace('%s', '%prog'))
- parser.add_option('--build', dest='configs', action='append',
- help='configuration for build after project generation')
- parser.add_option('--check', dest='check', action='store_true',
- help='check format of gyp files')
- parser.add_option('--config-dir', dest='config_dir', action='store',
- env_name='GYP_CONFIG_DIR', default=None,
- help='The location for configuration files like '
- 'include.gypi.')
- parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
- action='append', default=[], help='turn on a debugging '
- 'mode for debugging GYP. Supported modes are "variables", '
- '"includes" and "general" or "all" for all of them.')
- parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
- env_name='GYP_DEFINES',
- help='sets variable VAR to value VAL')
- parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
- help='set DEPTH gyp variable to a relative path to PATH')
- parser.add_option('-f', '--format', dest='formats', action='append',
- env_name='GYP_GENERATORS', regenerate=False,
- help='output formats to generate')
- parser.add_option('-G', dest='generator_flags', action='append', default=[],
- metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
- help='sets generator flag FLAG to VAL')
- parser.add_option('--generator-output', dest='generator_output',
- action='store', default=None, metavar='DIR', type='path',
- env_name='GYP_GENERATOR_OUTPUT',
- help='puts generated build files under DIR')
- parser.add_option('--ignore-environment', dest='use_environment',
- action='store_false', default=True, regenerate=False,
- help='do not read options from environment variables')
- parser.add_option('-I', '--include', dest='includes', action='append',
- metavar='INCLUDE', type='path',
- help='files to include in all loaded .gyp files')
- # --no-circular-check disables the check for circular relationships between
- # .gyp files. These relationships should not exist, but they've only been
- # observed to be harmful with the Xcode generator. Chromium's .gyp files
- # currently have some circular relationships on non-Mac platforms, so this
- # option allows the strict behavior to be used on Macs and the lenient
- # behavior to be used elsewhere.
- # TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
- parser.add_option('--no-circular-check', dest='circular_check',
- action='store_false', default=True, regenerate=False,
- help="don't check for circular relationships between files")
- # --no-duplicate-basename-check disables the check for duplicate basenames
- # in a static_library/shared_library project. Visual C++ 2008 generator
- # doesn't support this configuration. Libtool on Mac also generates warnings
- # when duplicate basenames are passed into Make generator on Mac.
- # TODO(yukawa): Remove this option when these legacy generators are
- # deprecated.
- parser.add_option('--no-duplicate-basename-check',
- dest='duplicate_basename_check', action='store_false',
- default=True, regenerate=False,
- help="don't check for duplicate basenames")
- parser.add_option('--no-parallel', action='store_true', default=False,
- help='Disable multiprocessing')
- parser.add_option('-S', '--suffix', dest='suffix', default='',
- help='suffix to add to generated files')
- parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
- default=None, metavar='DIR', type='path',
- help='directory to use as the root of the source tree')
- parser.add_option('-R', '--root-target', dest='root_targets',
- action='append', metavar='TARGET',
- help='include only TARGET and its deep dependencies')
-
- options, build_files_arg = parser.parse_args(args)
- build_files = build_files_arg
-
- # Set up the configuration directory (defaults to ~/.gyp)
- if not options.config_dir:
- home = None
- home_dot_gyp = None
- if options.use_environment:
- home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None)
- if home_dot_gyp:
- home_dot_gyp = os.path.expanduser(home_dot_gyp)
-
- if not home_dot_gyp:
- home_vars = ['HOME']
- if sys.platform in ('cygwin', 'win32'):
- home_vars.append('USERPROFILE')
- for home_var in home_vars:
- home = os.getenv(home_var)
- if home != None:
- home_dot_gyp = os.path.join(home, '.gyp')
- if not os.path.exists(home_dot_gyp):
- home_dot_gyp = None
- else:
- break
- else:
- home_dot_gyp = os.path.expanduser(options.config_dir)
-
- if home_dot_gyp and not os.path.exists(home_dot_gyp):
- home_dot_gyp = None
-
- if not options.formats:
- # If no format was given on the command line, then check the env variable.
- generate_formats = []
- if options.use_environment:
- generate_formats = os.environ.get('GYP_GENERATORS', [])
- if generate_formats:
- generate_formats = re.split(r'[\s,]', generate_formats)
- if generate_formats:
- options.formats = generate_formats
- else:
- # Nothing in the variable, default based on platform.
- if sys.platform == 'darwin':
- options.formats = ['xcode']
- elif sys.platform in ('win32', 'cygwin'):
- options.formats = ['msvs']
- else:
- options.formats = ['make']
-
- if not options.generator_output and options.use_environment:
- g_o = os.environ.get('GYP_GENERATOR_OUTPUT')
- if g_o:
- options.generator_output = g_o
-
- options.parallel = not options.no_parallel
-
- for mode in options.debug:
- gyp.debug[mode] = 1
-
- # Do an extra check to avoid work when we're not debugging.
- if DEBUG_GENERAL in gyp.debug:
- DebugOutput(DEBUG_GENERAL, 'running with these options:')
- for option, value in sorted(options.__dict__.items()):
- if option[0] == '_':
- continue
- if isinstance(value, basestring):
- DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value)
- else:
- DebugOutput(DEBUG_GENERAL, " %s: %s", option, value)
-
- if not build_files:
- build_files = FindBuildFiles()
- if not build_files:
- raise GypError((usage + '\n\n%s: error: no build_file') %
- (my_name, my_name))
-
- # TODO(mark): Chromium-specific hack!
- # For Chromium, the gyp "depth" variable should always be a relative path
- # to Chromium's top-level "src" directory. If no depth variable was set
- # on the command line, try to find a "src" directory by looking at the
- # absolute path to each build file's directory. The first "src" component
- # found will be treated as though it were the path used for --depth.
- if not options.depth:
- for build_file in build_files:
- build_file_dir = os.path.abspath(os.path.dirname(build_file))
- build_file_dir_components = build_file_dir.split(os.path.sep)
- components_len = len(build_file_dir_components)
- for index in xrange(components_len - 1, -1, -1):
- if build_file_dir_components[index] == 'src':
- options.depth = os.path.sep.join(build_file_dir_components)
- break
- del build_file_dir_components[index]
-
- # If the inner loop found something, break without advancing to another
- # build file.
- if options.depth:
- break
-
- if not options.depth:
- raise GypError('Could not automatically locate src directory. This is '
- 'a temporary Chromium feature that will be removed. Use '
- '--depth as a workaround.')
-
- # If toplevel-dir is not set, we assume that depth is the root of our source
- # tree.
- if not options.toplevel_dir:
- options.toplevel_dir = options.depth
-
- # -D on the command line sets variable defaults - D isn't just for define,
- # it's for default. Perhaps there should be a way to force (-F?) a
- # variable's value so that it can't be overridden by anything else.
- cmdline_default_variables = {}
- defines = []
- if options.use_environment:
- defines += ShlexEnv('GYP_DEFINES')
- if options.defines:
- defines += options.defines
- cmdline_default_variables = NameValueListToDict(defines)
- if DEBUG_GENERAL in gyp.debug:
- DebugOutput(DEBUG_GENERAL,
- "cmdline_default_variables: %s", cmdline_default_variables)
-
- # Set up includes.
- includes = []
-
- # If ~/.gyp/include.gypi exists, it'll be forcibly included into every
- # .gyp file that's loaded, before anything else is included.
- if home_dot_gyp != None:
- default_include = os.path.join(home_dot_gyp, 'include.gypi')
- if os.path.exists(default_include):
- print 'Using overrides found in ' + default_include
- includes.append(default_include)
-
- # Command-line --include files come after the default include.
- if options.includes:
- includes.extend(options.includes)
-
- # Generator flags should be prefixed with the target generator since they
- # are global across all generator runs.
- gen_flags = []
- if options.use_environment:
- gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS')
- if options.generator_flags:
- gen_flags += options.generator_flags
- generator_flags = NameValueListToDict(gen_flags)
- if DEBUG_GENERAL in gyp.debug.keys():
- DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)
-
- # Generate all requested formats (use a set in case we got one format request
- # twice)
- for format in set(options.formats):
- params = {'options': options,
- 'build_files': build_files,
- 'generator_flags': generator_flags,
- 'cwd': os.getcwd(),
- 'build_files_arg': build_files_arg,
- 'gyp_binary': sys.argv[0],
- 'home_dot_gyp': home_dot_gyp,
- 'parallel': options.parallel,
- 'root_targets': options.root_targets,
- 'target_arch': cmdline_default_variables.get('target_arch', '')}
-
- # Start with the default variables from the command line.
- [generator, flat_list, targets, data] = Load(
- build_files, format, cmdline_default_variables, includes, options.depth,
- params, options.check, options.circular_check,
- options.duplicate_basename_check)
-
- # TODO(mark): Pass |data| for now because the generator needs a list of
- # build files that came in. In the future, maybe it should just accept
- # a list, and not the whole data dict.
- # NOTE: flat_list is the flattened dependency graph specifying the order
- # that targets may be built. Build systems that operate serially or that
- # need to have dependencies defined before dependents reference them should
- # generate targets in the order specified in flat_list.
- generator.GenerateOutput(flat_list, targets, data, params)
-
- if options.configs:
- valid_configs = targets[flat_list[0]]['configurations'].keys()
- for conf in options.configs:
- if conf not in valid_configs:
- raise GypError('Invalid config specified via --build: %s' % conf)
- generator.PerformBuild(data, options.configs, params)
-
- # Done
- return 0
-
-
-def main(args):
- try:
- return gyp_main(args)
- except GypError, e:
- sys.stderr.write("gyp: %s\n" % e)
- return 1
-
-# NOTE: setuptools generated console_scripts calls function with no arguments
-def script_main():
- return main(sys.argv[1:])
-
-if __name__ == '__main__':
- sys.exit(script_main())
diff --git a/deps/gyp/pylib/gyp/common.py b/deps/gyp/pylib/gyp/common.py
deleted file mode 100644
index 256e3f3a6b..0000000000
--- a/deps/gyp/pylib/gyp/common.py
+++ /dev/null
@@ -1,608 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from __future__ import with_statement
-
-import collections
-import errno
-import filecmp
-import os.path
-import re
-import tempfile
-import sys
-
-
-# A minimal memoizing decorator. It'll blow up if the args aren't immutable,
-# among other "problems".
-class memoize(object):
- def __init__(self, func):
- self.func = func
- self.cache = {}
- def __call__(self, *args):
- try:
- return self.cache[args]
- except KeyError:
- result = self.func(*args)
- self.cache[args] = result
- return result
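-
-# Minimal assumed usage sketch (not from the original file):
-#   @memoize
-#   def Lookup(key):
-#     ...expensive work...
-# Repeated calls with the same positional args return the cached result.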
-
-
-class GypError(Exception):
- """Error class representing an error, which is to be presented
- to the user. The main entry point will catch and display this.
- """
- pass
-
-
-def ExceptionAppend(e, msg):
- """Append a message to the given exception's message."""
- if not e.args:
- e.args = (msg,)
- elif len(e.args) == 1:
- e.args = (str(e.args[0]) + ' ' + msg,)
- else:
- e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]
-
-
-def FindQualifiedTargets(target, qualified_list):
- """
- Given a list of qualified targets, return the qualified targets for the
- specified |target|.
- """
- return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target]
-
-
-def ParseQualifiedTarget(target):
- # Splits a qualified target into a build file, target name and toolset.
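- # Illustrative example (assumed, not from the original file):
- # 'path/to/file.gyp:target#toolset' ->
- # ['path/to/file.gyp', 'target', 'toolset']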
-
- # NOTE: rsplit is used to disambiguate the Windows drive letter separator.
- target_split = target.rsplit(':', 1)
- if len(target_split) == 2:
- [build_file, target] = target_split
- else:
- build_file = None
-
- target_split = target.rsplit('#', 1)
- if len(target_split) == 2:
- [target, toolset] = target_split
- else:
- toolset = None
-
- return [build_file, target, toolset]
-
-
-def ResolveTarget(build_file, target, toolset):
- # This function resolves a target into a canonical form:
- # - a fully defined build file, either absolute or relative to the current
- # directory
- # - a target name
- # - a toolset
- #
- # build_file is the file relative to which 'target' is defined.
- # target is the qualified target.
- # toolset is the default toolset for that target.
- [parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target)
-
- if parsed_build_file:
- if build_file:
- # If a relative path, parsed_build_file is relative to the directory
- # containing build_file. If build_file is not in the current directory,
- # parsed_build_file is not a usable path as-is. Resolve it by
- # interpreting it as relative to build_file. If parsed_build_file is
- # absolute, it is usable as a path regardless of the current directory,
- # and os.path.join will return it as-is.
- build_file = os.path.normpath(os.path.join(os.path.dirname(build_file),
- parsed_build_file))
- # Further (to handle cases like ../cwd), make it relative to cwd)
- if not os.path.isabs(build_file):
- build_file = RelativePath(build_file, '.')
- else:
- build_file = parsed_build_file
-
- if parsed_toolset:
- toolset = parsed_toolset
-
- return [build_file, target, toolset]
-
-
-def BuildFile(fully_qualified_target):
- # Extracts the build file from the fully qualified target.
- return ParseQualifiedTarget(fully_qualified_target)[0]
-
-
-def GetEnvironFallback(var_list, default):
- """Look up a key in the environment, with fallback to secondary keys
- and finally falling back to a default value."""
- for var in var_list:
- if var in os.environ:
- return os.environ[var]
- return default
-
-
-def QualifiedTarget(build_file, target, toolset):
- # "Qualified" means the file that a target was defined in and the target
- # name, separated by a colon, suffixed by a # and the toolset name:
- # /path/to/file.gyp:target_name#toolset
- fully_qualified = build_file + ':' + target
- if toolset:
- fully_qualified = fully_qualified + '#' + toolset
- return fully_qualified
-
-
-@memoize
-def RelativePath(path, relative_to, follow_path_symlink=True):
- # Assuming both |path| and |relative_to| are relative to the current
- # directory, returns a relative path that identifies path relative to
- # relative_to.
- # If |follow_path_symlink| is true (default) and |path| is a symlink, then
- # this method returns a path to the real file represented by |path|. If it is
- # false, this method returns a path to the symlink. If |path| is not a
- # symlink, this option has no effect.
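- # Illustrative example (assumed, not from the original file):
- # RelativePath('foo/bar', 'foo/baz') returns '../bar' (absent symlinks).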
-
- # Convert to normalized (and therefore absolute paths).
- if follow_path_symlink:
- path = os.path.realpath(path)
- else:
- path = os.path.abspath(path)
- relative_to = os.path.realpath(relative_to)
-
- # On Windows, we can't create a relative path to a different drive, so just
- # use the absolute path.
- if sys.platform == 'win32':
- if (os.path.splitdrive(path)[0].lower() !=
- os.path.splitdrive(relative_to)[0].lower()):
- return path
-
- # Split the paths into components.
- path_split = path.split(os.path.sep)
- relative_to_split = relative_to.split(os.path.sep)
-
- # Determine how much of the prefix the two paths share.
- prefix_len = len(os.path.commonprefix([path_split, relative_to_split]))
-
- # Put enough ".." components to back up out of relative_to to the common
- # prefix, and then append the part of path_split after the common prefix.
- relative_split = [os.path.pardir] * (len(relative_to_split) - prefix_len) + \
- path_split[prefix_len:]
-
- if len(relative_split) == 0:
- # The paths were the same.
- return ''
-
- # Turn it back into a string and we're done.
- return os.path.join(*relative_split)
-
-
-@memoize
-def InvertRelativePath(path, toplevel_dir=None):
- """Given a path like foo/bar that is relative to toplevel_dir, return
- the inverse relative path back to the toplevel_dir.
-
- E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
- should always produce the empty string, unless the path contains symlinks.
- """
- if not path:
- return path
- toplevel_dir = '.' if toplevel_dir is None else toplevel_dir
- return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path))
-
-
-def FixIfRelativePath(path, relative_to):
- # Like RelativePath but returns |path| unchanged if it is absolute.
- if os.path.isabs(path):
- return path
- return RelativePath(path, relative_to)
-
-
-def UnrelativePath(path, relative_to):
- # Assuming that |relative_to| is relative to the current directory, and |path|
- # is a path relative to the dirname of |relative_to|, returns a path that
- # identifies |path| relative to the current directory.
- rel_dir = os.path.dirname(relative_to)
- return os.path.normpath(os.path.join(rel_dir, path))
-
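Sketches for the two path helpers above (paths invented; assumes gyp.common is importable and no symlinks are involved):

```python
import os
import gyp.common

# Inverting a toplevel-relative path leads back to the toplevel:
print gyp.common.InvertRelativePath('foo/bar')             # ../..
print os.path.normpath(os.path.join('foo/bar', '../..'))   # .

# UnrelativePath re-bases a path that was relative to a .gyp file's directory:
print gyp.common.UnrelativePath('icons/app.icns', 'src/chrome/chrome.gyp')
# ==> src/chrome/icons/app.icns
```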
-
-# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at
-# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
-# and the documentation for various shells.
-
-# _quote is a pattern that should match any argument that needs to be quoted
-# with double-quotes by EncodePOSIXShellArgument. It matches the following
-# characters appearing anywhere in an argument:
-# \t, \n, space parameter separators
-# # comments
-# $ expansions (quoted to always expand within one argument)
-# % called out by IEEE 1003.1 XCU.2.2
-# & job control
-# ' quoting
-# (, ) subshell execution
-# *, ?, [ pathname expansion
-# ; command delimiter
-# <, >, | redirection
-# = assignment
-# {, } brace expansion (bash)
-# ~ tilde expansion
-# It also matches the empty string, because "" (or '') is the only way to
-# represent an empty string literal argument to a POSIX shell.
-#
-# This does not match the characters in _escape, because those need to be
-# backslash-escaped regardless of whether they appear in a double-quoted
-# string.
-_quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$')
-
-# _escape is a pattern that should match any character that needs to be
-# escaped with a backslash, whether or not the argument matched the _quote
-# pattern. _escape is used with re.sub to backslash anything in _escape's
-# first match group, hence the (parentheses) in the regular expression.
-#
-# _escape matches the following characters appearing anywhere in an argument:
-# " to prevent POSIX shells from interpreting this character for quoting
-# \ to prevent POSIX shells from interpreting this character for escaping
-# ` to prevent POSIX shells from interpreting this character for command
-# substitution
-# Missing from this list is $, because the desired behavior of
-# EncodePOSIXShellArgument is to permit parameter (variable) expansion.
-#
-# Also missing from this list is !, which bash will interpret as the history
-# expansion character when history is enabled. bash does not enable history
-# by default in non-interactive shells, so this is not thought to be a problem.
-# ! was omitted from this list because bash interprets "\!" as a literal string
-# including the backslash character (avoiding history expansion but retaining
-# the backslash), which would not be correct for argument encoding. Handling
-# this case properly would also be problematic because bash allows the history
-# character to be changed with the histchars shell variable. Fortunately,
-# as history is not enabled in non-interactive shells and
-# EncodePOSIXShellArgument is only expected to encode for non-interactive
-# shells, there is no room for error here by ignoring !.
-_escape = re.compile(r'(["\\`])')
-
-def EncodePOSIXShellArgument(argument):
- """Encodes |argument| suitably for consumption by POSIX shells.
-
- argument may be quoted and escaped as necessary to ensure that POSIX shells
- treat the returned value as a literal representing the argument passed to
- this function. Parameter (variable) expansions beginning with $ are allowed
- to remain intact without escaping the $, to allow the argument to contain
- references to variables to be expanded by the shell.
- """
-
- if not isinstance(argument, str):
- argument = str(argument)
-
- if _quote.search(argument):
- quote = '"'
- else:
- quote = ''
-
- encoded = quote + re.sub(_escape, r'\\\1', argument) + quote
-
- return encoded
-
-
-def EncodePOSIXShellList(list):
- """Encodes |list| suitably for consumption by POSIX shells.
-
- Returns EncodePOSIXShellArgument for each item in list, and joins them
- together using the space character as an argument separator.
- """
-
- encoded_arguments = []
- for argument in list:
- encoded_arguments.append(EncodePOSIXShellArgument(argument))
- return ' '.join(encoded_arguments)
-
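A sketch of the resulting encodings (illustrative values only; assumes gyp.common is importable):

```python
import gyp.common

print gyp.common.EncodePOSIXShellArgument('hello')        # hello
print gyp.common.EncodePOSIXShellArgument('hello world')  # "hello world"
print gyp.common.EncodePOSIXShellArgument('$HOME/"x"')    # "$HOME/\"x\"" -- $ kept live
print gyp.common.EncodePOSIXShellArgument('')             # ""
print gyp.common.EncodePOSIXShellList(['echo', 'a b', '$PWD'])
# ==> echo "a b" "$PWD"
```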
-
-def DeepDependencyTargets(target_dicts, roots):
- """Returns the recursive list of target dependencies."""
- dependencies = set()
- pending = set(roots)
- while pending:
- # Pluck out one.
- r = pending.pop()
- # Skip if visited already.
- if r in dependencies:
- continue
- # Add it.
- dependencies.add(r)
- # Add its children.
- spec = target_dicts[r]
- pending.update(set(spec.get('dependencies', [])))
- pending.update(set(spec.get('dependencies_original', [])))
- return list(dependencies - set(roots))
-
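A toy illustration of the traversal (target names are invented; assumes gyp.common is importable):

```python
import gyp.common

# Dependencies are followed transitively; the roots themselves are excluded.
target_dicts = {
    'a.gyp:app#target':  {'dependencies': ['a.gyp:lib#target']},
    'a.gyp:lib#target':  {'dependencies': ['b.gyp:base#target']},
    'b.gyp:base#target': {},
}
deps = gyp.common.DeepDependencyTargets(target_dicts, ['a.gyp:app#target'])
print sorted(deps)  # ['a.gyp:lib#target', 'b.gyp:base#target']
```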
-
-def BuildFileTargets(target_list, build_file):
- """From a target_list, returns the subset from the specified build_file.
- """
- return [p for p in target_list if BuildFile(p) == build_file]
-
-
-def AllTargets(target_list, target_dicts, build_file):
- """Returns all targets (direct and dependencies) for the specified build_file.
- """
- bftargets = BuildFileTargets(target_list, build_file)
- deptargets = DeepDependencyTargets(target_dicts, bftargets)
- return bftargets + deptargets
-
-
-def WriteOnDiff(filename):
- """Write to a file only if the new contents differ.
-
- Arguments:
- filename: name of the file to potentially write to.
- Returns:
- A file like object which will write to temporary file and only overwrite
- the target if it differs (on close).
- """
-
- class Writer(object):
- """Wrapper around file which only covers the target if it differs."""
- def __init__(self):
- # Pick temporary file.
- tmp_fd, self.tmp_path = tempfile.mkstemp(
- suffix='.tmp',
- prefix=os.path.split(filename)[1] + '.gyp.',
- dir=os.path.split(filename)[0])
- try:
- self.tmp_file = os.fdopen(tmp_fd, 'wb')
- except Exception:
- # Don't leave turds behind.
- os.unlink(self.tmp_path)
- raise
-
- def __getattr__(self, attrname):
- # Delegate everything else to self.tmp_file
- return getattr(self.tmp_file, attrname)
-
- def close(self):
- try:
- # Close tmp file.
- self.tmp_file.close()
- # Determine if different.
- same = False
- try:
- same = filecmp.cmp(self.tmp_path, filename, False)
- except OSError, e:
- if e.errno != errno.ENOENT:
- raise
-
- if same:
- # The new file is identical to the old one, just get rid of the new
- # one.
- os.unlink(self.tmp_path)
- else:
- # The new file is different from the old one, or there is no old one.
- # Rename the new file to the permanent name.
- #
- # tempfile.mkstemp uses an overly restrictive mode, resulting in a
- # file that can only be read by the owner, regardless of the umask.
- # There's no reason to not respect the umask here, which means that
- # an extra hoop is required to fetch it and reset the new file's mode.
- #
- # No way to get the umask without setting a new one? Set a safe one
- # and then set it back to the old value.
- umask = os.umask(077)
- os.umask(umask)
- os.chmod(self.tmp_path, 0666 & ~umask)
- if sys.platform == 'win32' and os.path.exists(filename):
- # NOTE: on windows (but not cygwin) rename will not replace an
- # existing file, so it must be preceded with a remove. Sadly there
- # is no way to make the switch atomic.
- os.remove(filename)
- os.rename(self.tmp_path, filename)
- except Exception:
- # Don't leave turds behind.
- os.unlink(self.tmp_path)
- raise
-
- return Writer()
-
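Typical generator usage (the output path is invented; the containing directory must already exist, see EnsureDirExists below):

```python
import gyp.common

out = gyp.common.WriteOnDiff('out/Debug/obj/foo.mk')
out.write('# generated contents...\n')
out.close()  # replaces foo.mk only if the bytes actually changed
```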
-
-def EnsureDirExists(path):
- """Make sure the directory for |path| exists."""
- try:
- os.makedirs(os.path.dirname(path))
- except OSError:
- pass
-
-
-def GetFlavor(params):
- """Returns |params.flavor| if it's set, the system's default flavor else."""
- flavors = {
- 'cygwin': 'win',
- 'win32': 'win',
- 'darwin': 'mac',
- }
-
- if 'flavor' in params:
- return params['flavor']
- if sys.platform in flavors:
- return flavors[sys.platform]
- if sys.platform.startswith('sunos'):
- return 'solaris'
- if sys.platform.startswith('freebsd'):
- return 'freebsd'
- if sys.platform.startswith('openbsd'):
- return 'openbsd'
- if sys.platform.startswith('netbsd'):
- return 'netbsd'
- if sys.platform.startswith('aix'):
- return 'aix'
-
- return 'linux'
-
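For instance (results depend on the host; the 'android' value here is just an arbitrary override):

```python
import gyp.common

print gyp.common.GetFlavor({})                     # e.g. 'linux' on a Linux host
print gyp.common.GetFlavor({'flavor': 'android'})  # an explicit flavor always wins
```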
-
-def CopyTool(flavor, out_path):
- """Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it
- to |out_path|."""
- # aix and solaris just need flock emulation. mac and win use more complicated
- # support scripts.
- prefix = {
- 'aix': 'flock',
- 'solaris': 'flock',
- 'mac': 'mac',
- 'win': 'win'
- }.get(flavor, None)
- if not prefix:
- return
-
- # Slurp input file.
- source_path = os.path.join(
- os.path.dirname(os.path.abspath(__file__)), '%s_tool.py' % prefix)
- with open(source_path) as source_file:
- source = source_file.readlines()
-
- # Add header and write it out.
- tool_path = os.path.join(out_path, 'gyp-%s-tool' % prefix)
- with open(tool_path, 'w') as tool_file:
- tool_file.write(
- ''.join([source[0], '# Generated by gyp. Do not edit.\n'] + source[1:]))
-
- # Make file executable.
- os.chmod(tool_path, 0755)
-
-
-# From Alex Martelli,
-# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
-# ASPN: Python Cookbook: Remove duplicates from a sequence
-# First comment, dated 2001/10/13.
-# (Also in the printed Python Cookbook.)
-
-def uniquer(seq, idfun=None):
- if idfun is None:
- idfun = lambda x: x
- seen = {}
- result = []
- for item in seq:
- marker = idfun(item)
- if marker in seen: continue
- seen[marker] = 1
- result.append(item)
- return result
-
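A behavior sketch (assumes gyp.common is importable):

```python
import gyp.common

print gyp.common.uniquer([1, 2, 1, 3, 2])                   # [1, 2, 3]
# With a key function, the first item with a given key wins:
print gyp.common.uniquer(['a', 'A', 'b'], idfun=str.lower)  # ['a', 'b']
```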
-
-# Based on http://code.activestate.com/recipes/576694/.
-class OrderedSet(collections.MutableSet):
- def __init__(self, iterable=None):
- self.end = end = []
- end += [None, end, end] # sentinel node for doubly linked list
- self.map = {} # key --> [key, prev, next]
- if iterable is not None:
- self |= iterable
-
- def __len__(self):
- return len(self.map)
-
- def __contains__(self, key):
- return key in self.map
-
- def add(self, key):
- if key not in self.map:
- end = self.end
- curr = end[1]
- curr[2] = end[1] = self.map[key] = [key, curr, end]
-
- def discard(self, key):
- if key in self.map:
- key, prev_item, next_item = self.map.pop(key)
- prev_item[2] = next_item
- next_item[1] = prev_item
-
- def __iter__(self):
- end = self.end
- curr = end[2]
- while curr is not end:
- yield curr[0]
- curr = curr[2]
-
- def __reversed__(self):
- end = self.end
- curr = end[1]
- while curr is not end:
- yield curr[0]
- curr = curr[1]
-
- # The second argument is an addition that causes a pylint warning.
- def pop(self, last=True): # pylint: disable=W0221
- if not self:
- raise KeyError('set is empty')
- key = self.end[1][0] if last else self.end[2][0]
- self.discard(key)
- return key
-
- def __repr__(self):
- if not self:
- return '%s()' % (self.__class__.__name__,)
- return '%s(%r)' % (self.__class__.__name__, list(self))
-
- def __eq__(self, other):
- if isinstance(other, OrderedSet):
- return len(self) == len(other) and list(self) == list(other)
- return set(self) == set(other)
-
- # Extensions to the recipe.
- def update(self, iterable):
- for i in iterable:
- if i not in self:
- self.add(i)
-
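A short sketch of the set's ordering guarantees (assumes gyp.common is importable):

```python
from gyp.common import OrderedSet

s = OrderedSet(['b', 'a', 'b', 'c'])
print list(s)   # ['b', 'a', 'c'] -- insertion order, duplicates dropped
s.discard('a')
print s.pop()   # 'c' -- pops from the end by default
print list(s)   # ['b']
```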
-
-class CycleError(Exception):
- """An exception raised when an unexpected cycle is detected."""
- def __init__(self, nodes):
- self.nodes = nodes
- def __str__(self):
- return 'CycleError: cycle involving: ' + str(self.nodes)
-
-
-def TopologicallySorted(graph, get_edges):
- r"""Topologically sort based on a user provided edge definition.
-
- Args:
- graph: A list of node names.
- get_edges: A function mapping from node name to a hashable collection
- of node names which this node has outgoing edges to.
- Returns:
- A list containing all of the nodes in graph in topological order.
- It is assumed that calling get_edges once for each node and caching is
- cheaper than repeatedly calling get_edges.
- Raises:
- CycleError in the event of a cycle.
- Example:
- graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
- def GetEdges(node):
- return re.findall(r'\$\(([^)]*)\)', graph[node])
- print TopologicallySorted(graph.keys(), GetEdges)
- ==>
- ['a', 'c', 'b']
- """
- get_edges = memoize(get_edges)
- visited = set()
- visiting = set()
- ordered_nodes = []
- def Visit(node):
- if node in visiting:
- raise CycleError(visiting)
- if node in visited:
- return
- visited.add(node)
- visiting.add(node)
- for neighbor in get_edges(node):
- Visit(neighbor)
- visiting.remove(node)
- ordered_nodes.insert(0, node)
- for node in sorted(graph):
- Visit(node)
- return ordered_nodes
-
-def CrossCompileRequested():
- # TODO: figure out how to not build extra host objects in the
- # non-cross-compile case when this is enabled, and enable unconditionally.
- return (os.environ.get('GYP_CROSSCOMPILE') or
- os.environ.get('AR_host') or
- os.environ.get('CC_host') or
- os.environ.get('CXX_host') or
- os.environ.get('AR_target') or
- os.environ.get('CC_target') or
- os.environ.get('CXX_target'))
diff --git a/deps/gyp/pylib/gyp/common_test.py b/deps/gyp/pylib/gyp/common_test.py
deleted file mode 100755
index ad6f9a1438..0000000000
--- a/deps/gyp/pylib/gyp/common_test.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for the common.py file."""
-
-import gyp.common
-import unittest
-import sys
-
-
-class TestTopologicallySorted(unittest.TestCase):
- def test_Valid(self):
- """Test that sorting works on a valid graph with one possible order."""
- graph = {
- 'a': ['b', 'c'],
- 'b': [],
- 'c': ['d'],
- 'd': ['b'],
- }
- def GetEdge(node):
- return tuple(graph[node])
- self.assertEqual(
- gyp.common.TopologicallySorted(graph.keys(), GetEdge),
- ['a', 'c', 'd', 'b'])
-
- def test_Cycle(self):
- """Test that an exception is thrown on a cyclic graph."""
- graph = {
- 'a': ['b'],
- 'b': ['c'],
- 'c': ['d'],
- 'd': ['a'],
- }
- def GetEdge(node):
- return tuple(graph[node])
- self.assertRaises(
- gyp.common.CycleError, gyp.common.TopologicallySorted,
- graph.keys(), GetEdge)
-
-
-class TestGetFlavor(unittest.TestCase):
- """Test that gyp.common.GetFlavor works as intended"""
- original_platform = ''
-
- def setUp(self):
- self.original_platform = sys.platform
-
- def tearDown(self):
- sys.platform = self.original_platform
-
- def assertFlavor(self, expected, argument, param):
- sys.platform = argument
- self.assertEqual(expected, gyp.common.GetFlavor(param))
-
- def test_platform_default(self):
- self.assertFlavor('freebsd', 'freebsd9' , {})
- self.assertFlavor('freebsd', 'freebsd10', {})
- self.assertFlavor('openbsd', 'openbsd5' , {})
- self.assertFlavor('solaris', 'sunos5' , {})
- self.assertFlavor('solaris', 'sunos' , {})
- self.assertFlavor('linux' , 'linux2' , {})
- self.assertFlavor('linux' , 'linux3' , {})
-
- def test_param(self):
- self.assertFlavor('foobar', 'linux2' , {'flavor': 'foobar'})
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/deps/gyp/pylib/gyp/easy_xml.py b/deps/gyp/pylib/gyp/easy_xml.py
deleted file mode 100644
index bf949b6ac9..0000000000
--- a/deps/gyp/pylib/gyp/easy_xml.py
+++ /dev/null
@@ -1,157 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import re
-import os
-
-
-def XmlToString(content, encoding='utf-8', pretty=False):
- """ Writes the XML content to disk, touching the file only if it has changed.
-
- Visual Studio files have a lot of pre-defined structures. This function makes
- it easy to represent these structures as Python data structures, instead of
- having to create a lot of function calls.
-
- Each XML element of the content is represented as a list composed of:
- 1. The name of the element, a string,
- 2. The attributes of the element, a dictionary (optional), and
- 3+. The content of the element, if any. Strings are simple text nodes and
- lists are child elements.
-
- Example 1:
- <test/>
- becomes
- ['test']
-
- Example 2:
- <myelement a='value1' b='value2'>
- <childtype>This is</childtype>
- <childtype>it!</childtype>
- </myelement>
-
- becomes
- ['myelement', {'a':'value1', 'b':'value2'},
- ['childtype', 'This is'],
- ['childtype', 'it!'],
- ]
-
- Args:
- content: The structured content to be converted.
- encoding: The encoding to report on the first XML line.
- pretty: True if we want pretty printing with indents and new lines.
-
- Returns:
- The XML content as a string.
- """
- # We create a huge list of all the elements of the file.
- xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding]
- if pretty:
- xml_parts.append('\n')
- _ConstructContentList(xml_parts, content, pretty)
-
- # Convert it to a string
- return ''.join(xml_parts)
-
-
-def _ConstructContentList(xml_parts, specification, pretty, level=0):
- """ Appends the XML parts corresponding to the specification.
-
- Args:
- xml_parts: A list of XML parts to be appended to.
- specification: The specification of the element. See EasyXml docs.
- pretty: True if we want pretty printing with indents and new lines.
- level: Indentation level.
- """
- # The first item in a specification is the name of the element.
- if pretty:
- indentation = ' ' * level
- new_line = '\n'
- else:
- indentation = ''
- new_line = ''
- name = specification[0]
- if not isinstance(name, str):
- raise Exception('The first item of an EasyXml specification should be '
- 'a string. Specification was ' + str(specification))
- xml_parts.append(indentation + '<' + name)
-
- # Optionally in second position is a dictionary of the attributes.
- rest = specification[1:]
- if rest and isinstance(rest[0], dict):
- for at, val in sorted(rest[0].iteritems()):
- xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True)))
- rest = rest[1:]
- if rest:
- xml_parts.append('>')
- all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True)
- multi_line = not all_strings
- if multi_line and new_line:
- xml_parts.append(new_line)
- for child_spec in rest:
- # If it's a string, append a text node.
- # Otherwise recurse over that child definition
- if isinstance(child_spec, str):
- xml_parts.append(_XmlEscape(child_spec))
- else:
- _ConstructContentList(xml_parts, child_spec, pretty, level + 1)
- if multi_line and indentation:
- xml_parts.append(indentation)
- xml_parts.append('</%s>%s' % (name, new_line))
- else:
- xml_parts.append('/>%s' % new_line)
-
-
-def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False,
- win32=False):
- """ Writes the XML content to disk, touching the file only if it has changed.
-
- Args:
- content: The structured content to be written.
- path: Location of the file.
- encoding: The encoding to report on the first line of the XML file.
- pretty: True if we want pretty printing with indents and new lines.
- win32: True to force Windows (\r\n) line endings in the output.
- """
- xml_string = XmlToString(content, encoding, pretty)
- if win32 and os.linesep != '\r\n':
- xml_string = xml_string.replace('\n', '\r\n')
-
- # Get the old content
- try:
- f = open(path, 'r')
- existing = f.read()
- f.close()
- except IOError:
- existing = None
-
- # It has changed, write it
- if existing != xml_string:
- f = open(path, 'w')
- f.write(xml_string)
- f.close()
-
-
-_xml_escape_map = {
- '"': '&quot;',
- "'": '&apos;',
- '<': '&lt;',
- '>': '&gt;',
- '&': '&amp;',
- '\n': '&#xA;',
- '\r': '&#xD;',
-}
-
-
-_xml_escape_re = re.compile(
- "(%s)" % "|".join(map(re.escape, _xml_escape_map.keys())))
-
-
-def _XmlEscape(value, attr=False):
- """ Escape a string for inclusion in XML."""
- def replace(match):
- m = match.string[match.start() : match.end()]
- # don't replace single quotes in attrs
- if attr and m == "'":
- return m
- return _xml_escape_map[m]
- return _xml_escape_re.sub(replace, value)
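A sketch of the escaping rules (_XmlEscape is module-private; this only illustrates its behavior):

```python
from gyp.easy_xml import _XmlEscape

print _XmlEscape('a < b & "c"')       # a &lt; b &amp; &quot;c&quot;
print _XmlEscape("it's")              # it&apos;s
print _XmlEscape("it's", attr=True)   # it's -- apostrophes survive in attributes
```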
diff --git a/deps/gyp/pylib/gyp/easy_xml_test.py b/deps/gyp/pylib/gyp/easy_xml_test.py
deleted file mode 100755
index df64354982..0000000000
--- a/deps/gyp/pylib/gyp/easy_xml_test.py
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-""" Unit tests for the easy_xml.py file. """
-
-import gyp.easy_xml as easy_xml
-import unittest
-import StringIO
-
-
-class TestSequenceFunctions(unittest.TestCase):
-
- def setUp(self):
- self.stderr = StringIO.StringIO()
-
- def test_EasyXml_simple(self):
- self.assertEqual(
- easy_xml.XmlToString(['test']),
- '<?xml version="1.0" encoding="utf-8"?><test/>')
-
- self.assertEqual(
- easy_xml.XmlToString(['test'], encoding='Windows-1252'),
- '<?xml version="1.0" encoding="Windows-1252"?><test/>')
-
- def test_EasyXml_simple_with_attributes(self):
- self.assertEqual(
- easy_xml.XmlToString(['test2', {'a': 'value1', 'b': 'value2'}]),
- '<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>')
-
- def test_EasyXml_escaping(self):
- original = '<test>\'"\r&\nfoo'
- converted = '&lt;test&gt;\'&quot;&#xD;&amp;&#xA;foo'
- converted_apos = converted.replace("'", '&apos;')
- self.assertEqual(
- easy_xml.XmlToString(['test3', {'a': original}, original]),
- '<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>' %
- (converted, converted_apos))
-
- def test_EasyXml_pretty(self):
- self.assertEqual(
- easy_xml.XmlToString(
- ['test3',
- ['GrandParent',
- ['Parent1',
- ['Child']
- ],
- ['Parent2']
- ]
- ],
- pretty=True),
- '<?xml version="1.0" encoding="utf-8"?>\n'
- '<test3>\n'
- ' <GrandParent>\n'
- ' <Parent1>\n'
- ' <Child/>\n'
- ' </Parent1>\n'
- ' <Parent2/>\n'
- ' </GrandParent>\n'
- '</test3>\n')
-
-
- def test_EasyXml_complex(self):
- # We want to create:
- target = (
- '<?xml version="1.0" encoding="utf-8"?>'
- '<Project>'
- '<PropertyGroup Label="Globals">'
- '<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>'
- '<Keyword>Win32Proj</Keyword>'
- '<RootNamespace>automated_ui_tests</RootNamespace>'
- '</PropertyGroup>'
- '<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
- '<PropertyGroup '
- 'Condition="\'$(Configuration)|$(Platform)\'=='
- '\'Debug|Win32\'" Label="Configuration">'
- '<ConfigurationType>Application</ConfigurationType>'
- '<CharacterSet>Unicode</CharacterSet>'
- '</PropertyGroup>'
- '</Project>')
-
- xml = easy_xml.XmlToString(
- ['Project',
- ['PropertyGroup', {'Label': 'Globals'},
- ['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'],
- ['Keyword', 'Win32Proj'],
- ['RootNamespace', 'automated_ui_tests']
- ],
- ['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}],
- ['PropertyGroup',
- {'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'",
- 'Label': 'Configuration'},
- ['ConfigurationType', 'Application'],
- ['CharacterSet', 'Unicode']
- ]
- ])
- self.assertEqual(xml, target)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/deps/gyp/pylib/gyp/flock_tool.py b/deps/gyp/pylib/gyp/flock_tool.py
deleted file mode 100755
index b38d8660f7..0000000000
--- a/deps/gyp/pylib/gyp/flock_tool.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""These functions are executed via gyp-flock-tool when using the Makefile
-generator. Used on systems that don't have a built-in flock."""
-
-import fcntl
-import os
-import struct
-import subprocess
-import sys
-
-
-def main(args):
- executor = FlockTool()
- executor.Dispatch(args)
-
-
-class FlockTool(object):
- """This class emulates the 'flock' command."""
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- # Note that the stock python on SunOS has a bug
- # where fcntl.flock(fd, LOCK_EX) always fails
- # with EBADF, that's why we use this F_SETLK
- # hack instead.
- fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666)
- if sys.platform.startswith('aix'):
- # Python on AIX is compiled with LARGEFILE support, which changes the
- # struct size.
- op = struct.pack('hhIllqq', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
- else:
- op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
- fcntl.fcntl(fd, fcntl.F_SETLK, op)
- return subprocess.call(cmd_list)
-
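The generated gyp-flock-tool wrapper is invoked by make roughly like this (lock path and command are invented; POSIX only, assuming the definitions above are in scope):

```python
# Equivalent to: gyp-flock-tool flock /tmp/linker.lock touch /tmp/out
# Dispatch() maps 'flock' to ExecFlock(), which takes the lock via F_SETLK
# and then runs the remaining arguments through subprocess.
main(['flock', '/tmp/linker.lock', 'touch', '/tmp/out'])
```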
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/deps/gyp/pylib/gyp/generator/__init__.py b/deps/gyp/pylib/gyp/generator/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
--- a/deps/gyp/pylib/gyp/generator/__init__.py
+++ /dev/null
diff --git a/deps/gyp/pylib/gyp/generator/analyzer.py b/deps/gyp/pylib/gyp/generator/analyzer.py
deleted file mode 100644
index 921c1a6b71..0000000000
--- a/deps/gyp/pylib/gyp/generator/analyzer.py
+++ /dev/null
@@ -1,741 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-This script is intended for use as a GYP_GENERATOR. It takes as input (by way of
-the generator flag config_path) the path of a json file that dictates the files
-and targets to search for. The following keys are supported:
-files: list of paths (relative) of the files to search for.
-test_targets: unqualified target names to search for. Any target in this list
-that depends upon a file in |files| is output regardless of the type of target
-or chain of dependencies.
-additional_compile_targets: Unqualified targets to search for in addition to
-test_targets. Targets in the combined list that depend upon a file in |files|
-are not necessarily output. For example, if the target is of type none then the
-target is not output (but one of the descendants of the target will be).
-
-The following is output:
-error: only supplied if there is an error.
-compile_targets: minimal set of targets that directly or indirectly (for
- targets of type none) depend on the files in |files| and is one of the
- supplied targets or a target that one of the supplied targets depends on.
- The expectation is this set of targets is passed into a build step. This list
- always contains the output of test_targets as well.
-test_targets: set of targets from the supplied |test_targets| that either
- directly or indirectly depend upon a file in |files|. This list is useful
- if additional processing needs to be done for certain targets after the
- build, such as running tests.
-status: outputs one of three values: none of the supplied files were found,
- one of the include files changed so that it should be assumed everything
- changed (in this case test_targets and compile_targets are not output) or at
- least one file was found.
-invalid_targets: list of supplied targets that were not found.
-
-Example:
-Consider a graph like the following:
-  A       D
- / \
-B   C
-A depends upon both B and C; A is of type none, and B and C are executables.
-D is an executable, has no dependencies and nothing depends on it.
-If |additional_compile_targets| = ["A"], |test_targets| = ["B", "C"] and
-files = ["b.cc", "d.cc"] (B depends upon b.cc and D depends upon d.cc), then
-the following is output:
-|compile_targets| = ["B"] B must built as it depends upon the changed file b.cc
-and the supplied target A depends upon it. A is not output as a build_target
-as it is of type none with no rules and actions.
-|test_targets| = ["B"] B directly depends upon the change file b.cc.
-
-Even though the file d.cc, which D depends upon, has changed D is not output
-as it was not supplied by way of |additional_compile_targets| or |test_targets|.
-
-If the generator flag analyzer_output_path is specified, output is written
-there. Otherwise output is written to stdout.
-
-In Gyp the "all" target is shorthand for the root targets in the files passed
-to gyp. For example, if file "a.gyp" contains targets "a1" and
-"a2", and file "b.gyp" contains targets "b1" and "b2" and "a2" has a dependency
-on "b2" and gyp is supplied "a.gyp" then "all" consists of "a1" and "a2".
-Notice that "b1" and "b2" are not in the "all" target as "b.gyp" was not
-directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp
-then the "all" target includes "b1" and "b2".
-"""
-
-import gyp.common
-import gyp.ninja_syntax as ninja_syntax
-import json
-import os
-import posixpath
-import sys
-
-debug = False
-
-found_dependency_string = 'Found dependency'
-no_dependency_string = 'No dependencies'
-# Status when it should be assumed that everything has changed.
-all_changed_string = 'Found dependency (all)'
-
-# MatchStatus is used to indicate if and how a target depends upon the supplied
-# sources.
-# The target's sources contain one of the supplied paths.
-MATCH_STATUS_MATCHES = 1
-# The target has a dependency on another target that contains one of the
-# supplied paths.
-MATCH_STATUS_MATCHES_BY_DEPENDENCY = 2
-# The target's sources weren't in the supplied paths and none of the target's
-# dependencies depend upon a target that matched.
-MATCH_STATUS_DOESNT_MATCH = 3
-# The target doesn't contain the source, but the dependent targets have not yet
-# been visited to determine a more specific status yet.
-MATCH_STATUS_TBD = 4
-
-generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
-
-generator_wants_static_library_dependencies_adjusted = False
-
-generator_default_variables = {
-}
-for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
- 'LIB_DIR', 'SHARED_LIB_DIR']:
- generator_default_variables[dirname] = '!!!'
-
-for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
- 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
- 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
- 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
- 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
- 'CONFIGURATION_NAME']:
- generator_default_variables[unused] = ''
-
-
-def _ToGypPath(path):
- """Converts a path to the format used by gyp."""
- if os.sep == '\\' and os.altsep == '/':
- return path.replace('\\', '/')
- return path
-
-
-def _ResolveParent(path, base_path_components):
- """Resolves |path|, which starts with at least one '../'. Returns an empty
- string if the path shouldn't be considered. See _AddSources() for a
- description of |base_path_components|."""
- depth = 0
- while path.startswith('../'):
- depth += 1
- path = path[3:]
- # Relative includes may go outside the source tree. For example, an action may
- # have inputs in /usr/include, which are not in the source tree.
- if depth > len(base_path_components):
- return ''
- if depth == len(base_path_components):
- return path
- return '/'.join(base_path_components[0:len(base_path_components) - depth]) + \
- '/' + path
-
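A sketch of the resolution (assuming the definitions above are in scope; a target in 'src/foo' has base_path_components == ['src', 'foo']):

```python
print _ResolveParent('../../inc/a.h', ['src', 'foo'])   # inc/a.h
print _ResolveParent('../a.h', ['src', 'foo'])          # src/a.h
print _ResolveParent('../../../a.h', ['src', 'foo'])    # '' -- left the source tree
```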
-
-def _AddSources(sources, base_path, base_path_components, result):
- """Extracts valid sources from |sources| and adds them to |result|. Each
- source file is relative to |base_path|, but may contain '..'. To make
- resolving '..' easier |base_path_components| contains each of the
- directories in |base_path|. Additionally each source may contain variables.
- Such sources are ignored as it is assumed dependencies on them are expressed
- and tracked in some other means."""
- # NOTE: gyp paths are always posix style.
- for source in sources:
- if not len(source) or source.startswith('!!!') or source.startswith('$'):
- continue
- # variable expansion may lead to //.
- org_source = source
- source = source[0] + source[1:].replace('//', '/')
- if source.startswith('../'):
- source = _ResolveParent(source, base_path_components)
- if len(source):
- result.append(source)
- continue
- result.append(base_path + source)
- if debug:
- print 'AddSource', org_source, result[len(result) - 1]
-
-
-def _ExtractSourcesFromAction(action, base_path, base_path_components,
- results):
- if 'inputs' in action:
- _AddSources(action['inputs'], base_path, base_path_components, results)
-
-
-def _ToLocalPath(toplevel_dir, path):
- """Converts |path| to a path relative to |toplevel_dir|."""
- if path == toplevel_dir:
- return ''
- if path.startswith(toplevel_dir + '/'):
- return path[len(toplevel_dir) + len('/'):]
- return path
-
-
-def _ExtractSources(target, target_dict, toplevel_dir):
- # |target| is either absolute or relative and in the format of the OS. Gyp
- # source paths are always posix. Convert |target| to a posix path relative to
- # |toplevel_dir_|. This is done to make it easy to build source paths.
- base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target)))
- base_path_components = base_path.split('/')
-
- # Add a trailing '/' so that _AddSources() can easily build paths.
- if len(base_path):
- base_path += '/'
-
- if debug:
- print 'ExtractSources', target, base_path
-
- results = []
- if 'sources' in target_dict:
- _AddSources(target_dict['sources'], base_path, base_path_components,
- results)
- # Include the inputs from any actions. Any changes to these affect the
- # resulting output.
- if 'actions' in target_dict:
- for action in target_dict['actions']:
- _ExtractSourcesFromAction(action, base_path, base_path_components,
- results)
- if 'rules' in target_dict:
- for rule in target_dict['rules']:
- _ExtractSourcesFromAction(rule, base_path, base_path_components, results)
-
- return results
-
-
-class Target(object):
- """Holds information about a particular target:
- deps: set of Targets this Target depends upon. This is not recursive, only the
- direct dependent Targets.
- match_status: one of the MatchStatus values.
- back_deps: set of Targets that have a dependency on this Target.
- visited: used during iteration to indicate whether we've visited this target.
- This is used for two iterations, once in building the set of Targets and
- again in _GetBuildTargets().
- name: fully qualified name of the target.
- requires_build: True if the target type is such that it needs to be built.
- See _DoesTargetTypeRequireBuild for details.
- added_to_compile_targets: used when determining if the target was added to the
- set of targets that needs to be built.
- in_roots: true if this target is a descendant of one of the root nodes.
- is_executable: true if the type of target is executable.
- is_static_library: true if the type of target is static_library.
- is_or_has_linked_ancestor: true if the target does a link (eg executable), or
- if there is a target in back_deps that does a link."""
- def __init__(self, name):
- self.deps = set()
- self.match_status = MATCH_STATUS_TBD
- self.back_deps = set()
- self.name = name
- # TODO(sky): I don't like hanging this off Target. This state is specific
- # to certain functions and should be isolated there.
- self.visited = False
- self.requires_build = False
- self.added_to_compile_targets = False
- self.in_roots = False
- self.is_executable = False
- self.is_static_library = False
- self.is_or_has_linked_ancestor = False
-
-
-class Config(object):
- """Details what we're looking for
- files: set of files to search for
- targets: see file description for details."""
- def __init__(self):
- self.files = []
- self.targets = set()
- self.additional_compile_target_names = set()
- self.test_target_names = set()
-
- def Init(self, params):
- """Initializes Config. This is a separate method as it raises an exception
- if there is a parse error."""
- generator_flags = params.get('generator_flags', {})
- config_path = generator_flags.get('config_path', None)
- if not config_path:
- return
- try:
- f = open(config_path, 'r')
- config = json.load(f)
- f.close()
- except IOError:
- raise Exception('Unable to open file ' + config_path)
- except ValueError as e:
- raise Exception('Unable to parse config file ' + config_path + str(e))
- if not isinstance(config, dict):
- raise Exception('config_path must be a JSON file containing a dictionary')
- self.files = config.get('files', [])
- self.additional_compile_target_names = set(
- config.get('additional_compile_targets', []))
- self.test_target_names = set(config.get('test_targets', []))
-
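A minimal example of the JSON that config_path may point at (file and target names are invented), shown here as the equivalent Python literal:

```python
# files are relative paths; the two target lists hold unqualified names.
config = {
    'files': ['src/foo.cc', 'src/foo.h'],
    'test_targets': ['foo_unittests'],
    'additional_compile_targets': ['all'],
}
```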
-
-def _WasBuildFileModified(build_file, data, files, toplevel_dir):
- """Returns true if the build file |build_file| is either in |files| or
- one of the files included by |build_file| is in |files|. |toplevel_dir| is
- the root of the source tree."""
- if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
- if debug:
- print 'gyp file modified', build_file
- return True
-
- # First element of included_files is the file itself.
- if len(data[build_file]['included_files']) <= 1:
- return False
-
- for include_file in data[build_file]['included_files'][1:]:
- # |included_files| are relative to the directory of the |build_file|.
- rel_include_file = \
- _ToGypPath(gyp.common.UnrelativePath(include_file, build_file))
- if _ToLocalPath(toplevel_dir, rel_include_file) in files:
- if debug:
- print 'included gyp file modified, gyp_file=', build_file, \
- 'included file=', rel_include_file
- return True
- return False
-
-
-def _GetOrCreateTargetByName(targets, target_name):
- """Creates or returns the Target at targets[target_name]. If there is no
- Target for |target_name| one is created. Returns a tuple of whether a new
- Target was created and the Target."""
- if target_name in targets:
- return False, targets[target_name]
- target = Target(target_name)
- targets[target_name] = target
- return True, target
-
-
-def _DoesTargetTypeRequireBuild(target_dict):
- """Returns true if the target type is such that it needs to be built."""
- # If a 'none' target has rules or actions we assume it requires a build.
- return bool(target_dict['type'] != 'none' or
- target_dict.get('actions') or target_dict.get('rules'))
-
-
-def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files,
- build_files):
- """Returns a tuple of the following:
- . A dictionary mapping from fully qualified name to Target.
- . A list of the targets that have a source file in |files|.
- . Targets that constitute the 'all' target. See description at top of file
- for details on the 'all' target.
- This sets the |match_status| of the targets that contain any of the source
- files in |files| to MATCH_STATUS_MATCHES.
- |toplevel_dir| is the root of the source tree."""
- # Maps from target name to Target.
- name_to_target = {}
-
- # Targets that matched.
- matching_targets = []
-
- # Queue of targets to visit.
- targets_to_visit = target_list[:]
-
- # Maps from build file to a boolean indicating whether the build file is in
- # |files|.
- build_file_in_files = {}
-
- # Root targets across all files.
- roots = set()
-
- # Set of Targets in |build_files|.
- build_file_targets = set()
-
- while len(targets_to_visit) > 0:
- target_name = targets_to_visit.pop()
- created_target, target = _GetOrCreateTargetByName(name_to_target,
- target_name)
- if created_target:
- roots.add(target)
- elif target.visited:
- continue
-
- target.visited = True
- target.requires_build = _DoesTargetTypeRequireBuild(
- target_dicts[target_name])
- target_type = target_dicts[target_name]['type']
- target.is_executable = target_type == 'executable'
- target.is_static_library = target_type == 'static_library'
- target.is_or_has_linked_ancestor = (target_type == 'executable' or
- target_type == 'shared_library')
-
- build_file = gyp.common.ParseQualifiedTarget(target_name)[0]
- if build_file not in build_file_in_files:
- build_file_in_files[build_file] = \
- _WasBuildFileModified(build_file, data, files, toplevel_dir)
-
- if build_file in build_files:
- build_file_targets.add(target)
-
- # If a build file (or any of its included files) is modified we assume all
- # targets in the file are modified.
- if build_file_in_files[build_file]:
- print 'matching target from modified build file', target_name
- target.match_status = MATCH_STATUS_MATCHES
- matching_targets.append(target)
- else:
- sources = _ExtractSources(target_name, target_dicts[target_name],
- toplevel_dir)
- for source in sources:
- if _ToGypPath(os.path.normpath(source)) in files:
- print 'target', target_name, 'matches', source
- target.match_status = MATCH_STATUS_MATCHES
- matching_targets.append(target)
- break
-
- # Add dependencies to visit as well as updating back pointers for deps.
- for dep in target_dicts[target_name].get('dependencies', []):
- targets_to_visit.append(dep)
-
- created_dep_target, dep_target = _GetOrCreateTargetByName(name_to_target,
- dep)
- if not created_dep_target:
- roots.discard(dep_target)
-
- target.deps.add(dep_target)
- dep_target.back_deps.add(target)
-
- return name_to_target, matching_targets, roots & build_file_targets
-
-
-def _GetUnqualifiedToTargetMapping(all_targets, to_find):
- """Returns a tuple of the following:
- . mapping (dictionary) from unqualified name to Target for all the
- Targets in |to_find|.
- . any target names not found. If this is empty all targets were found."""
- result = {}
- if not to_find:
- return {}, []
- to_find = set(to_find)
- for target_name in all_targets.keys():
- extracted = gyp.common.ParseQualifiedTarget(target_name)
- if len(extracted) > 1 and extracted[1] in to_find:
- to_find.remove(extracted[1])
- result[extracted[1]] = all_targets[target_name]
- if not to_find:
- return result, []
- return result, [x for x in to_find]
-
-
-def _DoesTargetDependOnMatchingTargets(target):
- """Returns true if |target| or any of its dependencies is one of the
- targets containing the files supplied as input to analyzer. This updates
- |matches| of the Targets as it recurses.
- target: the Target to look for."""
- if target.match_status == MATCH_STATUS_DOESNT_MATCH:
- return False
- if target.match_status == MATCH_STATUS_MATCHES or \
- target.match_status == MATCH_STATUS_MATCHES_BY_DEPENDENCY:
- return True
- for dep in target.deps:
- if _DoesTargetDependOnMatchingTargets(dep):
- target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
- print '\t', target.name, 'matches by dep', dep.name
- return True
- target.match_status = MATCH_STATUS_DOESNT_MATCH
- return False
-
-
-def _GetTargetsDependingOnMatchingTargets(possible_targets):
- """Returns the list of Targets in |possible_targets| that depend (either
- directly or indirectly) on at least one of the targets containing the files
- supplied as input to analyzer.
- possible_targets: targets to search from."""
- found = []
- print 'Targets that matched by dependency:'
- for target in possible_targets:
- if _DoesTargetDependOnMatchingTargets(target):
- found.append(target)
- return found
-
-
-def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
- """Recurses through all targets that depend on |target|, adding all targets
- that need to be built (and are in |roots|) to |result|.
- roots: set of root targets.
- add_if_no_ancestor: If true and there are no ancestors of |target| then add
- |target| to |result|. |target| must still be in |roots|.
- result: targets that need to be built are added here."""
- if target.visited:
- return
-
- target.visited = True
- target.in_roots = target in roots
-
- for back_dep_target in target.back_deps:
- _AddCompileTargets(back_dep_target, roots, False, result)
- target.added_to_compile_targets |= back_dep_target.added_to_compile_targets
- target.in_roots |= back_dep_target.in_roots
- target.is_or_has_linked_ancestor |= (
- back_dep_target.is_or_has_linked_ancestor)
-
- # Always add 'executable' targets. Even though they may be built by other
- # targets that depend upon them it makes detection of what is going to be
- # built easier.
- # And always add static_libraries that have no dependencies on them from
- # linkables. This is necessary as the other dependencies on them may be
- # static libraries themselves, which are not compile time dependencies.
- if target.in_roots and \
- (target.is_executable or
- (not target.added_to_compile_targets and
- (add_if_no_ancestor or target.requires_build)) or
- (target.is_static_library and add_if_no_ancestor and
- not target.is_or_has_linked_ancestor)):
- print '\t\tadding to compile targets', target.name, 'executable', \
- target.is_executable, 'added_to_compile_targets', \
- target.added_to_compile_targets, 'add_if_no_ancestor', \
- add_if_no_ancestor, 'requires_build', target.requires_build, \
- 'is_static_library', target.is_static_library, \
- 'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor
- result.add(target)
- target.added_to_compile_targets = True
-
-
-def _GetCompileTargets(matching_targets, supplied_targets):
- """Returns the set of Targets that require a build.
- matching_targets: targets that changed and need to be built.
- supplied_targets: set of targets supplied to analyzer to search from."""
- result = set()
- for target in matching_targets:
- print 'finding compile targets for match', target.name
- _AddCompileTargets(target, supplied_targets, True, result)
- return result
-
-
-def _WriteOutput(params, **values):
- """Writes the output, either to stdout or a file is specified."""
- if 'error' in values:
- print 'Error:', values['error']
- if 'status' in values:
- print values['status']
- if 'targets' in values:
- values['targets'].sort()
- print 'Supplied targets that depend on changed files:'
- for target in values['targets']:
- print '\t', target
- if 'invalid_targets' in values:
- values['invalid_targets'].sort()
- print 'The following targets were not found:'
- for target in values['invalid_targets']:
- print '\t', target
- if 'build_targets' in values:
- values['build_targets'].sort()
- print 'Targets that require a build:'
- for target in values['build_targets']:
- print '\t', target
- if 'compile_targets' in values:
- values['compile_targets'].sort()
- print 'Targets that need to be built:'
- for target in values['compile_targets']:
- print '\t', target
- if 'test_targets' in values:
- values['test_targets'].sort()
- print 'Test targets:'
- for target in values['test_targets']:
- print '\t', target
-
- output_path = params.get('generator_flags', {}).get(
- 'analyzer_output_path', None)
- if not output_path:
- print json.dumps(values)
- return
- try:
- f = open(output_path, 'w')
- f.write(json.dumps(values) + '\n')
- f.close()
- except IOError as e:
- print 'Error writing to output file', output_path, str(e)
-
-
-def _WasGypIncludeFileModified(params, files):
- """Returns true if one of the files in |files| is in the set of included
- files."""
- if params['options'].includes:
- for include in params['options'].includes:
- if _ToGypPath(os.path.normpath(include)) in files:
- print 'Include file modified, assuming all changed', include
- return True
- return False
-
-
-def _NamesNotIn(names, mapping):
- """Returns a list of the values in |names| that are not in |mapping|."""
- return [name for name in names if name not in mapping]
-
-
-def _LookupTargets(names, mapping):
- """Returns a list of the mapping[name] for each value in |names| that is in
- |mapping|."""
- return [mapping[name] for name in names if name in mapping]
-
-
-def CalculateVariables(default_variables, params):
- """Calculate additional variables for use in the build (called by gyp)."""
- flavor = gyp.common.GetFlavor(params)
- if flavor == 'mac':
- default_variables.setdefault('OS', 'mac')
- elif flavor == 'win':
- default_variables.setdefault('OS', 'win')
- # Copy additional generator configuration data from VS, which is shared
- # by the Windows Ninja generator.
- import gyp.generator.msvs as msvs_generator
- generator_additional_non_configuration_keys = getattr(msvs_generator,
- 'generator_additional_non_configuration_keys', [])
- generator_additional_path_sections = getattr(msvs_generator,
- 'generator_additional_path_sections', [])
-
- gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
- else:
- operating_system = flavor
- if flavor == 'android':
- operating_system = 'linux' # Keep this legacy behavior for now.
- default_variables.setdefault('OS', operating_system)
-
-
-class TargetCalculator(object):
- """Calculates the matching test_targets and matching compile_targets."""
- def __init__(self, files, additional_compile_target_names, test_target_names,
- data, target_list, target_dicts, toplevel_dir, build_files):
- self._additional_compile_target_names = set(additional_compile_target_names)
- self._test_target_names = set(test_target_names)
- self._name_to_target, self._changed_targets, self._root_targets = (
- _GenerateTargets(data, target_list, target_dicts, toplevel_dir,
- frozenset(files), build_files))
- self._unqualified_mapping, self.invalid_targets = (
- _GetUnqualifiedToTargetMapping(self._name_to_target,
- self._supplied_target_names_no_all()))
-
- def _supplied_target_names(self):
- return self._additional_compile_target_names | self._test_target_names
-
- def _supplied_target_names_no_all(self):
- """Returns the supplied test targets without 'all'."""
- result = self._supplied_target_names()
- result.discard('all')
- return result
-
- def is_build_impacted(self):
- """Returns true if the supplied files impact the build at all."""
- return self._changed_targets
-
- def find_matching_test_target_names(self):
- """Returns the set of output test targets."""
- assert self.is_build_impacted()
- # Find the test targets first. 'all' is special cased to mean all the
- # root targets. To deal with 'all', the supplied |test_targets| are expanded
- # to include the root targets during lookup. If any of the root targets
- # match, they are removed and replaced with 'all'.
- test_target_names_no_all = set(self._test_target_names)
- test_target_names_no_all.discard('all')
- test_targets_no_all = _LookupTargets(test_target_names_no_all,
- self._unqualified_mapping)
- test_target_names_contains_all = 'all' in self._test_target_names
- if test_target_names_contains_all:
- test_targets = [x for x in (set(test_targets_no_all) |
- set(self._root_targets))]
- else:
- test_targets = [x for x in test_targets_no_all]
- print 'supplied test_targets'
- for target_name in self._test_target_names:
- print '\t', target_name
- print 'found test_targets'
- for target in test_targets:
- print '\t', target.name
- print 'searching for matching test targets'
- matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
- matching_test_targets_contains_all = (test_target_names_contains_all and
- set(matching_test_targets) &
- set(self._root_targets))
- if matching_test_targets_contains_all:
- # Remove any of the targets for 'all' that were not explicitly supplied;
- # 'all' is subsequently added to the matching names below.
- matching_test_targets = [x for x in (set(matching_test_targets) &
- set(test_targets_no_all))]
- print 'matched test_targets'
- for target in matching_test_targets:
- print '\t', target.name
- matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1]
- for target in matching_test_targets]
- if matching_test_targets_contains_all:
- matching_target_names.append('all')
- print '\tall'
- return matching_target_names
-
- def find_matching_compile_target_names(self):
- """Returns the set of output compile targets."""
- assert self.is_build_impacted()
- # Compile targets are found by searching up from changed targets.
- # Reset the visited status for _GetBuildTargets.
- for target in self._name_to_target.itervalues():
- target.visited = False
-
- supplied_targets = _LookupTargets(self._supplied_target_names_no_all(),
- self._unqualified_mapping)
- if 'all' in self._supplied_target_names():
- supplied_targets = [x for x in (set(supplied_targets) |
- set(self._root_targets))]
- print 'Supplied test_targets & compile_targets'
- for target in supplied_targets:
- print '\t', target.name
- print 'Finding compile targets'
- compile_targets = _GetCompileTargets(self._changed_targets,
- supplied_targets)
- return [gyp.common.ParseQualifiedTarget(target.name)[1]
- for target in compile_targets]
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- """Called by gyp as the final stage. Outputs results."""
- config = Config()
- try:
- config.Init(params)
-
- if not config.files:
- raise Exception('Must specify files to analyze via config_path generator '
- 'flag')
-
- toplevel_dir = _ToGypPath(os.path.abspath(params['options'].toplevel_dir))
- if debug:
- print 'toplevel_dir', toplevel_dir
-
- if _WasGypIncludeFileModified(params, config.files):
- result_dict = { 'status': all_changed_string,
- 'test_targets': list(config.test_target_names),
- 'compile_targets': list(
- config.additional_compile_target_names |
- config.test_target_names) }
- _WriteOutput(params, **result_dict)
- return
-
- calculator = TargetCalculator(config.files,
- config.additional_compile_target_names,
- config.test_target_names, data,
- target_list, target_dicts, toplevel_dir,
- params['build_files'])
- if not calculator.is_build_impacted():
- result_dict = { 'status': no_dependency_string,
- 'test_targets': [],
- 'compile_targets': [] }
- if calculator.invalid_targets:
- result_dict['invalid_targets'] = calculator.invalid_targets
- _WriteOutput(params, **result_dict)
- return
-
- test_target_names = calculator.find_matching_test_target_names()
- compile_target_names = calculator.find_matching_compile_target_names()
- found_at_least_one_target = compile_target_names or test_target_names
- result_dict = { 'test_targets': test_target_names,
- 'status': found_dependency_string if
- found_at_least_one_target else no_dependency_string,
- 'compile_targets': list(
- set(compile_target_names) |
- set(test_target_names)) }
- if calculator.invalid_targets:
- result_dict['invalid_targets'] = calculator.invalid_targets
- _WriteOutput(params, **result_dict)
-
- except Exception as e:
- _WriteOutput(params, error=str(e))
diff --git a/deps/gyp/pylib/gyp/generator/cmake.py b/deps/gyp/pylib/gyp/generator/cmake.py
deleted file mode 100644
index 17f5e6396c..0000000000
--- a/deps/gyp/pylib/gyp/generator/cmake.py
+++ /dev/null
@@ -1,1221 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""cmake output module
-
-This module is under development and should be considered experimental.
-
-This module produces cmake (2.8.8+) input as its output. One CMakeLists.txt is
-created for each configuration.
-
-This module's original purpose was to support editing in IDEs like KDevelop
-which use CMake for project management. It is also possible to use CMake to
-generate projects for other IDEs such as eclipse cdt and code::blocks. QtCreator
-will convert the CMakeLists.txt to a code::blocks cbp for the editor to read,
-but build using CMake. As a result the QtCreator editor is unaware of compiler
-defines. The generated CMakeLists.txt can also be used to build on Linux. There
-is currently no support for building on platforms other than Linux.
-
-The generated CMakeLists.txt should properly compile all projects. However,
-there is a mismatch between gyp and cmake with regard to linking. All attempts
-are made to work around this, but CMake sometimes sees -Wl,--start-group as a
-library and incorrectly repeats it. As a result the output of this generator
-should not be relied on for building.
-
-When using this with KDevelop, use version 4.4+. Previous versions of KDevelop will
-not be able to find the header file directories described in the generated
-CMakeLists.txt file.
-"""
-
-import multiprocessing
-import os
-import signal
-import string
-import subprocess
-import gyp.common
-
-generator_default_variables = {
- 'EXECUTABLE_PREFIX': '',
- 'EXECUTABLE_SUFFIX': '',
- 'STATIC_LIB_PREFIX': 'lib',
- 'STATIC_LIB_SUFFIX': '.a',
- 'SHARED_LIB_PREFIX': 'lib',
- 'SHARED_LIB_SUFFIX': '.so',
- 'SHARED_LIB_DIR': '${builddir}/lib.${TOOLSET}',
- 'LIB_DIR': '${obj}.${TOOLSET}',
- 'INTERMEDIATE_DIR': '${obj}.${TOOLSET}/${TARGET}/geni',
- 'SHARED_INTERMEDIATE_DIR': '${obj}/gen',
- 'PRODUCT_DIR': '${builddir}',
- 'RULE_INPUT_PATH': '${RULE_INPUT_PATH}',
- 'RULE_INPUT_DIRNAME': '${RULE_INPUT_DIRNAME}',
- 'RULE_INPUT_NAME': '${RULE_INPUT_NAME}',
- 'RULE_INPUT_ROOT': '${RULE_INPUT_ROOT}',
- 'RULE_INPUT_EXT': '${RULE_INPUT_EXT}',
- 'CONFIGURATION_NAME': '${configuration}',
-}
-
-FULL_PATH_VARS = ('${CMAKE_CURRENT_LIST_DIR}', '${builddir}', '${obj}')
-
-generator_supports_multiple_toolsets = True
-generator_wants_static_library_dependencies_adjusted = True
-
-COMPILABLE_EXTENSIONS = {
- '.c': 'cc',
- '.cc': 'cxx',
- '.cpp': 'cxx',
- '.cxx': 'cxx',
- '.s': 's', # cc
- '.S': 's', # cc
-}
-
-
-def RemovePrefix(a, prefix):
- """Returns 'a' without 'prefix' if it starts with 'prefix'."""
- return a[len(prefix):] if a.startswith(prefix) else a
-
-
-def CalculateVariables(default_variables, params):
- """Calculate additional variables for use in the build (called by gyp)."""
- default_variables.setdefault('OS', gyp.common.GetFlavor(params))
-
-
-def Compilable(filename):
- """Return true if the file is compilable (should be in OBJS)."""
- return any(filename.endswith(e) for e in COMPILABLE_EXTENSIONS)
-
-
-def Linkable(filename):
- """Return true if the file is linkable (should be on the link line)."""
- return filename.endswith('.o')
-
-
-def NormjoinPathForceCMakeSource(base_path, rel_path):
- """Resolves rel_path against base_path and returns the result.
-
- If rel_path is an absolute path it is returned unchanged.
- Otherwise it is resolved against base_path and normalized.
- If the result is a relative path, it is forced to be relative to the
- CMakeLists.txt.
- """
- if os.path.isabs(rel_path):
- return rel_path
- if any([rel_path.startswith(var) for var in FULL_PATH_VARS]):
- return rel_path
- # TODO: do we need to check base_path for absolute variables as well?
- return os.path.join('${CMAKE_CURRENT_LIST_DIR}',
- os.path.normpath(os.path.join(base_path, rel_path)))
-
-
-def NormjoinPath(base_path, rel_path):
- """Resolves rel_path against base_path and returns the result.
- TODO: what is this really used for?
- If rel_path begins with '$' it is returned unchanged.
- Otherwise it is resolved against base_path if relative, then normalized.
- """
- if rel_path.startswith('$') and not rel_path.startswith('${configuration}'):
- return rel_path
- return os.path.normpath(os.path.join(base_path, rel_path))
-
-
-def CMakeStringEscape(a):
- """Escapes the string 'a' for use inside a CMake string.
-
- This means escaping
- '\' otherwise it may be seen as modifying the next character
- '"' otherwise it will end the string
- ';' otherwise the string becomes a list
-
- The following do not need to be escaped
- '#' when the lexer is in string state, this does not start a comment
-
-  The following are as yet unhandled
-    '$' generator variables (like ${obj}) must not be escaped,
-        but a literal '$' should be; what is wanted is to know
-        which '$' come from generator variables
- """
- return a.replace('\\', '\\\\').replace(';', '\\;').replace('"', '\\"')
-
-
-def SetFileProperty(output, source_name, property_name, values, sep):
- """Given a set of source file, sets the given property on them."""
- output.write('set_source_files_properties(')
- output.write(source_name)
- output.write(' PROPERTIES ')
- output.write(property_name)
- output.write(' "')
- for value in values:
- output.write(CMakeStringEscape(value))
- output.write(sep)
- output.write('")\n')
-
-
-def SetFilesProperty(output, variable, property_name, values, sep):
- """Given a set of source files, sets the given property on them."""
- output.write('set_source_files_properties(')
- WriteVariable(output, variable)
- output.write(' PROPERTIES ')
- output.write(property_name)
- output.write(' "')
- for value in values:
- output.write(CMakeStringEscape(value))
- output.write(sep)
- output.write('")\n')
-
-
-def SetTargetProperty(output, target_name, property_name, values, sep=''):
- """Given a target, sets the given property."""
- output.write('set_target_properties(')
- output.write(target_name)
- output.write(' PROPERTIES ')
- output.write(property_name)
- output.write(' "')
- for value in values:
- output.write(CMakeStringEscape(value))
- output.write(sep)
- output.write('")\n')
-
-
-def SetVariable(output, variable_name, value):
- """Sets a CMake variable."""
- output.write('set(')
- output.write(variable_name)
- output.write(' "')
- output.write(CMakeStringEscape(value))
- output.write('")\n')
-
-
-def SetVariableList(output, variable_name, values):
- """Sets a CMake variable to a list."""
- if not values:
- return SetVariable(output, variable_name, "")
- if len(values) == 1:
- return SetVariable(output, variable_name, values[0])
- output.write('list(APPEND ')
- output.write(variable_name)
- output.write('\n "')
- output.write('"\n "'.join([CMakeStringEscape(value) for value in values]))
- output.write('")\n')
-
-
-def UnsetVariable(output, variable_name):
- """Unsets a CMake variable."""
- output.write('unset(')
- output.write(variable_name)
- output.write(')\n')
-
-
-def WriteVariable(output, variable_name, prepend=None):
- if prepend:
- output.write(prepend)
- output.write('${')
- output.write(variable_name)
- output.write('}')
-
-
-class CMakeTargetType(object):
- def __init__(self, command, modifier, property_modifier):
- self.command = command
- self.modifier = modifier
- self.property_modifier = property_modifier
-
-
-cmake_target_type_from_gyp_target_type = {
- 'executable': CMakeTargetType('add_executable', None, 'RUNTIME'),
- 'static_library': CMakeTargetType('add_library', 'STATIC', 'ARCHIVE'),
- 'shared_library': CMakeTargetType('add_library', 'SHARED', 'LIBRARY'),
- 'loadable_module': CMakeTargetType('add_library', 'MODULE', 'LIBRARY'),
- 'none': CMakeTargetType('add_custom_target', 'SOURCES', None),
-}
-
-
-def StringToCMakeTargetName(a):
- """Converts the given string 'a' to a valid CMake target name.
-
- All invalid characters are replaced by '_'.
- Invalid for cmake: ' ', '/', '(', ')', '"'
- Invalid for make: ':'
-  Invalid for unknown reasons, but causes failures: '.'
- """
- return a.translate(string.maketrans(' /():."', '_______'))
-
-
-def WriteActions(target_name, actions, extra_sources, extra_deps,
- path_to_gyp, output):
- """Write CMake for the 'actions' in the target.
-
- Args:
- target_name: the name of the CMake target being generated.
- actions: the Gyp 'actions' dict for this target.
-    extra_sources: [(<cmake_src>, <src>)] list to append generated source
-        files to.
-    extra_deps: [<cmake_target>] list to append generated targets to.
- path_to_gyp: relative path from CMakeLists.txt being generated to
- the Gyp file in which the target being generated is defined.
- """
- for action in actions:
- action_name = StringToCMakeTargetName(action['action_name'])
- action_target_name = '%s__%s' % (target_name, action_name)
-
- inputs = action['inputs']
- inputs_name = action_target_name + '__input'
- SetVariableList(output, inputs_name,
- [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
-
- outputs = action['outputs']
- cmake_outputs = [NormjoinPathForceCMakeSource(path_to_gyp, out)
- for out in outputs]
- outputs_name = action_target_name + '__output'
- SetVariableList(output, outputs_name, cmake_outputs)
-
-    # Collect the output dirs we'll need.
- dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
-
- if int(action.get('process_outputs_as_sources', False)):
- extra_sources.extend(zip(cmake_outputs, outputs))
-
- # add_custom_command
- output.write('add_custom_command(OUTPUT ')
- WriteVariable(output, outputs_name)
- output.write('\n')
-
-    for directory in dirs:
-      output.write('  COMMAND ${CMAKE_COMMAND} -E make_directory ')
-      output.write(directory)
-      output.write('\n')
-
- output.write(' COMMAND ')
- output.write(gyp.common.EncodePOSIXShellList(action['action']))
- output.write('\n')
-
- output.write(' DEPENDS ')
- WriteVariable(output, inputs_name)
- output.write('\n')
-
- output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
- output.write(path_to_gyp)
- output.write('\n')
-
- output.write(' COMMENT ')
- if 'message' in action:
- output.write(action['message'])
- else:
- output.write(action_target_name)
- output.write('\n')
-
- output.write(' VERBATIM\n')
- output.write(')\n')
-
- # add_custom_target
- output.write('add_custom_target(')
- output.write(action_target_name)
- output.write('\n DEPENDS ')
- WriteVariable(output, outputs_name)
- output.write('\n SOURCES ')
- WriteVariable(output, inputs_name)
- output.write('\n)\n')
-
- extra_deps.append(action_target_name)
-
-
-def NormjoinRulePathForceCMakeSource(base_path, rel_path, rule_source):
-  if rel_path.startswith(("${RULE_INPUT_PATH}", "${RULE_INPUT_DIRNAME}")):
- if any([rule_source.startswith(var) for var in FULL_PATH_VARS]):
- return rel_path
- return NormjoinPathForceCMakeSource(base_path, rel_path)
-
-
-def WriteRules(target_name, rules, extra_sources, extra_deps,
- path_to_gyp, output):
- """Write CMake for the 'rules' in the target.
-
- Args:
- target_name: the name of the CMake target being generated.
-    rules: the Gyp 'rules' dict for this target.
-    extra_sources: [(<cmake_src>, <src>)] list to append generated source
-        files to.
-    extra_deps: [<cmake_target>] list to append generated targets to.
- path_to_gyp: relative path from CMakeLists.txt being generated to
- the Gyp file in which the target being generated is defined.
- """
- for rule in rules:
- rule_name = StringToCMakeTargetName(target_name + '__' + rule['rule_name'])
-
- inputs = rule.get('inputs', [])
- inputs_name = rule_name + '__input'
- SetVariableList(output, inputs_name,
- [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
- outputs = rule['outputs']
- var_outputs = []
-
- for count, rule_source in enumerate(rule.get('rule_sources', [])):
- action_name = rule_name + '_' + str(count)
-
- rule_source_dirname, rule_source_basename = os.path.split(rule_source)
- rule_source_root, rule_source_ext = os.path.splitext(rule_source_basename)
-
- SetVariable(output, 'RULE_INPUT_PATH', rule_source)
- SetVariable(output, 'RULE_INPUT_DIRNAME', rule_source_dirname)
- SetVariable(output, 'RULE_INPUT_NAME', rule_source_basename)
- SetVariable(output, 'RULE_INPUT_ROOT', rule_source_root)
- SetVariable(output, 'RULE_INPUT_EXT', rule_source_ext)
-
-      # Collect the output dirs we'll need.
- dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
-
- # Create variables for the output, as 'local' variable will be unset.
- these_outputs = []
- for output_index, out in enumerate(outputs):
- output_name = action_name + '_' + str(output_index)
- SetVariable(output, output_name,
- NormjoinRulePathForceCMakeSource(path_to_gyp, out,
- rule_source))
- if int(rule.get('process_outputs_as_sources', False)):
- extra_sources.append(('${' + output_name + '}', out))
- these_outputs.append('${' + output_name + '}')
- var_outputs.append('${' + output_name + '}')
-
- # add_custom_command
- output.write('add_custom_command(OUTPUT\n')
- for out in these_outputs:
- output.write(' ')
- output.write(out)
- output.write('\n')
-
- for directory in dirs:
- output.write(' COMMAND ${CMAKE_COMMAND} -E make_directory ')
- output.write(directory)
- output.write('\n')
-
- output.write(' COMMAND ')
- output.write(gyp.common.EncodePOSIXShellList(rule['action']))
- output.write('\n')
-
- output.write(' DEPENDS ')
- WriteVariable(output, inputs_name)
- output.write(' ')
- output.write(NormjoinPath(path_to_gyp, rule_source))
- output.write('\n')
-
- # CMAKE_CURRENT_LIST_DIR is where the CMakeLists.txt lives.
- # The cwd is the current build directory.
- output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
- output.write(path_to_gyp)
- output.write('\n')
-
- output.write(' COMMENT ')
- if 'message' in rule:
- output.write(rule['message'])
- else:
- output.write(action_name)
- output.write('\n')
-
- output.write(' VERBATIM\n')
- output.write(')\n')
-
- UnsetVariable(output, 'RULE_INPUT_PATH')
- UnsetVariable(output, 'RULE_INPUT_DIRNAME')
- UnsetVariable(output, 'RULE_INPUT_NAME')
- UnsetVariable(output, 'RULE_INPUT_ROOT')
- UnsetVariable(output, 'RULE_INPUT_EXT')
-
- # add_custom_target
- output.write('add_custom_target(')
- output.write(rule_name)
- output.write(' DEPENDS\n')
- for out in var_outputs:
- output.write(' ')
- output.write(out)
- output.write('\n')
- output.write('SOURCES ')
- WriteVariable(output, inputs_name)
- output.write('\n')
- for rule_source in rule.get('rule_sources', []):
- output.write(' ')
- output.write(NormjoinPath(path_to_gyp, rule_source))
- output.write('\n')
- output.write(')\n')
-
- extra_deps.append(rule_name)
-
-
-def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
- """Write CMake for the 'copies' in the target.
-
- Args:
- target_name: the name of the CMake target being generated.
-    copies: the Gyp 'copies' list for this target.
-    extra_deps: [<cmake_target>] list to append generated targets to.
- path_to_gyp: relative path from CMakeLists.txt being generated to
- the Gyp file in which the target being generated is defined.
- """
- copy_name = target_name + '__copies'
-
- # CMake gets upset with custom targets with OUTPUT which specify no output.
- have_copies = any(copy['files'] for copy in copies)
- if not have_copies:
- output.write('add_custom_target(')
- output.write(copy_name)
- output.write(')\n')
- extra_deps.append(copy_name)
- return
-
- class Copy(object):
- def __init__(self, ext, command):
- self.cmake_inputs = []
- self.cmake_outputs = []
- self.gyp_inputs = []
- self.gyp_outputs = []
- self.ext = ext
- self.inputs_name = None
- self.outputs_name = None
- self.command = command
-
- file_copy = Copy('', 'copy')
- dir_copy = Copy('_dirs', 'copy_directory')
-
- for copy in copies:
- files = copy['files']
- destination = copy['destination']
- for src in files:
- path = os.path.normpath(src)
- basename = os.path.split(path)[1]
- dst = os.path.join(destination, basename)
-
- copy = file_copy if os.path.basename(src) else dir_copy
-
- copy.cmake_inputs.append(NormjoinPathForceCMakeSource(path_to_gyp, src))
- copy.cmake_outputs.append(NormjoinPathForceCMakeSource(path_to_gyp, dst))
- copy.gyp_inputs.append(src)
- copy.gyp_outputs.append(dst)
-
- for copy in (file_copy, dir_copy):
- if copy.cmake_inputs:
- copy.inputs_name = copy_name + '__input' + copy.ext
- SetVariableList(output, copy.inputs_name, copy.cmake_inputs)
-
- copy.outputs_name = copy_name + '__output' + copy.ext
- SetVariableList(output, copy.outputs_name, copy.cmake_outputs)
-
- # add_custom_command
- output.write('add_custom_command(\n')
-
- output.write('OUTPUT')
- for copy in (file_copy, dir_copy):
- if copy.outputs_name:
- WriteVariable(output, copy.outputs_name, ' ')
- output.write('\n')
-
- for copy in (file_copy, dir_copy):
- for src, dst in zip(copy.gyp_inputs, copy.gyp_outputs):
- # 'cmake -E copy src dst' will create the 'dst' directory if needed.
- output.write('COMMAND ${CMAKE_COMMAND} -E %s ' % copy.command)
- output.write(src)
- output.write(' ')
- output.write(dst)
- output.write("\n")
-
- output.write('DEPENDS')
- for copy in (file_copy, dir_copy):
- if copy.inputs_name:
- WriteVariable(output, copy.inputs_name, ' ')
- output.write('\n')
-
- output.write('WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
- output.write(path_to_gyp)
- output.write('\n')
-
- output.write('COMMENT Copying for ')
- output.write(target_name)
- output.write('\n')
-
- output.write('VERBATIM\n')
- output.write(')\n')
-
- # add_custom_target
- output.write('add_custom_target(')
- output.write(copy_name)
- output.write('\n DEPENDS')
- for copy in (file_copy, dir_copy):
- if copy.outputs_name:
- WriteVariable(output, copy.outputs_name, ' ')
- output.write('\n SOURCES')
- if file_copy.inputs_name:
- WriteVariable(output, file_copy.inputs_name, ' ')
- output.write('\n)\n')
-
- extra_deps.append(copy_name)
-
-
-def CreateCMakeTargetBaseName(qualified_target):
- """This is the name we would like the target to have."""
- _, gyp_target_name, gyp_target_toolset = (
- gyp.common.ParseQualifiedTarget(qualified_target))
- cmake_target_base_name = gyp_target_name
- if gyp_target_toolset and gyp_target_toolset != 'target':
- cmake_target_base_name += '_' + gyp_target_toolset
- return StringToCMakeTargetName(cmake_target_base_name)
-
-
-def CreateCMakeTargetFullName(qualified_target):
- """An unambiguous name for the target."""
- gyp_file, gyp_target_name, gyp_target_toolset = (
- gyp.common.ParseQualifiedTarget(qualified_target))
- cmake_target_full_name = gyp_file + ':' + gyp_target_name
- if gyp_target_toolset and gyp_target_toolset != 'target':
- cmake_target_full_name += '_' + gyp_target_toolset
- return StringToCMakeTargetName(cmake_target_full_name)
-
-
-class CMakeNamer(object):
- """Converts Gyp target names into CMake target names.
-
- CMake requires that target names be globally unique. One way to ensure
-  this is to fully qualify the names of the targets. Unfortunately, this
- ends up with all targets looking like "chrome_chrome_gyp_chrome" instead
- of just "chrome". If this generator were only interested in building, it
- would be possible to fully qualify all target names, then create
- unqualified target names which depend on all qualified targets which
- should have had that name. This is more or less what the 'make' generator
- does with aliases. However, one goal of this generator is to create CMake
- files for use with IDEs, and fully qualified names are not as user
- friendly.
-
- Since target name collision is rare, we do the above only when required.
-
- Toolset variants are always qualified from the base, as this is required for
- building. However, it also makes sense for an IDE, as it is possible for
- defines to be different.
- """
- def __init__(self, target_list):
-    self.cmake_target_base_names_conflicting = set()
-
- cmake_target_base_names_seen = set()
- for qualified_target in target_list:
- cmake_target_base_name = CreateCMakeTargetBaseName(qualified_target)
-
- if cmake_target_base_name not in cmake_target_base_names_seen:
- cmake_target_base_names_seen.add(cmake_target_base_name)
- else:
-        self.cmake_target_base_names_conflicting.add(cmake_target_base_name)
-
- def CreateCMakeTargetName(self, qualified_target):
- base_name = CreateCMakeTargetBaseName(qualified_target)
-    if base_name in self.cmake_target_base_names_conflicting:
- return CreateCMakeTargetFullName(qualified_target)
- return base_name
-
-
-def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
- options, generator_flags, all_qualified_targets, output):
-
- # The make generator does this always.
- # TODO: It would be nice to be able to tell CMake all dependencies.
- circular_libs = generator_flags.get('circular', True)
-
- if not generator_flags.get('standalone', False):
- output.write('\n#')
- output.write(qualified_target)
- output.write('\n')
-
- gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
- rel_gyp_file = gyp.common.RelativePath(gyp_file, options.toplevel_dir)
- rel_gyp_dir = os.path.dirname(rel_gyp_file)
-
- # Relative path from build dir to top dir.
- build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir)
- # Relative path from build dir to gyp dir.
- build_to_gyp = os.path.join(build_to_top, rel_gyp_dir)
-
- path_from_cmakelists_to_gyp = build_to_gyp
-
- spec = target_dicts.get(qualified_target, {})
- config = spec.get('configurations', {}).get(config_to_use, {})
-
- target_name = spec.get('target_name', '<missing target name>')
- target_type = spec.get('type', '<missing target type>')
- target_toolset = spec.get('toolset')
-
- cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type)
- if cmake_target_type is None:
-    print ('Target %s has unknown target type %s, skipping.' %
-           (target_name, target_type))
- return
-
- SetVariable(output, 'TARGET', target_name)
- SetVariable(output, 'TOOLSET', target_toolset)
-
- cmake_target_name = namer.CreateCMakeTargetName(qualified_target)
-
- extra_sources = []
- extra_deps = []
-
- # Actions must come first, since they can generate more OBJs for use below.
- if 'actions' in spec:
- WriteActions(cmake_target_name, spec['actions'], extra_sources, extra_deps,
- path_from_cmakelists_to_gyp, output)
-
- # Rules must be early like actions.
- if 'rules' in spec:
- WriteRules(cmake_target_name, spec['rules'], extra_sources, extra_deps,
- path_from_cmakelists_to_gyp, output)
-
- # Copies
- if 'copies' in spec:
- WriteCopies(cmake_target_name, spec['copies'], extra_deps,
- path_from_cmakelists_to_gyp, output)
-
- # Target and sources
- srcs = spec.get('sources', [])
-
- # Gyp separates the sheep from the goats based on file extensions.
- # A full separation is done here because of flag handing (see below).
- s_sources = []
- c_sources = []
- cxx_sources = []
- linkable_sources = []
- other_sources = []
- for src in srcs:
- _, ext = os.path.splitext(src)
- src_type = COMPILABLE_EXTENSIONS.get(ext, None)
-    src_norm_path = NormjoinPath(path_from_cmakelists_to_gyp, src)
-
- if src_type == 's':
- s_sources.append(src_norm_path)
- elif src_type == 'cc':
- c_sources.append(src_norm_path)
- elif src_type == 'cxx':
- cxx_sources.append(src_norm_path)
- elif Linkable(ext):
- linkable_sources.append(src_norm_path)
- else:
- other_sources.append(src_norm_path)
-
- for extra_source in extra_sources:
- src, real_source = extra_source
- _, ext = os.path.splitext(real_source)
- src_type = COMPILABLE_EXTENSIONS.get(ext, None)
-
- if src_type == 's':
- s_sources.append(src)
- elif src_type == 'cc':
- c_sources.append(src)
- elif src_type == 'cxx':
- cxx_sources.append(src)
- elif Linkable(ext):
- linkable_sources.append(src)
- else:
- other_sources.append(src)
-
- s_sources_name = None
- if s_sources:
- s_sources_name = cmake_target_name + '__asm_srcs'
- SetVariableList(output, s_sources_name, s_sources)
-
- c_sources_name = None
- if c_sources:
- c_sources_name = cmake_target_name + '__c_srcs'
- SetVariableList(output, c_sources_name, c_sources)
-
- cxx_sources_name = None
- if cxx_sources:
- cxx_sources_name = cmake_target_name + '__cxx_srcs'
- SetVariableList(output, cxx_sources_name, cxx_sources)
-
- linkable_sources_name = None
- if linkable_sources:
- linkable_sources_name = cmake_target_name + '__linkable_srcs'
- SetVariableList(output, linkable_sources_name, linkable_sources)
-
- other_sources_name = None
- if other_sources:
- other_sources_name = cmake_target_name + '__other_srcs'
- SetVariableList(output, other_sources_name, other_sources)
-
- # CMake gets upset when executable targets provide no sources.
- # http://www.cmake.org/pipermail/cmake/2010-July/038461.html
- dummy_sources_name = None
- has_sources = (s_sources_name or
- c_sources_name or
- cxx_sources_name or
- linkable_sources_name or
- other_sources_name)
- if target_type == 'executable' and not has_sources:
- dummy_sources_name = cmake_target_name + '__dummy_srcs'
- SetVariable(output, dummy_sources_name,
- "${obj}.${TOOLSET}/${TARGET}/genc/dummy.c")
- output.write('if(NOT EXISTS "')
- WriteVariable(output, dummy_sources_name)
- output.write('")\n')
- output.write(' file(WRITE "')
- WriteVariable(output, dummy_sources_name)
- output.write('" "")\n')
- output.write("endif()\n")
-
-
- # CMake is opposed to setting linker directories and considers the practice
- # of setting linker directories dangerous. Instead, it favors the use of
- # find_library and passing absolute paths to target_link_libraries.
- # However, CMake does provide the command link_directories, which adds
- # link directories to targets defined after it is called.
- # As a result, link_directories must come before the target definition.
- # CMake unfortunately has no means of removing entries from LINK_DIRECTORIES.
- library_dirs = config.get('library_dirs')
- if library_dirs is not None:
- output.write('link_directories(')
- for library_dir in library_dirs:
- output.write(' ')
- output.write(NormjoinPath(path_from_cmakelists_to_gyp, library_dir))
- output.write('\n')
- output.write(')\n')
-
- output.write(cmake_target_type.command)
- output.write('(')
- output.write(cmake_target_name)
-
- if cmake_target_type.modifier is not None:
- output.write(' ')
- output.write(cmake_target_type.modifier)
-
- if s_sources_name:
- WriteVariable(output, s_sources_name, ' ')
- if c_sources_name:
- WriteVariable(output, c_sources_name, ' ')
- if cxx_sources_name:
- WriteVariable(output, cxx_sources_name, ' ')
- if linkable_sources_name:
- WriteVariable(output, linkable_sources_name, ' ')
- if other_sources_name:
- WriteVariable(output, other_sources_name, ' ')
- if dummy_sources_name:
- WriteVariable(output, dummy_sources_name, ' ')
-
- output.write(')\n')
-
- # Let CMake know if the 'all' target should depend on this target.
- exclude_from_all = ('TRUE' if qualified_target not in all_qualified_targets
- else 'FALSE')
- SetTargetProperty(output, cmake_target_name,
- 'EXCLUDE_FROM_ALL', exclude_from_all)
- for extra_target_name in extra_deps:
- SetTargetProperty(output, extra_target_name,
- 'EXCLUDE_FROM_ALL', exclude_from_all)
-
- # Output name and location.
- if target_type != 'none':
- # Link as 'C' if there are no other files
- if not c_sources and not cxx_sources:
- SetTargetProperty(output, cmake_target_name, 'LINKER_LANGUAGE', ['C'])
-
- # Mark uncompiled sources as uncompiled.
- if other_sources_name:
- output.write('set_source_files_properties(')
- WriteVariable(output, other_sources_name, '')
- output.write(' PROPERTIES HEADER_FILE_ONLY "TRUE")\n')
-
- # Mark object sources as linkable.
- if linkable_sources_name:
- output.write('set_source_files_properties(')
-      WriteVariable(output, linkable_sources_name, '')
- output.write(' PROPERTIES EXTERNAL_OBJECT "TRUE")\n')
-
- # Output directory
- target_output_directory = spec.get('product_dir')
- if target_output_directory is None:
- if target_type in ('executable', 'loadable_module'):
- target_output_directory = generator_default_variables['PRODUCT_DIR']
- elif target_type == 'shared_library':
- target_output_directory = '${builddir}/lib.${TOOLSET}'
- elif spec.get('standalone_static_library', False):
- target_output_directory = generator_default_variables['PRODUCT_DIR']
- else:
- base_path = gyp.common.RelativePath(os.path.dirname(gyp_file),
- options.toplevel_dir)
- target_output_directory = '${obj}.${TOOLSET}'
- target_output_directory = (
- os.path.join(target_output_directory, base_path))
-
- cmake_target_output_directory = NormjoinPathForceCMakeSource(
- path_from_cmakelists_to_gyp,
- target_output_directory)
- SetTargetProperty(output,
- cmake_target_name,
- cmake_target_type.property_modifier + '_OUTPUT_DIRECTORY',
- cmake_target_output_directory)
-
- # Output name
- default_product_prefix = ''
- default_product_name = target_name
- default_product_ext = ''
- if target_type == 'static_library':
- static_library_prefix = generator_default_variables['STATIC_LIB_PREFIX']
- default_product_name = RemovePrefix(default_product_name,
- static_library_prefix)
- default_product_prefix = static_library_prefix
- default_product_ext = generator_default_variables['STATIC_LIB_SUFFIX']
-
- elif target_type in ('loadable_module', 'shared_library'):
- shared_library_prefix = generator_default_variables['SHARED_LIB_PREFIX']
- default_product_name = RemovePrefix(default_product_name,
- shared_library_prefix)
- default_product_prefix = shared_library_prefix
- default_product_ext = generator_default_variables['SHARED_LIB_SUFFIX']
-
- elif target_type != 'executable':
-    print ('ERROR: What output file should be generated? '
-           'type: %s target: %s' % (target_type, target_name))
-
- product_prefix = spec.get('product_prefix', default_product_prefix)
- product_name = spec.get('product_name', default_product_name)
- product_ext = spec.get('product_extension')
- if product_ext:
- product_ext = '.' + product_ext
- else:
- product_ext = default_product_ext
-
- SetTargetProperty(output, cmake_target_name, 'PREFIX', product_prefix)
- SetTargetProperty(output, cmake_target_name,
- cmake_target_type.property_modifier + '_OUTPUT_NAME',
- product_name)
- SetTargetProperty(output, cmake_target_name, 'SUFFIX', product_ext)
-
- # Make the output of this target referenceable as a source.
- cmake_target_output_basename = product_prefix + product_name + product_ext
- cmake_target_output = os.path.join(cmake_target_output_directory,
- cmake_target_output_basename)
- SetFileProperty(output, cmake_target_output, 'GENERATED', ['TRUE'], '')
-
- # Includes
- includes = config.get('include_dirs')
- if includes:
- # This (target include directories) is what requires CMake 2.8.8
- includes_name = cmake_target_name + '__include_dirs'
- SetVariableList(output, includes_name,
- [NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include)
- for include in includes])
- output.write('set_property(TARGET ')
- output.write(cmake_target_name)
- output.write(' APPEND PROPERTY INCLUDE_DIRECTORIES ')
- WriteVariable(output, includes_name, '')
- output.write(')\n')
-
- # Defines
- defines = config.get('defines')
- if defines is not None:
- SetTargetProperty(output,
- cmake_target_name,
- 'COMPILE_DEFINITIONS',
- defines,
- ';')
-
- # Compile Flags - http://www.cmake.org/Bug/view.php?id=6493
- # CMake currently does not have target C and CXX flags.
- # So, instead of doing...
-
- # cflags_c = config.get('cflags_c')
- # if cflags_c is not None:
- # SetTargetProperty(output, cmake_target_name,
- # 'C_COMPILE_FLAGS', cflags_c, ' ')
-
- # cflags_cc = config.get('cflags_cc')
- # if cflags_cc is not None:
- # SetTargetProperty(output, cmake_target_name,
- # 'CXX_COMPILE_FLAGS', cflags_cc, ' ')
-
- # Instead we must...
- cflags = config.get('cflags', [])
- cflags_c = config.get('cflags_c', [])
- cflags_cxx = config.get('cflags_cc', [])
- if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources):
- SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', cflags, ' ')
-
- elif c_sources and not (s_sources or cxx_sources):
- flags = []
- flags.extend(cflags)
- flags.extend(cflags_c)
- SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
-
- elif cxx_sources and not (s_sources or c_sources):
- flags = []
- flags.extend(cflags)
- flags.extend(cflags_cxx)
- SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
-
- else:
- # TODO: This is broken, one cannot generally set properties on files,
- # as other targets may require different properties on the same files.
- if s_sources and cflags:
- SetFilesProperty(output, s_sources_name, 'COMPILE_FLAGS', cflags, ' ')
-
- if c_sources and (cflags or cflags_c):
- flags = []
- flags.extend(cflags)
- flags.extend(cflags_c)
- SetFilesProperty(output, c_sources_name, 'COMPILE_FLAGS', flags, ' ')
-
- if cxx_sources and (cflags or cflags_cxx):
- flags = []
- flags.extend(cflags)
- flags.extend(cflags_cxx)
- SetFilesProperty(output, cxx_sources_name, 'COMPILE_FLAGS', flags, ' ')
-
- # Linker flags
- ldflags = config.get('ldflags')
- if ldflags is not None:
- SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ')
-
- # Note on Dependencies and Libraries:
- # CMake wants to handle link order, resolving the link line up front.
- # Gyp does not retain or enforce specifying enough information to do so.
- # So do as other gyp generators and use --start-group and --end-group.
- # Give CMake as little information as possible so that it doesn't mess it up.
-
- # Dependencies
-  raw_deps = spec.get('dependencies', [])
-
-  static_deps = []
-  shared_deps = []
-  other_deps = []
-  for raw_dep in raw_deps:
-    dep_cmake_name = namer.CreateCMakeTargetName(raw_dep)
-    dep_spec = target_dicts.get(raw_dep, {})
- dep_target_type = dep_spec.get('type', None)
-
- if dep_target_type == 'static_library':
- static_deps.append(dep_cmake_name)
- elif dep_target_type == 'shared_library':
- shared_deps.append(dep_cmake_name)
- else:
- other_deps.append(dep_cmake_name)
-
-  # Ensure all external dependencies are complete before internal ones;
-  # extra_deps currently only depend on their own deps, so they would
-  # otherwise run too early.
- if static_deps or shared_deps or other_deps:
- for extra_dep in extra_deps:
- output.write('add_dependencies(')
- output.write(extra_dep)
- output.write('\n')
- for deps in (static_deps, shared_deps, other_deps):
- for dep in gyp.common.uniquer(deps):
- output.write(' ')
- output.write(dep)
- output.write('\n')
- output.write(')\n')
-
- linkable = target_type in ('executable', 'loadable_module', 'shared_library')
- other_deps.extend(extra_deps)
- if other_deps or (not linkable and (static_deps or shared_deps)):
- output.write('add_dependencies(')
- output.write(cmake_target_name)
- output.write('\n')
- for dep in gyp.common.uniquer(other_deps):
- output.write(' ')
- output.write(dep)
- output.write('\n')
- if not linkable:
- for deps in (static_deps, shared_deps):
- for lib_dep in gyp.common.uniquer(deps):
- output.write(' ')
- output.write(lib_dep)
- output.write('\n')
- output.write(')\n')
-
- # Libraries
- if linkable:
- external_libs = [lib for lib in spec.get('libraries', []) if len(lib) > 0]
- if external_libs or static_deps or shared_deps:
- output.write('target_link_libraries(')
- output.write(cmake_target_name)
- output.write('\n')
- if static_deps:
- write_group = circular_libs and len(static_deps) > 1
- if write_group:
- output.write('-Wl,--start-group\n')
- for dep in gyp.common.uniquer(static_deps):
- output.write(' ')
- output.write(dep)
- output.write('\n')
- if write_group:
- output.write('-Wl,--end-group\n')
- if shared_deps:
- for dep in gyp.common.uniquer(shared_deps):
- output.write(' ')
- output.write(dep)
- output.write('\n')
- if external_libs:
- for lib in gyp.common.uniquer(external_libs):
- output.write(' ')
- output.write(lib)
- output.write('\n')
-
- output.write(')\n')
-
- UnsetVariable(output, 'TOOLSET')
- UnsetVariable(output, 'TARGET')
-
-
-def GenerateOutputForConfig(target_list, target_dicts, data,
- params, config_to_use):
- options = params['options']
- generator_flags = params['generator_flags']
-
- # generator_dir: relative path from pwd to where make puts build files.
-  # Makes migrating from make to cmake easier; cmake doesn't put anything here.
- # Each Gyp configuration creates a different CMakeLists.txt file
- # to avoid incompatibilities between Gyp and CMake configurations.
- generator_dir = os.path.relpath(options.generator_output or '.')
-
- # output_dir: relative path from generator_dir to the build directory.
- output_dir = generator_flags.get('output_dir', 'out')
-
- # build_dir: relative path from source root to our output files.
- # e.g. "out/Debug"
- build_dir = os.path.normpath(os.path.join(generator_dir,
- output_dir,
- config_to_use))
-
- toplevel_build = os.path.join(options.toplevel_dir, build_dir)
-
- output_file = os.path.join(toplevel_build, 'CMakeLists.txt')
- gyp.common.EnsureDirExists(output_file)
-
- output = open(output_file, 'w')
- output.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
- output.write('cmake_policy(VERSION 2.8.8)\n')
-
- gyp_file, project_target, _ = gyp.common.ParseQualifiedTarget(target_list[-1])
- output.write('project(')
- output.write(project_target)
- output.write(')\n')
-
- SetVariable(output, 'configuration', config_to_use)
-
- ar = None
- cc = None
- cxx = None
-
- make_global_settings = data[gyp_file].get('make_global_settings', [])
- build_to_top = gyp.common.InvertRelativePath(build_dir,
- options.toplevel_dir)
- for key, value in make_global_settings:
- if key == 'AR':
- ar = os.path.join(build_to_top, value)
- if key == 'CC':
- cc = os.path.join(build_to_top, value)
- if key == 'CXX':
- cxx = os.path.join(build_to_top, value)
-
- ar = gyp.common.GetEnvironFallback(['AR_target', 'AR'], ar)
- cc = gyp.common.GetEnvironFallback(['CC_target', 'CC'], cc)
- cxx = gyp.common.GetEnvironFallback(['CXX_target', 'CXX'], cxx)
-
- if ar:
- SetVariable(output, 'CMAKE_AR', ar)
- if cc:
- SetVariable(output, 'CMAKE_C_COMPILER', cc)
- if cxx:
- SetVariable(output, 'CMAKE_CXX_COMPILER', cxx)
-
- # The following appears to be as-yet undocumented.
- # http://public.kitware.com/Bug/view.php?id=8392
- output.write('enable_language(ASM)\n')
- # ASM-ATT does not support .S files.
- # output.write('enable_language(ASM-ATT)\n')
-
- if cc:
- SetVariable(output, 'CMAKE_ASM_COMPILER', cc)
-
- SetVariable(output, 'builddir', '${CMAKE_CURRENT_BINARY_DIR}')
- SetVariable(output, 'obj', '${builddir}/obj')
- output.write('\n')
-
- # TODO: Undocumented/unsupported (the CMake Java generator depends on it).
- # CMake by default names the object resulting from foo.c to be foo.c.o.
- # Gyp traditionally names the object resulting from foo.c foo.o.
- # This should be irrelevant, but some targets extract .o files from .a
- # and depend on the name of the extracted .o files.
- output.write('set(CMAKE_C_OUTPUT_EXTENSION_REPLACE 1)\n')
- output.write('set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE 1)\n')
- output.write('\n')
-
- # Force ninja to use rsp files. Otherwise link and ar lines can get too long,
- # resulting in 'Argument list too long' errors.
- output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n')
- output.write('\n')
-
- namer = CMakeNamer(target_list)
-
- # The list of targets upon which the 'all' target should depend.
-  # CMake has its own implicit 'all' target; one is not created explicitly.
- all_qualified_targets = set()
- for build_file in params['build_files']:
- for qualified_target in gyp.common.AllTargets(target_list,
- target_dicts,
- os.path.normpath(build_file)):
- all_qualified_targets.add(qualified_target)
-
- for qualified_target in target_list:
- WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
- options, generator_flags, all_qualified_targets, output)
-
- output.close()
-
-
-def PerformBuild(data, configurations, params):
- options = params['options']
- generator_flags = params['generator_flags']
-
- # generator_dir: relative path from pwd to where make puts build files.
-  # Makes migrating from make to cmake easier; cmake doesn't put anything here.
- generator_dir = os.path.relpath(options.generator_output or '.')
-
- # output_dir: relative path from generator_dir to the build directory.
- output_dir = generator_flags.get('output_dir', 'out')
-
- for config_name in configurations:
- # build_dir: relative path from source root to our output files.
- # e.g. "out/Debug"
- build_dir = os.path.normpath(os.path.join(generator_dir,
- output_dir,
- config_name))
- arguments = ['cmake', '-G', 'Ninja']
- print 'Generating [%s]: %s' % (config_name, arguments)
- subprocess.check_call(arguments, cwd=build_dir)
-
- arguments = ['ninja', '-C', build_dir]
- print 'Building [%s]: %s' % (config_name, arguments)
- subprocess.check_call(arguments)
-
-
-def CallGenerateOutputForConfig(arglist):
- # Ignore the interrupt signal so that the parent process catches it and
- # kills all multiprocessing children.
- signal.signal(signal.SIGINT, signal.SIG_IGN)
-
- target_list, target_dicts, data, params, config_name = arglist
- GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- user_config = params.get('generator_flags', {}).get('config', None)
- if user_config:
- GenerateOutputForConfig(target_list, target_dicts, data,
- params, user_config)
- else:
- config_names = target_dicts[target_list[0]]['configurations'].keys()
- if params['parallel']:
- try:
- pool = multiprocessing.Pool(len(config_names))
- arglists = []
- for config_name in config_names:
- arglists.append((target_list, target_dicts, data,
- params, config_name))
- pool.map(CallGenerateOutputForConfig, arglists)
- except KeyboardInterrupt, e:
- pool.terminate()
- raise e
- else:
- for config_name in config_names:
- GenerateOutputForConfig(target_list, target_dicts, data,
- params, config_name)
diff --git a/deps/gyp/pylib/gyp/generator/dump_dependency_json.py b/deps/gyp/pylib/gyp/generator/dump_dependency_json.py
deleted file mode 100644
index 160eafe2ef..0000000000
--- a/deps/gyp/pylib/gyp/generator/dump_dependency_json.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import collections
-import os
-import gyp
-import gyp.common
-import gyp.msvs_emulation
-import json
-import sys
-
-generator_supports_multiple_toolsets = True
-
-generator_wants_static_library_dependencies_adjusted = False
-
-generator_filelist_paths = {
-}
-
-generator_default_variables = {
-}
-for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
- 'LIB_DIR', 'SHARED_LIB_DIR']:
- # Some gyp steps fail if these are empty(!).
- generator_default_variables[dirname] = 'dir'
-for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
- 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
- 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
- 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
- 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
- 'CONFIGURATION_NAME']:
- generator_default_variables[unused] = ''
-
-
-def CalculateVariables(default_variables, params):
- generator_flags = params.get('generator_flags', {})
- for key, val in generator_flags.items():
- default_variables.setdefault(key, val)
- default_variables.setdefault('OS', gyp.common.GetFlavor(params))
-
- flavor = gyp.common.GetFlavor(params)
-  if flavor == 'win':
- # Copy additional generator configuration data from VS, which is shared
- # by the Windows Ninja generator.
- import gyp.generator.msvs as msvs_generator
- generator_additional_non_configuration_keys = getattr(msvs_generator,
- 'generator_additional_non_configuration_keys', [])
- generator_additional_path_sections = getattr(msvs_generator,
- 'generator_additional_path_sections', [])
-
- gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
-
-
-def CalculateGeneratorInputInfo(params):
- """Calculate the generator specific info that gets fed to input (called by
- gyp)."""
- generator_flags = params.get('generator_flags', {})
- if generator_flags.get('adjust_static_libraries', False):
- global generator_wants_static_library_dependencies_adjusted
- generator_wants_static_library_dependencies_adjusted = True
-
- toplevel = params['options'].toplevel_dir
- generator_dir = os.path.relpath(params['options'].generator_output or '.')
- # output_dir: relative path from generator_dir to the build directory.
- output_dir = generator_flags.get('output_dir', 'out')
- qualified_out_dir = os.path.normpath(os.path.join(
- toplevel, generator_dir, output_dir, 'gypfiles'))
- global generator_filelist_paths
- generator_filelist_paths = {
- 'toplevel': toplevel,
- 'qualified_out_dir': qualified_out_dir,
- }
-
-def GenerateOutput(target_list, target_dicts, data, params):
- # Map of target -> list of targets it depends on.
- edges = {}
-
- # Queue of targets to visit.
- targets_to_visit = target_list[:]
-
-  while targets_to_visit:
- target = targets_to_visit.pop()
- if target in edges:
- continue
- edges[target] = []
-
- for dep in target_dicts[target].get('dependencies', []):
- edges[target].append(dep)
- targets_to_visit.append(dep)
-
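-  # The dumped JSON maps each qualified target to its direct dependencies,
-  # e.g. (illustrative) {"a.gyp:foo#target": ["b.gyp:bar#target"]}.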
- try:
- filepath = params['generator_flags']['output_dir']
- except KeyError:
- filepath = '.'
- filename = os.path.join(filepath, 'dump.json')
- f = open(filename, 'w')
- json.dump(edges, f)
- f.close()
- print 'Wrote json to %s.' % filename
diff --git a/deps/gyp/pylib/gyp/generator/eclipse.py b/deps/gyp/pylib/gyp/generator/eclipse.py
deleted file mode 100644
index 3544347b3b..0000000000
--- a/deps/gyp/pylib/gyp/generator/eclipse.py
+++ /dev/null
@@ -1,425 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""GYP backend that generates Eclipse CDT settings files.
-
-This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML
-files that can be imported into an Eclipse CDT project. The XML file contains a
-list of include paths and symbols (i.e. defines).
-
-Because a full .cproject definition is not created by this generator, it's not
-possible to properly define the include dirs and symbols for each file
-individually. Instead, one set of includes/symbols is generated for the entire
-project. This works fairly well (and is a vast improvement in general), but may
-still result in a few indexer issues here and there.
-
-This generator has no automated tests, so expect it to be broken.
-"""
-
-from xml.sax.saxutils import escape
-import os.path
-import subprocess
-import gyp
-import gyp.common
-import gyp.msvs_emulation
-import shlex
-import xml.etree.cElementTree as ET
-
-generator_wants_static_library_dependencies_adjusted = False
-
-generator_default_variables = {
-}
-
-for dirname in ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']:
- # Some gyp steps fail if these are empty(!), so we convert them to variables
- generator_default_variables[dirname] = '$' + dirname
-
-for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
- 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
- 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
- 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
- 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
- 'CONFIGURATION_NAME']:
- generator_default_variables[unused] = ''
-
-# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as
-# part of the path when dealing with generated headers. This value will be
-# replaced dynamically for each configuration.
-generator_default_variables['SHARED_INTERMEDIATE_DIR'] = \
- '$SHARED_INTERMEDIATE_DIR'
-
-
-def CalculateVariables(default_variables, params):
- generator_flags = params.get('generator_flags', {})
- for key, val in generator_flags.items():
- default_variables.setdefault(key, val)
- flavor = gyp.common.GetFlavor(params)
- default_variables.setdefault('OS', flavor)
- if flavor == 'win':
- # Copy additional generator configuration data from VS, which is shared
- # by the Eclipse generator.
- import gyp.generator.msvs as msvs_generator
- generator_additional_non_configuration_keys = getattr(msvs_generator,
- 'generator_additional_non_configuration_keys', [])
- generator_additional_path_sections = getattr(msvs_generator,
- 'generator_additional_path_sections', [])
-
- gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
-
-
-def CalculateGeneratorInputInfo(params):
- """Calculate the generator specific info that gets fed to input (called by
- gyp)."""
- generator_flags = params.get('generator_flags', {})
- if generator_flags.get('adjust_static_libraries', False):
- global generator_wants_static_library_dependencies_adjusted
- generator_wants_static_library_dependencies_adjusted = True
-
-
-def GetAllIncludeDirectories(target_list, target_dicts,
- shared_intermediate_dirs, config_name, params,
- compiler_path):
- """Calculate the set of include directories to be used.
-
- Returns:
- A list including all the include_dir's specified for every target followed
- by any include directories that were added as cflag compiler options.
- """
-
- gyp_includes_set = set()
- compiler_includes_list = []
-
- # Find compiler's default include dirs.
- if compiler_path:
- command = shlex.split(compiler_path)
- command.extend(['-E', '-xc++', '-v', '-'])
- proc = subprocess.Popen(args=command, stdin=subprocess.PIPE,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- output = proc.communicate()[1]
- # Extract the list of include dirs from the output, which has this format:
- # ...
- # #include "..." search starts here:
- # #include <...> search starts here:
- # /usr/include/c++/4.6
- # /usr/local/include
- # End of search list.
- # ...
- in_include_list = False
- for line in output.splitlines():
- if line.startswith('#include'):
- in_include_list = True
- continue
- if line.startswith('End of search list.'):
- break
- if in_include_list:
- include_dir = line.strip()
- if include_dir not in compiler_includes_list:
- compiler_includes_list.append(include_dir)
-
- flavor = gyp.common.GetFlavor(params)
- if flavor == 'win':
- generator_flags = params.get('generator_flags', {})
- for target_name in target_list:
- target = target_dicts[target_name]
- if config_name in target['configurations']:
- config = target['configurations'][config_name]
-
- # Look for any include dirs that were explicitly added via cflags. This
- # may be done in gyp files to force certain includes to come at the end.
- # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
- # remove this.
- if flavor == 'win':
- msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
- cflags = msvs_settings.GetCflags(config_name)
- else:
- cflags = config['cflags']
- for cflag in cflags:
- if cflag.startswith('-I'):
- include_dir = cflag[2:]
- if include_dir not in compiler_includes_list:
- compiler_includes_list.append(include_dir)
-
- # Find standard gyp include dirs.
-    if 'include_dirs' in config:
- include_dirs = config['include_dirs']
- for shared_intermediate_dir in shared_intermediate_dirs:
- for include_dir in include_dirs:
- include_dir = include_dir.replace('$SHARED_INTERMEDIATE_DIR',
- shared_intermediate_dir)
- if not os.path.isabs(include_dir):
- base_dir = os.path.dirname(target_name)
-
- include_dir = base_dir + '/' + include_dir
- include_dir = os.path.abspath(include_dir)
-
- gyp_includes_set.add(include_dir)
-
- # Generate a list that has all the include dirs.
- all_includes_list = list(gyp_includes_set)
- all_includes_list.sort()
- for compiler_include in compiler_includes_list:
-    if compiler_include not in gyp_includes_set:
- all_includes_list.append(compiler_include)
-
- # All done.
- return all_includes_list
-
-
-def GetCompilerPath(target_list, data, options):
- """Determine a command that can be used to invoke the compiler.
-
- Returns:
- If this is a gyp project that has explicit make settings, try to determine
- the compiler from that. Otherwise, see if a compiler was specified via the
- CC_target environment variable.
- """
- # First, see if the compiler is configured in make's settings.
- build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
- make_global_settings_dict = data[build_file].get('make_global_settings', {})
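-  # Despite the name, this is a list of [key, value] pairs, e.g.
-  # (illustrative) [['CC', 'third_party/clang/bin/clang']].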
- for key, value in make_global_settings_dict:
- if key in ['CC', 'CXX']:
- return os.path.join(options.toplevel_dir, value)
-
- # Check to see if the compiler was specified as an environment variable.
- for key in ['CC_target', 'CC', 'CXX']:
- compiler = os.environ.get(key)
- if compiler:
- return compiler
-
- return 'gcc'
-
-
-def GetAllDefines(target_list, target_dicts, data, config_name, params,
- compiler_path):
- """Calculate the defines for a project.
-
- Returns:
-    A dict that includes explicit defines declared in gyp files along with all
-    of
- the default defines that the compiler uses.
- """
-
- # Get defines declared in the gyp files.
- all_defines = {}
- flavor = gyp.common.GetFlavor(params)
- if flavor == 'win':
- generator_flags = params.get('generator_flags', {})
- for target_name in target_list:
- target = target_dicts[target_name]
-
- if flavor == 'win':
- msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
- extra_defines = msvs_settings.GetComputedDefines(config_name)
- else:
- extra_defines = []
- if config_name in target['configurations']:
- config = target['configurations'][config_name]
- target_defines = config['defines']
- else:
- target_defines = []
- for define in target_defines + extra_defines:
- split_define = define.split('=', 1)
- if len(split_define) == 1:
- split_define.append('1')
- if split_define[0].strip() in all_defines:
- # Already defined
- continue
- all_defines[split_define[0].strip()] = split_define[1].strip()
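-    # e.g. (illustrative) 'FOO=2' contributes {'FOO': '2'}, while a bare
-    # 'BAR' defaults to {'BAR': '1'}.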
- # Get default compiler defines (if possible).
- if flavor == 'win':
- return all_defines # Default defines already processed in the loop above.
- if compiler_path:
- command = shlex.split(compiler_path)
- command.extend(['-E', '-dM', '-'])
- cpp_proc = subprocess.Popen(args=command, cwd='.',
- stdin=subprocess.PIPE, stdout=subprocess.PIPE)
- cpp_output = cpp_proc.communicate()[0]
- cpp_lines = cpp_output.split('\n')
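-    # Each line of interest looks like '#define __linux__ 1' (illustrative),
-    # so cpp_line_parts[1] is the macro name and cpp_line_parts[2], if
-    # present, its value.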
- for cpp_line in cpp_lines:
- if not cpp_line.strip():
- continue
- cpp_line_parts = cpp_line.split(' ', 2)
- key = cpp_line_parts[1]
- if len(cpp_line_parts) >= 3:
- val = cpp_line_parts[2]
- else:
- val = '1'
- all_defines[key] = val
-
- return all_defines
-
-
-def WriteIncludePaths(out, eclipse_langs, include_dirs):
- """Write the includes section of a CDT settings export file."""
-
- out.write(' <section name="org.eclipse.cdt.internal.ui.wizards.' \
- 'settingswizards.IncludePaths">\n')
- out.write(' <language name="holder for library settings"></language>\n')
- for lang in eclipse_langs:
- out.write(' <language name="%s">\n' % lang)
- for include_dir in include_dirs:
- out.write(' <includepath workspace_path="false">%s</includepath>\n' %
- include_dir)
- out.write(' </language>\n')
- out.write(' </section>\n')
-
-
-def WriteMacros(out, eclipse_langs, defines):
- """Write the macros section of a CDT settings export file."""
-
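-  # Each define is written as, e.g. (illustrative):
-  #   <macro><name>FOO</name><value>1</value></macro>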
- out.write(' <section name="org.eclipse.cdt.internal.ui.wizards.' \
- 'settingswizards.Macros">\n')
- out.write(' <language name="holder for library settings"></language>\n')
- for lang in eclipse_langs:
- out.write(' <language name="%s">\n' % lang)
- for key in sorted(defines.iterkeys()):
- out.write(' <macro><name>%s</name><value>%s</value></macro>\n' %
- (escape(key), escape(defines[key])))
- out.write(' </language>\n')
- out.write(' </section>\n')
-
-
-def GenerateOutputForConfig(target_list, target_dicts, data, params,
- config_name):
- options = params['options']
- generator_flags = params.get('generator_flags', {})
-
- # build_dir: relative path from source root to our output files.
- # e.g. "out/Debug"
- build_dir = os.path.join(generator_flags.get('output_dir', 'out'),
- config_name)
-
- toplevel_build = os.path.join(options.toplevel_dir, build_dir)
- # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
- # SHARED_INTERMEDIATE_DIR. Include both possible locations.
- shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
- os.path.join(toplevel_build, 'gen')]
-
- GenerateCdtSettingsFile(target_list,
- target_dicts,
- data,
- params,
- config_name,
- os.path.join(toplevel_build,
- 'eclipse-cdt-settings.xml'),
- options,
- shared_intermediate_dirs)
- GenerateClasspathFile(target_list,
- target_dicts,
- options.toplevel_dir,
- toplevel_build,
- os.path.join(toplevel_build,
- 'eclipse-classpath.xml'))
-
-
-def GenerateCdtSettingsFile(target_list, target_dicts, data, params,
- config_name, out_name, options,
- shared_intermediate_dirs):
- gyp.common.EnsureDirExists(out_name)
- with open(out_name, 'w') as out:
- out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
- out.write('<cdtprojectproperties>\n')
-
- eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
- 'GNU C++', 'GNU C', 'Assembly']
- compiler_path = GetCompilerPath(target_list, data, options)
- include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
- shared_intermediate_dirs,
- config_name, params, compiler_path)
- WriteIncludePaths(out, eclipse_langs, include_dirs)
- defines = GetAllDefines(target_list, target_dicts, data, config_name,
- params, compiler_path)
- WriteMacros(out, eclipse_langs, defines)
-
- out.write('</cdtprojectproperties>\n')
-
-
-def GenerateClasspathFile(target_list, target_dicts, toplevel_dir,
- toplevel_build, out_name):
- '''Generates a classpath file suitable for symbol navigation and code
- completion of Java code (such as in Android projects) by finding all
- .java and .jar files used as action inputs.'''
- gyp.common.EnsureDirExists(out_name)
- result = ET.Element('classpath')
-
- def AddElements(kind, paths):
- # First, we need to normalize the paths so they are all relative to the
- # toplevel dir.
- rel_paths = set()
- for path in paths:
- if os.path.isabs(path):
- rel_paths.add(os.path.relpath(path, toplevel_dir))
- else:
- rel_paths.add(path)
-
- for path in sorted(rel_paths):
- entry_element = ET.SubElement(result, 'classpathentry')
- entry_element.set('kind', kind)
- entry_element.set('path', path)
-
- AddElements('lib', GetJavaJars(target_list, target_dicts, toplevel_dir))
- AddElements('src', GetJavaSourceDirs(target_list, target_dicts, toplevel_dir))
-  # Include the standard JRE container.
- AddElements('con', ['org.eclipse.jdt.launching.JRE_CONTAINER'])
- # Include a dummy out folder so that Eclipse doesn't use the default /bin
- # folder in the root of the project.
- AddElements('output', [os.path.join(toplevel_build, '.eclipse-java-build')])
-
- ET.ElementTree(result).write(out_name)
-
-
-def GetJavaJars(target_list, target_dicts, toplevel_dir):
- '''Generates a sequence of all .jars used as inputs.'''
- for target_name in target_list:
- target = target_dicts[target_name]
- for action in target.get('actions', []):
- for input_ in action['inputs']:
- if os.path.splitext(input_)[1] == '.jar' and not input_.startswith('$'):
- if os.path.isabs(input_):
- yield input_
- else:
- yield os.path.join(os.path.dirname(target_name), input_)
-
-
-def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir):
- '''Generates a sequence of all likely java package root directories.'''
- for target_name in target_list:
- target = target_dicts[target_name]
- for action in target.get('actions', []):
- for input_ in action['inputs']:
- if (os.path.splitext(input_)[1] == '.java' and
- not input_.startswith('$')):
- dir_ = os.path.dirname(os.path.join(os.path.dirname(target_name),
- input_))
- # If there is a parent 'src' or 'java' folder, navigate up to it -
- # these are canonical package root names in Chromium. This will
- # break if 'src' or 'java' exists in the package structure. This
- # could be further improved by inspecting the java file for the
- # package name if this proves to be too fragile in practice.
- parent_search = dir_
- while os.path.basename(parent_search) not in ['src', 'java']:
- parent_search, _ = os.path.split(parent_search)
- if not parent_search or parent_search == toplevel_dir:
- # Didn't find a known root, just return the original path
- yield dir_
- break
- else:
- yield parent_search
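-# For illustration (hypothetical input): given an action input
-# 'a/b/src/com/foo/Bar.java', the loop above walks from 'a/b/src/com/foo'
-# up to 'a/b/src' and yields that as the package root; if no 'src' or
-# 'java' ancestor exists, the file's own directory is yielded instead.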
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- """Generate an XML settings file that can be imported into a CDT project."""
-
- if params['options'].generator_output:
- raise NotImplementedError("--generator_output not implemented for eclipse")
-
- user_config = params.get('generator_flags', {}).get('config', None)
- if user_config:
- GenerateOutputForConfig(target_list, target_dicts, data, params,
- user_config)
- else:
- config_names = target_dicts[target_list[0]]['configurations'].keys()
- for config_name in config_names:
- GenerateOutputForConfig(target_list, target_dicts, data, params,
- config_name)
-
diff --git a/deps/gyp/pylib/gyp/generator/gypd.py b/deps/gyp/pylib/gyp/generator/gypd.py
deleted file mode 100644
index 3efdb9966a..0000000000
--- a/deps/gyp/pylib/gyp/generator/gypd.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""gypd output module
-
-This module produces gyp input as its output. Output files are given the
-.gypd extension to avoid overwriting the .gyp files that they are generated
-from. Internal references to .gyp files (such as those found in
-"dependencies" sections) are not adjusted to point to .gypd files instead;
-unlike other paths, which are relative to the .gyp or .gypd file, such paths
-are relative to the directory from which gyp was run to create the .gypd file.
-
-This generator module is intended to be a sample and a debugging aid, hence
-the "d" for "debug" in .gypd. It is useful to inspect the results of the
-various merges, expansions, and conditional evaluations performed by gyp
-and to see a representation of what would be fed to a generator module.
-
-It's not advisable to rename .gypd files produced by this module to .gyp,
-because they will have all merges, expansions, and evaluations already
-performed and the relevant constructs not present in the output; paths to
-dependencies may be wrong; and various sections that do not belong in .gyp
-files such as "included_files" and "*_excluded" will be present.
-Output will also be stripped of comments. This is not intended to be a
-general-purpose gyp pretty-printer; for that, you probably just want to
-run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
-comments but won't do all of the other things done to this module's output.
-
-The specific formatting of the output generated by this module is subject
-to change.
-"""
-
-
-import gyp.common
-import errno
-import os
-import pprint
-
-
-# These variables should just be spit back out as variable references.
-_generator_identity_variables = [
- 'CONFIGURATION_NAME',
- 'EXECUTABLE_PREFIX',
- 'EXECUTABLE_SUFFIX',
- 'INTERMEDIATE_DIR',
- 'LIB_DIR',
- 'PRODUCT_DIR',
- 'RULE_INPUT_ROOT',
- 'RULE_INPUT_DIRNAME',
- 'RULE_INPUT_EXT',
- 'RULE_INPUT_NAME',
- 'RULE_INPUT_PATH',
- 'SHARED_INTERMEDIATE_DIR',
- 'SHARED_LIB_DIR',
- 'SHARED_LIB_PREFIX',
- 'SHARED_LIB_SUFFIX',
- 'STATIC_LIB_PREFIX',
- 'STATIC_LIB_SUFFIX',
-]
-
-# gypd doesn't define a default value for OS like many other generator
-# modules. Specify "-D OS=whatever" on the command line to provide a value.
-generator_default_variables = {
-}
-
-# gypd supports multiple toolsets
-generator_supports_multiple_toolsets = True
-
-# TODO(mark): This always uses <, which isn't right. The input module should
-# notify the generator to tell it which phase it is operating in, and this
-# module should use < for the early phase and then switch to > for the late
-# phase. Bonus points for carrying @ back into the output too.
-for v in _generator_identity_variables:
- generator_default_variables[v] = '<(%s)' % v
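-# For illustration: after this loop, e.g.
-# generator_default_variables['PRODUCT_DIR'] == '<(PRODUCT_DIR)', so the
-# variable is echoed back into the .gypd output as a variable reference.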
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- output_files = {}
- for qualified_target in target_list:
- [input_file, target] = \
- gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
-
- if input_file[-4:] != '.gyp':
- continue
- input_file_stem = input_file[:-4]
- output_file = input_file_stem + params['options'].suffix + '.gypd'
-
-    if output_file not in output_files:
- output_files[output_file] = input_file
-
- for output_file, input_file in output_files.iteritems():
- output = open(output_file, 'w')
- pprint.pprint(data[input_file], output)
- output.close()
diff --git a/deps/gyp/pylib/gyp/generator/gypsh.py b/deps/gyp/pylib/gyp/generator/gypsh.py
deleted file mode 100644
index bd405f43a9..0000000000
--- a/deps/gyp/pylib/gyp/generator/gypsh.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""gypsh output module
-
-gypsh is a GYP shell. It's not really a generator per se. All it does is
-fire up an interactive Python session with a few local variables set to the
-variables passed to the generator. Like gypd, it's intended as a debugging
-aid, to facilitate the exploration of .gyp structures after being processed
-by the input module.
-
-The expected usage is "gyp -f gypsh -D OS=desired_os".
-"""
-
-
-import code
-import sys
-
-
-# All of this stuff about generator variables was lovingly ripped from gypd.py.
-# That module has a much better description of what's going on and why.
-_generator_identity_variables = [
- 'EXECUTABLE_PREFIX',
- 'EXECUTABLE_SUFFIX',
- 'INTERMEDIATE_DIR',
- 'PRODUCT_DIR',
- 'RULE_INPUT_ROOT',
- 'RULE_INPUT_DIRNAME',
- 'RULE_INPUT_EXT',
- 'RULE_INPUT_NAME',
- 'RULE_INPUT_PATH',
- 'SHARED_INTERMEDIATE_DIR',
-]
-
-generator_default_variables = {
-}
-
-for v in _generator_identity_variables:
- generator_default_variables[v] = '<(%s)' % v
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- locals = {
- 'target_list': target_list,
- 'target_dicts': target_dicts,
- 'data': data,
- }
-
- # Use a banner that looks like the stock Python one and like what
- # code.interact uses by default, but tack on something to indicate what
- # locals are available, and identify gypsh.
- banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \
- (sys.version, sys.platform, repr(sorted(locals.keys())))
-
- code.interact(banner, local=locals)
diff --git a/deps/gyp/pylib/gyp/generator/make.py b/deps/gyp/pylib/gyp/generator/make.py
deleted file mode 100644
index 310fad7037..0000000000
--- a/deps/gyp/pylib/gyp/generator/make.py
+++ /dev/null
@@ -1,2219 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Notes:
-#
-# This is all roughly based on the Makefile system used by the Linux
-# kernel, but is a non-recursive make -- we put the entire dependency
-# graph in front of make and let it figure it out.
-#
-# The code below generates a separate .mk file for each target, but
-# all are sourced by the top-level Makefile. This means that all
-# variables in .mk-files clobber one another. Be careful to use :=
-# where appropriate for immediate evaluation, and similarly to watch
-# that you're not relying on a variable value to last between different
-# .mk files.
-#
-# TODOs:
-#
-# Global settings and utility functions are currently stuffed in the
-# toplevel Makefile. It may make sense to generate some .mk files on
-# the side to keep the files readable.
-
-import os
-import re
-import sys
-import subprocess
-import gyp
-import gyp.common
-import gyp.xcode_emulation
-from gyp.common import GetEnvironFallback
-from gyp.common import GypError
-
-generator_default_variables = {
- 'EXECUTABLE_PREFIX': '',
- 'EXECUTABLE_SUFFIX': '',
- 'STATIC_LIB_PREFIX': 'lib',
- 'SHARED_LIB_PREFIX': 'lib',
- 'STATIC_LIB_SUFFIX': '.a',
- 'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/$(TARGET)/geni',
- 'SHARED_INTERMEDIATE_DIR': '$(obj)/gen',
- 'PRODUCT_DIR': '$(builddir)',
- 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python.
- 'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python.
- 'RULE_INPUT_PATH': '$(abspath $<)',
- 'RULE_INPUT_EXT': '$(suffix $<)',
- 'RULE_INPUT_NAME': '$(notdir $<)',
- 'CONFIGURATION_NAME': '$(BUILDTYPE)',
-}
-
-# Make supports multiple toolsets
-generator_supports_multiple_toolsets = True
-
-# Request sorted dependencies in the order from dependents to dependencies.
-generator_wants_sorted_dependencies = False
-
-# Placates pylint.
-generator_additional_non_configuration_keys = []
-generator_additional_path_sections = []
-generator_extra_sources_for_rules = []
-generator_filelist_paths = None
-
-
-def CalculateVariables(default_variables, params):
- """Calculate additional variables for use in the build (called by gyp)."""
- flavor = gyp.common.GetFlavor(params)
- if flavor == 'mac':
- default_variables.setdefault('OS', 'mac')
- default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
- default_variables.setdefault('SHARED_LIB_DIR',
- generator_default_variables['PRODUCT_DIR'])
- default_variables.setdefault('LIB_DIR',
- generator_default_variables['PRODUCT_DIR'])
-
- # Copy additional generator configuration data from Xcode, which is shared
- # by the Mac Make generator.
- import gyp.generator.xcode as xcode_generator
- global generator_additional_non_configuration_keys
- generator_additional_non_configuration_keys = getattr(xcode_generator,
- 'generator_additional_non_configuration_keys', [])
- global generator_additional_path_sections
- generator_additional_path_sections = getattr(xcode_generator,
- 'generator_additional_path_sections', [])
- global generator_extra_sources_for_rules
- generator_extra_sources_for_rules = getattr(xcode_generator,
- 'generator_extra_sources_for_rules', [])
- COMPILABLE_EXTENSIONS.update({'.m': 'objc', '.mm' : 'objcxx'})
- else:
- operating_system = flavor
- if flavor == 'android':
- operating_system = 'linux' # Keep this legacy behavior for now.
- default_variables.setdefault('OS', operating_system)
- default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
- default_variables.setdefault('SHARED_LIB_DIR','$(builddir)/lib.$(TOOLSET)')
- default_variables.setdefault('LIB_DIR', '$(obj).$(TOOLSET)')
-
-
-def CalculateGeneratorInputInfo(params):
- """Calculate the generator specific info that gets fed to input (called by
- gyp)."""
- generator_flags = params.get('generator_flags', {})
- android_ndk_version = generator_flags.get('android_ndk_version', None)
- # Android NDK requires a strict link order.
- if android_ndk_version:
- global generator_wants_sorted_dependencies
- generator_wants_sorted_dependencies = True
-
- output_dir = params['options'].generator_output or \
- params['options'].toplevel_dir
- builddir_name = generator_flags.get('output_dir', 'out')
- qualified_out_dir = os.path.normpath(os.path.join(
- output_dir, builddir_name, 'gypfiles'))
-
- global generator_filelist_paths
- generator_filelist_paths = {
- 'toplevel': params['options'].toplevel_dir,
- 'qualified_out_dir': qualified_out_dir,
- }
-
-
-# The .d checking code below uses these functions:
-# wildcard, sort, foreach, shell, wordlist
-# wildcard can handle spaces, the rest can't.
-# Since I could find no way to make foreach work with spaces in filenames
-# correctly, the .d files have spaces replaced with another character. The .d
-# file for
-# Chromium\ Framework.framework/foo
-# is for example
-# out/Release/.deps/out/Release/Chromium?Framework.framework/foo
-# This is the replacement character.
-SPACE_REPLACEMENT = '?'
-
-
-LINK_COMMANDS_LINUX = """\
-quiet_cmd_alink = AR($(TOOLSET)) $@
-cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
-
-quiet_cmd_alink_thin = AR($(TOOLSET)) $@
-cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
-
-# Due to circular dependencies between libraries :(, we wrap the
-# special "figure out circular dependencies" flags around the entire
-# input list during linking.
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
-
-# We support two kinds of shared objects (.so):
-# 1) shared_library, which is just bundling together many dependent libraries
-# into a link line.
-# 2) loadable_module, which is generating a module intended for dlopen().
-#
-# They differ only slightly:
-# In the former case, we want to package all dependent code into the .so.
-# In the latter case, we want to package just the API exposed by the
-# outermost module.
-# This means shared_library uses --whole-archive, while loadable_module doesn't.
-# (Note that --whole-archive is incompatible with the --start-group used in
-# normal linking.)
-
-# Other shared-object link notes:
-# - Set SONAME to the library filename so our binaries don't reference
-# the local, absolute paths used on the link command-line.
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
-"""
-
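-# For illustration (hypothetical values): with LINK.target = g++ and a
-# target libfoo.so, cmd_solink above expands to roughly
-#   g++ -shared <ldflags> -Wl,-soname=libfoo.so -o libfoo.so \
-#       -Wl,--whole-archive obj/a.o libdep.a -Wl,--no-whole-archive -lm
-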
-LINK_COMMANDS_MAC = """\
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-"""
-
-LINK_COMMANDS_ANDROID = """\
-quiet_cmd_alink = AR($(TOOLSET)) $@
-cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
-
-quiet_cmd_alink_thin = AR($(TOOLSET)) $@
-cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
-
-# Due to circular dependencies between libraries :(, we wrap the
-# special "figure out circular dependencies" flags around the entire
-# input list during linking.
-quiet_cmd_link = LINK($(TOOLSET)) $@
-quiet_cmd_link_host = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
-cmd_link_host = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
-
-# Other shared-object link notes:
-# - Set SONAME to the library filename so our binaries don't reference
-# the local, absolute paths used on the link command-line.
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
-quiet_cmd_solink_module_host = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-"""
-
-
-LINK_COMMANDS_AIX = """\
-quiet_cmd_alink = AR($(TOOLSET)) $@
-cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)
-
-quiet_cmd_alink_thin = AR($(TOOLSET)) $@
-cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-"""
-
-
-# Header of toplevel Makefile.
-# This should go into the build tree, but it's easier to keep it here for now.
-SHARED_HEADER = ("""\
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := %(srcdir)s
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= %(builddir)s
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= %(default_configuration)s
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-%(make_global_settings)s
-
-CC.target ?= %(CC.target)s
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= %(CXX.target)s
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= %(LINK.target)s
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= %(CC.host)s
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= %(CXX.host)s
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= %(LINK.host)s
-LDFLAGS.host ?=
-AR.host ?= %(AR.host)s
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),""" + SPACE_REPLACEMENT + """,$1)
-unreplace_spaces = $(subst """ + SPACE_REPLACEMENT + """,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \\
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters."""
-r"""
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-"""
-"""
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-%(extra_commands)s
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = ln -f "$<" "$@" 2>/dev/null || (rm -rf "$@" && cp %(copy_archive_args)s "$<" "$@")
-
-%(link_commands)s
-"""
-
-r"""
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%%s\n' '$(call escape_quotes,$(1))'
-"""
-"""
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain """ + SPACE_REPLACEMENT + \
- """ instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
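-# For example, when cmd_$(1) and the logged cmd_$@ are identical strings,
-# each $(subst ...) above produces the empty string, so command_changed is
-# false and do_cmd skips the rule body.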
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\\
- for p in $(POSTBUILDS); do\\
- eval $$p;\\
- E=$$?;\\
- if [ $$E -ne 0 ]; then\\
- break;\\
- fi;\\
- done;\\
- if [ $$E -ne 0 ]; then\\
- rm -rf "$@";\\
- exit $$E;\\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains """ + \
- SPACE_REPLACEMENT + """ for
-# spaces already and dirx strips the """ + SPACE_REPLACEMENT + \
- """ characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word %(flock_index)d,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "%(default_target)s" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: %(default_target)s
-%(default_target)s:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-""")
-
-SHARED_HEADER_MAC_COMMANDS = """
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flags by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-"""
-
-
-def WriteRootHeaderSuffixRules(writer):
- extensions = sorted(COMPILABLE_EXTENSIONS.keys(), key=str.lower)
-
- writer.write('# Suffix rules, putting all outputs into $(obj).\n')
- for ext in extensions:
- writer.write('$(obj).$(TOOLSET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD\n' % ext)
- writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])
-
- writer.write('\n# Try building from generated source, too.\n')
- for ext in extensions:
- writer.write(
- '$(obj).$(TOOLSET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD\n' % ext)
- writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])
- writer.write('\n')
- for ext in extensions:
- writer.write('$(obj).$(TOOLSET)/%%.o: $(obj)/%%%s FORCE_DO_CMD\n' % ext)
- writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])
- writer.write('\n')
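-  # For illustration, for ext == '.cc' the first loop above emits:
-  #   $(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
-  #       @$(call do_cmd,cxx,1)   (tab-indented in the Makefile)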
-
-
-SHARED_HEADER_SUFFIX_RULES_COMMENT1 = ("""\
-# Suffix rules, putting all outputs into $(obj).
-""")
-
-
-SHARED_HEADER_SUFFIX_RULES_COMMENT2 = ("""\
-# Try building from generated source, too.
-""")
-
-
-SHARED_FOOTER = """\
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
-"""
-
-header = """\
-# This file is generated by gyp; do not edit.
-
-"""
-
-# Maps every compilable file extension to the do_cmd that compiles it.
-COMPILABLE_EXTENSIONS = {
- '.c': 'cc',
- '.cc': 'cxx',
- '.cpp': 'cxx',
- '.cxx': 'cxx',
- '.s': 'cc',
- '.S': 'cc',
-}
-
-def Compilable(filename):
- """Return true if the file is compilable (should be in OBJS)."""
- for res in (filename.endswith(e) for e in COMPILABLE_EXTENSIONS):
- if res:
- return True
- return False
-
-
-def Linkable(filename):
- """Return true if the file is linkable (should be on the link line)."""
- return filename.endswith('.o')
-
-
-def Target(filename):
- """Translate a compilable filename to its .o target."""
- return os.path.splitext(filename)[0] + '.o'
-
-
-def EscapeShellArgument(s):
- """Quotes an argument so that it will be interpreted literally by a POSIX
- shell. Taken from
- http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
- """
- return "'" + s.replace("'", "'\\''") + "'"
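-# For illustration: EscapeShellArgument("it's") returns 'it'\''s' (the outer
-# single quotes are part of the string), which a POSIX shell reads back
-# literally as: it's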
-
-
-def EscapeMakeVariableExpansion(s):
-  """Make has its own variable expansion syntax using $. We must escape it
-  for the string to be interpreted literally."""
- return s.replace('$', '$$')
-
-
-def EscapeCppDefine(s):
- """Escapes a CPP define so that it will reach the compiler unaltered."""
- s = EscapeShellArgument(s)
- s = EscapeMakeVariableExpansion(s)
-  # '#' characters must be escaped even when embedded in a string, else Make
-  # will treat them as the start of a comment.
- return s.replace('#', r'\#')
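-# For illustration: EscapeCppDefine('FOO="x#y"') returns 'FOO="x\#y"',
-# single-quoted for the shell and with the '#' escaped for Make.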
-
-
-def QuoteIfNecessary(string):
- """TODO: Should this ideally be replaced with one or more of the above
- functions?"""
- if '"' in string:
- string = '"' + string.replace('"', '\\"') + '"'
- return string
-
-
-def StringToMakefileVariable(string):
- """Convert a string to a value that is acceptable as a make variable name."""
- return re.sub('[^a-zA-Z0-9_]', '_', string)
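-# For illustration: StringToMakefileVariable('out/Default:foo#host') returns
-# 'out_Default_foo_host'.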
-
-
-srcdir_prefix = ''
-def Sourceify(path):
- """Convert a path to its source directory form."""
- if '$(' in path:
- return path
- if os.path.isabs(path):
- return path
- return srcdir_prefix + path
-
-
-def QuoteSpaces(s, quote=r'\ '):
- return s.replace(' ', quote)
-
-
-# TODO: Avoid code duplication with _ValidateSourcesForMSVSProject in msvs.py.
-def _ValidateSourcesForOSX(spec, all_sources):
-  """Ensures that duplicate basenames are not specified in the source list.
-
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-    all_sources: A list of all source files for the target.
-  """
- if spec.get('type', None) != 'static_library':
- return
-
- basenames = {}
- for source in all_sources:
- name, ext = os.path.splitext(source)
- is_compiled_file = ext in [
- '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
- if not is_compiled_file:
- continue
- basename = os.path.basename(name) # Don't include extension.
- basenames.setdefault(basename, []).append(source)
-
- error = ''
- for basename, files in basenames.iteritems():
- if len(files) > 1:
- error += ' %s: %s\n' % (basename, ' '.join(files))
-
- if error:
- print('static library %s has several files with the same basename:\n' %
- spec['target_name'] + error + 'libtool on OS X will generate' +
- ' warnings for them.')
- raise GypError('Duplicate basenames in sources section, see list above')
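-# For illustration (hypothetical target): a static_library with sources
-# ['a/util.cc', 'b/util.cc'] maps the basename 'util' to both files, so the
-# check above raises GypError.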
-
-
-# Map from qualified target to path to output.
-target_outputs = {}
-# Map from qualified target to any linkable output. A subset
-# of target_outputs. E.g. when mybinary depends on liba, we want to
-# include liba in the linker line; when otherbinary depends on
-# mybinary, we just want to build mybinary first.
-target_link_deps = {}
-
-
-class MakefileWriter(object):
- """MakefileWriter packages up the writing of one target-specific foobar.mk.
-
-  Its only real entry point is Write(); the class mostly provides namespacing.
- """
-
- def __init__(self, generator_flags, flavor):
- self.generator_flags = generator_flags
- self.flavor = flavor
-
- self.suffix_rules_srcdir = {}
- self.suffix_rules_objdir1 = {}
- self.suffix_rules_objdir2 = {}
-
- # Generate suffix rules for all compilable extensions.
- for ext in COMPILABLE_EXTENSIONS.keys():
- # Suffix rules for source folder.
- self.suffix_rules_srcdir.update({ext: ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD
- @$(call do_cmd,%s,1)
-""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
-
- # Suffix rules for generated source files.
- self.suffix_rules_objdir1.update({ext: ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD
- @$(call do_cmd,%s,1)
-""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
- self.suffix_rules_objdir2.update({ext: ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
- @$(call do_cmd,%s,1)
-""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
-
-
- def Write(self, qualified_target, base_path, output_filename, spec, configs,
- part_of_all):
- """The main entry point: writes a .mk file for a single target.
-
- Arguments:
- qualified_target: target we're generating
- base_path: path relative to source root we're building in, used to resolve
- target-relative paths
- output_filename: output .mk file name to write
- spec, configs: gyp info
- part_of_all: flag indicating this target is part of 'all'
- """
- gyp.common.EnsureDirExists(output_filename)
-
- self.fp = open(output_filename, 'w')
-
- self.fp.write(header)
-
- self.qualified_target = qualified_target
- self.path = base_path
- self.target = spec['target_name']
- self.type = spec['type']
- self.toolset = spec['toolset']
-
- self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
- if self.flavor == 'mac':
- self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
- else:
- self.xcode_settings = None
-
- deps, link_deps = self.ComputeDeps(spec)
-
- # Some of the generation below can add extra output, sources, or
- # link dependencies. All of the out params of the functions that
- # follow use names like extra_foo.
- extra_outputs = []
- extra_sources = []
- extra_link_deps = []
- extra_mac_bundle_resources = []
- mac_bundle_deps = []
-
- if self.is_mac_bundle:
- self.output = self.ComputeMacBundleOutput(spec)
- self.output_binary = self.ComputeMacBundleBinaryOutput(spec)
- else:
- self.output = self.output_binary = self.ComputeOutput(spec)
-
- self.is_standalone_static_library = bool(
- spec.get('standalone_static_library', 0))
- self._INSTALLABLE_TARGETS = ('executable', 'loadable_module',
- 'shared_library')
- if (self.is_standalone_static_library or
- self.type in self._INSTALLABLE_TARGETS):
- self.alias = os.path.basename(self.output)
- install_path = self._InstallableTargetInstallPath()
- else:
- self.alias = self.output
- install_path = self.output
-
- self.WriteLn("TOOLSET := " + self.toolset)
- self.WriteLn("TARGET := " + self.target)
-
- # Actions must come first, since they can generate more OBJs for use below.
- if 'actions' in spec:
- self.WriteActions(spec['actions'], extra_sources, extra_outputs,
- extra_mac_bundle_resources, part_of_all)
-
-    # Rules must come early, like actions.
- if 'rules' in spec:
- self.WriteRules(spec['rules'], extra_sources, extra_outputs,
- extra_mac_bundle_resources, part_of_all)
-
- if 'copies' in spec:
- self.WriteCopies(spec['copies'], extra_outputs, part_of_all)
-
- # Bundle resources.
- if self.is_mac_bundle:
- all_mac_bundle_resources = (
- spec.get('mac_bundle_resources', []) + extra_mac_bundle_resources)
- self.WriteMacBundleResources(all_mac_bundle_resources, mac_bundle_deps)
- self.WriteMacInfoPlist(mac_bundle_deps)
-
- # Sources.
- all_sources = spec.get('sources', []) + extra_sources
- if all_sources:
- if self.flavor == 'mac':
- # libtool on OS X generates warnings for duplicate basenames in the same
- # target.
- _ValidateSourcesForOSX(spec, all_sources)
- self.WriteSources(
- configs, deps, all_sources, extra_outputs,
- extra_link_deps, part_of_all,
- gyp.xcode_emulation.MacPrefixHeader(
- self.xcode_settings, lambda p: Sourceify(self.Absolutify(p)),
- self.Pchify))
- sources = filter(Compilable, all_sources)
- if sources:
- self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
- extensions = set([os.path.splitext(s)[1] for s in sources])
- for ext in extensions:
- if ext in self.suffix_rules_srcdir:
- self.WriteLn(self.suffix_rules_srcdir[ext])
- self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2)
- for ext in extensions:
- if ext in self.suffix_rules_objdir1:
- self.WriteLn(self.suffix_rules_objdir1[ext])
- for ext in extensions:
- if ext in self.suffix_rules_objdir2:
- self.WriteLn(self.suffix_rules_objdir2[ext])
- self.WriteLn('# End of this set of suffix rules')
-
- # Add dependency from bundle to bundle binary.
- if self.is_mac_bundle:
- mac_bundle_deps.append(self.output_binary)
-
- self.WriteTarget(spec, configs, deps, extra_link_deps + link_deps,
- mac_bundle_deps, extra_outputs, part_of_all)
-
- # Update global list of target outputs, used in dependency tracking.
- target_outputs[qualified_target] = install_path
-
- # Update global list of link dependencies.
- if self.type in ('static_library', 'shared_library'):
- target_link_deps[qualified_target] = self.output_binary
-
-    # Currently all NDK versions have the same effect, but in future the
-    # behavior could be different.
- if self.generator_flags.get('android_ndk_version', None):
- self.WriteAndroidNdkModuleRule(self.target, all_sources, link_deps)
-
- self.fp.close()
-
-
- def WriteSubMake(self, output_filename, makefile_path, targets, build_dir):
- """Write a "sub-project" Makefile.
-
- This is a small, wrapper Makefile that calls the top-level Makefile to build
- the targets from a single gyp file (i.e. a sub-project).
-
- Arguments:
- output_filename: sub-project Makefile name to write
- makefile_path: path to the top-level Makefile
- targets: list of "all" targets for this sub-project
- build_dir: build output directory, relative to the sub-project
- """
- gyp.common.EnsureDirExists(output_filename)
- self.fp = open(output_filename, 'w')
- self.fp.write(header)
- # For consistency with other builders, put sub-project build output in the
- # sub-project dir (see test/subdirectory/gyptest-subdir-all.py).
- self.WriteLn('export builddir_name ?= %s' %
- os.path.join(os.path.dirname(output_filename), build_dir))
- self.WriteLn('.PHONY: all')
- self.WriteLn('all:')
- if makefile_path:
- makefile_path = ' -C ' + makefile_path
- self.WriteLn('\t$(MAKE)%s %s' % (makefile_path, ' '.join(targets)))
- self.fp.close()
-
-
- def WriteActions(self, actions, extra_sources, extra_outputs,
- extra_mac_bundle_resources, part_of_all):
- """Write Makefile code for any 'actions' from the gyp input.
-
- extra_sources: a list that will be filled in with newly generated source
- files, if any
- extra_outputs: a list that will be filled in with any outputs of these
- actions (used to make other pieces dependent on these
- actions)
- part_of_all: flag indicating this target is part of 'all'
- """
- env = self.GetSortedXcodeEnv()
- for action in actions:
- name = StringToMakefileVariable('%s_%s' % (self.qualified_target,
- action['action_name']))
- self.WriteLn('### Rules for action "%s":' % action['action_name'])
- inputs = action['inputs']
- outputs = action['outputs']
-
- # Build up a list of outputs.
- # Collect the output dirs we'll need.
- dirs = set()
- for out in outputs:
- dir = os.path.split(out)[0]
- if dir:
- dirs.add(dir)
- if int(action.get('process_outputs_as_sources', False)):
- extra_sources += outputs
- if int(action.get('process_outputs_as_mac_bundle_resources', False)):
- extra_mac_bundle_resources += outputs
-
- # Write the actual command.
- action_commands = action['action']
- if self.flavor == 'mac':
- action_commands = [gyp.xcode_emulation.ExpandEnvVars(command, env)
- for command in action_commands]
- command = gyp.common.EncodePOSIXShellList(action_commands)
- if 'message' in action:
- self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message']))
- else:
- self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, name))
- if len(dirs) > 0:
- command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
-
- cd_action = 'cd %s; ' % Sourceify(self.path or '.')
-
- # command and cd_action get written to a toplevel variable called
- # cmd_foo. Toplevel variables can't handle things that change per
- # makefile like $(TARGET), so hardcode the target.
- command = command.replace('$(TARGET)', self.target)
- cd_action = cd_action.replace('$(TARGET)', self.target)
-
- # Set LD_LIBRARY_PATH in case the action runs an executable from this
- # build which links to shared libs from this build.
- # actions run on the host, so they should in theory only use host
- # libraries, but until everything is made cross-compile safe, also use
- # target libraries.
- # TODO(piman): when everything is cross-compile safe, remove lib.target
- self.WriteLn('cmd_%s = LD_LIBRARY_PATH=$(builddir)/lib.host:'
- '$(builddir)/lib.target:$$LD_LIBRARY_PATH; '
- 'export LD_LIBRARY_PATH; '
- '%s%s'
- % (name, cd_action, command))
- self.WriteLn()
- outputs = map(self.Absolutify, outputs)
- # The makefile rules are all relative to the top dir, but the gyp actions
- # are defined relative to their containing dir. This replaces the obj
- # variable for the action rule with an absolute version so that the output
- # goes in the right place.
- # Only write the 'obj' and 'builddir' rules for the "primary" output (:1);
- # it's superfluous for the "extra outputs", and this avoids accidentally
- # writing duplicate dummy rules for those outputs.
- # Same for environment.
- self.WriteLn("%s: obj := $(abs_obj)" % QuoteSpaces(outputs[0]))
- self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(outputs[0]))
- self.WriteSortedXcodeEnv(outputs[0], self.GetSortedXcodeEnv())
-
- for input in inputs:
- assert ' ' not in input, (
- "Spaces in action input filenames not supported (%s)" % input)
- for output in outputs:
- assert ' ' not in output, (
- "Spaces in action output filenames not supported (%s)" % output)
-
- # See the comment in WriteCopies about expanding env vars.
- outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
- inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
-
- self.WriteDoCmd(outputs, map(Sourceify, map(self.Absolutify, inputs)),
- part_of_all=part_of_all, command=name)
-
- # Stuff the outputs in a variable so we can refer to them later.
- outputs_variable = 'action_%s_outputs' % name
- self.WriteLn('%s := %s' % (outputs_variable, ' '.join(outputs)))
- extra_outputs.append('$(%s)' % outputs_variable)
- self.WriteLn()
-
- self.WriteLn()
-
-
- def WriteRules(self, rules, extra_sources, extra_outputs,
- extra_mac_bundle_resources, part_of_all):
- """Write Makefile code for any 'rules' from the gyp input.
-
- extra_sources: a list that will be filled in with newly generated source
- files, if any
- extra_outputs: a list that will be filled in with any outputs of these
- rules (used to make other pieces dependent on these rules)
- part_of_all: flag indicating this target is part of 'all'
- """
- env = self.GetSortedXcodeEnv()
- for rule in rules:
- name = StringToMakefileVariable('%s_%s' % (self.qualified_target,
- rule['rule_name']))
- count = 0
- self.WriteLn('### Generated for rule %s:' % name)
-
- all_outputs = []
-
- for rule_source in rule.get('rule_sources', []):
- dirs = set()
- (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
- (rule_source_root, rule_source_ext) = \
- os.path.splitext(rule_source_basename)
-
- outputs = [self.ExpandInputRoot(out, rule_source_root,
- rule_source_dirname)
- for out in rule['outputs']]
-
- for out in outputs:
- dir = os.path.dirname(out)
- if dir:
- dirs.add(dir)
- if int(rule.get('process_outputs_as_sources', False)):
- extra_sources += outputs
- if int(rule.get('process_outputs_as_mac_bundle_resources', False)):
- extra_mac_bundle_resources += outputs
- inputs = map(Sourceify, map(self.Absolutify, [rule_source] +
- rule.get('inputs', [])))
- actions = ['$(call do_cmd,%s_%d)' % (name, count)]
-
- if name == 'resources_grit':
- # HACK: This is ugly. Grit intentionally doesn't touch the
- # timestamp of its output file when the file doesn't change,
- # which is fine in hash-based dependency systems like scons
- # and forge, but not kosher in the make world. After some
- # discussion, hacking around it here seems like the least
- # amount of pain.
- actions += ['@touch --no-create $@']
-
- # See the comment in WriteCopies about expanding env vars.
- outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
- inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
-
- outputs = map(self.Absolutify, outputs)
- all_outputs += outputs
- # Only write the 'obj' and 'builddir' rules for the "primary" output
- # (:1); it's superfluous for the "extra outputs", and this avoids
- # accidentally writing duplicate dummy rules for those outputs.
- self.WriteLn('%s: obj := $(abs_obj)' % outputs[0])
- self.WriteLn('%s: builddir := $(abs_builddir)' % outputs[0])
- self.WriteMakeRule(outputs, inputs, actions,
- command="%s_%d" % (name, count))
- # Spaces in rule filenames are not supported, but rule variables have
- # spaces in them (e.g. RULE_INPUT_PATH expands to '$(abspath $<)').
- # The spaces within the variables are valid, so remove the variables
- # before checking.
- variables_with_spaces = re.compile(r'\$\([^ ]* \$<\)')
- for output in outputs:
- output = re.sub(variables_with_spaces, '', output)
- assert ' ' not in output, (
- "Spaces in rule filenames not yet supported (%s)" % output)
- self.WriteLn('all_deps += %s' % ' '.join(outputs))
-
- action = [self.ExpandInputRoot(ac, rule_source_root,
- rule_source_dirname)
- for ac in rule['action']]
- mkdirs = ''
- if len(dirs) > 0:
- mkdirs = 'mkdir -p %s; ' % ' '.join(dirs)
- cd_action = 'cd %s; ' % Sourceify(self.path or '.')
-
- # action, cd_action, and mkdirs get written to a toplevel variable
- # called cmd_foo. Toplevel variables can't handle things that change
- # per makefile like $(TARGET), so hardcode the target.
- if self.flavor == 'mac':
- action = [gyp.xcode_emulation.ExpandEnvVars(command, env)
- for command in action]
- action = gyp.common.EncodePOSIXShellList(action)
- action = action.replace('$(TARGET)', self.target)
- cd_action = cd_action.replace('$(TARGET)', self.target)
- mkdirs = mkdirs.replace('$(TARGET)', self.target)
-
- # Set LD_LIBRARY_PATH in case the rule runs an executable from this
- # build which links to shared libs from this build.
- # rules run on the host, so they should in theory only use host
- # libraries, but until everything is made cross-compile safe, also use
- # target libraries.
- # TODO(piman): when everything is cross-compile safe, remove lib.target
- self.WriteLn(
- "cmd_%(name)s_%(count)d = LD_LIBRARY_PATH="
- "$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; "
- "export LD_LIBRARY_PATH; "
- "%(cd_action)s%(mkdirs)s%(action)s" % {
- 'action': action,
- 'cd_action': cd_action,
- 'count': count,
- 'mkdirs': mkdirs,
- 'name': name,
- })
- self.WriteLn(
- 'quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@' % {
- 'count': count,
- 'name': name,
- })
- self.WriteLn()
- count += 1
-
- outputs_variable = 'rule_%s_outputs' % name
- self.WriteList(all_outputs, outputs_variable)
- extra_outputs.append('$(%s)' % outputs_variable)
-
- self.WriteLn('### Finished generating for rule: %s' % name)
- self.WriteLn()
- self.WriteLn('### Finished generating for all rules')
- self.WriteLn('')
-
-
- def WriteCopies(self, copies, extra_outputs, part_of_all):
- """Write Makefile code for any 'copies' from the gyp input.
-
- extra_outputs: a list that will be filled in with any outputs of this action
- (used to make other pieces dependent on this action)
- part_of_all: flag indicating this target is part of 'all'
- """
- self.WriteLn('### Generated for copy rule.')
-
- variable = StringToMakefileVariable(self.qualified_target + '_copies')
- outputs = []
- for copy in copies:
- for path in copy['files']:
- # Absolutify() may call normpath, and will strip trailing slashes.
- path = Sourceify(self.Absolutify(path))
- filename = os.path.split(path)[1]
- output = Sourceify(self.Absolutify(os.path.join(copy['destination'],
- filename)))
-
- # If the output path has variables in it, which happens in practice for
- # 'copies', writing the environment as target-local doesn't work,
- # because the variables are already needed for the target name.
- # Copying the environment variables into global make variables doesn't
- # work either, because then the .d files will potentially contain spaces
- # after variable expansion, and .d file handling cannot handle spaces.
- # As a workaround, manually expand variables at gyp time. Since 'copies'
- # can't run scripts, there's no need to write the env then.
- # WriteDoCmd() will escape spaces for .d files.
- env = self.GetSortedXcodeEnv()
- output = gyp.xcode_emulation.ExpandEnvVars(output, env)
- path = gyp.xcode_emulation.ExpandEnvVars(path, env)
- self.WriteDoCmd([output], [path], 'copy', part_of_all)
- outputs.append(output)
- self.WriteLn('%s = %s' % (variable, ' '.join(map(QuoteSpaces, outputs))))
- extra_outputs.append('$(%s)' % variable)
- self.WriteLn()
-
-
- def WriteMacBundleResources(self, resources, bundle_deps):
- """Writes Makefile code for 'mac_bundle_resources'."""
- self.WriteLn('### Generated for mac_bundle_resources')
-
- for output, res in gyp.xcode_emulation.GetMacBundleResources(
- generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
- map(Sourceify, map(self.Absolutify, resources))):
- _, ext = os.path.splitext(output)
- if ext != '.xcassets':
-        # Make does not support '.xcassets' emulation.
- self.WriteDoCmd([output], [res], 'mac_tool,,,copy-bundle-resource',
- part_of_all=True)
- bundle_deps.append(output)
-
-
- def WriteMacInfoPlist(self, bundle_deps):
- """Write Makefile code for bundle Info.plist files."""
- info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
- generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
- lambda p: Sourceify(self.Absolutify(p)))
- if not info_plist:
- return
- if defines:
- # Create an intermediate file to store preprocessed results.
- intermediate_plist = ('$(obj).$(TOOLSET)/$(TARGET)/' +
- os.path.basename(info_plist))
- self.WriteList(defines, intermediate_plist + ': INFOPLIST_DEFINES', '-D',
- quoter=EscapeCppDefine)
- self.WriteMakeRule([intermediate_plist], [info_plist],
- ['$(call do_cmd,infoplist)',
- # "Convert" the plist so that any weird whitespace changes from the
- # preprocessor do not affect the XML parser in mac_tool.
- '@plutil -convert xml1 $@ $@'])
- info_plist = intermediate_plist
-    # plists can contain environment variables; substitute them into the file.
- self.WriteSortedXcodeEnv(
- out, self.GetSortedXcodeEnv(additional_settings=extra_env))
- self.WriteDoCmd([out], [info_plist], 'mac_tool,,,copy-info-plist',
- part_of_all=True)
- bundle_deps.append(out)
-
-
- def WriteSources(self, configs, deps, sources,
- extra_outputs, extra_link_deps,
- part_of_all, precompiled_header):
- """Write Makefile code for any 'sources' from the gyp input.
- These are source files necessary to build the current target.
-
- configs, deps, sources: input from gyp.
- extra_outputs: a list of extra outputs this action should be dependent on;
- used to serialize action/rules before compilation
- extra_link_deps: a list that will be filled in with any outputs of
- compilation (to be used in link lines)
- part_of_all: flag indicating this target is part of 'all'
- """
-
- # Write configuration-specific variables for CFLAGS, etc.
- for configname in sorted(configs.keys()):
- config = configs[configname]
- self.WriteList(config.get('defines'), 'DEFS_%s' % configname, prefix='-D',
- quoter=EscapeCppDefine)
-
- if self.flavor == 'mac':
- cflags = self.xcode_settings.GetCflags(configname)
- cflags_c = self.xcode_settings.GetCflagsC(configname)
- cflags_cc = self.xcode_settings.GetCflagsCC(configname)
- cflags_objc = self.xcode_settings.GetCflagsObjC(configname)
- cflags_objcc = self.xcode_settings.GetCflagsObjCC(configname)
- else:
- cflags = config.get('cflags')
- cflags_c = config.get('cflags_c')
- cflags_cc = config.get('cflags_cc')
-
-      self.WriteLn("# Flags passed to all source files.")
-      self.WriteList(cflags, 'CFLAGS_%s' % configname)
-      self.WriteLn("# Flags passed only to C files.")
-      self.WriteList(cflags_c, 'CFLAGS_C_%s' % configname)
-      self.WriteLn("# Flags passed only to C++ files.")
-      self.WriteList(cflags_cc, 'CFLAGS_CC_%s' % configname)
-      if self.flavor == 'mac':
-        self.WriteLn("# Flags passed only to ObjC files.")
-        self.WriteList(cflags_objc, 'CFLAGS_OBJC_%s' % configname)
-        self.WriteLn("# Flags passed only to ObjC++ files.")
-        self.WriteList(cflags_objcc, 'CFLAGS_OBJCC_%s' % configname)
- includes = config.get('include_dirs')
- if includes:
- includes = map(Sourceify, map(self.Absolutify, includes))
- self.WriteList(includes, 'INCS_%s' % configname, prefix='-I')
-
- compilable = filter(Compilable, sources)
- objs = map(self.Objectify, map(self.Absolutify, map(Target, compilable)))
- self.WriteList(objs, 'OBJS')
-
- for obj in objs:
- assert ' ' not in obj, (
- "Spaces in object filenames not supported (%s)" % obj)
- self.WriteLn('# Add to the list of files we specially track '
- 'dependencies for.')
- self.WriteLn('all_deps += $(OBJS)')
- self.WriteLn()
-
- # Make sure our dependencies are built first.
- if deps:
- self.WriteMakeRule(['$(OBJS)'], deps,
- comment = 'Make sure our dependencies are built '
- 'before any of us.',
- order_only = True)
-
- # Make sure the actions and rules run first.
- # If they generate any extra headers etc., the per-.o file dep tracking
- # will catch the proper rebuilds, so order only is still ok here.
- if extra_outputs:
- self.WriteMakeRule(['$(OBJS)'], extra_outputs,
- comment = 'Make sure our actions/rules run '
- 'before any of us.',
- order_only = True)
-
-    pchdeps = precompiled_header.GetObjDependencies(compilable, objs)
- if pchdeps:
- self.WriteLn('# Dependencies from obj files to their precompiled headers')
- for source, obj, gch in pchdeps:
- self.WriteLn('%s: %s' % (obj, gch))
- self.WriteLn('# End precompiled header dependencies')
-
- if objs:
- extra_link_deps.append('$(OBJS)')
- self.WriteLn("""\
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.""")
- self.WriteLn("$(OBJS): TOOLSET := $(TOOLSET)")
- self.WriteLn("$(OBJS): GYP_CFLAGS := "
- "$(DEFS_$(BUILDTYPE)) "
- "$(INCS_$(BUILDTYPE)) "
- "%s " % precompiled_header.GetInclude('c') +
- "$(CFLAGS_$(BUILDTYPE)) "
- "$(CFLAGS_C_$(BUILDTYPE))")
- self.WriteLn("$(OBJS): GYP_CXXFLAGS := "
- "$(DEFS_$(BUILDTYPE)) "
- "$(INCS_$(BUILDTYPE)) "
- "%s " % precompiled_header.GetInclude('cc') +
- "$(CFLAGS_$(BUILDTYPE)) "
- "$(CFLAGS_CC_$(BUILDTYPE))")
- if self.flavor == 'mac':
- self.WriteLn("$(OBJS): GYP_OBJCFLAGS := "
- "$(DEFS_$(BUILDTYPE)) "
- "$(INCS_$(BUILDTYPE)) "
- "%s " % precompiled_header.GetInclude('m') +
- "$(CFLAGS_$(BUILDTYPE)) "
- "$(CFLAGS_C_$(BUILDTYPE)) "
- "$(CFLAGS_OBJC_$(BUILDTYPE))")
- self.WriteLn("$(OBJS): GYP_OBJCXXFLAGS := "
- "$(DEFS_$(BUILDTYPE)) "
- "$(INCS_$(BUILDTYPE)) "
- "%s " % precompiled_header.GetInclude('mm') +
- "$(CFLAGS_$(BUILDTYPE)) "
- "$(CFLAGS_CC_$(BUILDTYPE)) "
- "$(CFLAGS_OBJCC_$(BUILDTYPE))")
-
- self.WritePchTargets(precompiled_header.GetPchBuildCommands())
-
- # If there are any object files in our input file list, link them into our
- # output.
- extra_link_deps += filter(Linkable, sources)
-
- self.WriteLn()
-
- def WritePchTargets(self, pch_commands):
- """Writes make rules to compile prefix headers."""
- if not pch_commands:
- return
-
- for gch, lang_flag, lang, input in pch_commands:
- extra_flags = {
- 'c': '$(CFLAGS_C_$(BUILDTYPE))',
- 'cc': '$(CFLAGS_CC_$(BUILDTYPE))',
- 'm': '$(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))',
- 'mm': '$(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))',
- }[lang]
- var_name = {
- 'c': 'GYP_PCH_CFLAGS',
- 'cc': 'GYP_PCH_CXXFLAGS',
- 'm': 'GYP_PCH_OBJCFLAGS',
- 'mm': 'GYP_PCH_OBJCXXFLAGS',
- }[lang]
- self.WriteLn("%s: %s := %s " % (gch, var_name, lang_flag) +
- "$(DEFS_$(BUILDTYPE)) "
- "$(INCS_$(BUILDTYPE)) "
- "$(CFLAGS_$(BUILDTYPE)) " +
- extra_flags)
-
- self.WriteLn('%s: %s FORCE_DO_CMD' % (gch, input))
- self.WriteLn('\t@$(call do_cmd,pch_%s,1)' % lang)
- self.WriteLn('')
- assert ' ' not in gch, (
- "Spaces in gch filenames not supported (%s)" % gch)
- self.WriteLn('all_deps += %s' % gch)
- self.WriteLn('')
-
-
- def ComputeOutputBasename(self, spec):
- """Return the 'output basename' of a gyp spec.
-
- E.g., the loadable module 'foobar' in directory 'baz' will produce
- 'libfoobar.so'
- """
- assert not self.is_mac_bundle
-
- if self.flavor == 'mac' and self.type in (
- 'static_library', 'executable', 'shared_library', 'loadable_module'):
- return self.xcode_settings.GetExecutablePath()
-
- target = spec['target_name']
- target_prefix = ''
- target_ext = ''
- if self.type == 'static_library':
- if target[:3] == 'lib':
- target = target[3:]
- target_prefix = 'lib'
- target_ext = '.a'
- elif self.type in ('loadable_module', 'shared_library'):
- if target[:3] == 'lib':
- target = target[3:]
- target_prefix = 'lib'
- target_ext = '.so'
- elif self.type == 'none':
- target = '%s.stamp' % target
- elif self.type != 'executable':
- print ("ERROR: What output file should be generated?",
- "type", self.type, "target", target)
-
- target_prefix = spec.get('product_prefix', target_prefix)
- target = spec.get('product_name', target)
- product_ext = spec.get('product_extension')
- if product_ext:
- target_ext = '.' + product_ext
-
- return target_prefix + target + target_ext
-
-
- def _InstallImmediately(self):
- return self.toolset == 'target' and self.flavor == 'mac' and self.type in (
- 'static_library', 'executable', 'shared_library', 'loadable_module')
-
-
- def ComputeOutput(self, spec):
- """Return the 'output' (full output path) of a gyp spec.
-
- E.g., the loadable module 'foobar' in directory 'baz' will produce
- '$(obj)/baz/libfoobar.so'
- """
- assert not self.is_mac_bundle
-
- path = os.path.join('$(obj).' + self.toolset, self.path)
- if self.type == 'executable' or self._InstallImmediately():
- path = '$(builddir)'
- path = spec.get('product_dir', path)
- return os.path.join(path, self.ComputeOutputBasename(spec))
-
-
- def ComputeMacBundleOutput(self, spec):
- """Return the 'output' (full output path) to a bundle output directory."""
- assert self.is_mac_bundle
- path = generator_default_variables['PRODUCT_DIR']
- return os.path.join(path, self.xcode_settings.GetWrapperName())
-
-
- def ComputeMacBundleBinaryOutput(self, spec):
- """Return the 'output' (full output path) to the binary in a bundle."""
- path = generator_default_variables['PRODUCT_DIR']
- return os.path.join(path, self.xcode_settings.GetExecutablePath())
-
-
- def ComputeDeps(self, spec):
- """Compute the dependencies of a gyp spec.
-
- Returns a tuple (deps, link_deps), where each is a list of
- filenames that will need to be put in front of make for either
- building (deps) or linking (link_deps).
- """
- deps = []
- link_deps = []
- if 'dependencies' in spec:
- deps.extend([target_outputs[dep] for dep in spec['dependencies']
- if target_outputs[dep]])
- for dep in spec['dependencies']:
- if dep in target_link_deps:
- link_deps.append(target_link_deps[dep])
- deps.extend(link_deps)
- # TODO: It seems we need to transitively link in libraries (e.g. -lfoo)?
- # This hack makes it work:
- # link_deps.extend(spec.get('libraries', []))
- return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
-
-
- def WriteDependencyOnExtraOutputs(self, target, extra_outputs):
- self.WriteMakeRule([self.output_binary], extra_outputs,
- comment = 'Build our special outputs first.',
- order_only = True)
-
-
- def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps,
- extra_outputs, part_of_all):
- """Write Makefile code to produce the final target of the gyp spec.
-
- spec, configs: input from gyp.
- deps, link_deps: dependency lists; see ComputeDeps()
- extra_outputs: any extra outputs that our target should depend on
- part_of_all: flag indicating this target is part of 'all'
- """
-
- self.WriteLn('### Rules for final target.')
-
- if extra_outputs:
- self.WriteDependencyOnExtraOutputs(self.output_binary, extra_outputs)
- self.WriteMakeRule(extra_outputs, deps,
- comment=('Preserve order dependency of '
- 'special output on deps.'),
- order_only = True)
-
- target_postbuilds = {}
- if self.type != 'none':
- for configname in sorted(configs.keys()):
- config = configs[configname]
- if self.flavor == 'mac':
- ldflags = self.xcode_settings.GetLdflags(configname,
- generator_default_variables['PRODUCT_DIR'],
- lambda p: Sourceify(self.Absolutify(p)))
-
- # TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on.
- gyp_to_build = gyp.common.InvertRelativePath(self.path)
- target_postbuild = self.xcode_settings.AddImplicitPostbuilds(
- configname,
- QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
- self.output))),
- QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
- self.output_binary))))
- if target_postbuild:
- target_postbuilds[configname] = target_postbuild
- else:
- ldflags = config.get('ldflags', [])
- # Compute an rpath for this output if needed.
- if any(dep.endswith('.so') or '.so.' in dep for dep in deps):
- # We want to get the literal string "$ORIGIN" into the link command,
- # so we need lots of escaping.
- ldflags.append(r'-Wl,-rpath=\$$ORIGIN/lib.%s/' % self.toolset)
- ldflags.append(r'-Wl,-rpath-link=\$(builddir)/lib.%s/' %
- self.toolset)
- library_dirs = config.get('library_dirs', [])
- ldflags += [('-L%s' % library_dir) for library_dir in library_dirs]
- self.WriteList(ldflags, 'LDFLAGS_%s' % configname)
- if self.flavor == 'mac':
- self.WriteList(self.xcode_settings.GetLibtoolflags(configname),
- 'LIBTOOLFLAGS_%s' % configname)
- libraries = spec.get('libraries')
- if libraries:
- # Remove duplicate entries
- # Commented out due to https://code.google.com/p/gyp/issues/detail?id=419
- # libraries = gyp.common.uniquer(libraries)
- if self.flavor == 'mac':
- libraries = self.xcode_settings.AdjustLibraries(libraries)
- self.WriteList(libraries, 'LIBS')
- self.WriteLn('%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' %
- QuoteSpaces(self.output_binary))
- self.WriteLn('%s: LIBS := $(LIBS)' % QuoteSpaces(self.output_binary))
-
- if self.flavor == 'mac':
- self.WriteLn('%s: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))' %
- QuoteSpaces(self.output_binary))
-
- # Postbuild actions. Like actions, but implicitly depend on the target's
- # output.
- postbuilds = []
- if self.flavor == 'mac':
- if target_postbuilds:
- postbuilds.append('$(TARGET_POSTBUILDS_$(BUILDTYPE))')
- postbuilds.extend(
- gyp.xcode_emulation.GetSpecPostbuildCommands(spec))
-
- if postbuilds:
- # Envvars may be referenced by TARGET_POSTBUILDS_$(BUILDTYPE),
- # so we must output its definition first, since we declare variables
- # using ":=".
- self.WriteSortedXcodeEnv(self.output, self.GetSortedXcodePostbuildEnv())
-
- for configname in target_postbuilds:
- self.WriteLn('%s: TARGET_POSTBUILDS_%s := %s' %
- (QuoteSpaces(self.output),
- configname,
- gyp.common.EncodePOSIXShellList(target_postbuilds[configname])))
-
- # Postbuilds expect to be run in the gyp file's directory, so insert an
- # implicit postbuild to cd to there.
- postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.path]))
- for i in xrange(len(postbuilds)):
- if not postbuilds[i].startswith('$'):
- postbuilds[i] = EscapeShellArgument(postbuilds[i])
- self.WriteLn('%s: builddir := $(abs_builddir)' % QuoteSpaces(self.output))
- self.WriteLn('%s: POSTBUILDS := %s' % (
- QuoteSpaces(self.output), ' '.join(postbuilds)))
-
- # A bundle directory depends on its dependencies such as bundle resources
- # and bundle binary. When all dependencies have been built, the bundle
- # needs to be packaged.
- if self.is_mac_bundle:
- # If the framework doesn't contain a binary, then nothing depends
- # on the actions -- make the framework depend on them directly too.
- self.WriteDependencyOnExtraOutputs(self.output, extra_outputs)
-
- # Bundle dependencies. Note that the code below adds actions to this
- # target, so if you move these two lines, move the lines below as well.
- self.WriteList(map(QuoteSpaces, bundle_deps), 'BUNDLE_DEPS')
- self.WriteLn('%s: $(BUNDLE_DEPS)' % QuoteSpaces(self.output))
-
- # After the framework is built, package it. Needs to happen before
- # postbuilds, since postbuilds depend on this.
- if self.type in ('shared_library', 'loadable_module'):
- self.WriteLn('\t@$(call do_cmd,mac_package_framework,,,%s)' %
- self.xcode_settings.GetFrameworkVersion())
-
- # Bundle postbuilds can depend on the whole bundle, so run them after
- # the bundle is packaged, not already after the bundle binary is done.
- if postbuilds:
- self.WriteLn('\t@$(call do_postbuilds)')
- postbuilds = [] # Don't write postbuilds for target's output.
-
- # Needed by test/mac/gyptest-rebuild.py.
- self.WriteLn('\t@true # No-op, used by tests')
-
- # Since this target depends on binary and resources which are in
- # nested subfolders, the framework directory will be older than
- # its dependencies usually. To prevent this rule from executing
- # on every build (expensive, especially with postbuilds), explicitly
- # update the time on the framework directory.
- self.WriteLn('\t@touch -c %s' % QuoteSpaces(self.output))
-
- if postbuilds:
- assert not self.is_mac_bundle, ('Postbuilds for bundles should be done '
- 'on the bundle, not the binary (target \'%s\')' % self.target)
- assert 'product_dir' not in spec, ('Postbuilds do not work with '
- 'custom product_dir')
-
- if self.type == 'executable':
- self.WriteLn('%s: LD_INPUTS := %s' % (
- QuoteSpaces(self.output_binary),
- ' '.join(map(QuoteSpaces, link_deps))))
- if self.toolset == 'host' and self.flavor == 'android':
- self.WriteDoCmd([self.output_binary], link_deps, 'link_host',
- part_of_all, postbuilds=postbuilds)
- else:
- self.WriteDoCmd([self.output_binary], link_deps, 'link', part_of_all,
- postbuilds=postbuilds)
-
- elif self.type == 'static_library':
- for link_dep in link_deps:
- assert ' ' not in link_dep, (
- "Spaces in alink input filenames not supported (%s)" % link_dep)
- if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
- self.is_standalone_static_library):
- self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin',
- part_of_all, postbuilds=postbuilds)
- else:
- self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all,
- postbuilds=postbuilds)
- elif self.type == 'shared_library':
- self.WriteLn('%s: LD_INPUTS := %s' % (
- QuoteSpaces(self.output_binary),
- ' '.join(map(QuoteSpaces, link_deps))))
- self.WriteDoCmd([self.output_binary], link_deps, 'solink', part_of_all,
- postbuilds=postbuilds)
- elif self.type == 'loadable_module':
- for link_dep in link_deps:
- assert ' ' not in link_dep, (
- "Spaces in module input filenames not supported (%s)" % link_dep)
- if self.toolset == 'host' and self.flavor == 'android':
- self.WriteDoCmd([self.output_binary], link_deps, 'solink_module_host',
- part_of_all, postbuilds=postbuilds)
- else:
- self.WriteDoCmd(
- [self.output_binary], link_deps, 'solink_module', part_of_all,
- postbuilds=postbuilds)
- elif self.type == 'none':
- # Write a stamp line.
- self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all,
- postbuilds=postbuilds)
- else:
- print "WARNING: no output for", self.type, target
-
- # Add an alias for each target (if there are any outputs).
- # Installable target aliases are created below.
- if ((self.output and self.output != self.target) and
- (self.type not in self._INSTALLABLE_TARGETS)):
- self.WriteMakeRule([self.target], [self.output],
- comment='Add target alias', phony = True)
- if part_of_all:
- self.WriteMakeRule(['all'], [self.target],
- comment = 'Add target alias to "all" target.',
- phony = True)
-
- # Add special-case rules for our installable targets.
- # 1) They need to install to the build dir or "product" dir.
- # 2) They get shortcuts for building (e.g. "make chrome").
- # 3) They are part of "make all".
- if (self.type in self._INSTALLABLE_TARGETS or
- self.is_standalone_static_library):
- if self.type == 'shared_library':
- file_desc = 'shared library'
- elif self.type == 'static_library':
- file_desc = 'static library'
- else:
- file_desc = 'executable'
- install_path = self._InstallableTargetInstallPath()
- installable_deps = [self.output]
- if (self.flavor == 'mac' and not 'product_dir' in spec and
- self.toolset == 'target'):
- # On mac, products are created in install_path immediately.
- assert install_path == self.output, '%s != %s' % (
- install_path, self.output)
-
- # Point the target alias to the final binary output.
- self.WriteMakeRule([self.target], [install_path],
- comment='Add target alias', phony = True)
- if install_path != self.output:
- assert not self.is_mac_bundle # See comment a few lines above.
- self.WriteDoCmd([install_path], [self.output], 'copy',
- comment = 'Copy this to the %s output path.' %
- file_desc, part_of_all=part_of_all)
- installable_deps.append(install_path)
- if self.output != self.alias and self.alias != self.target:
- self.WriteMakeRule([self.alias], installable_deps,
- comment = 'Short alias for building this %s.' %
- file_desc, phony = True)
- if part_of_all:
- self.WriteMakeRule(['all'], [install_path],
- comment = 'Add %s to "all" target.' % file_desc,
- phony = True)
-
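- # For a hypothetical installable executable 'foo' that is part of 'all'
- # and installs to $(builddir)/foo, the alias logic above emits roughly:
- #   # Add target alias
- #   .PHONY: foo
- #   foo: $(builddir)/foo
- #   # Add executable to "all" target.
- #   .PHONY: all
- #   all: $(builddir)/foo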
-
- def WriteList(self, value_list, variable=None, prefix='',
- quoter=QuoteIfNecessary):
- """Write a variable definition that is a list of values.
-
- E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
- foo := blaha blahb
- but in a pretty-printed style.
- """
- values = ''
- if value_list:
- value_list = [quoter(prefix + l) for l in value_list]
- values = ' \\\n\t' + ' \\\n\t'.join(value_list)
- self.fp.write('%s :=%s\n\n' % (variable, values))
-
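- # Concretely, WriteList(['a', 'b'], 'foo', prefix='blah') emits:
- #   foo := \
- #     blaha \
- #     blahb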
-
- def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None,
- postbuilds=False):
- """Write a Makefile rule that uses do_cmd.
-
- This makes the outputs dependent on the command line that was run,
- as well as supporting the V= make command line flag.
- """
- suffix = ''
- if postbuilds:
- assert ',' not in command
- suffix = ',,1' # Tell do_cmd to honor $POSTBUILDS
- self.WriteMakeRule(outputs, inputs,
- actions = ['$(call do_cmd,%s%s)' % (command, suffix)],
- comment = comment,
- command = command,
- force = True)
- # Add our outputs to the list of targets we read depfiles from.
- # all_deps is only used for deps file reading, and for deps files we replace
- # spaces with ? because escaping doesn't work with make's $(sort) and
- # other functions.
- outputs = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs]
- self.WriteLn('all_deps += %s' % ' '.join(outputs))
-
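- # E.g. a hypothetical WriteDoCmd(['out.stamp'], ['a.gyp'], 'touch',
- # part_of_all) call expands, via WriteMakeRule below, to:
- #   out.stamp: TOOLSET := $(TOOLSET)
- #   out.stamp: a.gyp FORCE_DO_CMD
- #     $(call do_cmd,touch)
- #   all_deps += out.stamp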
-
- def WriteMakeRule(self, outputs, inputs, actions=None, comment=None,
- order_only=False, force=False, phony=False, command=None):
- """Write a Makefile rule, with some extra tricks.
-
- outputs: a list of outputs for the rule (note: this is not directly
- supported by make; see comments below)
- inputs: a list of inputs for the rule
- actions: a list of shell commands to run for the rule
- comment: a comment to put in the Makefile above the rule (also useful
- for making this Python script's code self-documenting)
- order_only: if true, makes the dependency order-only
- force: if true, include FORCE_DO_CMD as an order-only dep
- phony: if true, the rule does not actually generate the named output, the
- output is just a name to run the rule
- command: (optional) command name to generate unambiguous labels
- """
- outputs = map(QuoteSpaces, outputs)
- inputs = map(QuoteSpaces, inputs)
-
- if comment:
- self.WriteLn('# ' + comment)
- if phony:
- self.WriteLn('.PHONY: ' + ' '.join(outputs))
- if actions:
- self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0])
- force_append = ' FORCE_DO_CMD' if force else ''
-
- if order_only:
- # Order only rule: Just write a simple rule.
- # TODO(evanm): just make order_only a list of deps instead of this hack.
- self.WriteLn('%s: | %s%s' %
- (' '.join(outputs), ' '.join(inputs), force_append))
- elif len(outputs) == 1:
- # Regular rule, one output: Just write a simple rule.
- self.WriteLn('%s: %s%s' % (outputs[0], ' '.join(inputs), force_append))
- else:
- # Regular rule, more than one output: Multiple outputs are tricky in
- # make. We will write three rules:
- # - All outputs depend on an intermediate file.
- # - Make .INTERMEDIATE depend on the intermediate.
- # - The intermediate file depends on the inputs and executes the
- # actual command.
- # The intermediate recipe will 'touch' the intermediate file, and the
- # multi-output rule will have a do-nothing recipe.
- intermediate = "%s.intermediate" % (command if command else self.target)
- self.WriteLn('%s: %s' % (' '.join(outputs), intermediate))
- self.WriteLn('\t@:')
- self.WriteLn('%s: %s' % ('.INTERMEDIATE', intermediate))
- self.WriteLn('%s: %s%s' %
- (intermediate, ' '.join(inputs), force_append))
- actions.insert(0, '$(call do_cmd,touch)')
-
- if actions:
- for action in actions:
- self.WriteLn('\t%s' % action)
- self.WriteLn()
-
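- # The multi-output branch above, for hypothetical outputs ['o1', 'o2'],
- # inputs ['in'], one action, command='my_rule' and force=True, emits:
- #   o1 o2: my_rule.intermediate
- #     @:
- #   .INTERMEDIATE: my_rule.intermediate
- #   my_rule.intermediate: in FORCE_DO_CMD
- #     $(call do_cmd,touch)
- #     <the original action>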
-
- def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps):
- """Write a set of LOCAL_XXX definitions for Android NDK.
-
- These variable definitions will be used by Android NDK but do nothing for
- non-Android applications.
-
- Arguments:
- module_name: Android NDK module name, which must be unique among all
- module names.
- all_sources: A list of source files (will be filtered by Compilable).
- link_deps: A list of link dependencies, which must be sorted in
- the order from dependencies to dependents.
- """
- if self.type not in ('executable', 'shared_library', 'static_library'):
- return
-
- self.WriteLn('# Variable definitions for Android applications')
- self.WriteLn('include $(CLEAR_VARS)')
- self.WriteLn('LOCAL_MODULE := ' + module_name)
- self.WriteLn('LOCAL_CFLAGS := $(CFLAGS_$(BUILDTYPE)) '
- '$(DEFS_$(BUILDTYPE)) '
- # LOCAL_CFLAGS is applied to both C and C++. There is
- # no way to specify $(CFLAGS_C_$(BUILDTYPE)) only for C
- # sources.
- '$(CFLAGS_C_$(BUILDTYPE)) '
- # $(INCS_$(BUILDTYPE)) includes the prefix '-I' while
- # LOCAL_C_INCLUDES does not expect it. So put it in
- # LOCAL_CFLAGS.
- '$(INCS_$(BUILDTYPE))')
- # LOCAL_CXXFLAGS is obsolete and LOCAL_CPPFLAGS is preferred.
- self.WriteLn('LOCAL_CPPFLAGS := $(CFLAGS_CC_$(BUILDTYPE))')
- self.WriteLn('LOCAL_C_INCLUDES :=')
- self.WriteLn('LOCAL_LDLIBS := $(LDFLAGS_$(BUILDTYPE)) $(LIBS)')
-
- # Detect the C++ extension.
- cpp_ext = {'.cc': 0, '.cpp': 0, '.cxx': 0}
- default_cpp_ext = '.cpp'
- for filename in all_sources:
- ext = os.path.splitext(filename)[1]
- if ext in cpp_ext:
- cpp_ext[ext] += 1
- if cpp_ext[ext] > cpp_ext[default_cpp_ext]:
- default_cpp_ext = ext
- self.WriteLn('LOCAL_CPP_EXTENSION := ' + default_cpp_ext)
-
- self.WriteList(map(self.Absolutify, filter(Compilable, all_sources)),
- 'LOCAL_SRC_FILES')
-
- # Filter out those which do not match prefix and suffix and produce
- # the resulting list without prefix and suffix.
- def DepsToModules(deps, prefix, suffix):
- modules = []
- for filepath in deps:
- filename = os.path.basename(filepath)
- if filename.startswith(prefix) and filename.endswith(suffix):
- modules.append(filename[len(prefix):-len(suffix)])
- return modules
-
- # Retrieve the default value of 'SHARED_LIB_SUFFIX'
- params = {'flavor': 'linux'}
- default_variables = {}
- CalculateVariables(default_variables, params)
-
- self.WriteList(
- DepsToModules(link_deps,
- generator_default_variables['SHARED_LIB_PREFIX'],
- default_variables['SHARED_LIB_SUFFIX']),
- 'LOCAL_SHARED_LIBRARIES')
- self.WriteList(
- DepsToModules(link_deps,
- generator_default_variables['STATIC_LIB_PREFIX'],
- generator_default_variables['STATIC_LIB_SUFFIX']),
- 'LOCAL_STATIC_LIBRARIES')
-
- if self.type == 'executable':
- self.WriteLn('include $(BUILD_EXECUTABLE)')
- elif self.type == 'shared_library':
- self.WriteLn('include $(BUILD_SHARED_LIBRARY)')
- elif self.type == 'static_library':
- self.WriteLn('include $(BUILD_STATIC_LIBRARY)')
- self.WriteLn()
-
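- # DepsToModules above recovers NDK module names by stripping the library
- # prefix and suffix; e.g. (hypothetical inputs)
- #   DepsToModules(['out/libfoo.so', 'out/bar.a'], 'lib', '.so')
- # returns ['foo'] and skips 'bar.a'.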
-
- def WriteLn(self, text=''):
- self.fp.write(text + '\n')
-
-
- def GetSortedXcodeEnv(self, additional_settings=None):
- return gyp.xcode_emulation.GetSortedXcodeEnv(
- self.xcode_settings, "$(abs_builddir)",
- os.path.join("$(abs_srcdir)", self.path), "$(BUILDTYPE)",
- additional_settings)
-
-
- def GetSortedXcodePostbuildEnv(self):
- # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
- # TODO(thakis): It would be nice to have some general mechanism instead.
- strip_save_file = self.xcode_settings.GetPerTargetSetting(
- 'CHROMIUM_STRIP_SAVE_FILE', '')
- # Even if strip_save_file is empty, explicitly write it. Else a postbuild
- # might pick up an export from an earlier target.
- return self.GetSortedXcodeEnv(
- additional_settings={'CHROMIUM_STRIP_SAVE_FILE': strip_save_file})
-
-
- def WriteSortedXcodeEnv(self, target, env):
- for k, v in env:
- # For
- # foo := a\ b
- # the escaped space does the right thing. For
- # export foo := a\ b
- # it does not -- the backslash is written to the env as a literal character.
- # So don't escape spaces in |env[k]|.
- self.WriteLn('%s: export %s := %s' % (QuoteSpaces(target), k, v))
-
-
- def Objectify(self, path):
- """Convert a path to its output directory form."""
- if '$(' in path:
- path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/' % self.toolset)
- if '$(obj)' not in path:
- path = '$(obj).%s/$(TARGET)/%s' % (self.toolset, path)
- return path
-
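- # E.g. with toolset 'target' (hypothetical paths), both
- # Objectify('$(obj)/dir/foo.o') and Objectify('dir/foo.o') return
- # '$(obj).target/$(TARGET)/dir/foo.o'.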
-
- def Pchify(self, path, lang):
- """Convert a prefix header path to its output directory form."""
- path = self.Absolutify(path)
- if '$(' in path:
- path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/pch-%s' %
- (self.toolset, lang))
- return path
- return '$(obj).%s/$(TARGET)/pch-%s/%s' % (self.toolset, lang, path)
-
-
- def Absolutify(self, path):
- """Convert a subdirectory-relative path into a base-relative path.
- Skips over paths that contain variables."""
- if '$(' in path:
- # Don't call normpath in this case, as it might collapse the
- # path too aggressively if it features '..'. However it's still
- # important to strip trailing slashes.
- return path.rstrip('/')
- return os.path.normpath(os.path.join(self.path, path))
-
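- # E.g. for a writer whose self.path is 'sub/dir' (hypothetical),
- # Absolutify('foo/../bar.c') returns 'sub/dir/bar.c', while a variable
- # path like '$(builddir)/x/' is only stripped to '$(builddir)/x'.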
-
- def ExpandInputRoot(self, template, expansion, dirname):
- if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template:
- return template
- path = template % {
- 'INPUT_ROOT': expansion,
- 'INPUT_DIRNAME': dirname,
- }
- return path
-
-
- def _InstallableTargetInstallPath(self):
- """Returns the location of the final output for an installable target."""
- # Xcode puts shared_library results into PRODUCT_DIR, and some gyp files
- # rely on this. Emulate this behavior for mac.
- if (self.type == 'shared_library' and
- (self.flavor != 'mac' or self.toolset != 'target')):
- # Install all shared libs into a common directory (per toolset) for
- # convenient access with LD_LIBRARY_PATH.
- return '$(builddir)/lib.%s/%s' % (self.toolset, self.alias)
- return '$(builddir)/' + self.alias
-
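- # E.g. on Linux (hypothetical aliases): a shared library built with the
- # 'target' toolset installs to '$(builddir)/lib.target/libfoo.so', while
- # an executable installs to '$(builddir)/foo'.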
-
-def WriteAutoRegenerationRule(params, root_makefile, makefile_name,
- build_files):
- """Write the target to regenerate the Makefile."""
- options = params['options']
- build_files_args = [gyp.common.RelativePath(filename, options.toplevel_dir)
- for filename in params['build_files_arg']]
-
- gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'],
- options.toplevel_dir)
- if not gyp_binary.startswith(os.sep):
- gyp_binary = os.path.join('.', gyp_binary)
-
- root_makefile.write(
- "quiet_cmd_regen_makefile = ACTION Regenerating $@\n"
- "cmd_regen_makefile = cd $(srcdir); %(cmd)s\n"
- "%(makefile_name)s: %(deps)s\n"
- "\t$(call do_cmd,regen_makefile)\n\n" % {
- 'makefile_name': makefile_name,
- 'deps': ' '.join(map(Sourceify, build_files)),
- 'cmd': gyp.common.EncodePOSIXShellList(
- [gyp_binary, '-fmake'] +
- gyp.RegenerateFlags(options) +
- build_files_args)})
-
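-# With a hypothetical gyp binary './gyp' and a single build file
-# 'all.gyp', the rule written above looks like:
-#   quiet_cmd_regen_makefile = ACTION Regenerating $@
-#   cmd_regen_makefile = cd $(srcdir); ./gyp -fmake <regen flags> all.gyp
-#   Makefile: all.gyp
-#     $(call do_cmd,regen_makefile)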
-
-def PerformBuild(data, configurations, params):
- options = params['options']
- for config in configurations:
- arguments = ['make']
- if options.toplevel_dir and options.toplevel_dir != '.':
- arguments += ['-C', options.toplevel_dir]
- arguments.append('BUILDTYPE=' + config)
- print 'Building [%s]: %s' % (config, arguments)
- subprocess.check_call(arguments)
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- options = params['options']
- flavor = gyp.common.GetFlavor(params)
- generator_flags = params.get('generator_flags', {})
- builddir_name = generator_flags.get('output_dir', 'out')
- android_ndk_version = generator_flags.get('android_ndk_version', None)
- default_target = generator_flags.get('default_target', 'all')
-
- def CalculateMakefilePath(build_file, base_name):
- """Determine where to write a Makefile for a given gyp file."""
- # Paths in gyp files are relative to the .gyp file, but we want
- # paths relative to the source root for the master makefile. Grab
- # the path of the .gyp file as the base to relativize against.
- # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
- base_path = gyp.common.RelativePath(os.path.dirname(build_file),
- options.depth)
- # We write the file in the base_path directory.
- output_file = os.path.join(options.depth, base_path, base_name)
- if options.generator_output:
- output_file = os.path.join(
- options.depth, options.generator_output, base_path, base_name)
- base_path = gyp.common.RelativePath(os.path.dirname(build_file),
- options.toplevel_dir)
- return base_path, output_file
-
- # TODO: search for the first non-'Default' target. This can go
- # away when we add verification that all targets have the
- # necessary configurations.
- default_configuration = None
- toolsets = set([target_dicts[target]['toolset'] for target in target_list])
- for target in target_list:
- spec = target_dicts[target]
- if spec['default_configuration'] != 'Default':
- default_configuration = spec['default_configuration']
- break
- if not default_configuration:
- default_configuration = 'Default'
-
- srcdir = '.'
- makefile_name = 'Makefile' + options.suffix
- makefile_path = os.path.join(options.toplevel_dir, makefile_name)
- if options.generator_output:
- global srcdir_prefix
- makefile_path = os.path.join(
- options.toplevel_dir, options.generator_output, makefile_name)
- srcdir = gyp.common.RelativePath(srcdir, options.generator_output)
- srcdir_prefix = '$(srcdir)/'
-
- flock_command = 'flock'
- copy_archive_arguments = '-af'
- header_params = {
- 'default_target': default_target,
- 'builddir': builddir_name,
- 'default_configuration': default_configuration,
- 'flock': flock_command,
- 'flock_index': 1,
- 'link_commands': LINK_COMMANDS_LINUX,
- 'extra_commands': '',
- 'srcdir': srcdir,
- 'copy_archive_args': copy_archive_arguments,
- }
- if flavor == 'mac':
- flock_command = './gyp-mac-tool flock'
- header_params.update({
- 'flock': flock_command,
- 'flock_index': 2,
- 'link_commands': LINK_COMMANDS_MAC,
- 'extra_commands': SHARED_HEADER_MAC_COMMANDS,
- })
- elif flavor == 'android':
- header_params.update({
- 'link_commands': LINK_COMMANDS_ANDROID,
- })
- elif flavor == 'solaris':
- header_params.update({
- 'flock': './gyp-flock-tool flock',
- 'flock_index': 2,
- })
- elif flavor == 'freebsd':
- # Note: OpenBSD has sysutils/flock. lockf seems to be FreeBSD specific.
- header_params.update({
- 'flock': 'lockf',
- })
- elif flavor == 'openbsd':
- copy_archive_arguments = '-pPRf'
- header_params.update({
- 'copy_archive_args': copy_archive_arguments,
- })
- elif flavor == 'aix':
- copy_archive_arguments = '-pPRf'
- header_params.update({
- 'copy_archive_args': copy_archive_arguments,
- 'link_commands': LINK_COMMANDS_AIX,
- 'flock': './gyp-flock-tool flock',
- 'flock_index': 2,
- })
-
- header_params.update({
- 'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'),
- 'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'),
- 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'),
- 'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'),
- 'CC.host': GetEnvironFallback(('CC_host',), 'gcc'),
- 'AR.host': GetEnvironFallback(('AR_host',), 'ar'),
- 'CXX.host': GetEnvironFallback(('CXX_host',), 'g++'),
- 'LINK.host': GetEnvironFallback(('LINK_host',), '$(CXX.host)'),
- })
-
- build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
- make_global_settings_array = data[build_file].get('make_global_settings', [])
- wrappers = {}
- for key, value in make_global_settings_array:
- if key.endswith('_wrapper'):
- wrappers[key[:-len('_wrapper')]] = '$(abspath %s)' % value
- make_global_settings = ''
- for key, value in make_global_settings_array:
- if re.match('.*_wrapper', key):
- continue
- if value[0] != '$':
- value = '$(abspath %s)' % value
- wrapper = wrappers.get(key)
- if wrapper:
- value = '%s %s' % (wrapper, value)
- del wrappers[key]
- if key in ('CC', 'CC.host', 'CXX', 'CXX.host'):
- make_global_settings += (
- 'ifneq (,$(filter $(origin %s), undefined default))\n' % key)
- # Let gyp-time envvars win over global settings.
- env_key = key.replace('.', '_') # CC.host -> CC_host
- if env_key in os.environ:
- value = os.environ[env_key]
- make_global_settings += ' %s = %s\n' % (key, value)
- make_global_settings += 'endif\n'
- else:
- make_global_settings += '%s ?= %s\n' % (key, value)
- # TODO(ukai): define cmd when only wrapper is specified in
- # make_global_settings.
-
- header_params['make_global_settings'] = make_global_settings
-
- gyp.common.EnsureDirExists(makefile_path)
- root_makefile = open(makefile_path, 'w')
- root_makefile.write(SHARED_HEADER % header_params)
- # Currently all versions have the same effect, but in the future the
- # behavior could differ.
- if android_ndk_version:
- root_makefile.write(
- '# Define LOCAL_PATH for build of Android applications.\n'
- 'LOCAL_PATH := $(call my-dir)\n'
- '\n')
- for toolset in toolsets:
- root_makefile.write('TOOLSET := %s\n' % toolset)
- WriteRootHeaderSuffixRules(root_makefile)
-
- # Put build-time support tools next to the root Makefile.
- dest_path = os.path.dirname(makefile_path)
- gyp.common.CopyTool(flavor, dest_path)
-
- # Find the list of targets that derive from the gyp file(s) being built.
- needed_targets = set()
- for build_file in params['build_files']:
- for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
- needed_targets.add(target)
-
- build_files = set()
- include_list = set()
- for qualified_target in target_list:
- build_file, target, toolset = gyp.common.ParseQualifiedTarget(
- qualified_target)
-
- this_make_global_settings = data[build_file].get('make_global_settings', [])
- assert make_global_settings_array == this_make_global_settings, (
- "make_global_settings needs to be the same for all targets. %s vs. %s" %
- (this_make_global_settings, make_global_settings_array))
-
- build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir))
- included_files = data[build_file]['included_files']
- for included_file in included_files:
- # The included_files entries are relative to the dir of the build file
- # that included them, so we have to undo that and then make them relative
- # to the root dir.
- relative_include_file = gyp.common.RelativePath(
- gyp.common.UnrelativePath(included_file, build_file),
- options.toplevel_dir)
- abs_include_file = os.path.abspath(relative_include_file)
- # If the include file is from the ~/.gyp dir, we should use an absolute path
- # so that relocating the src dir doesn't break the path.
- if (params['home_dot_gyp'] and
- abs_include_file.startswith(params['home_dot_gyp'])):
- build_files.add(abs_include_file)
- else:
- build_files.add(relative_include_file)
-
- base_path, output_file = CalculateMakefilePath(build_file,
- target + '.' + toolset + options.suffix + '.mk')
-
- spec = target_dicts[qualified_target]
- configs = spec['configurations']
-
- if flavor == 'mac':
- gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
-
- writer = MakefileWriter(generator_flags, flavor)
- writer.Write(qualified_target, base_path, output_file, spec, configs,
- part_of_all=qualified_target in needed_targets)
-
- # Our root_makefile lives at the source root. Compute the relative path
- # from there to the output_file for including.
- mkfile_rel_path = gyp.common.RelativePath(output_file,
- os.path.dirname(makefile_path))
- include_list.add(mkfile_rel_path)
-
- # Write out per-gyp (sub-project) Makefiles.
- depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd())
- for build_file in build_files:
- # The paths in build_files were relativized above, so undo that before
- # testing against the non-relativized items in target_list and before
- # calculating the Makefile path.
- build_file = os.path.join(depth_rel_path, build_file)
- gyp_targets = [target_dicts[target]['target_name'] for target in target_list
- if target.startswith(build_file) and
- target in needed_targets]
- # Only generate Makefiles for gyp files with targets.
- if not gyp_targets:
- continue
- base_path, output_file = CalculateMakefilePath(build_file,
- os.path.splitext(os.path.basename(build_file))[0] + '.Makefile')
- makefile_rel_path = gyp.common.RelativePath(os.path.dirname(makefile_path),
- os.path.dirname(output_file))
- writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets,
- builddir_name)
-
-
- # Write out the sorted list of includes.
- root_makefile.write('\n')
- for include_file in sorted(include_list):
- # We wrap each .mk include in an if statement so users can tell make to
- # not load a file by setting NO_LOAD. The make code below loads a
- # .mk file only if its filename does not start with a token listed in
- # NO_LOAD.
- root_makefile.write(
- "ifeq ($(strip $(foreach prefix,$(NO_LOAD),\\\n"
- " $(findstring $(join ^,$(prefix)),\\\n"
- " $(join ^," + include_file + ")))),)\n")
- root_makefile.write(" include " + include_file + "\n")
- root_makefile.write("endif\n")
- root_makefile.write('\n')
-
- if (not generator_flags.get('standalone')
- and generator_flags.get('auto_regeneration', True)):
- WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files)
-
- root_makefile.write(SHARED_FOOTER)
-
- root_makefile.close()
diff --git a/deps/gyp/pylib/gyp/generator/msvs.py b/deps/gyp/pylib/gyp/generator/msvs.py
deleted file mode 100644
index 44cc1304a2..0000000000
--- a/deps/gyp/pylib/gyp/generator/msvs.py
+++ /dev/null
@@ -1,3453 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import copy
-import ntpath
-import os
-import posixpath
-import re
-import subprocess
-import sys
-
-import gyp.common
-import gyp.easy_xml as easy_xml
-import gyp.generator.ninja as ninja_generator
-import gyp.MSVSNew as MSVSNew
-import gyp.MSVSProject as MSVSProject
-import gyp.MSVSSettings as MSVSSettings
-import gyp.MSVSToolFile as MSVSToolFile
-import gyp.MSVSUserFile as MSVSUserFile
-import gyp.MSVSUtil as MSVSUtil
-import gyp.MSVSVersion as MSVSVersion
-from gyp.common import GypError
-from gyp.common import OrderedSet
-
-# TODO: Remove once bots are on 2.7, http://crbug.com/241769
-def _import_OrderedDict():
- import collections
- try:
- return collections.OrderedDict
- except AttributeError:
- import gyp.ordered_dict
- return gyp.ordered_dict.OrderedDict
-OrderedDict = _import_OrderedDict()
-
-
-# Regular expression for validating Visual Studio GUIDs. If the GUID
-# contains lowercase hex letters, MSVS will be fine. However,
-# IncrediBuild BuildConsole will parse the solution file, but then
- # silently skip building the target, causing hard-to-track-down errors.
-# Note that this only happens with the BuildConsole, and does not occur
-# if IncrediBuild is executed from inside Visual Studio. This regex
-# validates that the string looks like a GUID with all uppercase hex
-# letters.
-VALID_MSVS_GUID_CHARS = re.compile(r'^[A-F0-9\-]+$')
-
-
-generator_default_variables = {
- 'EXECUTABLE_PREFIX': '',
- 'EXECUTABLE_SUFFIX': '.exe',
- 'STATIC_LIB_PREFIX': '',
- 'SHARED_LIB_PREFIX': '',
- 'STATIC_LIB_SUFFIX': '.lib',
- 'SHARED_LIB_SUFFIX': '.dll',
- 'INTERMEDIATE_DIR': '$(IntDir)',
- 'SHARED_INTERMEDIATE_DIR': '$(OutDir)obj/global_intermediate',
- 'OS': 'win',
- 'PRODUCT_DIR': '$(OutDir)',
- 'LIB_DIR': '$(OutDir)lib',
- 'RULE_INPUT_ROOT': '$(InputName)',
- 'RULE_INPUT_DIRNAME': '$(InputDir)',
- 'RULE_INPUT_EXT': '$(InputExt)',
- 'RULE_INPUT_NAME': '$(InputFileName)',
- 'RULE_INPUT_PATH': '$(InputPath)',
- 'CONFIGURATION_NAME': '$(ConfigurationName)',
-}
-
-
-# The msvs specific sections that hold paths
-generator_additional_path_sections = [
- 'msvs_cygwin_dirs',
- 'msvs_props',
-]
-
-
-generator_additional_non_configuration_keys = [
- 'msvs_cygwin_dirs',
- 'msvs_cygwin_shell',
- 'msvs_large_pdb',
- 'msvs_shard',
- 'msvs_external_builder',
- 'msvs_external_builder_out_dir',
- 'msvs_external_builder_build_cmd',
- 'msvs_external_builder_clean_cmd',
- 'msvs_external_builder_clcompile_cmd',
- 'msvs_enable_winrt',
- 'msvs_requires_importlibrary',
- 'msvs_enable_winphone',
- 'msvs_application_type_revision',
- 'msvs_target_platform_version',
- 'msvs_target_platform_minversion',
-]
-
-
-# List of precompiled header related keys.
-precomp_keys = [
- 'msvs_precompiled_header',
- 'msvs_precompiled_source',
-]
-
-
-cached_username = None
-
-
-cached_domain = None
-
-
-# TODO(gspencer): Switch the os.environ calls to be
-# win32api.GetDomainName() and win32api.GetUserName() once the
-# python version in depot_tools has been updated to work on Vista
-# 64-bit.
-def _GetDomainAndUserName():
- if sys.platform not in ('win32', 'cygwin'):
- return ('DOMAIN', 'USERNAME')
- global cached_username
- global cached_domain
- if not cached_domain or not cached_username:
- domain = os.environ.get('USERDOMAIN')
- username = os.environ.get('USERNAME')
- if not domain or not username:
- call = subprocess.Popen(['net', 'config', 'Workstation'],
- stdout=subprocess.PIPE)
- config = call.communicate()[0]
- username_re = re.compile(r'^User name\s+(\S+)', re.MULTILINE)
- username_match = username_re.search(config)
- if username_match:
- username = username_match.group(1)
- domain_re = re.compile(r'^Logon domain\s+(\S+)', re.MULTILINE)
- domain_match = domain_re.search(config)
- if domain_match:
- domain = domain_match.group(1)
- cached_domain = domain
- cached_username = username
- return (cached_domain, cached_username)
-
-fixpath_prefix = None
-
-
-def _NormalizedSource(source):
- """Normalize the path.
-
- But not if that gets rid of a variable, as this may expand to something
- larger than one directory.
-
- Arguments:
- source: The path to be normalized.
-
- Returns:
- The normalized path.
- """
- normalized = os.path.normpath(source)
- if source.count('$') == normalized.count('$'):
- source = normalized
- return source
-
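-# E.g. _NormalizedSource('a/../b.c') returns 'b.c', but
-# _NormalizedSource('$(Var)/../b.c') returns its input unchanged, since
-# normalizing it would drop the '$' variable reference.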
-
-def _FixPath(path):
- """Convert paths to a form that will make sense in a vcproj file.
-
- Arguments:
- path: The path to convert, may contain / etc.
- Returns:
- The path with all slashes made into backslashes.
- """
- if fixpath_prefix and path and not os.path.isabs(path) and path[0] != '$':
- path = os.path.join(fixpath_prefix, path)
- path = path.replace('/', '\\')
- path = _NormalizedSource(path)
- if path and path[-1] == '\\':
- path = path[:-1]
- return path
-
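-# E.g. with no fixpath_prefix set, _FixPath('foo/bar/') returns the
-# backslash-separated path 'foo\bar'; with a hypothetical fixpath_prefix
-# of '..\proj', it returns '..\proj\foo\bar'.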
-
-def _FixPaths(paths):
- """Fix each of the paths of the list."""
- return [_FixPath(i) for i in paths]
-
-
-def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
- list_excluded=True, msvs_version=None):
- """Converts a list split source file paths into a vcproj folder hierarchy.
-
- Arguments:
- sources: A list of source file paths split.
- prefix: A list of source file path layers meant to apply to each of sources.
- excluded: A set of excluded files.
- msvs_version: A MSVSVersion object.
-
- Returns:
- A hierarchy of filenames and MSVSProject.Filter objects that matches the
- layout of the source tree.
- For example:
- _ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']],
- prefix=['joe'])
- -->
- [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
- MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
- """
- if not prefix: prefix = []
- result = []
- excluded_result = []
- folders = OrderedDict()
- # Gather files into the final result, excluded, or folders.
- for s in sources:
- if len(s) == 1:
- filename = _NormalizedSource('\\'.join(prefix + s))
- if filename in excluded:
- excluded_result.append(filename)
- else:
- result.append(filename)
- elif msvs_version and not msvs_version.UsesVcxproj():
- # For MSVS 2008 and earlier, we need to process all files before walking
- # the sub folders.
- if not folders.get(s[0]):
- folders[s[0]] = []
- folders[s[0]].append(s[1:])
- else:
- contents = _ConvertSourcesToFilterHierarchy([s[1:]], prefix + [s[0]],
- excluded=excluded,
- list_excluded=list_excluded,
- msvs_version=msvs_version)
- contents = MSVSProject.Filter(s[0], contents=contents)
- result.append(contents)
- # Add a folder for excluded files.
- if excluded_result and list_excluded:
- excluded_folder = MSVSProject.Filter('_excluded_files',
- contents=excluded_result)
- result.append(excluded_folder)
-
- if msvs_version and msvs_version.UsesVcxproj():
- return result
-
- # Populate all the folders.
- for f in folders:
- contents = _ConvertSourcesToFilterHierarchy(folders[f], prefix=prefix + [f],
- excluded=excluded,
- list_excluded=list_excluded,
- msvs_version=msvs_version)
- contents = MSVSProject.Filter(f, contents=contents)
- result.append(contents)
- return result
-
-
-def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False):
- if not value: return
- _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset)
-
-
-def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
- # TODO(bradnelson): ugly hack, fix this more generally!!!
- if 'Directories' in setting or 'Dependencies' in setting:
- if type(value) == str:
- value = value.replace('/', '\\')
- else:
- value = [i.replace('/', '\\') for i in value]
- if not tools.get(tool_name):
- tools[tool_name] = dict()
- tool = tools[tool_name]
- if tool.get(setting):
- if only_if_unset: return
- if type(tool[setting]) == list and type(value) == list:
- tool[setting] += value
- else:
- raise TypeError(
- 'Appending "%s" to a non-list setting "%s" for tool "%s" is '
- 'not allowed, previous value: %s' % (
- value, setting, tool_name, str(tool[setting])))
- else:
- tool[setting] = value
-
-
-def _ConfigPlatform(config_data):
- return config_data.get('msvs_configuration_platform', 'Win32')
-
-
-def _ConfigBaseName(config_name, platform_name):
- if config_name.endswith('_' + platform_name):
- return config_name[0:-len(platform_name) - 1]
- else:
- return config_name
-
-
-def _ConfigFullName(config_name, config_data):
- platform_name = _ConfigPlatform(config_data)
- return '%s|%s' % (_ConfigBaseName(config_name, platform_name), platform_name)
-
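-# E.g. config_name 'Debug_x64' with msvs_configuration_platform 'x64'
-# yields 'Debug|x64', while a plain 'Debug' with the default platform
-# yields 'Debug|Win32'.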
-
-def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
- quote_cmd, do_setup_env):
-
- if any('$(InputDir)' in x for x in cmd):
- input_dir_preamble = (
- 'set INPUTDIR=$(InputDir)\n'
- 'if NOT DEFINED INPUTDIR set INPUTDIR=.\\\n'
- 'set INPUTDIR=%INPUTDIR:~0,-1%\n'
- )
- else:
- input_dir_preamble = ''
-
- if cygwin_shell:
- # Find path to cygwin.
- cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0])
- # Prepare command.
- direct_cmd = cmd
- direct_cmd = [i.replace('$(IntDir)',
- '`cygpath -m "${INTDIR}"`') for i in direct_cmd]
- direct_cmd = [i.replace('$(OutDir)',
- '`cygpath -m "${OUTDIR}"`') for i in direct_cmd]
- direct_cmd = [i.replace('$(InputDir)',
- '`cygpath -m "${INPUTDIR}"`') for i in direct_cmd]
- if has_input_path:
- direct_cmd = [i.replace('$(InputPath)',
- '`cygpath -m "${INPUTPATH}"`')
- for i in direct_cmd]
- direct_cmd = ['\\"%s\\"' % i.replace('"', '\\\\\\"') for i in direct_cmd]
- # direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd)
- direct_cmd = ' '.join(direct_cmd)
- # TODO(quote): regularize quoting path names throughout the module
- cmd = ''
- if do_setup_env:
- cmd += 'call "$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && '
- cmd += 'set CYGWIN=nontsec&& '
- if 'NUMBER_OF_PROCESSORS' in direct_cmd:
- cmd += 'set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& '
- if 'INTDIR' in direct_cmd:
- cmd += 'set INTDIR=$(IntDir)&& '
- if 'OUTDIR' in direct_cmd:
- cmd += 'set OUTDIR=$(OutDir)&& '
- if has_input_path and 'INPUTPATH' in direct_cmd:
- cmd += 'set INPUTPATH=$(InputPath) && '
- cmd += 'bash -c "%(cmd)s"'
- cmd = cmd % {'cygwin_dir': cygwin_dir,
- 'cmd': direct_cmd}
- return input_dir_preamble + cmd
- else:
- # Convert cat --> type to mimic unix.
- if cmd[0] == 'cat':
- command = ['type']
- else:
- command = [cmd[0].replace('/', '\\')]
- # Add call before command to ensure that commands can be tied together one
- # after the other without aborting in Incredibuild, since IB makes a bat
- # file out of the raw command string, and some commands (like python) are
- # actually batch files themselves.
- command.insert(0, 'call')
- # Fix the paths
- # TODO(quote): This is a really ugly heuristic, and will miss path fixing
- # for arguments like "--arg=path" or "/opt:path".
- # If the argument starts with a slash or dash, it's probably a command line
- # switch
- arguments = [i if (i[:1] in "/-") else _FixPath(i) for i in cmd[1:]]
- arguments = [i.replace('$(InputDir)', '%INPUTDIR%') for i in arguments]
- arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments]
- if quote_cmd:
- # Support a mode for using cmd directly.
- # Convert any paths to native form (first element is used directly).
- # TODO(quote): regularize quoting path names throughout the module
- arguments = ['"%s"' % i for i in arguments]
- # Collapse into a single command.
- return input_dir_preamble + ' '.join(command + arguments)
-
-
-def _BuildCommandLineForRule(spec, rule, has_input_path, do_setup_env):
- # Currently this weird argument munging is used to duplicate the way a
- # python script would need to be run as part of the chrome tree.
- # Eventually we should add some sort of rule_default option to set this
- # per project. For now the behavior chrome needs is the default.
- mcs = rule.get('msvs_cygwin_shell')
- if mcs is None:
- mcs = int(spec.get('msvs_cygwin_shell', 1))
- elif isinstance(mcs, str):
- mcs = int(mcs)
- quote_cmd = int(rule.get('msvs_quote_cmd', 1))
- return _BuildCommandLineForRuleRaw(spec, rule['action'], mcs, has_input_path,
- quote_cmd, do_setup_env=do_setup_env)
-
-
-def _AddActionStep(actions_dict, inputs, outputs, description, command):
- """Merge action into an existing list of actions.
-
- Care must be taken so that actions which have overlapping inputs either don't
- get assigned to the same input, or get collapsed into one.
-
- Arguments:
- actions_dict: dictionary keyed on input name, which maps to a list of
- dicts describing the actions attached to that input file.
- inputs: list of inputs
- outputs: list of outputs
- description: description of the action
- command: command line to execute
- """
- # Require there to be at least one input (call sites will ensure this).
- assert inputs
-
- action = {
- 'inputs': inputs,
- 'outputs': outputs,
- 'description': description,
- 'command': command,
- }
-
- # Pick where to stick this action.
- # While less than optimal in terms of build time, attach it to the first
- # input for now.
- chosen_input = inputs[0]
-
- # Add it there.
- if chosen_input not in actions_dict:
- actions_dict[chosen_input] = []
- actions_dict[chosen_input].append(action)
-
-
-def _AddCustomBuildToolForMSVS(p, spec, primary_input,
- inputs, outputs, description, cmd):
- """Add a custom build tool to execute something.
-
- Arguments:
- p: the target project
- spec: the target project dict
- primary_input: input file to attach the build tool to
- inputs: list of inputs
- outputs: list of outputs
- description: description of the action
- cmd: command line to execute
- """
- inputs = _FixPaths(inputs)
- outputs = _FixPaths(outputs)
- tool = MSVSProject.Tool(
- 'VCCustomBuildTool',
- {'Description': description,
- 'AdditionalDependencies': ';'.join(inputs),
- 'Outputs': ';'.join(outputs),
- 'CommandLine': cmd,
- })
- # Add to the properties of primary input for each config.
- for config_name, c_data in spec['configurations'].iteritems():
- p.AddFileConfig(_FixPath(primary_input),
- _ConfigFullName(config_name, c_data), tools=[tool])
-
-
-def _AddAccumulatedActionsToMSVS(p, spec, actions_dict):
- """Add actions accumulated into an actions_dict, merging as needed.
-
- Arguments:
- p: the target project
- spec: the target project dict
- actions_dict: dictionary keyed on input name, which maps to a list of
- dicts describing the actions attached to that input file.
- """
- for primary_input in actions_dict:
- inputs = OrderedSet()
- outputs = OrderedSet()
- descriptions = []
- commands = []
- for action in actions_dict[primary_input]:
- inputs.update(OrderedSet(action['inputs']))
- outputs.update(OrderedSet(action['outputs']))
- descriptions.append(action['description'])
- commands.append(action['command'])
- # Add the custom build step for one input file.
- description = ', and also '.join(descriptions)
- command = '\r\n'.join(commands)
- _AddCustomBuildToolForMSVS(p, spec,
- primary_input=primary_input,
- inputs=inputs,
- outputs=outputs,
- description=description,
- cmd=command)
-
-
-def _RuleExpandPath(path, input_file):
- """Given the input file to which a rule applied, string substitute a path.
-
- Arguments:
- path: a path to string expand
- input_file: the file to which the rule applied.
- Returns:
- The string substituted path.
- """
- path = path.replace('$(InputName)',
- os.path.splitext(os.path.split(input_file)[1])[0])
- path = path.replace('$(InputDir)', os.path.dirname(input_file))
- path = path.replace('$(InputExt)',
- os.path.splitext(os.path.split(input_file)[1])[1])
- path = path.replace('$(InputFileName)', os.path.split(input_file)[1])
- path = path.replace('$(InputPath)', input_file)
- return path
-
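-# E.g. for input_file 'dir/foo.idl' (hypothetical),
-# _RuleExpandPath('$(InputName).h', ...) returns 'foo.h' and
-# _RuleExpandPath('$(InputDir)/gen.cc', ...) returns 'dir/gen.cc'.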
-
-def _FindRuleTriggerFiles(rule, sources):
- """Find the list of files which a particular rule applies to.
-
- Arguments:
- rule: the rule in question
- sources: the set of all known source files for this project
- Returns:
- The list of sources that trigger a particular rule.
- """
- return rule.get('rule_sources', [])
-
-
-def _RuleInputsAndOutputs(rule, trigger_file):
- """Find the inputs and outputs generated by a rule.
-
- Arguments:
- rule: the rule in question.
- trigger_file: the main trigger for this rule.
- Returns:
- The pair of (inputs, outputs) involved in this rule.
- """
- raw_inputs = _FixPaths(rule.get('inputs', []))
- raw_outputs = _FixPaths(rule.get('outputs', []))
- inputs = OrderedSet()
- outputs = OrderedSet()
- inputs.add(trigger_file)
- for i in raw_inputs:
- inputs.add(_RuleExpandPath(i, trigger_file))
- for o in raw_outputs:
- outputs.add(_RuleExpandPath(o, trigger_file))
- return (inputs, outputs)
-
-
-def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options):
- """Generate a native rules file.
-
- Arguments:
- p: the target project
- rules: the set of rules to include
- output_dir: the directory in which the project/gyp resides
- spec: the project dict
- options: global generator options
- """
- rules_filename = '%s%s.rules' % (spec['target_name'],
- options.suffix)
- rules_file = MSVSToolFile.Writer(os.path.join(output_dir, rules_filename),
- spec['target_name'])
- # Add each rule.
- for r in rules:
- rule_name = r['rule_name']
- rule_ext = r['extension']
- inputs = _FixPaths(r.get('inputs', []))
- outputs = _FixPaths(r.get('outputs', []))
- # Skip a rule with no action and no inputs.
- if 'action' not in r and not r.get('rule_sources', []):
- continue
- cmd = _BuildCommandLineForRule(spec, r, has_input_path=True,
- do_setup_env=True)
- rules_file.AddCustomBuildRule(name=rule_name,
- description=r.get('message', rule_name),
- extensions=[rule_ext],
- additional_dependencies=inputs,
- outputs=outputs,
- cmd=cmd)
- # Write out rules file.
- rules_file.WriteIfChanged()
-
- # Add rules file to project.
- p.AddToolFile(rules_filename)
-
-
-def _Cygwinify(path):
- path = path.replace('$(OutDir)', '$(OutDirCygwin)')
- path = path.replace('$(IntDir)', '$(IntDirCygwin)')
- return path
-
-
-def _GenerateExternalRules(rules, output_dir, spec,
- sources, options, actions_to_add):
- """Generate an external makefile to do a set of rules.
-
- Arguments:
- rules: the list of rules to include
- output_dir: path containing project and gyp files
- spec: project specification data
- sources: set of sources known
- options: global generator options
- actions_to_add: The list of actions we will add to.
- """
- filename = '%s_rules%s.mk' % (spec['target_name'], options.suffix)
- mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename))
- # Find cygwin style versions of some paths.
- mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n')
- mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n')
- # Gather stuff needed to emit all: target.
- all_inputs = OrderedSet()
- all_outputs = OrderedSet()
- all_output_dirs = OrderedSet()
- first_outputs = []
- for rule in rules:
- trigger_files = _FindRuleTriggerFiles(rule, sources)
- for tf in trigger_files:
- inputs, outputs = _RuleInputsAndOutputs(rule, tf)
- all_inputs.update(OrderedSet(inputs))
- all_outputs.update(OrderedSet(outputs))
- # Only use one target from each rule as the dependency for
- # 'all' so we don't try to build each rule multiple times.
- first_outputs.append(list(outputs)[0])
- # Get the unique output directories for this rule.
- output_dirs = [os.path.split(i)[0] for i in outputs]
- for od in output_dirs:
- all_output_dirs.add(od)
- first_outputs_cyg = [_Cygwinify(i) for i in first_outputs]
- # Write out all: target, including mkdir for each output directory.
- mk_file.write('all: %s\n' % ' '.join(first_outputs_cyg))
- for od in all_output_dirs:
- if od:
- mk_file.write('\tmkdir -p `cygpath -u "%s"`\n' % od)
- mk_file.write('\n')
- # Define how each output is generated.
- for rule in rules:
- trigger_files = _FindRuleTriggerFiles(rule, sources)
- for tf in trigger_files:
- # Get all the inputs and outputs for this rule for this trigger file.
- inputs, outputs = _RuleInputsAndOutputs(rule, tf)
- inputs = [_Cygwinify(i) for i in inputs]
- outputs = [_Cygwinify(i) for i in outputs]
- # Prepare the command line for this rule.
- cmd = [_RuleExpandPath(c, tf) for c in rule['action']]
- cmd = ['"%s"' % i for i in cmd]
- cmd = ' '.join(cmd)
- # Add it to the makefile.
- mk_file.write('%s: %s\n' % (' '.join(outputs), ' '.join(inputs)))
- mk_file.write('\t%s\n\n' % cmd)
- # Close up the file.
- mk_file.close()
-
- # Add makefile to list of sources.
- sources.add(filename)
- # Add a build action to call makefile.
- cmd = ['make',
- 'OutDir=$(OutDir)',
- 'IntDir=$(IntDir)',
- '-j', '${NUMBER_OF_PROCESSORS_PLUS_1}',
- '-f', filename]
- cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True, True)
- # Insert makefile as the 0th input, so it gets the action attached there,
- # as this is easier to understand from within the IDE.
- all_inputs = list(all_inputs)
- all_inputs.insert(0, filename)
- _AddActionStep(actions_to_add,
- inputs=_FixPaths(all_inputs),
- outputs=_FixPaths(all_outputs),
- description='Running external rules for %s' %
- spec['target_name'],
- command=cmd)
-
-
-def _EscapeEnvironmentVariableExpansion(s):
- """Escapes % characters.
-
- Escapes any % characters so that Windows-style environment variable
- expansions will leave them alone.
- See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile
- to understand why we have to do this.
-
- Args:
- s: The string to be escaped.
-
- Returns:
- The escaped string.
- """
- s = s.replace('%', '%%')
- return s
-
-
-quote_replacer_regex = re.compile(r'(\\*)"')
-
-
-def _EscapeCommandLineArgumentForMSVS(s):
- """Escapes a Windows command-line argument.
-
- The escaping is done so that the Win32 CommandLineToArgv function will
- turn the escaped result back into the original string.
- See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
- ("Parsing C++ Command-Line Arguments") to understand why we have to do
- this.
-
- Args:
- s: the string to be escaped.
- Returns:
- the escaped string.
- """
-
- def _Replace(match):
- # For a literal quote, CommandLineToArgv requires an odd number of
- # backslashes preceding it, and it produces half as many literal backslashes
- # (rounded down). So we need to produce 2n+1 backslashes.
- return 2 * match.group(1) + '\\"'
-
- # Escape all quotes so that they are interpreted literally.
- s = quote_replacer_regex.sub(_Replace, s)
- # Now add unescaped quotes so that any whitespace is interpreted literally.
- s = '"' + s + '"'
- return s
-
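-# E.g. _EscapeCommandLineArgumentForMSVS('say "hi"') returns
-# '"say \"hi\""': each embedded quote gains an odd number of preceding
-# backslashes and the whole argument is then re-quoted.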
-
-delimiters_replacer_regex = re.compile(r'(\\*)([,;]+)')
-
-
-def _EscapeVCProjCommandLineArgListItem(s):
- """Escapes command line arguments for MSVS.
-
- The VCProj format stores string lists in a single string using commas and
- semi-colons as separators, which must be quoted if they are to be
- interpreted literally. However, command-line arguments may already have
- quotes, and the VCProj parser is ignorant of the backslash escaping
- convention used by CommandLineToArgv, so the command-line quotes and the
- VCProj quotes may not be the same quotes. So to store a general
- command-line argument in a VCProj list, we need to parse the existing
- quoting according to VCProj's convention and quote any delimiters that are
- not already quoted by that convention. The quotes that we add will also be
- seen by CommandLineToArgv, so if backslashes precede them then we also have
- to escape those backslashes according to the CommandLineToArgv
- convention.
-
- Args:
- s: the string to be escaped.
- Returns:
- the escaped string.
- """
-
- def _Replace(match):
- # For a non-literal quote, CommandLineToArgv requires an even number of
- # backslashes preceding it, and it produces half as many literal
- # backslashes. So we need to produce 2n backslashes.
- return 2 * match.group(1) + '"' + match.group(2) + '"'
-
- segments = s.split('"')
- # The unquoted segments are at the even-numbered indices.
- for i in range(0, len(segments), 2):
- segments[i] = delimiters_replacer_regex.sub(_Replace, segments[i])
- # Concatenate back into a single string
- s = '"'.join(segments)
- if len(segments) % 2 == 0:
- # String ends while still quoted according to VCProj's convention. This
- # means the delimiter and the next list item that follow this one in the
- # .vcproj file will be misinterpreted as part of this item. There is nothing
- # we can do about this. Adding an extra quote would correct the problem in
- # the VCProj but cause the same problem on the final command-line. Moving
- # the item to the end of the list does work, but that's only possible if
- # there's only one such item. Let's just warn the user.
- print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of ' +
- 'quotes in ' + s)
- return s
-
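- # Example: _EscapeVCProjCommandLineArgListItem('-DFOO=a,b') returns
- # '-DFOO=a","b'; the comma is quoted so VCProj's list parser keeps it inside
- # this item instead of treating it as a separator.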
-
-def _EscapeCppDefineForMSVS(s):
- """Escapes a CPP define so that it will reach the compiler unaltered."""
- s = _EscapeEnvironmentVariableExpansion(s)
- s = _EscapeCommandLineArgumentForMSVS(s)
- s = _EscapeVCProjCommandLineArgListItem(s)
- # cl.exe replaces literal # characters with = in preprocessor definitions for
- # some reason. Octal-encode to work around that.
- s = s.replace('#', '\\%03o' % ord('#'))
- return s
-
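- # Example: _EscapeCppDefineForMSVS('FOO#BAR') returns '"FOO\043BAR"': the
- # define is quoted for CommandLineToArgv and the '#' is octal-encoded so that
- # cl.exe does not rewrite it as '='.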
-
-quote_replacer_regex2 = re.compile(r'(\\+)"')
-
-
-def _EscapeCommandLineArgumentForMSBuild(s):
- """Escapes a Windows command-line argument for use by MSBuild."""
-
- def _Replace(match):
- return (len(match.group(1)) / 2 * 4) * '\\' + '\\"'
-
- # Escape all quotes so that they are interpreted literally.
- s = quote_replacer_regex2.sub(_Replace, s)
- return s
-
-
-def _EscapeMSBuildSpecialCharacters(s):
- escape_dictionary = {
- '%': '%25',
- '$': '%24',
- '@': '%40',
- "'": '%27',
- ';': '%3B',
- '?': '%3F',
- '*': '%2A'
- }
- result = ''.join([escape_dictionary.get(c, c) for c in s])
- return result
-
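- # Example: _EscapeMSBuildSpecialCharacters('$(var);%x') returns
- # '%24(var)%3B%25x', i.e. each MSBuild metacharacter becomes a %XX escape.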
-
-def _EscapeCppDefineForMSBuild(s):
- """Escapes a CPP define so that it will reach the compiler unaltered."""
- s = _EscapeEnvironmentVariableExpansion(s)
- s = _EscapeCommandLineArgumentForMSBuild(s)
- s = _EscapeMSBuildSpecialCharacters(s)
- # cl.exe replaces literal # characters with = in preprocessor definitions for
- # some reason. Octal-encode to work around that.
- s = s.replace('#', '\\%03o' % ord('#'))
- return s
-
-
-def _GenerateRulesForMSVS(p, output_dir, options, spec,
- sources, excluded_sources,
- actions_to_add):
- """Generate all the rules for a particular project.
-
- Arguments:
- p: the project
- output_dir: directory to emit rules to
- options: global options passed to the generator
- spec: the specification for this project
- sources: the set of all known source files in this project
- excluded_sources: the set of sources excluded from normal processing
- actions_to_add: deferred list of actions to add in
- """
- rules = spec.get('rules', [])
- rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
- rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]
-
- # Handle rules that use a native rules file.
- if rules_native:
- _GenerateNativeRulesForMSVS(p, rules_native, output_dir, spec, options)
-
- # Handle external rules (non-native rules).
- if rules_external:
- _GenerateExternalRules(rules_external, output_dir, spec,
- sources, options, actions_to_add)
- _AdjustSourcesForRules(rules, sources, excluded_sources, False)
-
-
-def _AdjustSourcesForRules(rules, sources, excluded_sources, is_msbuild):
- # Add outputs generated by each rule (if applicable).
- for rule in rules:
- # Add in the outputs from this rule.
- trigger_files = _FindRuleTriggerFiles(rule, sources)
- for trigger_file in trigger_files:
- # Remove trigger_file from excluded_sources to let the rule be triggered
- # (e.g. rule trigger ax_enums.idl is added to excluded_sources
- # because it's also in an action's inputs in the same project)
- excluded_sources.discard(_FixPath(trigger_file))
- # Done if not processing outputs as sources.
- if int(rule.get('process_outputs_as_sources', False)):
- inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
- inputs = OrderedSet(_FixPaths(inputs))
- outputs = OrderedSet(_FixPaths(outputs))
- inputs.remove(_FixPath(trigger_file))
- sources.update(inputs)
- if not is_msbuild:
- excluded_sources.update(inputs)
- sources.update(outputs)
-
-
-def _FilterActionsFromExcluded(excluded_sources, actions_to_add):
- """Take inputs with actions attached out of the list of exclusions.
-
- Arguments:
- excluded_sources: list of source files not to be built.
- actions_to_add: dict of actions keyed on source file they're attached to.
- Returns:
- excluded_sources with files that have actions attached removed.
- """
- must_keep = OrderedSet(_FixPaths(actions_to_add.keys()))
- return [s for s in excluded_sources if s not in must_keep]
-
-
-def _GetDefaultConfiguration(spec):
- return spec['configurations'][spec['default_configuration']]
-
-
-def _GetGuidOfProject(proj_path, spec):
- """Get the guid for the project.
-
- Arguments:
- proj_path: Path of the vcproj or vcxproj file to generate.
- spec: The target dictionary containing the properties of the target.
- Returns:
- the guid.
- Raises:
- ValueError: if the specified GUID is invalid.
- """
- # Pluck out the default configuration.
- default_config = _GetDefaultConfiguration(spec)
- # Decide the guid of the project.
- guid = default_config.get('msvs_guid')
- if guid:
- if VALID_MSVS_GUID_CHARS.match(guid) is None:
- raise ValueError('Invalid MSVS guid: "%s". Must match regex: "%s".' %
- (guid, VALID_MSVS_GUID_CHARS.pattern))
- guid = '{%s}' % guid
- guid = guid or MSVSNew.MakeGuid(proj_path)
- return guid
-
-
-def _GetMsbuildToolsetOfProject(proj_path, spec, version):
- """Get the platform toolset for the project.
-
- Arguments:
- proj_path: Path of the vcproj or vcxproj file to generate.
- spec: The target dictionary containing the properties of the target.
- version: The MSVSVersion object.
- Returns:
- the platform toolset string or None.
- """
- # Pluck out the default configuration.
- default_config = _GetDefaultConfiguration(spec)
- toolset = default_config.get('msbuild_toolset')
- if not toolset and version.DefaultToolset():
- toolset = version.DefaultToolset()
- return toolset
-
-
-def _GenerateProject(project, options, version, generator_flags):
- """Generates a vcproj file.
-
- Arguments:
- project: the MSVSProject object.
- options: global generator options.
- version: the MSVSVersion object.
- generator_flags: dict of generator-specific flags.
- Returns:
- A list of source files that cannot be found on disk.
- """
- default_config = _GetDefaultConfiguration(project.spec)
-
- # Skip emitting anything if told to with msvs_existing_vcproj option.
- if default_config.get('msvs_existing_vcproj'):
- return []
-
- if version.UsesVcxproj():
- return _GenerateMSBuildProject(project, options, version, generator_flags)
- else:
- return _GenerateMSVSProject(project, options, version, generator_flags)
-
-
-# TODO: Avoid code duplication with _ValidateSourcesForOSX in make.py.
-def _ValidateSourcesForMSVSProject(spec, version):
- """Makes sure if duplicate basenames are not specified in the source list.
-
- Arguments:
- spec: The target dictionary containing the properties of the target.
- version: The VisualStudioVersion object.
- """
- # This validation should not be applied to MSVC2010 and later.
- assert not version.UsesVcxproj()
-
- # TODO: Check if MSVC allows this for loadable_module targets.
- if spec.get('type', None) not in ('static_library', 'shared_library'):
- return
- sources = spec.get('sources', [])
- basenames = {}
- for source in sources:
- name, ext = os.path.splitext(source)
- is_compiled_file = ext in [
- '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
- if not is_compiled_file:
- continue
- basename = os.path.basename(name) # Don't include extension.
- basenames.setdefault(basename, []).append(source)
-
- error = ''
- for basename, files in basenames.iteritems():
- if len(files) > 1:
- error += ' %s: %s\n' % (basename, ' '.join(files))
-
- if error:
- print('static library %s has several files with the same basename:\n' %
- spec['target_name'] + error + 'MSVC08 cannot handle that.')
- raise GypError('Duplicate basenames in sources section, see list above')
-
-
-def _GenerateMSVSProject(project, options, version, generator_flags):
- """Generates a .vcproj file. It may create .rules and .user files too.
-
- Arguments:
- project: The project object we will generate the file for.
- options: Global options passed to the generator.
- version: The VisualStudioVersion object.
- generator_flags: dict of generator-specific flags.
- """
- spec = project.spec
- gyp.common.EnsureDirExists(project.path)
-
- platforms = _GetUniquePlatforms(spec)
- p = MSVSProject.Writer(project.path, version, spec['target_name'],
- project.guid, platforms)
-
- # Get directory project file is in.
- project_dir = os.path.split(project.path)[0]
- gyp_path = _NormalizedSource(project.build_file)
- relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
-
- config_type = _GetMSVSConfigurationType(spec, project.build_file)
- for config_name, config in spec['configurations'].iteritems():
- _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)
-
- # MSVC08 and prior versions cannot handle duplicate basenames in the same
- # target.
- # TODO: Take excluded sources into consideration if possible.
- _ValidateSourcesForMSVSProject(spec, version)
-
- # Prepare list of sources and excluded sources.
- gyp_file = os.path.split(project.build_file)[1]
- sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
- gyp_file)
-
- # Add rules.
- actions_to_add = {}
- _GenerateRulesForMSVS(p, project_dir, options, spec,
- sources, excluded_sources,
- actions_to_add)
- list_excluded = generator_flags.get('msvs_list_excluded_files', True)
- sources, excluded_sources, excluded_idl = (
- _AdjustSourcesAndConvertToFilterHierarchy(spec, options, project_dir,
- sources, excluded_sources,
- list_excluded, version))
-
- # Add in files.
- missing_sources = _VerifySourcesExist(sources, project_dir)
- p.AddFiles(sources)
-
- _AddToolFilesToMSVS(p, spec)
- _HandlePreCompiledHeaders(p, sources, spec)
- _AddActions(actions_to_add, spec, relative_path_of_gyp_file)
- _AddCopies(actions_to_add, spec)
- _WriteMSVSUserFile(project.path, version, spec)
-
- # NOTE: this stanza must appear after all actions have been decided.
- # Don't exclude sources with actions attached, or they won't run.
- excluded_sources = _FilterActionsFromExcluded(
- excluded_sources, actions_to_add)
- _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
- list_excluded)
- _AddAccumulatedActionsToMSVS(p, spec, actions_to_add)
-
- # Write it out.
- p.WriteIfChanged()
-
- return missing_sources
-
-
-def _GetUniquePlatforms(spec):
- """Returns the list of unique platforms for this spec, e.g ['win32', ...].
-
- Arguments:
- spec: The target dictionary containing the properties of the target.
- Returns:
- The MSVSUserFile object created.
- """
- # Gather list of unique platforms.
- platforms = OrderedSet()
- for configuration in spec['configurations']:
- platforms.add(_ConfigPlatform(spec['configurations'][configuration]))
- platforms = list(platforms)
- return platforms
-
-
-def _CreateMSVSUserFile(proj_path, version, spec):
- """Generates a .user file for the user running this Gyp program.
-
- Arguments:
- proj_path: The path of the project file being created. The .user file
- shares the same path (with an appropriate suffix).
- version: The VisualStudioVersion object.
- spec: The target dictionary containing the properties of the target.
- Returns:
- The MSVSUserFile object created.
- """
- (domain, username) = _GetDomainAndUserName()
- vcuser_filename = '.'.join([proj_path, domain, username, 'user'])
- user_file = MSVSUserFile.Writer(vcuser_filename, version,
- spec['target_name'])
- return user_file
-
-
-def _GetMSVSConfigurationType(spec, build_file):
- """Returns the configuration type for this project.
-
- It's a number defined by Microsoft. May raise an exception.
-
- Args:
- spec: The target dictionary containing the properties of the target.
- build_file: The path of the gyp file.
- Returns:
- A string with the configuration type number.
- """
- try:
- config_type = {
- 'executable': '1', # .exe
- 'shared_library': '2', # .dll
- 'loadable_module': '2', # .dll
- 'static_library': '4', # .lib
- 'none': '10', # Utility type
- }[spec['type']]
- except KeyError:
- if spec.get('type'):
- raise GypError('Target type %s is not a valid target type for '
- 'target %s in %s.' %
- (spec['type'], spec['target_name'], build_file))
- else:
- raise GypError('Missing type field for target %s in %s.' %
- (spec['target_name'], build_file))
- return config_type
-
-
-def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
- """Adds a configuration to the MSVS project.
-
- Many settings in a vcproj file are specific to a configuration. This
- function generates the part of the vcproj file that is configuration
- specific.
-
- Arguments:
- p: The target project being generated.
- spec: The target dictionary containing the properties of the target.
- config_type: The configuration type, a number as defined by Microsoft.
- config_name: The name of the configuration.
- config: The dictionary that defines the special processing to be done
- for this configuration.
- """
- # Get the information for this configuration
- include_dirs, midl_include_dirs, resource_include_dirs = \
- _GetIncludeDirs(config)
- libraries = _GetLibraries(spec)
- library_dirs = _GetLibraryDirs(config)
- out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False)
- defines = _GetDefines(config)
- defines = [_EscapeCppDefineForMSVS(d) for d in defines]
- disabled_warnings = _GetDisabledWarnings(config)
- prebuild = config.get('msvs_prebuild')
- postbuild = config.get('msvs_postbuild')
- def_file = _GetModuleDefinition(spec)
- precompiled_header = config.get('msvs_precompiled_header')
-
- # Prepare the list of tools as a dictionary.
- tools = dict()
- # Add in user specified msvs_settings.
- msvs_settings = config.get('msvs_settings', {})
- MSVSSettings.ValidateMSVSSettings(msvs_settings)
-
- # Prevent default library inheritance from the environment.
- _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', ['$(NOINHERIT)'])
-
- for tool in msvs_settings:
- settings = config['msvs_settings'][tool]
- for setting in settings:
- _ToolAppend(tools, tool, setting, settings[setting])
- # Add the information to the appropriate tool
- _ToolAppend(tools, 'VCCLCompilerTool',
- 'AdditionalIncludeDirectories', include_dirs)
- _ToolAppend(tools, 'VCMIDLTool',
- 'AdditionalIncludeDirectories', midl_include_dirs)
- _ToolAppend(tools, 'VCResourceCompilerTool',
- 'AdditionalIncludeDirectories', resource_include_dirs)
- # Add in libraries.
- _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', libraries)
- _ToolAppend(tools, 'VCLinkerTool', 'AdditionalLibraryDirectories',
- library_dirs)
- if out_file:
- _ToolAppend(tools, vc_tool, 'OutputFile', out_file, only_if_unset=True)
- # Add defines.
- _ToolAppend(tools, 'VCCLCompilerTool', 'PreprocessorDefinitions', defines)
- _ToolAppend(tools, 'VCResourceCompilerTool', 'PreprocessorDefinitions',
- defines)
- # Change program database directory to prevent collisions.
- _ToolAppend(tools, 'VCCLCompilerTool', 'ProgramDataBaseFileName',
- '$(IntDir)$(ProjectName)\\vc80.pdb', only_if_unset=True)
- # Add disabled warnings.
- _ToolAppend(tools, 'VCCLCompilerTool',
- 'DisableSpecificWarnings', disabled_warnings)
- # Add Pre-build.
- _ToolAppend(tools, 'VCPreBuildEventTool', 'CommandLine', prebuild)
- # Add Post-build.
- _ToolAppend(tools, 'VCPostBuildEventTool', 'CommandLine', postbuild)
- # Turn on precompiled headers if appropriate.
- if precompiled_header:
- precompiled_header = os.path.split(precompiled_header)[1]
- _ToolAppend(tools, 'VCCLCompilerTool', 'UsePrecompiledHeader', '2')
- _ToolAppend(tools, 'VCCLCompilerTool',
- 'PrecompiledHeaderThrough', precompiled_header)
- _ToolAppend(tools, 'VCCLCompilerTool',
- 'ForcedIncludeFiles', precompiled_header)
- # Loadable modules don't generate import libraries;
- # tell dependent projects to not expect one.
- if spec['type'] == 'loadable_module':
- _ToolAppend(tools, 'VCLinkerTool', 'IgnoreImportLibrary', 'true')
- # Set the module definition file if any.
- if def_file:
- _ToolAppend(tools, 'VCLinkerTool', 'ModuleDefinitionFile', def_file)
-
- _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name)
-
-
-def _GetIncludeDirs(config):
- """Returns the list of directories to be used for #include directives.
-
- Arguments:
- config: The dictionary that defines the special processing to be done
- for this configuration.
- Returns:
- The list of directory paths.
- """
- # TODO(bradnelson): include_dirs should really be flexible enough not to
- # require this sort of thing.
- include_dirs = (
- config.get('include_dirs', []) +
- config.get('msvs_system_include_dirs', []))
- midl_include_dirs = (
- config.get('midl_include_dirs', []) +
- config.get('msvs_system_include_dirs', []))
- resource_include_dirs = config.get('resource_include_dirs', include_dirs)
- include_dirs = _FixPaths(include_dirs)
- midl_include_dirs = _FixPaths(midl_include_dirs)
- resource_include_dirs = _FixPaths(resource_include_dirs)
- return include_dirs, midl_include_dirs, resource_include_dirs
-
-
-def _GetLibraryDirs(config):
- """Returns the list of directories to be used for library search paths.
-
- Arguments:
- config: The dictionary that defines the special processing to be done
- for this configuration.
- Returns:
- The list of directory paths.
- """
-
- library_dirs = config.get('library_dirs', [])
- library_dirs = _FixPaths(library_dirs)
- return library_dirs
-
-
-def _GetLibraries(spec):
- """Returns the list of libraries for this configuration.
-
- Arguments:
- spec: The target dictionary containing the properties of the target.
- Returns:
- The list of library file names.
- """
- libraries = spec.get('libraries', [])
- # Strip out -l, as it is not used on windows (but is needed so we can pass
- # in libraries that are assumed to be in the default library path).
- # Also remove duplicate entries, leaving only the last duplicate, while
- # preserving order.
- found = OrderedSet()
- unique_libraries_list = []
- for entry in reversed(libraries):
- library = re.sub(r'^\-l', '', entry)
- if not os.path.splitext(library)[1]:
- library += '.lib'
- if library not in found:
- found.add(library)
- unique_libraries_list.append(library)
- unique_libraries_list.reverse()
- return unique_libraries_list
-
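- # Example: _GetLibraries({'libraries': ['-lfoo', 'bar.lib', 'foo.lib']})
- # returns ['bar.lib', 'foo.lib']: '-l' is stripped, '.lib' is appended when
- # missing, and only the last of the duplicated entries survives.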
-
-def _GetOutputFilePathAndTool(spec, msbuild):
- """Returns the path and tool to use for this target.
-
- Figures out the path of the file this spec will create and the name of
- the VC tool that will create it.
-
- Arguments:
- spec: The target dictionary containing the properties of the target.
- msbuild: True when generating for MSBuild; the default suffix is then
- '$(TargetExt)'.
- Returns:
- A triple of (file path, name of the vc tool, name of the msbuild tool)
- """
- # Select a name for the output file.
- out_file = ''
- vc_tool = ''
- msbuild_tool = ''
- output_file_map = {
- 'executable': ('VCLinkerTool', 'Link', '$(OutDir)', '.exe'),
- 'shared_library': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
- 'loadable_module': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
- 'static_library': ('VCLibrarianTool', 'Lib', '$(OutDir)lib\\', '.lib'),
- }
- output_file_props = output_file_map.get(spec['type'])
- if output_file_props and int(spec.get('msvs_auto_output_file', 1)):
- vc_tool, msbuild_tool, out_dir, suffix = output_file_props
- if spec.get('standalone_static_library', 0):
- out_dir = '$(OutDir)'
- out_dir = spec.get('product_dir', out_dir)
- product_extension = spec.get('product_extension')
- if product_extension:
- suffix = '.' + product_extension
- elif msbuild:
- suffix = '$(TargetExt)'
- prefix = spec.get('product_prefix', '')
- product_name = spec.get('product_name', '$(ProjectName)')
- out_file = ntpath.join(out_dir, prefix + product_name + suffix)
- return out_file, vc_tool, msbuild_tool
-
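- # Example: for a bare {'type': 'executable'} spec with no product_*
- # overrides, _GetOutputFilePathAndTool(spec, msbuild=False) returns
- # ('$(OutDir)\$(ProjectName).exe', 'VCLinkerTool', 'Link').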
-
-def _GetOutputTargetExt(spec):
- """Returns the extension for this target, including the dot
-
- If product_extension is specified, set target_extension to this to avoid
- MSB8012, returns None otherwise. Ignores any target_extension settings in
- the input files.
-
- Arguments:
- spec: The target dictionary containing the properties of the target.
- Returns:
- A string with the extension, or None
- """
- target_extension = spec.get('product_extension')
- if target_extension:
- return '.' + target_extension
- return None
-
-
-def _GetDefines(config):
- """Returns the list of preprocessor definitions for this configuation.
-
- Arguments:
- config: The dictionary that defines the special processing to be done
- for this configuration.
- Returns:
- The list of preprocessor definitions.
- """
- defines = []
- for d in config.get('defines', []):
- if type(d) == list:
- fd = '='.join([str(dpart) for dpart in d])
- else:
- fd = str(d)
- defines.append(fd)
- return defines
-
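- # Example: _GetDefines({'defines': ['NDEBUG', ['VERSION', 3]]}) returns
- # ['NDEBUG', 'VERSION=3']; list-form defines collapse to NAME=VALUE.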
-
-def _GetDisabledWarnings(config):
- return [str(i) for i in config.get('msvs_disabled_warnings', [])]
-
-
-def _GetModuleDefinition(spec):
- def_file = ''
- if spec['type'] in ['shared_library', 'loadable_module', 'executable']:
- def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
- if len(def_files) == 1:
- def_file = _FixPath(def_files[0])
- elif def_files:
- raise ValueError(
- 'Multiple module definition files in one target, target %s lists '
- 'multiple .def files: %s' % (
- spec['target_name'], ' '.join(def_files)))
- return def_file
-
-
-def _ConvertToolsToExpectedForm(tools):
- """Convert tools to a form expected by Visual Studio.
-
- Arguments:
- tools: A dictionary of settings; the tool name is the key.
- Returns:
- A list of Tool objects.
- """
- tool_list = []
- for tool, settings in tools.iteritems():
- # Collapse settings with lists.
- settings_fixed = {}
- for setting, value in settings.iteritems():
- if type(value) == list:
- if ((tool == 'VCLinkerTool' and
- setting == 'AdditionalDependencies') or
- setting == 'AdditionalOptions'):
- settings_fixed[setting] = ' '.join(value)
- else:
- settings_fixed[setting] = ';'.join(value)
- else:
- settings_fixed[setting] = value
- # Add in this tool.
- tool_list.append(MSVSProject.Tool(tool, settings_fixed))
- return tool_list
-
-
-def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name):
- """Add to the project file the configuration specified by config.
-
- Arguments:
- p: The target project being generated.
- spec: the target project dict.
- tools: A dictionary of settings; the tool name is the key.
- config: The dictionary that defines the special processing to be done
- for this configuration.
- config_type: The configuration type, a number as defined by Microsoft.
- config_name: The name of the configuration.
- """
- attributes = _GetMSVSAttributes(spec, config, config_type)
- # Add in this configuration.
- tool_list = _ConvertToolsToExpectedForm(tools)
- p.AddConfig(_ConfigFullName(config_name, config),
- attrs=attributes, tools=tool_list)
-
-
-def _GetMSVSAttributes(spec, config, config_type):
- # Prepare configuration attributes.
- prepared_attrs = {}
- source_attrs = config.get('msvs_configuration_attributes', {})
- for a in source_attrs:
- prepared_attrs[a] = source_attrs[a]
- # Add props files.
- vsprops_dirs = config.get('msvs_props', [])
- vsprops_dirs = _FixPaths(vsprops_dirs)
- if vsprops_dirs:
- prepared_attrs['InheritedPropertySheets'] = ';'.join(vsprops_dirs)
- # Set configuration type.
- prepared_attrs['ConfigurationType'] = config_type
- output_dir = prepared_attrs.get('OutputDirectory',
- '$(SolutionDir)$(ConfigurationName)')
- prepared_attrs['OutputDirectory'] = _FixPath(output_dir) + '\\'
- if 'IntermediateDirectory' not in prepared_attrs:
- intermediate = '$(ConfigurationName)\\obj\\$(ProjectName)'
- prepared_attrs['IntermediateDirectory'] = _FixPath(intermediate) + '\\'
- else:
- intermediate = _FixPath(prepared_attrs['IntermediateDirectory']) + '\\'
- intermediate = MSVSSettings.FixVCMacroSlashes(intermediate)
- prepared_attrs['IntermediateDirectory'] = intermediate
- return prepared_attrs
-
-
-def _AddNormalizedSources(sources_set, sources_array):
- sources_set.update(_NormalizedSource(s) for s in sources_array)
-
-
-def _PrepareListOfSources(spec, generator_flags, gyp_file):
- """Prepare list of sources and excluded sources.
-
- Besides the sources specified directly in the spec, adds the gyp file so
- that a change to it will cause a re-compile. Also adds appropriate sources
- for actions and copies. Assumes later stage will un-exclude files which
- have custom build steps attached.
-
- Arguments:
- spec: The target dictionary containing the properties of the target.
- generator_flags: dict of generator-specific flags.
- gyp_file: The name of the gyp file.
- Returns:
- A pair of (list of sources, list of excluded sources).
- The sources will be relative to the gyp file.
- """
- sources = OrderedSet()
- _AddNormalizedSources(sources, spec.get('sources', []))
- excluded_sources = OrderedSet()
- # Add in the gyp file.
- if not generator_flags.get('standalone'):
- sources.add(gyp_file)
-
- # Add in 'action' inputs and outputs.
- for a in spec.get('actions', []):
- inputs = a['inputs']
- inputs = [_NormalizedSource(i) for i in inputs]
- # Add all inputs to sources and excluded sources.
- inputs = OrderedSet(inputs)
- sources.update(inputs)
- if not spec.get('msvs_external_builder'):
- excluded_sources.update(inputs)
- if int(a.get('process_outputs_as_sources', False)):
- _AddNormalizedSources(sources, a.get('outputs', []))
- # Add in 'copies' inputs and outputs.
- for cpy in spec.get('copies', []):
- _AddNormalizedSources(sources, cpy.get('files', []))
- return (sources, excluded_sources)
-
-
-def _AdjustSourcesAndConvertToFilterHierarchy(
- spec, options, gyp_dir, sources, excluded_sources, list_excluded, version):
- """Adjusts the list of sources and excluded sources.
-
- Also converts the sets to lists.
-
- Arguments:
- spec: The target dictionary containing the properties of the target.
- options: Global generator options.
- gyp_dir: The path to the gyp file being processed.
- sources: A set of sources to be included for this project.
- excluded_sources: A set of sources to be excluded for this project.
- version: A MSVSVersion object.
- Returns:
- A trio of (list of sources, list of excluded sources,
- path of excluded IDL file)
- """
- # Exclude excluded sources coming into the generator.
- excluded_sources.update(OrderedSet(spec.get('sources_excluded', [])))
- # Add excluded sources into sources for good measure.
- sources.update(excluded_sources)
- # Convert to proper windows form.
- # NOTE: sources goes from being a set to a list here.
- # NOTE: excluded_sources goes from being a set to a list here.
- sources = _FixPaths(sources)
- # Convert to proper windows form.
- excluded_sources = _FixPaths(excluded_sources)
-
- excluded_idl = _IdlFilesHandledNonNatively(spec, sources)
-
- precompiled_related = _GetPrecompileRelatedFiles(spec)
- # Find the excluded ones, minus the precompiled header related ones.
- fully_excluded = [i for i in excluded_sources if i not in precompiled_related]
-
- # Convert to folders and the right slashes.
- sources = [i.split('\\') for i in sources]
- sources = _ConvertSourcesToFilterHierarchy(sources, excluded=fully_excluded,
- list_excluded=list_excluded,
- msvs_version=version)
-
- # Prune filters with a single child to flatten ugly directory structures
- # such as ../../src/modules/module1 etc.
- if version.UsesVcxproj():
- while all([isinstance(s, MSVSProject.Filter) for s in sources]) \
- and len(set([s.name for s in sources])) == 1:
- assert all([len(s.contents) == 1 for s in sources])
- sources = [s.contents[0] for s in sources]
- else:
- while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter):
- sources = sources[0].contents
-
- return sources, excluded_sources, excluded_idl
-
-
-def _IdlFilesHandledNonNatively(spec, sources):
- # If any non-native rules use 'idl' as an extension, exclude idl files.
- # Gather a list here to use later.
- using_idl = False
- for rule in spec.get('rules', []):
- if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)):
- using_idl = True
- break
- if using_idl:
- excluded_idl = [i for i in sources if i.endswith('.idl')]
- else:
- excluded_idl = []
- return excluded_idl
-
-
-def _GetPrecompileRelatedFiles(spec):
- # Gather a list of precompiled header related sources.
- precompiled_related = []
- for _, config in spec['configurations'].iteritems():
- for k in precomp_keys:
- f = config.get(k)
- if f:
- precompiled_related.append(_FixPath(f))
- return precompiled_related
-
-
-def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
- list_excluded):
- exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
- for file_name, excluded_configs in exclusions.iteritems():
- if (not list_excluded and
- len(excluded_configs) == len(spec['configurations'])):
- # If we're not listing excluded files, then they won't appear in the
- # project, so don't try to configure them to be excluded.
- pass
- else:
- for config_name, config in excluded_configs:
- p.AddFileConfig(file_name, _ConfigFullName(config_name, config),
- {'ExcludedFromBuild': 'true'})
-
-
-def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl):
- exclusions = {}
- # Exclude excluded sources from being built.
- for f in excluded_sources:
- excluded_configs = []
- for config_name, config in spec['configurations'].iteritems():
- precomped = [_FixPath(config.get(i, '')) for i in precomp_keys]
- # Don't do this for ones that are precompiled header related.
- if f not in precomped:
- excluded_configs.append((config_name, config))
- exclusions[f] = excluded_configs
- # If any non-native rules use 'idl' as an extension, exclude idl files.
- # Exclude them now.
- for f in excluded_idl:
- excluded_configs = []
- for config_name, config in spec['configurations'].iteritems():
- excluded_configs.append((config_name, config))
- exclusions[f] = excluded_configs
- return exclusions
-
-
-def _AddToolFilesToMSVS(p, spec):
- # Add in tool files (rules).
- tool_files = OrderedSet()
- for _, config in spec['configurations'].iteritems():
- for f in config.get('msvs_tool_files', []):
- tool_files.add(f)
- for f in tool_files:
- p.AddToolFile(f)
-
-
-def _HandlePreCompiledHeaders(p, sources, spec):
- # Pre-compiled header source stubs need a different compiler flag
- # (generate precompiled header) and any source file not of the same
- # kind (i.e. C vs. C++) as the precompiled header source stub needs
- # to have use of precompiled headers disabled.
- extensions_excluded_from_precompile = []
- for config_name, config in spec['configurations'].iteritems():
- source = config.get('msvs_precompiled_source')
- if source:
- source = _FixPath(source)
- # UsePrecompiledHeader=1: this stub source creates the precompiled header.
- tool = MSVSProject.Tool('VCCLCompilerTool',
- {'UsePrecompiledHeader': '1'})
- p.AddFileConfig(source, _ConfigFullName(config_name, config),
- {}, tools=[tool])
- basename, extension = os.path.splitext(source)
- if extension == '.c':
- extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx']
- else:
- extensions_excluded_from_precompile = ['.c']
- def DisableForSourceTree(source_tree):
- for source in source_tree:
- if isinstance(source, MSVSProject.Filter):
- DisableForSourceTree(source.contents)
- else:
- basename, extension = os.path.splitext(source)
- if extension in extensions_excluded_from_precompile:
- for config_name, config in spec['configurations'].iteritems():
- tool = MSVSProject.Tool('VCCLCompilerTool',
- {'UsePrecompiledHeader': '0',
- 'ForcedIncludeFiles': '$(NOINHERIT)'})
- p.AddFileConfig(_FixPath(source),
- _ConfigFullName(config_name, config),
- {}, tools=[tool])
- # Do nothing if there was no precompiled source.
- if extensions_excluded_from_precompile:
- DisableForSourceTree(sources)
-
-
-def _AddActions(actions_to_add, spec, relative_path_of_gyp_file):
- # Add actions.
- actions = spec.get('actions', [])
- # Don't setup_env every time. When all the actions are run together in one
- # batch file in VS, the PATH will grow too long.
- # Membership in this set means that the cygwin environment has been set up,
- # and does not need to be set up again.
- have_setup_env = set()
- for a in actions:
- # Attach actions to the gyp file if nothing else is there.
- inputs = a.get('inputs') or [relative_path_of_gyp_file]
- attached_to = inputs[0]
- need_setup_env = attached_to not in have_setup_env
- cmd = _BuildCommandLineForRule(spec, a, has_input_path=False,
- do_setup_env=need_setup_env)
- have_setup_env.add(attached_to)
- # Add the action.
- _AddActionStep(actions_to_add,
- inputs=inputs,
- outputs=a.get('outputs', []),
- description=a.get('message', a['action_name']),
- command=cmd)
-
-
-def _WriteMSVSUserFile(project_path, version, spec):
- # Add run_as and test targets.
- if 'run_as' in spec:
- run_as = spec['run_as']
- action = run_as.get('action', [])
- environment = run_as.get('environment', [])
- working_directory = run_as.get('working_directory', '.')
- elif int(spec.get('test', 0)):
- action = ['$(TargetPath)', '--gtest_print_time']
- environment = []
- working_directory = '.'
- else:
- return # Nothing to add
- # Write out the user file.
- user_file = _CreateMSVSUserFile(project_path, version, spec)
- for config_name, c_data in spec['configurations'].iteritems():
- user_file.AddDebugSettings(_ConfigFullName(config_name, c_data),
- action, environment, working_directory)
- user_file.WriteIfChanged()
-
-
-def _AddCopies(actions_to_add, spec):
- copies = _GetCopies(spec)
- for inputs, outputs, cmd, description in copies:
- _AddActionStep(actions_to_add, inputs=inputs, outputs=outputs,
- description=description, command=cmd)
-
-
-def _GetCopies(spec):
- copies = []
- # Add copies.
- for cpy in spec.get('copies', []):
- for src in cpy.get('files', []):
- dst = os.path.join(cpy['destination'], os.path.basename(src))
- # _AddCustomBuildToolForMSVS() will call _FixPath() on the inputs and
- # outputs, so do the same for our generated command line.
- if src.endswith('/'):
- src_bare = src[:-1]
- base_dir = posixpath.split(src_bare)[0]
- outer_dir = posixpath.split(src_bare)[1]
- cmd = 'cd "%s" && xcopy /e /f /y "%s" "%s\\%s\\"' % (
- _FixPath(base_dir), outer_dir, _FixPath(dst), outer_dir)
- copies.append(([src], ['dummy_copies', dst], cmd,
- 'Copying %s to %s' % (src, dst)))
- else:
- cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % (
- _FixPath(cpy['destination']), _FixPath(src), _FixPath(dst))
- copies.append(([src], [dst], cmd, 'Copying %s to %s' % (src, dst)))
- return copies
-
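- # Example: copying file 'a/b.txt' to destination 'out' yields, modulo
- # _FixPath's prefixing, the command:
- #   mkdir "out" 2>nul & set ERRORLEVEL=0 & copy /Y "a\b.txt" "out\b.txt"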
-
-def _GetPathDict(root, path):
- # |path| will eventually be empty (in the recursive calls) if it was initially
- # relative; otherwise it will eventually end up as '\', 'D:\', etc.
- if not path or path.endswith(os.sep):
- return root
- parent, folder = os.path.split(path)
- parent_dict = _GetPathDict(root, parent)
- if folder not in parent_dict:
- parent_dict[folder] = dict()
- return parent_dict[folder]
-
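- # Example: _GetPathDict(root, 'a/b') on an initially empty root leaves
- # root == {'a': {'b': {}}} and returns the innermost dict.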
-
-def _DictsToFolders(base_path, bucket, flat):
- # Convert to folders recursively.
- children = []
- for folder, contents in bucket.iteritems():
- if type(contents) == dict:
- folder_children = _DictsToFolders(os.path.join(base_path, folder),
- contents, flat)
- if flat:
- children += folder_children
- else:
- folder_children = MSVSNew.MSVSFolder(os.path.join(base_path, folder),
- name='(' + folder + ')',
- entries=folder_children)
- children.append(folder_children)
- else:
- children.append(contents)
- return children
-
-
-def _CollapseSingles(parent, node):
- # Recursively explore the tree of dicts looking for projects which are
- # the sole item in a folder which has the same name as the project. Bring
- # such projects up one level.
- if (type(node) == dict and
- len(node) == 1 and
- node.keys()[0] == parent + '.vcproj'):
- return node[node.keys()[0]]
- if type(node) != dict:
- return node
- for child in node:
- node[child] = _CollapseSingles(child, node[child])
- return node
-
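- # Example: _CollapseSingles('', {'base': {'base.vcproj': proj}}) returns
- # {'base': proj}: the lone project is hoisted out of the folder that shares
- # its name.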
-
-def _GatherSolutionFolders(sln_projects, project_objects, flat):
- root = {}
- # Convert into a tree of dicts on path.
- for p in sln_projects:
- gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2]
- gyp_dir = os.path.dirname(gyp_file)
- path_dict = _GetPathDict(root, gyp_dir)
- path_dict[target + '.vcproj'] = project_objects[p]
- # Walk down from the top until we hit a folder that has more than one entry.
- # In practice, this strips the top-level "src/" dir from the hierarchy in
- # the solution.
- while len(root) == 1 and type(root[root.keys()[0]]) == dict:
- root = root[root.keys()[0]]
- # Collapse singles.
- root = _CollapseSingles('', root)
- # Merge buckets until everything is a root entry.
- return _DictsToFolders('', root, flat)
-
-
-def _GetPathOfProject(qualified_target, spec, options, msvs_version):
- default_config = _GetDefaultConfiguration(spec)
- proj_filename = default_config.get('msvs_existing_vcproj')
- if not proj_filename:
- proj_filename = (spec['target_name'] + options.suffix +
- msvs_version.ProjectExtension())
-
- build_file = gyp.common.BuildFile(qualified_target)
- proj_path = os.path.join(os.path.dirname(build_file), proj_filename)
- fix_prefix = None
- if options.generator_output:
- project_dir_path = os.path.dirname(os.path.abspath(proj_path))
- proj_path = os.path.join(options.generator_output, proj_path)
- fix_prefix = gyp.common.RelativePath(project_dir_path,
- os.path.dirname(proj_path))
- return proj_path, fix_prefix
-
-
-def _GetPlatformOverridesOfProject(spec):
- # Prepare a dict indicating which project configurations are used for which
- # solution configurations for this target.
- config_platform_overrides = {}
- for config_name, c in spec['configurations'].iteritems():
- config_fullname = _ConfigFullName(config_name, c)
- platform = c.get('msvs_target_platform', _ConfigPlatform(c))
- fixed_config_fullname = '%s|%s' % (
- _ConfigBaseName(config_name, _ConfigPlatform(c)), platform)
- config_platform_overrides[config_fullname] = fixed_config_fullname
- return config_platform_overrides
-
-
-def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
- """Create a MSVSProject object for the targets found in target list.
-
- Arguments:
- target_list: the list of targets to generate project objects for.
- target_dicts: the dictionary of specifications.
- options: global generator options.
- msvs_version: the MSVSVersion object.
- Returns:
- A dict of created projects, keyed by qualified target.
- """
- global fixpath_prefix
- # Generate each project.
- projects = {}
- for qualified_target in target_list:
- spec = target_dicts[qualified_target]
- if spec['toolset'] != 'target':
- raise GypError(
- 'Multiple toolsets not supported in msvs build (target %s)' %
- qualified_target)
- proj_path, fixpath_prefix = _GetPathOfProject(qualified_target, spec,
- options, msvs_version)
- guid = _GetGuidOfProject(proj_path, spec)
- overrides = _GetPlatformOverridesOfProject(spec)
- build_file = gyp.common.BuildFile(qualified_target)
- # Create object for this project.
- obj = MSVSNew.MSVSProject(
- proj_path,
- name=spec['target_name'],
- guid=guid,
- spec=spec,
- build_file=build_file,
- config_platform_overrides=overrides,
- fixpath_prefix=fixpath_prefix)
- # Set project toolset if any (MS build only)
- if msvs_version.UsesVcxproj():
- obj.set_msbuild_toolset(
- _GetMsbuildToolsetOfProject(proj_path, spec, msvs_version))
- projects[qualified_target] = obj
- # Set all the dependencies, but not if we are using an external builder like
- # ninja
- for project in projects.values():
- if not project.spec.get('msvs_external_builder'):
- deps = project.spec.get('dependencies', [])
- deps = [projects[d] for d in deps]
- project.set_dependencies(deps)
- return projects
-
-
-def _InitNinjaFlavor(params, target_list, target_dicts):
- """Initialize targets for the ninja flavor.
-
- This sets up the necessary variables in the targets to generate msvs projects
- that use ninja as an external builder. The variables in the spec are only set
- if they have not been set. This allows individual specs to override the
- default values initialized here.
- Arguments:
- params: Params provided to the generator.
- target_list: List of target pairs: 'base/base.gyp:base'.
- target_dicts: Dict of target properties keyed on target pair.
- """
- for qualified_target in target_list:
- spec = target_dicts[qualified_target]
- if spec.get('msvs_external_builder'):
- # The spec explicitly defined an external builder, so don't change it.
- continue
-
- path_to_ninja = spec.get('msvs_path_to_ninja', 'ninja.exe')
-
- spec['msvs_external_builder'] = 'ninja'
- if not spec.get('msvs_external_builder_out_dir'):
- gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
- gyp_dir = os.path.dirname(gyp_file)
- configuration = '$(Configuration)'
- if params.get('target_arch') == 'x64':
- configuration += '_x64'
- spec['msvs_external_builder_out_dir'] = os.path.join(
- gyp.common.RelativePath(params['options'].toplevel_dir, gyp_dir),
- ninja_generator.ComputeOutputDir(params),
- configuration)
- if not spec.get('msvs_external_builder_build_cmd'):
- spec['msvs_external_builder_build_cmd'] = [
- path_to_ninja,
- '-C',
- '$(OutDir)',
- '$(ProjectName)',
- ]
- if not spec.get('msvs_external_builder_clean_cmd'):
- spec['msvs_external_builder_clean_cmd'] = [
- path_to_ninja,
- '-C',
- '$(OutDir)',
- '-tclean',
- '$(ProjectName)',
- ]
-
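- # For example, a target that defines no builder of its own ends up with
- # (illustrative; the out dir depends on the generator params):
- #   'msvs_external_builder': 'ninja'
- #   'msvs_external_builder_build_cmd':
- #       ['ninja.exe', '-C', '$(OutDir)', '$(ProjectName)']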
-
-def CalculateVariables(default_variables, params):
- """Generated variables that require params to be known."""
-
- generator_flags = params.get('generator_flags', {})
-
- # Select project file format version (if unset, default to auto detecting).
- msvs_version = MSVSVersion.SelectVisualStudioVersion(
- generator_flags.get('msvs_version', 'auto'))
- # Stash msvs_version for later (so we don't have to probe the system twice).
- params['msvs_version'] = msvs_version
-
- # Set a variable so conditions can be based on msvs_version.
- default_variables['MSVS_VERSION'] = msvs_version.ShortName()
-
- # To determine processor word size on Windows, in addition to checking
- # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
- # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
- # contains the actual word size of the system when running through WOW64).
- if (os.environ.get('PROCESSOR_ARCHITECTURE', '').find('64') >= 0 or
- os.environ.get('PROCESSOR_ARCHITEW6432', '').find('64') >= 0):
- default_variables['MSVS_OS_BITS'] = 64
- else:
- default_variables['MSVS_OS_BITS'] = 32
-
- if gyp.common.GetFlavor(params) == 'ninja':
- default_variables['SHARED_INTERMEDIATE_DIR'] = '$(OutDir)gen'
-
-
-def PerformBuild(data, configurations, params):
- options = params['options']
- msvs_version = params['msvs_version']
- devenv = os.path.join(msvs_version.path, 'Common7', 'IDE', 'devenv.com')
-
- for build_file, build_file_dict in data.iteritems():
- (build_file_root, build_file_ext) = os.path.splitext(build_file)
- if build_file_ext != '.gyp':
- continue
- sln_path = build_file_root + options.suffix + '.sln'
- if options.generator_output:
- sln_path = os.path.join(options.generator_output, sln_path)
-
- for config in configurations:
- arguments = [devenv, sln_path, '/Build', config]
- print 'Building [%s]: %s' % (config, arguments)
- rtn = subprocess.check_call(arguments)
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- """Generate .sln and .vcproj files.
-
- This is the entry point for this generator.
- Arguments:
- target_list: List of target pairs: 'base/base.gyp:base'.
- target_dicts: Dict of target properties keyed on target pair.
- data: Dictionary containing per .gyp data.
- """
- global fixpath_prefix
-
- options = params['options']
-
- # Get the project file format version back out of where we stashed it in
- # GeneratorCalculatedVariables.
- msvs_version = params['msvs_version']
-
- generator_flags = params.get('generator_flags', {})
-
- # Optionally shard targets marked with 'msvs_shard': SHARD_COUNT.
- (target_list, target_dicts) = MSVSUtil.ShardTargets(target_list, target_dicts)
-
- # Optionally use the large PDB workaround for targets marked with
- # 'msvs_large_pdb': 1.
- (target_list, target_dicts) = MSVSUtil.InsertLargePdbShims(
- target_list, target_dicts, generator_default_variables)
-
- # Optionally configure each spec to use ninja as the external builder.
- if params.get('flavor') == 'ninja':
- _InitNinjaFlavor(params, target_list, target_dicts)
-
- # Prepare the set of configurations.
- configs = set()
- for qualified_target in target_list:
- spec = target_dicts[qualified_target]
- for config_name, config in spec['configurations'].iteritems():
- configs.add(_ConfigFullName(config_name, config))
- configs = list(configs)
-
- # Figure out all the projects that will be generated and their guids
- project_objects = _CreateProjectObjects(target_list, target_dicts, options,
- msvs_version)
-
- # Generate each project.
- missing_sources = []
- for project in project_objects.values():
- fixpath_prefix = project.fixpath_prefix
- missing_sources.extend(_GenerateProject(project, options, msvs_version,
- generator_flags))
- fixpath_prefix = None
-
- for build_file in data:
- # Validate build_file extension
- if not build_file.endswith('.gyp'):
- continue
- sln_path = os.path.splitext(build_file)[0] + options.suffix + '.sln'
- if options.generator_output:
- sln_path = os.path.join(options.generator_output, sln_path)
- # Get projects in the solution, and their dependents.
- sln_projects = gyp.common.BuildFileTargets(target_list, build_file)
- sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects)
- # Create folder hierarchy.
- root_entries = _GatherSolutionFolders(
- sln_projects, project_objects, flat=msvs_version.FlatSolution())
- # Create solution.
- sln = MSVSNew.MSVSSolution(sln_path,
- entries=root_entries,
- variants=configs,
- websiteProperties=False,
- version=msvs_version)
- sln.Write()
-
- if missing_sources:
- error_message = "Missing input files:\n" + \
- '\n'.join(set(missing_sources))
- if generator_flags.get('msvs_error_on_missing_sources', False):
- raise GypError(error_message)
- else:
- print >> sys.stdout, "Warning: " + error_message
-
-
-def _GenerateMSBuildFiltersFile(filters_path, source_files,
- rule_dependencies, extension_to_rule_name):
- """Generate the filters file.
-
- This file is used by Visual Studio to organize the presentation of source
- files into folders.
-
- Arguments:
- filters_path: The path of the file to be created.
- source_files: The hierarchical structure of all the sources.
- rule_dependencies: The set of additional files that rules depend on.
- extension_to_rule_name: A dictionary mapping file extensions to rules.
- """
- filter_group = []
- source_group = []
- _AppendFiltersForMSBuild('', source_files, rule_dependencies,
- extension_to_rule_name, filter_group, source_group)
- if filter_group:
- content = ['Project',
- {'ToolsVersion': '4.0',
- 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'
- },
- ['ItemGroup'] + filter_group,
- ['ItemGroup'] + source_group
- ]
- easy_xml.WriteXmlIfChanged(content, filters_path, pretty=True, win32=True)
- elif os.path.exists(filters_path):
- # We don't need this filter anymore. Delete the old filter file.
- os.unlink(filters_path)
-
-
-def _AppendFiltersForMSBuild(parent_filter_name, sources, rule_dependencies,
- extension_to_rule_name,
- filter_group, source_group):
- """Creates the list of filters and sources to be added in the filter file.
-
- Args:
- parent_filter_name: The name of the filter under which the sources are
- found.
- sources: The hierarchy of filters and sources to process.
- rule_dependencies: The set of additional files that rules depend on.
- extension_to_rule_name: A dictionary mapping file extensions to rules.
- filter_group: The list to which filter entries will be appended.
- source_group: The list to which source entries will be appended.
- """
- for source in sources:
- if isinstance(source, MSVSProject.Filter):
- # We have a sub-filter. Create the name of that sub-filter.
- if not parent_filter_name:
- filter_name = source.name
- else:
- filter_name = '%s\\%s' % (parent_filter_name, source.name)
- # Add the filter to the group.
- filter_group.append(
- ['Filter', {'Include': filter_name},
- ['UniqueIdentifier', MSVSNew.MakeGuid(source.name)]])
- # Recurse and add its dependents.
- _AppendFiltersForMSBuild(filter_name, source.contents,
- rule_dependencies, extension_to_rule_name,
- filter_group, source_group)
- else:
- # It's a source. Create a source entry.
- _, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
- extension_to_rule_name)
- source_entry = [element, {'Include': source}]
- # Specify the filter it is part of, if any.
- if parent_filter_name:
- source_entry.append(['Filter', parent_filter_name])
- source_group.append(source_entry)
-
-
-def _MapFileToMsBuildSourceType(source, rule_dependencies,
- extension_to_rule_name):
- """Returns the group and element type of the source file.
-
- Arguments:
- source: The source file name.
- rule_dependencies: The set of additional files that rules depend on.
- extension_to_rule_name: A dictionary mapping file extensions to rules.
-
- Returns:
- A pair of (group this file should be part of, the label of element)
- """
- _, ext = os.path.splitext(source)
- if ext in extension_to_rule_name:
- group = 'rule'
- element = extension_to_rule_name[ext]
- elif ext in ['.cc', '.cpp', '.c', '.cxx']:
- group = 'compile'
- element = 'ClCompile'
- elif ext in ['.h', '.hxx']:
- group = 'include'
- element = 'ClInclude'
- elif ext == '.rc':
- group = 'resource'
- element = 'ResourceCompile'
- elif ext == '.asm':
- group = 'masm'
- element = 'MASM'
- elif ext == '.idl':
- group = 'midl'
- element = 'Midl'
- elif source in rule_dependencies:
- group = 'rule_dependency'
- element = 'CustomBuild'
- else:
- group = 'none'
- element = 'None'
- return (group, element)
-
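- # Example: _MapFileToMsBuildSourceType('foo.cc', set(), {}) returns
- # ('compile', 'ClCompile'), while 'bar.idl' maps to ('midl', 'Midl').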
-
-def _GenerateRulesForMSBuild(output_dir, options, spec,
- sources, excluded_sources,
- props_files_of_rules, targets_files_of_rules,
- actions_to_add, rule_dependencies,
- extension_to_rule_name):
- # MSBuild rules are implemented using three files: an XML file, a .targets
- # file and a .props file.
- # See http://blogs.msdn.com/b/vcblog/archive/2010/04/21/quick-help-on-vs2010-custom-build-rule.aspx
- # for more details.
- rules = spec.get('rules', [])
- rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
- rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]
-
- msbuild_rules = []
- for rule in rules_native:
- # Skip a rule with no action and no inputs.
- if 'action' not in rule and not rule.get('rule_sources', []):
- continue
- msbuild_rule = MSBuildRule(rule, spec)
- msbuild_rules.append(msbuild_rule)
- rule_dependencies.update(msbuild_rule.additional_dependencies.split(';'))
- extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name
- if msbuild_rules:
- base = spec['target_name'] + options.suffix
- props_name = base + '.props'
- targets_name = base + '.targets'
- xml_name = base + '.xml'
-
- props_files_of_rules.add(props_name)
- targets_files_of_rules.add(targets_name)
-
- props_path = os.path.join(output_dir, props_name)
- targets_path = os.path.join(output_dir, targets_name)
- xml_path = os.path.join(output_dir, xml_name)
-
- _GenerateMSBuildRulePropsFile(props_path, msbuild_rules)
- _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules)
- _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules)
-
- if rules_external:
- _GenerateExternalRules(rules_external, output_dir, spec,
- sources, options, actions_to_add)
- _AdjustSourcesForRules(rules, sources, excluded_sources, True)
-
-
-class MSBuildRule(object):
- """Used to store information used to generate an MSBuild rule.
-
- Attributes:
- rule_name: The rule name, sanitized to use in XML.
- target_name: The name of the target.
- after_targets: The name of the AfterTargets element.
- before_targets: The name of the BeforeTargets element.
- depends_on: The name of the DependsOn element.
- compute_output: The name of the ComputeOutput element.
- dirs_to_make: The name of the DirsToMake element.
- inputs: The name of the _inputs element.
- tlog: The name of the _tlog element.
- extension: The extension this rule applies to.
- description: The message displayed when this rule is invoked.
- additional_dependencies: A string listing additional dependencies.
- outputs: The outputs of this rule.
- command: The command used to run the rule.
- """
-
- def __init__(self, rule, spec):
- self.display_name = rule['rule_name']
- # Ensure the rule name contains only letters, digits and underscores.
- self.rule_name = re.sub(r'\W', '_', self.display_name)
- # Create the various element names, following the example set by the
- # Visual Studio 2008 to 2010 conversion. I don't know if VS2010
- # is sensitive to the exact names.
- self.target_name = '_' + self.rule_name
- self.after_targets = self.rule_name + 'AfterTargets'
- self.before_targets = self.rule_name + 'BeforeTargets'
- self.depends_on = self.rule_name + 'DependsOn'
- self.compute_output = 'Compute%sOutput' % self.rule_name
- self.dirs_to_make = self.rule_name + 'DirsToMake'
- self.inputs = self.rule_name + '_inputs'
- self.tlog = self.rule_name + '_tlog'
- self.extension = rule['extension']
- if not self.extension.startswith('.'):
- self.extension = '.' + self.extension
-
- self.description = MSVSSettings.ConvertVCMacrosToMSBuild(
- rule.get('message', self.rule_name))
- old_additional_dependencies = _FixPaths(rule.get('inputs', []))
- self.additional_dependencies = (
- ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i)
- for i in old_additional_dependencies]))
- old_outputs = _FixPaths(rule.get('outputs', []))
- self.outputs = ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i)
- for i in old_outputs])
- old_command = _BuildCommandLineForRule(spec, rule, has_input_path=True,
- do_setup_env=True)
- self.command = MSVSSettings.ConvertVCMacrosToMSBuild(old_command)
-
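- # Example: a rule named 'compile-idl' with extension 'idl' yields
- # rule_name 'compile_idl', target_name '_compile_idl', compute_output
- # 'Computecompile_idlOutput' and extension '.idl'.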
-
-def _GenerateMSBuildRulePropsFile(props_path, msbuild_rules):
- """Generate the .props file."""
- content = ['Project',
- {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}]
- for rule in msbuild_rules:
- content.extend([
- ['PropertyGroup',
- {'Condition': "'$(%s)' == '' and '$(%s)' == '' and "
- "'$(ConfigurationType)' != 'Makefile'" % (rule.before_targets,
- rule.after_targets)
- },
- [rule.before_targets, 'Midl'],
- [rule.after_targets, 'CustomBuild'],
- ],
- ['PropertyGroup',
- [rule.depends_on,
- {'Condition': "'$(ConfigurationType)' != 'Makefile'"},
- '_SelectedFiles;$(%s)' % rule.depends_on
- ],
- ],
- ['ItemDefinitionGroup',
- [rule.rule_name,
- ['CommandLineTemplate', rule.command],
- ['Outputs', rule.outputs],
- ['ExecutionDescription', rule.description],
- ['AdditionalDependencies', rule.additional_dependencies],
- ],
- ]
- ])
- easy_xml.WriteXmlIfChanged(content, props_path, pretty=True, win32=True)
-
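- # For a hypothetical rule named 'MyRule', the generated .props file roughly
- # defaults <MyRuleBeforeTargets> to 'Midl' and <MyRuleAfterTargets> to
- # 'CustomBuild', and emits an <ItemDefinitionGroup> carrying the rule's
- # CommandLineTemplate, Outputs, ExecutionDescription and
- # AdditionalDependencies.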
-
-def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
- """Generate the .targets file."""
- content = ['Project',
- {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'
- }
- ]
- item_group = [
- 'ItemGroup',
- ['PropertyPageSchema',
- {'Include': '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'}
- ]
- ]
- for rule in msbuild_rules:
- item_group.append(
- ['AvailableItemName',
- {'Include': rule.rule_name},
- ['Targets', rule.target_name],
- ])
- content.append(item_group)
-
- for rule in msbuild_rules:
- content.append(
- ['UsingTask',
- {'TaskName': rule.rule_name,
- 'TaskFactory': 'XamlTaskFactory',
- 'AssemblyName': 'Microsoft.Build.Tasks.v4.0'
- },
- ['Task', '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'],
- ])
- for rule in msbuild_rules:
- rule_name = rule.rule_name
- target_outputs = '%%(%s.Outputs)' % rule_name
- target_inputs = ('%%(%s.Identity);%%(%s.AdditionalDependencies);'
- '$(MSBuildProjectFile)') % (rule_name, rule_name)
- rule_inputs = '%%(%s.Identity)' % rule_name
- extension_condition = ("'%(Extension)'=='.obj' or "
- "'%(Extension)'=='.res' or "
- "'%(Extension)'=='.rsc' or "
- "'%(Extension)'=='.lib'")
- remove_section = [
- 'ItemGroup',
- {'Condition': "'@(SelectedFiles)' != ''"},
- [rule_name,
- {'Remove': '@(%s)' % rule_name,
- 'Condition': "'%(Identity)' != '@(SelectedFiles)'"
- }
- ]
- ]
- inputs_section = [
- 'ItemGroup',
- [rule.inputs, {'Include': '%%(%s.AdditionalDependencies)' % rule_name}]
- ]
- logging_section = [
- 'ItemGroup',
- [rule.tlog,
- {'Include': '%%(%s.Outputs)' % rule_name,
- 'Condition': ("'%%(%s.Outputs)' != '' and "
- "'%%(%s.ExcludedFromBuild)' != 'true'" %
- (rule_name, rule_name))
- },
- ['Source', "@(%s, '|')" % rule_name],
- ['Inputs', "@(%s -> '%%(Fullpath)', ';')" % rule.inputs],
- ],
- ]
- message_section = [
- 'Message',
- {'Importance': 'High',
- 'Text': '%%(%s.ExecutionDescription)' % rule_name
- }
- ]
- write_tlog_section = [
- 'WriteLinesToFile',
- {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
- "'true'" % (rule.tlog, rule.tlog),
- 'File': '$(IntDir)$(ProjectName).write.1.tlog',
- 'Lines': "^%%(%s.Source);@(%s->'%%(Fullpath)')" % (rule.tlog,
- rule.tlog)
- }
- ]
- read_tlog_section = [
- 'WriteLinesToFile',
- {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
- "'true'" % (rule.tlog, rule.tlog),
- 'File': '$(IntDir)$(ProjectName).read.1.tlog',
- 'Lines': "^%%(%s.Source);%%(%s.Inputs)" % (rule.tlog, rule.tlog)
- }
- ]
- command_and_input_section = [
- rule_name,
- {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
- "'true'" % (rule_name, rule_name),
- 'EchoOff': 'true',
- 'StandardOutputImportance': 'High',
- 'StandardErrorImportance': 'High',
- 'CommandLineTemplate': '%%(%s.CommandLineTemplate)' % rule_name,
- 'AdditionalOptions': '%%(%s.AdditionalOptions)' % rule_name,
- 'Inputs': rule_inputs
- }
- ]
- content.extend([
- ['Target',
- {'Name': rule.target_name,
- 'BeforeTargets': '$(%s)' % rule.before_targets,
- 'AfterTargets': '$(%s)' % rule.after_targets,
- 'Condition': "'@(%s)' != ''" % rule_name,
- 'DependsOnTargets': '$(%s);%s' % (rule.depends_on,
- rule.compute_output),
- 'Outputs': target_outputs,
- 'Inputs': target_inputs
- },
- remove_section,
- inputs_section,
- logging_section,
- message_section,
- write_tlog_section,
- read_tlog_section,
- command_and_input_section,
- ],
- ['PropertyGroup',
- ['ComputeLinkInputsTargets',
- '$(ComputeLinkInputsTargets);',
- '%s;' % rule.compute_output
- ],
- ['ComputeLibInputsTargets',
- '$(ComputeLibInputsTargets);',
- '%s;' % rule.compute_output
- ],
- ],
- ['Target',
- {'Name': rule.compute_output,
- 'Condition': "'@(%s)' != ''" % rule_name
- },
- ['ItemGroup',
- [rule.dirs_to_make,
- {'Condition': "'@(%s)' != '' and "
- "'%%(%s.ExcludedFromBuild)' != 'true'" % (rule_name, rule_name),
- 'Include': '%%(%s.Outputs)' % rule_name
- }
- ],
- ['Link',
- {'Include': '%%(%s.Identity)' % rule.dirs_to_make,
- 'Condition': extension_condition
- }
- ],
- ['Lib',
- {'Include': '%%(%s.Identity)' % rule.dirs_to_make,
- 'Condition': extension_condition
- }
- ],
- ['ImpLib',
- {'Include': '%%(%s.Identity)' % rule.dirs_to_make,
- 'Condition': extension_condition
- }
- ],
- ],
- ['MakeDir',
- {'Directories': ("@(%s->'%%(RootDir)%%(Directory)')" %
- rule.dirs_to_make)
- }
- ]
- ],
- ])
- easy_xml.WriteXmlIfChanged(content, targets_path, pretty=True, win32=True)
-
-
-def _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules):
- """Generate the .xml file."""
- content = [
- 'ProjectSchemaDefinitions',
- {'xmlns': ('clr-namespace:Microsoft.Build.Framework.XamlTypes;'
- 'assembly=Microsoft.Build.Framework'),
- 'xmlns:x': 'http://schemas.microsoft.com/winfx/2006/xaml',
- 'xmlns:sys': 'clr-namespace:System;assembly=mscorlib',
- 'xmlns:transformCallback':
- 'Microsoft.Cpp.Dev10.ConvertPropertyCallback'
- }
- ]
- for rule in msbuild_rules:
- content.extend([
- ['Rule',
- {'Name': rule.rule_name,
- 'PageTemplate': 'tool',
- 'DisplayName': rule.display_name,
- 'Order': '200'
- },
- ['Rule.DataSource',
- ['DataSource',
- {'Persistence': 'ProjectFile',
- 'ItemType': rule.rule_name
- }
- ]
- ],
- ['Rule.Categories',
- ['Category',
- {'Name': 'General'},
- ['Category.DisplayName',
- ['sys:String', 'General'],
- ],
- ],
- ['Category',
- {'Name': 'Command Line',
- 'Subtype': 'CommandLine'
- },
- ['Category.DisplayName',
- ['sys:String', 'Command Line'],
- ],
- ],
- ],
- ['StringListProperty',
- {'Name': 'Inputs',
- 'Category': 'Command Line',
- 'IsRequired': 'true',
- 'Switch': ' '
- },
- ['StringListProperty.DataSource',
- ['DataSource',
- {'Persistence': 'ProjectFile',
- 'ItemType': rule.rule_name,
- 'SourceType': 'Item'
- }
- ]
- ],
- ],
- ['StringProperty',
- {'Name': 'CommandLineTemplate',
- 'DisplayName': 'Command Line',
- 'Visible': 'False',
- 'IncludeInCommandLine': 'False'
- }
- ],
- ['DynamicEnumProperty',
- {'Name': rule.before_targets,
- 'Category': 'General',
- 'EnumProvider': 'Targets',
- 'IncludeInCommandLine': 'False'
- },
- ['DynamicEnumProperty.DisplayName',
- ['sys:String', 'Execute Before'],
- ],
- ['DynamicEnumProperty.Description',
- ['sys:String', 'Specifies the targets for the build customization'
- ' to run before.'
- ],
- ],
- ['DynamicEnumProperty.ProviderSettings',
- ['NameValuePair',
- {'Name': 'Exclude',
- 'Value': '^%s|^Compute' % rule.before_targets
- }
- ]
- ],
- ['DynamicEnumProperty.DataSource',
- ['DataSource',
- {'Persistence': 'ProjectFile',
- 'HasConfigurationCondition': 'true'
- }
- ]
- ],
- ],
- ['DynamicEnumProperty',
- {'Name': rule.after_targets,
- 'Category': 'General',
- 'EnumProvider': 'Targets',
- 'IncludeInCommandLine': 'False'
- },
- ['DynamicEnumProperty.DisplayName',
- ['sys:String', 'Execute After'],
- ],
- ['DynamicEnumProperty.Description',
- ['sys:String', ('Specifies the targets for the build customization'
- ' to run after.')
- ],
- ],
- ['DynamicEnumProperty.ProviderSettings',
- ['NameValuePair',
- {'Name': 'Exclude',
- 'Value': '^%s|^Compute' % rule.after_targets
- }
- ]
- ],
- ['DynamicEnumProperty.DataSource',
- ['DataSource',
- {'Persistence': 'ProjectFile',
- 'ItemType': '',
- 'HasConfigurationCondition': 'true'
- }
- ]
- ],
- ],
- ['StringListProperty',
- {'Name': 'Outputs',
- 'DisplayName': 'Outputs',
- 'Visible': 'False',
- 'IncludeInCommandLine': 'False'
- }
- ],
- ['StringProperty',
- {'Name': 'ExecutionDescription',
- 'DisplayName': 'Execution Description',
- 'Visible': 'False',
- 'IncludeInCommandLine': 'False'
- }
- ],
- ['StringListProperty',
- {'Name': 'AdditionalDependencies',
- 'DisplayName': 'Additional Dependencies',
- 'IncludeInCommandLine': 'False',
- 'Visible': 'false'
- }
- ],
- ['StringProperty',
- {'Subtype': 'AdditionalOptions',
- 'Name': 'AdditionalOptions',
- 'Category': 'Command Line'
- },
- ['StringProperty.DisplayName',
- ['sys:String', 'Additional Options'],
- ],
- ['StringProperty.Description',
- ['sys:String', 'Additional Options'],
- ],
- ],
- ],
- ['ItemType',
- {'Name': rule.rule_name,
- 'DisplayName': rule.display_name
- }
- ],
- ['FileExtension',
- {'Name': '*' + rule.extension,
- 'ContentType': rule.rule_name
- }
- ],
- ['ContentType',
- {'Name': rule.rule_name,
- 'DisplayName': '',
- 'ItemType': rule.rule_name
- }
- ]
- ])
- easy_xml.WriteXmlIfChanged(content, xml_path, pretty=True, win32=True)
-
-
-def _GetConfigurationAndPlatform(name, settings):
- configuration = name.rsplit('_', 1)[0]
- platform = settings.get('msvs_configuration_platform', 'Win32')
- return (configuration, platform)
-
-
-def _GetConfigurationCondition(name, settings):
- return (r"'$(Configuration)|$(Platform)'=='%s|%s'" %
- _GetConfigurationAndPlatform(name, settings))
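- # For example, a configuration named 'Debug' with no explicit
- # 'msvs_configuration_platform' yields
- #   "'$(Configuration)|$(Platform)'=='Debug|Win32'".
- # A trailing '_suffix' in the name (e.g. a hypothetical 'Debug_x64') is
- # stripped by the rsplit above; the platform always comes from settings.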
-
-
-def _GetMSBuildProjectConfigurations(configurations):
- group = ['ItemGroup', {'Label': 'ProjectConfigurations'}]
- for (name, settings) in sorted(configurations.iteritems()):
- configuration, platform = _GetConfigurationAndPlatform(name, settings)
- designation = '%s|%s' % (configuration, platform)
- group.append(
- ['ProjectConfiguration', {'Include': designation},
- ['Configuration', configuration],
- ['Platform', platform]])
- return [group]
-
-
-def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
- namespace = os.path.splitext(gyp_file_name)[0]
- properties = [
- ['PropertyGroup', {'Label': 'Globals'},
- ['ProjectGuid', guid],
- ['Keyword', 'Win32Proj'],
- ['RootNamespace', namespace],
- ['IgnoreWarnCompileDuplicatedFilename', 'true'],
- ]
- ]
-
- if os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or \
- os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64':
- properties[0].append(['PreferredToolArchitecture', 'x64'])
-
- if spec.get('msvs_enable_winrt'):
- properties[0].append(['DefaultLanguage', 'en-US'])
- properties[0].append(['AppContainerApplication', 'true'])
- if spec.get('msvs_application_type_revision'):
- app_type_revision = spec.get('msvs_application_type_revision')
- properties[0].append(['ApplicationTypeRevision', app_type_revision])
- else:
- properties[0].append(['ApplicationTypeRevision', '8.1'])
-
- if spec.get('msvs_target_platform_version'):
- target_platform_version = spec.get('msvs_target_platform_version')
- properties[0].append(['WindowsTargetPlatformVersion',
- target_platform_version])
- if spec.get('msvs_target_platform_minversion'):
- target_platform_minversion = spec.get('msvs_target_platform_minversion')
- properties[0].append(['WindowsTargetPlatformMinVersion',
- target_platform_minversion])
- else:
- properties[0].append(['WindowsTargetPlatformMinVersion',
- target_platform_version])
- if spec.get('msvs_enable_winphone'):
- properties[0].append(['ApplicationType', 'Windows Phone'])
- else:
- properties[0].append(['ApplicationType', 'Windows Store'])
-
- return properties
-
-
-def _GetMSBuildConfigurationDetails(spec, build_file):
- properties = {}
- for name, settings in spec['configurations'].iteritems():
- msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file)
- condition = _GetConfigurationCondition(name, settings)
- character_set = msbuild_attributes.get('CharacterSet')
- _AddConditionalProperty(properties, condition, 'ConfigurationType',
- msbuild_attributes['ConfigurationType'])
- if character_set:
- if 'msvs_enable_winrt' not in spec:
- _AddConditionalProperty(properties, condition, 'CharacterSet',
- character_set)
- return _GetMSBuildPropertyGroup(spec, 'Configuration', properties)
-
-
-def _GetMSBuildLocalProperties(msbuild_toolset):
- # Currently the only local property we support is PlatformToolset
- properties = {}
- if msbuild_toolset:
- properties = [
- ['PropertyGroup', {'Label': 'Locals'},
- ['PlatformToolset', msbuild_toolset],
- ]
- ]
- return properties
-
-
-def _GetMSBuildPropertySheets(configurations):
- user_props = r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props'
- additional_props = {}
- props_specified = False
- for name, settings in sorted(configurations.iteritems()):
- configuration = _GetConfigurationCondition(name, settings)
- if 'msbuild_props' in settings:
- additional_props[configuration] = _FixPaths(settings['msbuild_props'])
- props_specified = True
- else:
- additional_props[configuration] = ''
-
- if not props_specified:
- return [
- ['ImportGroup',
- {'Label': 'PropertySheets'},
- ['Import',
- {'Project': user_props,
- 'Condition': "exists('%s')" % user_props,
- 'Label': 'LocalAppDataPlatform'
- }
- ]
- ]
- ]
- else:
- sheets = []
- for condition, props in additional_props.iteritems():
- import_group = [
- 'ImportGroup',
- {'Label': 'PropertySheets',
- 'Condition': condition
- },
- ['Import',
- {'Project': user_props,
- 'Condition': "exists('%s')" % user_props,
- 'Label': 'LocalAppDataPlatform'
- }
- ]
- ]
- for props_file in props:
- import_group.append(['Import', {'Project': props_file}])
- sheets.append(import_group)
- return sheets
-
-
-def _ConvertMSVSBuildAttributes(spec, config, build_file):
- config_type = _GetMSVSConfigurationType(spec, build_file)
- msvs_attributes = _GetMSVSAttributes(spec, config, config_type)
- msbuild_attributes = {}
- for a in msvs_attributes:
- if a in ['IntermediateDirectory', 'OutputDirectory']:
- directory = MSVSSettings.ConvertVCMacrosToMSBuild(msvs_attributes[a])
- if not directory.endswith('\\'):
- directory += '\\'
- msbuild_attributes[a] = directory
- elif a == 'CharacterSet':
- msbuild_attributes[a] = _ConvertMSVSCharacterSet(msvs_attributes[a])
- elif a == 'ConfigurationType':
- msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a])
- else:
- print 'Warning: Do not know how to convert MSVS attribute ' + a
- return msbuild_attributes
-
-
-def _ConvertMSVSCharacterSet(char_set):
- if char_set.isdigit():
- char_set = {
- '0': 'MultiByte',
- '1': 'Unicode',
- '2': 'MultiByte',
- }[char_set]
- return char_set
-
-
-def _ConvertMSVSConfigurationType(config_type):
- if config_type.isdigit():
- config_type = {
- '1': 'Application',
- '2': 'DynamicLibrary',
- '4': 'StaticLibrary',
- '10': 'Utility'
- }[config_type]
- return config_type
-
-
-def _GetMSBuildAttributes(spec, config, build_file):
- if 'msbuild_configuration_attributes' not in config:
- msbuild_attributes = _ConvertMSVSBuildAttributes(spec, config, build_file)
- else:
- config_type = _GetMSVSConfigurationType(spec, build_file)
- config_type = _ConvertMSVSConfigurationType(config_type)
- msbuild_attributes = config.get('msbuild_configuration_attributes', {})
- msbuild_attributes.setdefault('ConfigurationType', config_type)
- output_dir = msbuild_attributes.get('OutputDirectory',
- '$(SolutionDir)$(Configuration)')
- msbuild_attributes['OutputDirectory'] = _FixPath(output_dir) + '\\'
- if 'IntermediateDirectory' not in msbuild_attributes:
- intermediate = _FixPath('$(Configuration)') + '\\'
- msbuild_attributes['IntermediateDirectory'] = intermediate
- if 'CharacterSet' in msbuild_attributes:
- msbuild_attributes['CharacterSet'] = _ConvertMSVSCharacterSet(
- msbuild_attributes['CharacterSet'])
- if 'TargetName' not in msbuild_attributes:
- prefix = spec.get('product_prefix', '')
- product_name = spec.get('product_name', '$(ProjectName)')
- target_name = prefix + product_name
- msbuild_attributes['TargetName'] = target_name
-
- if spec.get('msvs_external_builder'):
- external_out_dir = spec.get('msvs_external_builder_out_dir', '.')
- msbuild_attributes['OutputDirectory'] = _FixPath(external_out_dir) + '\\'
-
- # Make sure that 'TargetPath' matches 'Lib.OutputFile' or 'Link.OutputFile'
- # (depending on the tool used) to avoid MSB8012 warning.
- msbuild_tool_map = {
- 'executable': 'Link',
- 'shared_library': 'Link',
- 'loadable_module': 'Link',
- 'static_library': 'Lib',
- }
- msbuild_tool = msbuild_tool_map.get(spec['type'])
- if msbuild_tool:
- msbuild_settings = config['finalized_msbuild_settings']
- out_file = msbuild_settings[msbuild_tool].get('OutputFile')
- if out_file:
- msbuild_attributes['TargetPath'] = _FixPath(out_file)
- target_ext = msbuild_settings[msbuild_tool].get('TargetExt')
- if target_ext:
- msbuild_attributes['TargetExt'] = target_ext
-
- return msbuild_attributes
-
-
-def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
- # TODO(jeanluc) We could optimize out the following and do it only if
- # there are actions.
- # TODO(jeanluc) Handle the equivalent of setting 'CYGWIN=nontsec'.
- new_paths = []
- cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])[0]
- if cygwin_dirs:
- cyg_path = '$(MSBuildProjectDirectory)\\%s\\bin\\' % _FixPath(cygwin_dirs)
- new_paths.append(cyg_path)
- # TODO(jeanluc) Change the convention to have both a cygwin_dir and a
- # python_dir.
- python_path = cyg_path.replace('cygwin\\bin', 'python_26')
- new_paths.append(python_path)
- if new_paths:
- new_paths = '$(ExecutablePath);' + ';'.join(new_paths)
-
- properties = {}
- for (name, configuration) in sorted(configurations.iteritems()):
- condition = _GetConfigurationCondition(name, configuration)
- attributes = _GetMSBuildAttributes(spec, configuration, build_file)
- msbuild_settings = configuration['finalized_msbuild_settings']
- _AddConditionalProperty(properties, condition, 'IntDir',
- attributes['IntermediateDirectory'])
- _AddConditionalProperty(properties, condition, 'OutDir',
- attributes['OutputDirectory'])
- _AddConditionalProperty(properties, condition, 'TargetName',
- attributes['TargetName'])
-
- if attributes.get('TargetPath'):
- _AddConditionalProperty(properties, condition, 'TargetPath',
- attributes['TargetPath'])
- if attributes.get('TargetExt'):
- _AddConditionalProperty(properties, condition, 'TargetExt',
- attributes['TargetExt'])
-
- if new_paths:
- _AddConditionalProperty(properties, condition, 'ExecutablePath',
- new_paths)
- tool_settings = msbuild_settings.get('', {})
- for name, value in sorted(tool_settings.iteritems()):
- formatted_value = _GetValueFormattedForMSBuild('', name, value)
- _AddConditionalProperty(properties, condition, name, formatted_value)
- return _GetMSBuildPropertyGroup(spec, None, properties)
-
-
-def _AddConditionalProperty(properties, condition, name, value):
- """Adds a property / conditional value pair to a dictionary.
-
- Arguments:
- properties: The dictionary to be modified. The key is the name of the
- property. The value is itself a dictionary; its key is the value and
- the value a list of conditions for which this value is true.
- condition: The condition under which the named property has the value.
- name: The name of the property.
- value: The value of the property.
- """
- if name not in properties:
- properties[name] = {}
- values = properties[name]
- if value not in values:
- values[value] = []
- conditions = values[value]
- conditions.append(condition)
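- # Sketch of the structure this builds, with illustrative values:
- #   properties == {
- #     'IntDir': {
- #       'Debug\\': ["'$(Configuration)|$(Platform)'=='Debug|Win32'"],
- #       'Release\\': ["'$(Configuration)|$(Platform)'=='Release|Win32'"],
- #     },
- #   }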
-
-
- # Regex for msvs variable references (i.e. $(FOO)).
-MSVS_VARIABLE_REFERENCE = re.compile(r'\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)')
-
-
-def _GetMSBuildPropertyGroup(spec, label, properties):
- """Returns a PropertyGroup definition for the specified properties.
-
- Arguments:
- spec: The target project dict.
- label: An optional label for the PropertyGroup.
- properties: The dictionary to be converted. The key is the name of the
- property. The value is itself a dictionary; its key is the value and
- the value a list of conditions for which this value is true.
- """
- group = ['PropertyGroup']
- if label:
- group.append({'Label': label})
- num_configurations = len(spec['configurations'])
- def GetEdges(node):
- # Use a definition of edges such that user_of_variable -> used_variable.
- # This happens to be easier in this case, since a variable's
- # definition contains all variables it references in a single string.
- edges = set()
- for value in sorted(properties[node].keys()):
- # Add to edges all $(...) references to variables.
- #
- # Variable references that refer to names not in properties are excluded.
- # These can exist, for instance, to refer to built-in definitions like
- # $(SolutionDir).
- #
- # Self-references are ignored; self-reference is used in a few places to
- # append to the default value, e.g. PATH=$(PATH);other_path.
- edges.update(set([v for v in MSVS_VARIABLE_REFERENCE.findall(value)
- if v in properties and v != node]))
- return edges
- properties_ordered = gyp.common.TopologicallySorted(
- properties.keys(), GetEdges)
- # Walk properties in the reverse of a topological sort on
- # user_of_variable -> used_variable as this ensures variables are
- # defined before they are used.
- # NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
- for name in reversed(properties_ordered):
- values = properties[name]
- for value, conditions in sorted(values.iteritems()):
- if len(conditions) == num_configurations:
- # If the value is the same for all configurations,
- # just add one unconditional entry.
- group.append([name, value])
- else:
- for condition in conditions:
- group.append([name, {'Condition': condition}, value])
- return [group]
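- # Minimal illustration of the ordering above, with hypothetical values: if
- #   properties == {'PATH': {'$(QUXDIR);$(PATH)': [cond]},
- #                  'QUXDIR': {'$(SolutionDir)qux': [cond]}}
- # then GetEdges('PATH') == set(['QUXDIR']) ($(PATH) is a self-reference and
- # $(SolutionDir) is not in properties), and the reversed topological walk
- # emits QUXDIR before PATH, defining the variable before it is referenced.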
-
-
-def _GetMSBuildToolSettingsSections(spec, configurations):
- groups = []
- for (name, configuration) in sorted(configurations.iteritems()):
- msbuild_settings = configuration['finalized_msbuild_settings']
- group = ['ItemDefinitionGroup',
- {'Condition': _GetConfigurationCondition(name, configuration)}
- ]
- for tool_name, tool_settings in sorted(msbuild_settings.iteritems()):
- # Skip the tool named '' which is a holder of global settings handled
- # by _GetMSBuildConfigurationGlobalProperties.
- if tool_name:
- if tool_settings:
- tool = [tool_name]
- for name, value in sorted(tool_settings.iteritems()):
- formatted_value = _GetValueFormattedForMSBuild(tool_name, name,
- value)
- tool.append([name, formatted_value])
- group.append(tool)
- groups.append(group)
- return groups
-
-
-def _FinalizeMSBuildSettings(spec, configuration):
- if 'msbuild_settings' in configuration:
- converted = False
- msbuild_settings = configuration['msbuild_settings']
- MSVSSettings.ValidateMSBuildSettings(msbuild_settings)
- else:
- converted = True
- msvs_settings = configuration.get('msvs_settings', {})
- msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings)
- include_dirs, midl_include_dirs, resource_include_dirs = \
- _GetIncludeDirs(configuration)
- libraries = _GetLibraries(spec)
- library_dirs = _GetLibraryDirs(configuration)
- out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True)
- target_ext = _GetOutputTargetExt(spec)
- defines = _GetDefines(configuration)
- if converted:
- # Visual Studio 2010 has TR1
- defines = [d for d in defines if d != '_HAS_TR1=0']
- # Warn of ignored settings
- ignored_settings = ['msvs_tool_files']
- for ignored_setting in ignored_settings:
- value = configuration.get(ignored_setting)
- if value:
- print ('Warning: The automatic conversion to MSBuild does not handle '
- '%s. Ignoring setting of %s' % (ignored_setting, str(value)))
-
- defines = [_EscapeCppDefineForMSBuild(d) for d in defines]
- disabled_warnings = _GetDisabledWarnings(configuration)
- prebuild = configuration.get('msvs_prebuild')
- postbuild = configuration.get('msvs_postbuild')
- def_file = _GetModuleDefinition(spec)
- precompiled_header = configuration.get('msvs_precompiled_header')
-
- # Add the information to the appropriate tool
- # TODO(jeanluc) We could optimize and generate these settings only if
- # the corresponding files are found, e.g. don't generate ResourceCompile
- # if you don't have any resources.
- _ToolAppend(msbuild_settings, 'ClCompile',
- 'AdditionalIncludeDirectories', include_dirs)
- _ToolAppend(msbuild_settings, 'Midl',
- 'AdditionalIncludeDirectories', midl_include_dirs)
- _ToolAppend(msbuild_settings, 'ResourceCompile',
- 'AdditionalIncludeDirectories', resource_include_dirs)
- # Add in libraries; note that even for empty libraries we want this
- # set, to prevent inheriting default libraries from the environment.
- _ToolSetOrAppend(msbuild_settings, 'Link', 'AdditionalDependencies',
- libraries)
- _ToolAppend(msbuild_settings, 'Link', 'AdditionalLibraryDirectories',
- library_dirs)
- if out_file:
- _ToolAppend(msbuild_settings, msbuild_tool, 'OutputFile', out_file,
- only_if_unset=True)
- if target_ext:
- _ToolAppend(msbuild_settings, msbuild_tool, 'TargetExt', target_ext,
- only_if_unset=True)
- # Add defines.
- _ToolAppend(msbuild_settings, 'ClCompile',
- 'PreprocessorDefinitions', defines)
- _ToolAppend(msbuild_settings, 'ResourceCompile',
- 'PreprocessorDefinitions', defines)
- # Add disabled warnings.
- _ToolAppend(msbuild_settings, 'ClCompile',
- 'DisableSpecificWarnings', disabled_warnings)
- # Turn on precompiled headers if appropriate.
- if precompiled_header:
- precompiled_header = os.path.split(precompiled_header)[1]
- _ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'Use')
- _ToolAppend(msbuild_settings, 'ClCompile',
- 'PrecompiledHeaderFile', precompiled_header)
- _ToolAppend(msbuild_settings, 'ClCompile',
- 'ForcedIncludeFiles', [precompiled_header])
- else:
- _ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'NotUsing')
- # Turn off WinRT compilation
- _ToolAppend(msbuild_settings, 'ClCompile', 'CompileAsWinRT', 'false')
- # Turn on import libraries if appropriate
- if spec.get('msvs_requires_importlibrary'):
- _ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'false')
- # Loadable modules don't generate import libraries;
- # tell dependent projects to not expect one.
- if spec['type'] == 'loadable_module':
- _ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'true')
- # Set the module definition file if any.
- if def_file:
- _ToolAppend(msbuild_settings, 'Link', 'ModuleDefinitionFile', def_file)
- configuration['finalized_msbuild_settings'] = msbuild_settings
- if prebuild:
- _ToolAppend(msbuild_settings, 'PreBuildEvent', 'Command', prebuild)
- if postbuild:
- _ToolAppend(msbuild_settings, 'PostBuildEvent', 'Command', postbuild)
-
-
-def _GetValueFormattedForMSBuild(tool_name, name, value):
- if type(value) == list:
- # For some settings, VS2010 does not automatically extend the settings.
- # TODO(jeanluc) Is this what we want?
- if name in ['AdditionalIncludeDirectories',
- 'AdditionalLibraryDirectories',
- 'AdditionalOptions',
- 'DelayLoadDLLs',
- 'DisableSpecificWarnings',
- 'PreprocessorDefinitions']:
- value.append('%%(%s)' % name)
- # For most tools, entries in a list should be separated with ';' but some
- # settings use a space. Check for those first.
- exceptions = {
- 'ClCompile': ['AdditionalOptions'],
- 'Link': ['AdditionalOptions'],
- 'Lib': ['AdditionalOptions']}
- if tool_name in exceptions and name in exceptions[tool_name]:
- char = ' '
- else:
- char = ';'
- formatted_value = char.join(
- [MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in value])
- else:
- formatted_value = MSVSSettings.ConvertVCMacrosToMSBuild(value)
- return formatted_value
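- # For example, with illustrative values:
- #   _GetValueFormattedForMSBuild('ClCompile', 'PreprocessorDefinitions',
- #                                ['NDEBUG', 'WIN32'])
- # returns 'NDEBUG;WIN32;%(PreprocessorDefinitions)', while
- #   _GetValueFormattedForMSBuild('ClCompile', 'AdditionalOptions', ['/MP'])
- # returns '/MP %(AdditionalOptions)' per the space-separated exceptions
- # table above.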
-
-
-def _VerifySourcesExist(sources, root_dir):
- """Verifies that all source files exist on disk.
-
- Checks that all regular source files, i.e. not created at run time,
- exist on disk. Missing files cause needless recompilation but no otherwise
- visible errors.
-
- Arguments:
- sources: A recursive list of Filter/file names.
- root_dir: The root directory for the relative path names.
- Returns:
- A list of source files that cannot be found on disk.
- """
- missing_sources = []
- for source in sources:
- if isinstance(source, MSVSProject.Filter):
- missing_sources.extend(_VerifySourcesExist(source.contents, root_dir))
- else:
- if '$' not in source:
- full_path = os.path.join(root_dir, source)
- if not os.path.exists(full_path):
- missing_sources.append(full_path)
- return missing_sources
-
-
-def _GetMSBuildSources(spec, sources, exclusions, rule_dependencies,
- extension_to_rule_name, actions_spec,
- sources_handled_by_action, list_excluded):
- groups = ['none', 'masm', 'midl', 'include', 'compile', 'resource', 'rule',
- 'rule_dependency']
- grouped_sources = {}
- for g in groups:
- grouped_sources[g] = []
-
- _AddSources2(spec, sources, exclusions, grouped_sources,
- rule_dependencies, extension_to_rule_name,
- sources_handled_by_action, list_excluded)
- sources = []
- for g in groups:
- if grouped_sources[g]:
- sources.append(['ItemGroup'] + grouped_sources[g])
- if actions_spec:
- sources.append(['ItemGroup'] + actions_spec)
- return sources
-
-
-def _AddSources2(spec, sources, exclusions, grouped_sources,
- rule_dependencies, extension_to_rule_name,
- sources_handled_by_action,
- list_excluded):
- extensions_excluded_from_precompile = []
- for source in sources:
- if isinstance(source, MSVSProject.Filter):
- _AddSources2(spec, source.contents, exclusions, grouped_sources,
- rule_dependencies, extension_to_rule_name,
- sources_handled_by_action,
- list_excluded)
- else:
- if source not in sources_handled_by_action:
- detail = []
- excluded_configurations = exclusions.get(source, [])
- if len(excluded_configurations) == len(spec['configurations']):
- detail.append(['ExcludedFromBuild', 'true'])
- else:
- for config_name, configuration in sorted(excluded_configurations):
- condition = _GetConfigurationCondition(config_name, configuration)
- detail.append(['ExcludedFromBuild',
- {'Condition': condition},
- 'true'])
- # Add precompile if needed
- for config_name, configuration in spec['configurations'].iteritems():
- precompiled_source = configuration.get('msvs_precompiled_source', '')
- if precompiled_source != '':
- precompiled_source = _FixPath(precompiled_source)
- if not extensions_excluded_from_precompile:
- # If the precompiled header is generated by a C source, we must
- # not try to use it for C++ sources, and vice versa.
- basename, extension = os.path.splitext(precompiled_source)
- if extension == '.c':
- extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx']
- else:
- extensions_excluded_from_precompile = ['.c']
-
- if precompiled_source == source:
- condition = _GetConfigurationCondition(config_name, configuration)
- detail.append(['PrecompiledHeader',
- {'Condition': condition},
- 'Create'
- ])
- else:
- # Turn off precompiled header usage for source files of a
- # different type than the file that generated the
- # precompiled header.
- for extension in extensions_excluded_from_precompile:
- if source.endswith(extension):
- detail.append(['PrecompiledHeader', ''])
- detail.append(['ForcedIncludeFiles', ''])
-
- group, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
- extension_to_rule_name)
- grouped_sources[group].append([element, {'Include': source}] + detail)
-
-
-def _GetMSBuildProjectReferences(project):
- references = []
- if project.dependencies:
- group = ['ItemGroup']
- for dependency in project.dependencies:
- guid = dependency.guid
- project_dir = os.path.split(project.path)[0]
- relative_path = gyp.common.RelativePath(dependency.path, project_dir)
- project_ref = ['ProjectReference',
- {'Include': relative_path},
- ['Project', guid],
- ['ReferenceOutputAssembly', 'false']
- ]
- for config in dependency.spec.get('configurations', {}).itervalues():
- # If it's disabled in any config, turn it off in the reference.
- if config.get('msvs_2010_disable_uldi_when_referenced', 0):
- project_ref.append(['UseLibraryDependencyInputs', 'false'])
- break
- group.append(project_ref)
- references.append(group)
- return references
-
-
-def _GenerateMSBuildProject(project, options, version, generator_flags):
- spec = project.spec
- configurations = spec['configurations']
- project_dir, project_file_name = os.path.split(project.path)
- gyp.common.EnsureDirExists(project.path)
- # Prepare list of sources and excluded sources.
- gyp_path = _NormalizedSource(project.build_file)
- relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
-
- gyp_file = os.path.split(project.build_file)[1]
- sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
- gyp_file)
- # Add rules.
- actions_to_add = {}
- props_files_of_rules = set()
- targets_files_of_rules = set()
- rule_dependencies = set()
- extension_to_rule_name = {}
- list_excluded = generator_flags.get('msvs_list_excluded_files', True)
-
- # Don't generate rules if we are using an external builder like ninja.
- if not spec.get('msvs_external_builder'):
- _GenerateRulesForMSBuild(project_dir, options, spec,
- sources, excluded_sources,
- props_files_of_rules, targets_files_of_rules,
- actions_to_add, rule_dependencies,
- extension_to_rule_name)
- else:
- rules = spec.get('rules', [])
- _AdjustSourcesForRules(rules, sources, excluded_sources, True)
-
- sources, excluded_sources, excluded_idl = (
- _AdjustSourcesAndConvertToFilterHierarchy(spec, options,
- project_dir, sources,
- excluded_sources,
- list_excluded, version))
-
- # Don't add actions if we are using an external builder like ninja.
- if not spec.get('msvs_external_builder'):
- _AddActions(actions_to_add, spec, project.build_file)
- _AddCopies(actions_to_add, spec)
-
- # NOTE: this stanza must appear after all actions have been decided.
- # Don't exclude sources with actions attached, or they won't run.
- excluded_sources = _FilterActionsFromExcluded(
- excluded_sources, actions_to_add)
-
- exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
- actions_spec, sources_handled_by_action = _GenerateActionsForMSBuild(
- spec, actions_to_add)
-
- _GenerateMSBuildFiltersFile(project.path + '.filters', sources,
- rule_dependencies,
- extension_to_rule_name)
- missing_sources = _VerifySourcesExist(sources, project_dir)
-
- for configuration in configurations.itervalues():
- _FinalizeMSBuildSettings(spec, configuration)
-
- # Add attributes to root element
-
- import_default_section = [
- ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.Default.props'}]]
- import_cpp_props_section = [
- ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.props'}]]
- import_cpp_targets_section = [
- ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.targets'}]]
- import_masm_props_section = [
- ['Import',
- {'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.props'}]]
- import_masm_targets_section = [
- ['Import',
- {'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.targets'}]]
- macro_section = [['PropertyGroup', {'Label': 'UserMacros'}]]
-
- content = [
- 'Project',
- {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003',
- 'ToolsVersion': version.ProjectVersion(),
- 'DefaultTargets': 'Build'
- }]
-
- content += _GetMSBuildProjectConfigurations(configurations)
- content += _GetMSBuildGlobalProperties(spec, project.guid, project_file_name)
- content += import_default_section
- content += _GetMSBuildConfigurationDetails(spec, project.build_file)
- if spec.get('msvs_enable_winphone'):
- content += _GetMSBuildLocalProperties('v120_wp81')
- else:
- content += _GetMSBuildLocalProperties(project.msbuild_toolset)
- content += import_cpp_props_section
- content += import_masm_props_section
- content += _GetMSBuildExtensions(props_files_of_rules)
- content += _GetMSBuildPropertySheets(configurations)
- content += macro_section
- content += _GetMSBuildConfigurationGlobalProperties(spec, configurations,
- project.build_file)
- content += _GetMSBuildToolSettingsSections(spec, configurations)
- content += _GetMSBuildSources(
- spec, sources, exclusions, rule_dependencies, extension_to_rule_name,
- actions_spec, sources_handled_by_action, list_excluded)
- content += _GetMSBuildProjectReferences(project)
- content += import_cpp_targets_section
- content += import_masm_targets_section
- content += _GetMSBuildExtensionTargets(targets_files_of_rules)
-
- if spec.get('msvs_external_builder'):
- content += _GetMSBuildExternalBuilderTargets(spec)
-
- # TODO(jeanluc) File a bug to get rid of runas. We had in MSVS:
- # has_run_as = _WriteMSVSUserFile(project.path, version, spec)
-
- easy_xml.WriteXmlIfChanged(content, project.path, pretty=True, win32=True)
-
- return missing_sources
-
-
-def _GetMSBuildExternalBuilderTargets(spec):
- """Return a list of MSBuild targets for external builders.
-
- The "Build" and "Clean" targets are always generated. If the spec contains
- 'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also
- be generated, to support building selected C/C++ files.
-
- Arguments:
- spec: The gyp target spec.
- Returns:
- List of MSBuild 'Target' specs.
- """
- build_cmd = _BuildCommandLineForRuleRaw(
- spec, spec['msvs_external_builder_build_cmd'],
- False, False, False, False)
- build_target = ['Target', {'Name': 'Build'}]
- build_target.append(['Exec', {'Command': build_cmd}])
-
- clean_cmd = _BuildCommandLineForRuleRaw(
- spec, spec['msvs_external_builder_clean_cmd'],
- False, False, False, False)
- clean_target = ['Target', {'Name': 'Clean'}]
- clean_target.append(['Exec', {'Command': clean_cmd}])
-
- targets = [build_target, clean_target]
-
- if spec.get('msvs_external_builder_clcompile_cmd'):
- clcompile_cmd = _BuildCommandLineForRuleRaw(
- spec, spec['msvs_external_builder_clcompile_cmd'],
- False, False, False, False)
- clcompile_target = ['Target', {'Name': 'ClCompile'}]
- clcompile_target.append(['Exec', {'Command': clcompile_cmd}])
- targets.append(clcompile_target)
-
- return targets
-
-
-def _GetMSBuildExtensions(props_files_of_rules):
- extensions = ['ImportGroup', {'Label': 'ExtensionSettings'}]
- for props_file in props_files_of_rules:
- extensions.append(['Import', {'Project': props_file}])
- return [extensions]
-
-
-def _GetMSBuildExtensionTargets(targets_files_of_rules):
- targets_node = ['ImportGroup', {'Label': 'ExtensionTargets'}]
- for targets_file in sorted(targets_files_of_rules):
- targets_node.append(['Import', {'Project': targets_file}])
- return [targets_node]
-
-
-def _GenerateActionsForMSBuild(spec, actions_to_add):
- """Add actions accumulated into an actions_to_add, merging as needed.
-
- Arguments:
- spec: the target project dict
- actions_to_add: dictionary keyed on input name, which maps to a list of
- dicts describing the actions attached to that input file.
-
- Returns:
- A pair of (action specification, the sources handled by this action).
- """
- sources_handled_by_action = OrderedSet()
- actions_spec = []
- for primary_input, actions in actions_to_add.iteritems():
- inputs = OrderedSet()
- outputs = OrderedSet()
- descriptions = []
- commands = []
- for action in actions:
- inputs.update(OrderedSet(action['inputs']))
- outputs.update(OrderedSet(action['outputs']))
- descriptions.append(action['description'])
- cmd = action['command']
- # For most actions, add 'call' so that actions that invoke batch files
- # return and continue executing. msbuild_use_call provides a way to
- # disable this but I have not seen any adverse effect from doing that
- # for everything.
- if action.get('msbuild_use_call', True):
- cmd = 'call ' + cmd
- commands.append(cmd)
- # Add the custom build action for one input file.
- description = ', and also '.join(descriptions)
-
- # We can't join the commands simply with && because the command line will
- # get too long. See also _AddActions: cygwin's setup_env mustn't be called
- # for every invocation or the command that sets the PATH will grow too
- # long.
- command = '\r\n'.join([c + '\r\nif %errorlevel% neq 0 exit /b %errorlevel%'
- for c in commands])
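- # With two commands this yields something like (illustrative):
- #   call do_first.bat
- #   if %errorlevel% neq 0 exit /b %errorlevel%
- #   call do_second.bat
- #   if %errorlevel% neq 0 exit /b %errorlevel%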
- _AddMSBuildAction(spec,
- primary_input,
- inputs,
- outputs,
- command,
- description,
- sources_handled_by_action,
- actions_spec)
- return actions_spec, sources_handled_by_action
-
-
-def _AddMSBuildAction(spec, primary_input, inputs, outputs, cmd, description,
- sources_handled_by_action, actions_spec):
- command = MSVSSettings.ConvertVCMacrosToMSBuild(cmd)
- primary_input = _FixPath(primary_input)
- inputs_array = _FixPaths(inputs)
- outputs_array = _FixPaths(outputs)
- additional_inputs = ';'.join([i for i in inputs_array
- if i != primary_input])
- outputs = ';'.join(outputs_array)
- sources_handled_by_action.add(primary_input)
- action_spec = ['CustomBuild', {'Include': primary_input}]
- action_spec.extend(
- # TODO(jeanluc) 'Document' for all or just if as_sources?
- [['FileType', 'Document'],
- ['Command', command],
- ['Message', description],
- ['Outputs', outputs]
- ])
- if additional_inputs:
- action_spec.append(['AdditionalInputs', additional_inputs])
- actions_spec.append(action_spec)
diff --git a/deps/gyp/pylib/gyp/generator/msvs_test.py b/deps/gyp/pylib/gyp/generator/msvs_test.py
deleted file mode 100755
index c0b021df50..0000000000
--- a/deps/gyp/pylib/gyp/generator/msvs_test.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-""" Unit tests for the msvs.py file. """
-
-import gyp.generator.msvs as msvs
-import unittest
-import StringIO
-
-
-class TestSequenceFunctions(unittest.TestCase):
-
- def setUp(self):
- self.stderr = StringIO.StringIO()
-
- def test_GetLibraries(self):
- self.assertEqual(
- msvs._GetLibraries({}),
- [])
- self.assertEqual(
- msvs._GetLibraries({'libraries': []}),
- [])
- self.assertEqual(
- msvs._GetLibraries({'other':'foo', 'libraries': ['a.lib']}),
- ['a.lib'])
- self.assertEqual(
- msvs._GetLibraries({'libraries': ['-la']}),
- ['a.lib'])
- self.assertEqual(
- msvs._GetLibraries({'libraries': ['a.lib', 'b.lib', 'c.lib', '-lb.lib',
- '-lb.lib', 'd.lib', 'a.lib']}),
- ['c.lib', 'b.lib', 'd.lib', 'a.lib'])
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/deps/gyp/pylib/gyp/generator/ninja.py b/deps/gyp/pylib/gyp/generator/ninja.py
deleted file mode 100644
index b13affe0a1..0000000000
--- a/deps/gyp/pylib/gyp/generator/ninja.py
+++ /dev/null
@@ -1,2410 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import collections
-import copy
-import hashlib
-import json
-import multiprocessing
-import os.path
-import re
-import signal
-import subprocess
-import sys
-import gyp
-import gyp.common
-from gyp.common import OrderedSet
-import gyp.msvs_emulation
-import gyp.MSVSUtil as MSVSUtil
-import gyp.xcode_emulation
-from cStringIO import StringIO
-
-from gyp.common import GetEnvironFallback
-import gyp.ninja_syntax as ninja_syntax
-
-generator_default_variables = {
- 'EXECUTABLE_PREFIX': '',
- 'EXECUTABLE_SUFFIX': '',
- 'STATIC_LIB_PREFIX': 'lib',
- 'STATIC_LIB_SUFFIX': '.a',
- 'SHARED_LIB_PREFIX': 'lib',
-
- # Gyp expects the following variables to be expandable by the build
- # system to the appropriate locations. Ninja prefers paths to be
- # known at gyp time. To resolve this, introduce special
- # variables starting with $! and $| (which begin with a $ so gyp knows they
- # should be treated specially, but are otherwise invalid
- # ninja/shell variables) that are passed to gyp here but expanded
- # before writing out into the target .ninja files; see
- # ExpandSpecial.
- # $! is used for variables that represent a path and that can only appear at
- # the start of a string, while $| is used for variables that can appear
- # anywhere in a string.
- 'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR',
- 'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen',
- 'PRODUCT_DIR': '$!PRODUCT_DIR',
- 'CONFIGURATION_NAME': '$|CONFIGURATION_NAME',
-
- # Special variables that may be used by gyp 'rule' targets.
- # We generate definitions for these variables on the fly when processing a
- # rule.
- 'RULE_INPUT_ROOT': '${root}',
- 'RULE_INPUT_DIRNAME': '${dirname}',
- 'RULE_INPUT_PATH': '${source}',
- 'RULE_INPUT_EXT': '${ext}',
- 'RULE_INPUT_NAME': '${name}',
-}
-
-# Placates pylint.
-generator_additional_non_configuration_keys = []
-generator_additional_path_sections = []
-generator_extra_sources_for_rules = []
-generator_filelist_paths = None
-
-generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
-
-def StripPrefix(arg, prefix):
- if arg.startswith(prefix):
- return arg[len(prefix):]
- return arg
-
-
-def QuoteShellArgument(arg, flavor):
- """Quote a string such that it will be interpreted as a single argument
- by the shell."""
- # Rather than attempting to enumerate the bad shell characters, just
- # whitelist common OK ones and quote anything else.
- if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg):
- return arg # No quoting necessary.
- if flavor == 'win':
- return gyp.msvs_emulation.QuoteForRspFile(arg)
- return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'"
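- # e.g. QuoteShellArgument("don't stop", 'linux') returns
- # 'don'"'"'t stop', which a POSIX shell parses back into one argument.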
-
-
-def Define(d, flavor):
- """Takes a preprocessor define and returns a -D parameter that's ninja- and
- shell-escaped."""
- if flavor == 'win':
- # cl.exe replaces literal # characters with = in preprocessor definitions for
- # some reason. Octal-encode to work around that.
- d = d.replace('#', '\\%03o' % ord('#'))
- return QuoteShellArgument(ninja_syntax.escape('-D' + d), flavor)
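- # For instance, Define('BAR=#1', 'win') first rewrites the '#' to the
- # escaped octal form '\043' (ord('#') == 35) so cl.exe does not mangle it,
- # before the usual ninja escaping and quoting.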
-
-
-def AddArch(output, arch):
- """Adds an arch string to an output path."""
- output, extension = os.path.splitext(output)
- return '%s.%s%s' % (output, arch, extension)
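- # e.g. AddArch('obj/foo.o', 'arm64') -> 'obj/foo.arm64.o'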
-
-
-class Target(object):
- """Target represents the paths used within a single gyp target.
-
- Conceptually, building a single target A is a series of steps:
-
- 1) actions/rules/copies generates source/resources/etc.
- 2) compiles generates .o files
- 3) link generates a binary (library/executable)
- 4) bundle merges the above into a mac bundle
-
- (Any of these steps can be optional.)
-
- From a build ordering perspective, a dependent target B could just
- depend on the last output of this series of steps.
-
- But some dependent commands sometimes need to reach inside the box.
- For example, when linking B it needs to get the path to the static
- library generated by A.
-
- This object stores those paths. To keep things simple, member
- variables only store concrete paths to single files, while methods
- compute derived values like "the last output of the target".
- """
- def __init__(self, type):
- # Gyp type ("static_library", etc.) of this target.
- self.type = type
- # File representing whether any input dependencies necessary for
- # dependent actions have completed.
- self.preaction_stamp = None
- # File representing whether any input dependencies necessary for
- # dependent compiles have completed.
- self.precompile_stamp = None
- # File representing the completion of actions/rules/copies, if any.
- self.actions_stamp = None
- # Path to the output of the link step, if any.
- self.binary = None
- # Path to the file representing the completion of building the bundle,
- # if any.
- self.bundle = None
- # On Windows, incremental linking requires linking against all the .objs
- # that compose a .lib (rather than the .lib itself). That list is stored
- # here. In this case, we also need to save the compile_deps for the target,
- # so that the target that directly depends on the .objs can also depend
- # on those.
- self.component_objs = None
- self.compile_deps = None
- # Windows only. The import .lib is the output of a build step, but
- # because dependents only link against the lib (not both the lib and the
- # dll) we keep track of the import library here.
- self.import_lib = None
-
- def Linkable(self):
- """Return true if this is a target that can be linked against."""
- return self.type in ('static_library', 'shared_library')
-
- def UsesToc(self, flavor):
- """Return true if the target should produce a restat rule based on a TOC
- file."""
- # For bundles, the .TOC should be produced for the binary, not for
- # FinalOutput(). But the naive approach would put the TOC file into the
- # bundle, so don't do this for bundles for now.
- if flavor == 'win' or self.bundle:
- return False
- return self.type in ('shared_library', 'loadable_module')
-
- def PreActionInput(self, flavor):
- """Return the path, if any, that should be used as a dependency of
- any dependent action step."""
- if self.UsesToc(flavor):
- return self.FinalOutput() + '.TOC'
- return self.FinalOutput() or self.preaction_stamp
-
- def PreCompileInput(self):
- """Return the path, if any, that should be used as a dependency of
- any dependent compile step."""
- return self.actions_stamp or self.precompile_stamp
-
- def FinalOutput(self):
- """Return the last output of the target, which depends on all prior
- steps."""
- return self.bundle or self.binary or self.actions_stamp
-
-
-# A small discourse on paths as used within the Ninja build:
-# All files we produce (both at gyp and at build time) appear in the
-# build directory (e.g. out/Debug).
-#
-# Paths within a given .gyp file are always relative to the directory
-# containing the .gyp file. Call these "gyp paths". This includes
-# sources as well as the starting directory a given gyp rule/action
-# expects to be run from. We call the path from the source root to
-# the gyp file the "base directory" within the per-.gyp-file
-# NinjaWriter code.
-#
-# All paths as written into the .ninja files are relative to the build
-# directory. Call these paths "ninja paths".
-#
-# We translate between these two notions of paths with two helper
-# functions:
-#
-# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file)
-# into the equivalent ninja path.
-#
-# - GypPathToUniqueOutput translates a gyp path into a ninja path to write
-# an output file; the result can be namespaced such that it is unique
-# to the input file name as well as the output target name.
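- #
- # For example, with a hypothetical layout of base_dir 'foo' and build_dir
- # 'out/Debug', GypPathToNinja('bar/baz.cc') yields '../../foo/bar/baz.cc',
- # and GypPathToUniqueOutput('bar/baz.o') for a target named 'targ' yields
- # 'obj/foo/bar/targ.baz.o'.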
-
-class NinjaWriter(object):
- def __init__(self, hash_for_rules, target_outputs, base_dir, build_dir,
- output_file, toplevel_build, output_file_name, flavor,
- toplevel_dir=None):
- """
- base_dir: path from source root to the directory containing this gyp
- file; by gyp semantics, all input paths are relative to this
- build_dir: path from source root to build output
- toplevel_dir: path to the toplevel directory
- """
-
- self.hash_for_rules = hash_for_rules
- self.target_outputs = target_outputs
- self.base_dir = base_dir
- self.build_dir = build_dir
- self.ninja = ninja_syntax.Writer(output_file)
- self.toplevel_build = toplevel_build
- self.output_file_name = output_file_name
-
- self.flavor = flavor
- self.abs_build_dir = None
- if toplevel_dir is not None:
- self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir,
- build_dir))
- self.obj_ext = '.obj' if flavor == 'win' else '.o'
- if flavor == 'win':
- # See docstring of msvs_emulation.GenerateEnvironmentFiles().
- self.win_env = {}
- for arch in ('x86', 'x64'):
- self.win_env[arch] = 'environment.' + arch
-
- # Relative path from build output dir to base dir.
- build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir)
- self.build_to_base = os.path.join(build_to_top, base_dir)
- # Relative path from base dir to build dir.
- base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir)
- self.base_to_build = os.path.join(base_to_top, build_dir)
-
- def ExpandSpecial(self, path, product_dir=None):
- """Expand specials like $!PRODUCT_DIR in |path|.
-
- If |product_dir| is None, assumes the cwd is already the product
- dir. Otherwise, |product_dir| is the relative path to the product
- dir.
- """
-
- PRODUCT_DIR = '$!PRODUCT_DIR'
- if PRODUCT_DIR in path:
- if product_dir:
- path = path.replace(PRODUCT_DIR, product_dir)
- else:
- path = path.replace(PRODUCT_DIR + '/', '')
- path = path.replace(PRODUCT_DIR + '\\', '')
- path = path.replace(PRODUCT_DIR, '.')
-
- INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR'
- if INTERMEDIATE_DIR in path:
- int_dir = self.GypPathToUniqueOutput('gen')
- # GypPathToUniqueOutput generates a path relative to the product dir,
- # so insert product_dir in front if it is provided.
- path = path.replace(INTERMEDIATE_DIR,
- os.path.join(product_dir or '', int_dir))
-
- CONFIGURATION_NAME = '$|CONFIGURATION_NAME'
- path = path.replace(CONFIGURATION_NAME, self.config_name)
-
- return path
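- # For example, ExpandSpecial('$!PRODUCT_DIR/foo') yields 'foo' when
- # product_dir is None (the cwd is assumed to be the product dir), and
- # '../../foo' when product_dir is '../..'.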
-
- def ExpandRuleVariables(self, path, root, dirname, source, ext, name):
- if self.flavor == 'win':
- path = self.msvs_settings.ConvertVSMacros(
- path, config=self.config_name)
- path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root)
- path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'],
- dirname)
- path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source)
- path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext)
- path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name)
- return path
-
- def GypPathToNinja(self, path, env=None):
- """Translate a gyp path to a ninja path, optionally expanding environment
- variable references in |path| with |env|.
-
- See the above discourse on path conversions."""
- if env:
- if self.flavor == 'mac':
- path = gyp.xcode_emulation.ExpandEnvVars(path, env)
- elif self.flavor == 'win':
- path = gyp.msvs_emulation.ExpandMacros(path, env)
- if path.startswith('$!'):
- expanded = self.ExpandSpecial(path)
- if self.flavor == 'win':
- expanded = os.path.normpath(expanded)
- return expanded
- if '$|' in path:
- path = self.ExpandSpecial(path)
- assert '$' not in path, path
- return os.path.normpath(os.path.join(self.build_to_base, path))
-
- def GypPathToUniqueOutput(self, path, qualified=True):
- """Translate a gyp path to a ninja path for writing output.
-
- If qualified is True, qualify the resulting filename with the name
- of the target. This is necessary when e.g. compiling the same
- path twice for two separate output targets.
-
- See the above discourse on path conversions."""
-
- path = self.ExpandSpecial(path)
- assert not path.startswith('$'), path
-
- # Translate the path following this scheme:
- # Input: foo/bar.gyp, target targ, references baz/out.o
- # Output: obj/foo/baz/targ.out.o (if qualified)
- # obj/foo/baz/out.o (otherwise)
- # (and obj.host instead of obj for cross-compiles)
- #
- # Why this scheme and not some other one?
- # 1) for a given input, you can compute all derived outputs by matching
- # its path, even if the input is brought via a gyp file with '..'.
- # 2) simple files like libraries and stamps have a simple filename.
-
- obj = 'obj'
- if self.toolset != 'target':
- obj += '.' + self.toolset
-
- path_dir, path_basename = os.path.split(path)
- assert not os.path.isabs(path_dir), (
- "'%s' cannot be an absolute path (see crbug.com/462153)." % path_dir)
-
- if qualified:
- path_basename = self.name + '.' + path_basename
- return os.path.normpath(os.path.join(obj, self.base_dir, path_dir,
- path_basename))
-
- def WriteCollapsedDependencies(self, name, targets, order_only=None):
- """Given a list of targets, return a path for a single file
- representing the result of building all the targets or None.
-
- Uses a stamp file if necessary."""
-
- assert targets == filter(None, targets), targets
- if len(targets) == 0:
- assert not order_only
- return None
- if len(targets) > 1 or order_only:
- stamp = self.GypPathToUniqueOutput(name + '.stamp')
- targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only)
- self.ninja.newline()
- return targets[0]
-
- def _SubninjaNameForArch(self, arch):
- output_file_base = os.path.splitext(self.output_file_name)[0]
- return '%s.%s.ninja' % (output_file_base, arch)
-
- def WriteSpec(self, spec, config_name, generator_flags):
- """The main entry point for NinjaWriter: write the build rules for a spec.
-
- Returns a Target object, which represents the output paths for this spec.
- Returns None if there are no outputs (e.g. a settings-only 'none' type
- target)."""
-
- self.config_name = config_name
- self.name = spec['target_name']
- self.toolset = spec['toolset']
- config = spec['configurations'][config_name]
- self.target = Target(spec['type'])
- self.is_standalone_static_library = bool(
- spec.get('standalone_static_library', 0))
- # Track if this target contains any C++ files, to decide if gcc or g++
- # should be used for linking.
- self.uses_cpp = False
-
- self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
- self.xcode_settings = self.msvs_settings = None
- if self.flavor == 'mac':
- self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
- if self.flavor == 'win':
- self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec,
- generator_flags)
- arch = self.msvs_settings.GetArch(config_name)
- self.ninja.variable('arch', self.win_env[arch])
- self.ninja.variable('cc', '$cl_' + arch)
- self.ninja.variable('cxx', '$cl_' + arch)
- self.ninja.variable('cc_host', '$cl_' + arch)
- self.ninja.variable('cxx_host', '$cl_' + arch)
- self.ninja.variable('asm', '$ml_' + arch)
-
- if self.flavor == 'mac':
- self.archs = self.xcode_settings.GetActiveArchs(config_name)
- if len(self.archs) > 1:
- self.arch_subninjas = dict(
- (arch, ninja_syntax.Writer(
- OpenOutput(os.path.join(self.toplevel_build,
- self._SubninjaNameForArch(arch)),
- 'w')))
- for arch in self.archs)
-
- # Compute predepends for all rules.
- # actions_depends is the dependencies this target depends on before running
- # any of its action/rule/copy steps.
- # compile_depends is the dependencies this target depends on before running
- # any of its compile steps.
- actions_depends = []
- compile_depends = []
- # TODO(evan): it is rather confusing which things are lists and which
- # are strings. Fix these.
- if 'dependencies' in spec:
- for dep in spec['dependencies']:
- if dep in self.target_outputs:
- target = self.target_outputs[dep]
- actions_depends.append(target.PreActionInput(self.flavor))
- compile_depends.append(target.PreCompileInput())
- actions_depends = filter(None, actions_depends)
- compile_depends = filter(None, compile_depends)
- actions_depends = self.WriteCollapsedDependencies('actions_depends',
- actions_depends)
- compile_depends = self.WriteCollapsedDependencies('compile_depends',
- compile_depends)
- self.target.preaction_stamp = actions_depends
- self.target.precompile_stamp = compile_depends
-
- # Write out actions, rules, and copies. These must happen before we
- # compile any sources, so compute a list of predependencies for sources
- # while we do it.
- extra_sources = []
- mac_bundle_depends = []
- self.target.actions_stamp = self.WriteActionsRulesCopies(
- spec, extra_sources, actions_depends, mac_bundle_depends)
-
-    # If we have actions/rules/copies, we depend directly on those, but
-    # otherwise we depend on dependent targets' actions/rules/copies etc.
-    # We never need to explicitly depend on previous targets' link steps,
-    # because no compile ever depends on them.
- compile_depends_stamp = (self.target.actions_stamp or compile_depends)
-
- # Write out the compilation steps, if any.
- link_deps = []
- sources = extra_sources + spec.get('sources', [])
- if sources:
- if self.flavor == 'mac' and len(self.archs) > 1:
- # Write subninja file containing compile and link commands scoped to
- # a single arch if a fat binary is being built.
- for arch in self.archs:
- self.ninja.subninja(self._SubninjaNameForArch(arch))
-
- pch = None
- if self.flavor == 'win':
- gyp.msvs_emulation.VerifyMissingSources(
- sources, self.abs_build_dir, generator_flags, self.GypPathToNinja)
- pch = gyp.msvs_emulation.PrecompiledHeader(
- self.msvs_settings, config_name, self.GypPathToNinja,
- self.GypPathToUniqueOutput, self.obj_ext)
- else:
- pch = gyp.xcode_emulation.MacPrefixHeader(
- self.xcode_settings, self.GypPathToNinja,
- lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang))
- link_deps = self.WriteSources(
- self.ninja, config_name, config, sources, compile_depends_stamp, pch,
- spec)
- # Some actions/rules output 'sources' that are already object files.
- obj_outputs = [f for f in sources if f.endswith(self.obj_ext)]
- if obj_outputs:
- if self.flavor != 'mac' or len(self.archs) == 1:
- link_deps += [self.GypPathToNinja(o) for o in obj_outputs]
- else:
- print "Warning: Actions/rules writing object files don't work with " \
- "multiarch targets, dropping. (target %s)" % spec['target_name']
- elif self.flavor == 'mac' and len(self.archs) > 1:
- link_deps = collections.defaultdict(list)
-
- compile_deps = self.target.actions_stamp or actions_depends
- if self.flavor == 'win' and self.target.type == 'static_library':
- self.target.component_objs = link_deps
- self.target.compile_deps = compile_deps
-
- # Write out a link step, if needed.
- output = None
- is_empty_bundle = not link_deps and not mac_bundle_depends
- if link_deps or self.target.actions_stamp or actions_depends:
- output = self.WriteTarget(spec, config_name, config, link_deps,
- compile_deps)
- if self.is_mac_bundle:
- mac_bundle_depends.append(output)
-
- # Bundle all of the above together, if needed.
- if self.is_mac_bundle:
- output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle)
-
- if not output:
- return None
-
- assert self.target.FinalOutput(), output
- return self.target
-
- def _WinIdlRule(self, source, prebuild, outputs):
- """Handle the implicit VS .idl rule for one source file. Fills |outputs|
- with files that are generated."""
- outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData(
- source, self.config_name)
- outdir = self.GypPathToNinja(outdir)
- def fix_path(path, rel=None):
- path = os.path.join(outdir, path)
- dirname, basename = os.path.split(source)
- root, ext = os.path.splitext(basename)
- path = self.ExpandRuleVariables(
- path, root, dirname, source, ext, basename)
- if rel:
- path = os.path.relpath(path, rel)
- return path
- vars = [(name, fix_path(value, outdir)) for name, value in vars]
- output = [fix_path(p) for p in output]
- vars.append(('outdir', outdir))
- vars.append(('idlflags', flags))
- input = self.GypPathToNinja(source)
- self.ninja.build(output, 'idl', input,
- variables=vars, order_only=prebuild)
- outputs.extend(output)
-
- def WriteWinIdlFiles(self, spec, prebuild):
- """Writes rules to match MSVS's implicit idl handling."""
- assert self.flavor == 'win'
- if self.msvs_settings.HasExplicitIdlRulesOrActions(spec):
- return []
- outputs = []
- for source in filter(lambda x: x.endswith('.idl'), spec['sources']):
- self._WinIdlRule(source, prebuild, outputs)
- return outputs
-
- def WriteActionsRulesCopies(self, spec, extra_sources, prebuild,
- mac_bundle_depends):
- """Write out the Actions, Rules, and Copies steps. Return a path
- representing the outputs of these steps."""
- outputs = []
- if self.is_mac_bundle:
- mac_bundle_resources = spec.get('mac_bundle_resources', [])[:]
- else:
- mac_bundle_resources = []
- extra_mac_bundle_resources = []
-
- if 'actions' in spec:
- outputs += self.WriteActions(spec['actions'], extra_sources, prebuild,
- extra_mac_bundle_resources)
- if 'rules' in spec:
- outputs += self.WriteRules(spec['rules'], extra_sources, prebuild,
- mac_bundle_resources,
- extra_mac_bundle_resources)
- if 'copies' in spec:
- outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends)
-
- if 'sources' in spec and self.flavor == 'win':
- outputs += self.WriteWinIdlFiles(spec, prebuild)
-
- stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)
-
- if self.is_mac_bundle:
- xcassets = self.WriteMacBundleResources(
- extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends)
- partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends)
- self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends)
-
- return stamp
-
- def GenerateDescription(self, verb, message, fallback):
- """Generate and return a description of a build step.
-
- |verb| is the short summary, e.g. ACTION or RULE.
- |message| is a hand-written description, or None if not available.
- |fallback| is the gyp-level name of the step, usable as a fallback.
- """
- if self.toolset != 'target':
- verb += '(%s)' % self.toolset
- if message:
- return '%s %s' % (verb, self.ExpandSpecial(message))
- else:
- return '%s %s: %s' % (verb, self.name, fallback)
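-
-  # A sketch of the resulting descriptions, assuming self.name == 'foo',
-  # a 'host' toolset, and a message that needs no expansion:
-  #   GenerateDescription('ACTION', 'Generating bar', 'bar_x')
-  #       -> 'ACTION(host) Generating bar'
-  #   GenerateDescription('ACTION', None, 'bar_x')
-  #       -> 'ACTION(host) foo: bar_x'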
-
- def WriteActions(self, actions, extra_sources, prebuild,
- extra_mac_bundle_resources):
- # Actions cd into the base directory.
- env = self.GetToolchainEnv()
- all_outputs = []
- for action in actions:
- # First write out a rule for the action.
- name = '%s_%s' % (action['action_name'], self.hash_for_rules)
- description = self.GenerateDescription('ACTION',
- action.get('message', None),
- name)
- is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action)
- if self.flavor == 'win' else False)
- args = action['action']
- depfile = action.get('depfile', None)
- if depfile:
- depfile = self.ExpandSpecial(depfile, self.base_to_build)
- pool = 'console' if int(action.get('ninja_use_console', 0)) else None
- rule_name, _ = self.WriteNewNinjaRule(name, args, description,
- is_cygwin, env, pool,
- depfile=depfile)
-
- inputs = [self.GypPathToNinja(i, env) for i in action['inputs']]
- if int(action.get('process_outputs_as_sources', False)):
- extra_sources += action['outputs']
- if int(action.get('process_outputs_as_mac_bundle_resources', False)):
- extra_mac_bundle_resources += action['outputs']
- outputs = [self.GypPathToNinja(o, env) for o in action['outputs']]
-
- # Then write out an edge using the rule.
- self.ninja.build(outputs, rule_name, inputs,
- order_only=prebuild)
- all_outputs += outputs
-
- self.ninja.newline()
-
- return all_outputs
-
- def WriteRules(self, rules, extra_sources, prebuild,
- mac_bundle_resources, extra_mac_bundle_resources):
- env = self.GetToolchainEnv()
- all_outputs = []
- for rule in rules:
- # Skip a rule with no action and no inputs.
- if 'action' not in rule and not rule.get('rule_sources', []):
- continue
-
- # First write out a rule for the rule action.
- name = '%s_%s' % (rule['rule_name'], self.hash_for_rules)
-
- args = rule['action']
- description = self.GenerateDescription(
- 'RULE',
- rule.get('message', None),
- ('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name)
- is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule)
- if self.flavor == 'win' else False)
- pool = 'console' if int(rule.get('ninja_use_console', 0)) else None
- rule_name, args = self.WriteNewNinjaRule(
- name, args, description, is_cygwin, env, pool)
-
- # TODO: if the command references the outputs directly, we should
- # simplify it to just use $out.
-
- # Rules can potentially make use of some special variables which
- # must vary per source file.
- # Compute the list of variables we'll need to provide.
- special_locals = ('source', 'root', 'dirname', 'ext', 'name')
- needed_variables = set(['source'])
- for argument in args:
- for var in special_locals:
- if '${%s}' % var in argument:
- needed_variables.add(var)
-
- def cygwin_munge(path):
- # pylint: disable=cell-var-from-loop
- if is_cygwin:
- return path.replace('\\', '/')
- return path
-
- inputs = [self.GypPathToNinja(i, env) for i in rule.get('inputs', [])]
-
- # If there are n source files matching the rule, and m additional rule
- # inputs, then adding 'inputs' to each build edge written below will
- # write m * n inputs. Collapsing reduces this to m + n.
- sources = rule.get('rule_sources', [])
- num_inputs = len(inputs)
- if prebuild:
- num_inputs += 1
- if num_inputs > 2 and len(sources) > 2:
- inputs = [self.WriteCollapsedDependencies(
- rule['rule_name'], inputs, order_only=prebuild)]
- prebuild = []
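-      # For instance, 10 sources with 3 extra rule inputs would wire up
-      # 10 * 3 = 30 input references without the stamp, but only
-      # 3 + 10 = 13 with it.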
-
- # For each source file, write an edge that generates all the outputs.
- for source in sources:
- source = os.path.normpath(source)
- dirname, basename = os.path.split(source)
- root, ext = os.path.splitext(basename)
-
- # Gather the list of inputs and outputs, expanding $vars if possible.
- outputs = [self.ExpandRuleVariables(o, root, dirname,
- source, ext, basename)
- for o in rule['outputs']]
-
- if int(rule.get('process_outputs_as_sources', False)):
- extra_sources += outputs
-
- was_mac_bundle_resource = source in mac_bundle_resources
- if was_mac_bundle_resource or \
- int(rule.get('process_outputs_as_mac_bundle_resources', False)):
- extra_mac_bundle_resources += outputs
- # Note: This is n_resources * n_outputs_in_rule. Put to-be-removed
- # items in a set and remove them all in a single pass if this becomes
- # a performance issue.
- if was_mac_bundle_resource:
- mac_bundle_resources.remove(source)
-
- extra_bindings = []
- for var in needed_variables:
- if var == 'root':
- extra_bindings.append(('root', cygwin_munge(root)))
- elif var == 'dirname':
- # '$dirname' is a parameter to the rule action, which means
- # it shouldn't be converted to a Ninja path. But we don't
- # want $!PRODUCT_DIR in there either.
- dirname_expanded = self.ExpandSpecial(dirname, self.base_to_build)
- extra_bindings.append(('dirname', cygwin_munge(dirname_expanded)))
- elif var == 'source':
- # '$source' is a parameter to the rule action, which means
- # it shouldn't be converted to a Ninja path. But we don't
- # want $!PRODUCT_DIR in there either.
- source_expanded = self.ExpandSpecial(source, self.base_to_build)
- extra_bindings.append(('source', cygwin_munge(source_expanded)))
- elif var == 'ext':
- extra_bindings.append(('ext', ext))
- elif var == 'name':
- extra_bindings.append(('name', cygwin_munge(basename)))
- else:
-            assert var is None, repr(var)
-
- outputs = [self.GypPathToNinja(o, env) for o in outputs]
- if self.flavor == 'win':
- # WriteNewNinjaRule uses unique_name for creating an rsp file on win.
- extra_bindings.append(('unique_name',
- hashlib.md5(outputs[0]).hexdigest()))
- self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
- implicit=inputs,
- order_only=prebuild,
- variables=extra_bindings)
-
- all_outputs.extend(outputs)
-
- return all_outputs
-
- def WriteCopies(self, copies, prebuild, mac_bundle_depends):
- outputs = []
- env = self.GetToolchainEnv()
- for copy in copies:
- for path in copy['files']:
- # Normalize the path so trailing slashes don't confuse us.
- path = os.path.normpath(path)
- basename = os.path.split(path)[1]
- src = self.GypPathToNinja(path, env)
- dst = self.GypPathToNinja(os.path.join(copy['destination'], basename),
- env)
- outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild)
- if self.is_mac_bundle:
- # gyp has mac_bundle_resources to copy things into a bundle's
- # Resources folder, but there's no built-in way to copy files to other
- # places in the bundle. Hence, some targets use copies for this. Check
- # if this file is copied into the current bundle, and if so add it to
- # the bundle depends so that dependent targets get rebuilt if the copy
- # input changes.
- if dst.startswith(self.xcode_settings.GetBundleContentsFolderPath()):
- mac_bundle_depends.append(dst)
-
- return outputs
-
- def WriteMacBundleResources(self, resources, bundle_depends):
- """Writes ninja edges for 'mac_bundle_resources'."""
- xcassets = []
- for output, res in gyp.xcode_emulation.GetMacBundleResources(
- generator_default_variables['PRODUCT_DIR'],
- self.xcode_settings, map(self.GypPathToNinja, resources)):
- output = self.ExpandSpecial(output)
- if os.path.splitext(output)[-1] != '.xcassets':
- isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
- self.ninja.build(output, 'mac_tool', res,
-                         variables=[('mactool_cmd', 'copy-bundle-resource'),
-                                    ('binary', isBinary)])
- bundle_depends.append(output)
- else:
- xcassets.append(res)
- return xcassets
-
- def WriteMacXCassets(self, xcassets, bundle_depends):
- """Writes ninja edges for 'mac_bundle_resources' .xcassets files.
-
-    This adds an invocation of 'actool' via the 'mac_tool.py' helper script.
-    It assumes that the asset catalogs define at least one imageset and
-    that an Assets.car file will therefore be generated in the application
-    resources directory. If this is not the case, the step will probably be
-    rerun at each invocation of ninja."""
- if not xcassets:
- return
-
- extra_arguments = {}
- settings_to_arg = {
- 'XCASSETS_APP_ICON': 'app-icon',
- 'XCASSETS_LAUNCH_IMAGE': 'launch-image',
- }
- settings = self.xcode_settings.xcode_settings[self.config_name]
- for settings_key, arg_name in settings_to_arg.iteritems():
- value = settings.get(settings_key)
- if value:
- extra_arguments[arg_name] = value
-
- partial_info_plist = None
- if extra_arguments:
- partial_info_plist = self.GypPathToUniqueOutput(
- 'assetcatalog_generated_info.plist')
- extra_arguments['output-partial-info-plist'] = partial_info_plist
-
- outputs = []
- outputs.append(
- os.path.join(
- self.xcode_settings.GetBundleResourceFolder(),
- 'Assets.car'))
- if partial_info_plist:
- outputs.append(partial_info_plist)
-
- keys = QuoteShellArgument(json.dumps(extra_arguments), self.flavor)
- extra_env = self.xcode_settings.GetPerTargetSettings()
- env = self.GetSortedXcodeEnv(additional_settings=extra_env)
- env = self.ComputeExportEnvString(env)
-
- bundle_depends.extend(self.ninja.build(
- outputs, 'compile_xcassets', xcassets,
- variables=[('env', env), ('keys', keys)]))
- return partial_info_plist
-
- def WriteMacInfoPlist(self, partial_info_plist, bundle_depends):
- """Write build rules for bundle Info.plist files."""
- info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
- generator_default_variables['PRODUCT_DIR'],
- self.xcode_settings, self.GypPathToNinja)
- if not info_plist:
- return
- out = self.ExpandSpecial(out)
- if defines:
- # Create an intermediate file to store preprocessed results.
- intermediate_plist = self.GypPathToUniqueOutput(
- os.path.basename(info_plist))
- defines = ' '.join([Define(d, self.flavor) for d in defines])
- info_plist = self.ninja.build(
- intermediate_plist, 'preprocess_infoplist', info_plist,
-          variables=[('defines', defines)])
-
- env = self.GetSortedXcodeEnv(additional_settings=extra_env)
- env = self.ComputeExportEnvString(env)
-
- if partial_info_plist:
- intermediate_plist = self.GypPathToUniqueOutput('merged_info.plist')
- info_plist = self.ninja.build(
- intermediate_plist, 'merge_infoplist',
- [partial_info_plist, info_plist])
-
- keys = self.xcode_settings.GetExtraPlistItems(self.config_name)
- keys = QuoteShellArgument(json.dumps(keys), self.flavor)
- isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
- self.ninja.build(out, 'copy_infoplist', info_plist,
- variables=[('env', env), ('keys', keys),
- ('binary', isBinary)])
- bundle_depends.append(out)
-
- def WriteSources(self, ninja_file, config_name, config, sources, predepends,
- precompiled_header, spec):
- """Write build rules to compile all of |sources|."""
- if self.toolset == 'host':
- self.ninja.variable('ar', '$ar_host')
- self.ninja.variable('cc', '$cc_host')
- self.ninja.variable('cxx', '$cxx_host')
- self.ninja.variable('ld', '$ld_host')
- self.ninja.variable('ldxx', '$ldxx_host')
- self.ninja.variable('nm', '$nm_host')
- self.ninja.variable('readelf', '$readelf_host')
-
- if self.flavor != 'mac' or len(self.archs) == 1:
- return self.WriteSourcesForArch(
- self.ninja, config_name, config, sources, predepends,
- precompiled_header, spec)
- else:
- return dict((arch, self.WriteSourcesForArch(
- self.arch_subninjas[arch], config_name, config, sources, predepends,
- precompiled_header, spec, arch=arch))
- for arch in self.archs)
-
- def WriteSourcesForArch(self, ninja_file, config_name, config, sources,
- predepends, precompiled_header, spec, arch=None):
- """Write build rules to compile all of |sources|."""
-
- extra_defines = []
- if self.flavor == 'mac':
- cflags = self.xcode_settings.GetCflags(config_name, arch=arch)
- cflags_c = self.xcode_settings.GetCflagsC(config_name)
- cflags_cc = self.xcode_settings.GetCflagsCC(config_name)
- cflags_objc = ['$cflags_c'] + \
- self.xcode_settings.GetCflagsObjC(config_name)
- cflags_objcc = ['$cflags_cc'] + \
- self.xcode_settings.GetCflagsObjCC(config_name)
- elif self.flavor == 'win':
- asmflags = self.msvs_settings.GetAsmflags(config_name)
- cflags = self.msvs_settings.GetCflags(config_name)
- cflags_c = self.msvs_settings.GetCflagsC(config_name)
- cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
- extra_defines = self.msvs_settings.GetComputedDefines(config_name)
-      # See comment at cc_command for why there are two .pdb files.
- pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName(
- config_name, self.ExpandSpecial)
- if not pdbpath_c:
- obj = 'obj'
- if self.toolset != 'target':
- obj += '.' + self.toolset
- pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name))
- pdbpath_c = pdbpath + '.c.pdb'
- pdbpath_cc = pdbpath + '.cc.pdb'
- self.WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c])
- self.WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc])
- self.WriteVariableList(ninja_file, 'pchprefix', [self.name])
- else:
- cflags = config.get('cflags', [])
- cflags_c = config.get('cflags_c', [])
- cflags_cc = config.get('cflags_cc', [])
-
-      # Respect environment variables related to the build, but target-specific
-      # flags can still override them.
- if self.toolset == 'target':
- cflags_c = (os.environ.get('CPPFLAGS', '').split() +
- os.environ.get('CFLAGS', '').split() + cflags_c)
- cflags_cc = (os.environ.get('CPPFLAGS', '').split() +
- os.environ.get('CXXFLAGS', '').split() + cflags_cc)
- elif self.toolset == 'host':
- cflags_c = (os.environ.get('CPPFLAGS_host', '').split() +
- os.environ.get('CFLAGS_host', '').split() + cflags_c)
- cflags_cc = (os.environ.get('CPPFLAGS_host', '').split() +
- os.environ.get('CXXFLAGS_host', '').split() + cflags_cc)
-
- defines = config.get('defines', []) + extra_defines
- self.WriteVariableList(ninja_file, 'defines',
- [Define(d, self.flavor) for d in defines])
- if self.flavor == 'win':
- self.WriteVariableList(ninja_file, 'asmflags',
- map(self.ExpandSpecial, asmflags))
- self.WriteVariableList(ninja_file, 'rcflags',
- [QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
- for f in self.msvs_settings.GetRcflags(config_name,
- self.GypPathToNinja)])
-
- include_dirs = config.get('include_dirs', [])
-
- env = self.GetToolchainEnv()
- if self.flavor == 'win':
- include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs,
- config_name)
- self.WriteVariableList(ninja_file, 'includes',
- [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
- for i in include_dirs])
-
- if self.flavor == 'win':
- midl_include_dirs = config.get('midl_include_dirs', [])
- midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs(
- midl_include_dirs, config_name)
- self.WriteVariableList(ninja_file, 'midl_includes',
- [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
- for i in midl_include_dirs])
-
- pch_commands = precompiled_header.GetPchBuildCommands(arch)
- if self.flavor == 'mac':
- # Most targets use no precompiled headers, so only write these if needed.
- for ext, var in [('c', 'cflags_pch_c'), ('cc', 'cflags_pch_cc'),
- ('m', 'cflags_pch_objc'), ('mm', 'cflags_pch_objcc')]:
- include = precompiled_header.GetInclude(ext, arch)
- if include: ninja_file.variable(var, include)
-
- arflags = config.get('arflags', [])
-
- self.WriteVariableList(ninja_file, 'cflags',
- map(self.ExpandSpecial, cflags))
- self.WriteVariableList(ninja_file, 'cflags_c',
- map(self.ExpandSpecial, cflags_c))
- self.WriteVariableList(ninja_file, 'cflags_cc',
- map(self.ExpandSpecial, cflags_cc))
- if self.flavor == 'mac':
- self.WriteVariableList(ninja_file, 'cflags_objc',
- map(self.ExpandSpecial, cflags_objc))
- self.WriteVariableList(ninja_file, 'cflags_objcc',
- map(self.ExpandSpecial, cflags_objcc))
- self.WriteVariableList(ninja_file, 'arflags',
- map(self.ExpandSpecial, arflags))
- ninja_file.newline()
- outputs = []
- has_rc_source = False
- for source in sources:
- filename, ext = os.path.splitext(source)
- ext = ext[1:]
- obj_ext = self.obj_ext
- if ext in ('cc', 'cpp', 'cxx'):
- command = 'cxx'
- self.uses_cpp = True
- elif ext == 'c' or (ext == 'S' and self.flavor != 'win'):
- command = 'cc'
- elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files.
- command = 'cc_s'
- elif (self.flavor == 'win' and ext == 'asm' and
- not self.msvs_settings.HasExplicitAsmRules(spec)):
- command = 'asm'
- # Add the _asm suffix as msvs is capable of handling .cc and
- # .asm files of the same name without collision.
- obj_ext = '_asm.obj'
- elif self.flavor == 'mac' and ext == 'm':
- command = 'objc'
- elif self.flavor == 'mac' and ext == 'mm':
- command = 'objcxx'
- self.uses_cpp = True
- elif self.flavor == 'win' and ext == 'rc':
- command = 'rc'
- obj_ext = '.res'
- has_rc_source = True
- else:
- # Ignore unhandled extensions.
- continue
- input = self.GypPathToNinja(source)
- output = self.GypPathToUniqueOutput(filename + obj_ext)
- if arch is not None:
- output = AddArch(output, arch)
- implicit = precompiled_header.GetObjDependencies([input], [output], arch)
- variables = []
- if self.flavor == 'win':
- variables, output, implicit = precompiled_header.GetFlagsModifications(
- input, output, implicit, command, cflags_c, cflags_cc,
- self.ExpandSpecial)
- ninja_file.build(output, command, input,
- implicit=[gch for _, _, gch in implicit],
- order_only=predepends, variables=variables)
- outputs.append(output)
-
- if has_rc_source:
- resource_include_dirs = config.get('resource_include_dirs', include_dirs)
- self.WriteVariableList(ninja_file, 'resource_includes',
- [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
- for i in resource_include_dirs])
-
- self.WritePchTargets(ninja_file, pch_commands)
-
- ninja_file.newline()
- return outputs
-
- def WritePchTargets(self, ninja_file, pch_commands):
- """Writes ninja rules to compile prefix headers."""
- if not pch_commands:
- return
-
- for gch, lang_flag, lang, input in pch_commands:
- var_name = {
- 'c': 'cflags_pch_c',
- 'cc': 'cflags_pch_cc',
- 'm': 'cflags_pch_objc',
- 'mm': 'cflags_pch_objcc',
- }[lang]
-
-      lang_to_rule = { 'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx', }
-      cmd = lang_to_rule.get(lang)
- ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)])
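-
-  # A sketch of a single pch_commands entry on the Mac path (tuple shape
-  # per GetPchBuildCommands; paths invented for illustration):
-  #   ('foo.h-c.gch', '-x c-header', 'c', 'foo.h')
-  # produces roughly:
-  #   build foo.h-c.gch: cc foo.h  (with cflags_pch_c = '-x c-header')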
-
- def WriteLink(self, spec, config_name, config, link_deps):
- """Write out a link step. Fills out target.binary. """
- if self.flavor != 'mac' or len(self.archs) == 1:
- return self.WriteLinkForArch(
- self.ninja, spec, config_name, config, link_deps)
- else:
- output = self.ComputeOutput(spec)
- inputs = [self.WriteLinkForArch(self.arch_subninjas[arch], spec,
- config_name, config, link_deps[arch],
- arch=arch)
- for arch in self.archs]
- extra_bindings = []
- build_output = output
- if not self.is_mac_bundle:
- self.AppendPostbuildVariable(extra_bindings, spec, output, output)
-
- # TODO(yyanagisawa): more work needed to fix:
- # https://code.google.com/p/gyp/issues/detail?id=411
- if (spec['type'] in ('shared_library', 'loadable_module') and
- not self.is_mac_bundle):
- extra_bindings.append(('lib', output))
- self.ninja.build([output, output + '.TOC'], 'solipo', inputs,
- variables=extra_bindings)
- else:
- self.ninja.build(build_output, 'lipo', inputs, variables=extra_bindings)
- return output
-
- def WriteLinkForArch(self, ninja_file, spec, config_name, config,
- link_deps, arch=None):
- """Write out a link step. Fills out target.binary. """
- command = {
- 'executable': 'link',
- 'loadable_module': 'solink_module',
- 'shared_library': 'solink',
- }[spec['type']]
- command_suffix = ''
-
- implicit_deps = set()
- solibs = set()
- order_deps = set()
-
- if 'dependencies' in spec:
- # Two kinds of dependencies:
- # - Linkable dependencies (like a .a or a .so): add them to the link line.
- # - Non-linkable dependencies (like a rule that generates a file
- # and writes a stamp file): add them to implicit_deps
- extra_link_deps = set()
- for dep in spec['dependencies']:
- target = self.target_outputs.get(dep)
- if not target:
- continue
- linkable = target.Linkable()
- if linkable:
- new_deps = []
- if (self.flavor == 'win' and
- target.component_objs and
- self.msvs_settings.IsUseLibraryDependencyInputs(config_name)):
- new_deps = target.component_objs
- if target.compile_deps:
- order_deps.add(target.compile_deps)
- elif self.flavor == 'win' and target.import_lib:
- new_deps = [target.import_lib]
- elif target.UsesToc(self.flavor):
- solibs.add(target.binary)
- implicit_deps.add(target.binary + '.TOC')
- else:
- new_deps = [target.binary]
- for new_dep in new_deps:
- if new_dep not in extra_link_deps:
- extra_link_deps.add(new_dep)
- link_deps.append(new_dep)
-
- final_output = target.FinalOutput()
- if not linkable or final_output != target.binary:
- implicit_deps.add(final_output)
-
- extra_bindings = []
- if self.uses_cpp and self.flavor != 'win':
- extra_bindings.append(('ld', '$ldxx'))
-
- output = self.ComputeOutput(spec, arch)
- if arch is None and not self.is_mac_bundle:
- self.AppendPostbuildVariable(extra_bindings, spec, output, output)
-
- is_executable = spec['type'] == 'executable'
- # The ldflags config key is not used on mac or win. On those platforms
- # linker flags are set via xcode_settings and msvs_settings, respectively.
- env_ldflags = os.environ.get('LDFLAGS', '').split()
- if self.flavor == 'mac':
- ldflags = self.xcode_settings.GetLdflags(config_name,
- self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
- self.GypPathToNinja, arch)
- ldflags = env_ldflags + ldflags
- elif self.flavor == 'win':
- manifest_base_name = self.GypPathToUniqueOutput(
- self.ComputeOutputFileName(spec))
- ldflags, intermediate_manifest, manifest_files = \
- self.msvs_settings.GetLdflags(config_name, self.GypPathToNinja,
- self.ExpandSpecial, manifest_base_name,
- output, is_executable,
- self.toplevel_build)
- ldflags = env_ldflags + ldflags
- self.WriteVariableList(ninja_file, 'manifests', manifest_files)
- implicit_deps = implicit_deps.union(manifest_files)
- if intermediate_manifest:
- self.WriteVariableList(
- ninja_file, 'intermediatemanifest', [intermediate_manifest])
- command_suffix = _GetWinLinkRuleNameSuffix(
- self.msvs_settings.IsEmbedManifest(config_name))
- def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja)
- if def_file:
- implicit_deps.add(def_file)
- else:
-      # Respect environment variables related to the build, but target-specific
-      # flags can still override them.
- ldflags = env_ldflags + config.get('ldflags', [])
- if is_executable and len(solibs):
- rpath = 'lib/'
- if self.toolset != 'target':
- rpath += self.toolset
- ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath)
- ldflags.append('-Wl,-rpath-link=%s' % rpath)
- self.WriteVariableList(ninja_file, 'ldflags',
- map(self.ExpandSpecial, ldflags))
-
- library_dirs = config.get('library_dirs', [])
- if self.flavor == 'win':
- library_dirs = [self.msvs_settings.ConvertVSMacros(l, config_name)
- for l in library_dirs]
- library_dirs = ['/LIBPATH:' + QuoteShellArgument(self.GypPathToNinja(l),
- self.flavor)
- for l in library_dirs]
- else:
- library_dirs = [QuoteShellArgument('-L' + self.GypPathToNinja(l),
- self.flavor)
- for l in library_dirs]
-
- libraries = gyp.common.uniquer(map(self.ExpandSpecial,
- spec.get('libraries', [])))
- if self.flavor == 'mac':
- libraries = self.xcode_settings.AdjustLibraries(libraries, config_name)
- elif self.flavor == 'win':
- libraries = self.msvs_settings.AdjustLibraries(libraries)
-
- self.WriteVariableList(ninja_file, 'libs', library_dirs + libraries)
-
- linked_binary = output
-
- if command in ('solink', 'solink_module'):
- extra_bindings.append(('soname', os.path.split(output)[1]))
- extra_bindings.append(('lib',
- gyp.common.EncodePOSIXShellArgument(output)))
- if self.flavor != 'win':
- link_file_list = output
- if self.is_mac_bundle:
- # 'Dependency Framework.framework/Versions/A/Dependency Framework' ->
- # 'Dependency Framework.framework.rsp'
- link_file_list = self.xcode_settings.GetWrapperName()
- if arch:
- link_file_list += '.' + arch
- link_file_list += '.rsp'
-          # If an rspfile contains spaces, ninja surrounds the filename with
-          # quotes and then passes it to open(), creating a file with quotes
-          # in its name (and when looking for the rsp file, the name makes it
-          # through bash, which strips the quotes) :-/
- link_file_list = link_file_list.replace(' ', '_')
- extra_bindings.append(
- ('link_file_list',
- gyp.common.EncodePOSIXShellArgument(link_file_list)))
- if self.flavor == 'win':
- extra_bindings.append(('binary', output))
- if ('/NOENTRY' not in ldflags and
- not self.msvs_settings.GetNoImportLibrary(config_name)):
- self.target.import_lib = output + '.lib'
- extra_bindings.append(('implibflag',
- '/IMPLIB:%s' % self.target.import_lib))
- pdbname = self.msvs_settings.GetPDBName(
- config_name, self.ExpandSpecial, output + '.pdb')
- output = [output, self.target.import_lib]
- if pdbname:
- output.append(pdbname)
- elif not self.is_mac_bundle:
- output = [output, output + '.TOC']
- else:
- command = command + '_notoc'
- elif self.flavor == 'win':
- extra_bindings.append(('binary', output))
- pdbname = self.msvs_settings.GetPDBName(
- config_name, self.ExpandSpecial, output + '.pdb')
- if pdbname:
- output = [output, pdbname]
-
- if len(solibs):
- extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs)))
-
- ninja_file.build(output, command + command_suffix, link_deps,
- implicit=list(implicit_deps),
- order_only=list(order_deps),
- variables=extra_bindings)
- return linked_binary
-
- def WriteTarget(self, spec, config_name, config, link_deps, compile_deps):
- extra_link_deps = any(self.target_outputs.get(dep).Linkable()
- for dep in spec.get('dependencies', [])
- if dep in self.target_outputs)
- if spec['type'] == 'none' or (not link_deps and not extra_link_deps):
- # TODO(evan): don't call this function for 'none' target types, as
- # it doesn't do anything, and we fake out a 'binary' with a stamp file.
- self.target.binary = compile_deps
- self.target.type = 'none'
- elif spec['type'] == 'static_library':
- self.target.binary = self.ComputeOutput(spec)
- if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
- self.is_standalone_static_library):
- self.ninja.build(self.target.binary, 'alink_thin', link_deps,
- order_only=compile_deps)
- else:
- variables = []
- if self.xcode_settings:
- libtool_flags = self.xcode_settings.GetLibtoolflags(config_name)
- if libtool_flags:
- variables.append(('libtool_flags', libtool_flags))
- if self.msvs_settings:
- libflags = self.msvs_settings.GetLibFlags(config_name,
- self.GypPathToNinja)
- variables.append(('libflags', libflags))
-
- if self.flavor != 'mac' or len(self.archs) == 1:
- self.AppendPostbuildVariable(variables, spec,
- self.target.binary, self.target.binary)
- self.ninja.build(self.target.binary, 'alink', link_deps,
- order_only=compile_deps, variables=variables)
- else:
- inputs = []
- for arch in self.archs:
- output = self.ComputeOutput(spec, arch)
- self.arch_subninjas[arch].build(output, 'alink', link_deps[arch],
- order_only=compile_deps,
- variables=variables)
- inputs.append(output)
- # TODO: It's not clear if libtool_flags should be passed to the alink
- # call that combines single-arch .a files into a fat .a file.
- self.AppendPostbuildVariable(variables, spec,
- self.target.binary, self.target.binary)
- self.ninja.build(self.target.binary, 'alink', inputs,
- # FIXME: test proving order_only=compile_deps isn't
- # needed.
- variables=variables)
- else:
- self.target.binary = self.WriteLink(spec, config_name, config, link_deps)
- return self.target.binary
-
- def WriteMacBundle(self, spec, mac_bundle_depends, is_empty):
- assert self.is_mac_bundle
- package_framework = spec['type'] in ('shared_library', 'loadable_module')
- output = self.ComputeMacBundleOutput()
- if is_empty:
- output += '.stamp'
- variables = []
- self.AppendPostbuildVariable(variables, spec, output, self.target.binary,
- is_command_start=not package_framework)
- if package_framework and not is_empty:
- variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
- self.ninja.build(output, 'package_framework', mac_bundle_depends,
- variables=variables)
- else:
- self.ninja.build(output, 'stamp', mac_bundle_depends,
- variables=variables)
- self.target.bundle = output
- return output
-
- def GetToolchainEnv(self, additional_settings=None):
- """Returns the variables toolchain would set for build steps."""
- env = self.GetSortedXcodeEnv(additional_settings=additional_settings)
- if self.flavor == 'win':
- env = self.GetMsvsToolchainEnv(
- additional_settings=additional_settings)
- return env
-
- def GetMsvsToolchainEnv(self, additional_settings=None):
- """Returns the variables Visual Studio would set for build steps."""
- return self.msvs_settings.GetVSMacroEnv('$!PRODUCT_DIR',
- config=self.config_name)
-
- def GetSortedXcodeEnv(self, additional_settings=None):
- """Returns the variables Xcode would set for build steps."""
- assert self.abs_build_dir
- abs_build_dir = self.abs_build_dir
- return gyp.xcode_emulation.GetSortedXcodeEnv(
- self.xcode_settings, abs_build_dir,
- os.path.join(abs_build_dir, self.build_to_base), self.config_name,
- additional_settings)
-
- def GetSortedXcodePostbuildEnv(self):
- """Returns the variables Xcode would set for postbuild steps."""
- postbuild_settings = {}
- # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
- # TODO(thakis): It would be nice to have some general mechanism instead.
- strip_save_file = self.xcode_settings.GetPerTargetSetting(
- 'CHROMIUM_STRIP_SAVE_FILE')
- if strip_save_file:
- postbuild_settings['CHROMIUM_STRIP_SAVE_FILE'] = strip_save_file
- return self.GetSortedXcodeEnv(additional_settings=postbuild_settings)
-
- def AppendPostbuildVariable(self, variables, spec, output, binary,
- is_command_start=False):
- """Adds a 'postbuild' variable if there is a postbuild for |output|."""
- postbuild = self.GetPostbuildCommand(spec, output, binary, is_command_start)
- if postbuild:
- variables.append(('postbuilds', postbuild))
-
- def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
- """Returns a shell command that runs all the postbuilds, and removes
- |output| if any of them fails. If |is_command_start| is False, then the
- returned string will start with ' && '."""
- if not self.xcode_settings or spec['type'] == 'none' or not output:
- return ''
- output = QuoteShellArgument(output, self.flavor)
- postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True)
- if output_binary is not None:
- postbuilds = self.xcode_settings.AddImplicitPostbuilds(
- self.config_name,
- os.path.normpath(os.path.join(self.base_to_build, output)),
- QuoteShellArgument(
- os.path.normpath(os.path.join(self.base_to_build, output_binary)),
- self.flavor),
- postbuilds, quiet=True)
-
- if not postbuilds:
- return ''
- # Postbuilds expect to be run in the gyp file's directory, so insert an
- # implicit postbuild to cd to there.
- postbuilds.insert(0, gyp.common.EncodePOSIXShellList(
- ['cd', self.build_to_base]))
- env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv())
-    # G will be nonzero if any postbuild fails. Run all postbuilds in a
-    # subshell.
- commands = env + ' (' + \
- ' && '.join([ninja_syntax.escape(command) for command in postbuilds])
- command_string = (commands + '); G=$$?; '
- # Remove the final output if any postbuild failed.
- '((exit $$G) || rm -rf %s) ' % output + '&& exit $$G)')
- if is_command_start:
- return '(' + command_string + ' && '
- else:
- return '$ && (' + command_string
-
- def ComputeExportEnvString(self, env):
- """Given an environment, returns a string looking like
-    'export FOO=foo; export BAR="${FOO} bar";'
- that exports |env| to the shell."""
- export_str = []
- for k, v in env:
- export_str.append('export %s=%s;' %
- (k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v))))
- return ' '.join(export_str)
-
- def ComputeMacBundleOutput(self):
- """Return the 'output' (full output path) to a bundle output directory."""
- assert self.is_mac_bundle
- path = generator_default_variables['PRODUCT_DIR']
- return self.ExpandSpecial(
- os.path.join(path, self.xcode_settings.GetWrapperName()))
-
- def ComputeOutputFileName(self, spec, type=None):
- """Compute the filename of the final output for the current target."""
- if not type:
- type = spec['type']
-
- default_variables = copy.copy(generator_default_variables)
- CalculateVariables(default_variables, {'flavor': self.flavor})
-
- # Compute filename prefix: the product prefix, or a default for
- # the product type.
- DEFAULT_PREFIX = {
- 'loadable_module': default_variables['SHARED_LIB_PREFIX'],
- 'shared_library': default_variables['SHARED_LIB_PREFIX'],
- 'static_library': default_variables['STATIC_LIB_PREFIX'],
- 'executable': default_variables['EXECUTABLE_PREFIX'],
- }
- prefix = spec.get('product_prefix', DEFAULT_PREFIX.get(type, ''))
-
- # Compute filename extension: the product extension, or a default
- # for the product type.
- DEFAULT_EXTENSION = {
- 'loadable_module': default_variables['SHARED_LIB_SUFFIX'],
- 'shared_library': default_variables['SHARED_LIB_SUFFIX'],
- 'static_library': default_variables['STATIC_LIB_SUFFIX'],
- 'executable': default_variables['EXECUTABLE_SUFFIX'],
- }
- extension = spec.get('product_extension')
- if extension:
- extension = '.' + extension
- else:
- extension = DEFAULT_EXTENSION.get(type, '')
-
- if 'product_name' in spec:
- # If we were given an explicit name, use that.
- target = spec['product_name']
- else:
- # Otherwise, derive a name from the target name.
- target = spec['target_name']
- if prefix == 'lib':
- # Snip out an extra 'lib' from libs if appropriate.
- target = StripPrefix(target, 'lib')
-
- if type in ('static_library', 'loadable_module', 'shared_library',
- 'executable'):
- return '%s%s%s' % (prefix, target, extension)
- elif type == 'none':
- return '%s.stamp' % target
- else:
- raise Exception('Unhandled output type %s' % type)
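-
-  # E.g. (a sketch, assuming Linux defaults for the prefix and suffix
-  # tables above):
-  #   {'type': 'shared_library', 'target_name': 'libfoo'} -> 'libfoo.so'
-  #   {'type': 'none', 'target_name': 'docs'} -> 'docs.stamp'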
-
- def ComputeOutput(self, spec, arch=None):
- """Compute the path for the final output of the spec."""
- type = spec['type']
-
- if self.flavor == 'win':
- override = self.msvs_settings.GetOutputName(self.config_name,
- self.ExpandSpecial)
- if override:
- return override
-
- if arch is None and self.flavor == 'mac' and type in (
- 'static_library', 'executable', 'shared_library', 'loadable_module'):
- filename = self.xcode_settings.GetExecutablePath()
- else:
- filename = self.ComputeOutputFileName(spec, type)
-
- if arch is None and 'product_dir' in spec:
- path = os.path.join(spec['product_dir'], filename)
- return self.ExpandSpecial(path)
-
-    # Some products go into the output root, libraries go into the shared
-    # library dir, and everything else goes into the normal place.
- type_in_output_root = ['executable', 'loadable_module']
- if self.flavor == 'mac' and self.toolset == 'target':
- type_in_output_root += ['shared_library', 'static_library']
- elif self.flavor == 'win' and self.toolset == 'target':
- type_in_output_root += ['shared_library']
-
- if arch is not None:
- # Make sure partial executables don't end up in a bundle or the regular
- # output directory.
- archdir = 'arch'
- if self.toolset != 'target':
- archdir = os.path.join('arch', '%s' % self.toolset)
- return os.path.join(archdir, AddArch(filename, arch))
- elif type in type_in_output_root or self.is_standalone_static_library:
- return filename
- elif type == 'shared_library':
- libdir = 'lib'
- if self.toolset != 'target':
- libdir = os.path.join('lib', '%s' % self.toolset)
- return os.path.join(libdir, filename)
- else:
- return self.GypPathToUniqueOutput(filename, qualified=False)
-
- def WriteVariableList(self, ninja_file, var, values):
- assert not isinstance(values, str)
- if values is None:
- values = []
- ninja_file.variable(var, ' '.join(values))
-
- def WriteNewNinjaRule(self, name, args, description, is_cygwin, env, pool,
- depfile=None):
- """Write out a new ninja "rule" statement for a given command.
-
- Returns the name of the new rule, and a copy of |args| with variables
- expanded."""
-
- if self.flavor == 'win':
- args = [self.msvs_settings.ConvertVSMacros(
- arg, self.base_to_build, config=self.config_name)
- for arg in args]
- description = self.msvs_settings.ConvertVSMacros(
- description, config=self.config_name)
- elif self.flavor == 'mac':
- # |env| is an empty list on non-mac.
- args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args]
- description = gyp.xcode_emulation.ExpandEnvVars(description, env)
-
- # TODO: we shouldn't need to qualify names; we do it because
- # currently the ninja rule namespace is global, but it really
- # should be scoped to the subninja.
- rule_name = self.name
- if self.toolset == 'target':
- rule_name += '.' + self.toolset
- rule_name += '.' + name
- rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name)
-
- # Remove variable references, but not if they refer to the magic rule
- # variables. This is not quite right, as it also protects these for
- # actions, not just for rules where they are valid. Good enough.
- protect = [ '${root}', '${dirname}', '${source}', '${ext}', '${name}' ]
- protect = '(?!' + '|'.join(map(re.escape, protect)) + ')'
- description = re.sub(protect + r'\$', '_', description)
-
- # gyp dictates that commands are run from the base directory.
- # cd into the directory before running, and adjust paths in
- # the arguments to point to the proper locations.
- rspfile = None
- rspfile_content = None
- args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args]
- if self.flavor == 'win':
- rspfile = rule_name + '.$unique_name.rsp'
- # The cygwin case handles this inside the bash sub-shell.
- run_in = '' if is_cygwin else ' ' + self.build_to_base
- if is_cygwin:
- rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine(
- args, self.build_to_base)
- else:
- rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args)
- command = ('%s gyp-win-tool action-wrapper $arch ' % sys.executable +
- rspfile + run_in)
- else:
- env = self.ComputeExportEnvString(env)
- command = gyp.common.EncodePOSIXShellList(args)
- command = 'cd %s; ' % self.build_to_base + env + command
-
- # GYP rules/actions express being no-ops by not touching their outputs.
- # Avoid executing downstream dependencies in this case by specifying
- # restat=1 to ninja.
- self.ninja.rule(rule_name, command, description, depfile=depfile,
- restat=True, pool=pool,
- rspfile=rspfile, rspfile_content=rspfile_content)
- self.ninja.newline()
-
- return rule_name, args
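-
-  # A sketch of the qualification above, assuming self.name 'my_target'
-  # and the 'target' toolset:
-  #   name 'do-stuff' -> 'my_target.target.do-stuff'
-  #                   -> 'my_target_target_do_stuff' after the re.sub.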
-
-
-def CalculateVariables(default_variables, params):
- """Calculate additional variables for use in the build (called by gyp)."""
- global generator_additional_non_configuration_keys
- global generator_additional_path_sections
- flavor = gyp.common.GetFlavor(params)
- if flavor == 'mac':
- default_variables.setdefault('OS', 'mac')
- default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
- default_variables.setdefault('SHARED_LIB_DIR',
- generator_default_variables['PRODUCT_DIR'])
- default_variables.setdefault('LIB_DIR',
- generator_default_variables['PRODUCT_DIR'])
-
- # Copy additional generator configuration data from Xcode, which is shared
- # by the Mac Ninja generator.
- import gyp.generator.xcode as xcode_generator
- generator_additional_non_configuration_keys = getattr(xcode_generator,
- 'generator_additional_non_configuration_keys', [])
- generator_additional_path_sections = getattr(xcode_generator,
- 'generator_additional_path_sections', [])
- global generator_extra_sources_for_rules
- generator_extra_sources_for_rules = getattr(xcode_generator,
- 'generator_extra_sources_for_rules', [])
- elif flavor == 'win':
- exts = gyp.MSVSUtil.TARGET_TYPE_EXT
- default_variables.setdefault('OS', 'win')
- default_variables['EXECUTABLE_SUFFIX'] = '.' + exts['executable']
- default_variables['STATIC_LIB_PREFIX'] = ''
- default_variables['STATIC_LIB_SUFFIX'] = '.' + exts['static_library']
- default_variables['SHARED_LIB_PREFIX'] = ''
- default_variables['SHARED_LIB_SUFFIX'] = '.' + exts['shared_library']
-
- # Copy additional generator configuration data from VS, which is shared
- # by the Windows Ninja generator.
- import gyp.generator.msvs as msvs_generator
- generator_additional_non_configuration_keys = getattr(msvs_generator,
- 'generator_additional_non_configuration_keys', [])
- generator_additional_path_sections = getattr(msvs_generator,
- 'generator_additional_path_sections', [])
-
- gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
- else:
- operating_system = flavor
- if flavor == 'android':
- operating_system = 'linux' # Keep this legacy behavior for now.
- default_variables.setdefault('OS', operating_system)
- default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
- default_variables.setdefault('SHARED_LIB_DIR',
- os.path.join('$!PRODUCT_DIR', 'lib'))
- default_variables.setdefault('LIB_DIR',
- os.path.join('$!PRODUCT_DIR', 'obj'))
-
-
-def ComputeOutputDir(params):
- """Returns the path from the toplevel_dir to the build output directory."""
- # generator_dir: relative path from pwd to where make puts build files.
-  # It makes migrating from make to ninja easier; ninja doesn't put anything
-  # here.
- generator_dir = os.path.relpath(params['options'].generator_output or '.')
-
- # output_dir: relative path from generator_dir to the build directory.
- output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
-
- # Relative path from source root to our output files. e.g. "out"
- return os.path.normpath(os.path.join(generator_dir, output_dir))
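-
-# For example (a sketch): with no generator_output and the default
-# output_dir this is normpath('./out') == 'out', while a generator flag
-# of output_dir='build' yields 'build'.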
-
-
-def CalculateGeneratorInputInfo(params):
- """Called by __init__ to initialize generator values based on params."""
- # E.g. "out/gypfiles"
- toplevel = params['options'].toplevel_dir
- qualified_out_dir = os.path.normpath(os.path.join(
- toplevel, ComputeOutputDir(params), 'gypfiles'))
-
- global generator_filelist_paths
- generator_filelist_paths = {
- 'toplevel': toplevel,
- 'qualified_out_dir': qualified_out_dir,
- }
-
-
-def OpenOutput(path, mode='w'):
- """Open |path| for writing, creating directories if necessary."""
- gyp.common.EnsureDirExists(path)
- return open(path, mode)
-
-
-def CommandWithWrapper(cmd, wrappers, prog):
- wrapper = wrappers.get(cmd, '')
- if wrapper:
- return wrapper + ' ' + prog
- return prog
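-
-# E.g. CommandWithWrapper('CC', {'CC': 'ccache'}, 'gcc') -> 'ccache gcc';
-# with no matching wrapper the program is returned unchanged.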
-
-
-def GetDefaultConcurrentLinks():
- """Returns a best-guess for a number of concurrent links."""
- pool_size = int(os.environ.get('GYP_LINK_CONCURRENCY', 0))
- if pool_size:
- return pool_size
-
- if sys.platform in ('win32', 'cygwin'):
- import ctypes
-
- class MEMORYSTATUSEX(ctypes.Structure):
- _fields_ = [
- ("dwLength", ctypes.c_ulong),
- ("dwMemoryLoad", ctypes.c_ulong),
- ("ullTotalPhys", ctypes.c_ulonglong),
- ("ullAvailPhys", ctypes.c_ulonglong),
- ("ullTotalPageFile", ctypes.c_ulonglong),
- ("ullAvailPageFile", ctypes.c_ulonglong),
- ("ullTotalVirtual", ctypes.c_ulonglong),
- ("ullAvailVirtual", ctypes.c_ulonglong),
- ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
- ]
-
- stat = MEMORYSTATUSEX()
- stat.dwLength = ctypes.sizeof(stat)
- ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
-
- # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM
- # on a 64 GB machine.
- mem_limit = max(1, stat.ullTotalPhys / (5 * (2 ** 30))) # total / 5GB
- hard_cap = max(1, int(os.environ.get('GYP_LINK_CONCURRENCY_MAX', 2**32)))
- return min(mem_limit, hard_cap)
- elif sys.platform.startswith('linux'):
- if os.path.exists("/proc/meminfo"):
- with open("/proc/meminfo") as meminfo:
- memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
- for line in meminfo:
- match = memtotal_re.match(line)
- if not match:
- continue
-          # Allow 8 GB per link on Linux because Gold is quite memory hungry.
- return max(1, int(match.group(1)) / (8 * (2 ** 20)))
- return 1
- elif sys.platform == 'darwin':
- try:
- avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
- # A static library debug build of Chromium's unit_tests takes ~2.7GB, so
- # 4GB per ld process allows for some more bloat.
- return max(1, avail_bytes / (4 * (2 ** 30))) # total / 4GB
- except:
- return 1
- else:
- # TODO(scottmg): Implement this for other platforms.
- return 1
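-
-# A worked example of the heuristics above: 64 GB of physical RAM on
-# Windows allows max(1, 64 // 5) = 12 concurrent links, and a Linux box
-# with MemTotal 16777216 kB (16 GB) allows 16777216 // (8 * 2**20) = 2.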
-
-
-def _GetWinLinkRuleNameSuffix(embed_manifest):
- """Returns the suffix used to select an appropriate linking rule depending on
- whether the manifest embedding is enabled."""
- return '_embed' if embed_manifest else ''
-
-
-def _AddWinLinkRules(master_ninja, embed_manifest):
- """Adds link rules for Windows platform to |master_ninja|."""
- def FullLinkCommand(ldcmd, out, binary_type):
- resource_name = {
- 'exe': '1',
- 'dll': '2',
- }[binary_type]
- return '%(python)s gyp-win-tool link-with-manifests $arch %(embed)s ' \
- '%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' \
- '$manifests' % {
- 'python': sys.executable,
- 'out': out,
- 'ldcmd': ldcmd,
- 'resname': resource_name,
- 'embed': embed_manifest }
- rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest)
- use_separate_mspdbsrv = (
- int(os.environ.get('GYP_USE_SEPARATE_MSPDBSRV', '0')) != 0)
- dlldesc = 'LINK%s(DLL) $binary' % rule_name_suffix.upper()
- dllcmd = ('%s gyp-win-tool link-wrapper $arch %s '
- '$ld /nologo $implibflag /DLL /OUT:$binary '
- '@$binary.rsp' % (sys.executable, use_separate_mspdbsrv))
- dllcmd = FullLinkCommand(dllcmd, '$binary', 'dll')
- master_ninja.rule('solink' + rule_name_suffix,
- description=dlldesc, command=dllcmd,
- rspfile='$binary.rsp',
- rspfile_content='$libs $in_newline $ldflags',
- restat=True,
- pool='link_pool')
- master_ninja.rule('solink_module' + rule_name_suffix,
- description=dlldesc, command=dllcmd,
- rspfile='$binary.rsp',
- rspfile_content='$libs $in_newline $ldflags',
- restat=True,
- pool='link_pool')
- # Note that ldflags goes at the end so that it has the option of
- # overriding default settings earlier in the command line.
- exe_cmd = ('%s gyp-win-tool link-wrapper $arch %s '
- '$ld /nologo /OUT:$binary @$binary.rsp' %
- (sys.executable, use_separate_mspdbsrv))
- exe_cmd = FullLinkCommand(exe_cmd, '$binary', 'exe')
- master_ninja.rule('link' + rule_name_suffix,
- description='LINK%s $binary' % rule_name_suffix.upper(),
- command=exe_cmd,
- rspfile='$binary.rsp',
- rspfile_content='$in_newline $libs $ldflags',
- pool='link_pool')
-
-
-def GenerateOutputForConfig(target_list, target_dicts, data, params,
- config_name):
- options = params['options']
- flavor = gyp.common.GetFlavor(params)
- generator_flags = params.get('generator_flags', {})
-
- # build_dir: relative path from source root to our output files.
- # e.g. "out/Debug"
- build_dir = os.path.normpath(
- os.path.join(ComputeOutputDir(params), config_name))
-
- toplevel_build = os.path.join(options.toplevel_dir, build_dir)
-
- master_ninja_file = OpenOutput(os.path.join(toplevel_build, 'build.ninja'))
- master_ninja = ninja_syntax.Writer(master_ninja_file, width=120)
-
- # Put build-time support tools in out/{config_name}.
- gyp.common.CopyTool(flavor, toplevel_build)
-
- # Grab make settings for CC/CXX.
-  # The rules are:
-  # - The priority, from lowest to highest, is the gcc/g++ defaults, the
-  #   'make_global_settings' in gyp, then the environment variables.
-  # - If there is no 'make_global_settings' for CC.host/CXX.host and no
-  #   'CC_host'/'CXX_host' environment variable, cc_host/cxx_host fall back
-  #   to cc/cxx.
- if flavor == 'win':
- ar = 'lib.exe'
- # cc and cxx must be set to the correct architecture by overriding with one
- # of cl_x86 or cl_x64 below.
- cc = 'UNSET'
- cxx = 'UNSET'
- ld = 'link.exe'
- ld_host = '$ld'
- else:
- ar = 'ar'
- cc = 'cc'
- cxx = 'c++'
- ld = '$cc'
- ldxx = '$cxx'
- ld_host = '$cc_host'
- ldxx_host = '$cxx_host'
-
- ar_host = ar
- cc_host = None
- cxx_host = None
- cc_host_global_setting = None
- cxx_host_global_setting = None
- clang_cl = None
- nm = 'nm'
- nm_host = 'nm'
- readelf = 'readelf'
- readelf_host = 'readelf'
-
- build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
- make_global_settings = data[build_file].get('make_global_settings', [])
- build_to_root = gyp.common.InvertRelativePath(build_dir,
- options.toplevel_dir)
- wrappers = {}
- for key, value in make_global_settings:
- if key == 'AR':
- ar = os.path.join(build_to_root, value)
- if key == 'AR.host':
- ar_host = os.path.join(build_to_root, value)
- if key == 'CC':
- cc = os.path.join(build_to_root, value)
- if cc.endswith('clang-cl'):
- clang_cl = cc
- if key == 'CXX':
- cxx = os.path.join(build_to_root, value)
- if key == 'CC.host':
- cc_host = os.path.join(build_to_root, value)
- cc_host_global_setting = value
- if key == 'CXX.host':
- cxx_host = os.path.join(build_to_root, value)
- cxx_host_global_setting = value
- if key == 'LD':
- ld = os.path.join(build_to_root, value)
- if key == 'LD.host':
- ld_host = os.path.join(build_to_root, value)
- if key == 'NM':
- nm = os.path.join(build_to_root, value)
- if key == 'NM.host':
- nm_host = os.path.join(build_to_root, value)
- if key == 'READELF':
- readelf = os.path.join(build_to_root, value)
- if key == 'READELF.host':
- readelf_host = os.path.join(build_to_root, value)
- if key.endswith('_wrapper'):
- wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)
-
- # Support wrappers from environment variables too.
- for key, value in os.environ.iteritems():
- if key.lower().endswith('_wrapper'):
- key_prefix = key[:-len('_wrapper')]
- key_prefix = re.sub(r'\.HOST$', '.host', key_prefix)
- wrappers[key_prefix] = os.path.join(build_to_root, value)
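-
-  # E.g. an environment of CC_wrapper=ccache yields wrappers['CC'] pointing
-  # at ccache, while CC.HOST_wrapper is normalized onto the 'CC.host' key.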
-
- if flavor == 'win':
- configs = [target_dicts[qualified_target]['configurations'][config_name]
- for qualified_target in target_list]
- shared_system_includes = None
- if not generator_flags.get('ninja_use_custom_environment_files', 0):
- shared_system_includes = \
- gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes(
- configs, generator_flags)
- cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
- toplevel_build, generator_flags, shared_system_includes, OpenOutput)
- for arch, path in cl_paths.iteritems():
- if clang_cl:
- # If we have selected clang-cl, use that instead.
- path = clang_cl
- command = CommandWithWrapper('CC', wrappers,
- QuoteShellArgument(path, 'win'))
- if clang_cl:
- # Use clang-cl to cross-compile for x86 or x86_64.
- command += (' -m32' if arch == 'x86' else ' -m64')
- master_ninja.variable('cl_' + arch, command)
-
- cc = GetEnvironFallback(['CC_target', 'CC'], cc)
- master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc))
- cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx)
- master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx))
-
- if flavor == 'win':
- master_ninja.variable('ld', ld)
- master_ninja.variable('idl', 'midl.exe')
- master_ninja.variable('ar', ar)
- master_ninja.variable('rc', 'rc.exe')
- master_ninja.variable('ml_x86', 'ml.exe')
- master_ninja.variable('ml_x64', 'ml64.exe')
- master_ninja.variable('mt', 'mt.exe')
- else:
- master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld))
- master_ninja.variable('ldxx', CommandWithWrapper('LINK', wrappers, ldxx))
- master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], ar))
- if flavor != 'mac':
-      # Mac does not use readelf/nm for .TOC generation, so avoid polluting
-      # the master ninja file with extra unused variables.
- master_ninja.variable(
- 'nm', GetEnvironFallback(['NM_target', 'NM'], nm))
- master_ninja.variable(
- 'readelf', GetEnvironFallback(['READELF_target', 'READELF'], readelf))
-
- if generator_supports_multiple_toolsets:
- if not cc_host:
- cc_host = cc
- if not cxx_host:
- cxx_host = cxx
-
- master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], ar_host))
- master_ninja.variable('nm_host', GetEnvironFallback(['NM_host'], nm_host))
- master_ninja.variable('readelf_host',
- GetEnvironFallback(['READELF_host'], readelf_host))
- cc_host = GetEnvironFallback(['CC_host'], cc_host)
- cxx_host = GetEnvironFallback(['CXX_host'], cxx_host)
-
-    # Environment variables may be referenced in 'make_global_settings',
-    # e.g. ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)']; expand those
-    # references here.
- if '$(CC)' in cc_host and cc_host_global_setting:
- cc_host = cc_host_global_setting.replace('$(CC)', cc)
- if '$(CXX)' in cxx_host and cxx_host_global_setting:
- cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx)
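-    # A sketch of the expansion above (hypothetical settings): with
-    # make_global_settings containing ['CC.host', '$(CC)'] and cc == 'clang',
-    # cc_host becomes 'clang'; ['CXX.host', '$(CXX) -m32'] would likewise
-    # yield cxx_host == cxx + ' -m32'.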
- master_ninja.variable('cc_host',
- CommandWithWrapper('CC.host', wrappers, cc_host))
- master_ninja.variable('cxx_host',
- CommandWithWrapper('CXX.host', wrappers, cxx_host))
- if flavor == 'win':
- master_ninja.variable('ld_host', ld_host)
- else:
- master_ninja.variable('ld_host', CommandWithWrapper(
- 'LINK', wrappers, ld_host))
- master_ninja.variable('ldxx_host', CommandWithWrapper(
- 'LINK', wrappers, ldxx_host))
-
- master_ninja.newline()
-
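-  # A ninja pool caps how many of its build edges may run concurrently,
-  # independent of the global -j level; link steps tend to be memory-hungry,
-  # so they share a dedicated pool sized by GetDefaultConcurrentLinks().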
- master_ninja.pool('link_pool', depth=GetDefaultConcurrentLinks())
- master_ninja.newline()
-
- deps = 'msvc' if flavor == 'win' else 'gcc'
-
- if flavor != 'win':
- master_ninja.rule(
- 'cc',
- description='CC $out',
- command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c '
- '$cflags_pch_c -c $in -o $out'),
- depfile='$out.d',
- deps=deps)
- master_ninja.rule(
- 'cc_s',
- description='CC $out',
- command=('$cc $defines $includes $cflags $cflags_c '
- '$cflags_pch_c -c $in -o $out'))
- master_ninja.rule(
- 'cxx',
- description='CXX $out',
- command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc '
- '$cflags_pch_cc -c $in -o $out'),
- depfile='$out.d',
- deps=deps)
- else:
-    # TODO(scottmg) Separate pdb names is a test to see if it works around
-    # http://crbug.com/142362. It seems there's a race between the creation of
-    # the .pdb by the precompiled header step for .cc and the compilation of
-    # .c files. This should be handled by mspdbsrv, but the build occasionally
-    # errors out with
-    #   c1xx : fatal error C1033: cannot open program database
-    # By making the rules target separate pdb files this might be avoided.
- cc_command = ('ninja -t msvc -e $arch ' +
- '-- '
- '$cc /nologo /showIncludes /FC '
- '@$out.rsp /c $in /Fo$out /Fd$pdbname_c ')
- cxx_command = ('ninja -t msvc -e $arch ' +
- '-- '
- '$cxx /nologo /showIncludes /FC '
- '@$out.rsp /c $in /Fo$out /Fd$pdbname_cc ')
- master_ninja.rule(
- 'cc',
- description='CC $out',
- command=cc_command,
- rspfile='$out.rsp',
- rspfile_content='$defines $includes $cflags $cflags_c',
- deps=deps)
- master_ninja.rule(
- 'cxx',
- description='CXX $out',
- command=cxx_command,
- rspfile='$out.rsp',
- rspfile_content='$defines $includes $cflags $cflags_cc',
- deps=deps)
- master_ninja.rule(
- 'idl',
- description='IDL $in',
- command=('%s gyp-win-tool midl-wrapper $arch $outdir '
- '$tlb $h $dlldata $iid $proxy $in '
- '$midl_includes $idlflags' % sys.executable))
- master_ninja.rule(
- 'rc',
- description='RC $in',
- # Note: $in must be last otherwise rc.exe complains.
- command=('%s gyp-win-tool rc-wrapper '
- '$arch $rc $defines $resource_includes $rcflags /fo$out $in' %
- sys.executable))
- master_ninja.rule(
- 'asm',
- description='ASM $out',
- command=('%s gyp-win-tool asm-wrapper '
- '$arch $asm $defines $includes $asmflags /c /Fo $out $in' %
- sys.executable))
-
- if flavor != 'mac' and flavor != 'win':
- master_ninja.rule(
- 'alink',
- description='AR $out',
- command='rm -f $out && $ar rcs $arflags $out $in')
- master_ninja.rule(
- 'alink_thin',
- description='AR $out',
- command='rm -f $out && $ar rcsT $arflags $out $in')
-
- # This allows targets that only need to depend on $lib's API to declare an
- # order-only dependency on $lib.TOC and avoid relinking such downstream
- # dependencies when $lib changes only in non-public ways.
-    # The resulting string leaves an uninterpolated %(suffix)s placeholder,
-    # which is filled in by the final substitution below.
- mtime_preserving_solink_base = (
- 'if [ ! -e $lib -o ! -e $lib.TOC ]; then '
- '%(solink)s && %(extract_toc)s > $lib.TOC; else '
- '%(solink)s && %(extract_toc)s > $lib.tmp && '
- 'if ! cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; '
- 'fi; fi'
- % { 'solink':
- '$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s',
- 'extract_toc':
- ('{ $readelf -d $lib | grep SONAME ; '
- '$nm -gD -f p $lib | cut -f1-2 -d\' \'; }')})
-
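-    # The two-stage substitution works because %(suffix)s appears inside the
-    # *value* substituted for %(solink)s, so it survives the first pass.  A
-    # minimal sketch of the idiom:
-    #   >>> base = 'run: %(cmd)s' % {'cmd': 'ld -o $lib %(suffix)s'}
-    #   >>> base % {'suffix': '@$link_file_list'}
-    #   'run: ld -o $lib @$link_file_list'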
- master_ninja.rule(
- 'solink',
- description='SOLINK $lib',
- restat=True,
- command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
- rspfile='$link_file_list',
- rspfile_content=
- '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive $libs',
- pool='link_pool')
- master_ninja.rule(
- 'solink_module',
- description='SOLINK(module) $lib',
- restat=True,
- command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
- rspfile='$link_file_list',
- rspfile_content='-Wl,--start-group $in -Wl,--end-group $solibs $libs',
- pool='link_pool')
- master_ninja.rule(
- 'link',
- description='LINK $out',
- command=('$ld $ldflags -o $out '
- '-Wl,--start-group $in -Wl,--end-group $solibs $libs'),
- pool='link_pool')
- elif flavor == 'win':
- master_ninja.rule(
- 'alink',
- description='LIB $out',
- command=('%s gyp-win-tool link-wrapper $arch False '
- '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' %
- sys.executable),
- rspfile='$out.rsp',
- rspfile_content='$in_newline $libflags')
- _AddWinLinkRules(master_ninja, embed_manifest=True)
- _AddWinLinkRules(master_ninja, embed_manifest=False)
- else:
- master_ninja.rule(
- 'objc',
- description='OBJC $out',
- command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc '
- '$cflags_pch_objc -c $in -o $out'),
- depfile='$out.d',
- deps=deps)
- master_ninja.rule(
- 'objcxx',
- description='OBJCXX $out',
- command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc '
- '$cflags_pch_objcc -c $in -o $out'),
- depfile='$out.d',
- deps=deps)
- master_ninja.rule(
- 'alink',
- description='LIBTOOL-STATIC $out, POSTBUILDS',
- command='rm -f $out && '
- './gyp-mac-tool filter-libtool libtool $libtool_flags '
- '-static -o $out $in'
- '$postbuilds')
- master_ninja.rule(
- 'lipo',
- description='LIPO $out, POSTBUILDS',
- command='rm -f $out && lipo -create $in -output $out$postbuilds')
- master_ninja.rule(
- 'solipo',
- description='SOLIPO $out, POSTBUILDS',
- command=(
- 'rm -f $lib $lib.TOC && lipo -create $in -output $lib$postbuilds &&'
- '%(extract_toc)s > $lib.TOC'
- % { 'extract_toc':
- '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
- 'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'}))
-
-
- # Record the public interface of $lib in $lib.TOC. See the corresponding
- # comment in the posix section above for details.
- solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s'
- mtime_preserving_solink_base = (
- 'if [ ! -e $lib -o ! -e $lib.TOC ] || '
- # Always force dependent targets to relink if this library
- # reexports something. Handling this correctly would require
- # recursive TOC dumping but this is rare in practice, so punt.
- 'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then '
- '%(solink)s && %(extract_toc)s > $lib.TOC; '
- 'else '
- '%(solink)s && %(extract_toc)s > $lib.tmp && '
- 'if ! cmp -s $lib.tmp $lib.TOC; then '
- 'mv $lib.tmp $lib.TOC ; '
- 'fi; '
- 'fi'
- % { 'solink': solink_base,
- 'extract_toc':
- '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
- 'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})
-
-
- solink_suffix = '@$link_file_list$postbuilds'
- master_ninja.rule(
- 'solink',
- description='SOLINK $lib, POSTBUILDS',
- restat=True,
- command=mtime_preserving_solink_base % {'suffix': solink_suffix,
- 'type': '-shared'},
- rspfile='$link_file_list',
- rspfile_content='$in $solibs $libs',
- pool='link_pool')
- master_ninja.rule(
- 'solink_notoc',
- description='SOLINK $lib, POSTBUILDS',
- restat=True,
- command=solink_base % {'suffix':solink_suffix, 'type': '-shared'},
- rspfile='$link_file_list',
- rspfile_content='$in $solibs $libs',
- pool='link_pool')
-
- master_ninja.rule(
- 'solink_module',
- description='SOLINK(module) $lib, POSTBUILDS',
- restat=True,
- command=mtime_preserving_solink_base % {'suffix': solink_suffix,
- 'type': '-bundle'},
- rspfile='$link_file_list',
- rspfile_content='$in $solibs $libs',
- pool='link_pool')
- master_ninja.rule(
- 'solink_module_notoc',
- description='SOLINK(module) $lib, POSTBUILDS',
- restat=True,
- command=solink_base % {'suffix': solink_suffix, 'type': '-bundle'},
- rspfile='$link_file_list',
- rspfile_content='$in $solibs $libs',
- pool='link_pool')
-
- master_ninja.rule(
- 'link',
- description='LINK $out, POSTBUILDS',
- command=('$ld $ldflags -o $out '
- '$in $solibs $libs$postbuilds'),
- pool='link_pool')
- master_ninja.rule(
- 'preprocess_infoplist',
- description='PREPROCESS INFOPLIST $out',
- command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && '
- 'plutil -convert xml1 $out $out'))
- master_ninja.rule(
- 'copy_infoplist',
- description='COPY INFOPLIST $in',
- command='$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys')
- master_ninja.rule(
- 'merge_infoplist',
- description='MERGE INFOPLISTS $in',
- command='$env ./gyp-mac-tool merge-info-plist $out $in')
- master_ninja.rule(
- 'compile_xcassets',
- description='COMPILE XCASSETS $in',
- command='$env ./gyp-mac-tool compile-xcassets $keys $in')
- master_ninja.rule(
- 'mac_tool',
- description='MACTOOL $mactool_cmd $in',
- command='$env ./gyp-mac-tool $mactool_cmd $in $out $binary')
- master_ninja.rule(
- 'package_framework',
- description='PACKAGE FRAMEWORK $out, POSTBUILDS',
- command='./gyp-mac-tool package-framework $out $version$postbuilds '
- '&& touch $out')
- if flavor == 'win':
- master_ninja.rule(
- 'stamp',
- description='STAMP $out',
- command='%s gyp-win-tool stamp $out' % sys.executable)
- master_ninja.rule(
- 'copy',
- description='COPY $in $out',
- command='%s gyp-win-tool recursive-mirror $in $out' % sys.executable)
- else:
- master_ninja.rule(
- 'stamp',
- description='STAMP $out',
- command='${postbuilds}touch $out')
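-    # 'copy' first tries a cheap hard link; if that fails (e.g. when copying
-    # across filesystems), it falls back to a real recursive copy.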
- master_ninja.rule(
- 'copy',
- description='COPY $in $out',
- command='ln -f $in $out 2>/dev/null || (rm -rf $out && cp -af $in $out)')
- master_ninja.newline()
-
- all_targets = set()
- for build_file in params['build_files']:
- for target in gyp.common.AllTargets(target_list,
- target_dicts,
- os.path.normpath(build_file)):
- all_targets.add(target)
- all_outputs = set()
-
- # target_outputs is a map from qualified target name to a Target object.
- target_outputs = {}
- # target_short_names is a map from target short name to a list of Target
- # objects.
- target_short_names = {}
-
-  # Short names of targets that were skipped because they didn't contain
-  # anything interesting.
-  # NOTE: there may be overlap between this and non_empty_target_names.
- empty_target_names = set()
-
-  # Set of non-empty short target names.
-  # NOTE: there may be overlap between this and empty_target_names.
- non_empty_target_names = set()
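-  # (The two sets can overlap because short names are not unique across
-  # build files and toolsets: hypothetically, a 'foo' target in one .gyp
-  # file may produce output while a 'foo' target in another is empty.)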
-
- for qualified_target in target_list:
- # qualified_target is like: third_party/icu/icu.gyp:icui18n#target
- build_file, name, toolset = \
- gyp.common.ParseQualifiedTarget(qualified_target)
-
- this_make_global_settings = data[build_file].get('make_global_settings', [])
- assert make_global_settings == this_make_global_settings, (
- "make_global_settings needs to be the same for all targets. %s vs. %s" %
- (this_make_global_settings, make_global_settings))
-
- spec = target_dicts[qualified_target]
- if flavor == 'mac':
- gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
-
- # If build_file is a symlink, we must not follow it because there's a chance
- # it could point to a path above toplevel_dir, and we cannot correctly deal
- # with that case at the moment.
- build_file = gyp.common.RelativePath(build_file, options.toplevel_dir,
- False)
-
- qualified_target_for_hash = gyp.common.QualifiedTarget(build_file, name,
- toolset)
- hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest()
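-    # The md5 hex digest of the qualified name (e.g. of
-    # 'third_party/icu/icu.gyp:icui18n#target') gives each target a stable
-    # identifier that the writer can use to namespace its generated rules.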
-
- base_path = os.path.dirname(build_file)
- obj = 'obj'
- if toolset != 'target':
- obj += '.' + toolset
- output_file = os.path.join(obj, base_path, name + '.ninja')
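-    # E.g. for build_file 'third_party/icu/icu.gyp', target 'icui18n' and
-    # toolset 'host', output_file is 'obj.host/third_party/icu/icui18n.ninja'.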
-
- ninja_output = StringIO()
- writer = NinjaWriter(hash_for_rules, target_outputs, base_path, build_dir,
- ninja_output,
- toplevel_build, output_file,
- flavor, toplevel_dir=options.toplevel_dir)
-
- target = writer.WriteSpec(spec, config_name, generator_flags)
-
- if ninja_output.tell() > 0:
- # Only create files for ninja files that actually have contents.
- with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file:
- ninja_file.write(ninja_output.getvalue())
- ninja_output.close()
- master_ninja.subninja(output_file)
-
- if target:
- if name != target.FinalOutput() and spec['toolset'] == 'target':
- target_short_names.setdefault(name, []).append(target)
- target_outputs[qualified_target] = target
- if qualified_target in all_targets:
- all_outputs.add(target.FinalOutput())
- non_empty_target_names.add(name)
- else:
- empty_target_names.add(name)
-
- if target_short_names:
-    # Write a short name to build this target. This benefits both the
-    # "build chrome" case and the gyp tests, which expect to be able to
-    # run actions and build libraries by their short name.
- master_ninja.newline()
- master_ninja.comment('Short names for targets.')
- for short_name in target_short_names:
- master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in
- target_short_names[short_name]])
-
-  # Write phony targets for any empty targets that weren't written yet. As
-  # short names are not necessarily unique, only do this for short names
-  # that haven't already been output for another target.
- empty_target_names = empty_target_names - non_empty_target_names
- if empty_target_names:
- master_ninja.newline()
- master_ninja.comment('Empty targets (output for completeness).')
- for name in sorted(empty_target_names):
- master_ninja.build(name, 'phony')
-
- if all_outputs:
- master_ninja.newline()
- master_ninja.build('all', 'phony', list(all_outputs))
- master_ninja.default(generator_flags.get('default_target', 'all'))
-
- master_ninja_file.close()
-
-
-def PerformBuild(data, configurations, params):
- options = params['options']
- for config in configurations:
- builddir = os.path.join(options.toplevel_dir, 'out', config)
- arguments = ['ninja', '-C', builddir]
- print 'Building [%s]: %s' % (config, arguments)
- subprocess.check_call(arguments)
-
-
-def CallGenerateOutputForConfig(arglist):
- # Ignore the interrupt signal so that the parent process catches it and
- # kills all multiprocessing children.
- signal.signal(signal.SIGINT, signal.SIG_IGN)
-
- (target_list, target_dicts, data, params, config_name) = arglist
- GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- # Update target_dicts for iOS device builds.
- target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator(
- target_dicts)
-
- user_config = params.get('generator_flags', {}).get('config', None)
- if gyp.common.GetFlavor(params) == 'win':
- target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
- target_list, target_dicts = MSVSUtil.InsertLargePdbShims(
- target_list, target_dicts, generator_default_variables)
-
- if user_config:
- GenerateOutputForConfig(target_list, target_dicts, data, params,
- user_config)
- else:
- config_names = target_dicts[target_list[0]]['configurations'].keys()
- if params['parallel']:
- try:
- pool = multiprocessing.Pool(len(config_names))
- arglists = []
- for config_name in config_names:
- arglists.append(
- (target_list, target_dicts, data, params, config_name))
- pool.map(CallGenerateOutputForConfig, arglists)
- except KeyboardInterrupt, e:
- pool.terminate()
- raise e
- else:
- for config_name in config_names:
- GenerateOutputForConfig(target_list, target_dicts, data, params,
- config_name)
diff --git a/deps/gyp/pylib/gyp/generator/ninja_test.py b/deps/gyp/pylib/gyp/generator/ninja_test.py
deleted file mode 100644
index 1767b2f45a..0000000000
--- a/deps/gyp/pylib/gyp/generator/ninja_test.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-""" Unit tests for the ninja.py file. """
-
-import gyp.generator.ninja as ninja
-import unittest
-import StringIO
-import sys
-import TestCommon
-
-
-class TestPrefixesAndSuffixes(unittest.TestCase):
- def test_BinaryNamesWindows(self):
- # These cannot run on non-Windows as they require a VS installation to
- # correctly handle variable expansion.
- if sys.platform.startswith('win'):
- writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
- 'build.ninja', 'win')
- spec = { 'target_name': 'wee' }
- self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
- endswith('.exe'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
- endswith('.dll'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
- endswith('.lib'))
-
- def test_BinaryNamesLinux(self):
- writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
- 'build.ninja', 'linux')
- spec = { 'target_name': 'wee' }
- self.assertTrue('.' not in writer.ComputeOutputFileName(spec,
- 'executable'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
- startswith('lib'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
- startswith('lib'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
- endswith('.so'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
- endswith('.a'))
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/deps/gyp/pylib/gyp/generator/xcode.py b/deps/gyp/pylib/gyp/generator/xcode.py
deleted file mode 100644
index 0e3fb9301e..0000000000
--- a/deps/gyp/pylib/gyp/generator/xcode.py
+++ /dev/null
@@ -1,1300 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import filecmp
-import gyp.common
-import gyp.xcodeproj_file
-import gyp.xcode_ninja
-import errno
-import os
-import sys
-import posixpath
-import re
-import shutil
-import subprocess
-import tempfile
-
-
-# Project files generated by this module will use _intermediate_var as a
-# custom Xcode setting whose value is a DerivedSources-like directory that's
-# project-specific and configuration-specific. The normal choice,
-# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive
-# as it is likely that multiple targets within a single project file will want
-# to access the same set of generated files. The other option,
-# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific,
-# it is not configuration-specific. INTERMEDIATE_DIR is defined as
-# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION).
-_intermediate_var = 'INTERMEDIATE_DIR'
-
-# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all
-# targets that share the same BUILT_PRODUCTS_DIR.
-_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR'
-
-_library_search_paths_var = 'LIBRARY_SEARCH_PATHS'
-
-generator_default_variables = {
- 'EXECUTABLE_PREFIX': '',
- 'EXECUTABLE_SUFFIX': '',
- 'STATIC_LIB_PREFIX': 'lib',
- 'SHARED_LIB_PREFIX': 'lib',
- 'STATIC_LIB_SUFFIX': '.a',
- 'SHARED_LIB_SUFFIX': '.dylib',
- # INTERMEDIATE_DIR is a place for targets to build up intermediate products.
- # It is specific to each build environment. It is only guaranteed to exist
- # and be constant within the context of a project, corresponding to a single
- # input file. Some build environments may allow their intermediate directory
- # to be shared on a wider scale, but this is not guaranteed.
- 'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var,
- 'OS': 'mac',
- 'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)',
- 'LIB_DIR': '$(BUILT_PRODUCTS_DIR)',
- 'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)',
- 'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)',
- 'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)',
- 'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)',
- 'RULE_INPUT_DIRNAME': '$(INPUT_FILE_DIRNAME)',
- 'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var,
- 'CONFIGURATION_NAME': '$(CONFIGURATION)',
-}
-
-# The Xcode-specific sections that hold paths.
-generator_additional_path_sections = [
- 'mac_bundle_resources',
- 'mac_framework_headers',
- 'mac_framework_private_headers',
-  # 'mac_framework_dirs': input.py already handles _dirs endings.
-]
-
-# The Xcode-specific keys that exist on targets and aren't moved down to
-# configurations.
-generator_additional_non_configuration_keys = [
- 'ios_app_extension',
- 'ios_watch_app',
- 'ios_watchkit_extension',
- 'mac_bundle',
- 'mac_bundle_resources',
- 'mac_framework_headers',
- 'mac_framework_private_headers',
- 'mac_xctest_bundle',
- 'xcode_create_dependents_test_runner',
-]
-
-# We want to let any rules apply to files that are resources also.
-generator_extra_sources_for_rules = [
- 'mac_bundle_resources',
- 'mac_framework_headers',
- 'mac_framework_private_headers',
-]
-
-generator_filelist_paths = None
-
-# Xcode's standard set of library directories, which don't need to be duplicated
-# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay.
-xcode_standard_library_dirs = frozenset([
- '$(SDKROOT)/usr/lib',
- '$(SDKROOT)/usr/local/lib',
-])
-
-def CreateXCConfigurationList(configuration_names):
- xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []})
- if len(configuration_names) == 0:
- configuration_names = ['Default']
- for configuration_name in configuration_names:
- xcbc = gyp.xcodeproj_file.XCBuildConfiguration({
- 'name': configuration_name})
- xccl.AppendProperty('buildConfigurations', xcbc)
- xccl.SetProperty('defaultConfigurationName', configuration_names[0])
- return xccl
-
-
-class XcodeProject(object):
- def __init__(self, gyp_path, path, build_file_dict):
- self.gyp_path = gyp_path
- self.path = path
- self.project = gyp.xcodeproj_file.PBXProject(path=path)
- projectDirPath = gyp.common.RelativePath(
- os.path.dirname(os.path.abspath(self.gyp_path)),
- os.path.dirname(path) or '.')
- self.project.SetProperty('projectDirPath', projectDirPath)
- self.project_file = \
- gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project})
- self.build_file_dict = build_file_dict
-
- # TODO(mark): add destructor that cleans up self.path if created_dir is
- # True and things didn't complete successfully. Or do something even
- # better with "try"?
- self.created_dir = False
- try:
- os.makedirs(self.path)
- self.created_dir = True
- except OSError, e:
- if e.errno != errno.EEXIST:
- raise
-
- def Finalize1(self, xcode_targets, serialize_all_tests):
-    # Collect a list of all of the build configuration names used by the
-    # various targets in the file. It is strongly advised to keep every
-    # target in a project (even across multiple project files) using the
-    # same set of configuration names.
- configurations = []
- for xct in self.project.GetProperty('targets'):
- xccl = xct.GetProperty('buildConfigurationList')
- xcbcs = xccl.GetProperty('buildConfigurations')
- for xcbc in xcbcs:
- name = xcbc.GetProperty('name')
- if name not in configurations:
- configurations.append(name)
-
- # Replace the XCConfigurationList attached to the PBXProject object with
- # a new one specifying all of the configuration names used by the various
- # targets.
- try:
- xccl = CreateXCConfigurationList(configurations)
- self.project.SetProperty('buildConfigurationList', xccl)
- except:
- sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)
- raise
-
- # The need for this setting is explained above where _intermediate_var is
- # defined. The comments below about wanting to avoid project-wide build
- # settings apply here too, but this needs to be set on a project-wide basis
- # so that files relative to the _intermediate_var setting can be displayed
- # properly in the Xcode UI.
- #
- # Note that for configuration-relative files such as anything relative to
- # _intermediate_var, for the purposes of UI tree view display, Xcode will
- # only resolve the configuration name once, when the project file is
- # opened. If the active build configuration is changed, the project file
- # must be closed and reopened if it is desired for the tree view to update.
- # This is filed as Apple radar 6588391.
- xccl.SetBuildSetting(_intermediate_var,
- '$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)')
- xccl.SetBuildSetting(_shared_intermediate_var,
- '$(SYMROOT)/DerivedSources/$(CONFIGURATION)')
-
- # Set user-specified project-wide build settings and config files. This
- # is intended to be used very sparingly. Really, almost everything should
- # go into target-specific build settings sections. The project-wide
- # settings are only intended to be used in cases where Xcode attempts to
- # resolve variable references in a project context as opposed to a target
- # context, such as when resolving sourceTree references while building up
-    # the tree view for UI display.
- # Any values set globally are applied to all configurations, then any
- # per-configuration values are applied.
- for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems():
- xccl.SetBuildSetting(xck, xcv)
- if 'xcode_config_file' in self.build_file_dict:
- config_ref = self.project.AddOrGetFileInRootGroup(
- self.build_file_dict['xcode_config_file'])
- xccl.SetBaseConfiguration(config_ref)
- build_file_configurations = self.build_file_dict.get('configurations', {})
- if build_file_configurations:
- for config_name in configurations:
- build_file_configuration_named = \
- build_file_configurations.get(config_name, {})
- if build_file_configuration_named:
- xcc = xccl.ConfigurationNamed(config_name)
- for xck, xcv in build_file_configuration_named.get('xcode_settings',
- {}).iteritems():
- xcc.SetBuildSetting(xck, xcv)
- if 'xcode_config_file' in build_file_configuration_named:
- config_ref = self.project.AddOrGetFileInRootGroup(
- build_file_configurations[config_name]['xcode_config_file'])
- xcc.SetBaseConfiguration(config_ref)
-
- # Sort the targets based on how they appeared in the input.
- # TODO(mark): Like a lot of other things here, this assumes internal
- # knowledge of PBXProject - in this case, of its "targets" property.
-
- # ordinary_targets are ordinary targets that are already in the project
- # file. run_test_targets are the targets that run unittests and should be
- # used for the Run All Tests target. support_targets are the action/rule
- # targets used by GYP file targets, just kept for the assert check.
- ordinary_targets = []
- run_test_targets = []
- support_targets = []
-
-    # targets is the full list of targets in the project.
-    targets = []
-
-    # Does the project define its own "all" target?
-    has_custom_all = False
-
-    # targets_for_all is the list of ordinary_targets that should be listed
-    # in this project's "All" target. It includes each ordinary target that
-    # does not have suppress_wildcard set.
- targets_for_all = []
-
- for target in self.build_file_dict['targets']:
- target_name = target['target_name']
- toolset = target['toolset']
- qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name,
- toolset)
- xcode_target = xcode_targets[qualified_target]
- # Make sure that the target being added to the sorted list is already in
- # the unsorted list.
- assert xcode_target in self.project._properties['targets']
- targets.append(xcode_target)
- ordinary_targets.append(xcode_target)
- if xcode_target.support_target:
- support_targets.append(xcode_target.support_target)
- targets.append(xcode_target.support_target)
-
- if not int(target.get('suppress_wildcard', False)):
- targets_for_all.append(xcode_target)
-
- if target_name.lower() == 'all':
-        has_custom_all = True
-
- # If this target has a 'run_as' attribute, add its target to the
- # targets, and add it to the test targets.
- if target.get('run_as'):
- # Make a target to run something. It should have one
- # dependency, the parent xcode target.
- xccl = CreateXCConfigurationList(configurations)
- run_target = gyp.xcodeproj_file.PBXAggregateTarget({
- 'name': 'Run ' + target_name,
- 'productName': xcode_target.GetProperty('productName'),
- 'buildConfigurationList': xccl,
- },
- parent=self.project)
- run_target.AddDependency(xcode_target)
-
- command = target['run_as']
- script = ''
- if command.get('working_directory'):
- script = script + 'cd "%s"\n' % \
- gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
- command.get('working_directory'))
-
- if command.get('environment'):
- script = script + "\n".join(
- ['export %s="%s"' %
- (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val))
- for (key, val) in command.get('environment').iteritems()]) + "\n"
-
-        # Some tests end up using sockets, files on disk, etc. and can get
-        # confused if more than one test runs at a time.  The generator
-        # flag 'xcode_serialize_all_test_runs' controls forcing all tests
-        # to run serially.  It defaults to True.  To get serial runs, this
-        # little bit of python does the same as the linux flock utility to
-        # make sure only one runs at a time.
- command_prefix = ''
- if serialize_all_tests:
- command_prefix = \
-"""python -c "import fcntl, subprocess, sys
-file = open('$TMPDIR/GYP_serialize_test_runs', 'a')
-fcntl.flock(file.fileno(), fcntl.LOCK_EX)
-sys.exit(subprocess.call(sys.argv[1:]))" """
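-        # The prefix serializes runs by taking an exclusive flock on a file
-        # in $TMPDIR; a second test run blocks until the first process
-        # exits and the kernel releases the lock.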
-
-        # If we were unable to exec for some reason, exit with an error.
-        # Also fix up variable references to be shell syntax instead of
-        # Xcode syntax.
- script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \
- gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
- gyp.common.EncodePOSIXShellList(command.get('action')))
-
- ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
- 'shellScript': script,
- 'showEnvVarsInLog': 0,
- })
- run_target.AppendProperty('buildPhases', ssbp)
-
- # Add the run target to the project file.
- targets.append(run_target)
- run_test_targets.append(run_target)
- xcode_target.test_runner = run_target
-
-
- # Make sure that the list of targets being replaced is the same length as
- # the one replacing it, but allow for the added test runner targets.
- assert len(self.project._properties['targets']) == \
- len(ordinary_targets) + len(support_targets)
-
- self.project._properties['targets'] = targets
-
- # Get rid of unnecessary levels of depth in groups like the Source group.
- self.project.RootGroupsTakeOverOnlyChildren(True)
-
- # Sort the groups nicely. Do this after sorting the targets, because the
- # Products group is sorted based on the order of the targets.
- self.project.SortGroups()
-
- # Create an "All" target if there's more than one target in this project
- # file and the project didn't define its own "All" target. Put a generated
- # "All" target first so that people opening up the project for the first
- # time will build everything by default.
- if len(targets_for_all) > 1 and not has_custom_all:
- xccl = CreateXCConfigurationList(configurations)
- all_target = gyp.xcodeproj_file.PBXAggregateTarget(
- {
- 'buildConfigurationList': xccl,
- 'name': 'All',
- },
- parent=self.project)
-
- for target in targets_for_all:
- all_target.AddDependency(target)
-
- # TODO(mark): This is evil because it relies on internal knowledge of
- # PBXProject._properties. It's important to get the "All" target first,
- # though.
- self.project._properties['targets'].insert(0, all_target)
-
- # The same, but for run_test_targets.
- if len(run_test_targets) > 1:
- xccl = CreateXCConfigurationList(configurations)
- run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget(
- {
- 'buildConfigurationList': xccl,
- 'name': 'Run All Tests',
- },
- parent=self.project)
- for run_test_target in run_test_targets:
- run_all_tests_target.AddDependency(run_test_target)
-
- # Insert after the "All" target, which must exist if there is more than
- # one run_test_target.
- self.project._properties['targets'].insert(1, run_all_tests_target)
-
- def Finalize2(self, xcode_targets, xcode_target_to_target_dict):
- # Finalize2 needs to happen in a separate step because the process of
- # updating references to other projects depends on the ordering of targets
- # within remote project files. Finalize1 is responsible for sorting duty,
- # and once all project files are sorted, Finalize2 can come in and update
- # these references.
-
- # To support making a "test runner" target that will run all the tests
- # that are direct dependents of any given target, we look for
- # xcode_create_dependents_test_runner being set on an Aggregate target,
- # and generate a second target that will run the tests runners found under
- # the marked target.
- for bf_tgt in self.build_file_dict['targets']:
- if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)):
- tgt_name = bf_tgt['target_name']
- toolset = bf_tgt['toolset']
- qualified_target = gyp.common.QualifiedTarget(self.gyp_path,
- tgt_name, toolset)
- xcode_target = xcode_targets[qualified_target]
- if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget):
- # Collect all the run test targets.
- all_run_tests = []
- pbxtds = xcode_target.GetProperty('dependencies')
- for pbxtd in pbxtds:
- pbxcip = pbxtd.GetProperty('targetProxy')
- dependency_xct = pbxcip.GetProperty('remoteGlobalIDString')
- if hasattr(dependency_xct, 'test_runner'):
- all_run_tests.append(dependency_xct.test_runner)
-
- # Directly depend on all the runners as they depend on the target
- # that builds them.
- if len(all_run_tests) > 0:
- run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({
- 'name': 'Run %s Tests' % tgt_name,
- 'productName': tgt_name,
- },
- parent=self.project)
- for run_test_target in all_run_tests:
- run_all_target.AddDependency(run_test_target)
-
- # Insert the test runner after the related target.
- idx = self.project._properties['targets'].index(xcode_target)
- self.project._properties['targets'].insert(idx + 1, run_all_target)
-
- # Update all references to other projects, to make sure that the lists of
- # remote products are complete. Otherwise, Xcode will fill them in when
- # it opens the project file, which will result in unnecessary diffs.
- # TODO(mark): This is evil because it relies on internal knowledge of
- # PBXProject._other_pbxprojects.
- for other_pbxproject in self.project._other_pbxprojects.keys():
- self.project.AddOrGetProjectReference(other_pbxproject)
-
- self.project.SortRemoteProductReferences()
-
- # Give everything an ID.
- self.project_file.ComputeIDs()
-
- # Make sure that no two objects in the project file have the same ID. If
- # multiple objects wind up with the same ID, upon loading the file, Xcode
- # will only recognize one object (the last one in the file?) and the
- # results are unpredictable.
- self.project_file.EnsureNoIDCollisions()
-
- def Write(self):
- # Write the project file to a temporary location first. Xcode watches for
- # changes to the project file and presents a UI sheet offering to reload
- # the project when it does change. However, in some cases, especially when
- # multiple projects are open or when Xcode is busy, things don't work so
- # seamlessly. Sometimes, Xcode is able to detect that a project file has
- # changed but can't unload it because something else is referencing it.
- # To mitigate this problem, and to avoid even having Xcode present the UI
- # sheet when an open project is rewritten for inconsequential changes, the
- # project file is written to a temporary file in the xcodeproj directory
- # first. The new temporary file is then compared to the existing project
- # file, if any. If they differ, the new file replaces the old; otherwise,
- # the new project file is simply deleted. Xcode properly detects a file
- # being renamed over an open project file as a change and so it remains
- # able to present the "project file changed" sheet under this system.
- # Writing to a temporary file first also avoids the possible problem of
- # Xcode rereading an incomplete project file.
- (output_fd, new_pbxproj_path) = \
- tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.',
- dir=self.path)
-
- try:
- output_file = os.fdopen(output_fd, 'wb')
-
- self.project_file.Print(output_file)
- output_file.close()
-
- pbxproj_path = os.path.join(self.path, 'project.pbxproj')
-
- same = False
- try:
- same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False)
- except OSError, e:
- if e.errno != errno.ENOENT:
- raise
-
- if same:
- # The new file is identical to the old one, just get rid of the new
- # one.
- os.unlink(new_pbxproj_path)
- else:
- # The new file is different from the old one, or there is no old one.
- # Rename the new file to the permanent name.
- #
- # tempfile.mkstemp uses an overly restrictive mode, resulting in a
- # file that can only be read by the owner, regardless of the umask.
- # There's no reason to not respect the umask here, which means that
- # an extra hoop is required to fetch it and reset the new file's mode.
- #
- # No way to get the umask without setting a new one? Set a safe one
- # and then set it back to the old value.
- umask = os.umask(077)
- os.umask(umask)
-
- os.chmod(new_pbxproj_path, 0666 & ~umask)
- os.rename(new_pbxproj_path, pbxproj_path)
-
- except Exception:
- # Don't leave turds behind. In fact, if this code was responsible for
- # creating the xcodeproj directory, get rid of that too.
- os.unlink(new_pbxproj_path)
- if self.created_dir:
- shutil.rmtree(self.path, True)
- raise
-
-
-def AddSourceToTarget(source, type, pbxp, xct):
- # TODO(mark): Perhaps source_extensions and library_extensions can be made a
- # little bit fancier.
- source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's', 'swift']
-
- # .o is conceptually more of a "source" than a "library," but Xcode thinks
- # of "sources" as things to compile and "libraries" (or "frameworks") as
- # things to link with. Adding an object file to an Xcode target's frameworks
- # phase works properly.
- library_extensions = ['a', 'dylib', 'framework', 'o']
-
- basename = posixpath.basename(source)
- (root, ext) = posixpath.splitext(basename)
- if ext:
- ext = ext[1:].lower()
-
- if ext in source_extensions and type != 'none':
- xct.SourcesPhase().AddFile(source)
- elif ext in library_extensions and type != 'none':
- xct.FrameworksPhase().AddFile(source)
- else:
- # Files that aren't added to a sources or frameworks build phase can still
- # go into the project file, just not as part of a build phase.
- pbxp.AddOrGetFileInRootGroup(source)
-
-
-def AddResourceToTarget(resource, pbxp, xct):
- # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
- # where it's used.
- xct.ResourcesPhase().AddFile(resource)
-
-
-def AddHeaderToTarget(header, pbxp, xct, is_public):
- # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
- # where it's used.
- settings = '{ATTRIBUTES = (%s, ); }' % ('Private', 'Public')[is_public]
- xct.HeadersPhase().AddFile(header, settings)
-
-
-_xcode_variable_re = re.compile(r'(\$\((.*?)\))')
-def ExpandXcodeVariables(string, expansions):
- """Expands Xcode-style $(VARIABLES) in string per the expansions dict.
-
- In some rare cases, it is appropriate to expand Xcode variables when a
- project file is generated. For any substring $(VAR) in string, if VAR is a
- key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
- Any $(VAR) substring in string for which VAR is not a key in the expansions
- dict will remain in the returned string.
- """
-
- matches = _xcode_variable_re.findall(string)
-  if not matches:
- return string
-
- matches.reverse()
- for match in matches:
- (to_replace, variable) = match
-    if variable not in expansions:
- continue
-
- replacement = expansions[variable]
- string = re.sub(re.escape(to_replace), replacement, string)
-
- return string
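-
-# A quick sketch of the behavior (hypothetical values):
-#   ExpandXcodeVariables('$(INPUT_FILE_BASE).cc', {'INPUT_FILE_BASE': 'two'})
-# returns 'two.cc', while variables absent from the expansions dict, such as
-# $(SRCROOT), pass through untouched.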
-
-
-_xcode_define_re = re.compile(r'([\\\"\' ])')
-def EscapeXcodeDefine(s):
- """We must escape the defines that we give to XCode so that it knows not to
- split on spaces and to respect backslash and quote literals. However, we
- must not quote the define, or Xcode will incorrectly intepret variables
- especially $(inherited)."""
- return re.sub(_xcode_define_re, r'\\\1', s)
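-
-# E.g. (illustrative): EscapeXcodeDefine('A="x y"') returns A=\"x\ y\",
-# escaping the quotes and the space but leaving $(inherited)-style
-# variable references unquoted so Xcode can still expand them.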
-
-
-def PerformBuild(data, configurations, params):
- options = params['options']
-
- for build_file, build_file_dict in data.iteritems():
- (build_file_root, build_file_ext) = os.path.splitext(build_file)
- if build_file_ext != '.gyp':
- continue
- xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
- if options.generator_output:
- xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
-
- for config in configurations:
- arguments = ['xcodebuild', '-project', xcodeproj_path]
- arguments += ['-configuration', config]
- print "Building [%s]: %s" % (config, arguments)
- subprocess.check_call(arguments)
-
-
-def CalculateGeneratorInputInfo(params):
- toplevel = params['options'].toplevel_dir
- if params.get('flavor') == 'ninja':
- generator_dir = os.path.relpath(params['options'].generator_output or '.')
- output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
- output_dir = os.path.normpath(os.path.join(generator_dir, output_dir))
- qualified_out_dir = os.path.normpath(os.path.join(
- toplevel, output_dir, 'gypfiles-xcode-ninja'))
- else:
- output_dir = os.path.normpath(os.path.join(toplevel, 'xcodebuild'))
- qualified_out_dir = os.path.normpath(os.path.join(
- toplevel, output_dir, 'gypfiles'))
-
- global generator_filelist_paths
- generator_filelist_paths = {
- 'toplevel': toplevel,
- 'qualified_out_dir': qualified_out_dir,
- }
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- # Optionally configure each spec to use ninja as the external builder.
- ninja_wrapper = params.get('flavor') == 'ninja'
- if ninja_wrapper:
- (target_list, target_dicts, data) = \
- gyp.xcode_ninja.CreateWrapper(target_list, target_dicts, data, params)
-
- options = params['options']
- generator_flags = params.get('generator_flags', {})
- parallel_builds = generator_flags.get('xcode_parallel_builds', True)
- serialize_all_tests = \
- generator_flags.get('xcode_serialize_all_test_runs', True)
- upgrade_check_project_version = \
- generator_flags.get('xcode_upgrade_check_project_version', None)
-
- # Format upgrade_check_project_version with leading zeros as needed.
- if upgrade_check_project_version:
-    upgrade_check_project_version = str(upgrade_check_project_version).zfill(4)
-
- skip_excluded_files = \
- not generator_flags.get('xcode_list_excluded_files', True)
- xcode_projects = {}
- for build_file, build_file_dict in data.iteritems():
- (build_file_root, build_file_ext) = os.path.splitext(build_file)
- if build_file_ext != '.gyp':
- continue
- xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
- if options.generator_output:
- xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
- xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict)
- xcode_projects[build_file] = xcp
- pbxp = xcp.project
-
- # Set project-level attributes from multiple options
-    project_attributes = {}
- if parallel_builds:
- project_attributes['BuildIndependentTargetsInParallel'] = 'YES'
- if upgrade_check_project_version:
- project_attributes['LastUpgradeCheck'] = upgrade_check_project_version
- project_attributes['LastTestingUpgradeCheck'] = \
- upgrade_check_project_version
- project_attributes['LastSwiftUpdateCheck'] = \
- upgrade_check_project_version
- pbxp.SetProperty('attributes', project_attributes)
-
- # Add gyp/gypi files to project
- if not generator_flags.get('standalone'):
- main_group = pbxp.GetProperty('mainGroup')
- build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'})
- main_group.AppendChild(build_group)
- for included_file in build_file_dict['included_files']:
- build_group.AddOrGetFileByPath(included_file, False)
-
- xcode_targets = {}
- xcode_target_to_target_dict = {}
- for qualified_target in target_list:
- [build_file, target_name, toolset] = \
- gyp.common.ParseQualifiedTarget(qualified_target)
-
- spec = target_dicts[qualified_target]
- if spec['toolset'] != 'target':
- raise Exception(
- 'Multiple toolsets not supported in xcode build (target %s)' %
- qualified_target)
- configuration_names = [spec['default_configuration']]
- for configuration_name in sorted(spec['configurations'].keys()):
- if configuration_name not in configuration_names:
- configuration_names.append(configuration_name)
- xcp = xcode_projects[build_file]
- pbxp = xcp.project
-
- # Set up the configurations for the target according to the list of names
- # supplied.
- xccl = CreateXCConfigurationList(configuration_names)
-
- # Create an XCTarget subclass object for the target. The type with
- # "+bundle" appended will be used if the target has "mac_bundle" set.
- # loadable_modules not in a mac_bundle are mapped to
- # com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets
- # to create a single-file mh_bundle.
- _types = {
- 'executable': 'com.apple.product-type.tool',
- 'loadable_module': 'com.googlecode.gyp.xcode.bundle',
- 'shared_library': 'com.apple.product-type.library.dynamic',
- 'static_library': 'com.apple.product-type.library.static',
- 'mac_kernel_extension': 'com.apple.product-type.kernel-extension',
- 'executable+bundle': 'com.apple.product-type.application',
- 'loadable_module+bundle': 'com.apple.product-type.bundle',
- 'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test',
- 'shared_library+bundle': 'com.apple.product-type.framework',
- 'executable+extension+bundle': 'com.apple.product-type.app-extension',
- 'executable+watch+extension+bundle':
- 'com.apple.product-type.watchkit-extension',
- 'executable+watch+bundle':
- 'com.apple.product-type.application.watchapp',
- 'mac_kernel_extension+bundle': 'com.apple.product-type.kernel-extension',
- }
-
- target_properties = {
- 'buildConfigurationList': xccl,
- 'name': target_name,
- }
-
- type = spec['type']
- is_xctest = int(spec.get('mac_xctest_bundle', 0))
- is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest
- is_app_extension = int(spec.get('ios_app_extension', 0))
- is_watchkit_extension = int(spec.get('ios_watchkit_extension', 0))
- is_watch_app = int(spec.get('ios_watch_app', 0))
- if type != 'none':
- type_bundle_key = type
- if is_xctest:
- type_bundle_key += '+xctest'
- assert type == 'loadable_module', (
- 'mac_xctest_bundle targets must have type loadable_module '
- '(target %s)' % target_name)
- elif is_app_extension:
- assert is_bundle, ('ios_app_extension flag requires mac_bundle '
- '(target %s)' % target_name)
- type_bundle_key += '+extension+bundle'
- elif is_watchkit_extension:
- assert is_bundle, ('ios_watchkit_extension flag requires mac_bundle '
- '(target %s)' % target_name)
- type_bundle_key += '+watch+extension+bundle'
- elif is_watch_app:
- assert is_bundle, ('ios_watch_app flag requires mac_bundle '
- '(target %s)' % target_name)
- type_bundle_key += '+watch+bundle'
- elif is_bundle:
- type_bundle_key += '+bundle'
-
- xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
- try:
- target_properties['productType'] = _types[type_bundle_key]
- except KeyError, e:
- gyp.common.ExceptionAppend(e, "-- unknown product type while "
- "writing target %s" % target_name)
- raise
- else:
- xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget
- assert not is_bundle, (
- 'mac_bundle targets cannot have type none (target "%s")' %
- target_name)
- assert not is_xctest, (
- 'mac_xctest_bundle targets cannot have type none (target "%s")' %
- target_name)
-
- target_product_name = spec.get('product_name')
- if target_product_name is not None:
- target_properties['productName'] = target_product_name
-
- xct = xctarget_type(target_properties, parent=pbxp,
- force_outdir=spec.get('product_dir'),
- force_prefix=spec.get('product_prefix'),
- force_extension=spec.get('product_extension'))
- pbxp.AppendProperty('targets', xct)
- xcode_targets[qualified_target] = xct
- xcode_target_to_target_dict[xct] = spec
-
- spec_actions = spec.get('actions', [])
- spec_rules = spec.get('rules', [])
-
- # Xcode has some "issues" with checking dependencies for the "Compile
- # sources" step with any source files/headers generated by actions/rules.
- # To work around this, if a target is building anything directly (not
- # type "none"), then a second target is used to run the GYP actions/rules
- # and is made a dependency of this target. This way the work is done
- # before the dependency checks for what should be recompiled.
- support_xct = None
- # The Xcode "issues" don't affect xcode-ninja builds, since the dependency
- # logic all happens in ninja. Don't bother creating the extra targets in
- # that case.
- if type != 'none' and (spec_actions or spec_rules) and not ninja_wrapper:
-      support_xccl = CreateXCConfigurationList(configuration_names)
- support_target_suffix = generator_flags.get(
- 'support_target_suffix', ' Support')
- support_target_properties = {
- 'buildConfigurationList': support_xccl,
- 'name': target_name + support_target_suffix,
- }
- if target_product_name:
- support_target_properties['productName'] = \
- target_product_name + ' Support'
- support_xct = \
- gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties,
- parent=pbxp)
- pbxp.AppendProperty('targets', support_xct)
- xct.AddDependency(support_xct)
- # Hang the support target off the main target so it can be tested/found
- # by the generator during Finalize.
- xct.support_target = support_xct
-
- prebuild_index = 0
-
- # Add custom shell script phases for "actions" sections.
- for action in spec_actions:
- # There's no need to write anything into the script to ensure that the
- # output directories already exist, because Xcode will look at the
- # declared outputs and automatically ensure that they exist for us.
-
- # Do we have a message to print when this action runs?
- message = action.get('message')
- if message:
- message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message)
- else:
- message = ''
-
- # Turn the list into a string that can be passed to a shell.
- action_string = gyp.common.EncodePOSIXShellList(action['action'])
-
- # Convert Xcode-type variable references to sh-compatible environment
- # variable references.
- message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message)
- action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
- action_string)
-
- script = ''
- # Include the optional message
- if message_sh:
- script += message_sh + '\n'
-      # Be sure the script runs via exec, and that if exec fails, the
-      # script exits signalling an error.
- script += 'exec ' + action_string_sh + '\nexit 1\n'
- ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
- 'inputPaths': action['inputs'],
- 'name': 'Action "' + action['action_name'] + '"',
- 'outputPaths': action['outputs'],
- 'shellScript': script,
- 'showEnvVarsInLog': 0,
- })
-
- if support_xct:
- support_xct.AppendProperty('buildPhases', ssbp)
- else:
- # TODO(mark): this assumes too much knowledge of the internals of
- # xcodeproj_file; some of these smarts should move into xcodeproj_file
- # itself.
- xct._properties['buildPhases'].insert(prebuild_index, ssbp)
- prebuild_index = prebuild_index + 1
-
- # TODO(mark): Should verify that at most one of these is specified.
- if int(action.get('process_outputs_as_sources', False)):
- for output in action['outputs']:
- AddSourceToTarget(output, type, pbxp, xct)
-
- if int(action.get('process_outputs_as_mac_bundle_resources', False)):
- for output in action['outputs']:
- AddResourceToTarget(output, pbxp, xct)
-
- # tgt_mac_bundle_resources holds the list of bundle resources so
- # the rule processing can check against it.
- if is_bundle:
- tgt_mac_bundle_resources = spec.get('mac_bundle_resources', [])
- else:
- tgt_mac_bundle_resources = []
-
- # Add custom shell script phases driving "make" for "rules" sections.
- #
- # Xcode's built-in rule support is almost powerful enough to use directly,
-    # but there are a few significant deficiencies that render it unusable.
- # There are workarounds for some of its inadequacies, but in aggregate,
- # the workarounds added complexity to the generator, and some workarounds
- # actually require input files to be crafted more carefully than I'd like.
- # Consequently, until Xcode rules are made more capable, "rules" input
- # sections will be handled in Xcode output by shell script build phases
- # performed prior to the compilation phase.
- #
- # The following problems with Xcode rules were found. The numbers are
- # Apple radar IDs. I hope that these shortcomings are addressed, I really
- # liked having the rules handled directly in Xcode during the period that
- # I was prototyping this.
- #
- # 6588600 Xcode compiles custom script rule outputs too soon, compilation
- # fails. This occurs when rule outputs from distinct inputs are
- # interdependent. The only workaround is to put rules and their
- # inputs in a separate target from the one that compiles the rule
- # outputs. This requires input file cooperation and it means that
- # process_outputs_as_sources is unusable.
- # 6584932 Need to declare that custom rule outputs should be excluded from
- # compilation. A possible workaround is to lie to Xcode about a
- # rule's output, giving it a dummy file it doesn't know how to
- # compile. The rule action script would need to touch the dummy.
- # 6584839 I need a way to declare additional inputs to a custom rule.
- # A possible workaround is a shell script phase prior to
- # compilation that touches a rule's primary input files if any
- # would-be additional inputs are newer than the output. Modifying
- # the source tree - even just modification times - feels dirty.
- # 6564240 Xcode "custom script" build rules always dump all environment
-    #           variables. This is a low-priority problem and is not a
- # show-stopper.
- rules_by_ext = {}
- for rule in spec_rules:
- rules_by_ext[rule['extension']] = rule
-
- # First, some definitions:
- #
- # A "rule source" is a file that was listed in a target's "sources"
- # list and will have a rule applied to it on the basis of matching the
- # rule's "extensions" attribute. Rule sources are direct inputs to
- # rules.
- #
- # Rule definitions may specify additional inputs in their "inputs"
- # attribute. These additional inputs are used for dependency tracking
- # purposes.
- #
- # A "concrete output" is a rule output with input-dependent variables
- # resolved. For example, given a rule with:
- # 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'],
- # if the target's "sources" list contained "one.ext" and "two.ext",
- # the "concrete output" for rule input "two.ext" would be "two.cc". If
- # a rule specifies multiple outputs, each input file that the rule is
- # applied to will have the same number of concrete outputs.
- #
- # If any concrete outputs are outdated or missing relative to their
- # corresponding rule_source or to any specified additional input, the
- # rule action must be performed to generate the concrete outputs.
-
- # concrete_outputs_by_rule_source will have an item at the same index
- # as the rule['rule_sources'] that it corresponds to. Each item is a
- # list of all of the concrete outputs for the rule_source.
- concrete_outputs_by_rule_source = []
-
- # concrete_outputs_all is a flat list of all concrete outputs that this
- # rule is able to produce, given the known set of input files
- # (rule_sources) that apply to it.
- concrete_outputs_all = []
-
- # messages & actions are keyed by the same indices as rule['rule_sources']
- # and concrete_outputs_by_rule_source. They contain the message and
- # action to perform after resolving input-dependent variables. The
- # message is optional, in which case None is stored for each rule source.
- messages = []
- actions = []
-
- for rule_source in rule.get('rule_sources', []):
- rule_source_dirname, rule_source_basename = \
- posixpath.split(rule_source)
- (rule_source_root, rule_source_ext) = \
- posixpath.splitext(rule_source_basename)
-
- # These are the same variable names that Xcode uses for its own native
- # rule support. Because Xcode's rule engine is not being used, they
- # need to be expanded as they are written to the makefile.
- rule_input_dict = {
- 'INPUT_FILE_BASE': rule_source_root,
- 'INPUT_FILE_SUFFIX': rule_source_ext,
- 'INPUT_FILE_NAME': rule_source_basename,
- 'INPUT_FILE_PATH': rule_source,
- 'INPUT_FILE_DIRNAME': rule_source_dirname,
- }
-
- concrete_outputs_for_this_rule_source = []
- for output in rule.get('outputs', []):
- # Fortunately, Xcode and make both use $(VAR) format for their
- # variables, so the expansion is the only transformation necessary.
-        # Any remaining $(VAR)-type variables in the string can be given
- # directly to make, which will pick up the correct settings from
- # what Xcode puts into the environment.
- concrete_output = ExpandXcodeVariables(output, rule_input_dict)
- concrete_outputs_for_this_rule_source.append(concrete_output)
-
- # Add all concrete outputs to the project.
- pbxp.AddOrGetFileInRootGroup(concrete_output)
-
- concrete_outputs_by_rule_source.append( \
- concrete_outputs_for_this_rule_source)
- concrete_outputs_all.extend(concrete_outputs_for_this_rule_source)
-
- # TODO(mark): Should verify that at most one of these is specified.
- if int(rule.get('process_outputs_as_sources', False)):
- for output in concrete_outputs_for_this_rule_source:
- AddSourceToTarget(output, type, pbxp, xct)
-
- # If the file came from the mac_bundle_resources list or if the rule
- # is marked to process outputs as bundle resource, do so.
- was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources
- if was_mac_bundle_resource or \
- int(rule.get('process_outputs_as_mac_bundle_resources', False)):
- for output in concrete_outputs_for_this_rule_source:
- AddResourceToTarget(output, pbxp, xct)
-
- # Do we have a message to print when this rule runs?
- message = rule.get('message')
- if message:
- message = gyp.common.EncodePOSIXShellArgument(message)
- message = ExpandXcodeVariables(message, rule_input_dict)
- messages.append(message)
-
- # Turn the list into a string that can be passed to a shell.
- action_string = gyp.common.EncodePOSIXShellList(rule['action'])
-
- action = ExpandXcodeVariables(action_string, rule_input_dict)
- actions.append(action)
-
- if len(concrete_outputs_all) > 0:
-      # TODO(mark): There's a possibility for collision here. Consider
- # target "t" rule "A_r" and target "t_A" rule "r".
- makefile_name = '%s.make' % re.sub(
-          '[^a-zA-Z0-9_]', '_', '%s_%s' % (target_name, rule['rule_name']))
- makefile_path = os.path.join(xcode_projects[build_file].path,
- makefile_name)
- # TODO(mark): try/close? Write to a temporary file and swap it only
- # if it's got changes?
- makefile = open(makefile_path, 'wb')
-
- # make will build the first target in the makefile by default. By
- # convention, it's called "all". List all (or at least one)
- # concrete output for each rule source as a prerequisite of the "all"
- # target.
- makefile.write('all: \\\n')
- for concrete_output_index in \
- xrange(0, len(concrete_outputs_by_rule_source)):
- # Only list the first (index [0]) concrete output of each input
- # in the "all" target. Otherwise, a parallel make (-j > 1) would
- # attempt to process each input multiple times simultaneously.
-        # Were it not for that, "all" could simply contain the entire
-        # concrete_outputs_all list.
- concrete_output = \
- concrete_outputs_by_rule_source[concrete_output_index][0]
- if concrete_output_index == len(concrete_outputs_by_rule_source) - 1:
- eol = ''
- else:
- eol = ' \\'
- makefile.write(' %s%s\n' % (concrete_output, eol))
-
- for (rule_source, concrete_outputs, message, action) in \
- zip(rule['rule_sources'], concrete_outputs_by_rule_source,
- messages, actions):
- makefile.write('\n')
-
- # Add a rule that declares it can build each concrete output of a
- # rule source. Collect the names of the directories that are
- # required.
- concrete_output_dirs = []
- for concrete_output_index in xrange(0, len(concrete_outputs)):
- concrete_output = concrete_outputs[concrete_output_index]
- if concrete_output_index == 0:
- bol = ''
- else:
- bol = ' '
- makefile.write('%s%s \\\n' % (bol, concrete_output))
-
- concrete_output_dir = posixpath.dirname(concrete_output)
- if (concrete_output_dir and
- concrete_output_dir not in concrete_output_dirs):
- concrete_output_dirs.append(concrete_output_dir)
-
- makefile.write(' : \\\n')
-
- # The prerequisites for this rule are the rule source itself and
- # the set of additional rule inputs, if any.
- prerequisites = [rule_source]
- prerequisites.extend(rule.get('inputs', []))
- for prerequisite_index in xrange(0, len(prerequisites)):
- prerequisite = prerequisites[prerequisite_index]
- if prerequisite_index == len(prerequisites) - 1:
- eol = ''
- else:
- eol = ' \\'
- makefile.write(' %s%s\n' % (prerequisite, eol))
-
- # Make sure that output directories exist before executing the rule
- # action.
- if len(concrete_output_dirs) > 0:
- makefile.write('\t@mkdir -p "%s"\n' %
- '" "'.join(concrete_output_dirs))
-
- # The rule message and action have already had the necessary variable
- # substitutions performed.
- if message:
- # Mark it with note: so Xcode picks it up in build output.
- makefile.write('\t@echo note: %s\n' % message)
- makefile.write('\t%s\n' % action)
-
- makefile.close()
-
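-      # A hedged illustration (all names hypothetical): for a rule whose
-      # source "one.ext" produces "gen/one.cc", the makefile written above
-      # would look roughly like this (recipe lines begin with a tab):
-      #
-      #   all: \
-      #       gen/one.cc
-      #
-      #   gen/one.cc \
-      #       : \
-      #       one.ext
-      #   	@mkdir -p "gen"
-      #   	@echo note: Processing one.ext
-      #   	do_codegen one.ext gen/one.cc
-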
- # It might be nice to ensure that needed output directories exist
- # here rather than in each target in the Makefile, but that wouldn't
- # work if there ever was a concrete output that had an input-dependent
- # variable anywhere other than in the leaf position.
-
- # Don't declare any inputPaths or outputPaths. If they're present,
- # Xcode will provide a slight optimization by only running the script
- # phase if any output is missing or outdated relative to any input.
- # Unfortunately, it will also assume that all outputs are touched by
- # the script, and if the outputs serve as files in a compilation
- # phase, they will be unconditionally rebuilt. Since make might not
- # rebuild everything that could be declared here as an output, this
- # extra compilation activity is unnecessary. With inputPaths and
- # outputPaths not supplied, make will always be called, but it knows
- # enough to not do anything when everything is up-to-date.
-
- # To help speed things up, pass -j COUNT to make so it does some work
- # in parallel. Don't use ncpus because Xcode will build ncpus targets
- # in parallel and if each target happens to have a rules step, there
- # would be ncpus^2 things going. With a machine that has 2 quad-core
- # Xeons, a build can quickly run out of processes based on
- # scheduling/other tasks, and randomly failing builds are no good.
- script = \
-"""JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)"
-if [ "${JOB_COUNT}" -gt 4 ]; then
- JOB_COUNT=4
-fi
-exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
-exit 1
-""" % makefile_name
- ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
- 'name': 'Rule "' + rule['rule_name'] + '"',
- 'shellScript': script,
- 'showEnvVarsInLog': 0,
- })
-
- if support_xct:
- support_xct.AppendProperty('buildPhases', ssbp)
- else:
- # TODO(mark): this assumes too much knowledge of the internals of
- # xcodeproj_file; some of these smarts should move into xcodeproj_file
- # itself.
- xct._properties['buildPhases'].insert(prebuild_index, ssbp)
- prebuild_index = prebuild_index + 1
-
- # Extra rule inputs also go into the project file. Concrete outputs were
- # already added when they were computed.
- groups = ['inputs', 'inputs_excluded']
- if skip_excluded_files:
- groups = [x for x in groups if not x.endswith('_excluded')]
- for group in groups:
- for item in rule.get(group, []):
- pbxp.AddOrGetFileInRootGroup(item)
-
- # Add "sources".
- for source in spec.get('sources', []):
- (source_root, source_extension) = posixpath.splitext(source)
- if source_extension[1:] not in rules_by_ext:
- # AddSourceToTarget will add the file to a root group if it's not
- # already there.
- AddSourceToTarget(source, type, pbxp, xct)
- else:
- pbxp.AddOrGetFileInRootGroup(source)
-
- # Add "mac_bundle_resources" and "mac_framework_private_headers" if
- # it's a bundle of any type.
- if is_bundle:
- for resource in tgt_mac_bundle_resources:
- (resource_root, resource_extension) = posixpath.splitext(resource)
- if resource_extension[1:] not in rules_by_ext:
- AddResourceToTarget(resource, pbxp, xct)
- else:
- pbxp.AddOrGetFileInRootGroup(resource)
-
- for header in spec.get('mac_framework_private_headers', []):
- AddHeaderToTarget(header, pbxp, xct, False)
-
- # Add "mac_framework_headers". These can be valid for both frameworks
- # and static libraries.
- if is_bundle or type == 'static_library':
- for header in spec.get('mac_framework_headers', []):
- AddHeaderToTarget(header, pbxp, xct, True)
-
- # Add "copies".
- pbxcp_dict = {}
- for copy_group in spec.get('copies', []):
- dest = copy_group['destination']
- if dest[0] not in ('/', '$'):
- # Relative paths are relative to $(SRCROOT).
- dest = '$(SRCROOT)/' + dest
-
- code_sign = int(copy_group.get('xcode_code_sign', 0))
-      settings = (None, '{ATTRIBUTES = (CodeSignOnCopy, ); }')[code_sign]
-
- # Coalesce multiple "copies" sections in the same target with the same
- # "destination" property into the same PBXCopyFilesBuildPhase, otherwise
- # they'll wind up with ID collisions.
- pbxcp = pbxcp_dict.get(dest, None)
- if pbxcp is None:
- pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({
- 'name': 'Copy to ' + copy_group['destination']
- },
- parent=xct)
- pbxcp.SetDestination(dest)
-
- # TODO(mark): The usual comment about this knowing too much about
- # gyp.xcodeproj_file internals applies.
- xct._properties['buildPhases'].insert(prebuild_index, pbxcp)
-
- pbxcp_dict[dest] = pbxcp
-
- for file in copy_group['files']:
- pbxcp.AddFile(file, settings)
-
- # Excluded files can also go into the project file.
- if not skip_excluded_files:
- for key in ['sources', 'mac_bundle_resources', 'mac_framework_headers',
- 'mac_framework_private_headers']:
- excluded_key = key + '_excluded'
- for item in spec.get(excluded_key, []):
- pbxp.AddOrGetFileInRootGroup(item)
-
- # So can "inputs" and "outputs" sections of "actions" groups.
- groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded']
- if skip_excluded_files:
- groups = [x for x in groups if not x.endswith('_excluded')]
- for action in spec.get('actions', []):
- for group in groups:
- for item in action.get(group, []):
- # Exclude anything in BUILT_PRODUCTS_DIR. They're products, not
- # sources.
- if not item.startswith('$(BUILT_PRODUCTS_DIR)/'):
- pbxp.AddOrGetFileInRootGroup(item)
-
- for postbuild in spec.get('postbuilds', []):
- action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action'])
- script = 'exec ' + action_string_sh + '\nexit 1\n'
-
- # Make the postbuild step depend on the output of ld or ar from this
- # target. Apparently putting the script step after the link step isn't
- # sufficient to ensure proper ordering in all cases. With an input
- # declared but no outputs, the script step should run every time, as
- # desired.
- ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
- 'inputPaths': ['$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)'],
- 'name': 'Postbuild "' + postbuild['postbuild_name'] + '"',
- 'shellScript': script,
- 'showEnvVarsInLog': 0,
- })
- xct.AppendProperty('buildPhases', ssbp)
-
- # Add dependencies before libraries, because adding a dependency may imply
- # adding a library. It's preferable to keep dependencies listed first
- # during a link phase so that they can override symbols that would
- # otherwise be provided by libraries, which will usually include system
- # libraries. On some systems, ld is finicky and even requires the
- # libraries to be ordered in such a way that unresolved symbols in
- # earlier-listed libraries may only be resolved by later-listed libraries.
- # The Mac linker doesn't work that way, but other platforms do, and so
- # their linker invocations need to be constructed in this way. There's
- # no compelling reason for Xcode's linker invocations to differ.
-
- if 'dependencies' in spec:
- for dependency in spec['dependencies']:
- xct.AddDependency(xcode_targets[dependency])
- # The support project also gets the dependencies (in case they are
- # needed for the actions/rules to work).
- if support_xct:
- support_xct.AddDependency(xcode_targets[dependency])
-
- if 'libraries' in spec:
- for library in spec['libraries']:
- xct.FrameworksPhase().AddFile(library)
- # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary.
- # I wish Xcode handled this automatically.
- library_dir = posixpath.dirname(library)
- if library_dir not in xcode_standard_library_dirs and (
- not xct.HasBuildSetting(_library_search_paths_var) or
- library_dir not in xct.GetBuildSetting(_library_search_paths_var)):
- xct.AppendBuildSetting(_library_search_paths_var, library_dir)
-
- for configuration_name in configuration_names:
- configuration = spec['configurations'][configuration_name]
- xcbc = xct.ConfigurationNamed(configuration_name)
- for include_dir in configuration.get('mac_framework_dirs', []):
- xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir)
- for include_dir in configuration.get('include_dirs', []):
- xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir)
- for library_dir in configuration.get('library_dirs', []):
- if library_dir not in xcode_standard_library_dirs and (
- not xcbc.HasBuildSetting(_library_search_paths_var) or
- library_dir not in xcbc.GetBuildSetting(_library_search_paths_var)):
- xcbc.AppendBuildSetting(_library_search_paths_var, library_dir)
-
- if 'defines' in configuration:
- for define in configuration['defines']:
- set_define = EscapeXcodeDefine(define)
- xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define)
- if 'xcode_settings' in configuration:
- for xck, xcv in configuration['xcode_settings'].iteritems():
- xcbc.SetBuildSetting(xck, xcv)
- if 'xcode_config_file' in configuration:
- config_ref = pbxp.AddOrGetFileInRootGroup(
- configuration['xcode_config_file'])
- xcbc.SetBaseConfiguration(config_ref)
-
- build_files = []
- for build_file, build_file_dict in data.iteritems():
- if build_file.endswith('.gyp'):
- build_files.append(build_file)
-
- for build_file in build_files:
- xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests)
-
- for build_file in build_files:
- xcode_projects[build_file].Finalize2(xcode_targets,
- xcode_target_to_target_dict)
-
- for build_file in build_files:
- xcode_projects[build_file].Write()
diff --git a/deps/gyp/pylib/gyp/generator/xcode_test.py b/deps/gyp/pylib/gyp/generator/xcode_test.py
deleted file mode 100644
index 260324a43f..0000000000
--- a/deps/gyp/pylib/gyp/generator/xcode_test.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-""" Unit tests for the xcode.py file. """
-
-import gyp.generator.xcode as xcode
-import unittest
-import sys
-
-
-class TestEscapeXcodeDefine(unittest.TestCase):
- if sys.platform == 'darwin':
- def test_InheritedRemainsUnescaped(self):
- self.assertEqual(xcode.EscapeXcodeDefine('$(inherited)'), '$(inherited)')
-
- def test_Escaping(self):
- self.assertEqual(xcode.EscapeXcodeDefine('a b"c\\'), 'a\\ b\\"c\\\\')
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/deps/gyp/pylib/gyp/input.py b/deps/gyp/pylib/gyp/input.py
deleted file mode 100644
index 20178672b2..0000000000
--- a/deps/gyp/pylib/gyp/input.py
+++ /dev/null
@@ -1,2894 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from compiler.ast import Const
-from compiler.ast import Dict
-from compiler.ast import Discard
-from compiler.ast import List
-from compiler.ast import Module
-from compiler.ast import Node
-from compiler.ast import Stmt
-import compiler
-import gyp.common
-import gyp.simple_copy
-import multiprocessing
-import optparse
-import os.path
-import re
-import shlex
-import signal
-import subprocess
-import sys
-import threading
-import time
-import traceback
-from gyp.common import GypError
-from gyp.common import OrderedSet
-
-
-# A list of types that are treated as linkable.
-linkable_types = [
- 'executable',
- 'shared_library',
- 'loadable_module',
- 'mac_kernel_extension',
-]
-
-# A list of sections that contain links to other targets.
-dependency_sections = ['dependencies', 'export_dependent_settings']
-
-# base_path_sections is a list of sections defined by GYP that contain
-# pathnames. The generators can provide more keys, the two lists are merged
-# into path_sections, but you should call IsPathSection instead of using either
-# list directly.
-base_path_sections = [
- 'destination',
- 'files',
- 'include_dirs',
- 'inputs',
- 'libraries',
- 'outputs',
- 'sources',
-]
-path_sections = set()
-
-# These per-process dictionaries are used to cache build file data when loading
-# in parallel mode.
-per_process_data = {}
-per_process_aux_data = {}
-
-def IsPathSection(section):
- # If section ends in one of the '=+?!' characters, it's applied to a section
- # without the trailing characters. '/' is notably absent from this list,
- # because there's no way for a regular expression to be treated as a path.
- while section and section[-1:] in '=+?!':
- section = section[:-1]
-
- if section in path_sections:
- return True
-
-  # Sections matching the regexp '_(dir|file|path)s?$' are also
- # considered PathSections. Using manual string matching since that
- # is much faster than the regexp and this can be called hundreds of
- # thousands of times so micro performance matters.
- if "_" in section:
- tail = section[-6:]
- if tail[-1] == 's':
- tail = tail[:-1]
- if tail[-5:] in ('_file', '_path'):
- return True
- return tail[-4:] == '_dir'
-
- return False
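-
-# A doctest-style sketch of the suffix heuristic above (the path_sections
-# set itself is merged from base_path_sections and generator keys later):
-#   IsPathSection('include_dirs')  -> True   (matches '_dir(s)')
-#   IsPathSection('some_file')     -> True   (matches '_file')
-#   IsPathSection('outputs=')      -> the trailing '=' is stripped first
-#   IsPathSection('defines')       -> False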
-
-# base_non_configuration_keys is a list of key names that belong in the target
-# itself and should not be propagated into its configurations. It is merged
-# with a list that can come from the generator to
-# create non_configuration_keys.
-base_non_configuration_keys = [
- # Sections that must exist inside targets and not configurations.
- 'actions',
- 'configurations',
- 'copies',
- 'default_configuration',
- 'dependencies',
- 'dependencies_original',
- 'libraries',
- 'postbuilds',
- 'product_dir',
- 'product_extension',
- 'product_name',
- 'product_prefix',
- 'rules',
- 'run_as',
- 'sources',
- 'standalone_static_library',
- 'suppress_wildcard',
- 'target_name',
- 'toolset',
- 'toolsets',
- 'type',
-
- # Sections that can be found inside targets or configurations, but that
- # should not be propagated from targets into their configurations.
- 'variables',
-]
-non_configuration_keys = []
-
-# Keys that do not belong inside a configuration dictionary.
-invalid_configuration_keys = [
- 'actions',
- 'all_dependent_settings',
- 'configurations',
- 'dependencies',
- 'direct_dependent_settings',
- 'libraries',
- 'link_settings',
- 'sources',
- 'standalone_static_library',
- 'target_name',
- 'type',
-]
-
-# Controls whether or not the generator supports multiple toolsets.
-multiple_toolsets = False
-
-# Paths for converting filelist paths to output paths: {
-# toplevel,
-#   qualified_out_dir,
-# }
-generator_filelist_paths = None
-
-def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
- """Return a list of all build files included into build_file_path.
-
- The returned list will contain build_file_path as well as all other files
- that it included, either directly or indirectly. Note that the list may
- contain files that were included into a conditional section that evaluated
- to false and was not merged into build_file_path's dict.
-
- aux_data is a dict containing a key for each build file or included build
- file. Those keys provide access to dicts whose "included" keys contain
- lists of all other files included by the build file.
-
- included should be left at its default None value by external callers. It
- is used for recursion.
-
- The returned list will not contain any duplicate entries. Each build file
- in the list will be relative to the current directory.
- """
-
- if included == None:
- included = []
-
- if build_file_path in included:
- return included
-
- included.append(build_file_path)
-
- for included_build_file in aux_data[build_file_path].get('included', []):
- GetIncludedBuildFiles(included_build_file, aux_data, included)
-
- return included
-
-
-def CheckedEval(file_contents):
- """Return the eval of a gyp file.
-
- The gyp file is restricted to dictionaries and lists only, and
- repeated keys are not allowed.
-
- Note that this is slower than eval() is.
- """
-
- ast = compiler.parse(file_contents)
- assert isinstance(ast, Module)
- c1 = ast.getChildren()
- assert c1[0] is None
- assert isinstance(c1[1], Stmt)
- c2 = c1[1].getChildren()
- assert isinstance(c2[0], Discard)
- c3 = c2[0].getChildren()
- assert len(c3) == 1
- return CheckNode(c3[0], [])
-
-
-def CheckNode(node, keypath):
- if isinstance(node, Dict):
- c = node.getChildren()
-    result = {}
-    for n in range(0, len(c), 2):
-      assert isinstance(c[n], Const)
-      key = c[n].getChildren()[0]
-      if key in result:
-        raise GypError("Key '" + key + "' repeated at level " +
-                       repr(len(keypath) + 1) + " with key path '" +
-                       '.'.join(keypath) + "'")
-      kp = list(keypath)  # Make a copy of the list for descending this node.
-      kp.append(key)
-      result[key] = CheckNode(c[n + 1], kp)
-    return result
- elif isinstance(node, List):
- c = node.getChildren()
- children = []
- for index, child in enumerate(c):
- kp = list(keypath) # Copy list.
- kp.append(repr(index))
- children.append(CheckNode(child, kp))
- return children
- elif isinstance(node, Const):
- return node.getChildren()[0]
- else:
- raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) +
- "': " + repr(node))
-
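-# A hedged example of the checked evaluation above: CheckedEval enforces
-# the "literals only, no repeated keys" rule that plain eval() would not.
-#   CheckedEval("{'targets': [{'target_name': 'a'}]}")
-#     -> {'targets': [{'target_name': 'a'}]}
-#   CheckedEval("{'a': 1, 'a': 2}")
-#     -> raises GypError("Key 'a' repeated at level 1 with key path ''")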
-
-def LoadOneBuildFile(build_file_path, data, aux_data, includes,
- is_target, check):
- if build_file_path in data:
- return data[build_file_path]
-
- if os.path.exists(build_file_path):
- build_file_contents = open(build_file_path).read()
- else:
- raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))
-
- build_file_data = None
- try:
- if check:
- build_file_data = CheckedEval(build_file_contents)
- else:
- build_file_data = eval(build_file_contents, {'__builtins__': None},
- None)
- except SyntaxError, e:
- e.filename = build_file_path
- raise
- except Exception, e:
- gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
- raise
-
- if type(build_file_data) is not dict:
- raise GypError("%s does not evaluate to a dictionary." % build_file_path)
-
- data[build_file_path] = build_file_data
- aux_data[build_file_path] = {}
-
- # Scan for includes and merge them in.
- if ('skip_includes' not in build_file_data or
- not build_file_data['skip_includes']):
- try:
- if is_target:
- LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
- aux_data, includes, check)
- else:
- LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
- aux_data, None, check)
- except Exception, e:
- gyp.common.ExceptionAppend(e,
- 'while reading includes of ' + build_file_path)
- raise
-
- return build_file_data
-
-
-def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
- includes, check):
- includes_list = []
- if includes != None:
- includes_list.extend(includes)
- if 'includes' in subdict:
- for include in subdict['includes']:
- # "include" is specified relative to subdict_path, so compute the real
- # path to include by appending the provided "include" to the directory
- # in which subdict_path resides.
- relative_include = \
- os.path.normpath(os.path.join(os.path.dirname(subdict_path), include))
- includes_list.append(relative_include)
- # Unhook the includes list, it's no longer needed.
- del subdict['includes']
-
- # Merge in the included files.
- for include in includes_list:
- if not 'included' in aux_data[subdict_path]:
- aux_data[subdict_path]['included'] = []
- aux_data[subdict_path]['included'].append(include)
-
- gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)
-
- MergeDicts(subdict,
- LoadOneBuildFile(include, data, aux_data, None, False, check),
- subdict_path, include)
-
- # Recurse into subdictionaries.
- for k, v in subdict.iteritems():
- if type(v) is dict:
- LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data,
- None, check)
- elif type(v) is list:
- LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data,
- check)
-
-
-# This recurses into lists so that it can look for dicts.
-def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check):
- for item in sublist:
- if type(item) is dict:
- LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
- None, check)
- elif type(item) is list:
- LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check)
-
-# Processes toolsets in all the targets. This recurses into condition entries
-# since they can contain toolsets as well.
-def ProcessToolsetsInDict(data):
- if 'targets' in data:
- target_list = data['targets']
- new_target_list = []
- for target in target_list:
- # If this target already has an explicit 'toolset', and no 'toolsets'
- # list, don't modify it further.
- if 'toolset' in target and 'toolsets' not in target:
- new_target_list.append(target)
- continue
- if multiple_toolsets:
- toolsets = target.get('toolsets', ['target'])
- else:
- toolsets = ['target']
- # Make sure this 'toolsets' definition is only processed once.
- if 'toolsets' in target:
- del target['toolsets']
- if len(toolsets) > 0:
- # Optimization: only do copies if more than one toolset is specified.
- for build in toolsets[1:]:
- new_target = gyp.simple_copy.deepcopy(target)
- new_target['toolset'] = build
- new_target_list.append(new_target)
- target['toolset'] = toolsets[0]
- new_target_list.append(target)
- data['targets'] = new_target_list
- if 'conditions' in data:
- for condition in data['conditions']:
- if type(condition) is list:
- for condition_dict in condition[1:]:
- if type(condition_dict) is dict:
- ProcessToolsetsInDict(condition_dict)
-
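-# Illustrative sketch: with multiple_toolsets enabled, a target like
-#   {'target_name': 'foo', 'toolsets': ['target', 'host'], ...}
-# is expanded into two copies, one with 'toolset': 'target' and one with
-# 'toolset': 'host'; when only a single toolset is in play it simply
-# becomes {'target_name': 'foo', 'toolset': 'target', ...}.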
-
-# TODO(mark): I don't love this name. It just means that it's going to load
-# a build file that contains targets and is expected to provide a targets dict
-# that contains the targets...
-def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
- depth, check, load_dependencies):
- # If depth is set, predefine the DEPTH variable to be a relative path from
- # this build file's directory to the directory identified by depth.
- if depth:
- # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
- # temporary measure. This should really be addressed by keeping all paths
- # in POSIX until actual project generation.
- d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
- if d == '':
- variables['DEPTH'] = '.'
- else:
- variables['DEPTH'] = d.replace('\\', '/')
-
- # The 'target_build_files' key is only set when loading target build files in
- # the non-parallel code path, where LoadTargetBuildFile is called
- # recursively. In the parallel code path, we don't need to check whether the
- # |build_file_path| has already been loaded, because the 'scheduled' set in
- # ParallelState guarantees that we never load the same |build_file_path|
- # twice.
- if 'target_build_files' in data:
- if build_file_path in data['target_build_files']:
- # Already loaded.
- return False
- data['target_build_files'].add(build_file_path)
-
- gyp.DebugOutput(gyp.DEBUG_INCLUDES,
- "Loading Target Build File '%s'", build_file_path)
-
- build_file_data = LoadOneBuildFile(build_file_path, data, aux_data,
- includes, True, check)
-
- # Store DEPTH for later use in generators.
- build_file_data['_DEPTH'] = depth
-
- # Set up the included_files key indicating which .gyp files contributed to
- # this target dict.
- if 'included_files' in build_file_data:
- raise GypError(build_file_path + ' must not contain included_files key')
-
- included = GetIncludedBuildFiles(build_file_path, aux_data)
- build_file_data['included_files'] = []
- for included_file in included:
- # included_file is relative to the current directory, but it needs to
- # be made relative to build_file_path's directory.
- included_relative = \
- gyp.common.RelativePath(included_file,
- os.path.dirname(build_file_path))
- build_file_data['included_files'].append(included_relative)
-
- # Do a first round of toolsets expansion so that conditions can be defined
- # per toolset.
- ProcessToolsetsInDict(build_file_data)
-
- # Apply "pre"/"early" variable expansions and condition evaluations.
- ProcessVariablesAndConditionsInDict(
- build_file_data, PHASE_EARLY, variables, build_file_path)
-
- # Since some toolsets might have been defined conditionally, perform
- # a second round of toolsets expansion now.
- ProcessToolsetsInDict(build_file_data)
-
- # Look at each project's target_defaults dict, and merge settings into
- # targets.
- if 'target_defaults' in build_file_data:
- if 'targets' not in build_file_data:
- raise GypError("Unable to find targets in build file %s" %
- build_file_path)
-
- index = 0
- while index < len(build_file_data['targets']):
- # This procedure needs to give the impression that target_defaults is
- # used as defaults, and the individual targets inherit from that.
- # The individual targets need to be merged into the defaults. Make
- # a deep copy of the defaults for each target, merge the target dict
- # as found in the input file into that copy, and then hook up the
- # copy with the target-specific data merged into it as the replacement
- # target dict.
- old_target_dict = build_file_data['targets'][index]
- new_target_dict = gyp.simple_copy.deepcopy(
- build_file_data['target_defaults'])
- MergeDicts(new_target_dict, old_target_dict,
- build_file_path, build_file_path)
- build_file_data['targets'][index] = new_target_dict
- index += 1
-
- # No longer needed.
- del build_file_data['target_defaults']
-
- # Look for dependencies. This means that dependency resolution occurs
- # after "pre" conditionals and variable expansion, but before "post" -
- # in other words, you can't put a "dependencies" section inside a "post"
- # conditional within a target.
-
- dependencies = []
- if 'targets' in build_file_data:
- for target_dict in build_file_data['targets']:
- if 'dependencies' not in target_dict:
- continue
- for dependency in target_dict['dependencies']:
- dependencies.append(
- gyp.common.ResolveTarget(build_file_path, dependency, None)[0])
-
- if load_dependencies:
- for dependency in dependencies:
- try:
- LoadTargetBuildFile(dependency, data, aux_data, variables,
- includes, depth, check, load_dependencies)
- except Exception, e:
- gyp.common.ExceptionAppend(
- e, 'while loading dependencies of %s' % build_file_path)
- raise
- else:
- return (build_file_path, dependencies)
-
-def CallLoadTargetBuildFile(global_flags,
- build_file_path, variables,
- includes, depth, check,
- generator_input_info):
- """Wrapper around LoadTargetBuildFile for parallel processing.
-
- This wrapper is used when LoadTargetBuildFile is executed in
- a worker process.
- """
-
- try:
- signal.signal(signal.SIGINT, signal.SIG_IGN)
-
- # Apply globals so that the worker process behaves the same.
- for key, value in global_flags.iteritems():
- globals()[key] = value
-
- SetGeneratorGlobals(generator_input_info)
- result = LoadTargetBuildFile(build_file_path, per_process_data,
- per_process_aux_data, variables,
- includes, depth, check, False)
- if not result:
- return result
-
- (build_file_path, dependencies) = result
-
- # We can safely pop the build_file_data from per_process_data because it
- # will never be referenced by this process again, so we don't need to keep
- # it in the cache.
- build_file_data = per_process_data.pop(build_file_path)
-
- # This gets serialized and sent back to the main process via a pipe.
- # It's handled in LoadTargetBuildFileCallback.
- return (build_file_path,
- build_file_data,
- dependencies)
- except GypError, e:
- sys.stderr.write("gyp: %s\n" % e)
- return None
- except Exception, e:
- print >>sys.stderr, 'Exception:', e
- print >>sys.stderr, traceback.format_exc()
- return None
-
-
-class ParallelProcessingError(Exception):
- pass
-
-
-class ParallelState(object):
- """Class to keep track of state when processing input files in parallel.
-
- If build files are loaded in parallel, use this to keep track of
- state during farming out and processing parallel jobs. It's stored
- in a global so that the callback function can have access to it.
- """
-
- def __init__(self):
- # The multiprocessing pool.
- self.pool = None
- # The condition variable used to protect this object and notify
- # the main loop when there might be more data to process.
- self.condition = None
- # The "data" dict that was passed to LoadTargetBuildFileParallel
- self.data = None
- # The number of parallel calls outstanding; decremented when a response
- # was received.
- self.pending = 0
- # The set of all build files that have been scheduled, so we don't
- # schedule the same one twice.
- self.scheduled = set()
- # A list of dependency build file paths that haven't been scheduled yet.
- self.dependencies = []
- # Flag to indicate if there was an error in a child process.
- self.error = False
-
- def LoadTargetBuildFileCallback(self, result):
- """Handle the results of running LoadTargetBuildFile in another process.
- """
- self.condition.acquire()
- if not result:
- self.error = True
- self.condition.notify()
- self.condition.release()
- return
- (build_file_path0, build_file_data0, dependencies0) = result
- self.data[build_file_path0] = build_file_data0
- self.data['target_build_files'].add(build_file_path0)
- for new_dependency in dependencies0:
- if new_dependency not in self.scheduled:
- self.scheduled.add(new_dependency)
- self.dependencies.append(new_dependency)
- self.pending -= 1
- self.condition.notify()
- self.condition.release()
-
-
-def LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
- check, generator_input_info):
- parallel_state = ParallelState()
- parallel_state.condition = threading.Condition()
- # Make copies of the build_files argument that we can modify while working.
- parallel_state.dependencies = list(build_files)
- parallel_state.scheduled = set(build_files)
- parallel_state.pending = 0
- parallel_state.data = data
-
- try:
- parallel_state.condition.acquire()
- while parallel_state.dependencies or parallel_state.pending:
- if parallel_state.error:
- break
- if not parallel_state.dependencies:
- parallel_state.condition.wait()
- continue
-
- dependency = parallel_state.dependencies.pop()
-
- parallel_state.pending += 1
- global_flags = {
- 'path_sections': globals()['path_sections'],
- 'non_configuration_keys': globals()['non_configuration_keys'],
- 'multiple_toolsets': globals()['multiple_toolsets']}
-
- if not parallel_state.pool:
- parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count())
- parallel_state.pool.apply_async(
- CallLoadTargetBuildFile,
- args = (global_flags, dependency,
- variables, includes, depth, check, generator_input_info),
- callback = parallel_state.LoadTargetBuildFileCallback)
- except KeyboardInterrupt, e:
- parallel_state.pool.terminate()
- raise e
-
- parallel_state.condition.release()
-
- parallel_state.pool.close()
- parallel_state.pool.join()
- parallel_state.pool = None
-
- if parallel_state.error:
- sys.exit(1)
-
-# Look for the bracket that matches the first bracket seen in a
-# string, and return the start and end as a tuple. For example, if
-# the input is something like "<(foo <(bar)) blah", then it would
-# return (1, 13), indicating the entire string except for the leading
-# "<" and trailing " blah".
-LBRACKETS = set('{[(')
-BRACKETS = {'}': '{', ']': '[', ')': '('}
-def FindEnclosingBracketGroup(input_str):
- stack = []
- start = -1
- for index, char in enumerate(input_str):
- if char in LBRACKETS:
- stack.append(char)
- if start == -1:
- start = index
- elif char in BRACKETS:
- if not stack:
- return (-1, -1)
- if stack.pop() != BRACKETS[char]:
- return (-1, -1)
- if not stack:
- return (start, index + 1)
- return (-1, -1)
-
-
-def IsStrCanonicalInt(string):
- """Returns True if |string| is in its canonical integer form.
-
- The canonical form is such that str(int(string)) == string.
- """
- if type(string) is str:
- # This function is called a lot so for maximum performance, avoid
- # involving regexps which would otherwise make the code much
- # shorter. Regexps would need twice the time of this function.
- if string:
- if string == "0":
- return True
- if string[0] == "-":
- string = string[1:]
- if not string:
- return False
- if '1' <= string[0] <= '9':
- return string.isdigit()
-
- return False
-
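-# Doctest-style examples of the canonical-form check above:
-#   IsStrCanonicalInt('10')   -> True
-#   IsStrCanonicalInt('-5')   -> True
-#   IsStrCanonicalInt('011')  -> False  (str(int('011')) == '11')
-#   IsStrCanonicalInt('-0')   -> False  (the canonical form is '0')
-#   IsStrCanonicalInt(10)     -> False  (not a str)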
-
-# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
-# "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())".
-# In the last case, the inner "<()" is captured in match['content'].
-early_variable_re = re.compile(
- r'(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
- r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
- r'\((?P<is_array>\s*\[?)'
- r'(?P<content>.*?)(\]?)\))')
-
-# This matches the same as early_variable_re, but with '>' instead of '<'.
-late_variable_re = re.compile(
- r'(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
- r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
- r'\((?P<is_array>\s*\[?)'
- r'(?P<content>.*?)(\]?)\))')
-
-# This matches the same as early_variable_re, but with '^' instead of '<'.
-latelate_variable_re = re.compile(
- r'(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
- r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
- r'\((?P<is_array>\s*\[?)'
- r'(?P<content>.*?)(\]?)\))')
-
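-# A hedged sketch of what the groups capture for a typical early-phase
-# expansion string:
-#   m = early_variable_re.search('<!pymod_do_main(mymod --flag)')
-#   m.group('type')           -> '<!'
-#   m.group('command_string') -> 'pymod_do_main'
-#   m.group('content')        -> 'mymod --flag'
-# For a plain '<(foo)', 'type' is '<' and 'command_string' is None.
-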
-# Global cache of results from running commands so they don't have to be run
-# more than once.
-cached_command_results = {}
-
-
-def FixupPlatformCommand(cmd):
- if sys.platform == 'win32':
- if type(cmd) is list:
- cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
- else:
- cmd = re.sub('^cat ', 'type ', cmd)
- return cmd
-
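-# e.g. on win32, FixupPlatformCommand('cat header.h') returns
-# 'type header.h'; on all other platforms the command is returned
-# unchanged.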
-
-PHASE_EARLY = 0
-PHASE_LATE = 1
-PHASE_LATELATE = 2
-
-
-def ExpandVariables(input, phase, variables, build_file):
- # Look for the pattern that gets expanded into variables
- if phase == PHASE_EARLY:
- variable_re = early_variable_re
- expansion_symbol = '<'
- elif phase == PHASE_LATE:
- variable_re = late_variable_re
- expansion_symbol = '>'
- elif phase == PHASE_LATELATE:
- variable_re = latelate_variable_re
- expansion_symbol = '^'
- else:
- assert False
-
- input_str = str(input)
- if IsStrCanonicalInt(input_str):
- return int(input_str)
-
- # Do a quick scan to determine if an expensive regex search is warranted.
- if expansion_symbol not in input_str:
- return input_str
-
- # Get the entire list of matches as a list of MatchObject instances.
- # (using findall here would return strings instead of MatchObjects).
- matches = list(variable_re.finditer(input_str))
- if not matches:
- return input_str
-
- output = input_str
- # Reverse the list of matches so that replacements are done right-to-left.
- # That ensures that earlier replacements won't mess up the string in a
- # way that causes later calls to find the earlier substituted text instead
- # of what's intended for replacement.
- matches.reverse()
- for match_group in matches:
- match = match_group.groupdict()
- gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
- # match['replace'] is the substring to look for, match['type']
- # is the character code for the replacement type (< > <! >! <| >| <@
- # >@ <!@ >!@), match['is_array'] contains a '[' for command
- # arrays, and match['content'] is the name of the variable (< >)
- # or command to run (<! >!). match['command_string'] is an optional
- # command string. Currently, only 'pymod_do_main' is supported.
-
- # run_command is true if a ! variant is used.
- run_command = '!' in match['type']
- command_string = match['command_string']
-
- # file_list is true if a | variant is used.
- file_list = '|' in match['type']
-
- # Capture these now so we can adjust them later.
- replace_start = match_group.start('replace')
- replace_end = match_group.end('replace')
-
- # Find the ending paren, and re-evaluate the contained string.
- (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])
-
- # Adjust the replacement range to match the entire command
- # found by FindEnclosingBracketGroup (since the variable_re
- # probably doesn't match the entire command if it contained
- # nested variables).
- replace_end = replace_start + c_end
-
- # Find the "real" replacement, matching the appropriate closing
- # paren, and adjust the replacement start and end.
- replacement = input_str[replace_start:replace_end]
-
- # Figure out what the contents of the variable parens are.
- contents_start = replace_start + c_start + 1
- contents_end = replace_end - 1
- contents = input_str[contents_start:contents_end]
-
- # Do filter substitution now for <|().
- # Admittedly, this is different than the evaluation order in other
- # contexts. However, since filtration has no chance to run on <|(),
- # this seems like the only obvious way to give them access to filters.
- if file_list:
- processed_variables = gyp.simple_copy.deepcopy(variables)
- ProcessListFiltersInDict(contents, processed_variables)
- # Recurse to expand variables in the contents
- contents = ExpandVariables(contents, phase,
- processed_variables, build_file)
- else:
- # Recurse to expand variables in the contents
- contents = ExpandVariables(contents, phase, variables, build_file)
-
- # Strip off leading/trailing whitespace so that variable matches are
- # simpler below (and because they are rarely needed).
- contents = contents.strip()
-
- # expand_to_list is true if an @ variant is used. In that case,
-    # the expansion should result in a list. Note that the caller
-    # must then expect a list in return; not all callers do, because
-    # not all of them are working in list context. Also, for list
- # expansions, there can be no other text besides the variable
- # expansion in the input string.
- expand_to_list = '@' in match['type'] and input_str == replacement
-
- if run_command or file_list:
- # Find the build file's directory, so commands can be run or file lists
- # generated relative to it.
- build_file_dir = os.path.dirname(build_file)
- if build_file_dir == '' and not file_list:
- # If build_file is just a leaf filename indicating a file in the
- # current directory, build_file_dir might be an empty string. Set
- # it to None to signal to subprocess.Popen that it should run the
- # command in the current directory.
- build_file_dir = None
-
- # Support <|(listfile.txt ...) which generates a file
- # containing items from a gyp list, generated at gyp time.
- # This works around actions/rules which have more inputs than will
- # fit on the command line.
- if file_list:
- if type(contents) is list:
- contents_list = contents
- else:
- contents_list = contents.split(' ')
- replacement = contents_list[0]
- if os.path.isabs(replacement):
- raise GypError('| cannot handle absolute paths, got "%s"' % replacement)
-
- if not generator_filelist_paths:
- path = os.path.join(build_file_dir, replacement)
- else:
- if os.path.isabs(build_file_dir):
- toplevel = generator_filelist_paths['toplevel']
- rel_build_file_dir = gyp.common.RelativePath(build_file_dir, toplevel)
- else:
- rel_build_file_dir = build_file_dir
- qualified_out_dir = generator_filelist_paths['qualified_out_dir']
- path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
- gyp.common.EnsureDirExists(path)
-
- replacement = gyp.common.RelativePath(path, build_file_dir)
- f = gyp.common.WriteOnDiff(path)
- for i in contents_list[1:]:
- f.write('%s\n' % i)
- f.close()
-
- elif run_command:
- use_shell = True
- if match['is_array']:
- contents = eval(contents)
- use_shell = False
-
- # Check for a cached value to avoid executing commands, or generating
- # file lists more than once. The cache key contains the command to be
- # run as well as the directory to run it from, to account for commands
- # that depend on their current directory.
- # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
- # someone could author a set of GYP files where each time the command
- # is invoked it produces different output by design. When the need
-      # arises, the syntax should be extended to support not caching a
-      # command's output, so that it is run every time.
- cache_key = (str(contents), build_file_dir)
- cached_value = cached_command_results.get(cache_key, None)
- if cached_value is None:
- gyp.DebugOutput(gyp.DEBUG_VARIABLES,
- "Executing command '%s' in directory '%s'",
- contents, build_file_dir)
-
- replacement = ''
-
- if command_string == 'pymod_do_main':
- # <!pymod_do_main(modulename param eters) loads |modulename| as a
- # python module and then calls that module's DoMain() function,
- # passing ["param", "eters"] as a single list argument. For modules
- # that don't load quickly, this can be faster than
- # <!(python modulename param eters). Do this in |build_file_dir|.
- oldwd = os.getcwd() # Python doesn't like os.open('.'): no fchdir.
- if build_file_dir: # build_file_dir may be None (see above).
- os.chdir(build_file_dir)
- try:
-
- parsed_contents = shlex.split(contents)
- try:
- py_module = __import__(parsed_contents[0])
- except ImportError as e:
-              raise GypError("Error importing pymod_do_main "
-                             "module (%s): %s" % (parsed_contents[0], e))
- replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
- finally:
- os.chdir(oldwd)
- assert replacement != None
- elif command_string:
- raise GypError("Unknown command string '%s' in '%s'." %
- (command_string, contents))
- else:
- # Fix up command with platform specific workarounds.
- contents = FixupPlatformCommand(contents)
- try:
- p = subprocess.Popen(contents, shell=use_shell,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- stdin=subprocess.PIPE,
- cwd=build_file_dir)
- except Exception, e:
- raise GypError("%s while executing command '%s' in %s" %
- (e, contents, build_file))
-
- p_stdout, p_stderr = p.communicate('')
-
- if p.wait() != 0 or p_stderr:
- sys.stderr.write(p_stderr)
- # Simulate check_call behavior, since check_call only exists
- # in python 2.5 and later.
- raise GypError("Call to '%s' returned exit status %d while in %s." %
- (contents, p.returncode, build_file))
- replacement = p_stdout.rstrip()
-
- cached_command_results[cache_key] = replacement
- else:
- gyp.DebugOutput(gyp.DEBUG_VARIABLES,
- "Had cache value for command '%s' in directory '%s'",
-                        contents, build_file_dir)
- replacement = cached_value
-
- else:
-      if contents not in variables:
- if contents[-1] in ['!', '/']:
- # In order to allow cross-compiles (nacl) to happen more naturally,
- # we will allow references to >(sources/) etc. to resolve to
-          # an empty list if undefined. This allows actions to:
- # 'action!': [
- # '>@(_sources!)',
- # ],
- # 'action/': [
- # '>@(_sources/)',
- # ],
- replacement = []
- else:
- raise GypError('Undefined variable ' + contents +
- ' in ' + build_file)
- else:
- replacement = variables[contents]
-
- if type(replacement) is list:
- for item in replacement:
- if not contents[-1] == '/' and type(item) not in (str, int):
- raise GypError('Variable ' + contents +
- ' must expand to a string or list of strings; ' +
- 'list contains a ' +
- item.__class__.__name__)
- # Run through the list and handle variable expansions in it. Since
- # the list is guaranteed not to contain dicts, this won't do anything
- # with conditions sections.
- ProcessVariablesAndConditionsInList(replacement, phase, variables,
- build_file)
- elif type(replacement) not in (str, int):
- raise GypError('Variable ' + contents +
- ' must expand to a string or list of strings; ' +
- 'found a ' + replacement.__class__.__name__)
-
- if expand_to_list:
- # Expanding in list context. It's guaranteed that there's only one
- # replacement to do in |input_str| and that it's this replacement. See
- # above.
- if type(replacement) is list:
- # If it's already a list, make a copy.
- output = replacement[:]
- else:
- # Split it the same way sh would split arguments.
- output = shlex.split(str(replacement))
- else:
- # Expanding in string context.
- encoded_replacement = ''
- if type(replacement) is list:
- # When expanding a list into string context, turn the list items
- # into a string in a way that will work with a subprocess call.
- #
- # TODO(mark): This isn't completely correct. This should
- # call a generator-provided function that observes the
- # proper list-to-argument quoting rules on a specific
- # platform instead of just calling the POSIX encoding
- # routine.
- encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
- else:
- encoded_replacement = replacement
-
- output = output[:replace_start] + str(encoded_replacement) + \
- output[replace_end:]
- # Prepare for the next match iteration.
- input_str = output
-
- if output == input:
- gyp.DebugOutput(gyp.DEBUG_VARIABLES,
- "Found only identity matches on %r, avoiding infinite "
- "recursion.",
- output)
- else:
- # Look for more matches now that we've replaced some, to deal with
- # expanding local variables (variables defined in the same
- # variables block as this one).
- gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
- if type(output) is list:
- if output and type(output[0]) is list:
- # Leave output alone if it's a list of lists.
- # We don't want such lists to be stringified.
- pass
- else:
- new_output = []
- for item in output:
- new_output.append(
- ExpandVariables(item, phase, variables, build_file))
- output = new_output
- else:
- output = ExpandVariables(output, phase, variables, build_file)
-
- # Convert all strings that are canonically-represented integers into integers.
- if type(output) is list:
- for index in xrange(0, len(output)):
- if IsStrCanonicalInt(output[index]):
- output[index] = int(output[index])
- elif IsStrCanonicalInt(output):
- output = int(output)
-
- return output
-
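-# A hedged end-to-end sketch of ExpandVariables (early phase, with
-# hypothetical variables):
-#   variables = {'depth': '..', 'srcs': ['a.c', 'b.c']}
-#   ExpandVariables('<(depth)/out', PHASE_EARLY, variables, 'x.gyp')
-#     -> '../out'
-#   ExpandVariables('<@(srcs)', PHASE_EARLY, variables, 'x.gyp')
-#     -> ['a.c', 'b.c']  (an @ expansion in full-string context yields
-#        a real list)
-#   ExpandVariables('<!(echo hi)', PHASE_EARLY, variables, 'x.gyp')
-#     -> 'hi'  (command output, cached per (command, directory))
-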
-# The same condition is often evaluated over and over again so it
-# makes sense to cache as much as possible between evaluations.
-cached_conditions_asts = {}
-
-def EvalCondition(condition, conditions_key, phase, variables, build_file):
- """Returns the dict that should be used or None if the result was
- that nothing should be used."""
- if type(condition) is not list:
- raise GypError(conditions_key + ' must be a list')
- if len(condition) < 2:
- # It's possible that condition[0] won't work in which case this
- # attempt will raise its own IndexError. That's probably fine.
- raise GypError(conditions_key + ' ' + condition[0] +
- ' must be at least length 2, not ' + str(len(condition)))
-
- i = 0
- result = None
- while i < len(condition):
- cond_expr = condition[i]
- true_dict = condition[i + 1]
- if type(true_dict) is not dict:
- raise GypError('{} {} must be followed by a dictionary, not {}'.format(
- conditions_key, cond_expr, type(true_dict)))
- if len(condition) > i + 2 and type(condition[i + 2]) is dict:
- false_dict = condition[i + 2]
- i = i + 3
- if i != len(condition):
- raise GypError('{} {} has {} unexpected trailing items'.format(
- conditions_key, cond_expr, len(condition) - i))
- else:
- false_dict = None
- i = i + 2
- if result == None:
- result = EvalSingleCondition(
- cond_expr, true_dict, false_dict, phase, variables, build_file)
-
- return result
-
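-# The loop above gives a conditions list if/elif/else semantics, e.g.:
-#   ['OS=="mac"', { ...mac dict... },    # if
-#    'OS=="win"', { ...win dict... },    # elif
-#    { ...default dict... }]             # optional trailing else
-# The first expression that evaluates true selects its dict; the bare
-# trailing dict, if present, is used when nothing matched.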
-
-def EvalSingleCondition(
- cond_expr, true_dict, false_dict, phase, variables, build_file):
- """Returns true_dict if cond_expr evaluates to true, and false_dict
- otherwise."""
-  # Do expansions on the condition itself. Since the condition can naturally
- # contain variable references without needing to resort to GYP expansion
- # syntax, this is of dubious value for variables, but someone might want to
- # use a command expansion directly inside a condition.
- cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
- build_file)
- if type(cond_expr_expanded) not in (str, int):
- raise ValueError(
- 'Variable expansion in this context permits str and int ' + \
- 'only, found ' + cond_expr_expanded.__class__.__name__)
-
- try:
- if cond_expr_expanded in cached_conditions_asts:
- ast_code = cached_conditions_asts[cond_expr_expanded]
- else:
- ast_code = compile(cond_expr_expanded, '<string>', 'eval')
- cached_conditions_asts[cond_expr_expanded] = ast_code
- if eval(ast_code, {'__builtins__': None}, variables):
- return true_dict
- return false_dict
- except SyntaxError, e:
- syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
- 'at character %d.' %
- (str(e.args[0]), e.text, build_file, e.offset),
- e.filename, e.lineno, e.offset, e.text)
- raise syntax_error
- except NameError, e:
- gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
- (cond_expr_expanded, build_file))
- raise GypError(e)
-
-
-def ProcessConditionsInDict(the_dict, phase, variables, build_file):
- # Process a 'conditions' or 'target_conditions' section in the_dict,
- # depending on phase.
- # early -> conditions
- # late -> target_conditions
- # latelate -> no conditions
- #
- # Each item in a conditions list consists of cond_expr, a string expression
- # evaluated as the condition, and true_dict, a dict that will be merged into
- # the_dict if cond_expr evaluates to true. Optionally, a third item,
- # false_dict, may be present. false_dict is merged into the_dict if
- # cond_expr evaluates to false.
- #
- # Any dict merged into the_dict will be recursively processed for nested
- # conditionals and other expansions, also according to phase, immediately
- # prior to being merged.
-
- if phase == PHASE_EARLY:
- conditions_key = 'conditions'
- elif phase == PHASE_LATE:
- conditions_key = 'target_conditions'
- elif phase == PHASE_LATELATE:
- return
- else:
- assert False
-
-  if conditions_key not in the_dict:
- return
-
- conditions_list = the_dict[conditions_key]
- # Unhook the conditions list, it's no longer needed.
- del the_dict[conditions_key]
-
- for condition in conditions_list:
- merge_dict = EvalCondition(condition, conditions_key, phase, variables,
- build_file)
-
- if merge_dict != None:
-      # Expand variables and nested conditionals in the merge_dict before
- # merging it.
- ProcessVariablesAndConditionsInDict(merge_dict, phase,
- variables, build_file)
-
- MergeDicts(the_dict, merge_dict, build_file, build_file)
-
-
-def LoadAutomaticVariablesFromDict(variables, the_dict):
- # Any keys with plain string values in the_dict become automatic variables.
- # The variable name is the key name with a "_" character prepended.
- for key, value in the_dict.iteritems():
- if type(value) in (str, int, list):
- variables['_' + key] = value
-
-
-def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
- # Any keys in the_dict's "variables" dict, if it has one, becomes a
- # variable. The variable name is the key name in the "variables" dict.
- # Variables that end with the % character are set only if they are unset in
- # the variables dict. the_dict_key is the name of the key that accesses
- # the_dict in the_dict's parent dict. If the_dict's parent is not a dict
- # (it could be a list or it could be parentless because it is a root dict),
- # the_dict_key will be None.
- for key, value in the_dict.get('variables', {}).iteritems():
- if type(value) not in (str, int, list):
- continue
-
- if key.endswith('%'):
- variable_name = key[:-1]
- if variable_name in variables:
- # If the variable is already set, don't set it.
- continue
-      if the_dict_key == 'variables' and variable_name in the_dict:
-        # If the variable is set without a % in the_dict, and the_dict is a
-        # variables dict (making |variables| a variables sub-dict of a
-        # variables dict), use the_dict's definition.
- value = the_dict[variable_name]
- else:
- variable_name = key
-
- variables[variable_name] = value
-
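-# e.g. (sketch): a build file fragment
-#   {'variables': {'use_foo%': 0}}
-# sets variables['use_foo'] = 0 only when 'use_foo' is not already set,
-# so a definition from the command line (-D use_foo=1) or from an
-# enclosing scope takes precedence over the '%' default.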
-
-def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
- build_file, the_dict_key=None):
- """Handle all variable and command expansion and conditional evaluation.
-
- This function is the public entry point for all variable expansions and
- conditional evaluations. The variables_in dictionary will not be modified
- by this function.
- """
-
- # Make a copy of the variables_in dict that can be modified during the
- # loading of automatics and the loading of the variables dict.
- variables = variables_in.copy()
- LoadAutomaticVariablesFromDict(variables, the_dict)
-
- if 'variables' in the_dict:
- # Make sure all the local variables are added to the variables
- # list before we process them so that you can reference one
- # variable from another. They will be fully expanded by recursion
- # in ExpandVariables.
- for key, value in the_dict['variables'].iteritems():
- variables[key] = value
-
- # Handle the associated variables dict first, so that any variable
- # references within can be resolved prior to using them as variables.
- # Pass a copy of the variables dict to avoid having it be tainted.
- # Otherwise, it would have extra automatics added for everything that
- # should just be an ordinary variable in this scope.
- ProcessVariablesAndConditionsInDict(the_dict['variables'], phase,
- variables, build_file, 'variables')
-
- LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
-
- for key, value in the_dict.iteritems():
- # Skip "variables", which was already processed if present.
- if key != 'variables' and type(value) is str:
- expanded = ExpandVariables(value, phase, variables, build_file)
- if type(expanded) not in (str, int):
- raise ValueError(
- 'Variable expansion in this context permits str and int ' + \
- 'only, found ' + expanded.__class__.__name__ + ' for ' + key)
- the_dict[key] = expanded
-
- # Variable expansion may have resulted in changes to automatics. Reload.
- # TODO(mark): Optimization: only reload if no changes were made.
- variables = variables_in.copy()
- LoadAutomaticVariablesFromDict(variables, the_dict)
- LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
-
- # Process conditions in this dict. This is done after variable expansion
- # so that conditions may take advantage of expanded variables. For example,
- # if the_dict contains:
- # {'type': '<(library_type)',
- # 'conditions': [['_type=="static_library"', { ... }]]},
- # _type, as used in the condition, will only be set to the value of
- # library_type if variable expansion is performed before condition
- # processing. However, condition processing should occur prior to recursion
- # so that variables (both automatic and "variables" dict type) may be
- # adjusted by conditions sections, merged into the_dict, and have the
- # intended impact on contained dicts.
- #
- # This arrangement means that a "conditions" section containing a "variables"
- # section will only have those variables effective in subdicts, not in
- # the_dict. The workaround is to put a "conditions" section within a
- # "variables" section. For example:
- # {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
- # 'defines': ['<(define)'],
- # 'my_subdict': {'defines': ['<(define)']}},
- # will not result in "IS_MAC" being appended to the "defines" list in the
- # current scope but would result in it being appended to the "defines" list
- # within "my_subdict". By comparison:
- # {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
- # 'defines': ['<(define)'],
- # 'my_subdict': {'defines': ['<(define)']}},
- # will append "IS_MAC" to both "defines" lists.
-
- # Evaluate conditions sections, allowing variable expansions within them
- # as well as nested conditionals. This will process a 'conditions' or
- # 'target_conditions' section, perform appropriate merging and recursive
- # conditional and variable processing, and then remove the conditions section
- # from the_dict if it is present.
- ProcessConditionsInDict(the_dict, phase, variables, build_file)
-
- # Conditional processing may have resulted in changes to automatics or the
- # variables dict. Reload.
- variables = variables_in.copy()
- LoadAutomaticVariablesFromDict(variables, the_dict)
- LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
-
- # Recurse into child dicts, or process child lists which may result in
- # further recursion into descendant dicts.
- for key, value in the_dict.iteritems():
- # Skip "variables" and string values, which were already processed if
- # present.
- if key == 'variables' or type(value) is str:
- continue
- if type(value) is dict:
- # Pass a copy of the variables dict so that subdicts can't influence
- # parents.
- ProcessVariablesAndConditionsInDict(value, phase, variables,
- build_file, key)
- elif type(value) is list:
- # The list itself can't influence the variables dict, and
- # ProcessVariablesAndConditionsInList will make copies of the variables
- # dict if it needs to pass it to something that can influence it. No
- # copy is necessary here.
- ProcessVariablesAndConditionsInList(value, phase, variables,
- build_file)
- elif type(value) is not int:
- raise TypeError('Unknown type ' + value.__class__.__name__ + \
- ' for ' + key)
-
-
-def ProcessVariablesAndConditionsInList(the_list, phase, variables,
- build_file):
- # Iterate using an index so that new values can be assigned into the_list.
- index = 0
- while index < len(the_list):
- item = the_list[index]
- if type(item) is dict:
- # Make a copy of the variables dict so that it won't influence anything
- # outside of its own scope.
- ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
- elif type(item) is list:
- ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
- elif type(item) is str:
- expanded = ExpandVariables(item, phase, variables, build_file)
- if type(expanded) in (str, int):
- the_list[index] = expanded
- elif type(expanded) is list:
- the_list[index:index+1] = expanded
- index += len(expanded)
-
- # index now identifies the next item to examine. Continue right now
- # without falling into the index increment below.
- continue
- else:
- raise ValueError(
- 'Variable expansion in this context permits strings and ' + \
- 'lists only, found ' + expanded.__class__.__name__ + ' at ' + \
- str(index))
- elif type(item) is not int:
- raise TypeError('Unknown type ' + item.__class__.__name__ + \
- ' at index ' + str(index))
- index = index + 1
-
-
-def BuildTargetsDict(data):
- """Builds a dict mapping fully-qualified target names to their target dicts.
-
- |data| is a dict mapping loaded build files by pathname relative to the
- current directory. Values in |data| are build file contents. For each
- |data| value with a "targets" key, the value of the "targets" key is taken
- as a list containing target dicts. Each target's fully-qualified name is
- constructed from the pathname of the build file (|data| key) and its
- "target_name" property. These fully-qualified names are used as the keys
- in the returned dict. These keys provide access to the target dicts,
- the dicts in the "targets" lists.
- """
-
- targets = {}
- for build_file in data['target_build_files']:
- for target in data[build_file].get('targets', []):
- target_name = gyp.common.QualifiedTarget(build_file,
- target['target_name'],
- target['toolset'])
- if target_name in targets:
- raise GypError('Duplicate target definitions for ' + target_name)
- targets[target_name] = target
-
- return targets
-
-
-def QualifyDependencies(targets):
- """Make dependency links fully-qualified relative to the current directory.
-
- |targets| is a dict mapping fully-qualified target names to their target
- dicts. For each target in this dict, keys known to contain dependency
- links are examined, and any dependencies referenced will be rewritten
- so that they are fully-qualified and relative to the current directory.
- All rewritten dependencies are suitable for use as keys to |targets| or a
- similar dict.
- """
-
- all_dependency_sections = [dep + op
- for dep in dependency_sections
- for op in ('', '!', '/')]
-
- for target, target_dict in targets.iteritems():
- target_build_file = gyp.common.BuildFile(target)
- toolset = target_dict['toolset']
- for dependency_key in all_dependency_sections:
- dependencies = target_dict.get(dependency_key, [])
- for index in xrange(0, len(dependencies)):
- dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
- target_build_file, dependencies[index], toolset)
- if not multiple_toolsets:
- # Ignore toolset specification in the dependency if it is specified.
- dep_toolset = toolset
- dependency = gyp.common.QualifiedTarget(dep_file,
- dep_target,
- dep_toolset)
- dependencies[index] = dependency
-
- # Make sure anything appearing in a list other than "dependencies" also
- # appears in the "dependencies" list.
- if dependency_key != 'dependencies' and \
- dependency not in target_dict['dependencies']:
- raise GypError('Found ' + dependency + ' in ' + dependency_key +
- ' of ' + target + ', but not in dependencies')
-
-
-def ExpandWildcardDependencies(targets, data):
- """Expands dependencies specified as build_file:*.
-
- For each target in |targets|, examines sections containing links to other
- targets. If any such section contains a link of the form build_file:*, it
- is taken as a wildcard link, and is expanded to list each target in
- build_file. The |data| dict provides access to build file dicts.
-
- Any target that does not wish to be included by wildcard can provide an
- optional "suppress_wildcard" key in its target dict. When present and
- true, a wildcard dependency link will not include such targets.
-
- All dependency names, including the keys to |targets| and the values in each
- dependency list, must be qualified when this function is called.
- """
-
- for target, target_dict in targets.iteritems():
- toolset = target_dict['toolset']
- target_build_file = gyp.common.BuildFile(target)
- for dependency_key in dependency_sections:
- dependencies = target_dict.get(dependency_key, [])
-
- # Loop this way instead of "for dependency in" or "for index in xrange"
- # because the dependencies list will be modified within the loop body.
- index = 0
- while index < len(dependencies):
- (dependency_build_file, dependency_target, dependency_toolset) = \
- gyp.common.ParseQualifiedTarget(dependencies[index])
- if dependency_target != '*' and dependency_toolset != '*':
- # Not a wildcard. Keep it moving.
- index = index + 1
- continue
-
- if dependency_build_file == target_build_file:
- # It's an error for a target to depend on all other targets in
- # the same file, because a target cannot depend on itself.
- raise GypError('Found wildcard in ' + dependency_key + ' of ' +
- target + ' referring to same build file')
-
- # Take the wildcard out and adjust the index so that the next
- # dependency in the list will be processed the next time through the
- # loop.
- del dependencies[index]
- index = index - 1
-
- # Loop through the targets in the other build file, adding them to
- # this target's list of dependencies in place of the removed
- # wildcard.
- dependency_target_dicts = data[dependency_build_file]['targets']
- for dependency_target_dict in dependency_target_dicts:
- if int(dependency_target_dict.get('suppress_wildcard', False)):
- continue
- dependency_target_name = dependency_target_dict['target_name']
- if (dependency_target != '*' and
- dependency_target != dependency_target_name):
- continue
- dependency_target_toolset = dependency_target_dict['toolset']
- if (dependency_toolset != '*' and
- dependency_toolset != dependency_target_toolset):
- continue
- dependency = gyp.common.QualifiedTarget(dependency_build_file,
- dependency_target_name,
- dependency_target_toolset)
- index = index + 1
- dependencies.insert(index, dependency)
-
- index = index + 1
-
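
A sketch of the expansion (build file and target names illustrative; the qualified-name syntax itself comes from gyp.common.QualifiedTarget):

    data = {'other.gyp': {'targets': [
        {'target_name': 'liba', 'toolset': 'target'},
        {'target_name': 'libb', 'toolset': 'target',
         'suppress_wildcard': 1},    # opted out of wildcard inclusion
    ]}}
    # A dependency naming other.gyp with target '*' is deleted and
    # replaced in place by one qualified entry per remaining target -
    # here, just 'liba'.
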
-
-def Unify(l):
- """Removes duplicate elements from l, keeping the first element."""
- seen = {}
- return [seen.setdefault(e, e) for e in l if e not in seen]
-
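
For example, duplicates after the first occurrence are dropped while order is preserved:

    # Unify(['a', 'b', 'a', 'c', 'b']) == ['a', 'b', 'c']
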
-
-def RemoveDuplicateDependencies(targets):
- """Makes sure every dependency appears only once in all targets's dependency
- lists."""
- for target_name, target_dict in targets.iteritems():
- for dependency_key in dependency_sections:
- dependencies = target_dict.get(dependency_key, [])
- if dependencies:
- target_dict[dependency_key] = Unify(dependencies)
-
-
-def Filter(l, item):
- """Removes item from l."""
- res = {}
- return [res.setdefault(e, e) for e in l if e != item]
-
-
-def RemoveSelfDependencies(targets):
- """Remove self dependencies from targets that have the prune_self_dependency
- variable set."""
- for target_name, target_dict in targets.iteritems():
- for dependency_key in dependency_sections:
- dependencies = target_dict.get(dependency_key, [])
- if dependencies:
- for t in dependencies:
- if t == target_name:
- if targets[t].get('variables', {}).get('prune_self_dependency', 0):
- target_dict[dependency_key] = Filter(dependencies, target_name)
-
-
-def RemoveLinkDependenciesFromNoneTargets(targets):
- """Remove dependencies having the 'link_dependency' attribute from the 'none'
- targets."""
- for target_name, target_dict in targets.iteritems():
- for dependency_key in dependency_sections:
- dependencies = target_dict.get(dependency_key, [])
- if dependencies:
- for t in dependencies:
- if target_dict.get('type', None) == 'none':
- if targets[t].get('variables', {}).get('link_dependency', 0):
- target_dict[dependency_key] = \
- Filter(target_dict[dependency_key], t)
-
-
-class DependencyGraphNode(object):
- """
-
- Attributes:
- ref: A reference to an object that this DependencyGraphNode represents.
- dependencies: List of DependencyGraphNodes on which this one depends.
- dependents: List of DependencyGraphNodes that depend on this one.
- """
-
- class CircularException(GypError):
- pass
-
- def __init__(self, ref):
- self.ref = ref
- self.dependencies = []
- self.dependents = []
-
- def __repr__(self):
- return '<DependencyGraphNode: %r>' % self.ref
-
- def FlattenToList(self):
- # flat_list is the sorted list of dependencies - actually, the list items
- # are the "ref" attributes of DependencyGraphNodes. Every target will
- # appear in flat_list after all of its dependencies, and before all of its
- # dependents.
- flat_list = OrderedSet()
-
- # in_degree_zeros is the list of DependencyGraphNodes that have no
- # dependencies not in flat_list. Initially, it is a copy of the children
- # of this node, because when the graph was built, nodes with no
- # dependencies were made implicit dependents of the root node.
- in_degree_zeros = set(self.dependents[:])
-
- while in_degree_zeros:
- # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
- # can be appended to flat_list. Take these nodes out of in_degree_zeros
- # as work progresses, so that the next node to process from the list can
- # always be accessed at a consistent position.
- node = in_degree_zeros.pop()
- flat_list.add(node.ref)
-
- # Look at dependents of the node just added to flat_list. Some of them
- # may now belong in in_degree_zeros.
- for node_dependent in node.dependents:
- is_in_degree_zero = True
- # TODO: We want to check through the
- # node_dependent.dependencies list but if it's long and we
- # always start at the beginning, then we get O(n^2) behaviour.
- for node_dependent_dependency in node_dependent.dependencies:
- if node_dependent_dependency.ref not in flat_list:
- # The dependent has one or more dependencies not in flat_list. There
- # will be more chances to add it to flat_list when examining
- # it again as a dependent of those other dependencies, provided
- # that there are no cycles.
- is_in_degree_zero = False
- break
-
- if is_in_degree_zero:
- # All of the dependent's dependencies are already in flat_list. Add
- # it to in_degree_zeros where it will be processed in a future
- # iteration of the outer loop.
- in_degree_zeros.add(node_dependent)
-
- return list(flat_list)
-
- def FindCycles(self):
- """
- Returns a list of cycles in the graph, where each cycle is its own list.
- """
- results = []
- visited = set()
-
- def Visit(node, path):
- for child in node.dependents:
- if child in path:
- results.append([child] + path[:path.index(child) + 1])
- elif not child in visited:
- visited.add(child)
- Visit(child, [child] + path)
-
- visited.add(self)
- Visit(self, [self])
-
- return results
-
- def DirectDependencies(self, dependencies=None):
- """Returns a list of just direct dependencies."""
- if dependencies is None:
- dependencies = []
-
- for dependency in self.dependencies:
- # Check for None, corresponding to the root node.
- if dependency.ref is not None and dependency.ref not in dependencies:
- dependencies.append(dependency.ref)
-
- return dependencies
-
- def _AddImportedDependencies(self, targets, dependencies=None):
- """Given a list of direct dependencies, adds indirect dependencies that
- other dependencies have declared to export their settings.
-
- This method does not operate on self. Rather, it operates on the list
- of dependencies in the |dependencies| argument. For each dependency in
- that list, if any declares that it exports the settings of one of its
- own dependencies, those dependencies whose settings are "passed through"
- are added to the list. As new items are added to the list, they too will
- be processed, so it is possible to import settings through multiple levels
- of dependencies.
-
- This method is not terribly useful on its own, it depends on being
- "primed" with a list of direct dependencies such as one provided by
- DirectDependencies. DirectAndImportedDependencies is intended to be the
- public entry point.
- """
-
- if dependencies is None:
- dependencies = []
-
- index = 0
- while index < len(dependencies):
- dependency = dependencies[index]
- dependency_dict = targets[dependency]
- # Add any dependencies whose settings should be imported to the list
- # if not already present. Newly-added items will be checked for
- # their own imports when the list iteration reaches them.
- # Rather than simply appending new items, insert them after the
- # dependency that exported them. This is done to more closely match
- # the depth-first method used by DeepDependencies.
- add_index = 1
- for imported_dependency in \
- dependency_dict.get('export_dependent_settings', []):
- if imported_dependency not in dependencies:
- dependencies.insert(index + add_index, imported_dependency)
- add_index = add_index + 1
- index = index + 1
-
- return dependencies
-
- def DirectAndImportedDependencies(self, targets, dependencies=None):
- """Returns a list of a target's direct dependencies and all indirect
- dependencies that a dependency has advertised settings should be exported
- through the dependency for.
- """
-
- dependencies = self.DirectDependencies(dependencies)
- return self._AddImportedDependencies(targets, dependencies)
-
- def DeepDependencies(self, dependencies=None):
- """Returns an OrderedSet of all of a target's dependencies, recursively."""
- if dependencies is None:
- # Using a list to get ordered output and a set to do fast "is it
- # already added" checks.
- dependencies = OrderedSet()
-
- for dependency in self.dependencies:
- # Check for None, corresponding to the root node.
- if dependency.ref is None:
- continue
- if dependency.ref not in dependencies:
- dependency.DeepDependencies(dependencies)
- dependencies.add(dependency.ref)
-
- return dependencies
-
- def _LinkDependenciesInternal(self, targets, include_shared_libraries,
- dependencies=None, initial=True):
- """Returns an OrderedSet of dependency targets that are linked
- into this target.
-
- This function has a split personality, depending on the setting of
- |initial|. Outside callers should always leave |initial| at its default
- setting.
-
- When adding a target to the list of dependencies, this function will
- recurse into itself with |initial| set to False, to collect dependencies
- that are linked into the linkable target for which the list is being built.
-
- If |include_shared_libraries| is False, the resulting dependencies will not
- include shared_library targets that are linked into this target.
- """
- if dependencies is None:
- # Using a list to get ordered output and a set to do fast "is it
- # already added" checks.
- dependencies = OrderedSet()
-
- # Check for None, corresponding to the root node.
- if self.ref is None:
- return dependencies
-
- # It's kind of sucky that |targets| has to be passed into this function,
- # but that's presently the easiest way to access the target dicts so that
- # this function can find target types.
-
- if 'target_name' not in targets[self.ref]:
- raise GypError("Missing 'target_name' field in target.")
-
- if 'type' not in targets[self.ref]:
- raise GypError("Missing 'type' field in target %s" %
- targets[self.ref]['target_name'])
-
- target_type = targets[self.ref]['type']
-
- is_linkable = target_type in linkable_types
-
- if initial and not is_linkable:
- # If this is the first target being examined and it's not linkable,
- # return an empty list of link dependencies, because the link
- # dependencies are intended to apply to the target itself (initial is
- # True) and this target won't be linked.
- return dependencies
-
- # Don't traverse 'none' targets if explicitly excluded.
- if (target_type == 'none' and
- not targets[self.ref].get('dependencies_traverse', True)):
- dependencies.add(self.ref)
- return dependencies
-
- # Executables, mac kernel extensions and loadable modules are already fully
- # and finally linked. Nothing else can be a link dependency of them, there
- # can only be dependencies in the sense that a dependent target might run
- # an executable or load the loadable_module.
- if not initial and target_type in ('executable', 'loadable_module',
- 'mac_kernel_extension'):
- return dependencies
-
- # Shared libraries are already fully linked. They should only be included
- # in |dependencies| when adjusting static library dependencies (in order to
- # link against the shared_library's import lib), but should not be included
- # in |dependencies| when propagating link_settings.
- # The |include_shared_libraries| flag controls which of these two cases we
- # are handling.
- if (not initial and target_type == 'shared_library' and
- not include_shared_libraries):
- return dependencies
-
- # The target is linkable, add it to the list of link dependencies.
- if self.ref not in dependencies:
- dependencies.add(self.ref)
- if initial or not is_linkable:
- # If this is a subsequent target and it's linkable, don't look any
- # further for linkable dependencies, as they'll already be linked into
- # this linkable target. Always look at dependencies of the initial
- # target, and always look at dependencies of non-linkables.
- for dependency in self.dependencies:
- dependency._LinkDependenciesInternal(targets,
- include_shared_libraries,
- dependencies, False)
-
- return dependencies
-
- def DependenciesForLinkSettings(self, targets):
- """
- Returns a list of dependency targets whose link_settings should be merged
- into this target.
- """
-
- # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
- # link_settings are propagated. So for now, we will allow it, unless the
- # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
- # False. Once chrome is fixed, we can remove this flag.
- include_shared_libraries = \
- targets[self.ref].get('allow_sharedlib_linksettings_propagation', True)
- return self._LinkDependenciesInternal(targets, include_shared_libraries)
-
- def DependenciesToLinkAgainst(self, targets):
- """
- Returns a list of dependency targets that are linked into this target.
- """
- return self._LinkDependenciesInternal(targets, True)
-
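
FlattenToList above is Kahn's algorithm specialized to these node objects. A self-contained sketch of the same idea on a plain adjacency dict (the graph below is illustrative; OrderedSet and the root-node bookkeeping are elided):

    def flatten(graph):
        # graph maps each node to the list of nodes it depends on.
        dependents = dict((n, []) for n in graph)
        for node, deps in graph.items():
            for dep in deps:
                dependents[dep].append(node)
        flat, placed = [], set()
        ready = set(n for n in graph if not graph[n])
        while ready:
            node = ready.pop()
            flat.append(node)
            placed.add(node)
            # A dependent becomes ready once all its dependencies are placed.
            for dependent in dependents[node]:
                if all(d in placed for d in graph[dependent]):
                    ready.add(dependent)
        return flat  # len(flat) < len(graph) means the graph has a cycle

    # flatten({'a': [], 'b': ['a'], 'c': ['a', 'b']}) -> ['a', 'b', 'c']
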
-
-def BuildDependencyList(targets):
- # Create a DependencyGraphNode for each target. Put it into a dict for easy
- # access.
- dependency_nodes = {}
- for target, spec in targets.iteritems():
- if target not in dependency_nodes:
- dependency_nodes[target] = DependencyGraphNode(target)
-
- # Set up the dependency links. Targets that have no dependencies are treated
- # as dependent on root_node.
- root_node = DependencyGraphNode(None)
- for target, spec in targets.iteritems():
- target_node = dependency_nodes[target]
- target_build_file = gyp.common.BuildFile(target)
- dependencies = spec.get('dependencies')
- if not dependencies:
- target_node.dependencies = [root_node]
- root_node.dependents.append(target_node)
- else:
- for dependency in dependencies:
- dependency_node = dependency_nodes.get(dependency)
- if not dependency_node:
- raise GypError("Dependency '%s' not found while "
- "trying to load target %s" % (dependency, target))
- target_node.dependencies.append(dependency_node)
- dependency_node.dependents.append(target_node)
-
- flat_list = root_node.FlattenToList()
-
- # If there's anything left unvisited, there must be a circular dependency
- # (cycle).
- if len(flat_list) != len(targets):
- if not root_node.dependents:
- # If all targets have dependencies, add the first target as a dependent
- # of root_node so that the cycle can be discovered from root_node.
- target = targets.keys()[0]
- target_node = dependency_nodes[target]
- target_node.dependencies.append(root_node)
- root_node.dependents.append(target_node)
-
- cycles = []
- for cycle in root_node.FindCycles():
- paths = [node.ref for node in cycle]
- cycles.append('Cycle: %s' % ' -> '.join(paths))
- raise DependencyGraphNode.CircularException(
- 'Cycles in dependency graph detected:\n' + '\n'.join(cycles))
-
- return [dependency_nodes, flat_list]
-
-
-def VerifyNoGYPFileCircularDependencies(targets):
- # Create a DependencyGraphNode for each gyp file containing a target. Put
- # it into a dict for easy access.
- dependency_nodes = {}
- for target in targets.iterkeys():
- build_file = gyp.common.BuildFile(target)
- if not build_file in dependency_nodes:
- dependency_nodes[build_file] = DependencyGraphNode(build_file)
-
- # Set up the dependency links.
- for target, spec in targets.iteritems():
- build_file = gyp.common.BuildFile(target)
- build_file_node = dependency_nodes[build_file]
- target_dependencies = spec.get('dependencies', [])
- for dependency in target_dependencies:
- try:
- dependency_build_file = gyp.common.BuildFile(dependency)
- except GypError, e:
- gyp.common.ExceptionAppend(
- e, 'while computing dependencies of .gyp file %s' % build_file)
- raise
-
- if dependency_build_file == build_file:
- # A .gyp file is allowed to refer back to itself.
- continue
- dependency_node = dependency_nodes.get(dependency_build_file)
- if not dependency_node:
- raise GypError("Dependancy '%s' not found" % dependency_build_file)
- if dependency_node not in build_file_node.dependencies:
- build_file_node.dependencies.append(dependency_node)
- dependency_node.dependents.append(build_file_node)
-
-
- # Files that have no dependencies are treated as dependent on root_node.
- root_node = DependencyGraphNode(None)
- for build_file_node in dependency_nodes.itervalues():
- if len(build_file_node.dependencies) == 0:
- build_file_node.dependencies.append(root_node)
- root_node.dependents.append(build_file_node)
-
- flat_list = root_node.FlattenToList()
-
- # If there's anything left unvisited, there must be a circular dependency
- # (cycle).
- if len(flat_list) != len(dependency_nodes):
- if not root_node.dependents:
- # If all files have dependencies, add the first file as a dependent
- # of root_node so that the cycle can be discovered from root_node.
- file_node = dependency_nodes.values()[0]
- file_node.dependencies.append(root_node)
- root_node.dependents.append(file_node)
- cycles = []
- for cycle in root_node.FindCycles():
- paths = [node.ref for node in cycle]
- cycles.append('Cycle: %s' % ' -> '.join(paths))
- raise DependencyGraphNode.CircularException(
- 'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles))
-
-
-def DoDependentSettings(key, flat_list, targets, dependency_nodes):
- # key should be one of all_dependent_settings, direct_dependent_settings,
- # or link_settings.
-
- for target in flat_list:
- target_dict = targets[target]
- build_file = gyp.common.BuildFile(target)
-
- if key == 'all_dependent_settings':
- dependencies = dependency_nodes[target].DeepDependencies()
- elif key == 'direct_dependent_settings':
- dependencies = \
- dependency_nodes[target].DirectAndImportedDependencies(targets)
- elif key == 'link_settings':
- dependencies = \
- dependency_nodes[target].DependenciesForLinkSettings(targets)
- else:
- raise GypError("DoDependentSettings doesn't know how to determine "
- 'dependencies for ' + key)
-
- for dependency in dependencies:
- dependency_dict = targets[dependency]
- if not key in dependency_dict:
- continue
- dependency_build_file = gyp.common.BuildFile(dependency)
- MergeDicts(target_dict, dependency_dict[key],
- build_file, dependency_build_file)
-
-
-def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
- sort_dependencies):
- # Recompute target "dependencies" properties. For each static library
- # target, remove "dependencies" entries referring to other static libraries,
- # unless the dependency has the "hard_dependency" attribute set. For each
- # linkable target, add a "dependencies" entry referring to all of the
- # target's computed list of link dependencies (including static
- # libraries) if no such entry is already present.
- for target in flat_list:
- target_dict = targets[target]
- target_type = target_dict['type']
-
- if target_type == 'static_library':
- if not 'dependencies' in target_dict:
- continue
-
- target_dict['dependencies_original'] = target_dict.get(
- 'dependencies', [])[:]
-
- # A static library should not depend on another static library unless
- # the dependency relationship is "hard," which should only be done when
- # a dependent relies on some side effect other than just the build
- # product, like a rule or action output. Further, if a target has a
- # non-hard dependency, but that dependency exports a hard dependency,
- # the non-hard dependency can safely be removed, but the exported hard
- # dependency must be added to the target to keep the same dependency
- # ordering.
- dependencies = \
- dependency_nodes[target].DirectAndImportedDependencies(targets)
- index = 0
- while index < len(dependencies):
- dependency = dependencies[index]
- dependency_dict = targets[dependency]
-
- # Remove every non-hard static library dependency and remove every
- # non-static library dependency that isn't a direct dependency.
- if (dependency_dict['type'] == 'static_library' and \
- not dependency_dict.get('hard_dependency', False)) or \
- (dependency_dict['type'] != 'static_library' and \
- not dependency in target_dict['dependencies']):
- # Take the dependency out of the list, and don't increment index
- # because the next dependency to analyze will shift into the index
- # formerly occupied by the one being removed.
- del dependencies[index]
- else:
- index = index + 1
-
- # Update the dependencies. If the dependencies list is empty, it's not
- # needed, so unhook it.
- if len(dependencies) > 0:
- target_dict['dependencies'] = dependencies
- else:
- del target_dict['dependencies']
-
- elif target_type in linkable_types:
- # Get a list of dependency targets that should be linked into this
- # target. Add them to the dependencies list if they're not already
- # present.
-
- link_dependencies = \
- dependency_nodes[target].DependenciesToLinkAgainst(targets)
- for dependency in link_dependencies:
- if dependency == target:
- continue
- if not 'dependencies' in target_dict:
- target_dict['dependencies'] = []
- if not dependency in target_dict['dependencies']:
- target_dict['dependencies'].append(dependency)
- # Sort the dependencies list in the order from dependents to dependencies.
- # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D.
- # Note: flat_list is already sorted in the order from dependencies to
- # dependents.
- if sort_dependencies and 'dependencies' in target_dict:
- target_dict['dependencies'] = [dep for dep in reversed(flat_list)
- if dep in target_dict['dependencies']]
-
-
-# Initialize this here to speed up MakePathRelative.
-exception_re = re.compile(r'''["']?[-/$<>^]''')
-
-
-def MakePathRelative(to_file, fro_file, item):
- # If item is a relative path, it's relative to the build file dict that it's
- # coming from. Fix it up to make it relative to the build file dict that
- # it's going into.
- # Exception: any |item| that begins with these special characters is
- # returned without modification.
- # / Used when a path is already absolute (shortcut optimization;
- # such paths would be returned as absolute anyway)
- # $ Used for build environment variables
- # - Used for some build environment flags (such as -lapr-1 in a
- # "libraries" section)
- # < Used for our own variable and command expansions (see ExpandVariables)
- # > Used for our own variable and command expansions (see ExpandVariables)
- # ^ Used for our own variable and command expansions (see ExpandVariables)
- #
- # "/' Used when a value is quoted. If these are present, then we
- # check the second character instead.
- #
- if to_file == fro_file or exception_re.match(item):
- return item
- else:
- # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
- # temporary measure. This should really be addressed by keeping all paths
- # in POSIX until actual project generation.
- ret = os.path.normpath(os.path.join(
- gyp.common.RelativePath(os.path.dirname(fro_file),
- os.path.dirname(to_file)),
- item)).replace('\\', '/')
- if item[-1] == '/':
- ret += '/'
- return ret
-
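
The core rebasing above can be approximated standalone, with os.path.relpath standing in for gyp.common.RelativePath (an assumption; paths illustrative):

    import os

    def rebase(to_file, fro_file, item):
        # Re-anchor item from fro_file's directory to to_file's directory.
        rel = os.path.relpath(os.path.dirname(fro_file) or '.',
                              os.path.dirname(to_file) or '.')
        return os.path.normpath(os.path.join(rel, item)).replace('\\', '/')

    # rebase('a/b.gyp', 'a/sub/c.gypi', 'x.c') -> 'sub/x.c'
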
-def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
- # The Python documentation recommends that objects which do not support
- # hashing set __hash__ to None. Python library objects follow this rule.
- is_hashable = lambda val: val.__hash__
-
- # If x is hashable, returns whether x is in s. Else returns whether x is in l.
- def is_in_set_or_list(x, s, l):
- if is_hashable(x):
- return x in s
- return x in l
-
- prepend_index = 0
-
- # Make membership testing of hashables in |to| (in particular, strings)
- # faster.
- hashable_to_set = set(x for x in to if is_hashable(x))
- for item in fro:
- singleton = False
- if type(item) in (str, int):
- # The cheap and easy case.
- if is_paths:
- to_item = MakePathRelative(to_file, fro_file, item)
- else:
- to_item = item
-
- if not (type(item) is str and item.startswith('-')):
- # Any item that isn't a string beginning with a "-" is a singleton -
- # it can only appear once in a list, which is enforced by the list
- # merge append or prepend.
- singleton = True
- elif type(item) is dict:
- # Make a copy of the dictionary, continuing to look for paths to fix.
- # The other intelligent aspects of merge processing won't apply because
- # item is being merged into an empty dict.
- to_item = {}
- MergeDicts(to_item, item, to_file, fro_file)
- elif type(item) is list:
- # Recurse, making a copy of the list. If the list contains any
- # descendant dicts, path fixing will occur. Note that here, custom
- # values for is_paths and append are dropped; those are only to be
- # applied to |to| and |fro|, not sublists of |fro|. append shouldn't
- # matter anyway because the new |to_item| list is empty.
- to_item = []
- MergeLists(to_item, item, to_file, fro_file)
- else:
- raise TypeError(
- 'Attempt to merge list item of unsupported type ' + \
- item.__class__.__name__)
-
- if append:
- # If appending a singleton that's already in the list, don't append.
- # This ensures that the earliest occurrence of the item will stay put.
- if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to):
- to.append(to_item)
- if is_hashable(to_item):
- hashable_to_set.add(to_item)
- else:
- # If prepending a singleton that's already in the list, remove the
- # existing instance and proceed with the prepend. This ensures that the
- # item appears at the earliest possible position in the list.
- while singleton and to_item in to:
- to.remove(to_item)
-
- # Don't just insert everything at index 0. That would prepend the new
- # items to the list in reverse order, which would be an unwelcome
- # surprise.
- to.insert(prepend_index, to_item)
- if is_hashable(to_item):
- hashable_to_set.add(to_item)
- prepend_index = prepend_index + 1
-
-
-def MergeDicts(to, fro, to_file, fro_file):
- # I wanted to name the parameter "from" but it's a Python keyword...
- for k, v in fro.iteritems():
- # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
- # copy semantics. Something else may want to merge from the |fro| dict
- # later, and having the same dict ref pointed to twice in the tree isn't
- # what anyone wants considering that the dicts may subsequently be
- # modified.
- if k in to:
- bad_merge = False
- if type(v) in (str, int):
- if type(to[k]) not in (str, int):
- bad_merge = True
- elif type(v) is not type(to[k]):
- bad_merge = True
-
- if bad_merge:
- raise TypeError(
- 'Attempt to merge dict value of type ' + v.__class__.__name__ + \
- ' into incompatible type ' + to[k].__class__.__name__ + \
- ' for key ' + k)
- if type(v) in (str, int):
- # Overwrite the existing value, if any. Cheap and easy.
- is_path = IsPathSection(k)
- if is_path:
- to[k] = MakePathRelative(to_file, fro_file, v)
- else:
- to[k] = v
- elif type(v) is dict:
- # Recurse, guaranteeing copies will be made of objects that require it.
- if not k in to:
- to[k] = {}
- MergeDicts(to[k], v, to_file, fro_file)
- elif type(v) is list:
- # Lists in dicts can be merged with different policies, depending on
- # how the key in the "from" dict (k, the from-key) is written.
- #
- # If the from-key has ...the to-list will have this action
- # this character appended:... applied when receiving the from-list:
- # = replace
- # + prepend
- # ? set, only if to-list does not yet exist
- # (none) append
- #
- # This logic is list-specific, but since it relies on the associated
- # dict key, it's checked in this dict-oriented function.
- ext = k[-1]
- append = True
- if ext == '=':
- list_base = k[:-1]
- lists_incompatible = [list_base, list_base + '?']
- to[list_base] = []
- elif ext == '+':
- list_base = k[:-1]
- lists_incompatible = [list_base + '=', list_base + '?']
- append = False
- elif ext == '?':
- list_base = k[:-1]
- lists_incompatible = [list_base, list_base + '=', list_base + '+']
- else:
- list_base = k
- lists_incompatible = [list_base + '=', list_base + '?']
-
- # Some combinations of merge policies appearing together are meaningless.
- # It's stupid to replace and append simultaneously, for example. Append
- # and prepend are the only policies that can coexist.
- for list_incompatible in lists_incompatible:
- if list_incompatible in fro:
- raise GypError('Incompatible list policies ' + k + ' and ' +
- list_incompatible)
-
- if list_base in to:
- if ext == '?':
- # If the key ends in "?", the list will only be merged if it doesn't
- # already exist.
- continue
- elif type(to[list_base]) is not list:
- # This may not have been checked above if merging in a list with an
- # extension character.
- raise TypeError(
- 'Attempt to merge dict value of type ' + v.__class__.__name__ + \
- ' into incompatible type ' + to[list_base].__class__.__name__ + \
- ' for key ' + list_base + ' (' + k + ')')
- else:
- to[list_base] = []
-
- # Call MergeLists, which will make copies of objects that require it.
- # MergeLists can recurse back into MergeDicts, but only to make copies
- # of dicts (with paths fixed); there is no subsequent dict "merging"
- # once inside a list, because lists are always replaced, appended to,
- # or prepended to.
- is_paths = IsPathSection(list_base)
- MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
- else:
- raise TypeError(
- 'Attempt to merge dict value of unsupported type ' + \
- v.__class__.__name__ + ' for key ' + k)
-
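
To make the suffix table above concrete, here is how each policy lands for an illustrative 'defines' list (results follow from the MergeLists semantics above):

    # Merging fro into to = {'defines': ['A']}:
    #
    #   fro = {'defines':  ['B']}  ->  to['defines'] == ['A', 'B']  (append)
    #   fro = {'defines+': ['B']}  ->  to['defines'] == ['B', 'A']  (prepend)
    #   fro = {'defines=': ['B']}  ->  to['defines'] == ['B']       (replace)
    #   fro = {'defines?': ['B']}  ->  to['defines'] == ['A']       (already set)
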
-
-def MergeConfigWithInheritance(new_configuration_dict, build_file,
- target_dict, configuration, visited):
- # Skip if previously visited.
- if configuration in visited:
- return
-
- # Look at this configuration.
- configuration_dict = target_dict['configurations'][configuration]
-
- # Merge in parents.
- for parent in configuration_dict.get('inherit_from', []):
- MergeConfigWithInheritance(new_configuration_dict, build_file,
- target_dict, parent, visited + [configuration])
-
- # Merge it into the new config.
- MergeDicts(new_configuration_dict, configuration_dict,
- build_file, build_file)
-
- # Drop abstract.
- if 'abstract' in new_configuration_dict:
- del new_configuration_dict['abstract']
-
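
A sketch of the inheritance chain this walks (configuration names and defines illustrative):

    target_dict = {'configurations': {
        'Common': {'abstract': 1, 'defines': ['COMMON']},
        'Debug':  {'inherit_from': ['Common'], 'defines': ['DEBUG']},
    }}
    # Resolving 'Debug' merges 'Common' first, then 'Debug' itself, so
    # new_configuration_dict ends up with defines == ['COMMON', 'DEBUG'];
    # 'inherit_from' is merged along too, and 'abstract' is dropped.
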
-
-def SetUpConfigurations(target, target_dict):
- # key_suffixes is a list of key suffixes that might appear on key names.
- # These suffixes are handled in conditional evaluations (for =, +, and ?)
- # and rules/exclude processing (for ! and /). Keys with these suffixes
- # should be treated the same as keys without.
- key_suffixes = ['=', '+', '?', '!', '/']
-
- build_file = gyp.common.BuildFile(target)
-
- # Provide a single configuration by default if none exists.
- # TODO(mark): Signal an error if default_configurations exists but
- # configurations does not.
- if not 'configurations' in target_dict:
- target_dict['configurations'] = {'Default': {}}
- if not 'default_configuration' in target_dict:
- concrete = [i for (i, config) in target_dict['configurations'].iteritems()
- if not config.get('abstract')]
- target_dict['default_configuration'] = sorted(concrete)[0]
-
- merged_configurations = {}
- configs = target_dict['configurations']
- for (configuration, old_configuration_dict) in configs.iteritems():
- # Skip abstract configurations (saves work only).
- if old_configuration_dict.get('abstract'):
- continue
- # Configurations inherit (most) settings from the enclosing target scope.
- # Get the inheritance relationship right by making a copy of the target
- # dict.
- new_configuration_dict = {}
- for (key, target_val) in target_dict.iteritems():
- key_ext = key[-1:]
- if key_ext in key_suffixes:
- key_base = key[:-1]
- else:
- key_base = key
- if not key_base in non_configuration_keys:
- new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val)
-
- # Merge in configuration (with all its parents first).
- MergeConfigWithInheritance(new_configuration_dict, build_file,
- target_dict, configuration, [])
-
- merged_configurations[configuration] = new_configuration_dict
-
- # Put the new configurations back into the target dict as a configuration.
- for configuration in merged_configurations.keys():
- target_dict['configurations'][configuration] = (
- merged_configurations[configuration])
-
- # Now drop all the abstract ones.
- for configuration in target_dict['configurations'].keys():
- old_configuration_dict = target_dict['configurations'][configuration]
- if old_configuration_dict.get('abstract'):
- del target_dict['configurations'][configuration]
-
- # Now that all of the target's configurations have been built, go through
- # the target dict's keys and remove everything that's been moved into a
- # "configurations" section.
- delete_keys = []
- for key in target_dict:
- key_ext = key[-1:]
- if key_ext in key_suffixes:
- key_base = key[:-1]
- else:
- key_base = key
- if not key_base in non_configuration_keys:
- delete_keys.append(key)
- for key in delete_keys:
- del target_dict[key]
-
- # Check the configurations to see if they contain invalid keys.
- for configuration in target_dict['configurations'].keys():
- configuration_dict = target_dict['configurations'][configuration]
- for key in configuration_dict.keys():
- if key in invalid_configuration_keys:
- raise GypError('%s not allowed in the %s configuration, found in '
- 'target %s' % (key, configuration, target))
-
-
-
-def ProcessListFiltersInDict(name, the_dict):
- """Process regular expression and exclusion-based filters on lists.
-
- An exclusion list is in a dict key named with a trailing "!", like
- "sources!". Every item in such a list is removed from the associated
- main list, which in this example, would be "sources". Removed items are
- placed into a "sources_excluded" list in the dict.
-
- Regular expression (regex) filters are contained in dict keys named with a
- trailing "/", such as "sources/" to operate on the "sources" list. Regex
- filters in a dict take the form:
- 'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
- ['include', '_mac\\.cc$'] ],
- The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
- _win.cc. The second filter then includes all files ending in _mac.cc that
- are now or were once in the "sources" list. Items matching an "exclude"
- filter are subject to the same processing as would occur if they were listed
- by name in an exclusion list (ending in "!"). Items matching an "include"
- filter are brought back into the main list if previously excluded by an
- exclusion list or exclusion regex filter. Subsequent matching "exclude"
- patterns can still cause items to be excluded after matching an "include".
- """
-
- # Look through the dictionary for any lists whose keys end in "!" or "/".
- # These are lists that will be treated as exclude lists and regular
- # expression-based exclude/include lists. Collect the lists that are
- # needed first, looking for the lists that they operate on, and assemble
- # them into |lists|. This is done in a separate loop up front, because
- # the _included and _excluded keys need to be added to the_dict, and that
- # can't be done while iterating through it.
-
- lists = []
- del_lists = []
- for key, value in the_dict.iteritems():
- operation = key[-1]
- if operation != '!' and operation != '/':
- continue
-
- if type(value) is not list:
- raise ValueError(name + ' key ' + key + ' must be list, not ' + \
- value.__class__.__name__)
-
- list_key = key[:-1]
- if list_key not in the_dict:
- # This happens when there's a list like "sources!" but no corresponding
- # "sources" list. Since there's nothing for it to operate on, queue up
- # the "sources!" list for deletion now.
- del_lists.append(key)
- continue
-
- if type(the_dict[list_key]) is not list:
- value = the_dict[list_key]
- raise ValueError(name + ' key ' + list_key + \
- ' must be list, not ' + \
- value.__class__.__name__ + ' when applying ' + \
- {'!': 'exclusion', '/': 'regex'}[operation])
-
- if not list_key in lists:
- lists.append(list_key)
-
- # Delete the lists that are known to be unneeded at this point.
- for del_list in del_lists:
- del the_dict[del_list]
-
- for list_key in lists:
- the_list = the_dict[list_key]
-
- # Initialize the list_actions list, which is parallel to the_list. Each
- # item in list_actions identifies whether the corresponding item in
- # the_list should be excluded, unconditionally preserved (included), or
- # whether no exclusion or inclusion has been applied. Items for which
- # no exclusion or inclusion has been applied (yet) have value -1, items
- # excluded have value 0, and items included have value 1. Includes and
- # excludes override previous actions. All items in list_actions are
- # initialized to -1 because no excludes or includes have been processed
- # yet.
- list_actions = list((-1,) * len(the_list))
-
- exclude_key = list_key + '!'
- if exclude_key in the_dict:
- for exclude_item in the_dict[exclude_key]:
- for index in xrange(0, len(the_list)):
- if exclude_item == the_list[index]:
- # This item matches the exclude_item, so set its action to 0
- # (exclude).
- list_actions[index] = 0
-
- # The "whatever!" list is no longer needed, dump it.
- del the_dict[exclude_key]
-
- regex_key = list_key + '/'
- if regex_key in the_dict:
- for regex_item in the_dict[regex_key]:
- [action, pattern] = regex_item
- pattern_re = re.compile(pattern)
-
- if action == 'exclude':
- # Items matching this regex will have their action set to 0 (exclude).
- action_value = 0
- elif action == 'include':
- # Items matching this regex will have their action set to 1 (include).
- action_value = 1
- else:
- # This is an action that doesn't make any sense.
- raise ValueError('Unrecognized action ' + action + ' in ' + name + \
- ' key ' + regex_key)
-
- for index in xrange(0, len(the_list)):
- list_item = the_list[index]
- if list_actions[index] == action_value:
- # Even if the regex matches, nothing will change so continue (regex
- # searches are expensive).
- continue
- if pattern_re.search(list_item):
- # Regular expression match.
- list_actions[index] = action_value
-
- # The "whatever/" list is no longer needed, dump it.
- del the_dict[regex_key]
-
- # Add excluded items to the excluded list.
- #
- # Note that exclude_key ("sources!") is different from excluded_key
- # ("sources_excluded"). The exclude_key list is input and it was already
- # processed and deleted; the excluded_key list is output and it's about
- # to be created.
- excluded_key = list_key + '_excluded'
- if excluded_key in the_dict:
- raise GypError(name + ' key ' + excluded_key +
- ' must not be present prior '
- 'to applying exclusion/regex filters for ' + list_key)
-
- excluded_list = []
-
- # Go backwards through the list_actions list so that as items are deleted,
- # the indices of items that haven't been seen yet don't shift. That means
- # that things need to be prepended to excluded_list to maintain them in the
- # same order that they existed in the_list.
- for index in xrange(len(list_actions) - 1, -1, -1):
- if list_actions[index] == 0:
- # Dump anything with action 0 (exclude). Keep anything with action 1
- # (include) or -1 (no include or exclude seen for the item).
- excluded_list.insert(0, the_list[index])
- del the_list[index]
-
- # If anything was excluded, put the excluded list into the_dict at
- # excluded_key.
- if len(excluded_list) > 0:
- the_dict[excluded_key] = excluded_list
-
- # Now recurse into subdicts and lists that may contain dicts.
- for key, value in the_dict.iteritems():
- if type(value) is dict:
- ProcessListFiltersInDict(key, value)
- elif type(value) is list:
- ProcessListFiltersInList(key, value)
-
-
-def ProcessListFiltersInList(name, the_list):
- for item in the_list:
- if type(item) is dict:
- ProcessListFiltersInDict(name, item)
- elif type(item) is list:
- ProcessListFiltersInList(name, item)
-
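
A worked example of the semantics described in the docstring above (file names illustrative):

    the_dict = {
        'sources':  ['a.cc', 'b_win.cc', 'c_mac.cc'],
        'sources!': ['a.cc'],
        'sources/': [['exclude', '_(win|mac)\\.cc$'],
                     ['include', '_mac\\.cc$']],
    }
    ProcessListFiltersInDict('example', the_dict)
    # the_dict == {'sources':          ['c_mac.cc'],
    #              'sources_excluded': ['a.cc', 'b_win.cc']}
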
-
-def ValidateTargetType(target, target_dict):
- """Ensures the 'type' field on the target is one of the known types.
-
- Arguments:
- target: string, name of target.
- target_dict: dict, target spec.
-
- Raises an exception on error.
- """
- VALID_TARGET_TYPES = ('executable', 'loadable_module',
- 'static_library', 'shared_library',
- 'mac_kernel_extension', 'none')
- target_type = target_dict.get('type', None)
- if target_type not in VALID_TARGET_TYPES:
- raise GypError("Target %s has an invalid target type '%s'. "
- "Must be one of %s." %
- (target, target_type, '/'.join(VALID_TARGET_TYPES)))
- if (target_dict.get('standalone_static_library', 0) and
- not target_type == 'static_library'):
- raise GypError('Target %s has type %s but standalone_static_library flag is'
- ' only valid for static_library type.' % (target,
- target_type))
-
-
-def ValidateSourcesInTarget(target, target_dict, build_file,
- duplicate_basename_check):
- if not duplicate_basename_check:
- return
- if target_dict.get('type', None) != 'static_library':
- return
- sources = target_dict.get('sources', [])
- basenames = {}
- for source in sources:
- name, ext = os.path.splitext(source)
- is_compiled_file = ext in [
- '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
- if not is_compiled_file:
- continue
- basename = os.path.basename(name) # Don't include extension.
- basenames.setdefault(basename, []).append(source)
-
- error = ''
- for basename, files in basenames.iteritems():
- if len(files) > 1:
- error += ' %s: %s\n' % (basename, ' '.join(files))
-
- if error:
- print('static library %s has several files with the same basename:\n' %
- target + error + 'libtool on Mac cannot handle that. Use '
- '--no-duplicate-basename-check to disable this validation.')
- raise GypError('Duplicate basenames in sources section, see list above')
-
-
-def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
- """Ensures that the rules sections in target_dict are valid and consistent,
- and determines which sources they apply to.
-
- Arguments:
- target: string, name of target.
- target_dict: dict, target spec containing "rules" and "sources" lists.
- extra_sources_for_rules: a list of keys to scan for rule matches in
- addition to 'sources'.
- """
-
- # Dicts to map between values found in rules' 'rule_name' and 'extension'
- # keys and the rule dicts themselves.
- rule_names = {}
- rule_extensions = {}
-
- rules = target_dict.get('rules', [])
- for rule in rules:
- # Make sure that there's no conflict among rule names and extensions.
- rule_name = rule['rule_name']
- if rule_name in rule_names:
- raise GypError('rule %s exists in duplicate, target %s' %
- (rule_name, target))
- rule_names[rule_name] = rule
-
- rule_extension = rule['extension']
- if rule_extension.startswith('.'):
- rule_extension = rule_extension[1:]
- if rule_extension in rule_extensions:
- raise GypError(('extension %s associated with multiple rules, ' +
- 'target %s rules %s and %s') %
- (rule_extension, target,
- rule_extensions[rule_extension]['rule_name'],
- rule_name))
- rule_extensions[rule_extension] = rule
-
- # Make sure rule_sources isn't already there. It's going to be
- # created below if needed.
- if 'rule_sources' in rule:
- raise GypError(
- 'rule_sources must not exist in input, target %s rule %s' %
- (target, rule_name))
-
- rule_sources = []
- source_keys = ['sources']
- source_keys.extend(extra_sources_for_rules)
- for source_key in source_keys:
- for source in target_dict.get(source_key, []):
- (source_root, source_extension) = os.path.splitext(source)
- if source_extension.startswith('.'):
- source_extension = source_extension[1:]
- if source_extension == rule_extension:
- rule_sources.append(source)
-
- if len(rule_sources) > 0:
- rule['rule_sources'] = rule_sources
-
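
A minimal sketch of the matching this performs (the rule dict below carries only the keys this function reads; real rules also define actions and outputs):

    target_dict = {
        'sources': ['parser.y', 'main.cc'],
        'rules': [{'rule_name': 'yacc', 'extension': '.y'}],
    }
    ValidateRulesInTarget('example.gyp:foo', target_dict, [])
    # target_dict['rules'][0]['rule_sources'] == ['parser.y']
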
-
-def ValidateRunAsInTarget(target, target_dict, build_file):
- target_name = target_dict.get('target_name')
- run_as = target_dict.get('run_as')
- if not run_as:
- return
- if type(run_as) is not dict:
- raise GypError("The 'run_as' in target %s from file %s should be a "
- "dictionary." %
- (target_name, build_file))
- action = run_as.get('action')
- if not action:
- raise GypError("The 'run_as' in target %s from file %s must have an "
- "'action' section." %
- (target_name, build_file))
- if type(action) is not list:
- raise GypError("The 'action' for 'run_as' in target %s from file %s "
- "must be a list." %
- (target_name, build_file))
- working_directory = run_as.get('working_directory')
- if working_directory and type(working_directory) is not str:
- raise GypError("The 'working_directory' for 'run_as' in target %s "
- "in file %s should be a string." %
- (target_name, build_file))
- environment = run_as.get('environment')
- if environment and type(environment) is not dict:
- raise GypError("The 'environment' for 'run_as' in target %s "
- "in file %s should be a dictionary." %
- (target_name, build_file))
-
-
-def ValidateActionsInTarget(target, target_dict, build_file):
- """Validates the inputs to the actions in a target."""
- target_name = target_dict.get('target_name')
- actions = target_dict.get('actions', [])
- for action in actions:
- action_name = action.get('action_name')
- if not action_name:
- raise GypError("Anonymous action in target %s. "
- "An action must have an 'action_name' field." %
- target_name)
- inputs = action.get('inputs', None)
- if inputs is None:
- raise GypError('Action in target %s has no inputs.' % target_name)
- action_command = action.get('action')
- if action_command and not action_command[0]:
- raise GypError("Empty action as command in target %s." % target_name)
-
-
-def TurnIntIntoStrInDict(the_dict):
- """Given dict the_dict, recursively converts all integers into strings.
- """
- # Use items instead of iteritems because there's no need to try to look at
- # reinserted keys and their associated values.
- for k, v in the_dict.items():
- if type(v) is int:
- v = str(v)
- the_dict[k] = v
- elif type(v) is dict:
- TurnIntIntoStrInDict(v)
- elif type(v) is list:
- TurnIntIntoStrInList(v)
-
- if type(k) is int:
- del the_dict[k]
- the_dict[str(k)] = v
-
-
-def TurnIntIntoStrInList(the_list):
- """Given list the_list, recursively converts all integers into strings.
- """
- for index in xrange(0, len(the_list)):
- item = the_list[index]
- if type(item) is int:
- the_list[index] = str(item)
- elif type(item) is dict:
- TurnIntIntoStrInDict(item)
- elif type(item) is list:
- TurnIntIntoStrInList(item)
-
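
Usage sketch (data illustrative); note that int keys are converted as well as int values:

    d = {'depth': 2, 1: ['a', 3], 'nested': {'x': 0}}
    TurnIntIntoStrInDict(d)
    # d == {'depth': '2', '1': ['a', '3'], 'nested': {'x': '0'}}
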
-
-def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets,
- data):
- """Return only the targets that are deep dependencies of |root_targets|."""
- qualified_root_targets = []
- for target in root_targets:
- target = target.strip()
- qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
- if not qualified_targets:
- raise GypError("Could not find target %s" % target)
- qualified_root_targets.extend(qualified_targets)
-
- wanted_targets = {}
- for target in qualified_root_targets:
- wanted_targets[target] = targets[target]
- for dependency in dependency_nodes[target].DeepDependencies():
- wanted_targets[dependency] = targets[dependency]
-
- wanted_flat_list = [t for t in flat_list if t in wanted_targets]
-
- # Prune unwanted targets from each build_file's data dict.
- for build_file in data['target_build_files']:
- if not 'targets' in data[build_file]:
- continue
- new_targets = []
- for target in data[build_file]['targets']:
- qualified_name = gyp.common.QualifiedTarget(build_file,
- target['target_name'],
- target['toolset'])
- if qualified_name in wanted_targets:
- new_targets.append(target)
- data[build_file]['targets'] = new_targets
-
- return wanted_targets, wanted_flat_list
-
-
-def VerifyNoCollidingTargets(targets):
- """Verify that no two targets in the same directory share the same name.
-
- Arguments:
- targets: A list of targets in the form 'path/to/file.gyp:target_name'.
- """
- # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
- used = {}
- for target in targets:
- # Separate out 'path/to/file.gyp, 'target_name' from
- # 'path/to/file.gyp:target_name'.
- path, name = target.rsplit(':', 1)
- # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'.
- subdir, gyp = os.path.split(path)
- # Use '.' for the current directory '', so that the error messages make
- # more sense.
- if not subdir:
- subdir = '.'
- # Prepare a key like 'path/to:target_name'.
- key = subdir + ':' + name
- if key in used:
- # Complain if this target is already used.
- raise GypError('Duplicate target name "%s" in directory "%s" used both '
- 'in "%s" and "%s".' % (name, subdir, gyp, used[key]))
- used[key] = gyp
-
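- # Hedged illustration (hypothetical paths): these two targets collide
- # because they share a directory and a target name, even across files:
- #   VerifyNoCollidingTargets(['a/x.gyp:core', 'a/y.gyp:core'])  # GypError
- # whereas targets in different directories do not:
- #   VerifyNoCollidingTargets(['a/x.gyp:core', 'b/x.gyp:core'])  # OK
-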
-
-def SetGeneratorGlobals(generator_input_info):
- # Set up path_sections and non_configuration_keys with the default data plus
- # the generator-specific data.
- global path_sections
- path_sections = set(base_path_sections)
- path_sections.update(generator_input_info['path_sections'])
-
- global non_configuration_keys
- non_configuration_keys = base_non_configuration_keys[:]
- non_configuration_keys.extend(generator_input_info['non_configuration_keys'])
-
- global multiple_toolsets
- multiple_toolsets = generator_input_info[
- 'generator_supports_multiple_toolsets']
-
- global generator_filelist_paths
- generator_filelist_paths = generator_input_info['generator_filelist_paths']
-
-
-def Load(build_files, variables, includes, depth, generator_input_info, check,
- circular_check, duplicate_basename_check, parallel, root_targets):
- SetGeneratorGlobals(generator_input_info)
- # A generator can have lists other than 'sources' processed for rules.
- extra_sources_for_rules = generator_input_info['extra_sources_for_rules']
-
- # Load build files. This loads every target-containing build file into
- # the |data| dictionary such that the keys to |data| are build file names,
- # and the values are the entire build file contents after "early" or "pre"
- # processing has been done and includes have been resolved.
- # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
- # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
- # track of the keys corresponding to "target" files.
- data = {'target_build_files': set()}
- # Normalize paths everywhere. This is important because paths will be
- # used as keys to the data dict and for references between input files.
- build_files = set(map(os.path.normpath, build_files))
- if parallel:
- LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
- check, generator_input_info)
- else:
- aux_data = {}
- for build_file in build_files:
- try:
- LoadTargetBuildFile(build_file, data, aux_data,
- variables, includes, depth, check, True)
- except Exception, e:
- gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
- raise
-
- # Build a dict to access each target's subdict by qualified name.
- targets = BuildTargetsDict(data)
-
- # Fully qualify all dependency links.
- QualifyDependencies(targets)
-
- # Remove self-dependencies from targets that have 'prune_self_dependencies'
- # set to 1.
- RemoveSelfDependencies(targets)
-
- # Expand dependencies specified as build_file:*.
- ExpandWildcardDependencies(targets, data)
-
- # Remove all dependencies marked as 'link_dependency' from the targets of
- # type 'none'.
- RemoveLinkDependenciesFromNoneTargets(targets)
-
- # Apply exclude (!) and regex (/) list filters only for dependency_sections.
- for target_name, target_dict in targets.iteritems():
- tmp_dict = {}
- for key_base in dependency_sections:
- for op in ('', '!', '/'):
- key = key_base + op
- if key in target_dict:
- tmp_dict[key] = target_dict[key]
- del target_dict[key]
- ProcessListFiltersInDict(target_name, tmp_dict)
- # Write the results back to |target_dict|.
- for key in tmp_dict:
- target_dict[key] = tmp_dict[key]
-
- # Make sure every dependency appears at most once.
- RemoveDuplicateDependencies(targets)
-
- if circular_check:
- # Make sure that any targets in a.gyp don't contain dependencies in other
- # .gyp files that further depend on a.gyp.
- VerifyNoGYPFileCircularDependencies(targets)
-
- [dependency_nodes, flat_list] = BuildDependencyList(targets)
-
- if root_targets:
- # Remove, from |targets| and |flat_list|, the targets that are not deep
- # dependencies of the targets specified in |root_targets|.
- targets, flat_list = PruneUnwantedTargets(
- targets, flat_list, dependency_nodes, root_targets, data)
-
- # Check that no two targets in the same directory have the same name.
- VerifyNoCollidingTargets(flat_list)
-
- # Handle dependent settings of various types.
- for settings_type in ['all_dependent_settings',
- 'direct_dependent_settings',
- 'link_settings']:
- DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)
-
- # Take out the dependent settings now that they've been published to all
- # of the targets that require them.
- for target in flat_list:
- if settings_type in targets[target]:
- del targets[target][settings_type]
-
- # Make sure static libraries don't declare dependencies on other static
- # libraries, but that linkables depend on all unlinked static libraries
- # that they need so that their link steps will be correct.
- gii = generator_input_info
- if gii['generator_wants_static_library_dependencies_adjusted']:
- AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
- gii['generator_wants_sorted_dependencies'])
-
- # Apply "post"/"late"/"target" variable expansions and condition evaluations.
- for target in flat_list:
- target_dict = targets[target]
- build_file = gyp.common.BuildFile(target)
- ProcessVariablesAndConditionsInDict(
- target_dict, PHASE_LATE, variables, build_file)
-
- # Move everything that can go into a "configurations" section into one.
- for target in flat_list:
- target_dict = targets[target]
- SetUpConfigurations(target, target_dict)
-
- # Apply exclude (!) and regex (/) list filters.
- for target in flat_list:
- target_dict = targets[target]
- ProcessListFiltersInDict(target, target_dict)
-
- # Apply "latelate" variable expansions and condition evaluations.
- for target in flat_list:
- target_dict = targets[target]
- build_file = gyp.common.BuildFile(target)
- ProcessVariablesAndConditionsInDict(
- target_dict, PHASE_LATELATE, variables, build_file)
-
- # Make sure that the rules make sense, and build up rule_sources lists as
- # needed. Not all generators will need to use the rule_sources lists, but
- # some may, and it seems best to build the list in a common spot.
- # Also validate actions and run_as elements in targets.
- for target in flat_list:
- target_dict = targets[target]
- build_file = gyp.common.BuildFile(target)
- ValidateTargetType(target, target_dict)
- ValidateSourcesInTarget(target, target_dict, build_file,
- duplicate_basename_check)
- ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
- ValidateRunAsInTarget(target, target_dict, build_file)
- ValidateActionsInTarget(target, target_dict, build_file)
-
- # Generators might not expect ints. Turn them into strs.
- TurnIntIntoStrInDict(data)
-
- # TODO(mark): Return |data| for now because the generator needs a list of
- # build files that came in. In the future, maybe it should just accept
- # a list, and not the whole data dict.
- return [flat_list, targets, data]
diff --git a/deps/gyp/pylib/gyp/input_test.py b/deps/gyp/pylib/gyp/input_test.py
deleted file mode 100755
index 4234fbb830..0000000000
--- a/deps/gyp/pylib/gyp/input_test.py
+++ /dev/null
@@ -1,90 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for the input.py file."""
-
-import gyp.input
-import unittest
-import sys
-
-
-class TestFindCycles(unittest.TestCase):
- def setUp(self):
- self.nodes = {}
- for x in ('a', 'b', 'c', 'd', 'e'):
- self.nodes[x] = gyp.input.DependencyGraphNode(x)
-
- def _create_dependency(self, dependent, dependency):
- dependent.dependencies.append(dependency)
- dependency.dependents.append(dependent)
-
- def test_no_cycle_empty_graph(self):
- for label, node in self.nodes.iteritems():
- self.assertEquals([], node.FindCycles())
-
- def test_no_cycle_line(self):
- self._create_dependency(self.nodes['a'], self.nodes['b'])
- self._create_dependency(self.nodes['b'], self.nodes['c'])
- self._create_dependency(self.nodes['c'], self.nodes['d'])
-
- for label, node in self.nodes.iteritems():
- self.assertEquals([], node.FindCycles())
-
- def test_no_cycle_dag(self):
- self._create_dependency(self.nodes['a'], self.nodes['b'])
- self._create_dependency(self.nodes['a'], self.nodes['c'])
- self._create_dependency(self.nodes['b'], self.nodes['c'])
-
- for label, node in self.nodes.iteritems():
- self.assertEquals([], node.FindCycles())
-
- def test_cycle_self_reference(self):
- self._create_dependency(self.nodes['a'], self.nodes['a'])
-
- self.assertEquals([[self.nodes['a'], self.nodes['a']]],
- self.nodes['a'].FindCycles())
-
- def test_cycle_two_nodes(self):
- self._create_dependency(self.nodes['a'], self.nodes['b'])
- self._create_dependency(self.nodes['b'], self.nodes['a'])
-
- self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['a']]],
- self.nodes['a'].FindCycles())
- self.assertEquals([[self.nodes['b'], self.nodes['a'], self.nodes['b']]],
- self.nodes['b'].FindCycles())
-
- def test_two_cycles(self):
- self._create_dependency(self.nodes['a'], self.nodes['b'])
- self._create_dependency(self.nodes['b'], self.nodes['a'])
-
- self._create_dependency(self.nodes['b'], self.nodes['c'])
- self._create_dependency(self.nodes['c'], self.nodes['b'])
-
- cycles = self.nodes['a'].FindCycles()
- self.assertTrue(
- [self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles)
- self.assertTrue(
- [self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles)
- self.assertEquals(2, len(cycles))
-
- def test_big_cycle(self):
- self._create_dependency(self.nodes['a'], self.nodes['b'])
- self._create_dependency(self.nodes['b'], self.nodes['c'])
- self._create_dependency(self.nodes['c'], self.nodes['d'])
- self._create_dependency(self.nodes['d'], self.nodes['e'])
- self._create_dependency(self.nodes['e'], self.nodes['a'])
-
- self.assertEquals([[self.nodes['a'],
- self.nodes['b'],
- self.nodes['c'],
- self.nodes['d'],
- self.nodes['e'],
- self.nodes['a']]],
- self.nodes['a'].FindCycles())
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/deps/gyp/pylib/gyp/mac_tool.py b/deps/gyp/pylib/gyp/mac_tool.py
deleted file mode 100755
index e146198b07..0000000000
--- a/deps/gyp/pylib/gyp/mac_tool.py
+++ /dev/null
@@ -1,610 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
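-
- # Hedged illustration (hypothetical arguments): Dispatch maps the command
- # name to a method, so ['copy-bundle-resource', 'in.strings', 'out.strings']
- # resolves to ExecCopyBundleResource('in.strings', 'out.strings').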
-
- def ExecCopyBundleResource(self, source, dest, convert_to_binary=False):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
- if os.path.relpath(source):
- source = os.path.join(base, source)
- if os.path.relpath(dest):
- dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
- except Exception:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary=False, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
- # Xcode supports various suffixes on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("<string>${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
- # Now write out PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
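-
- # Hedged usage sketch (hypothetical paths), as invoked via gyp-mac-tool:
- #   gyp-mac-tool flock /tmp/gyp.lock libtool -static -o out.a a.o
- # serializes the libtool invocation behind the /tmp/gyp.lock file.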
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
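-
- # Hedged illustration for ExecPackageFramework('Foo.framework', 'A')
- # (hypothetical framework name): the resulting symlinks are
- #   Foo.framework/Versions/Current -> A
- #   Foo.framework/Foo -> Versions/Current/Foo
- #   Foo.framework/Resources -> Versions/Current/Resources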
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the input .xcassets files. The
- |keys| argument is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contain an application icon
- or a launch image.
-
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs do not contain an imageset.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if inputs path are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best matches the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple containing two dictionaries: variable substitutions and values
- to override when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
- Checks all the installed provisioning profiles (or, if the user specified
- the PROVISIONING_PROFILE variable, only that one) and selects the most
- specific one that corresponds to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
- Copy of data where each reference to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
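-
- # Hedged illustration (hypothetical values):
- #   self._ExpandVariables({'application-identifier': '$(AppIdentifierPrefix)app'},
- #                         {'AppIdentifierPrefix': 'ABC123.'})
- # returns {'application-identifier': 'ABC123.app'}.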
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/deps/gyp/pylib/gyp/msvs_emulation.py b/deps/gyp/pylib/gyp/msvs_emulation.py
deleted file mode 100644
index ca67b122f0..0000000000
--- a/deps/gyp/pylib/gyp/msvs_emulation.py
+++ /dev/null
@@ -1,1087 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-This module helps emulate Visual Studio 2008 behavior on top of other
-build systems, primarily ninja.
-"""
-
-import os
-import re
-import subprocess
-import sys
-
-from gyp.common import OrderedSet
-import gyp.MSVSUtil
-import gyp.MSVSVersion
-
-
-windows_quoter_regex = re.compile(r'(\\*)"')
-
-
-def QuoteForRspFile(arg):
- """Quote a command line argument so that it appears as one argument when
- processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
- Windows programs)."""
- # See http://goo.gl/cuFbX and http://goo.gl/dhPnp including the comment
- # threads. This is actually the quoting rules for CommandLineToArgvW, not
- # for the shell, because the shell doesn't do anything in Windows. This
- # works more or less because most programs (including the compiler, etc.)
- # use that function to handle command line arguments.
-
- # For a literal quote, CommandLineToArgvW requires 2n+1 backslashes
- # preceding it, and results in n backslashes + the quote. So we substitute
- # in 2* what we match, +1 more, plus the quote.
- arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)
-
- # %'s also need to be doubled, otherwise they're interpreted as batch
- # positional arguments. Doubling also ensures that a literal % reaches the
- # program unchanged, so an argument that merely looks like an environment
- # variable reference (e.g. %PATH%) is passed through rather than expanded.
- arg = arg.replace('%', '%%')
-
- # These commands are used in rsp files, so no escaping for the shell (via ^)
- # is necessary.
-
- # Finally, wrap the whole thing in quotes so that the above quote rule
- # applies and whitespace isn't a word break.
- return '"' + arg + '"'
-
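- # Hedged illustrations of the rules above (results shown as raw characters):
- #   QuoteForRspFile('a b')      -> "a b"
- #   QuoteForRspFile('say "hi"') -> "say \"hi\""
- #   QuoteForRspFile('100%')     -> "100%%"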
-
-def EncodeRspFileList(args):
- """Process a list of arguments using QuoteCmdExeArgument."""
- # Note that the first argument is assumed to be the command. Don't add
- # quotes around it because then built-ins like 'echo', etc. won't work.
- # Take care to normpath only the path in the case of 'call ../x.bat' because
- # otherwise the whole thing is incorrectly interpreted as a path and not
- # normalized correctly.
- if not args: return ''
- if args[0].startswith('call '):
- call, program = args[0].split(' ', 1)
- program = call + ' ' + os.path.normpath(program)
- else:
- program = os.path.normpath(args[0])
- return program + ' ' + ' '.join(QuoteForRspFile(arg) for arg in args[1:])
-
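- # Hedged illustration (hypothetical command line): the first argument stays
- # unquoted so shell built-ins keep working:
- #   EncodeRspFileList(['link.exe', '/OUT:a b.exe']) -> link.exe "/OUT:a b.exe"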
-
-def _GenericRetrieve(root, default, path):
- """Given a list of dictionary keys |path| and a tree of dicts |root|, find
- value at path, or return |default| if any of the path doesn't exist."""
- if not root:
- return default
- if not path:
- return root
- return _GenericRetrieve(root.get(path[0]), default, path[1:])
-
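- # Hedged illustration: _GenericRetrieve({'a': {'b': 3}}, 0, ['a', 'b'])
- # returns 3, while a missing key falls back to the default. Note that a
- # falsy leaf value (e.g. '' or 0) also falls back to the default.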
-
-def _AddPrefix(element, prefix):
- """Add |prefix| to |element| or each subelement if element is iterable."""
- if element is None:
- return element
- # Note, not Iterable because we don't want to handle strings like that.
- if isinstance(element, list) or isinstance(element, tuple):
- return [prefix + e for e in element]
- else:
- return prefix + element
-
-
-def _DoRemapping(element, map):
- """If |element| then remap it through |map|. If |element| is iterable then
- each item will be remapped. Any elements not found will be removed."""
- if map is not None and element is not None:
- if not callable(map):
- map = map.get # Assume it's a dict, otherwise a callable to do the remap.
- if isinstance(element, list) or isinstance(element, tuple):
- element = filter(None, [map(elem) for elem in element])
- else:
- element = map(element)
- return element
-
-
-def _AppendOrReturn(append, element):
- """If |append| is None, simply return |element|. If |append| is not None,
- then add |element| to it, adding each item in |element| if it's a list or
- tuple."""
- if append is not None and element is not None:
- if isinstance(element, list) or isinstance(element, tuple):
- append.extend(element)
- else:
- append.append(element)
- else:
- return element
-
-
-def _FindDirectXInstallation():
- """Try to find an installation location for the DirectX SDK. Check for the
- standard environment variable, and if that doesn't exist, try to find
- via the registry. May return None if not found in either location."""
- # Return previously calculated value, if there is one
- if hasattr(_FindDirectXInstallation, 'dxsdk_dir'):
- return _FindDirectXInstallation.dxsdk_dir
-
- dxsdk_dir = os.environ.get('DXSDK_DIR')
- if not dxsdk_dir:
- # Setup params to pass to and attempt to launch reg.exe.
- cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s']
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- for line in p.communicate()[0].splitlines():
- if 'InstallPath' in line:
- dxsdk_dir = line.split(' ')[3] + "\\"
-
- # Cache return value
- _FindDirectXInstallation.dxsdk_dir = dxsdk_dir
- return dxsdk_dir
-
-
-def GetGlobalVSMacroEnv(vs_version):
- """Get a dict of variables mapping internal VS macro names to their gyp
- equivalents. Returns all variables that are independent of the target."""
- env = {}
- # '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
- # Visual Studio is actually installed.
- if vs_version.Path():
- env['$(VSInstallDir)'] = vs_version.Path()
- env['$(VCInstallDir)'] = os.path.join(vs_version.Path(), 'VC') + '\\'
- # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
- # set. This happens when the SDK is sync'd via src-internal, rather than
- # by typical end-user installation of the SDK. If it's not set, we don't
- # want to leave the unexpanded variable in the path, so simply strip it.
- dxsdk_dir = _FindDirectXInstallation()
- env['$(DXSDK_DIR)'] = dxsdk_dir if dxsdk_dir else ''
- # Try to find an installation location for the Windows DDK by checking
- # the WDK_DIR environment variable, may be None.
- env['$(WDK_DIR)'] = os.environ.get('WDK_DIR', '')
- return env
-
-def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
- """Finds msvs_system_include_dirs that are common to all targets, removes
- them from all targets, and returns an OrderedSet containing them."""
- all_system_includes = OrderedSet(
- configs[0].get('msvs_system_include_dirs', []))
- for config in configs[1:]:
- system_includes = config.get('msvs_system_include_dirs', [])
- all_system_includes = all_system_includes & OrderedSet(system_includes)
- if not all_system_includes:
- return None
- # Expand macros in all_system_includes.
- env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags))
- expanded_system_includes = OrderedSet([ExpandMacros(include, env)
- for include in all_system_includes])
- if any(['$' in include for include in expanded_system_includes]):
- # Some path relies on target-specific variables, bail.
- return None
-
- # Remove system includes shared by all targets from the targets.
- for config in configs:
- includes = config.get('msvs_system_include_dirs', [])
- if includes: # Don't insert a msvs_system_include_dirs key if not needed.
- # This must check the unexpanded includes list:
- new_includes = [i for i in includes if i not in all_system_includes]
- config['msvs_system_include_dirs'] = new_includes
- return expanded_system_includes
-
-
-class MsvsSettings(object):
- """A class that understands the gyp 'msvs_...' values (especially the
- msvs_settings field). They largely correspond to the VS2008 IDE DOM. This
- class helps map those settings to command line options."""
-
- def __init__(self, spec, generator_flags):
- self.spec = spec
- self.vs_version = GetVSVersion(generator_flags)
-
- supported_fields = [
- ('msvs_configuration_attributes', dict),
- ('msvs_settings', dict),
- ('msvs_system_include_dirs', list),
- ('msvs_disabled_warnings', list),
- ('msvs_precompiled_header', str),
- ('msvs_precompiled_source', str),
- ('msvs_configuration_platform', str),
- ('msvs_target_platform', str),
- ]
- configs = spec['configurations']
- for field, default in supported_fields:
- setattr(self, field, {})
- for configname, config in configs.iteritems():
- getattr(self, field)[configname] = config.get(field, default())
-
- self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])
-
- unsupported_fields = [
- 'msvs_prebuild',
- 'msvs_postbuild',
- ]
- unsupported = []
- for field in unsupported_fields:
- for config in configs.values():
- if field in config:
- unsupported += ["%s not supported (target %s)." %
- (field, spec['target_name'])]
- if unsupported:
- raise Exception('\n'.join(unsupported))
-
- def GetExtension(self):
- """Returns the extension for the target, with no leading dot.
-
- Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
- the target type.
- """
- ext = self.spec.get('product_extension', None)
- if ext:
- return ext
- return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec['type'], '')
-
- def GetVSMacroEnv(self, base_to_build=None, config=None):
- """Get a dict of variables mapping internal VS macro names to their gyp
- equivalents."""
- target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64'
- target_name = self.spec.get('product_prefix', '') + \
- self.spec.get('product_name', self.spec['target_name'])
- target_dir = base_to_build + '\\' if base_to_build else ''
- target_ext = '.' + self.GetExtension()
- target_file_name = target_name + target_ext
-
- replacements = {
- '$(InputName)': '${root}',
- '$(InputPath)': '${source}',
- '$(IntDir)': '$!INTERMEDIATE_DIR',
- '$(OutDir)\\': target_dir,
- '$(PlatformName)': target_platform,
- '$(ProjectDir)\\': '',
- '$(ProjectName)': self.spec['target_name'],
- '$(TargetDir)\\': target_dir,
- '$(TargetExt)': target_ext,
- '$(TargetFileName)': target_file_name,
- '$(TargetName)': target_name,
- '$(TargetPath)': os.path.join(target_dir, target_file_name),
- }
- replacements.update(GetGlobalVSMacroEnv(self.vs_version))
- return replacements
-
- def ConvertVSMacros(self, s, base_to_build=None, config=None):
- """Convert from VS macro names to something equivalent."""
- env = self.GetVSMacroEnv(base_to_build, config=config)
- return ExpandMacros(s, env)
-
- def AdjustLibraries(self, libraries):
- """Strip -l from library if it's specified with that."""
- libs = [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
- return [lib + '.lib' if not lib.endswith('.lib') else lib for lib in libs]
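-
- # Hedged illustration: AdjustLibraries(['-lfoo', 'bar.lib', 'baz'])
- # returns ['foo.lib', 'bar.lib', 'baz.lib'].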
-
- def _GetAndMunge(self, field, path, default, prefix, append, map):
- """Retrieve a value from |field| at |path| or return |default|. If
- |append| is specified, and the item is found, it will be appended to that
- object instead of returned. If |map| is specified, results will be
- remapped through |map| before being returned or appended."""
- result = _GenericRetrieve(field, default, path)
- result = _DoRemapping(result, map)
- result = _AddPrefix(result, prefix)
- return _AppendOrReturn(append, result)
-
- class _GetWrapper(object):
- def __init__(self, parent, field, base_path, append=None):
- self.parent = parent
- self.field = field
- self.base_path = [base_path]
- self.append = append
- def __call__(self, name, map=None, prefix='', default=None):
- return self.parent._GetAndMunge(self.field, self.base_path + [name],
- default=default, prefix=prefix, append=self.append, map=map)
-
- def GetArch(self, config):
- """Get architecture based on msvs_configuration_platform and
- msvs_target_platform. Returns either 'x86' or 'x64'."""
- configuration_platform = self.msvs_configuration_platform.get(config, '')
- platform = self.msvs_target_platform.get(config, '')
- if not platform: # If no specific override, use the configuration's.
- platform = configuration_platform
- # Map from platform to architecture.
- return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86')
-
- def _TargetConfig(self, config):
- """Returns the target-specific configuration."""
- # There are two levels of architecture/platform specification in VS. The
- # first level is globally for the configuration (this is what we consider
- # "the" config at the gyp level, which will be something like 'Debug' or
- # 'Release_x64'), and a second target-specific configuration, which is an
- # override for the global one. |config| is remapped here to take into
- # account the local target-specific overrides to the global configuration.
- arch = self.GetArch(config)
- if arch == 'x64' and not config.endswith('_x64'):
- config += '_x64'
- if arch == 'x86' and config.endswith('_x64'):
- config = config.rsplit('_', 1)[0]
- return config
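-
- # Hedged illustration: if GetArch('Debug') is 'x64' (per the msvs platform
- # settings), _TargetConfig('Debug') returns 'Debug_x64'; an x86 arch
- # instead strips a trailing '_x64' from the configuration name.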
-
- def _Setting(self, path, config,
- default=None, prefix='', append=None, map=None):
- """_GetAndMunge for msvs_settings."""
- return self._GetAndMunge(
- self.msvs_settings[config], path, default, prefix, append, map)
-
- def _ConfigAttrib(self, path, config,
- default=None, prefix='', append=None, map=None):
- """_GetAndMunge for msvs_configuration_attributes."""
- return self._GetAndMunge(
- self.msvs_configuration_attributes[config],
- path, default, prefix, append, map)
-
- def AdjustIncludeDirs(self, include_dirs, config):
- """Updates include_dirs to expand VS specific paths, and adds the system
- include dirs used for platform SDK and similar."""
- config = self._TargetConfig(config)
- includes = include_dirs + self.msvs_system_include_dirs[config]
- includes.extend(self._Setting(
- ('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
- return [self.ConvertVSMacros(p, config=config) for p in includes]
-
- def AdjustMidlIncludeDirs(self, midl_include_dirs, config):
- """Updates midl_include_dirs to expand VS specific paths, and adds the
- system include dirs used for platform SDK and similar."""
- config = self._TargetConfig(config)
- includes = midl_include_dirs + self.msvs_system_include_dirs[config]
- includes.extend(self._Setting(
- ('VCMIDLTool', 'AdditionalIncludeDirectories'), config, default=[]))
- return [self.ConvertVSMacros(p, config=config) for p in includes]
-
- def GetComputedDefines(self, config):
- """Returns the set of defines that are injected to the defines list based
- on other VS settings."""
- config = self._TargetConfig(config)
- defines = []
- if self._ConfigAttrib(['CharacterSet'], config) == '1':
- defines.extend(('_UNICODE', 'UNICODE'))
- if self._ConfigAttrib(['CharacterSet'], config) == '2':
- defines.append('_MBCS')
- defines.extend(self._Setting(
- ('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[]))
- return defines
-
- def GetCompilerPdbName(self, config, expand_special):
- """Get the pdb file name that should be used for compiler invocations, or
- None if there's no explicit name specified."""
- config = self._TargetConfig(config)
- pdbname = self._Setting(
- ('VCCLCompilerTool', 'ProgramDataBaseFileName'), config)
- if pdbname:
- pdbname = expand_special(self.ConvertVSMacros(pdbname))
- return pdbname
-
- def GetMapFileName(self, config, expand_special):
- """Gets the explicitly overriden map file name for a target or returns None
- if it's not set."""
- config = self._TargetConfig(config)
- map_file = self._Setting(('VCLinkerTool', 'MapFileName'), config)
- if map_file:
- map_file = expand_special(self.ConvertVSMacros(map_file, config=config))
- return map_file
-
- def GetOutputName(self, config, expand_special):
- """Gets the explicitly overridden output name for a target or returns None
- if it's not overridden."""
- config = self._TargetConfig(config)
- type = self.spec['type']
- root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool'
- # TODO(scottmg): Handle OutputDirectory without OutputFile.
- output_file = self._Setting((root, 'OutputFile'), config)
- if output_file:
- output_file = expand_special(self.ConvertVSMacros(
- output_file, config=config))
- return output_file
-
- def GetPDBName(self, config, expand_special, default):
- """Gets the explicitly overridden pdb name for a target or returns
- default if it's not overridden, or if no pdb will be generated."""
- config = self._TargetConfig(config)
- output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
- generate_debug_info = self._Setting(
- ('VCLinkerTool', 'GenerateDebugInformation'), config)
- if generate_debug_info == 'true':
- if output_file:
- return expand_special(self.ConvertVSMacros(output_file, config=config))
- else:
- return default
- else:
- return None
-
- def GetNoImportLibrary(self, config):
- """If NoImportLibrary: true, ninja will not expect the output to include
- an import library."""
- config = self._TargetConfig(config)
- noimplib = self._Setting(('NoImportLibrary',), config)
- return noimplib == 'true'
-
- def GetAsmflags(self, config):
- """Returns the flags that need to be added to ml invocations."""
- config = self._TargetConfig(config)
- asmflags = []
- safeseh = self._Setting(('MASM', 'UseSafeExceptionHandlers'), config)
- if safeseh == 'true':
- asmflags.append('/safeseh')
- return asmflags
-
- def GetCflags(self, config):
- """Returns the flags that need to be added to .c and .cc compilations."""
- config = self._TargetConfig(config)
- cflags = []
- cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]])
- cl = self._GetWrapper(self, self.msvs_settings[config],
- 'VCCLCompilerTool', append=cflags)
- cl('Optimization',
- map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2')
- cl('InlineFunctionExpansion', prefix='/Ob')
- cl('DisableSpecificWarnings', prefix='/wd')
- cl('StringPooling', map={'true': '/GF'})
- cl('EnableFiberSafeOptimizations', map={'true': '/GT'})
- cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
- cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
- cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
- cl('FloatingPointModel',
- map={'0': 'precise', '1': 'strict', '2': 'fast'}, prefix='/fp:',
- default='0')
- cl('CompileAsManaged', map={'false': '', 'true': '/clr'})
- cl('WholeProgramOptimization', map={'true': '/GL'})
- cl('WarningLevel', prefix='/W')
- cl('WarnAsError', map={'true': '/WX'})
- cl('CallingConvention',
- map={'0': 'd', '1': 'r', '2': 'z', '3': 'v'}, prefix='/G')
- cl('DebugInformationFormat',
- map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
- cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
- cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'})
- cl('MinimalRebuild', map={'true': '/Gm'})
- cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'})
- cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC')
- cl('RuntimeLibrary',
- map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M')
- cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH')
- cl('DefaultCharIsUnsigned', map={'true': '/J'})
- cl('TreatWChar_tAsBuiltInType',
- map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
- cl('EnablePREfast', map={'true': '/analyze'})
- cl('AdditionalOptions', prefix='')
- cl('EnableEnhancedInstructionSet',
- map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32', '5': 'AVX2'},
- prefix='/arch:')
- cflags.extend(['/FI' + f for f in self._Setting(
- ('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
- if self.vs_version.short_name in ('2013', '2013e', '2015'):
- # New flag required in 2013 to maintain previous PDB behavior.
- cflags.append('/FS')
- # ninja handles parallelism by itself, don't have the compiler do it too.
- cflags = filter(lambda x: not x.startswith('/MP'), cflags)
- return cflags
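-
- # Hedged illustration (hypothetical settings): msvs_settings such as
- #   {'VCCLCompilerTool': {'Optimization': '0', 'WarningLevel': '3',
- #    'WarnAsError': 'true'}}
- # yields cflags including /Od, /W3 and /WX, plus defaults such as
- # /fp:precise from FloatingPointModel's default value.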
-
- def _GetPchFlags(self, config, extension):
- """Get the flags to be added to the cflags for precompiled header support.
- """
- config = self._TargetConfig(config)
- # The PCH is only built once by a particular source file. Usage of PCH must
- # only be for the same language (i.e. C vs. C++), so only include the pch
- # flags when the language matches.
- if self.msvs_precompiled_header[config]:
- source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
- if _LanguageMatchesForPch(source_ext, extension):
- pch = os.path.split(self.msvs_precompiled_header[config])[1]
- return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pch + '.pch']
- return []
-
- def GetCflagsC(self, config):
- """Returns the flags that need to be added to .c compilations."""
- config = self._TargetConfig(config)
- return self._GetPchFlags(config, '.c')
-
- def GetCflagsCC(self, config):
- """Returns the flags that need to be added to .cc compilations."""
- config = self._TargetConfig(config)
- return ['/TP'] + self._GetPchFlags(config, '.cc')
-
- def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
- """Get and normalize the list of paths in AdditionalLibraryDirectories
- setting."""
- config = self._TargetConfig(config)
- libpaths = self._Setting((root, 'AdditionalLibraryDirectories'),
- config, default=[])
- libpaths = [os.path.normpath(
- gyp_to_build_path(self.ConvertVSMacros(p, config=config)))
- for p in libpaths]
- return ['/LIBPATH:"' + p + '"' for p in libpaths]
-
- def GetLibFlags(self, config, gyp_to_build_path):
- """Returns the flags that need to be added to lib commands."""
- config = self._TargetConfig(config)
- libflags = []
- lib = self._GetWrapper(self, self.msvs_settings[config],
- 'VCLibrarianTool', append=libflags)
- libflags.extend(self._GetAdditionalLibraryDirectories(
- 'VCLibrarianTool', config, gyp_to_build_path))
- lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
- lib('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
- prefix='/MACHINE:')
- lib('AdditionalOptions')
- return libflags
-
- def GetDefFile(self, gyp_to_build_path):
- """Returns the .def file from sources, if any. Otherwise returns None."""
- spec = self.spec
- if spec['type'] in ('shared_library', 'loadable_module', 'executable'):
- def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
- if len(def_files) == 1:
- return gyp_to_build_path(def_files[0])
- elif len(def_files) > 1:
- raise Exception("Multiple .def files")
- return None
-
- def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
- """.def files get implicitly converted to a ModuleDefinitionFile for the
- linker in the VS generator. Emulate that behavior here."""
- def_file = self.GetDefFile(gyp_to_build_path)
- if def_file:
- ldflags.append('/DEF:"%s"' % def_file)
-
- def GetPGDName(self, config, expand_special):
- """Gets the explicitly overridden pgd name for a target or returns None
- if it's not overridden."""
- config = self._TargetConfig(config)
- output_file = self._Setting(
- ('VCLinkerTool', 'ProfileGuidedDatabase'), config)
- if output_file:
- output_file = expand_special(self.ConvertVSMacros(
- output_file, config=config))
- return output_file
-
- def GetLdflags(self, config, gyp_to_build_path, expand_special,
- manifest_base_name, output_name, is_executable, build_dir):
- """Returns the flags that need to be added to link commands, and the
- manifest files."""
- config = self._TargetConfig(config)
- ldflags = []
- ld = self._GetWrapper(self, self.msvs_settings[config],
- 'VCLinkerTool', append=ldflags)
- self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
- ld('GenerateDebugInformation', map={'true': '/DEBUG'})
- ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
- prefix='/MACHINE:')
- ldflags.extend(self._GetAdditionalLibraryDirectories(
- 'VCLinkerTool', config, gyp_to_build_path))
- ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
- ld('TreatLinkerWarningAsErrors', prefix='/WX',
- map={'true': '', 'false': ':NO'})
- out = self.GetOutputName(config, expand_special)
- if out:
- ldflags.append('/OUT:' + out)
- pdb = self.GetPDBName(config, expand_special, output_name + '.pdb')
- if pdb:
- ldflags.append('/PDB:' + pdb)
- pgd = self.GetPGDName(config, expand_special)
- if pgd:
- ldflags.append('/PGD:' + pgd)
- map_file = self.GetMapFileName(config, expand_special)
- ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
- else '/MAP'})
- ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
- ld('AdditionalOptions', prefix='')
-
- minimum_required_version = self._Setting(
- ('VCLinkerTool', 'MinimumRequiredVersion'), config, default='')
- if minimum_required_version:
- minimum_required_version = ',' + minimum_required_version
- ld('SubSystem',
- map={'1': 'CONSOLE%s' % minimum_required_version,
- '2': 'WINDOWS%s' % minimum_required_version},
- prefix='/SUBSYSTEM:')
-
- stack_reserve_size = self._Setting(
- ('VCLinkerTool', 'StackReserveSize'), config, default='')
- if stack_reserve_size:
- stack_commit_size = self._Setting(
- ('VCLinkerTool', 'StackCommitSize'), config, default='')
- if stack_commit_size:
- stack_commit_size = ',' + stack_commit_size
- ldflags.append('/STACK:%s%s' % (stack_reserve_size, stack_commit_size))
-
- ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
- ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
- ld('BaseAddress', prefix='/BASE:')
- ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
- ld('RandomizedBaseAddress',
- map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
- ld('DataExecutionPrevention',
- map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
- ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
- ld('ForceSymbolReferences', prefix='/INCLUDE:')
- ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
- ld('LinkTimeCodeGeneration',
- map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE',
- '4': ':PGUPDATE'},
- prefix='/LTCG')
- ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
- ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
- ld('EntryPointSymbol', prefix='/ENTRY:')
- ld('Profile', map={'true': '/PROFILE'})
- ld('LargeAddressAware',
- map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE')
- # TODO(scottmg): This should sort of be somewhere else (not really a flag).
- ld('AdditionalDependencies', prefix='')
-
- if self.GetArch(config) == 'x86':
- safeseh_default = 'true'
- else:
- safeseh_default = None
- ld('ImageHasSafeExceptionHandlers',
- map={'false': ':NO', 'true': ''}, prefix='/SAFESEH',
- default=safeseh_default)
-
- # If the base address is not specifically controlled, DYNAMICBASE should
- # be on by default.
- base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
- ldflags)
- if not base_flags:
- ldflags.append('/DYNAMICBASE')
-
- # If the NXCOMPAT flag has not been specified, default to on. Despite the
- # documentation that says this only defaults to on when the subsystem is
- # Vista or greater (which applies to the linker), the IDE defaults it on
- # unless it's explicitly off.
- if not filter(lambda x: 'NXCOMPAT' in x, ldflags):
- ldflags.append('/NXCOMPAT')
-
- have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
- manifest_flags, intermediate_manifest, manifest_files = \
- self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path,
- is_executable and not have_def_file, build_dir)
- ldflags.extend(manifest_flags)
- return ldflags, intermediate_manifest, manifest_files
-
- def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
- allow_isolation, build_dir):
- """Returns a 3-tuple:
- - the set of flags that need to be added to the link to generate
- a default manifest
- - the intermediate manifest that the linker will generate, which should be
- used to assert that it doesn't add anything to the merged one.
- - the list of all the manifest files to be merged by the manifest tool and
- included into the link."""
- generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
- config,
- default='true')
- if generate_manifest != 'true':
- # This means not only that the linker should not generate the intermediate
- # manifest but also that the manifest tool should do nothing even when
- # additional manifests are specified.
- return ['/MANIFEST:NO'], [], []
-
- output_name = name + '.intermediate.manifest'
- flags = [
- '/MANIFEST',
- '/ManifestFile:' + output_name,
- ]
-
- # Instead of using the MANIFESTUAC flags, we generate a .manifest to
- # include into the list of manifests. This allows us to avoid the need to
- # do two passes during linking. The /MANIFEST flag and /ManifestFile are
- # still used, and the intermediate manifest is used to assert that the
- # final manifest we get from merging all the additional manifest files
- # (plus the one we generate here) isn't modified by merging the
- # intermediate into it.
-
- # Always NO, because we generate a manifest file that has what we want.
- flags.append('/MANIFESTUAC:NO')
-
- config = self._TargetConfig(config)
- enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
- default='true')
- manifest_files = []
- generated_manifest_outer = \
-"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \
-"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \
-"</assembly>"
- if enable_uac == 'true':
- execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
- config, default='0')
- execution_level_map = {
- '0': 'asInvoker',
- '1': 'highestAvailable',
- '2': 'requireAdministrator'
- }
-
- ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
- default='false')
-
- inner = '''
-<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
- <security>
- <requestedPrivileges>
- <requestedExecutionLevel level='%s' uiAccess='%s' />
- </requestedPrivileges>
- </security>
-</trustInfo>''' % (execution_level_map[execution_level], ui_access)
- else:
- inner = ''
-
- generated_manifest_contents = generated_manifest_outer % inner
- generated_name = name + '.generated.manifest'
- # Need to join with the build_dir here as we're writing it during
- # generation time, but we return the un-joined version because the build
- # will occur in that directory. We only write the file if the contents
- # have changed so that simply regenerating the project files doesn't
- # cause a relink.
- build_dir_generated_name = os.path.join(build_dir, generated_name)
- gyp.common.EnsureDirExists(build_dir_generated_name)
- f = gyp.common.WriteOnDiff(build_dir_generated_name)
- f.write(generated_manifest_contents)
- f.close()
- manifest_files = [generated_name]
-
- if allow_isolation:
- flags.append('/ALLOWISOLATION')
-
- manifest_files += self._GetAdditionalManifestFiles(config,
- gyp_to_build_path)
- return flags, output_name, manifest_files
-
- def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
- """Gets additional manifest files that are added to the default one
- generated by the linker."""
- files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config,
- default=[])
- if isinstance(files, str):
- files = files.split(';')
- return [os.path.normpath(
- gyp_to_build_path(self.ConvertVSMacros(f, config=config)))
- for f in files]
-
- def IsUseLibraryDependencyInputs(self, config):
- """Returns whether the target should be linked via Use Library Dependency
- Inputs (using component .objs of a given .lib)."""
- config = self._TargetConfig(config)
- uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config)
- return uldi == 'true'
-
- def IsEmbedManifest(self, config):
- """Returns whether manifest should be linked into binary."""
- config = self._TargetConfig(config)
- embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config,
- default='true')
- return embed == 'true'
-
- def IsLinkIncremental(self, config):
- """Returns whether the target should be linked incrementally."""
- config = self._TargetConfig(config)
- link_inc = self._Setting(('VCLinkerTool', 'LinkIncremental'), config)
- return link_inc != '1'
-
- def GetRcflags(self, config, gyp_to_ninja_path):
- """Returns the flags that need to be added to invocations of the resource
- compiler."""
- config = self._TargetConfig(config)
- rcflags = []
- rc = self._GetWrapper(self, self.msvs_settings[config],
- 'VCResourceCompilerTool', append=rcflags)
- rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I')
- rcflags.append('/I' + gyp_to_ninja_path('.'))
- rc('PreprocessorDefinitions', prefix='/d')
- # /l arg must be in hex without leading '0x'
- rc('Culture', prefix='/l', map=lambda x: hex(int(x))[2:])
- return rcflags
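-
- # Illustrative sketch: Culture='1033' (en-US) is emitted as '/l409', since
- # the /l argument must be the LCID in hex without the leading '0x'.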
-
- def BuildCygwinBashCommandLine(self, args, path_to_base):
- """Build a command line that runs args via cygwin bash. We assume that all
- incoming paths are in Windows normpath'd form, so they need to be
- converted to posix style for the part of the command line that's passed to
- bash. We also have to do some Visual Studio macro emulation here because
- various rules use magic VS names for things. Also note that rules that
- contain ninja variables cannot be fixed here (for example ${source}), so
- the outer generator needs to make sure that the paths that are written out
- are in posix style, if the command line will be used here."""
- cygwin_dir = os.path.normpath(
- os.path.join(path_to_base, self.msvs_cygwin_dirs[0]))
- cd = ('cd %s' % path_to_base).replace('\\', '/')
- args = [a.replace('\\', '/').replace('"', '\\"') for a in args]
- args = ["'%s'" % a.replace("'", "'\\''") for a in args]
- bash_cmd = ' '.join(args)
- cmd = (
- 'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir +
- 'bash -c "%s ; %s"' % (cd, bash_cmd))
- return cmd
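-
- # Illustrative sketch (assuming msvs_cygwin_dirs[0] is
- # 'third_party/cygwin'; all values hypothetical):
- #   BuildCygwinBashCommandLine(['cp', 'a\\b.txt', 'out'], '..\\..')
- # produces roughly:
- #   call "..\..\third_party\cygwin\setup_env.bat" && set CYGWIN=nontsec &&
- #   bash -c "cd ../.. ; 'cp' 'a/b.txt' 'out'"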
-
- def IsRuleRunUnderCygwin(self, rule):
- """Determine if an action should be run under cygwin. If the variable is
- unset or set to 1, we use cygwin."""
- return int(rule.get('msvs_cygwin_shell',
- self.spec.get('msvs_cygwin_shell', 1))) != 0
-
- def _HasExplicitRuleForExtension(self, spec, extension):
- """Determine if there's an explicit rule for a particular extension."""
- for rule in spec.get('rules', []):
- if rule['extension'] == extension:
- return True
- return False
-
- def _HasExplicitIdlActions(self, spec):
- """Determine if an action should not run midl for .idl files."""
- return any([action.get('explicit_idl_action', 0)
- for action in spec.get('actions', [])])
-
- def HasExplicitIdlRulesOrActions(self, spec):
- """Determine if there's an explicit rule or action for idl files. When
- there isn't, we need to generate implicit rules to build MIDL .idl files."""
- return (self._HasExplicitRuleForExtension(spec, 'idl') or
- self._HasExplicitIdlActions(spec))
-
- def HasExplicitAsmRules(self, spec):
- """Determine if there's an explicit rule for asm files. When there isn't we
- need to generate implicit rules to assemble .asm files."""
- return self._HasExplicitRuleForExtension(spec, 'asm')
-
- def GetIdlBuildData(self, source, config):
- """Determine the implicit outputs for an idl file. Returns output
- directory, outputs, and variables and flags that are required."""
- config = self._TargetConfig(config)
- midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool')
- def midl(name, default=None):
- return self.ConvertVSMacros(midl_get(name, default=default),
- config=config)
- tlb = midl('TypeLibraryName', default='${root}.tlb')
- header = midl('HeaderFileName', default='${root}.h')
- dlldata = midl('DLLDataFileName', default='dlldata.c')
- iid = midl('InterfaceIdentifierFileName', default='${root}_i.c')
- proxy = midl('ProxyFileName', default='${root}_p.c')
- # Note that .tlb is not included in the outputs, as whether it is
- # generated depends on the content of the input idl file.
- outdir = midl('OutputDirectory', default='')
- output = [header, dlldata, iid, proxy]
- variables = [('tlb', tlb),
- ('h', header),
- ('dlldata', dlldata),
- ('iid', iid),
- ('proxy', proxy)]
- # TODO(scottmg): Are there configuration settings to set these flags?
- target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64'
- flags = ['/char', 'signed', '/env', target_platform, '/Oicf']
- return outdir, output, variables, flags
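-
- # Illustrative sketch: for an x86 target with default MIDL settings, this
- # returns outdir='', output=['${root}.h', 'dlldata.c', '${root}_i.c',
- # '${root}_p.c'], and flags=['/char', 'signed', '/env', 'win32', '/Oicf']
- # (the ${root} placeholder is expanded later by the generator).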
-
-
-def _LanguageMatchesForPch(source_ext, pch_source_ext):
- c_exts = ('.c',)
- cc_exts = ('.cc', '.cxx', '.cpp')
- return ((source_ext in c_exts and pch_source_ext in c_exts) or
- (source_ext in cc_exts and pch_source_ext in cc_exts))
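-
-# e.g. _LanguageMatchesForPch('.cpp', '.cc') returns True, while
-# _LanguageMatchesForPch('.c', '.cc') returns False.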
-
-
-class PrecompiledHeader(object):
- """Helper to generate dependencies and build rules to handle generation of
- precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
- """
- def __init__(
- self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext):
- self.settings = settings
- self.config = config
- pch_source = self.settings.msvs_precompiled_source[self.config]
- self.pch_source = gyp_to_build_path(pch_source)
- filename, _ = os.path.splitext(pch_source)
- self.output_obj = gyp_to_unique_output(filename + obj_ext).lower()
-
- def _PchHeader(self):
- """Get the header that will appear in an #include line for all source
- files."""
- return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]
-
- def GetObjDependencies(self, sources, objs, arch):
- """Given a list of sources files and the corresponding object files,
- returns a list of the pch files that should be depended upon. The
- additional wrapping in the return value is for interface compatibility
- with make.py on Mac, and xcode_emulation.py."""
- assert arch is None
- if not self._PchHeader():
- return []
- pch_ext = os.path.splitext(self.pch_source)[1]
- for source in sources:
- if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext):
- return [(None, None, self.output_obj)]
- return []
-
- def GetPchBuildCommands(self, arch):
- """Not used on Windows as there are no additional build steps required
- (instead, existing steps are modified in GetFlagsModifications below)."""
- return []
-
- def GetFlagsModifications(self, input, output, implicit, command,
- cflags_c, cflags_cc, expand_special):
- """Get the modified cflags and implicit dependencies that should be used
- for the pch compilation step."""
- if input == self.pch_source:
- pch_output = ['/Yc' + self._PchHeader()]
- if command == 'cxx':
- return ([('cflags_cc', map(expand_special, cflags_cc + pch_output))],
- self.output_obj, [])
- elif command == 'cc':
- return ([('cflags_c', map(expand_special, cflags_c + pch_output))],
- self.output_obj, [])
- return [], output, implicit
-
-
-vs_version = None
-def GetVSVersion(generator_flags):
- global vs_version
- if not vs_version:
- vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
- generator_flags.get('msvs_version', 'auto'),
- allow_fallback=False)
- return vs_version
-
-def _GetVsvarsSetupArgs(generator_flags, arch):
- vs = GetVSVersion(generator_flags)
- return vs.SetupScript()
-
-def ExpandMacros(string, expansions):
- """Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
- for the canonical way to retrieve a suitable dict."""
- if '$' in string:
- for old, new in expansions.iteritems():
- assert '$(' not in new, new
- string = string.replace(old, new)
- return string
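-
-# Minimal usage sketch (hypothetical expansion dict):
-#   ExpandMacros('$(OutDir)\\foo.dll', {'$(OutDir)': 'out\\Release'})
-# returns 'out\\Release\\foo.dll'.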
-
-def _ExtractImportantEnvironment(output_of_set):
- """Extracts environment variables required for the toolchain to run from
- a textual dump output by the cmd.exe 'set' command."""
- envvars_to_save = (
- 'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
- 'include',
- 'lib',
- 'libpath',
- 'path',
- 'pathext',
- 'systemroot',
- 'temp',
- 'tmp',
- )
- env = {}
- for line in output_of_set.splitlines():
- for envvar in envvars_to_save:
- if re.match(envvar + '=', line.lower()):
- var, setting = line.split('=', 1)
- if envvar == 'path':
- # Our own rules (for running gyp-win-tool) and other actions in
- # Chromium rely on python being in the path. Add the path to this
- # python here so that if it's not in the path when ninja is run
- # later, python will still be found.
- setting = os.path.dirname(sys.executable) + os.pathsep + setting
- env[var.upper()] = setting
- break
- for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
- if required not in env:
- raise Exception('Environment variable "%s" '
- 'required to be set to valid path' % required)
- return env
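-
-# Illustrative sketch (hypothetical 'set' output): given the lines
-# 'PATH=C:\bin', 'TEMP=C:\tmp', 'SYSTEMROOT=C:\Windows', 'TMP=C:\tmp', this
-# returns {'PATH': '<dir of sys.executable>;C:\bin', 'TEMP': 'C:\tmp',
-# 'SYSTEMROOT': 'C:\Windows', 'TMP': 'C:\tmp'}.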
-
-def _FormatAsEnvironmentBlock(envvar_dict):
- """Format as an 'environment block' directly suitable for CreateProcess.
- Briefly this is a list of key=value\0, terminated by an additional \0. See
- CreateProcess documentation for more details."""
- block = ''
- nul = '\0'
- for key, value in envvar_dict.iteritems():
- block += key + '=' + value + nul
- block += nul
- return block
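-
-# e.g. _FormatAsEnvironmentBlock({'TEMP': 'C:\tmp'}) returns
-# 'TEMP=C:\tmp\0\0' (each entry is NUL-terminated, plus a final NUL).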
-
-def _ExtractCLPath(output_of_where):
- """Gets the path to cl.exe based on the output of calling the environment
- setup batch file, followed by the equivalent of `where`."""
- # Take the first line, as that's the first found in the PATH.
- for line in output_of_where.strip().splitlines():
- if line.startswith('LOC:'):
- return line[len('LOC:'):].strip()
-
-def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags,
- system_includes, open_out):
- """It's not sufficient to have the absolute path to the compiler, linker,
- etc. on Windows, as those tools rely on .dlls being in the PATH. We also
- need to support both x86 and x64 compilers within the same build (to support
- msvs_target_platform hackery). Different architectures require a different
- compiler binary, and different supporting environment variables (INCLUDE,
- LIB, LIBPATH). So, we extract the environment here, wrap all invocations
- of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
- sets up the environment, and then we do not prefix the compiler with
- an absolute path, instead preferring something like "cl.exe" in the rule
- which will then run whichever the environment setup has put in the path.
- If this procedure for generating environment files does not meet your
- requirements (e.g. for custom toolchains), you can pass
- "-G ninja_use_custom_environment_files" to gyp to suppress file
- generation and use custom environment files that you have prepared."""
- archs = ('x86', 'x64')
- if generator_flags.get('ninja_use_custom_environment_files', 0):
- cl_paths = {}
- for arch in archs:
- cl_paths[arch] = 'cl.exe'
- return cl_paths
- vs = GetVSVersion(generator_flags)
- cl_paths = {}
- for arch in archs:
- # Extract environment variables for subprocesses.
- args = vs.SetupScript(arch)
- args.extend(('&&', 'set'))
- popen = subprocess.Popen(
- args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- variables, _ = popen.communicate()
- env = _ExtractImportantEnvironment(variables)
-
- # Inject system includes from gyp files into INCLUDE.
- if system_includes:
- system_includes = system_includes | OrderedSet(
- env.get('INCLUDE', '').split(';'))
- env['INCLUDE'] = ';'.join(system_includes)
-
- env_block = _FormatAsEnvironmentBlock(env)
- f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
- f.write(env_block)
- f.close()
-
- # Find cl.exe location for this architecture.
- args = vs.SetupScript(arch)
- args.extend(('&&',
- 'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i'))
- popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
- output, _ = popen.communicate()
- cl_paths[arch] = _ExtractCLPath(output)
- return cl_paths
-
-def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
- """Emulate behavior of msvs_error_on_missing_sources present in the msvs
- generator: Check that all regular source files, i.e. not created at run time,
- exist on disk. Missing files cause needless recompilation when building via
- VS. We want builds via ninja to perform the same check, so people/bots
- aren't surprised when the VS build fails."""
- if int(generator_flags.get('msvs_error_on_missing_sources', 0)):
- no_specials = filter(lambda x: '$' not in x, sources)
- relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
- missing = filter(lambda x: not os.path.exists(x), relative)
- if missing:
- # They'll look like out\Release\..\..\stuff\things.cc, so normalize the
- # path for a slightly less crazy looking output.
- cleaned_up = [os.path.normpath(x) for x in missing]
- raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))
-
-# Sets some values in default_variables, which are required by many
-# generators when running on Windows.
-def CalculateCommonVariables(default_variables, params):
- generator_flags = params.get('generator_flags', {})
-
- # Set a variable so conditions can be based on msvs_version.
- msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
- default_variables['MSVS_VERSION'] = msvs_version.ShortName()
-
- # To determine processor word size on Windows, in addition to checking
- # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
- # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
- # contains the actual word size of the system when running through WOW64).
- if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
- '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
- default_variables['MSVS_OS_BITS'] = 64
- else:
- default_variables['MSVS_OS_BITS'] = 32
diff --git a/deps/gyp/pylib/gyp/ninja_syntax.py b/deps/gyp/pylib/gyp/ninja_syntax.py
deleted file mode 100644
index d2948f06c0..0000000000
--- a/deps/gyp/pylib/gyp/ninja_syntax.py
+++ /dev/null
@@ -1,160 +0,0 @@
-# This file comes from
-# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py
-# Do not edit! Edit the upstream one instead.
-
-"""Python module for generating .ninja files.
-
-Note that this is emphatically not a required piece of Ninja; it's
-just a helpful utility for build-file-generation systems that already
-use Python.
-"""
-
-import textwrap
-import re
-
-def escape_path(word):
- return word.replace('$ ','$$ ').replace(' ','$ ').replace(':', '$:')
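-
-# e.g. escape_path('foo bar:baz') returns 'foo$ bar$:baz'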
-
-class Writer(object):
- def __init__(self, output, width=78):
- self.output = output
- self.width = width
-
- def newline(self):
- self.output.write('\n')
-
- def comment(self, text):
- for line in textwrap.wrap(text, self.width - 2):
- self.output.write('# ' + line + '\n')
-
- def variable(self, key, value, indent=0):
- if value is None:
- return
- if isinstance(value, list):
- value = ' '.join(filter(None, value)) # Filter out empty strings.
- self._line('%s = %s' % (key, value), indent)
-
- def pool(self, name, depth):
- self._line('pool %s' % name)
- self.variable('depth', depth, indent=1)
-
- def rule(self, name, command, description=None, depfile=None,
- generator=False, pool=None, restat=False, rspfile=None,
- rspfile_content=None, deps=None):
- self._line('rule %s' % name)
- self.variable('command', command, indent=1)
- if description:
- self.variable('description', description, indent=1)
- if depfile:
- self.variable('depfile', depfile, indent=1)
- if generator:
- self.variable('generator', '1', indent=1)
- if pool:
- self.variable('pool', pool, indent=1)
- if restat:
- self.variable('restat', '1', indent=1)
- if rspfile:
- self.variable('rspfile', rspfile, indent=1)
- if rspfile_content:
- self.variable('rspfile_content', rspfile_content, indent=1)
- if deps:
- self.variable('deps', deps, indent=1)
-
- def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
- variables=None):
- outputs = self._as_list(outputs)
- all_inputs = self._as_list(inputs)[:]
- out_outputs = list(map(escape_path, outputs))
- all_inputs = list(map(escape_path, all_inputs))
-
- if implicit:
- implicit = map(escape_path, self._as_list(implicit))
- all_inputs.append('|')
- all_inputs.extend(implicit)
- if order_only:
- order_only = map(escape_path, self._as_list(order_only))
- all_inputs.append('||')
- all_inputs.extend(order_only)
-
- self._line('build %s: %s' % (' '.join(out_outputs),
- ' '.join([rule] + all_inputs)))
-
- if variables:
- if isinstance(variables, dict):
- iterator = iter(variables.items())
- else:
- iterator = iter(variables)
-
- for key, val in iterator:
- self.variable(key, val, indent=1)
-
- return outputs
-
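- # Minimal usage sketch:
- #   from StringIO import StringIO
- #   buf = StringIO()
- #   w = Writer(buf)
- #   w.rule('cc', command='gcc -c $in -o $out')
- #   w.build(['foo.o'], 'cc', inputs=['foo.c'])
- # buf now holds a 'rule cc' block followed by 'build foo.o: cc foo.c'.
-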
- def include(self, path):
- self._line('include %s' % path)
-
- def subninja(self, path):
- self._line('subninja %s' % path)
-
- def default(self, paths):
- self._line('default %s' % ' '.join(self._as_list(paths)))
-
- def _count_dollars_before_index(self, s, i):
- """Returns the number of '$' characters right in front of s[i]."""
- dollar_count = 0
- dollar_index = i - 1
- while dollar_index > 0 and s[dollar_index] == '$':
- dollar_count += 1
- dollar_index -= 1
- return dollar_count
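-
- # e.g. _count_dollars_before_index('a$$ b', 3) returns 2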
-
- def _line(self, text, indent=0):
- """Write 'text' word-wrapped at self.width characters."""
- leading_space = ' ' * indent
- while len(leading_space) + len(text) > self.width:
- # The text is too wide; wrap if possible.
-
- # Find the rightmost space that would obey our width constraint and
- # that's not an escaped space.
- available_space = self.width - len(leading_space) - len(' $')
- space = available_space
- while True:
- space = text.rfind(' ', 0, space)
- if space < 0 or \
- self._count_dollars_before_index(text, space) % 2 == 0:
- break
-
- if space < 0:
- # No such space; just use the first unescaped space we can find.
- space = available_space - 1
- while True:
- space = text.find(' ', space + 1)
- if space < 0 or \
- self._count_dollars_before_index(text, space) % 2 == 0:
- break
- if space < 0:
- # Give up on breaking.
- break
-
- self.output.write(leading_space + text[0:space] + ' $\n')
- text = text[space+1:]
-
- # Subsequent lines are continuations, so indent them.
- leading_space = ' ' * (indent+2)
-
- self.output.write(leading_space + text + '\n')
-
- def _as_list(self, input):
- if input is None:
- return []
- if isinstance(input, list):
- return input
- return [input]
-
-
-def escape(string):
- """Escape a string such that it can be embedded into a Ninja file without
- further interpretation."""
- assert '\n' not in string, 'Ninja syntax does not allow newlines'
- # We only have one special metacharacter: '$'.
- return string.replace('$', '$$')
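-
-# e.g. escape('cost: $5') returns 'cost: $$5'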
diff --git a/deps/gyp/pylib/gyp/ordered_dict.py b/deps/gyp/pylib/gyp/ordered_dict.py
deleted file mode 100644
index a1e89f9199..0000000000
--- a/deps/gyp/pylib/gyp/ordered_dict.py
+++ /dev/null
@@ -1,289 +0,0 @@
-# Unmodified from http://code.activestate.com/recipes/576693/
-# other than to add MIT license header (as specified on page, but not in code).
-# Linked from Python documentation here:
-# http://docs.python.org/2/library/collections.html#collections.OrderedDict
-#
-# This should be deleted once Py2.7 is available on all bots, see
-# http://crbug.com/241769.
-#
-# Copyright (c) 2009 Raymond Hettinger.
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-# THE SOFTWARE.
-
-# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
-# Passes Python2.7's test suite and incorporates all the latest updates.
-
-try:
- from thread import get_ident as _get_ident
-except ImportError:
- from dummy_thread import get_ident as _get_ident
-
-try:
- from _abcoll import KeysView, ValuesView, ItemsView
-except ImportError:
- pass
-
-
-class OrderedDict(dict):
- 'Dictionary that remembers insertion order'
- # An inherited dict maps keys to values.
- # The inherited dict provides __getitem__, __len__, __contains__, and get.
- # The remaining methods are order-aware.
- # Big-O running times for all methods are the same as for regular dictionaries.
-
- # The internal self.__map dictionary maps keys to links in a doubly linked list.
- # The circular doubly linked list starts and ends with a sentinel element.
- # The sentinel element never gets deleted (this simplifies the algorithm).
- # Each link is stored as a list of length three: [PREV, NEXT, KEY].
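- # For example, after od = OrderedDict([('a', 1)]) the layout is:
- #   root   = [link_a, link_a, None]   (the sentinel)
- #   link_a = [root, root, 'a']        (PREV, NEXT, KEY)
- # while the inherited dict maps 'a' -> 1 as usual.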
-
- def __init__(self, *args, **kwds):
- '''Initialize an ordered dictionary. Signature is the same as for
- regular dictionaries, but keyword arguments are not recommended
- because their insertion order is arbitrary.
-
- '''
- if len(args) > 1:
- raise TypeError('expected at most 1 arguments, got %d' % len(args))
- try:
- self.__root
- except AttributeError:
- self.__root = root = [] # sentinel node
- root[:] = [root, root, None]
- self.__map = {}
- self.__update(*args, **kwds)
-
- def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
- 'od.__setitem__(i, y) <==> od[i]=y'
- # Setting a new item creates a new link which goes at the end of the linked
- # list, and the inherited dictionary is updated with the new key/value pair.
- if key not in self:
- root = self.__root
- last = root[0]
- last[1] = root[0] = self.__map[key] = [last, root, key]
- dict_setitem(self, key, value)
-
- def __delitem__(self, key, dict_delitem=dict.__delitem__):
- 'od.__delitem__(y) <==> del od[y]'
- # Deleting an existing item uses self.__map to find the link which is
- # then removed by updating the links in the predecessor and successor nodes.
- dict_delitem(self, key)
- link_prev, link_next, key = self.__map.pop(key)
- link_prev[1] = link_next
- link_next[0] = link_prev
-
- def __iter__(self):
- 'od.__iter__() <==> iter(od)'
- root = self.__root
- curr = root[1]
- while curr is not root:
- yield curr[2]
- curr = curr[1]
-
- def __reversed__(self):
- 'od.__reversed__() <==> reversed(od)'
- root = self.__root
- curr = root[0]
- while curr is not root:
- yield curr[2]
- curr = curr[0]
-
- def clear(self):
- 'od.clear() -> None. Remove all items from od.'
- try:
- for node in self.__map.itervalues():
- del node[:]
- root = self.__root
- root[:] = [root, root, None]
- self.__map.clear()
- except AttributeError:
- pass
- dict.clear(self)
-
- def popitem(self, last=True):
- '''od.popitem() -> (k, v), return and remove a (key, value) pair.
- Pairs are returned in LIFO order if last is true or FIFO order if false.
-
- '''
- if not self:
- raise KeyError('dictionary is empty')
- root = self.__root
- if last:
- link = root[0]
- link_prev = link[0]
- link_prev[1] = root
- root[0] = link_prev
- else:
- link = root[1]
- link_next = link[1]
- root[1] = link_next
- link_next[0] = root
- key = link[2]
- del self.__map[key]
- value = dict.pop(self, key)
- return key, value
-
- # -- the following methods do not depend on the internal structure --
-
- def keys(self):
- 'od.keys() -> list of keys in od'
- return list(self)
-
- def values(self):
- 'od.values() -> list of values in od'
- return [self[key] for key in self]
-
- def items(self):
- 'od.items() -> list of (key, value) pairs in od'
- return [(key, self[key]) for key in self]
-
- def iterkeys(self):
- 'od.iterkeys() -> an iterator over the keys in od'
- return iter(self)
-
- def itervalues(self):
- 'od.itervalues() -> an iterator over the values in od'
- for k in self:
- yield self[k]
-
- def iteritems(self):
- 'od.iteritems() -> an iterator over the (key, value) items in od'
- for k in self:
- yield (k, self[k])
-
- # Suppress 'OrderedDict.update: Method has no argument':
- # pylint: disable=E0211
- def update(*args, **kwds):
- '''od.update(E, **F) -> None. Update od from dict/iterable E and F.
-
- If E is a dict instance, does: for k in E: od[k] = E[k]
- If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
- Or if E is an iterable of items, does: for k, v in E: od[k] = v
- In either case, this is followed by: for k, v in F.items(): od[k] = v
-
- '''
- if len(args) > 2:
- raise TypeError('update() takes at most 2 positional '
- 'arguments (%d given)' % (len(args),))
- elif not args:
- raise TypeError('update() takes at least 1 argument (0 given)')
- self = args[0]
- # Make progressively weaker assumptions about "other"
- other = ()
- if len(args) == 2:
- other = args[1]
- if isinstance(other, dict):
- for key in other:
- self[key] = other[key]
- elif hasattr(other, 'keys'):
- for key in other.keys():
- self[key] = other[key]
- else:
- for key, value in other:
- self[key] = value
- for key, value in kwds.items():
- self[key] = value
-
- __update = update # let subclasses override update without breaking __init__
-
- __marker = object()
-
- def pop(self, key, default=__marker):
- '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
- If key is not found, d is returned if given, otherwise KeyError is raised.
-
- '''
- if key in self:
- result = self[key]
- del self[key]
- return result
- if default is self.__marker:
- raise KeyError(key)
- return default
-
- def setdefault(self, key, default=None):
- 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
- if key in self:
- return self[key]
- self[key] = default
- return default
-
- def __repr__(self, _repr_running={}):
- 'od.__repr__() <==> repr(od)'
- call_key = id(self), _get_ident()
- if call_key in _repr_running:
- return '...'
- _repr_running[call_key] = 1
- try:
- if not self:
- return '%s()' % (self.__class__.__name__,)
- return '%s(%r)' % (self.__class__.__name__, self.items())
- finally:
- del _repr_running[call_key]
-
- def __reduce__(self):
- 'Return state information for pickling'
- items = [[k, self[k]] for k in self]
- inst_dict = vars(self).copy()
- for k in vars(OrderedDict()):
- inst_dict.pop(k, None)
- if inst_dict:
- return (self.__class__, (items,), inst_dict)
- return self.__class__, (items,)
-
- def copy(self):
- 'od.copy() -> a shallow copy of od'
- return self.__class__(self)
-
- @classmethod
- def fromkeys(cls, iterable, value=None):
- '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
- and values equal to v (which defaults to None).
-
- '''
- d = cls()
- for key in iterable:
- d[key] = value
- return d
-
- def __eq__(self, other):
- '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
- while comparison to a regular mapping is order-insensitive.
-
- '''
- if isinstance(other, OrderedDict):
- return len(self)==len(other) and self.items() == other.items()
- return dict.__eq__(self, other)
-
- def __ne__(self, other):
- return not self == other
-
- # -- the following methods are only used in Python 2.7 --
-
- def viewkeys(self):
- "od.viewkeys() -> a set-like object providing a view on od's keys"
- return KeysView(self)
-
- def viewvalues(self):
- "od.viewvalues() -> an object providing a view on od's values"
- return ValuesView(self)
-
- def viewitems(self):
- "od.viewitems() -> a set-like object providing a view on od's items"
- return ItemsView(self)
-
diff --git a/deps/gyp/pylib/gyp/simple_copy.py b/deps/gyp/pylib/gyp/simple_copy.py
deleted file mode 100644
index 74c98c5a79..0000000000
--- a/deps/gyp/pylib/gyp/simple_copy.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""A clone of the default copy.deepcopy that doesn't handle cyclic
-structures or complex types except for dicts and lists. This is
-because gyp copies such large structures that even a small per-copy
-overhead ends up taking seconds in a project the size of Chromium."""
-
-class Error(Exception):
- pass
-
-__all__ = ["Error", "deepcopy"]
-
-def deepcopy(x):
- """Deep copy operation on gyp objects such as strings, ints, dicts
- and lists. More than twice as fast as copy.deepcopy but much less
- generic."""
-
- try:
- return _deepcopy_dispatch[type(x)](x)
- except KeyError:
- raise Error(('Unsupported type %s for deepcopy. Use copy.deepcopy '
- 'or expand simple_copy support.') % type(x))
-
-_deepcopy_dispatch = d = {}
-
-def _deepcopy_atomic(x):
- return x
-
-for x in (type(None), int, long, float,
- bool, str, unicode, type):
- d[x] = _deepcopy_atomic
-
-def _deepcopy_list(x):
- return [deepcopy(a) for a in x]
-d[list] = _deepcopy_list
-
-def _deepcopy_dict(x):
- y = {}
- for key, value in x.iteritems():
- y[deepcopy(key)] = deepcopy(value)
- return y
-d[dict] = _deepcopy_dict
-
-del d
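-
-# Minimal usage sketch:
-#   from gyp.simple_copy import deepcopy
-#   copied = deepcopy({'targets': [{'target_name': 'foo'}]})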
diff --git a/deps/gyp/pylib/gyp/win_tool.py b/deps/gyp/pylib/gyp/win_tool.py
deleted file mode 100755
index bb6f1ea436..0000000000
--- a/deps/gyp/pylib/gyp/win_tool.py
+++ /dev/null
@@ -1,314 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions for Windows builds.
-
-These functions are executed via gyp-win-tool when using the ninja generator.
-"""
-
-import os
-import re
-import shutil
-import subprocess
-import stat
-import string
-import sys
-
-BASE_DIR = os.path.dirname(os.path.abspath(__file__))
-
-# A regex matching an argument corresponding to the output filename passed to
-# link.exe.
-_LINK_EXE_OUT_ARG = re.compile('/OUT:(?P<out>.+)$', re.IGNORECASE)
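-# e.g. _LINK_EXE_OUT_ARG.match('/OUT:foo.dll').group('out') == 'foo.dll'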
-
-def main(args):
- executor = WinTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class WinTool(object):
- """This class performs all the Windows tooling steps. The methods can either
- be executed directly, or dispatched from an argument list."""
-
- def _UseSeparateMspdbsrv(self, env, args):
- """Allows to use a unique instance of mspdbsrv.exe per linker instead of a
- shared one."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- if args[0] != 'link.exe':
- return
-
- # Use the output filename passed to the linker to generate an endpoint name
- # for mspdbsrv.exe.
- endpoint_name = None
- for arg in args:
- m = _LINK_EXE_OUT_ARG.match(arg)
- if m:
- endpoint_name = re.sub(r'\W+', '',
- '%s_%d' % (m.group('out'), os.getpid()))
- break
-
- if endpoint_name is None:
- return
-
- # Adds the appropriate environment variable. This will be read by link.exe
- # to know which instance of mspdbsrv.exe it should connect to (if it's
- # not set then the default endpoint is used).
- env['_MSPDBSRV_ENDPOINT_'] = endpoint_name
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like recursive-mirror to RecursiveMirror."""
- return name_string.title().replace('-', '')
-
- def _GetEnv(self, arch):
- """Gets the saved environment from a file for a given architecture."""
- # The environment is saved as an "environment block" (see CreateProcess
- # and msvs_emulation for details). We convert to a dict here.
- # Drop the last 2 NULs: one terminating the final entry and one
- # terminating the block.
- pairs = open(arch).read()[:-2].split('\0')
- kvs = [item.split('=', 1) for item in pairs]
- return dict(kvs)
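-
- # Illustrative round trip: a file containing 'PATH=C:\bin\0TEMP=C:\tmp\0\0'
- # (as written by _FormatAsEnvironmentBlock in msvs_emulation.py) parses to
- # {'PATH': 'C:\bin', 'TEMP': 'C:\tmp'}.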
-
- def ExecStamp(self, path):
- """Simple stamp command."""
- open(path, 'w').close()
-
- def ExecRecursiveMirror(self, source, dest):
- """Emulation of rm -rf out && cp -af in out."""
- if os.path.exists(dest):
- if os.path.isdir(dest):
- def _on_error(fn, path, excinfo):
- # The operation failed, possibly because the file is set to
- # read-only. If that's why, make it writable and try the op again.
- if not os.access(path, os.W_OK):
- os.chmod(path, stat.S_IWRITE)
- fn(path)
- shutil.rmtree(dest, onerror=_on_error)
- else:
- if not os.access(dest, os.W_OK):
- # Attempt to make the file writable before deleting it.
- os.chmod(dest, stat.S_IWRITE)
- os.unlink(dest)
-
- if os.path.isdir(source):
- shutil.copytree(source, dest)
- else:
- shutil.copy2(source, dest)
-
- def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
- """Filter diagnostic output from link that looks like:
- ' Creating library ui.dll.lib and object ui.dll.exp'
- This happens when there are exports from the dll or exe.
- """
- env = self._GetEnv(arch)
- if use_separate_mspdbsrv == 'True':
- self._UseSeparateMspdbsrv(env, args)
- link = subprocess.Popen([args[0].replace('/', '\\')] + list(args[1:]),
- shell=True,
- env=env,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- out, _ = link.communicate()
- for line in out.splitlines():
- if (not line.startswith(' Creating library ') and
- not line.startswith('Generating code') and
- not line.startswith('Finished generating code')):
- print line
- return link.returncode
-
- def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname,
- mt, rc, intermediate_manifest, *manifests):
- """A wrapper for handling creating a manifest resource and then executing
- a link command."""
- # The 'normal' way to do manifests is to have link generate a manifest
- # based on gathering dependencies from the object files, then merge that
- # manifest with other manifests supplied as sources, convert the merged
- # manifest to a resource, and then *relink*, including the compiled
- # version of the manifest resource. This breaks incremental linking, and
- # is generally overly complicated. Instead, we merge all the manifests
- # provided (along with one that includes what would normally be in the
- # linker-generated one, see msvs_emulation.py), and include that into the
- # first and only link. We still tell link to generate a manifest, but we
- # only use that to assert that our simpler process did not miss anything.
- variables = {
- 'python': sys.executable,
- 'arch': arch,
- 'out': out,
- 'ldcmd': ldcmd,
- 'resname': resname,
- 'mt': mt,
- 'rc': rc,
- 'intermediate_manifest': intermediate_manifest,
- 'manifests': ' '.join(manifests),
- }
- add_to_ld = ''
- if manifests:
- subprocess.check_call(
- '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
- '-manifest %(manifests)s -out:%(out)s.manifest' % variables)
- if embed_manifest == 'True':
- subprocess.check_call(
- '%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest'
- ' %(out)s.manifest.rc %(resname)s' % variables)
- subprocess.check_call(
- '%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s '
- '%(out)s.manifest.rc' % variables)
- add_to_ld = ' %(out)s.manifest.res' % variables
- subprocess.check_call(ldcmd + add_to_ld)
-
- # Run mt.exe on the theoretically complete manifest we generated, merging
- # it with the one the linker generated to confirm that the
- # linker-generated one does not add anything. This is strictly unnecessary
- # for correctness; it's only to verify that e.g. /MANIFESTDEPENDENCY was not
- # used in a #pragma comment.
- if manifests:
- # Merge the intermediate one with ours to .assert.manifest, then check
- # that .assert.manifest is identical to ours.
- subprocess.check_call(
- '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
- '-manifest %(out)s.manifest %(intermediate_manifest)s '
- '-out:%(out)s.assert.manifest' % variables)
- assert_manifest = '%(out)s.assert.manifest' % variables
- our_manifest = '%(out)s.manifest' % variables
- # Load and normalize the manifests. mt.exe sometimes removes whitespace,
- # and unfortunately sometimes doesn't.
- with open(our_manifest, 'rb') as our_f:
- with open(assert_manifest, 'rb') as assert_f:
- our_data = our_f.read().translate(None, string.whitespace)
- assert_data = assert_f.read().translate(None, string.whitespace)
- if our_data != assert_data:
- os.unlink(out)
- def dump(filename):
- sys.stderr.write('%s\n-----\n' % filename)
- with open(filename, 'rb') as f:
- sys.stderr.write(f.read() + '\n-----\n')
- dump(intermediate_manifest)
- dump(our_manifest)
- dump(assert_manifest)
- sys.stderr.write(
- 'Linker generated manifest "%s" added to final manifest "%s" '
- '(result in "%s"). '
- 'Were /MANIFEST switches used in #pragma statements? ' % (
- intermediate_manifest, our_manifest, assert_manifest))
- return 1
-
- def ExecManifestWrapper(self, arch, *args):
- """Run manifest tool with environment set. Strip out undesirable warning
- (some XML blocks are recognized by the OS loader, but not the manifest
- tool)."""
- env = self._GetEnv(arch)
- popen = subprocess.Popen(args, shell=True, env=env,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- out, _ = popen.communicate()
- for line in out.splitlines():
- if line and 'manifest authoring warning 81010002' not in line:
- print line
- return popen.returncode
-
- def ExecManifestToRc(self, arch, *args):
- """Creates a resource file pointing a SxS assembly manifest.
- |args| is tuple containing path to resource file, path to manifest file
- and resource name which can be "1" (for executables) or "2" (for DLLs)."""
- manifest_path, resource_path, resource_name = args
- with open(resource_path, 'wb') as output:
- output.write('#include <windows.h>\n%s RT_MANIFEST "%s"' % (
- resource_name,
- os.path.abspath(manifest_path).replace('\\', '/')))
-
- def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl,
- *flags):
- """Filter noisy filenames output from MIDL compile step that isn't
- quietable via command line flags.
- """
- args = ['midl', '/nologo'] + list(flags) + [
- '/out', outdir,
- '/tlb', tlb,
- '/h', h,
- '/dlldata', dlldata,
- '/iid', iid,
- '/proxy', proxy,
- idl]
- env = self._GetEnv(arch)
- popen = subprocess.Popen(args, shell=True, env=env,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- out, _ = popen.communicate()
- # Filter junk out of stdout, and write filtered versions. Output we want
- # to filter is pairs of lines that look like this:
- # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
- # objidl.idl
- lines = out.splitlines()
- prefixes = ('Processing ', '64 bit Processing ')
- processing = set(os.path.basename(x)
- for x in lines if x.startswith(prefixes))
- for line in lines:
- if not line.startswith(prefixes) and line not in processing:
- print line
- return popen.returncode
-
- def ExecAsmWrapper(self, arch, *args):
- """Filter logo banner from invocations of asm.exe."""
- env = self._GetEnv(arch)
- popen = subprocess.Popen(args, shell=True, env=env,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- out, _ = popen.communicate()
- for line in out.splitlines():
- if (not line.startswith('Copyright (C) Microsoft Corporation') and
- not line.startswith('Microsoft (R) Macro Assembler') and
- not line.startswith(' Assembling: ') and
- line):
- print line
- return popen.returncode
-
- def ExecRcWrapper(self, arch, *args):
- """Filter logo banner from invocations of rc.exe. Older versions of RC
- don't support the /nologo flag."""
- env = self._GetEnv(arch)
- popen = subprocess.Popen(args, shell=True, env=env,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- out, _ = popen.communicate()
- for line in out.splitlines():
- if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and
- not line.startswith('Copyright (C) Microsoft Corporation') and
- line):
- print line
- return popen.returncode
-
- def ExecActionWrapper(self, arch, rspfile, *dir):
- """Runs an action command line from a response file using the environment
- for |arch|. If |dir| is supplied, use that as the working directory."""
- env = self._GetEnv(arch)
- # TODO(scottmg): This is a temporary hack to get some specific variables
- # through to actions that are set after gyp-time. http://crbug.com/333738.
- for k, v in os.environ.iteritems():
- if k not in env:
- env[k] = v
- args = open(rspfile).read()
- dir = dir[0] if dir else None
- return subprocess.call(args, shell=True, env=env, cwd=dir)
-
- def ExecClCompile(self, project_dir, selected_files):
- """Executed by msvs-ninja projects when the 'ClCompile' target is used to
- build selected C/C++ files."""
- project_dir = os.path.relpath(project_dir, BASE_DIR)
- selected_files = selected_files.split(';')
- ninja_targets = [os.path.join(project_dir, filename) + '^^'
- for filename in selected_files]
- cmd = ['ninja.exe']
- cmd.extend(ninja_targets)
- return subprocess.call(cmd, shell=True, cwd=BASE_DIR)
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/deps/gyp/pylib/gyp/xcode_emulation.py b/deps/gyp/pylib/gyp/xcode_emulation.py
deleted file mode 100644
index ac6852faf9..0000000000
--- a/deps/gyp/pylib/gyp/xcode_emulation.py
+++ /dev/null
@@ -1,1627 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-This module contains classes that help to emulate xcodebuild behavior on top of
-other build systems, such as make and ninja.
-"""
-
-import copy
-import gyp.common
-import os
-import os.path
-import re
-import shlex
-import subprocess
-import sys
-import tempfile
-from gyp.common import GypError
-
-# Populated lazily by XcodeVersion, for efficiency, and to fix an issue when
-# "xcodebuild" is called too quickly (it has been found to return incorrect
-# version number).
-XCODE_VERSION_CACHE = None
-
-# Populated lazily by GetXcodeArchsDefault, to an |XcodeArchsDefault| instance
-# corresponding to the installed version of Xcode.
-XCODE_ARCHS_DEFAULT_CACHE = None
-
-
-def XcodeArchsVariableMapping(archs, archs_including_64_bit=None):
- """Constructs a dictionary with expansion for $(ARCHS_STANDARD) variable,
- and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT)."""
- mapping = {'$(ARCHS_STANDARD)': archs}
- if archs_including_64_bit:
- mapping['$(ARCHS_STANDARD_INCLUDING_64_BIT)'] = archs_including_64_bit
- return mapping
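-
-# e.g. XcodeArchsVariableMapping(['i386'], ['i386', 'x86_64']) returns
-#   {'$(ARCHS_STANDARD)': ['i386'],
-#    '$(ARCHS_STANDARD_INCLUDING_64_BIT)': ['i386', 'x86_64']}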
-
-class XcodeArchsDefault(object):
- """A class to resolve ARCHS variable from xcode_settings, resolving Xcode
- macros and implementing filtering by VALID_ARCHS. The expansion of macros
- depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and
- on the version of Xcode.
- """
-
- # Match variable like $(ARCHS_STANDARD).
- variable_pattern = re.compile(r'\$\([a-zA-Z_][a-zA-Z0-9_]*\)$')
-
- def __init__(self, default, mac, iphonesimulator, iphoneos):
- self._default = (default,)
- self._archs = {'mac': mac, 'ios': iphoneos, 'iossim': iphonesimulator}
-
- def _VariableMapping(self, sdkroot):
- """Returns the dictionary of variable mapping depending on the SDKROOT."""
- sdkroot = sdkroot.lower()
- if 'iphoneos' in sdkroot:
- return self._archs['ios']
- elif 'iphonesimulator' in sdkroot:
- return self._archs['iossim']
- else:
- return self._archs['mac']
-
- def _ExpandArchs(self, archs, sdkroot):
- """Expands variables references in ARCHS, and remove duplicates."""
- variable_mapping = self._VariableMapping(sdkroot)
- expanded_archs = []
- for arch in archs:
- if self.variable_pattern.match(arch):
- variable = arch
- try:
- variable_expansion = variable_mapping[variable]
- for arch in variable_expansion:
- if arch not in expanded_archs:
- expanded_archs.append(arch)
- except KeyError as e:
- print 'Warning: Ignoring unsupported variable "%s".' % variable
- elif arch not in expanded_archs:
- expanded_archs.append(arch)
- return expanded_archs
-
- def ActiveArchs(self, archs, valid_archs, sdkroot):
- """Expands variables references in ARCHS, and filter by VALID_ARCHS if it
- is defined (if not set, Xcode accept any value in ARCHS, otherwise, only
- values present in VALID_ARCHS are kept)."""
- expanded_archs = self._ExpandArchs(archs or self._default, sdkroot or '')
- if valid_archs:
- filtered_archs = []
- for arch in expanded_archs:
- if arch in valid_archs:
- filtered_archs.append(arch)
- expanded_archs = filtered_archs
- return expanded_archs
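-
- # Illustrative sketch (using the Xcode >= 5.1 mappings below):
- #   ActiveArchs(['$(ARCHS_STANDARD)'], ['armv7'], 'iphoneos')
- # expands to ['armv7', 'armv7s', 'arm64'] and is then filtered down to
- # ['armv7'].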
-
-
-def GetXcodeArchsDefault():
- """Returns the |XcodeArchsDefault| object to use to expand ARCHS for the
- installed version of Xcode. The default values used by Xcode for ARCHS
- and the expansion of the variables depends on the version of Xcode used.
-
- For all version anterior to Xcode 5.0 or posterior to Xcode 5.1 included
- uses $(ARCHS_STANDARD) if ARCHS is unset, while Xcode 5.0 to 5.0.2 uses
- $(ARCHS_STANDARD_INCLUDING_64_BIT). This variable was added to Xcode 5.0
- and deprecated with Xcode 5.1.
-
- For "macosx" SDKROOT, all version starting with Xcode 5.0 includes 64-bit
- architecture as part of $(ARCHS_STANDARD) and default to only building it.
-
- For "iphoneos" and "iphonesimulator" SDKROOT, 64-bit architectures are part
- of $(ARCHS_STANDARD_INCLUDING_64_BIT) from Xcode 5.0. From Xcode 5.1, they
- are also part of $(ARCHS_STANDARD).
-
- All thoses rules are coded in the construction of the |XcodeArchsDefault|
- object to use depending on the version of Xcode detected. The object is
- for performance reason."""
- global XCODE_ARCHS_DEFAULT_CACHE
- if XCODE_ARCHS_DEFAULT_CACHE:
- return XCODE_ARCHS_DEFAULT_CACHE
- xcode_version, _ = XcodeVersion()
- if xcode_version < '0500':
- XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
- '$(ARCHS_STANDARD)',
- XcodeArchsVariableMapping(['i386']),
- XcodeArchsVariableMapping(['i386']),
- XcodeArchsVariableMapping(['armv7']))
- elif xcode_version < '0510':
- XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
- '$(ARCHS_STANDARD_INCLUDING_64_BIT)',
- XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
- XcodeArchsVariableMapping(['i386'], ['i386', 'x86_64']),
- XcodeArchsVariableMapping(
- ['armv7', 'armv7s'],
- ['armv7', 'armv7s', 'arm64']))
- else:
- XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
- '$(ARCHS_STANDARD)',
- XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
- XcodeArchsVariableMapping(['i386', 'x86_64'], ['i386', 'x86_64']),
- XcodeArchsVariableMapping(
- ['armv7', 'armv7s', 'arm64'],
- ['armv7', 'armv7s', 'arm64']))
- return XCODE_ARCHS_DEFAULT_CACHE
-
-
-class XcodeSettings(object):
- """A class that understands the gyp 'xcode_settings' object."""
-
- # Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
- # at class-level for efficiency.
- _sdk_path_cache = {}
- _sdk_root_cache = {}
-
- # Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so
- # cached at class-level for efficiency.
- _plist_cache = {}
-
- # Populated lazily by GetIOSPostbuilds. Shared by all XcodeSettings, so
- # cached at class-level for efficiency.
- _codesigning_key_cache = {}
-
- def __init__(self, spec):
- self.spec = spec
-
- self.isIOS = False
-
- # Per-target 'xcode_settings' are pushed down into configs earlier by gyp.
- # This means self.xcode_settings[config] always contains all settings
- # for that config -- the per-target settings as well. Settings that are
- # the same for all configs are implicitly per-target settings.
- self.xcode_settings = {}
- configs = spec['configurations']
- for configname, config in configs.iteritems():
- self.xcode_settings[configname] = config.get('xcode_settings', {})
- self._ConvertConditionalKeys(configname)
- if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET',
- None):
- self.isIOS = True
-
- # This is only non-None temporarily during the execution of some methods.
- self.configname = None
-
- # Used by _AdjustLibrary to match .a and .dylib entries in libraries.
- self.library_re = re.compile(r'^lib([^/]+)\.(a|dylib)$')
-
- def _ConvertConditionalKeys(self, configname):
- """Converts or warns on conditional keys. Xcode supports conditional keys,
- such as CODE_SIGN_IDENTITY[sdk=iphoneos*]. This is a partial implementation
- with some keys converted while the rest force a warning."""
- settings = self.xcode_settings[configname]
- conditional_keys = [key for key in settings if key.endswith(']')]
- for key in conditional_keys:
- # If you need more, speak up at http://crbug.com/122592
- if key.endswith("[sdk=iphoneos*]"):
- if configname.endswith("iphoneos"):
- new_key = key.split("[")[0]
- settings[new_key] = settings[key]
- else:
- print 'Warning: Conditional keys not implemented, ignoring:', \
- ' '.join(conditional_keys)
- del settings[key]
-
- def _Settings(self):
- assert self.configname
- return self.xcode_settings[self.configname]
-
- def _Test(self, test_key, cond_key, default):
- return self._Settings().get(test_key, default) == cond_key
-
- def _Appendf(self, lst, test_key, format_str, default=None):
- if test_key in self._Settings():
- lst.append(format_str % str(self._Settings()[test_key]))
- elif default:
- lst.append(format_str % str(default))
-
- def _WarnUnimplemented(self, test_key):
- if test_key in self._Settings():
- print 'Warning: Ignoring not yet implemented key "%s".' % test_key
-
- def IsBinaryOutputFormat(self, configname):
- default = "binary" if self.isIOS else "xml"
- plist_format = self.xcode_settings[configname].get(
- 'INFOPLIST_OUTPUT_FORMAT', default)
- return plist_format == "binary"
-
- def _IsBundle(self):
- return int(self.spec.get('mac_bundle', 0)) != 0
-
- def _IsIosAppExtension(self):
- return int(self.spec.get('ios_app_extension', 0)) != 0
-
- def _IsIosWatchKitExtension(self):
- return int(self.spec.get('ios_watchkit_extension', 0)) != 0
-
- def _IsIosWatchApp(self):
- return int(self.spec.get('ios_watch_app', 0)) != 0
-
- def GetFrameworkVersion(self):
- """Returns the framework version of the current target. Only valid for
- bundles."""
- assert self._IsBundle()
- return self.GetPerTargetSetting('FRAMEWORK_VERSION', default='A')
-
- def GetWrapperExtension(self):
- """Returns the bundle extension (.app, .framework, .plugin, etc). Only
- valid for bundles."""
- assert self._IsBundle()
- if self.spec['type'] in ('loadable_module', 'shared_library'):
- default_wrapper_extension = {
- 'loadable_module': 'bundle',
- 'shared_library': 'framework',
- }[self.spec['type']]
- wrapper_extension = self.GetPerTargetSetting(
- 'WRAPPER_EXTENSION', default=default_wrapper_extension)
- return '.' + self.spec.get('product_extension', wrapper_extension)
- elif self.spec['type'] == 'executable':
- if self._IsIosAppExtension() or self._IsIosWatchKitExtension():
- return '.' + self.spec.get('product_extension', 'appex')
- else:
- return '.' + self.spec.get('product_extension', 'app')
- else:
- assert False, "Don't know extension for '%s', target '%s'" % (
- self.spec['type'], self.spec['target_name'])
-
- def GetProductName(self):
- """Returns PRODUCT_NAME."""
- return self.spec.get('product_name', self.spec['target_name'])
-
- def GetFullProductName(self):
- """Returns FULL_PRODUCT_NAME."""
- if self._IsBundle():
- return self.GetWrapperName()
- else:
- return self._GetStandaloneBinaryPath()
-
- def GetWrapperName(self):
- """Returns the directory name of the bundle represented by this target.
- Only valid for bundles."""
- assert self._IsBundle()
- return self.GetProductName() + self.GetWrapperExtension()
-
- def GetBundleContentsFolderPath(self):
- """Returns the qualified path to the bundle's contents folder. E.g.
- Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
- if self.isIOS:
- return self.GetWrapperName()
- assert self._IsBundle()
- if self.spec['type'] == 'shared_library':
- return os.path.join(
- self.GetWrapperName(), 'Versions', self.GetFrameworkVersion())
- else:
- # loadable_modules have a 'Contents' folder like executables.
- return os.path.join(self.GetWrapperName(), 'Contents')
-
- def GetBundleResourceFolder(self):
- """Returns the qualified path to the bundle's resource folder. E.g.
- Chromium.app/Contents/Resources. Only valid for bundles."""
- assert self._IsBundle()
- if self.isIOS:
- return self.GetBundleContentsFolderPath()
- return os.path.join(self.GetBundleContentsFolderPath(), 'Resources')
-
- def GetBundlePlistPath(self):
- """Returns the qualified path to the bundle's plist file. E.g.
- Chromium.app/Contents/Info.plist. Only valid for bundles."""
- assert self._IsBundle()
- if self.spec['type'] in ('executable', 'loadable_module'):
- return os.path.join(self.GetBundleContentsFolderPath(), 'Info.plist')
- else:
- return os.path.join(self.GetBundleContentsFolderPath(),
- 'Resources', 'Info.plist')
-
- def GetProductType(self):
- """Returns the PRODUCT_TYPE of this target."""
- if self._IsIosAppExtension():
- assert self._IsBundle(), ('ios_app_extension flag requires mac_bundle '
- '(target %s)' % self.spec['target_name'])
- return 'com.apple.product-type.app-extension'
- if self._IsIosWatchKitExtension():
- assert self._IsBundle(), ('ios_watchkit_extension flag requires '
- 'mac_bundle (target %s)' % self.spec['target_name'])
- return 'com.apple.product-type.watchkit-extension'
- if self._IsIosWatchApp():
- assert self._IsBundle(), ('ios_watch_app flag requires mac_bundle '
- '(target %s)' % self.spec['target_name'])
- return 'com.apple.product-type.application.watchapp'
- if self._IsBundle():
- return {
- 'executable': 'com.apple.product-type.application',
- 'loadable_module': 'com.apple.product-type.bundle',
- 'shared_library': 'com.apple.product-type.framework',
- }[self.spec['type']]
- else:
- return {
- 'executable': 'com.apple.product-type.tool',
- 'loadable_module': 'com.apple.product-type.library.dynamic',
- 'shared_library': 'com.apple.product-type.library.dynamic',
- 'static_library': 'com.apple.product-type.library.static',
- }[self.spec['type']]
-
- def GetMachOType(self):
- """Returns the MACH_O_TYPE of this target."""
- # Weird, but matches Xcode.
- if not self._IsBundle() and self.spec['type'] == 'executable':
- return ''
- return {
- 'executable': 'mh_execute',
- 'static_library': 'staticlib',
- 'shared_library': 'mh_dylib',
- 'loadable_module': 'mh_bundle',
- }[self.spec['type']]
-
- def _GetBundleBinaryPath(self):
- """Returns the name of the bundle binary of by this target.
- E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
- assert self._IsBundle()
- if self.spec['type'] == 'shared_library' or self.isIOS:
- path = self.GetBundleContentsFolderPath()
- elif self.spec['type'] in ('executable', 'loadable_module'):
- path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS')
- return os.path.join(path, self.GetExecutableName())
-
- def _GetStandaloneExecutableSuffix(self):
- if 'product_extension' in self.spec:
- return '.' + self.spec['product_extension']
- return {
- 'executable': '',
- 'static_library': '.a',
- 'shared_library': '.dylib',
- 'loadable_module': '.so',
- }[self.spec['type']]
-
- def _GetStandaloneExecutablePrefix(self):
- return self.spec.get('product_prefix', {
- 'executable': '',
- 'static_library': 'lib',
- 'shared_library': 'lib',
- # Non-bundled loadable_modules are called foo.so for some reason
- # (that is, .so and no prefix) with the xcode build -- match that.
- 'loadable_module': '',
- }[self.spec['type']])
-
- def _GetStandaloneBinaryPath(self):
- """Returns the name of the non-bundle binary represented by this target.
- E.g. hello_world. Only valid for non-bundles."""
- assert not self._IsBundle()
- assert self.spec['type'] in (
- 'executable', 'shared_library', 'static_library', 'loadable_module'), (
- 'Unexpected type %s' % self.spec['type'])
- target = self.spec['target_name']
- if self.spec['type'] == 'static_library':
- if target[:3] == 'lib':
- target = target[3:]
- elif self.spec['type'] in ('loadable_module', 'shared_library'):
- if target[:3] == 'lib':
- target = target[3:]
-
- target_prefix = self._GetStandaloneExecutablePrefix()
- target = self.spec.get('product_name', target)
- target_ext = self._GetStandaloneExecutableSuffix()
- return target_prefix + target + target_ext
-
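- # Editor's worked example (not part of the original file): for a target
- # named 'libcrypto' of type 'shared_library' with no 'product_name', the
- # 'lib' prefix is stripped and re-added with the platform suffix:
- #   'libcrypto' -> 'crypto' -> 'lib' + 'crypto' + '.dylib' == 'libcrypto.dylib'
-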
- def GetExecutableName(self):
- """Returns the executable name of the bundle represented by this target.
- E.g. Chromium."""
- if self._IsBundle():
- return self.spec.get('product_name', self.spec['target_name'])
- else:
- return self._GetStandaloneBinaryPath()
-
- def GetExecutablePath(self):
- """Returns the directory name of the bundle represented by this target. E.g.
- Chromium.app/Contents/MacOS/Chromium."""
- if self._IsBundle():
- return self._GetBundleBinaryPath()
- else:
- return self._GetStandaloneBinaryPath()
-
- def GetActiveArchs(self, configname):
- """Returns the architectures this target should be built for."""
- config_settings = self.xcode_settings[configname]
- xcode_archs_default = GetXcodeArchsDefault()
- return xcode_archs_default.ActiveArchs(
- config_settings.get('ARCHS'),
- config_settings.get('VALID_ARCHS'),
- config_settings.get('SDKROOT'))
-
- def _GetSdkVersionInfoItem(self, sdk, infoitem):
- # xcodebuild requires Xcode and can't run on Command Line Tools-only
- # systems from 10.7 onward.
- # Since the CLT has no SDK paths anyway, returning None is the
- # most sensible route and should still do the right thing.
- try:
- return GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem])
- except:
- pass
-
- def _SdkRoot(self, configname):
- if configname is None:
- configname = self.configname
- return self.GetPerConfigSetting('SDKROOT', configname, default='')
-
- def _SdkPath(self, configname=None):
- sdk_root = self._SdkRoot(configname)
- if sdk_root.startswith('/'):
- return sdk_root
- return self._XcodeSdkPath(sdk_root)
-
- def _XcodeSdkPath(self, sdk_root):
- if sdk_root not in XcodeSettings._sdk_path_cache:
- sdk_path = self._GetSdkVersionInfoItem(sdk_root, 'Path')
- XcodeSettings._sdk_path_cache[sdk_root] = sdk_path
- if sdk_root:
- XcodeSettings._sdk_root_cache[sdk_path] = sdk_root
- return XcodeSettings._sdk_path_cache[sdk_root]
-
- def _AppendPlatformVersionMinFlags(self, lst):
- self._Appendf(lst, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s')
- if 'IPHONEOS_DEPLOYMENT_TARGET' in self._Settings():
- # TODO: Implement this better?
- sdk_path_basename = os.path.basename(self._SdkPath())
- if sdk_path_basename.lower().startswith('iphonesimulator'):
- self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
- '-mios-simulator-version-min=%s')
- else:
- self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
- '-miphoneos-version-min=%s')
-
- def GetCflags(self, configname, arch=None):
- """Returns flags that need to be added to .c, .cc, .m, and .mm
- compilations."""
- # This function (and the similar ones below) does not offer complete
- # emulation of all xcode_settings keys. They're implemented on demand.
-
- self.configname = configname
- cflags = []
-
- sdk_root = self._SdkPath()
- if 'SDKROOT' in self._Settings() and sdk_root:
- cflags.append('-isysroot %s' % sdk_root)
-
- if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'):
- cflags.append('-Wconstant-conversion')
-
- if self._Test('GCC_CHAR_IS_UNSIGNED_CHAR', 'YES', default='NO'):
- cflags.append('-funsigned-char')
-
- if self._Test('GCC_CW_ASM_SYNTAX', 'YES', default='YES'):
- cflags.append('-fasm-blocks')
-
- if 'GCC_DYNAMIC_NO_PIC' in self._Settings():
- if self._Settings()['GCC_DYNAMIC_NO_PIC'] == 'YES':
- cflags.append('-mdynamic-no-pic')
- else:
- pass
- # TODO: In this case, it depends on the target. Xcode passes
- # -mdynamic-no-pic by default for executables and possibly static libs,
- # according to mento.
-
- if self._Test('GCC_ENABLE_PASCAL_STRINGS', 'YES', default='YES'):
- cflags.append('-mpascal-strings')
-
- self._Appendf(cflags, 'GCC_OPTIMIZATION_LEVEL', '-O%s', default='s')
-
- if self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES'):
- dbg_format = self._Settings().get('DEBUG_INFORMATION_FORMAT', 'dwarf')
- if dbg_format == 'dwarf':
- cflags.append('-gdwarf-2')
- elif dbg_format == 'stabs':
- raise NotImplementedError('stabs debug format is not supported yet.')
- elif dbg_format == 'dwarf-with-dsym':
- cflags.append('-gdwarf-2')
- else:
- raise NotImplementedError('Unknown debug format %s' % dbg_format)
-
- if self._Settings().get('GCC_STRICT_ALIASING') == 'YES':
- cflags.append('-fstrict-aliasing')
- elif self._Settings().get('GCC_STRICT_ALIASING') == 'NO':
- cflags.append('-fno-strict-aliasing')
-
- if self._Test('GCC_SYMBOLS_PRIVATE_EXTERN', 'YES', default='NO'):
- cflags.append('-fvisibility=hidden')
-
- if self._Test('GCC_TREAT_WARNINGS_AS_ERRORS', 'YES', default='NO'):
- cflags.append('-Werror')
-
- if self._Test('GCC_WARN_ABOUT_MISSING_NEWLINE', 'YES', default='NO'):
- cflags.append('-Wnewline-eof')
-
- # In Xcode, this is only activated when GCC_COMPILER_VERSION is clang or
- # llvm-gcc. It also requires a fairly recent libtool, and
- # if the system clang isn't used, DYLD_LIBRARY_PATH needs to contain the
- # path to the libLTO.dylib that matches the used clang.
- if self._Test('LLVM_LTO', 'YES', default='NO'):
- cflags.append('-flto')
-
- self._AppendPlatformVersionMinFlags(cflags)
-
- # TODO:
- if self._Test('COPY_PHASE_STRIP', 'YES', default='NO'):
- self._WarnUnimplemented('COPY_PHASE_STRIP')
- self._WarnUnimplemented('GCC_DEBUGGING_SYMBOLS')
- self._WarnUnimplemented('GCC_ENABLE_OBJC_EXCEPTIONS')
-
- # TODO: This is exported correctly, but assigning to it is not supported.
- self._WarnUnimplemented('MACH_O_TYPE')
- self._WarnUnimplemented('PRODUCT_TYPE')
-
- if arch is not None:
- archs = [arch]
- else:
- assert self.configname
- archs = self.GetActiveArchs(self.configname)
- if len(archs) != 1:
- # TODO: Supporting fat binaries will be annoying.
- self._WarnUnimplemented('ARCHS')
- archs = ['i386']
- cflags.append('-arch ' + archs[0])
-
- if archs[0] in ('i386', 'x86_64'):
- if self._Test('GCC_ENABLE_SSE3_EXTENSIONS', 'YES', default='NO'):
- cflags.append('-msse3')
- if self._Test('GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS', 'YES',
- default='NO'):
- cflags.append('-mssse3') # Note 3rd 's'.
- if self._Test('GCC_ENABLE_SSE41_EXTENSIONS', 'YES', default='NO'):
- cflags.append('-msse4.1')
- if self._Test('GCC_ENABLE_SSE42_EXTENSIONS', 'YES', default='NO'):
- cflags.append('-msse4.2')
-
- cflags += self._Settings().get('WARNING_CFLAGS', [])
-
- if sdk_root:
- framework_root = sdk_root
- else:
- framework_root = ''
- config = self.spec['configurations'][self.configname]
- framework_dirs = config.get('mac_framework_dirs', [])
- for directory in framework_dirs:
- cflags.append('-F' + directory.replace('$(SDKROOT)', framework_root))
-
- self.configname = None
- return cflags
-
- def GetCflagsC(self, configname):
- """Returns flags that need to be added to .c, and .m compilations."""
- self.configname = configname
- cflags_c = []
- if self._Settings().get('GCC_C_LANGUAGE_STANDARD', '') == 'ansi':
- cflags_c.append('-ansi')
- else:
- self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s')
- cflags_c += self._Settings().get('OTHER_CFLAGS', [])
- self.configname = None
- return cflags_c
-
- def GetCflagsCC(self, configname):
- """Returns flags that need to be added to .cc, and .mm compilations."""
- self.configname = configname
- cflags_cc = []
-
- clang_cxx_language_standard = self._Settings().get(
- 'CLANG_CXX_LANGUAGE_STANDARD')
- # Note: Don't map c++0x to c++11, so that c++0x can still be used with
- # older clangs that don't understand c++11 yet (like Xcode 4.2's).
- if clang_cxx_language_standard:
- cflags_cc.append('-std=%s' % clang_cxx_language_standard)
-
- self._Appendf(cflags_cc, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
-
- if self._Test('GCC_ENABLE_CPP_RTTI', 'NO', default='YES'):
- cflags_cc.append('-fno-rtti')
- if self._Test('GCC_ENABLE_CPP_EXCEPTIONS', 'NO', default='YES'):
- cflags_cc.append('-fno-exceptions')
- if self._Test('GCC_INLINES_ARE_PRIVATE_EXTERN', 'YES', default='NO'):
- cflags_cc.append('-fvisibility-inlines-hidden')
- if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'):
- cflags_cc.append('-fno-threadsafe-statics')
- # Note: This flag is a no-op for clang, it only has an effect for gcc.
- if self._Test('GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO', 'NO', default='YES'):
- cflags_cc.append('-Wno-invalid-offsetof')
-
- other_ccflags = []
-
- for flag in self._Settings().get('OTHER_CPLUSPLUSFLAGS', ['$(inherited)']):
- # TODO: More general variable expansion. Missing in many other places too.
- if flag in ('$inherited', '$(inherited)', '${inherited}'):
- flag = '$OTHER_CFLAGS'
- if flag in ('$OTHER_CFLAGS', '$(OTHER_CFLAGS)', '${OTHER_CFLAGS}'):
- other_ccflags += self._Settings().get('OTHER_CFLAGS', [])
- else:
- other_ccflags.append(flag)
- cflags_cc += other_ccflags
-
- self.configname = None
- return cflags_cc
-
- def _AddObjectiveCGarbageCollectionFlags(self, flags):
- gc_policy = self._Settings().get('GCC_ENABLE_OBJC_GC', 'unsupported')
- if gc_policy == 'supported':
- flags.append('-fobjc-gc')
- elif gc_policy == 'required':
- flags.append('-fobjc-gc-only')
-
- def _AddObjectiveCARCFlags(self, flags):
- if self._Test('CLANG_ENABLE_OBJC_ARC', 'YES', default='NO'):
- flags.append('-fobjc-arc')
-
- def _AddObjectiveCMissingPropertySynthesisFlags(self, flags):
- if self._Test('CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS',
- 'YES', default='NO'):
- flags.append('-Wobjc-missing-property-synthesis')
-
- def GetCflagsObjC(self, configname):
- """Returns flags that need to be added to .m compilations."""
- self.configname = configname
- cflags_objc = []
- self._AddObjectiveCGarbageCollectionFlags(cflags_objc)
- self._AddObjectiveCARCFlags(cflags_objc)
- self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objc)
- self.configname = None
- return cflags_objc
-
- def GetCflagsObjCC(self, configname):
- """Returns flags that need to be added to .mm compilations."""
- self.configname = configname
- cflags_objcc = []
- self._AddObjectiveCGarbageCollectionFlags(cflags_objcc)
- self._AddObjectiveCARCFlags(cflags_objcc)
- self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objcc)
- if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'):
- cflags_objcc.append('-fobjc-call-cxx-cdtors')
- self.configname = None
- return cflags_objcc
-
- def GetInstallNameBase(self):
- """Return DYLIB_INSTALL_NAME_BASE for this target."""
- # Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
- if (self.spec['type'] != 'shared_library' and
- (self.spec['type'] != 'loadable_module' or self._IsBundle())):
- return None
- install_base = self.GetPerTargetSetting(
- 'DYLIB_INSTALL_NAME_BASE',
- default='/Library/Frameworks' if self._IsBundle() else '/usr/local/lib')
- return install_base
-
- def _StandardizePath(self, path):
- """Do :standardizepath processing for path."""
- # I'm not quite sure what :standardizepath does. Just call normpath(),
- # but don't let @executable_path/../foo collapse to foo.
- if '/' in path:
- prefix, rest = '', path
- if path.startswith('@'):
- prefix, rest = path.split('/', 1)
- rest = os.path.normpath(rest) # :standardizepath
- path = os.path.join(prefix, rest)
- return path
-
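- # Editor's worked example (not part of the original file):
- #   '/usr/local/./lib' -> '/usr/local/lib'
- #   '@executable_path/../Frameworks' -> '@executable_path/../Frameworks'
- #   (the '@' prefix is held out of normpath(), so '..' is not collapsed)
-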
- def GetInstallName(self):
- """Return LD_DYLIB_INSTALL_NAME for this target."""
- # Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
- if (self.spec['type'] != 'shared_library' and
- (self.spec['type'] != 'loadable_module' or self._IsBundle())):
- return None
-
- default_install_name = \
- '$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)'
- install_name = self.GetPerTargetSetting(
- 'LD_DYLIB_INSTALL_NAME', default=default_install_name)
-
- # Hardcode support for the variables used in chromium for now, to
- # unblock people using the make build.
- if '$' in install_name:
- assert install_name in ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/'
- '$(WRAPPER_NAME)/$(PRODUCT_NAME)', default_install_name), (
- 'Variables in LD_DYLIB_INSTALL_NAME are not generally supported '
- 'yet in target \'%s\' (got \'%s\')' %
- (self.spec['target_name'], install_name))
-
- install_name = install_name.replace(
- '$(DYLIB_INSTALL_NAME_BASE:standardizepath)',
- self._StandardizePath(self.GetInstallNameBase()))
- if self._IsBundle():
- # These are only valid for bundles, hence the |if|.
- install_name = install_name.replace(
- '$(WRAPPER_NAME)', self.GetWrapperName())
- install_name = install_name.replace(
- '$(PRODUCT_NAME)', self.GetProductName())
- else:
- assert '$(WRAPPER_NAME)' not in install_name
- assert '$(PRODUCT_NAME)' not in install_name
-
- install_name = install_name.replace(
- '$(EXECUTABLE_PATH)', self.GetExecutablePath())
- return install_name
-
- def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path):
- """Checks if ldflag contains a filename and if so remaps it from
- gyp-directory-relative to build-directory-relative."""
- # This list is expanded on demand.
- # They get matched as:
- # -exported_symbols_list file
- # -Wl,exported_symbols_list file
- # -Wl,exported_symbols_list,file
- LINKER_FILE = r'(\S+)'
- WORD = r'\S+'
- linker_flags = [
- ['-exported_symbols_list', LINKER_FILE], # Needed for NaCl.
- ['-unexported_symbols_list', LINKER_FILE],
- ['-reexported_symbols_list', LINKER_FILE],
- ['-sectcreate', WORD, WORD, LINKER_FILE], # Needed for remoting.
- ]
- for flag_pattern in linker_flags:
- regex = re.compile('(?:-Wl,)?' + '[ ,]'.join(flag_pattern))
- m = regex.match(ldflag)
- if m:
- ldflag = ldflag[:m.start(1)] + gyp_to_build_path(m.group(1)) + \
- ldflag[m.end(1):]
- # Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS,
- # TODO(thakis): Update ffmpeg.gyp):
- if ldflag.startswith('-L'):
- ldflag = '-L' + gyp_to_build_path(ldflag[len('-L'):])
- return ldflag
-
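- # Editor's worked example (not part of the original file), assuming a
- # hypothetical gyp_to_build_path that maps 'sym.list' to '../../sym.list':
- #   '-Wl,-exported_symbols_list,sym.list'
- #     -> '-Wl,-exported_symbols_list,../../sym.list'
-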
- def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
- """Returns flags that need to be passed to the linker.
-
- Args:
- configname: The name of the configuration to get ld flags for.
- product_dir: The directory where products such as static and dynamic
- libraries are placed. This is added to the library search path.
- gyp_to_build_path: A function that converts paths relative to the
- current gyp file to paths relative to the build directory.
- """
- self.configname = configname
- ldflags = []
-
- # The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS
- # can contain entries that depend on this. Explicitly absolutify these.
- for ldflag in self._Settings().get('OTHER_LDFLAGS', []):
- ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path))
-
- if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'):
- ldflags.append('-Wl,-dead_strip')
-
- if self._Test('PREBINDING', 'YES', default='NO'):
- ldflags.append('-Wl,-prebind')
-
- self._Appendf(
- ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s')
- self._Appendf(
- ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s')
-
- self._AppendPlatformVersionMinFlags(ldflags)
-
- if 'SDKROOT' in self._Settings() and self._SdkPath():
- ldflags.append('-isysroot ' + self._SdkPath())
-
- for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []):
- ldflags.append('-L' + gyp_to_build_path(library_path))
-
- if 'ORDER_FILE' in self._Settings():
- ldflags.append('-Wl,-order_file ' +
- '-Wl,' + gyp_to_build_path(
- self._Settings()['ORDER_FILE']))
-
- if arch is not None:
- archs = [arch]
- else:
- assert self.configname
- archs = self.GetActiveArchs(self.configname)
- if len(archs) != 1:
- # TODO: Supporting fat binaries will be annoying.
- self._WarnUnimplemented('ARCHS')
- archs = ['i386']
- ldflags.append('-arch ' + archs[0])
-
- # Xcode adds the product directory by default.
- ldflags.append('-L' + product_dir)
-
- install_name = self.GetInstallName()
- if install_name and self.spec['type'] != 'loadable_module':
- ldflags.append('-install_name ' + install_name.replace(' ', r'\ '))
-
- for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []):
- ldflags.append('-Wl,-rpath,' + rpath)
-
- sdk_root = self._SdkPath()
- if not sdk_root:
- sdk_root = ''
- config = self.spec['configurations'][self.configname]
- framework_dirs = config.get('mac_framework_dirs', [])
- for directory in framework_dirs:
- ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
-
- is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension()
- if sdk_root and is_extension:
- # Adds the link flags for extensions. These flags are common for all
- # extensions and provide the loader and main function.
- # These flags reflect the options used by Xcode when building
- # extensions.
- ldflags.append('-lpkstart')
- # XcodeVersion() returns a (version, build) tuple; compare the version.
- if XcodeVersion()[0] < '0900':
- ldflags.append(sdk_root +
- '/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit')
- ldflags.append('-fapplication-extension')
- ldflags.append('-Xlinker -rpath '
- '-Xlinker @executable_path/../../Frameworks')
-
- self._Appendf(ldflags, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
-
- self.configname = None
- return ldflags
-
- def GetLibtoolflags(self, configname):
- """Returns flags that need to be passed to the static linker.
-
- Args:
- configname: The name of the configuration to get ld flags for.
- """
- self.configname = configname
- libtoolflags = []
-
- for libtoolflag in self._Settings().get('OTHER_LDFLAGS', []):
- libtoolflags.append(libtoolflag)
- # TODO(thakis): ARCHS?
-
- self.configname = None
- return libtoolflags
-
- def GetPerTargetSettings(self):
- """Gets a list of all the per-target settings. This will only fetch keys
- whose values are the same across all configurations."""
- first_pass = True
- result = {}
- for configname in sorted(self.xcode_settings.keys()):
- if first_pass:
- result = dict(self.xcode_settings[configname])
- first_pass = False
- else:
- for key, value in self.xcode_settings[configname].iteritems():
- if key not in result:
- continue
- elif result[key] != value:
- del result[key]
- return result
-
- def GetPerConfigSetting(self, setting, configname, default=None):
- if configname in self.xcode_settings:
- return self.xcode_settings[configname].get(setting, default)
- else:
- return self.GetPerTargetSetting(setting, default)
-
- def GetPerTargetSetting(self, setting, default=None):
- """Tries to get xcode_settings.setting from spec. Assumes that the setting
- has the same value in all configurations and throws otherwise."""
- is_first_pass = True
- result = None
- for configname in sorted(self.xcode_settings.keys()):
- if is_first_pass:
- result = self.xcode_settings[configname].get(setting, None)
- is_first_pass = False
- else:
- assert result == self.xcode_settings[configname].get(setting, None), (
- "Expected per-target setting for '%s', got per-config setting "
- "(target %s)" % (setting, self.spec['target_name']))
- if result is None:
- return default
- return result
-
- def _GetStripPostbuilds(self, configname, output_binary, quiet):
- """Returns a list of shell commands that contain the shell commands
- neccessary to strip this target's binary. These should be run as postbuilds
- before the actual postbuilds run."""
- self.configname = configname
-
- result = []
- if (self._Test('DEPLOYMENT_POSTPROCESSING', 'YES', default='NO') and
- self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')):
-
- default_strip_style = 'debugging'
- if self.spec['type'] == 'loadable_module' and self._IsBundle():
- default_strip_style = 'non-global'
- elif self.spec['type'] == 'executable':
- default_strip_style = 'all'
-
- strip_style = self._Settings().get('STRIP_STYLE', default_strip_style)
- strip_flags = {
- 'all': '',
- 'non-global': '-x',
- 'debugging': '-S',
- }[strip_style]
-
- explicit_strip_flags = self._Settings().get('STRIPFLAGS', '')
- if explicit_strip_flags:
- strip_flags += ' ' + _NormalizeEnvVarReferences(explicit_strip_flags)
-
- if not quiet:
- result.append('echo STRIP\\(%s\\)' % self.spec['target_name'])
- result.append('strip %s %s' % (strip_flags, output_binary))
-
- self.configname = None
- return result
-
- def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet):
- """Returns a list of shell commands that contain the shell commands
- neccessary to massage this target's debug information. These should be run
- as postbuilds before the actual postbuilds run."""
- self.configname = configname
-
- # For static libraries, no dSYMs are created.
- result = []
- if (self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES') and
- self._Test(
- 'DEBUG_INFORMATION_FORMAT', 'dwarf-with-dsym', default='dwarf') and
- self.spec['type'] != 'static_library'):
- if not quiet:
- result.append('echo DSYMUTIL\\(%s\\)' % self.spec['target_name'])
- result.append('dsymutil %s -o %s' % (output_binary, output + '.dSYM'))
-
- self.configname = None
- return result
-
- def _GetTargetPostbuilds(self, configname, output, output_binary,
- quiet=False):
- """Returns a list of shell commands that contain the shell commands
- to run as postbuilds for this target, before the actual postbuilds."""
- # dSYMs need to build before stripping happens.
- return (
- self._GetDebugInfoPostbuilds(configname, output, output_binary, quiet) +
- self._GetStripPostbuilds(configname, output_binary, quiet))
-
- def _GetIOSPostbuilds(self, configname, output_binary):
- """Return a shell command to codesign the iOS output binary so it can
- be deployed to a device. This should be run as the very last step of the
- build."""
- if not (self.isIOS and self.spec['type'] == 'executable'):
- return []
-
- settings = self.xcode_settings[configname]
- key = self._GetIOSCodeSignIdentityKey(settings)
- if not key:
- return []
-
- # Warn for any unimplemented signing xcode keys.
- unimpl = ['OTHER_CODE_SIGN_FLAGS']
- unimpl = set(unimpl) & set(self.xcode_settings[configname].keys())
- if unimpl:
- print 'Warning: Some codesign keys not implemented, ignoring: %s' % (
- ', '.join(sorted(unimpl)))
-
- return ['%s code-sign-bundle "%s" "%s" "%s" "%s"' % (
- os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
- settings.get('CODE_SIGN_RESOURCE_RULES_PATH', ''),
- settings.get('CODE_SIGN_ENTITLEMENTS', ''),
- settings.get('PROVISIONING_PROFILE', ''))
- ]
-
- def _GetIOSCodeSignIdentityKey(self, settings):
- identity = settings.get('CODE_SIGN_IDENTITY')
- if not identity:
- return None
- if identity not in XcodeSettings._codesigning_key_cache:
- output = subprocess.check_output(
- ['security', 'find-identity', '-p', 'codesigning', '-v'])
- for line in output.splitlines():
- if identity in line:
- fingerprint = line.split()[1]
- cache = XcodeSettings._codesigning_key_cache
- assert identity not in cache or fingerprint == cache[identity], (
- "Multiple codesigning fingerprints for identity: %s" % identity)
- XcodeSettings._codesigning_key_cache[identity] = fingerprint
- return XcodeSettings._codesigning_key_cache.get(identity, '')
-
- def AddImplicitPostbuilds(self, configname, output, output_binary,
- postbuilds=[], quiet=False):
- """Returns a list of shell commands that should run before and after
- |postbuilds|."""
- assert output_binary is not None
- pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet)
- post = self._GetIOSPostbuilds(configname, output_binary)
- return pre + postbuilds + post
-
- def _AdjustLibrary(self, library, config_name=None):
- if library.endswith('.framework'):
- l = '-framework ' + os.path.splitext(os.path.basename(library))[0]
- else:
- m = self.library_re.match(library)
- if m:
- l = '-l' + m.group(1)
- else:
- l = library
-
- sdk_root = self._SdkPath(config_name)
- if not sdk_root:
- sdk_root = ''
- # Xcode 7 started shipping with ".tbd" (text-based stub) files instead of
- # ".dylib" files, without providing real support for them. What it does,
- # for "/usr/lib" libraries, is emit "-L/usr/lib -lname", which depends on
- # the library order and causes collisions when building Chrome.
- #
- # Instead, substitute ".dylib" with ".tbd" in the generated project when
- # the following conditions are both true:
- # - the library is referenced in the gyp file as "$(SDKROOT)/**/*.dylib",
- # - the ".dylib" file does not exist but a ".tbd" file does.
- library = l.replace('$(SDKROOT)', sdk_root)
- if l.startswith('$(SDKROOT)'):
- basename, ext = os.path.splitext(library)
- if ext == '.dylib' and not os.path.exists(library):
- tbd_library = basename + '.tbd'
- if os.path.exists(tbd_library):
- library = tbd_library
- return library
-
- def AdjustLibraries(self, libraries, config_name=None):
- """Transforms entries like 'Cocoa.framework' in libraries into entries like
- '-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
- """
- libraries = [self._AdjustLibrary(library, config_name)
- for library in libraries]
- return libraries
-
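- # Editor's worked example (not part of the original file):
- #   AdjustLibraries(['Cocoa.framework', 'libcrypto.dylib', '-lm'])
- #   # -> ['-framework Cocoa', '-lcrypto', '-lm']
-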
- def _BuildMachineOSBuild(self):
- return GetStdout(['sw_vers', '-buildVersion'])
-
- def _XcodeIOSDeviceFamily(self, configname):
- family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1')
- return [int(x) for x in family.split(',')]
-
- def GetExtraPlistItems(self, configname=None):
- """Returns a dictionary with extra items to insert into Info.plist."""
- if configname not in XcodeSettings._plist_cache:
- cache = {}
- cache['BuildMachineOSBuild'] = self._BuildMachineOSBuild()
-
- xcode, xcode_build = XcodeVersion()
- cache['DTXcode'] = xcode
- cache['DTXcodeBuild'] = xcode_build
-
- sdk_root = self._SdkRoot(configname)
- if not sdk_root:
- sdk_root = self._DefaultSdkRoot()
- cache['DTSDKName'] = sdk_root
- if xcode >= '0430':
- cache['DTSDKBuild'] = self._GetSdkVersionInfoItem(
- sdk_root, 'ProductBuildVersion')
- else:
- cache['DTSDKBuild'] = cache['BuildMachineOSBuild']
-
- if self.isIOS:
- cache['DTPlatformName'] = cache['DTSDKName']
- if configname.endswith("iphoneos"):
- cache['DTPlatformVersion'] = self._GetSdkVersionInfoItem(
- sdk_root, 'ProductVersion')
- cache['CFBundleSupportedPlatforms'] = ['iPhoneOS']
- else:
- cache['CFBundleSupportedPlatforms'] = ['iPhoneSimulator']
- XcodeSettings._plist_cache[configname] = cache
-
- # Include extra plist items that are per-target, not per global
- # XcodeSettings.
- items = dict(XcodeSettings._plist_cache[configname])
- if self.isIOS:
- items['UIDeviceFamily'] = self._XcodeIOSDeviceFamily(configname)
- return items
-
- def _DefaultSdkRoot(self):
- """Returns the default SDKROOT to use.
-
- Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
- project, then the environment variable was empty. Starting with this
- version, Xcode uses the name of the newest SDK installed.
- """
- xcode_version, xcode_build = XcodeVersion()
- if xcode_version < '0500':
- return ''
- default_sdk_path = self._XcodeSdkPath('')
- default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
- if default_sdk_root:
- return default_sdk_root
- try:
- all_sdks = GetStdout(['xcodebuild', '-showsdks'])
- except:
- # If xcodebuild fails, there will be no valid SDKs
- return ''
- for line in all_sdks.splitlines():
- items = line.split()
- if len(items) >= 3 and items[-2] == '-sdk':
- sdk_root = items[-1]
- sdk_path = self._XcodeSdkPath(sdk_root)
- if sdk_path == default_sdk_path:
- return sdk_root
- return ''
-
-
-class MacPrefixHeader(object):
- """A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
-
- This feature consists of several pieces:
- * If GCC_PREFIX_HEADER is present, all compilations in that project get an
- additional |-include path_to_prefix_header| cflag.
- * If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is
- instead compiled, and all other compilations in the project get an
- additional |-include path_to_compiled_header| instead.
- + Compiled prefix headers have the extension gch. There is one gch file for
- every language used in the project (c, cc, m, mm), since gch files for
- different languages aren't compatible.
- + gch files themselves are built with the target's normal cflags, but they
- obviously don't get the |-include| flag. Instead, they need a -x flag that
- describes their language.
- + All o files in the target need to depend on the gch file, to make sure
- it's built before any o file is built.
-
- This class helps with some of these tasks, but it needs help from the build
- system for writing dependencies to the gch files, for writing build commands
- for the gch files, and for figuring out the location of the gch files.
- """
- def __init__(self, xcode_settings,
- gyp_path_to_build_path, gyp_path_to_build_output):
- """If xcode_settings is None, all methods on this class are no-ops.
-
- Args:
- gyp_path_to_build_path: A function that takes a gyp-relative path,
- and returns a path relative to the build directory.
- gyp_path_to_build_output: A function that takes a gyp-relative path and
- a language code ('c', 'cc', 'm', or 'mm'), and that returns a path
- to where the output of precompiling that path for that language
- should be placed (without the trailing '.gch').
- """
- # This doesn't support per-configuration prefix headers. Good enough
- # for now.
- self.header = None
- self.compile_headers = False
- if xcode_settings:
- self.header = xcode_settings.GetPerTargetSetting('GCC_PREFIX_HEADER')
- self.compile_headers = xcode_settings.GetPerTargetSetting(
- 'GCC_PRECOMPILE_PREFIX_HEADER', default='NO') != 'NO'
- self.compiled_headers = {}
- if self.header:
- if self.compile_headers:
- for lang in ['c', 'cc', 'm', 'mm']:
- self.compiled_headers[lang] = gyp_path_to_build_output(
- self.header, lang)
- self.header = gyp_path_to_build_path(self.header)
-
- def _CompiledHeader(self, lang, arch):
- assert self.compile_headers
- h = self.compiled_headers[lang]
- if arch:
- h += '.' + arch
- return h
-
- def GetInclude(self, lang, arch=None):
- """Gets the cflags to include the prefix header for language |lang|."""
- if self.compile_headers and lang in self.compiled_headers:
- return '-include %s' % self._CompiledHeader(lang, arch)
- elif self.header:
- return '-include %s' % self.header
- else:
- return ''
-
- def _Gch(self, lang, arch):
- """Returns the actual file name of the prefix header for language |lang|."""
- assert self.compile_headers
- return self._CompiledHeader(lang, arch) + '.gch'
-
- def GetObjDependencies(self, sources, objs, arch=None):
- """Given a list of source files and the corresponding object files, returns
- a list of (source, object, gch) tuples, where |gch| is the build-directory
- relative path to the gch file each object file depends on. |sources[i]|
- has to be the source file belonging to |objs[i]|."""
- if not self.header or not self.compile_headers:
- return []
-
- result = []
- for source, obj in zip(sources, objs):
- ext = os.path.splitext(source)[1]
- lang = {
- '.c': 'c',
- '.cpp': 'cc', '.cc': 'cc', '.cxx': 'cc',
- '.m': 'm',
- '.mm': 'mm',
- }.get(ext, None)
- if lang:
- result.append((source, obj, self._Gch(lang, arch)))
- return result
-
- def GetPchBuildCommands(self, arch=None):
- """Returns [(path_to_gch, language_flag, language, header)].
- |path_to_gch| and |header| are relative to the build directory.
- """
- if not self.header or not self.compile_headers:
- return []
- return [
- (self._Gch('c', arch), '-x c-header', 'c', self.header),
- (self._Gch('cc', arch), '-x c++-header', 'cc', self.header),
- (self._Gch('m', arch), '-x objective-c-header', 'm', self.header),
- (self._Gch('mm', arch), '-x objective-c++-header', 'mm', self.header),
- ]
-
-
-def XcodeVersion():
- """Returns a tuple of version and build version of installed Xcode."""
- # `xcodebuild -version` output looks like
- # Xcode 4.6.3
- # Build version 4H1503
- # or like
- # Xcode 3.2.6
- # Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
- # BuildVersion: 10M2518
- # Convert that to '0463', '4H1503'.
- global XCODE_VERSION_CACHE
- if XCODE_VERSION_CACHE:
- return XCODE_VERSION_CACHE
- try:
- version_list = GetStdout(['xcodebuild', '-version']).splitlines()
- # In some circumstances xcodebuild exits 0 but doesn't return
- # the right results; for example, a user on 10.7 or 10.8 with
- # a bogus path set via xcode-select.
- # In that case this may be a CLT-only install so fall back to
- # checking that version.
- if len(version_list) < 2:
- raise GypError("xcodebuild returned unexpected results")
- except:
- version = CLTVersion()
- if version:
- version = re.match(r'(\d\.\d\.?\d*)', version).groups()[0]
- else:
- raise GypError("No Xcode or CLT version detected!")
- # The CLT has no build information, so we return an empty string.
- version_list = [version, '']
- version = version_list[0]
- build = version_list[-1]
- # Be careful to convert "4.2" to "0420":
- version = version.split()[-1].replace('.', '')
- version = (version + '0' * (3 - len(version))).zfill(4)
- if build:
- build = build.split()[-1]
- XCODE_VERSION_CACHE = (version, build)
- return XCODE_VERSION_CACHE
-
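-# Editor's worked example (not part of the original file): the zero-padding
-# above keeps string comparisons such as version < '0500' meaningful:
-#   'Xcode 4.2'   -> '42'  -> '420' -> '0420'
-#   'Xcode 4.6.3' -> '463' -> '463' -> '0463'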
-
-# This function ported from the logic in Homebrew's CLT version check
-def CLTVersion():
- """Returns the version of command-line tools from pkgutil."""
- # pkgutil output looks like
- # package-id: com.apple.pkg.CLTools_Executables
- # version: 5.0.1.0.1.1382131676
- # volume: /
- # location: /
- # install-time: 1382544035
- # groups: com.apple.FindSystemFiles.pkg-group com.apple.DevToolsBoth.pkg-group com.apple.DevToolsNonRelocatableShared.pkg-group
- STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo"
- FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
- MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
-
- regex = re.compile('version: (?P<version>.+)')
- for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
- try:
- output = GetStdout(['/usr/sbin/pkgutil', '--pkg-info', key])
- return re.search(regex, output).groupdict()['version']
- except:
- continue
-
-
-def GetStdout(cmdlist):
- """Returns the content of standard output returned by invoking |cmdlist|.
- Raises |GypError| if the command returns with a non-zero return code."""
- job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
- out = job.communicate()[0]
- if job.returncode != 0:
- sys.stderr.write(out + '\n')
- raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
- return out.rstrip('\n')
-
-
-def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
- """Merges the global xcode_settings dictionary into each configuration of the
- target represented by spec. For keys that are both in the global and the local
- xcode_settings dict, the local key gets precedence.
- """
- # The xcode generator special-cases global xcode_settings and does something
- # that amounts to merging in the global xcode_settings into each local
- # xcode_settings dict.
- global_xcode_settings = global_dict.get('xcode_settings', {})
- for config in spec['configurations'].values():
- if 'xcode_settings' in config:
- new_settings = global_xcode_settings.copy()
- new_settings.update(config['xcode_settings'])
- config['xcode_settings'] = new_settings
-
-
-def IsMacBundle(flavor, spec):
- """Returns if |spec| should be treated as a bundle.
-
- Bundles are directories with a certain subdirectory structure, instead of
- just a single file. Bundle rules not only produce a binary but also package
- resources into that directory."""
- is_mac_bundle = (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac')
- if is_mac_bundle:
- assert spec['type'] != 'none', (
- 'mac_bundle targets cannot have type none (target "%s")' %
- spec['target_name'])
- return is_mac_bundle
-
-
-def GetMacBundleResources(product_dir, xcode_settings, resources):
- """Yields (output, resource) pairs for every resource in |resources|.
- Only call this for mac bundle targets.
-
- Args:
- product_dir: Path to the directory containing the output bundle,
- relative to the build directory.
- xcode_settings: The XcodeSettings of the current target.
- resources: A list of bundle resources, relative to the build directory.
- """
- dest = os.path.join(product_dir,
- xcode_settings.GetBundleResourceFolder())
- for res in resources:
- output = dest
-
- # The make generator doesn't support it, so forbid it everywhere
- # to keep the generators more interchangeable.
- assert ' ' not in res, (
- "Spaces in resource filenames not supported (%s)" % res)
-
- # Split into (path,file).
- res_parts = os.path.split(res)
-
- # Now split the path into (prefix,maybe.lproj).
- lproj_parts = os.path.split(res_parts[0])
- # If the resource lives in a .lproj bundle, add that to the destination.
- if lproj_parts[1].endswith('.lproj'):
- output = os.path.join(output, lproj_parts[1])
-
- output = os.path.join(output, res_parts[1])
- # Compiled XIB files are referred to by .nib.
- if output.endswith('.xib'):
- output = os.path.splitext(output)[0] + '.nib'
- # Compiled storyboard files are referred to by .storyboardc.
- if output.endswith('.storyboard'):
- output = os.path.splitext(output)[0] + '.storyboardc'
-
- yield output, res
-
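-# Editor's worked example (not part of the original file), for a hypothetical
-# bundle whose resource folder resolves to 'App.app/Contents/Resources':
-#   'res/en.lproj/Main.xib' -> 'App.app/Contents/Resources/en.lproj/Main.nib'
-#   'res/logo.png'          -> 'App.app/Contents/Resources/logo.png'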
-
-def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
- """Returns (info_plist, dest_plist, defines, extra_env), where:
- * |info_plist| is the source plist path, relative to the
- build directory,
- * |dest_plist| is the destination plist path, relative to the
- build directory,
- * |defines| is a list of preprocessor defines (empty if the plist
- shouldn't be preprocessed),
- * |extra_env| is a dict of env variables that should be exported when
- invoking |mac_tool copy-info-plist|.
-
- Only call this for mac bundle targets.
-
- Args:
- product_dir: Path to the directory containing the output bundle,
- relative to the build directory.
- xcode_settings: The XcodeSettings of the current target.
- gyp_path_to_build_path: A function that converts paths relative to the
- current gyp file to paths relative to the build directory.
- """
- info_plist = xcode_settings.GetPerTargetSetting('INFOPLIST_FILE')
- if not info_plist:
- return None, None, [], {}
-
- # The make generator doesn't support it, so forbid it everywhere
- # to keep the generators more interchangeable.
- assert ' ' not in info_plist, (
- "Spaces in Info.plist filenames not supported (%s)" % info_plist)
-
- info_plist = gyp_path_to_build_path(info_plist)
-
- # If explicitly set to preprocess the plist, invoke the C preprocessor and
- # specify any defines as -D flags.
- if xcode_settings.GetPerTargetSetting(
- 'INFOPLIST_PREPROCESS', default='NO') == 'YES':
- # Create an intermediate file based on the path.
- defines = shlex.split(xcode_settings.GetPerTargetSetting(
- 'INFOPLIST_PREPROCESSOR_DEFINITIONS', default=''))
- else:
- defines = []
-
- dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath())
- extra_env = xcode_settings.GetPerTargetSettings()
-
- return info_plist, dest_plist, defines, extra_env
-
-
-def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
- additional_settings=None):
- """Return the environment variables that Xcode would set. See
- http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153
- for a full list.
-
- Args:
- xcode_settings: An XcodeSettings object. If this is None, this function
- returns an empty dict.
- built_products_dir: Absolute path to the built products dir.
- srcroot: Absolute path to the source root.
- configuration: The build configuration name.
- additional_settings: An optional dict with more values to add to the
- result.
- """
- if not xcode_settings: return {}
-
- # This function is considered a friend of XcodeSettings, so let it reach into
- # its implementation details.
- spec = xcode_settings.spec
-
- # These are filled in on an as-needed basis.
- env = {
- 'BUILT_FRAMEWORKS_DIR' : built_products_dir,
- 'BUILT_PRODUCTS_DIR' : built_products_dir,
- 'CONFIGURATION' : configuration,
- 'PRODUCT_NAME' : xcode_settings.GetProductName(),
- # See /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec for FULL_PRODUCT_NAME
- 'SRCROOT' : srcroot,
- 'SOURCE_ROOT': '${SRCROOT}',
- # This is not true for static libraries, but currently the env is only
- # written for bundles:
- 'TARGET_BUILD_DIR' : built_products_dir,
- 'TEMP_DIR' : '${TMPDIR}',
- }
- if xcode_settings.GetPerConfigSetting('SDKROOT', configuration):
- env['SDKROOT'] = xcode_settings._SdkPath(configuration)
- else:
- env['SDKROOT'] = ''
-
- if spec['type'] in (
- 'executable', 'static_library', 'shared_library', 'loadable_module'):
- env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName()
- env['EXECUTABLE_PATH'] = xcode_settings.GetExecutablePath()
- env['FULL_PRODUCT_NAME'] = xcode_settings.GetFullProductName()
- mach_o_type = xcode_settings.GetMachOType()
- if mach_o_type:
- env['MACH_O_TYPE'] = mach_o_type
- env['PRODUCT_TYPE'] = xcode_settings.GetProductType()
- if xcode_settings._IsBundle():
- env['CONTENTS_FOLDER_PATH'] = \
- xcode_settings.GetBundleContentsFolderPath()
- env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \
- xcode_settings.GetBundleResourceFolder()
- env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath()
- env['WRAPPER_NAME'] = xcode_settings.GetWrapperName()
-
- install_name = xcode_settings.GetInstallName()
- if install_name:
- env['LD_DYLIB_INSTALL_NAME'] = install_name
- install_name_base = xcode_settings.GetInstallNameBase()
- if install_name_base:
- env['DYLIB_INSTALL_NAME_BASE'] = install_name_base
- # XcodeVersion() returns a (version, build) tuple; compare the version.
- if XcodeVersion()[0] >= '0500' and not env.get('SDKROOT'):
- sdk_root = xcode_settings._SdkRoot(configuration)
- if not sdk_root:
- sdk_root = xcode_settings._XcodeSdkPath('')
- env['SDKROOT'] = sdk_root
-
- if not additional_settings:
- additional_settings = {}
- else:
- # Flatten lists to strings.
- for k in additional_settings:
- if not isinstance(additional_settings[k], str):
- additional_settings[k] = ' '.join(additional_settings[k])
- additional_settings.update(env)
-
- for k in additional_settings:
- additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k])
-
- return additional_settings
-
-
-def _NormalizeEnvVarReferences(value):
- """Takes a string containing variable references in the form ${FOO}, $(FOO),
- or $FOO, and returns a string with all variable references in the form ${FOO}.
- """
- # $FOO -> ${FOO}
- value = re.sub(r'\$([a-zA-Z_][a-zA-Z0-9_]*)', r'${\1}', value)
-
- # $(FOO) -> ${FOO}
- matches = re.findall(r'(\$\(([a-zA-Z0-9\-_]+)\))', value)
- for match in matches:
- to_replace, variable = match
- assert '$(' not in variable, '$($(FOO)) variables not supported: ' + to_replace
- value = value.replace(to_replace, '${' + variable + '}')
-
- return value
-
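-# Editor's worked example (not part of the original file):
-#   _NormalizeEnvVarReferences('$FOO and $(BAR) in ${BAZ}')
-#   # -> '${FOO} and ${BAR} in ${BAZ}'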
-
-def ExpandEnvVars(string, expansions):
- """Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the
- expansions list. If the variable expands to something that references
- another variable, that variable is expanded as well if it's in the
- expansions list -- until no variables present in the list are left."""
- for k, v in reversed(expansions):
- string = string.replace('${' + k + '}', v)
- string = string.replace('$(' + k + ')', v)
- string = string.replace('$' + k, v)
- return string
-
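-# Editor's worked example (not part of the original file). Because the
-# expansions list is topologically sorted (see GetSortedXcodeEnv below), one
-# reversed pass resolves chained references:
-#   expansions = [('PRODUCT_NAME', 'Chromium'),
-#                 ('WRAPPER_NAME', '${PRODUCT_NAME}.app')]
-#   ExpandEnvVars('${WRAPPER_NAME}/Contents', expansions)
-#   # -> 'Chromium.app/Contents'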
-
-def _TopologicallySortedEnvVarKeys(env):
- """Takes a dict |env| whose values are strings that can refer to other keys,
- for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
- env such that key2 is after key1 in L if env[key2] refers to env[key1].
-
- Throws an Exception in case of dependency cycles.
- """
- # Since environment variables can refer to other variables, the evaluation
- # order is important. Below is the logic to compute the dependency graph
- # and sort it.
- regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}')
- def GetEdges(node):
- # Use a definition of edges such that user_of_variable -> used_variable.
- # This happens to be easier in this case, since a variable's
- # definition contains all variables it references in a single string.
- # We can then reverse the result of the topological sort at the end.
- # Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
- matches = set([v for v in regex.findall(env[node]) if v in env])
- for dependee in matches:
- assert '${' not in dependee, 'Nested variables not supported: ' + dependee
- return matches
-
- try:
- # Topologically sort, and then reverse, because we used an edge definition
- # that's inverted from the expected result of this function (see comment
- # above).
- order = gyp.common.TopologicallySorted(env.keys(), GetEdges)
- order.reverse()
- return order
- except gyp.common.CycleError, e:
- raise GypError(
- 'Xcode environment variables are cyclically dependent: ' + str(e.nodes))
-
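-# Editor's worked example (not part of the original file):
-#   env = {'A': 'x', 'B': '${A}/y', 'C': '${B}/z'}
-#   _TopologicallySortedEnvVarKeys(env)  # -> ['A', 'B', 'C']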
-
-def GetSortedXcodeEnv(xcode_settings, built_products_dir, srcroot,
- configuration, additional_settings=None):
- env = _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
- additional_settings)
- return [(key, env[key]) for key in _TopologicallySortedEnvVarKeys(env)]
-
-
-def GetSpecPostbuildCommands(spec, quiet=False):
- """Returns the list of postbuilds explicitly defined on |spec|, in a form
- executable by a shell."""
- postbuilds = []
- for postbuild in spec.get('postbuilds', []):
- if not quiet:
- postbuilds.append('echo POSTBUILD\\(%s\\) %s' % (
- spec['target_name'], postbuild['postbuild_name']))
- postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild['action']))
- return postbuilds
-
-
-def _HasIOSTarget(targets):
- """Returns true if any target contains the iOS specific key
- IPHONEOS_DEPLOYMENT_TARGET."""
- for target_dict in targets.values():
- for config in target_dict['configurations'].values():
- if config.get('xcode_settings', {}).get('IPHONEOS_DEPLOYMENT_TARGET'):
- return True
- return False
-
-
-def _AddIOSDeviceConfigurations(targets):
- """Clone all targets and append -iphoneos to the name. Configure these targets
- to build for iOS devices and use correct architectures for those builds."""
- for target_dict in targets.itervalues():
- toolset = target_dict['toolset']
- configs = target_dict['configurations']
- for config_name, config_dict in dict(configs).iteritems():
- iphoneos_config_dict = copy.deepcopy(config_dict)
- configs[config_name + '-iphoneos'] = iphoneos_config_dict
- configs[config_name + '-iphonesimulator'] = config_dict
- if toolset == 'target':
- iphoneos_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
- return targets
-
-
-def CloneConfigurationForDeviceAndEmulator(target_dicts):
- """If |target_dicts| contains any iOS targets, automatically create -iphoneos
- targets for iOS device builds."""
- if _HasIOSTarget(target_dicts):
- return _AddIOSDeviceConfigurations(target_dicts)
- return target_dicts
diff --git a/deps/gyp/pylib/gyp/xcode_ninja.py b/deps/gyp/pylib/gyp/xcode_ninja.py
deleted file mode 100644
index 3820d6bf04..0000000000
--- a/deps/gyp/pylib/gyp/xcode_ninja.py
+++ /dev/null
@@ -1,270 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Xcode-ninja wrapper project file generator.
-
-This updates the data structures passed to the Xcode gyp generator to build
-with ninja instead. The Xcode project itself is transformed into a list of
-executable targets, each with a build step to build with ninja, and a target
- with every source and resource file. This appears to sidestep some of the
- major performance headaches experienced when using complex projects and
- large numbers of targets within Xcode.
-"""
-
-import errno
-import gyp.generator.ninja
-import os
-import re
-import xml.sax.saxutils
-
-
-def _WriteWorkspace(main_gyp, sources_gyp, params):
- """ Create a workspace to wrap main and sources gyp paths. """
- (build_file_root, build_file_ext) = os.path.splitext(main_gyp)
- workspace_path = build_file_root + '.xcworkspace'
- options = params['options']
- if options.generator_output:
- workspace_path = os.path.join(options.generator_output, workspace_path)
- try:
- os.makedirs(workspace_path)
- except OSError, e:
- if e.errno != errno.EEXIST:
- raise
- output_string = '<?xml version="1.0" encoding="UTF-8"?>\n' + \
- '<Workspace version = "1.0">\n'
- for gyp_name in [main_gyp, sources_gyp]:
- name = os.path.splitext(os.path.basename(gyp_name))[0] + '.xcodeproj'
- name = xml.sax.saxutils.quoteattr("group:" + name)
- output_string += ' <FileRef location = %s></FileRef>\n' % name
- output_string += '</Workspace>\n'
-
- workspace_file = os.path.join(workspace_path, "contents.xcworkspacedata")
-
- try:
- with open(workspace_file, 'r') as input_file:
- input_string = input_file.read()
- if input_string == output_string:
- return
- except IOError:
- # Ignore errors if the file doesn't exist.
- pass
-
- with open(workspace_file, 'w') as output_file:
- output_file.write(output_string)
-
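-# Editor's worked example (not part of the original file), for hypothetical
-# inputs main_gyp='all.ninja.gyp' and sources_gyp='all.ninja.sources.gyp';
-# the generated contents.xcworkspacedata is:
-#   <?xml version="1.0" encoding="UTF-8"?>
-#   <Workspace version = "1.0">
-#    <FileRef location = "group:all.ninja.xcodeproj"></FileRef>
-#    <FileRef location = "group:all.ninja.sources.xcodeproj"></FileRef>
-#   </Workspace>
-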
-def _TargetFromSpec(old_spec, params):
- """ Create fake target for xcode-ninja wrapper. """
- # Determine ninja top level build dir (e.g. /path/to/out).
- ninja_toplevel = None
- jobs = 0
- if params:
- options = params['options']
- ninja_toplevel = \
- os.path.join(options.toplevel_dir,
- gyp.generator.ninja.ComputeOutputDir(params))
- jobs = params.get('generator_flags', {}).get('xcode_ninja_jobs', 0)
-
- target_name = old_spec.get('target_name')
- product_name = old_spec.get('product_name', target_name)
- product_extension = old_spec.get('product_extension')
-
- ninja_target = {}
- ninja_target['target_name'] = target_name
- ninja_target['product_name'] = product_name
- if product_extension:
- ninja_target['product_extension'] = product_extension
- ninja_target['toolset'] = old_spec.get('toolset')
- ninja_target['default_configuration'] = old_spec.get('default_configuration')
- ninja_target['configurations'] = {}
-
- # Tell Xcode to look in |ninja_toplevel| for build products.
- new_xcode_settings = {}
- if ninja_toplevel:
- new_xcode_settings['CONFIGURATION_BUILD_DIR'] = \
- "%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel
-
- if 'configurations' in old_spec:
- for config in old_spec['configurations'].iterkeys():
- old_xcode_settings = \
- old_spec['configurations'][config].get('xcode_settings', {})
- if 'IPHONEOS_DEPLOYMENT_TARGET' in old_xcode_settings:
- new_xcode_settings['CODE_SIGNING_REQUIRED'] = "NO"
- new_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET'] = \
- old_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET']
- ninja_target['configurations'][config] = {}
- ninja_target['configurations'][config]['xcode_settings'] = \
- new_xcode_settings
-
- ninja_target['mac_bundle'] = old_spec.get('mac_bundle', 0)
- ninja_target['ios_app_extension'] = old_spec.get('ios_app_extension', 0)
- ninja_target['ios_watchkit_extension'] = \
- old_spec.get('ios_watchkit_extension', 0)
- ninja_target['ios_watchkit_app'] = old_spec.get('ios_watchkit_app', 0)
- ninja_target['type'] = old_spec['type']
- if ninja_toplevel:
- ninja_target['actions'] = [
- {
- 'action_name': 'Compile and copy %s via ninja' % target_name,
- 'inputs': [],
- 'outputs': [],
- 'action': [
- 'env',
- 'PATH=%s' % os.environ['PATH'],
- 'ninja',
- '-C',
- new_xcode_settings['CONFIGURATION_BUILD_DIR'],
- target_name,
- ],
- 'message': 'Compile and copy %s via ninja' % target_name,
- },
- ]
- if jobs > 0:
- ninja_target['actions'][0]['action'].extend(('-j', jobs))
- return ninja_target
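-
- # For a hypothetical target "base" with ninja output under /path/to/out,
- # the action assembled above amounts to Xcode running roughly:
- #   env PATH=... ninja -C /path/to/out/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME) base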
-
-def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
- """Limit targets for Xcode wrapper.
-
- Xcode sometimes performs poorly with too many targets, so only include
- proper executable targets, with filters to customize.
- Arguments:
- target_extras: Regular expression to always add, matching any target.
- executable_target_pattern: Regular expression limiting executable targets.
- spec: Specifications for target.
- """
- target_name = spec.get('target_name')
- # Always include targets matching target_extras.
- if target_extras is not None and re.search(target_extras, target_name):
- return True
-
- # Otherwise just show executable targets.
- if spec.get('type', '') == 'executable' and \
- spec.get('product_extension', '') != 'bundle':
-
- # If there is a filter and the target does not match, exclude the target.
- if executable_target_pattern is not None:
- if not re.search(executable_target_pattern, target_name):
- return False
- return True
- return False
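-
- # A quick sketch of the filtering above, with hypothetical patterns:
- #   spec = {'target_name': 'base_unittests', 'type': 'executable'}
- #   IsValidTargetForWrapper(None, None, spec)                  # -> True
- #   IsValidTargetForWrapper(None, '^browser$', spec)           # -> False
- #   IsValidTargetForWrapper('unittests', '^browser$', spec)    # -> True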
-
-def CreateWrapper(target_list, target_dicts, data, params):
- """Initialize targets for the ninja wrapper.
-
- This sets up the necessary variables in the targets to generate Xcode projects
- that use ninja as an external builder.
- Arguments:
- target_list: List of target pairs: 'base/base.gyp:base'.
- target_dicts: Dict of target properties keyed on target pair.
- data: Dict of flattened build files keyed on gyp path.
- params: Dict of global options for gyp.
- """
- orig_gyp = params['build_files'][0]
- for gyp_name, gyp_dict in data.iteritems():
- if gyp_name == orig_gyp:
- depth = gyp_dict['_DEPTH']
-
- # Check for a custom main gyp name; otherwise use the default
- # CHROMIUM_GYP_FILE and insert .ninja before the .gyp extension.
- generator_flags = params.get('generator_flags', {})
- main_gyp = generator_flags.get('xcode_ninja_main_gyp', None)
- if main_gyp is None:
- (build_file_root, build_file_ext) = os.path.splitext(orig_gyp)
- main_gyp = build_file_root + ".ninja" + build_file_ext
-
- # Create new |target_list|, |target_dicts| and |data| data structures.
- new_target_list = []
- new_target_dicts = {}
- new_data = {}
-
- # Set base keys needed for |data|.
- new_data[main_gyp] = {}
- new_data[main_gyp]['included_files'] = []
- new_data[main_gyp]['targets'] = []
- new_data[main_gyp]['xcode_settings'] = \
- data[orig_gyp].get('xcode_settings', {})
-
- # Normally the xcode-ninja generator includes only valid executable targets.
- # If |xcode_ninja_executable_target_pattern| is set, that list is reduced to
- # executable targets that match the pattern. (Default all)
- executable_target_pattern = \
- generator_flags.get('xcode_ninja_executable_target_pattern', None)
-
- # For including other non-executable targets, add the matching target name
- # to the |xcode_ninja_target_pattern| regular expression. (Default none)
- target_extras = generator_flags.get('xcode_ninja_target_pattern', None)
-
- for old_qualified_target in target_list:
- spec = target_dicts[old_qualified_target]
- if IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
- # Add to new_target_list.
- target_name = spec.get('target_name')
- new_target_name = '%s:%s#target' % (main_gyp, target_name)
- new_target_list.append(new_target_name)
-
- # Add to new_target_dicts.
- new_target_dicts[new_target_name] = _TargetFromSpec(spec, params)
-
- # Add to new_data.
- for old_target in data[old_qualified_target.split(':')[0]]['targets']:
- if old_target['target_name'] == target_name:
- new_data_target = {}
- new_data_target['target_name'] = old_target['target_name']
- new_data_target['toolset'] = old_target['toolset']
- new_data[main_gyp]['targets'].append(new_data_target)
-
- # Create sources target.
- sources_target_name = 'sources_for_indexing'
- sources_target = _TargetFromSpec(
- { 'target_name' : sources_target_name,
- 'toolset': 'target',
- 'default_configuration': 'Default',
- 'mac_bundle': '0',
- 'type': 'executable'
- }, None)
-
- # Tell Xcode to look everywhere for headers.
- sources_target['configurations'] = {'Default': { 'include_dirs': [ depth ] } }
-
- sources = []
- for target, target_dict in target_dicts.iteritems():
- base = os.path.dirname(target)
- files = target_dict.get('sources', []) + \
- target_dict.get('mac_bundle_resources', [])
- for action in target_dict.get('actions', []):
- files.extend(action.get('inputs', []))
- # Remove files starting with $. These are mostly intermediate files for the
- # build system.
- files = [ file for file in files if not file.startswith('$')]
-
- # Make sources relative to root build file.
- relative_path = os.path.dirname(main_gyp)
- sources += [ os.path.relpath(os.path.join(base, file), relative_path)
- for file in files ]
-
- sources_target['sources'] = sorted(set(sources))
-
- # Put sources_for_indexing in its own gyp.
- sources_gyp = \
- os.path.join(os.path.dirname(main_gyp), sources_target_name + ".gyp")
- fully_qualified_target_name = \
- '%s:%s#target' % (sources_gyp, sources_target_name)
-
- # Add to new_target_list, new_target_dicts and new_data.
- new_target_list.append(fully_qualified_target_name)
- new_target_dicts[fully_qualified_target_name] = sources_target
- new_data_target = {}
- new_data_target['target_name'] = sources_target['target_name']
- new_data_target['_DEPTH'] = depth
- new_data_target['toolset'] = "target"
- new_data[sources_gyp] = {}
- new_data[sources_gyp]['targets'] = []
- new_data[sources_gyp]['included_files'] = []
- new_data[sources_gyp]['xcode_settings'] = \
- data[orig_gyp].get('xcode_settings', {})
- new_data[sources_gyp]['targets'].append(new_data_target)
-
- # Write workspace to file.
- _WriteWorkspace(main_gyp, sources_gyp, params)
- return (new_target_list, new_target_dicts, new_data)
diff --git a/deps/gyp/pylib/gyp/xcodeproj_file.py b/deps/gyp/pylib/gyp/xcodeproj_file.py
deleted file mode 100644
index d08b7f7770..0000000000
--- a/deps/gyp/pylib/gyp/xcodeproj_file.py
+++ /dev/null
@@ -1,2927 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Xcode project file generator.
-
-This module is both an Xcode project file generator and a documentation of the
-Xcode project file format. Knowledge of the project file format was gained
-based on extensive experience with Xcode, and by making changes to projects in
-Xcode.app and observing the resultant changes in the associated project files.
-
-XCODE PROJECT FILES
-
-The generator targets the file format as written by Xcode 3.2 (specifically,
-3.2.6), but past experience has taught that the format has not changed
-significantly in the past several years, and future versions of Xcode are able
-to read older project files.
-
-Xcode project files are "bundled": the project "file" from an end-user's
-perspective is actually a directory with an ".xcodeproj" extension. The
-project file from this module's perspective is actually a file inside this
-directory, always named "project.pbxproj". This file contains a complete
-description of the project and is all that is needed to use the xcodeproj.
-Other files contained in the xcodeproj directory are simply used to store
-per-user settings, such as the state of various UI elements in the Xcode
-application.
-
-The project.pbxproj file is a property list, stored in a format almost
-identical to the NeXTstep property list format. The file is able to carry
-Unicode data, and is encoded in UTF-8. The root element in the property list
-is a dictionary that contains several properties of minimal interest, and two
-properties of immense interest. The most important property is a dictionary
-named "objects". The entire structure of the project is represented by the
-children of this property. The objects dictionary is keyed by unique 96-bit
-values represented by 24 uppercase hexadecimal characters. Each value in the
-objects dictionary is itself a dictionary, describing an individual object.
-
-Each object in the dictionary is a member of a class, which is identified by
-the "isa" property of each object. A variety of classes are represented in a
-project file. Objects can refer to other objects by ID, using the 24-character
-hexadecimal object key. A project's objects form a tree, with a root object
-of class PBXProject at the root. As an example, the PBXProject object serves
-as parent to an XCConfigurationList object defining the build configurations
-used in the project, a PBXGroup object serving as a container for all files
-referenced in the project, and a list of target objects, each of which defines
-a target in the project. There are several different types of target object,
-such as PBXNativeTarget and PBXAggregateTarget. In this module, this
-relationship is expressed by having each target type derive from an abstract
-base named XCTarget.
-
-The project.pbxproj file's root dictionary also contains a property, sibling to
-the "objects" dictionary, named "rootObject". The value of rootObject is a
-24-character object key referring to the root PBXProject object in the
-objects dictionary.
-
-In Xcode, every file used as input to a target or produced as a final product
-of a target must appear somewhere in the hierarchy rooted at the PBXGroup
-object referenced by the PBXProject's mainGroup property. A PBXGroup is
-generally represented as a folder in the Xcode application. PBXGroups can
-contain other PBXGroups as well as PBXFileReferences, which are pointers to
-actual files.
-
-Each XCTarget contains a list of build phases, represented in this module by
-the abstract base XCBuildPhase. Examples of concrete XCBuildPhase derivations
-are PBXSourcesBuildPhase and PBXFrameworksBuildPhase, which correspond to the
-"Compile Sources" and "Link Binary With Libraries" phases displayed in the
-Xcode application. Files used as input to these phases (for example, source
-files in the former case and libraries and frameworks in the latter) are
-represented by PBXBuildFile objects, referenced by elements of "files" lists
-in XCTarget objects. Each PBXBuildFile object refers to a PBXFileReference
-object as a "weak" reference: it does not "own" the PBXFileReference, which is
-owned by the root object's mainGroup or a descendant group. In most cases, the
-layer of indirection between an XCBuildPhase and a PBXFileReference via a
-PBXBuildFile appears extraneous, but there's actually one reason for this:
-file-specific compiler flags are added to the PBXBuildFile object so as to
-allow a single file to be a member of multiple targets while having distinct
-compiler flags for each. These flags can be modified in the Xcode application
-in the "Build" tab of a File Info window.
-
-When a project is open in the Xcode application, Xcode will rewrite it. As
-such, this module is careful to adhere to the formatting used by Xcode, to
-avoid insignificant changes appearing in the file when it is used in the
-Xcode application. This will keep version control repositories happy, and
-makes it possible to compare a project file used in Xcode to one generated by
-this module to determine if any significant changes were made in the
-application.
-
-Xcode has its own way of assigning 24-character identifiers to each object,
-which is not duplicated here. Because the identifier is only generated
-once, when an object is created, and is then left unchanged, there is no need
-to attempt to duplicate Xcode's behavior in this area. The generator is free
-to select any identifier, even at random, to refer to the objects it creates,
-and Xcode will retain those identifiers and use them when subsequently
-rewriting the project file. However, the generator would choose new random
-identifiers each time the project files are generated, leading to difficulties
-comparing "used" project files to "pristine" ones produced by this module,
-and causing the appearance of changes as every object identifier is changed
-when updated projects are checked in to a version control repository. To
-mitigate this problem, this module chooses identifiers in a more deterministic
-way, by hashing a description of each object as well as its parent and ancestor
-objects. This strategy should result in minimal "shift" in IDs as successive
-generations of project files are produced.
-
-THIS MODULE
-
-This module introduces several classes, all derived from the XCObject class.
-Nearly all of the "brains" are built into the XCObject class, which understands
-how to create and modify objects, maintain the proper tree structure, compute
-identifiers, and print objects. For the most part, classes derived from
-XCObject need only provide a _schema class object, a dictionary that
-expresses what properties objects of the class may contain.
-
-Given this structure, it's possible to build a minimal project file by creating
-objects of the appropriate types and making the proper connections:
-
- config_list = XCConfigurationList()
- group = PBXGroup()
- project = PBXProject({'buildConfigurationList': config_list,
- 'mainGroup': group})
-
-With the project object set up, it can be added to an XCProjectFile object.
-XCProjectFile is a pseudo-class in the sense that it is a concrete XCObject
-subclass that does not actually correspond to a class type found in a project
-file. Rather, it is used to represent the project file's root dictionary.
-Printing an XCProjectFile will print the entire project file, including the
-full "objects" dictionary.
-
- project_file = XCProjectFile({'rootObject': project})
- project_file.ComputeIDs()
- project_file.Print()
-
-Xcode project files are always encoded in UTF-8. This module will accept
-strings of either the str class or the unicode class. Strings of class str
-are assumed to already be encoded in UTF-8. Obviously, if you're just using
-ASCII, you won't encounter difficulties because ASCII is a UTF-8 subset.
-Strings of class unicode are handled properly and encoded in UTF-8 when
-a project file is output.
-"""
-
-import gyp.common
-import posixpath
-import re
-import struct
-import sys
-
-# hashlib is supplied as of Python 2.5 as the replacement interface for sha
-# and other secure hashes. In 2.6, sha is deprecated. Import hashlib if
-# available, avoiding a deprecation warning under 2.6. Import sha otherwise,
-# preserving 2.4 compatibility.
-try:
- import hashlib
- _new_sha1 = hashlib.sha1
-except ImportError:
- import sha
- _new_sha1 = sha.new
-
-
-# See XCObject._EncodeString. This pattern is used to determine when a string
-# can be printed unquoted. Strings that match this pattern may be printed
-# unquoted. Strings that do not match must be quoted and may be further
-# transformed to be properly encoded. Note that this expression matches the
-# characters listed with "+", for 1 or more occurrences: if a string is empty,
-# it must not match this pattern, because it needs to be encoded as "".
-_unquoted = re.compile('^[A-Za-z0-9$./_]+$')
-
-# Strings that match this pattern are quoted regardless of what _unquoted says.
-# Oddly, Xcode will quote any string with a run of three or more underscores.
-_quoted = re.compile('___')
-
-# This pattern should match any character that needs to be escaped by
-# XCObject._EncodeString. See that function.
-_escaped = re.compile('[\\\\"]|[\x00-\x1f]')
-
-
-# Used by SourceTreeAndPathFromPath
-_path_leading_variable = re.compile(r'^\$\((.*?)\)(/(.*))?$')
-
-def SourceTreeAndPathFromPath(input_path):
- """Given input_path, returns a tuple with sourceTree and path values.
-
- Examples:
- input_path (source_tree, output_path)
- '$(VAR)/path' ('VAR', 'path')
- '$(VAR)' ('VAR', None)
- 'path' (None, 'path')
- """
-
- source_group_match = _path_leading_variable.match(input_path)
- if source_group_match:
- source_tree = source_group_match.group(1)
- output_path = source_group_match.group(3) # This may be None.
- else:
- source_tree = None
- output_path = input_path
-
- return (source_tree, output_path)
-
-def ConvertVariablesToShellSyntax(input_string):
- return re.sub(r'\$\((.*?)\)', '${\\1}', input_string)
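- # For example, ConvertVariablesToShellSyntax('$(SDKROOT)/usr/lib')
- # returns '${SDKROOT}/usr/lib'.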
-
-class XCObject(object):
- """The abstract base of all class types used in Xcode project files.
-
- Class variables:
- _schema: A dictionary defining the properties of this class. The keys to
- _schema are string property keys as used in project files. Values
- are a list of four or five elements:
- [ is_list, property_type, is_strong, is_required, default ]
- is_list: True if the property described is a list, as opposed
- to a single element.
- property_type: The type to use as the value of the property,
- or if is_list is True, the type to use for each
- element of the value's list. property_type must
- be an XCObject subclass, or one of the built-in
- types str, int, or dict.
- is_strong: If property_type is an XCObject subclass, is_strong
- is True to assert that this class "owns," or serves
- as parent, to the property value (or, if is_list is
- True, values). is_strong must be False if
- property_type is not an XCObject subclass.
- is_required: True if the property is required for the class.
- Note that is_required being True does not preclude
- an empty string ("", in the case of property_type
- str) or list ([], in the case of is_list True) from
- being set for the property.
- default: Optional. If is_required is True, default may be set
- to provide a default value for objects that do not supply
- their own value. If is_required is True and default
- is not provided, users of the class must supply their own
- value for the property.
- Note that although the values of the array are expressed in
- boolean terms, subclasses provide values as integers to conserve
- horizontal space.
- _should_print_single_line: False in XCObject. Subclasses whose objects
- should be written to the project file in the
- alternate single-line format, such as
- PBXFileReference and PBXBuildFile, should
- set this to True.
- _encode_transforms: Used by _EncodeString to encode unprintable characters.
- The index into this list is the ordinal of the
- character to transform; each value is a string
- used to represent the character in the output. XCObject
- provides an _encode_transforms list suitable for most
- XCObject subclasses.
- _alternate_encode_transforms: Provided for subclasses that wish to use
- the alternate encoding rules. Xcode seems
- to use these rules when printing objects in
- single-line format. Subclasses that desire
- this behavior should set _encode_transforms
- to _alternate_encode_transforms.
- _hashables: A list of extra hashable values used by ComputeIDs to
- construct this object's ID. Most classes that need custom
- hashing behavior should do it by overriding Hashables,
- but in some cases an object's parent may wish to push a
- hashable value into its child, and it can do so by appending
- to _hashables.
- Attributes:
- id: The object's identifier, a 24-character uppercase hexadecimal string.
- Usually, objects being created should not set id until the entire
- project file structure is built. At that point, ComputeIDs() should
- be called on the root object to assign deterministic values for id to
- each object in the tree.
- parent: The object's parent. This is set by a parent XCObject when a child
- object is added to it.
- _properties: The object's property dictionary. An object's properties are
- described by its class' _schema variable.
- """
-
- _schema = {}
- _should_print_single_line = False
-
- # See _EncodeString.
- _encode_transforms = []
- i = 0
- while i < ord(' '):
- _encode_transforms.append('\\U%04x' % i)
- i = i + 1
- _encode_transforms[7] = '\\a'
- _encode_transforms[8] = '\\b'
- _encode_transforms[9] = '\\t'
- _encode_transforms[10] = '\\n'
- _encode_transforms[11] = '\\v'
- _encode_transforms[12] = '\\f'
- _encode_transforms[13] = '\\n'
-
- _alternate_encode_transforms = list(_encode_transforms)
- _alternate_encode_transforms[9] = chr(9)
- _alternate_encode_transforms[10] = chr(10)
- _alternate_encode_transforms[11] = chr(11)
-
- def __init__(self, properties=None, id=None, parent=None):
- self.id = id
- self.parent = parent
- self._properties = {}
- self._hashables = []
- self._SetDefaultsFromSchema()
- self.UpdateProperties(properties)
-
- def __repr__(self):
- try:
- name = self.Name()
- except NotImplementedError:
- return '<%s at 0x%x>' % (self.__class__.__name__, id(self))
- return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
-
- def Copy(self):
- """Make a copy of this object.
-
- The new object will have its own copy of lists and dicts. Any XCObject
- objects owned by this object (marked "strong") will be copied in the
- new object, even those found in lists. If this object has any weak
- references to other XCObjects, the same references are added to the new
- object without making a copy.
- """
-
- that = self.__class__(id=self.id, parent=self.parent)
- for key, value in self._properties.iteritems():
- is_strong = self._schema[key][2]
-
- if isinstance(value, XCObject):
- if is_strong:
- new_value = value.Copy()
- new_value.parent = that
- that._properties[key] = new_value
- else:
- that._properties[key] = value
- elif isinstance(value, str) or isinstance(value, unicode) or \
- isinstance(value, int):
- that._properties[key] = value
- elif isinstance(value, list):
- if is_strong:
- # If is_strong is True, each element is an XCObject, so it's safe to
- # call Copy.
- that._properties[key] = []
- for item in value:
- new_item = item.Copy()
- new_item.parent = that
- that._properties[key].append(new_item)
- else:
- that._properties[key] = value[:]
- elif isinstance(value, dict):
- # dicts are never strong.
- if is_strong:
- raise TypeError('Strong dict for key ' + key + ' in ' + \
- self.__class__.__name__)
- else:
- that._properties[key] = value.copy()
- else:
- raise TypeError('Unexpected type ' + value.__class__.__name__ + \
- ' for key ' + key + ' in ' + self.__class__.__name__)
-
- return that
-
- def Name(self):
- """Return the name corresponding to an object.
-
- Not all objects necessarily need to be nameable, and not all nameable
- objects have a "name" property. Override as needed.
- """
-
- # If the schema indicates that "name" is required, try to access the
- # property even if it doesn't exist. This will result in a KeyError
- # being raised for the property that should be present, which seems more
- # appropriate than NotImplementedError in this case.
- if 'name' in self._properties or \
- ('name' in self._schema and self._schema['name'][3]):
- return self._properties['name']
-
- raise NotImplementedError(self.__class__.__name__ + ' must implement Name')
-
- def Comment(self):
- """Return a comment string for the object.
-
- Most objects just use their name as the comment, but PBXProject uses
- different values.
-
- The returned comment is not escaped and does not have any comment marker
- strings applied to it.
- """
-
- return self.Name()
-
- def Hashables(self):
- hashables = [self.__class__.__name__]
-
- name = self.Name()
- if name != None:
- hashables.append(name)
-
- hashables.extend(self._hashables)
-
- return hashables
-
- def HashablesForChild(self):
- return None
-
- def ComputeIDs(self, recursive=True, overwrite=True, seed_hash=None):
- """Set "id" properties deterministically.
-
- An object's "id" property is set based on a hash of its class type and
- name, as well as the class type and name of all ancestor objects. As
- such, it is only advisable to call ComputeIDs once an entire project file
- tree is built.
-
- If recursive is True, recurse into all descendant objects and update their
- hashes.
-
- If overwrite is True, any existing value set in the "id" property will be
- replaced.
- """
-
- def _HashUpdate(hash, data):
- """Update hash with data's length and contents.
-
- If the hash were updated only with the value of data, it would be
- possible for clowns to induce collisions by manipulating the names of
- their objects. By adding the length, it's far less likely that
- ID collisions will be encountered, intentionally or not.
- """
-
- hash.update(struct.pack('>i', len(data)))
- hash.update(data)
-
- if seed_hash is None:
- seed_hash = _new_sha1()
-
- hash = seed_hash.copy()
-
- hashables = self.Hashables()
- assert len(hashables) > 0
- for hashable in hashables:
- _HashUpdate(hash, hashable)
-
- if recursive:
- hashables_for_child = self.HashablesForChild()
- if hashables_for_child is None:
- child_hash = hash
- else:
- assert len(hashables_for_child) > 0
- child_hash = seed_hash.copy()
- for hashable in hashables_for_child:
- _HashUpdate(child_hash, hashable)
-
- for child in self.Children():
- child.ComputeIDs(recursive, overwrite, child_hash)
-
- if overwrite or self.id is None:
- # Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest
- # is 160 bits. Instead of throwing out 64 bits of the digest, xor them
- # into the portion that gets used.
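- # Concretely, a SHA-1 digest is five 32-bit words d0..d4; the ID below is
- # the hex rendering of (d0^d3, d1^d4, d2), 24 uppercase hex characters.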
- assert hash.digest_size % 4 == 0
- digest_int_count = hash.digest_size / 4
- digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest())
- id_ints = [0, 0, 0]
- for index in xrange(0, digest_int_count):
- id_ints[index % 3] ^= digest_ints[index]
- self.id = '%08X%08X%08X' % tuple(id_ints)
-
- def EnsureNoIDCollisions(self):
- """Verifies that no two objects have the same ID. Checks all descendants.
- """
-
- ids = {}
- descendants = self.Descendants()
- for descendant in descendants:
- if descendant.id in ids:
- other = ids[descendant.id]
- raise KeyError(
- 'Duplicate ID %s, objects "%s" and "%s" in "%s"' % \
- (descendant.id, str(descendant._properties),
- str(other._properties), self._properties['rootObject'].Name()))
- ids[descendant.id] = descendant
-
- def Children(self):
- """Returns a list of all of this object's owned (strong) children."""
-
- children = []
- for property, attributes in self._schema.iteritems():
- (is_list, property_type, is_strong) = attributes[0:3]
- if is_strong and property in self._properties:
- if not is_list:
- children.append(self._properties[property])
- else:
- children.extend(self._properties[property])
- return children
-
- def Descendants(self):
- """Returns a list of all of this object's descendants, including this
- object.
- """
-
- children = self.Children()
- descendants = [self]
- for child in children:
- descendants.extend(child.Descendants())
- return descendants
-
- def PBXProjectAncestor(self):
- # The base case for recursion is defined at PBXProject.PBXProjectAncestor.
- if self.parent:
- return self.parent.PBXProjectAncestor()
- return None
-
- def _EncodeComment(self, comment):
- """Encodes a comment to be placed in the project file output, mimicing
- Xcode behavior.
- """
-
- # This mimics Xcode behavior by wrapping the comment in "/*" and "*/". If
- # the string already contains a "*/", it is turned into "(*)/". This keeps
- # the file writer from outputting something that would be treated as the
- # end of a comment in the middle of something intended to be entirely a
- # comment.
-
- return '/* ' + comment.replace('*/', '(*)/') + ' */'
-
- def _EncodeTransform(self, match):
- # This function works closely with _EncodeString. It will only be called
- # by re.sub with match.group(0) containing a character matched by the
- # _escaped expression.
- char = match.group(0)
-
- # Backslashes (\) and quotation marks (") are always replaced with a
- # backslash-escaped version of the same. Everything else gets its
- # replacement from the class' _encode_transforms array.
- if char == '\\':
- return '\\\\'
- if char == '"':
- return '\\"'
- return self._encode_transforms[ord(char)]
-
- def _EncodeString(self, value):
- """Encodes a string to be placed in the project file output, mimicing
- Xcode behavior.
- """
-
- # Use quotation marks when any character outside of the range A-Z, a-z, 0-9,
- # $ (dollar sign), . (period), and _ (underscore) is present. Also use
- # quotation marks to represent empty strings.
- #
- # Escape " (double-quote) and \ (backslash) by preceding them with a
- # backslash.
- #
- # Some characters below the printable ASCII range are encoded specially:
- # 7 ^G BEL is encoded as "\a"
- # 8 ^H BS is encoded as "\b"
- # 11 ^K VT is encoded as "\v"
- # 12 ^L NP is encoded as "\f"
- # 127 ^? DEL is passed through as-is without escaping
- # - In PBXFileReference and PBXBuildFile objects:
- # 9 ^I HT is passed through as-is without escaping
- # 10 ^J NL is passed through as-is without escaping
- # 13 ^M CR is passed through as-is without escaping
- # - In other objects:
- # 9 ^I HT is encoded as "\t"
- # 10 ^J NL is encoded as "\n"
- # 13 ^M CR is encoded as "\n" rendering it indistinguishable from
- # 10 ^J NL
- # All other characters within the ASCII control character range (0 through
- # 31 inclusive) are encoded as "\U001f" referring to the Unicode code point
- # in hexadecimal. For example, character 14 (^N SO) is encoded as "\U000e".
- # Characters above the ASCII range are passed through to the output encoded
- # as UTF-8 without any escaping. These mappings are contained in the
- # class' _encode_transforms list.
-
- if _unquoted.search(value) and not _quoted.search(value):
- return value
-
- return '"' + _escaped.sub(self._EncodeTransform, value) + '"'
-
- def _XCPrint(self, file, tabs, line):
- file.write('\t' * tabs + line)
-
- def _XCPrintableValue(self, tabs, value, flatten_list=False):
- """Returns a representation of value that may be printed in a project file,
- mimicking Xcode's behavior.
-
- _XCPrintableValue can handle str and int values, XCObjects (which are
- made printable by returning their id property), and list and dict objects
- composed of any of the above types. When printing a list or dict, and
- _should_print_single_line is False, the tabs parameter is used to determine
- how much to indent the lines corresponding to the items in the list or
- dict.
-
- If flatten_list is True, single-element lists will be transformed into
- strings.
- """
-
- printable = ''
- comment = None
-
- if self._should_print_single_line:
- sep = ' '
- element_tabs = ''
- end_tabs = ''
- else:
- sep = '\n'
- element_tabs = '\t' * (tabs + 1)
- end_tabs = '\t' * tabs
-
- if isinstance(value, XCObject):
- printable += value.id
- comment = value.Comment()
- elif isinstance(value, str):
- printable += self._EncodeString(value)
- elif isinstance(value, unicode):
- printable += self._EncodeString(value.encode('utf-8'))
- elif isinstance(value, int):
- printable += str(value)
- elif isinstance(value, list):
- if flatten_list and len(value) <= 1:
- if len(value) == 0:
- printable += self._EncodeString('')
- else:
- printable += self._EncodeString(value[0])
- else:
- printable = '(' + sep
- for item in value:
- printable += element_tabs + \
- self._XCPrintableValue(tabs + 1, item, flatten_list) + \
- ',' + sep
- printable += end_tabs + ')'
- elif isinstance(value, dict):
- printable = '{' + sep
- for item_key, item_value in sorted(value.iteritems()):
- printable += element_tabs + \
- self._XCPrintableValue(tabs + 1, item_key, flatten_list) + ' = ' + \
- self._XCPrintableValue(tabs + 1, item_value, flatten_list) + ';' + \
- sep
- printable += end_tabs + '}'
- else:
- raise TypeError("Can't make " + value.__class__.__name__ + ' printable')
-
- if comment != None:
- printable += ' ' + self._EncodeComment(comment)
-
- return printable
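-
- # For example, with flatten_list=True a single-element list such as
- # ['Cocoa.framework'] prints as the bare string Cocoa.framework, while a
- # longer list still prints as a parenthesized, comma-terminated list in
- # the style Xcode uses.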
-
- def _XCKVPrint(self, file, tabs, key, value):
- """Prints a key and value, members of an XCObject's _properties dictionary,
- to file.
-
- tabs is an int identifying the indentation level. If the class'
- _should_print_single_line variable is True, tabs is ignored and the
- key-value pair will be followed by a space instead of a newline.
- """
-
- if self._should_print_single_line:
- printable = ''
- after_kv = ' '
- else:
- printable = '\t' * tabs
- after_kv = '\n'
-
- # Xcode usually prints remoteGlobalIDString values in PBXContainerItemProxy
- # objects without comments. Sometimes it prints them with comments, but
- # the majority of the time, it doesn't. To avoid unnecessary changes to
- # the project file after Xcode opens it, don't write comments for
- # remoteGlobalIDString. This is a sucky hack and it would certainly be
- # cleaner to extend the schema to indicate whether or not a comment should
- # be printed, but since this is the only case where the problem occurs and
- # Xcode itself can't seem to make up its mind, the hack will suffice.
- #
- # Also see PBXContainerItemProxy._schema['remoteGlobalIDString'].
- if key == 'remoteGlobalIDString' and isinstance(self,
- PBXContainerItemProxy):
- value_to_print = value.id
- else:
- value_to_print = value
-
- # PBXBuildFile's settings property is represented in the output as a dict,
- # but a hack here has it represented as a string. Arrange to strip off the
- # quotes so that it shows up in the output as expected.
- if key == 'settings' and isinstance(self, PBXBuildFile):
- strip_value_quotes = True
- else:
- strip_value_quotes = False
-
- # In another one-off, let's set flatten_list on buildSettings properties
- # of XCBuildConfiguration objects, because that's how Xcode treats them.
- if key == 'buildSettings' and isinstance(self, XCBuildConfiguration):
- flatten_list = True
- else:
- flatten_list = False
-
- try:
- printable_key = self._XCPrintableValue(tabs, key, flatten_list)
- printable_value = self._XCPrintableValue(tabs, value_to_print,
- flatten_list)
- if strip_value_quotes and len(printable_value) > 1 and \
- printable_value[0] == '"' and printable_value[-1] == '"':
- printable_value = printable_value[1:-1]
- printable += printable_key + ' = ' + printable_value + ';' + after_kv
- except TypeError, e:
- gyp.common.ExceptionAppend(e,
- 'while printing key "%s"' % key)
- raise
-
- self._XCPrint(file, 0, printable)
-
- def Print(self, file=sys.stdout):
- """Prints a reprentation of this object to file, adhering to Xcode output
- formatting.
- """
-
- self.VerifyHasRequiredProperties()
-
- if self._should_print_single_line:
- # When printing an object in a single line, Xcode doesn't put any space
- # between the beginning of a dictionary (or presumably a list) and the
- # first contained item, so you wind up with snippets like
- # ...CDEF = {isa = PBXFileReference; fileRef = 0123...
- # If it were me, I would have put a space in there after the opening
- # curly, but I guess this is just another one of those inconsistencies
- # between how Xcode prints PBXFileReference and PBXBuildFile objects as
- # compared to other objects. Mimic Xcode's behavior here by using an
- # empty string for sep.
- sep = ''
- end_tabs = 0
- else:
- sep = '\n'
- end_tabs = 2
-
- # Start the object. For example, '\t\tPBXProject = {\n'.
- self._XCPrint(file, 2, self._XCPrintableValue(2, self) + ' = {' + sep)
-
- # "isa" isn't in the _properties dictionary, it's an intrinsic property
- # of the class which the object belongs to. Xcode always outputs "isa"
- # as the first element of an object dictionary.
- self._XCKVPrint(file, 3, 'isa', self.__class__.__name__)
-
- # The remaining elements of an object dictionary are sorted alphabetically.
- for property, value in sorted(self._properties.iteritems()):
- self._XCKVPrint(file, 3, property, value)
-
- # End the object.
- self._XCPrint(file, end_tabs, '};\n')
-
- def UpdateProperties(self, properties, do_copy=False):
- """Merge the supplied properties into the _properties dictionary.
-
- The input properties must adhere to the class schema or a KeyError or
- TypeError exception will be raised. If adding an object of an XCObject
- subclass and the schema indicates a strong relationship, the object's
- parent will be set to this object.
-
- If do_copy is True, then lists, dicts, strong-owned XCObjects, and
- strong-owned XCObjects in lists will be copied instead of having their
- references added.
- """
-
- if properties is None:
- return
-
- for property, value in properties.iteritems():
- # Make sure the property is in the schema.
- if not property in self._schema:
- raise KeyError(property + ' not in ' + self.__class__.__name__)
-
- # Make sure the property conforms to the schema.
- (is_list, property_type, is_strong) = self._schema[property][0:3]
- if is_list:
- if value.__class__ != list:
- raise TypeError(
- property + ' of ' + self.__class__.__name__ + \
- ' must be list, not ' + value.__class__.__name__)
- for item in value:
- if not isinstance(item, property_type) and \
- not (item.__class__ == unicode and property_type == str):
- # Accept unicode where str is specified. str is treated as
- # UTF-8-encoded.
- raise TypeError(
- 'item of ' + property + ' of ' + self.__class__.__name__ + \
- ' must be ' + property_type.__name__ + ', not ' + \
- item.__class__.__name__)
- elif not isinstance(value, property_type) and \
- not (value.__class__ == unicode and property_type == str):
- # Accept unicode where str is specified. str is treated as
- # UTF-8-encoded.
- raise TypeError(
- property + ' of ' + self.__class__.__name__ + ' must be ' + \
- property_type.__name__ + ', not ' + value.__class__.__name__)
-
- # Checks passed, perform the assignment.
- if do_copy:
- if isinstance(value, XCObject):
- if is_strong:
- self._properties[property] = value.Copy()
- else:
- self._properties[property] = value
- elif isinstance(value, str) or isinstance(value, unicode) or \
- isinstance(value, int):
- self._properties[property] = value
- elif isinstance(value, list):
- if is_strong:
- # If is_strong is True, each element is an XCObject, so it's safe
- # to call Copy.
- self._properties[property] = []
- for item in value:
- self._properties[property].append(item.Copy())
- else:
- self._properties[property] = value[:]
- elif isinstance(value, dict):
- self._properties[property] = value.copy()
- else:
- raise TypeError("Don't know how to copy a " + \
- value.__class__.__name__ + ' object for ' + \
- property + ' in ' + self.__class__.__name__)
- else:
- self._properties[property] = value
-
- # Set up the child's back-reference to this object. Don't use |value|
- # any more because it may not be right if do_copy is true.
- if is_strong:
- if not is_list:
- self._properties[property].parent = self
- else:
- for item in self._properties[property]:
- item.parent = self
-
- def HasProperty(self, key):
- return key in self._properties
-
- def GetProperty(self, key):
- return self._properties[key]
-
- def SetProperty(self, key, value):
- self.UpdateProperties({key: value})
-
- def DelProperty(self, key):
- if key in self._properties:
- del self._properties[key]
-
- def AppendProperty(self, key, value):
- # TODO(mark): Support ExtendProperty too (and make this call that)?
-
- # Schema validation.
- if not key in self._schema:
- raise KeyError(key + ' not in ' + self.__class__.__name__)
-
- (is_list, property_type, is_strong) = self._schema[key][0:3]
- if not is_list:
- raise TypeError(key + ' of ' + self.__class__.__name__ + ' must be list')
- if not isinstance(value, property_type):
- raise TypeError('item of ' + key + ' of ' + self.__class__.__name__ + \
- ' must be ' + property_type.__name__ + ', not ' + \
- value.__class__.__name__)
-
- # If the property doesn't exist yet, create a new empty list to receive the
- # item.
- if not key in self._properties:
- self._properties[key] = []
-
- # Set up the ownership link.
- if is_strong:
- value.parent = self
-
- # Store the item.
- self._properties[key].append(value)
-
- def VerifyHasRequiredProperties(self):
- """Ensure that all properties identified as required by the schema are
- set.
- """
-
- # TODO(mark): A stronger verification mechanism is needed. Some
- # subclasses need to perform validation beyond what the schema can enforce.
- for property, attributes in self._schema.iteritems():
- (is_list, property_type, is_strong, is_required) = attributes[0:4]
- if is_required and not property in self._properties:
- raise KeyError(self.__class__.__name__ + ' requires ' + property)
-
- def _SetDefaultsFromSchema(self):
- """Assign object default values according to the schema. This will not
- overwrite properties that have already been set."""
-
- defaults = {}
- for property, attributes in self._schema.iteritems():
- (is_list, property_type, is_strong, is_required) = attributes[0:4]
- if is_required and len(attributes) >= 5 and \
- not property in self._properties:
- default = attributes[4]
-
- defaults[property] = default
-
- if len(defaults) > 0:
- # Use do_copy=True so that each new object gets its own copy of strong
- # objects, lists, and dicts.
- self.UpdateProperties(defaults, do_copy=True)
-
-
-class XCHierarchicalElement(XCObject):
- """Abstract base for PBXGroup and PBXFileReference. Not represented in a
- project file."""
-
- # TODO(mark): Do name and path belong here? Probably so.
- # If path is set and name is not, name may have a default value. Name will
- # be set to the basename of path, if the basename of path is different from
- # the full value of path. If path is already just a leaf name, name will
- # not be set.
- _schema = XCObject._schema.copy()
- _schema.update({
- 'comments': [0, str, 0, 0],
- 'fileEncoding': [0, str, 0, 0],
- 'includeInIndex': [0, int, 0, 0],
- 'indentWidth': [0, int, 0, 0],
- 'lineEnding': [0, int, 0, 0],
- 'sourceTree': [0, str, 0, 1, '<group>'],
- 'tabWidth': [0, int, 0, 0],
- 'usesTabs': [0, int, 0, 0],
- 'wrapsLines': [0, int, 0, 0],
- })
-
- def __init__(self, properties=None, id=None, parent=None):
- # super
- XCObject.__init__(self, properties, id, parent)
- if 'path' in self._properties and not 'name' in self._properties:
- path = self._properties['path']
- name = posixpath.basename(path)
- if name != '' and path != name:
- self.SetProperty('name', name)
-
- if 'path' in self._properties and \
- (not 'sourceTree' in self._properties or \
- self._properties['sourceTree'] == '<group>'):
- # If the pathname begins with an Xcode variable like "$(SDKROOT)/", take
- # the variable out and make the path be relative to that variable by
- # assigning the variable name as the sourceTree.
- (source_tree, path) = SourceTreeAndPathFromPath(self._properties['path'])
- if source_tree != None:
- self._properties['sourceTree'] = source_tree
- if path != None:
- self._properties['path'] = path
- if source_tree != None and path is None and \
- not 'name' in self._properties:
- # The path was of the form "$(SDKROOT)" with no path following it.
- # This object is now relative to that variable, so it has no path
- # attribute of its own. It does, however, keep a name.
- del self._properties['path']
- self._properties['name'] = source_tree
-
- def Name(self):
- if 'name' in self._properties:
- return self._properties['name']
- elif 'path' in self._properties:
- return self._properties['path']
- else:
- # This happens in the case of the root PBXGroup.
- return None
-
- def Hashables(self):
- """Custom hashables for XCHierarchicalElements.
-
- XCHierarchicalElements are special. Generally, their hashes shouldn't
- change if the paths don't change. The normal XCObject implementation of
- Hashables adds a hashable for each object, which means that if
- the hierarchical structure changes (possibly due to changes caused when
- TakeOverOnlyChild runs and encounters slight changes in the hierarchy),
- the hashes will change. For example, if a project file initially contains
- a/b/f1 and the groups a and b are collapsed into a single group a/b, f1
- will have a single parent a/b. If someone later adds a/f2 to the project
- file, a/b can no longer be collapsed, and f1 winds up with parent b and
- grandparent a. That would be sufficient to change f1's hash.
-
- To counteract this problem, hashables for all XCHierarchicalElements except
- for the main group (which has neither a name nor a path) are taken to be
- just the set of path components. Because hashables are inherited from
- parents, this provides assurance that a/b/f1 has the same set of hashables
- whether its parent is b or a/b.
-
- The main group is a special case. As it is permitted to have no name or
- path, it is permitted to use the standard XCObject hash mechanism. This
- is not considered a problem because there can be only one main group.
- """
-
- if self == self.PBXProjectAncestor()._properties['mainGroup']:
- # super
- return XCObject.Hashables(self)
-
- hashables = []
-
- # Put the name in first, ensuring that if TakeOverOnlyChild collapses
- # children into a top-level group like "Source", the name always goes
- # into the list of hashables without interfering with path components.
- if 'name' in self._properties:
- # Make it less likely for people to manipulate hashes by following the
- # pattern of always pushing an object type value onto the list first.
- hashables.append(self.__class__.__name__ + '.name')
- hashables.append(self._properties['name'])
-
- # NOTE: This still has the problem that if an absolute path is encountered,
- # including paths with a sourceTree, they'll still inherit their parents'
- # hashables, even though the paths aren't relative to their parents. This
- # is not expected to be much of a problem in practice.
- path = self.PathFromSourceTreeAndPath()
- if path != None:
- components = path.split(posixpath.sep)
- for component in components:
- hashables.append(self.__class__.__name__ + '.path')
- hashables.append(component)
-
- hashables.extend(self._hashables)
-
- return hashables
-
- def Compare(self, other):
- # Allow comparison of these types. PBXGroup has the highest sort rank;
- # PBXVariantGroup is treated as equal to PBXFileReference.
- valid_class_types = {
- PBXFileReference: 'file',
- PBXGroup: 'group',
- PBXVariantGroup: 'file',
- }
- self_type = valid_class_types[self.__class__]
- other_type = valid_class_types[other.__class__]
-
- if self_type == other_type:
- # If the two objects are of the same sort rank, compare their names.
- return cmp(self.Name(), other.Name())
-
- # Otherwise, sort groups before everything else.
- if self_type == 'group':
- return -1
- return 1
-
- def CompareRootGroup(self, other):
- # This function should be used only to compare direct children of the
- # containing PBXProject's mainGroup. These groups should appear in the
- # listed order.
- # TODO(mark): "Build" is used by gyp.generator.xcode, perhaps the
- # generator should have a way of influencing this list rather than having
- # to hardcode for the generator here.
- order = ['Source', 'Intermediates', 'Projects', 'Frameworks', 'Products',
- 'Build']
-
- # If the groups aren't in the listed order, do a name comparison.
- # Otherwise, groups in the listed order should come before those that
- # aren't.
- self_name = self.Name()
- other_name = other.Name()
- self_in = isinstance(self, PBXGroup) and self_name in order
- other_in = isinstance(other, PBXGroup) and other_name in order
- if not self_in and not other_in:
- return self.Compare(other)
- if self_name in order and not other_name in order:
- return -1
- if other_name in order and not self_name in order:
- return 1
-
- # If both groups are in the listed order, go by the defined order.
- self_index = order.index(self_name)
- other_index = order.index(other_name)
- if self_index < other_index:
- return -1
- if self_index > other_index:
- return 1
- return 0
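-
- # For instance, under CompareRootGroup 'Source' sorts before 'Frameworks',
- # any listed group sorts before an unlisted one, and two unlisted groups
- # fall back to the name comparison in Compare.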
-
- def PathFromSourceTreeAndPath(self):
- # Turn the object's sourceTree and path properties into a single flat
- # string of a form comparable to the path parameter. If there's a
- # sourceTree property other than "<group>", wrap it in $(...) for the
- # comparison.
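- # For example, sourceTree 'SDKROOT' with path 'usr/lib' yields
- # '$(SDKROOT)/usr/lib'; sourceTree '<group>' with path 'src' yields just
- # 'src'.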
- components = []
- if self._properties['sourceTree'] != '<group>':
- components.append('$(' + self._properties['sourceTree'] + ')')
- if 'path' in self._properties:
- components.append(self._properties['path'])
-
- if len(components) > 0:
- return posixpath.join(*components)
-
- return None
-
- def FullPath(self):
- # Returns a full path to self relative to the project file, or relative
- # to some other source tree. Start with self, and walk up the chain of
- # parents prepending their paths, if any, until no more parents are
- # available (project-relative path) or until a path relative to some
- # source tree is found.
- xche = self
- path = None
- while isinstance(xche, XCHierarchicalElement) and \
- (path is None or \
- (not path.startswith('/') and not path.startswith('$'))):
- this_path = xche.PathFromSourceTreeAndPath()
- if this_path != None and path != None:
- path = posixpath.join(this_path, path)
- elif this_path != None:
- path = this_path
- xche = xche.parent
-
- return path
-
-
-class PBXGroup(XCHierarchicalElement):
- """
- Attributes:
- _children_by_path: Maps pathnames of children of this PBXGroup to the
- actual child XCHierarchicalElement objects.
- _variant_children_by_name_and_path: Maps (name, path) tuples of
- PBXVariantGroup children to the actual child PBXVariantGroup objects.
- """
-
- _schema = XCHierarchicalElement._schema.copy()
- _schema.update({
- 'children': [1, XCHierarchicalElement, 1, 1, []],
- 'name': [0, str, 0, 0],
- 'path': [0, str, 0, 0],
- })
-
- def __init__(self, properties=None, id=None, parent=None):
- # super
- XCHierarchicalElement.__init__(self, properties, id, parent)
- self._children_by_path = {}
- self._variant_children_by_name_and_path = {}
- for child in self._properties.get('children', []):
- self._AddChildToDicts(child)
-
- def Hashables(self):
- # super
- hashables = XCHierarchicalElement.Hashables(self)
-
- # It is not sufficient to just rely on name and parent to build a unique
- # hashable: a node could have two child PBXGroups sharing a common name.
- # To add entropy, the hashable is enhanced with the names of all its
- # children.
- for child in self._properties.get('children', []):
- child_name = child.Name()
- if child_name != None:
- hashables.append(child_name)
-
- return hashables
-
- def HashablesForChild(self):
- # To avoid a circular reference the hashables used to compute a child id do
- # not include the child names.
- return XCHierarchicalElement.Hashables(self)
-
- def _AddChildToDicts(self, child):
- # Sets up this PBXGroup object's dicts to reference the child properly.
- child_path = child.PathFromSourceTreeAndPath()
- if child_path:
- if child_path in self._children_by_path:
- raise ValueError('Found multiple children with path ' + child_path)
- self._children_by_path[child_path] = child
-
- if isinstance(child, PBXVariantGroup):
- child_name = child._properties.get('name', None)
- key = (child_name, child_path)
- if key in self._variant_children_by_name_and_path:
- raise ValueError('Found multiple PBXVariantGroup children with ' + \
- 'name ' + str(child_name) + ' and path ' + \
- str(child_path))
- self._variant_children_by_name_and_path[key] = child
-
- def AppendChild(self, child):
- # Callers should use this instead of calling
- # AppendProperty('children', child) directly because this function
- # maintains the group's dicts.
- self.AppendProperty('children', child)
- self._AddChildToDicts(child)
-
- def GetChildByName(self, name):
- # This is not currently optimized with a dict as GetChildByPath is because
- # it has few callers. Most callers probably want GetChildByPath. This
- # function is only useful to get children that have names but no paths,
- # which is rare. The children of the main group ("Source", "Products",
- # etc.) are pretty much the only case where this is likely to come up.
- #
- # TODO(mark): Maybe this should raise an error if more than one child is
- # present with the same name.
- if not 'children' in self._properties:
- return None
-
- for child in self._properties['children']:
- if child.Name() == name:
- return child
-
- return None
-
- def GetChildByPath(self, path):
- if not path:
- return None
-
- if path in self._children_by_path:
- return self._children_by_path[path]
-
- return None
-
- def GetChildByRemoteObject(self, remote_object):
- # This method is a little bit esoteric. Given a remote_object, which
- # should be a PBXFileReference in another project file, this method will
- # return this group's PBXReferenceProxy object serving as a local proxy
- # for the remote PBXFileReference.
- #
- # This function might benefit from a dict optimization as GetChildByPath
- # for some workloads, but profiling shows that it's not currently a
- # problem.
- if not 'children' in self._properties:
- return None
-
- for child in self._properties['children']:
- if not isinstance(child, PBXReferenceProxy):
- continue
-
- container_proxy = child._properties['remoteRef']
- if container_proxy._properties['remoteGlobalIDString'] == remote_object:
- return child
-
- return None
-
- def AddOrGetFileByPath(self, path, hierarchical):
- """Returns an existing or new file reference corresponding to path.
-
- If hierarchical is True, this method will create or use the necessary
- hierarchical group structure corresponding to path. Otherwise, it will
- look in and create an item in the current group only.
-
- If an existing matching reference is found, it is returned, otherwise, a
- new one will be created, added to the correct group, and returned.
-
- If path identifies a directory by virtue of carrying a trailing slash,
- this method returns a PBXFileReference of "folder" type. If path
- identifies a variant, by virtue of it identifying a file inside a directory
- with an ".lproj" extension, this method returns a PBXVariantGroup
- containing the variant named by path, and possibly other variants. For
- all other paths, a "normal" PBXFileReference will be returned.
- """
-
- # Adding or getting a directory? Directories end with a trailing slash.
- is_dir = False
- if path.endswith('/'):
- is_dir = True
- path = posixpath.normpath(path)
- if is_dir:
- path = path + '/'
-
- # Adding or getting a variant? Variants are files inside directories
- # with an ".lproj" extension. Xcode uses variants for localization. For
- # a variant path/to/Language.lproj/MainMenu.nib, put a variant group named
- # MainMenu.nib inside path/to, and give it a variant named Language. In
- # this example, grandparent would be set to path/to and parent_root would
- # be set to Language.
- variant_name = None
- parent = posixpath.dirname(path)
- grandparent = posixpath.dirname(parent)
- parent_basename = posixpath.basename(parent)
- (parent_root, parent_ext) = posixpath.splitext(parent_basename)
- if parent_ext == '.lproj':
- variant_name = parent_root
- if grandparent == '':
- grandparent = None
-
- # Putting a directory inside a variant group is not currently supported.
- assert not is_dir or variant_name is None
-
- path_split = path.split(posixpath.sep)
- if len(path_split) == 1 or \
- ((is_dir or variant_name != None) and len(path_split) == 2) or \
- not hierarchical:
- # The PBXFileReference or PBXVariantGroup will be added to or gotten from
- # this PBXGroup, no recursion necessary.
- if variant_name is None:
- # Add or get a PBXFileReference.
- file_ref = self.GetChildByPath(path)
- if file_ref != None:
- assert file_ref.__class__ == PBXFileReference
- else:
- file_ref = PBXFileReference({'path': path})
- self.AppendChild(file_ref)
- else:
- # Add or get a PBXVariantGroup. The variant group name is the same
- # as the basename (MainMenu.nib in the example above). grandparent
- # specifies the path to the variant group itself, and path_split[-2:]
- # is the path of the specific variant relative to its group.
- variant_group_name = posixpath.basename(path)
- variant_group_ref = self.AddOrGetVariantGroupByNameAndPath(
- variant_group_name, grandparent)
- variant_path = posixpath.sep.join(path_split[-2:])
- variant_ref = variant_group_ref.GetChildByPath(variant_path)
- if variant_ref != None:
- assert variant_ref.__class__ == PBXFileReference
- else:
- variant_ref = PBXFileReference({'name': variant_name,
- 'path': variant_path})
- variant_group_ref.AppendChild(variant_ref)
- # The caller is interested in the variant group, not the specific
- # variant file.
- file_ref = variant_group_ref
- return file_ref
- else:
- # Hierarchical recursion. Add or get a PBXGroup corresponding to the
- # outermost path component, and then recurse into it, chopping off that
- # path component.
- next_dir = path_split[0]
- group_ref = self.GetChildByPath(next_dir)
- if group_ref != None:
- assert group_ref.__class__ == PBXGroup
- else:
- group_ref = PBXGroup({'path': next_dir})
- self.AppendChild(group_ref)
- return group_ref.AddOrGetFileByPath(posixpath.sep.join(path_split[1:]),
- hierarchical)
-
- def AddOrGetVariantGroupByNameAndPath(self, name, path):
- """Returns an existing or new PBXVariantGroup for name and path.
-
- If a PBXVariantGroup identified by the name and path arguments is already
- present as a child of this object, it is returned. Otherwise, a new
- PBXVariantGroup with the correct properties is created, added as a child,
- and returned.
-
- This method will generally be called by AddOrGetFileByPath, which knows
- when to create a variant group based on the structure of the pathnames
- passed to it.
- """
-
- key = (name, path)
- if key in self._variant_children_by_name_and_path:
- variant_group_ref = self._variant_children_by_name_and_path[key]
- assert variant_group_ref.__class__ == PBXVariantGroup
- return variant_group_ref
-
- variant_group_properties = {'name': name}
- if path != None:
- variant_group_properties['path'] = path
- variant_group_ref = PBXVariantGroup(variant_group_properties)
- self.AppendChild(variant_group_ref)
-
- return variant_group_ref
-
- def TakeOverOnlyChild(self, recurse=False):
- """If this PBXGroup has only one child and it's also a PBXGroup, take
- it over by making all of its children this object's children.
-
- This function will continue to take over only children when those children
- are groups. If there are three PBXGroups representing a, b, and c, with
- c inside b and b inside a, and a and b have no other children, this will
- result in a taking over both b and c, forming a PBXGroup for a/b/c.
-
- If recurse is True, this function will recurse into children and ask them
- to collapse themselves by taking over only children as well. Assuming
- an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f
- (d1, d2, and f are files, the rest are groups), recursion will result in
- a group for a/b/c containing a group for d3/e.
- """
-
- # At this stage, check that child class types are PBXGroup exactly,
- # instead of using isinstance. The only subclass of PBXGroup,
- # PBXVariantGroup, should not participate in reparenting in the same way:
- # reparenting by merging different object types would be wrong.
- while len(self._properties['children']) == 1 and \
- self._properties['children'][0].__class__ == PBXGroup:
- # Loop to take over the innermost only-child group possible.
-
- child = self._properties['children'][0]
-
- # Assume the child's properties, including its children. Save a copy
- # of this object's old properties, because they'll still be needed.
- # This object retains its existing id and parent attributes.
- old_properties = self._properties
- self._properties = child._properties
- self._children_by_path = child._children_by_path
-
- if not 'sourceTree' in self._properties or \
- self._properties['sourceTree'] == '<group>':
- # The child was relative to its parent. Fix up the path. Note that
- # children with a sourceTree other than "<group>" are not relative to
- # their parents, so no path fix-up is needed in that case.
- if 'path' in old_properties:
- if 'path' in self._properties:
- # Both the original parent and child have paths set.
- self._properties['path'] = posixpath.join(old_properties['path'],
- self._properties['path'])
- else:
- # Only the original parent has a path, use it.
- self._properties['path'] = old_properties['path']
- if 'sourceTree' in old_properties:
- # The original parent had a sourceTree set, use it.
- self._properties['sourceTree'] = old_properties['sourceTree']
-
- # If the original parent had a name set, keep using it. If the original
- # parent didn't have a name but the child did, let the child's name
- # live on. If the name attribute seems unnecessary now, get rid of it.
- if 'name' in old_properties and old_properties['name'] != None and \
- old_properties['name'] != self.Name():
- self._properties['name'] = old_properties['name']
- if 'name' in self._properties and 'path' in self._properties and \
- self._properties['name'] == self._properties['path']:
- del self._properties['name']
-
- # Notify all children of their new parent.
- for child in self._properties['children']:
- child.parent = self
-
- # If asked to recurse, recurse.
- if recurse:
- for child in self._properties['children']:
- if child.__class__ == PBXGroup:
- child.TakeOverOnlyChild(recurse)
-
- def SortGroup(self):
- self._properties['children'] = \
- sorted(self._properties['children'], cmp=lambda x,y: x.Compare(y))
-
- # Recurse.
- for child in self._properties['children']:
- if isinstance(child, PBXGroup):
- child.SortGroup()
-
-
-class XCFileLikeElement(XCHierarchicalElement):
- # Abstract base for objects that can be used as the fileRef property of
- # PBXBuildFile.
-
- def PathHashables(self):
- # A PBXBuildFile that refers to this object will call this method to
- # obtain additional hashables specific to this XCFileLikeElement. Don't
- # just use this object's hashables, they're not specific and unique enough
- # on their own (without access to the parent hashables.) Instead, provide
- # hashables that identify this object by path by getting its hashables as
- # well as the hashables of ancestor XCHierarchicalElement objects.
-
- hashables = []
- xche = self
- while xche != None and isinstance(xche, XCHierarchicalElement):
- xche_hashables = xche.Hashables()
- for index in xrange(0, len(xche_hashables)):
- hashables.insert(index, xche_hashables[index])
- xche = xche.parent
- return hashables
-
-
-class XCContainerPortal(XCObject):
- # Abstract base for objects that can be used as the containerPortal property
- # of PBXContainerItemProxy.
- pass
-
-
-class XCRemoteObject(XCObject):
- # Abstract base for objects that can be used as the remoteGlobalIDString
- # property of PBXContainerItemProxy.
- pass
-
-
-class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
- _schema = XCFileLikeElement._schema.copy()
- _schema.update({
- 'explicitFileType': [0, str, 0, 0],
- 'lastKnownFileType': [0, str, 0, 0],
- 'name': [0, str, 0, 0],
- 'path': [0, str, 0, 1],
- })
-
- # Weird output rules for PBXFileReference.
- _should_print_single_line = True
- # super
- _encode_transforms = XCFileLikeElement._alternate_encode_transforms
-
- def __init__(self, properties=None, id=None, parent=None):
- # super
- XCFileLikeElement.__init__(self, properties, id, parent)
- if 'path' in self._properties and self._properties['path'].endswith('/'):
- self._properties['path'] = self._properties['path'][:-1]
- is_dir = True
- else:
- is_dir = False
-
- if 'path' in self._properties and \
- not 'lastKnownFileType' in self._properties and \
- not 'explicitFileType' in self._properties:
- # TODO(mark): This is the replacement for a replacement for a quick hack.
- # It is no longer incredibly sucky, but this list needs to be extended.
- extension_map = {
- 'a': 'archive.ar',
- 'app': 'wrapper.application',
- 'bdic': 'file',
- 'bundle': 'wrapper.cfbundle',
- 'c': 'sourcecode.c.c',
- 'cc': 'sourcecode.cpp.cpp',
- 'cpp': 'sourcecode.cpp.cpp',
- 'css': 'text.css',
- 'cxx': 'sourcecode.cpp.cpp',
- 'dart': 'sourcecode',
- 'dylib': 'compiled.mach-o.dylib',
- 'framework': 'wrapper.framework',
- 'gyp': 'sourcecode',
- 'gypi': 'sourcecode',
- 'h': 'sourcecode.c.h',
- 'hxx': 'sourcecode.cpp.h',
- 'icns': 'image.icns',
- 'java': 'sourcecode.java',
- 'js': 'sourcecode.javascript',
- 'kext': 'wrapper.kext',
- 'm': 'sourcecode.c.objc',
- 'mm': 'sourcecode.cpp.objcpp',
- 'nib': 'wrapper.nib',
- 'o': 'compiled.mach-o.objfile',
- 'pdf': 'image.pdf',
- 'pl': 'text.script.perl',
- 'plist': 'text.plist.xml',
- 'pm': 'text.script.perl',
- 'png': 'image.png',
- 'py': 'text.script.python',
- 'r': 'sourcecode.rez',
- 'rez': 'sourcecode.rez',
- 's': 'sourcecode.asm',
- 'storyboard': 'file.storyboard',
- 'strings': 'text.plist.strings',
- 'swift': 'sourcecode.swift',
- 'ttf': 'file',
- 'xcassets': 'folder.assetcatalog',
- 'xcconfig': 'text.xcconfig',
- 'xcdatamodel': 'wrapper.xcdatamodel',
-        'xcdatamodeld': 'wrapper.xcdatamodeld',
- 'xib': 'file.xib',
- 'y': 'sourcecode.yacc',
- }
-
- prop_map = {
- 'dart': 'explicitFileType',
- 'gyp': 'explicitFileType',
- 'gypi': 'explicitFileType',
- }
-
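-      # For example (hypothetical): a path of 'main.cc' maps to
-      # lastKnownFileType 'sourcecode.cpp.cpp', while 'build.gyp' gets
-      # explicitFileType 'sourcecode' per prop_map above.
-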
- if is_dir:
- file_type = 'folder'
- prop_name = 'lastKnownFileType'
- else:
- basename = posixpath.basename(self._properties['path'])
- (root, ext) = posixpath.splitext(basename)
- # Check the map using a lowercase extension.
- # TODO(mark): Maybe it should try with the original case first and fall
- # back to lowercase, in case there are any instances where case
- # matters. There currently aren't.
- if ext != '':
- ext = ext[1:].lower()
-
- # TODO(mark): "text" is the default value, but "file" is appropriate
- # for unrecognized files not containing text. Xcode seems to choose
- # based on content.
- file_type = extension_map.get(ext, 'text')
- prop_name = prop_map.get(ext, 'lastKnownFileType')
-
- self._properties[prop_name] = file_type
-
-
-class PBXVariantGroup(PBXGroup, XCFileLikeElement):
- """PBXVariantGroup is used by Xcode to represent localizations."""
- # No additions to the schema relative to PBXGroup.
- pass
-
-
-# PBXReferenceProxy is also an XCFileLikeElement subclass. It is defined below
-# because it uses PBXContainerItemProxy, defined below.
-
-
-class XCBuildConfiguration(XCObject):
- _schema = XCObject._schema.copy()
- _schema.update({
- 'baseConfigurationReference': [0, PBXFileReference, 0, 0],
- 'buildSettings': [0, dict, 0, 1, {}],
- 'name': [0, str, 0, 1],
- })
-
- def HasBuildSetting(self, key):
- return key in self._properties['buildSettings']
-
- def GetBuildSetting(self, key):
- return self._properties['buildSettings'][key]
-
- def SetBuildSetting(self, key, value):
- # TODO(mark): If a list, copy?
- self._properties['buildSettings'][key] = value
-
- def AppendBuildSetting(self, key, value):
- if not key in self._properties['buildSettings']:
- self._properties['buildSettings'][key] = []
- self._properties['buildSettings'][key].append(value)
-
- def DelBuildSetting(self, key):
- if key in self._properties['buildSettings']:
- del self._properties['buildSettings'][key]
-
- def SetBaseConfiguration(self, value):
- self._properties['baseConfigurationReference'] = value
-
-class XCConfigurationList(XCObject):
- # _configs is the default list of configurations.
- _configs = [ XCBuildConfiguration({'name': 'Debug'}),
- XCBuildConfiguration({'name': 'Release'}) ]
-
- _schema = XCObject._schema.copy()
- _schema.update({
- 'buildConfigurations': [1, XCBuildConfiguration, 1, 1, _configs],
- 'defaultConfigurationIsVisible': [0, int, 0, 1, 1],
- 'defaultConfigurationName': [0, str, 0, 1, 'Release'],
- })
-
- def Name(self):
- return 'Build configuration list for ' + \
- self.parent.__class__.__name__ + ' "' + self.parent.Name() + '"'
-
- def ConfigurationNamed(self, name):
- """Convenience accessor to obtain an XCBuildConfiguration by name."""
- for configuration in self._properties['buildConfigurations']:
- if configuration._properties['name'] == name:
- return configuration
-
- raise KeyError(name)
-
- def DefaultConfiguration(self):
- """Convenience accessor to obtain the default XCBuildConfiguration."""
- return self.ConfigurationNamed(self._properties['defaultConfigurationName'])
-
- def HasBuildSetting(self, key):
- """Determines the state of a build setting in all XCBuildConfiguration
- child objects.
-
- If all child objects have key in their build settings, and the value is the
- same in all child objects, returns 1.
-
- If no child objects have the key in their build settings, returns 0.
-
- If some, but not all, child objects have the key in their build settings,
- or if any children have different values for the key, returns -1.
- """
-
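-    # For example (hypothetical): with Debug and Release children that both
-    # set GCC_OPTIMIZATION_LEVEL to '0', this returns 1; if only one of them
-    # sets it, or if their values differ, it returns -1; if neither sets it,
-    # it returns 0.
-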
- has = None
- value = None
- for configuration in self._properties['buildConfigurations']:
- configuration_has = configuration.HasBuildSetting(key)
- if has is None:
- has = configuration_has
- elif has != configuration_has:
- return -1
-
- if configuration_has:
- configuration_value = configuration.GetBuildSetting(key)
- if value is None:
- value = configuration_value
- elif value != configuration_value:
- return -1
-
- if not has:
- return 0
-
- return 1
-
- def GetBuildSetting(self, key):
- """Gets the build setting for key.
-
- All child XCConfiguration objects must have the same value set for the
- setting, or a ValueError will be raised.
- """
-
- # TODO(mark): This is wrong for build settings that are lists. The list
- # contents should be compared (and a list copy returned?)
-
- value = None
- for configuration in self._properties['buildConfigurations']:
- configuration_value = configuration.GetBuildSetting(key)
- if value is None:
- value = configuration_value
- else:
- if value != configuration_value:
- raise ValueError('Variant values for ' + key)
-
- return value
-
- def SetBuildSetting(self, key, value):
- """Sets the build setting for key to value in all child
- XCBuildConfiguration objects.
- """
-
- for configuration in self._properties['buildConfigurations']:
- configuration.SetBuildSetting(key, value)
-
- def AppendBuildSetting(self, key, value):
- """Appends value to the build setting for key, which is treated as a list,
- in all child XCBuildConfiguration objects.
- """
-
- for configuration in self._properties['buildConfigurations']:
- configuration.AppendBuildSetting(key, value)
-
- def DelBuildSetting(self, key):
- """Deletes the build setting key from all child XCBuildConfiguration
- objects.
- """
-
- for configuration in self._properties['buildConfigurations']:
- configuration.DelBuildSetting(key)
-
- def SetBaseConfiguration(self, value):
- """Sets the build configuration in all child XCBuildConfiguration objects.
- """
-
- for configuration in self._properties['buildConfigurations']:
- configuration.SetBaseConfiguration(value)
-
-
-class PBXBuildFile(XCObject):
- _schema = XCObject._schema.copy()
- _schema.update({
- 'fileRef': [0, XCFileLikeElement, 0, 1],
- 'settings': [0, str, 0, 0], # hack, it's a dict
- })
-
- # Weird output rules for PBXBuildFile.
- _should_print_single_line = True
- _encode_transforms = XCObject._alternate_encode_transforms
-
- def Name(self):
- # Example: "main.cc in Sources"
- return self._properties['fileRef'].Name() + ' in ' + self.parent.Name()
-
- def Hashables(self):
- # super
- hashables = XCObject.Hashables(self)
-
- # It is not sufficient to just rely on Name() to get the
- # XCFileLikeElement's name, because that is not a complete pathname.
- # PathHashables returns hashables unique enough that no two
- # PBXBuildFiles should wind up with the same set of hashables, unless
- # someone adds the same file multiple times to the same target. That
- # would be considered invalid anyway.
- hashables.extend(self._properties['fileRef'].PathHashables())
-
- return hashables
-
-
-class XCBuildPhase(XCObject):
- """Abstract base for build phase classes. Not represented in a project
- file.
-
- Attributes:
-    _files_by_path: A dict mapping each path (keys) of a child in the files
-      list to the corresponding PBXBuildFile children (values).
- _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys)
- to the corresponding PBXBuildFile children (values).
- """
-
- # TODO(mark): Some build phase types, like PBXShellScriptBuildPhase, don't
- # actually have a "files" list. XCBuildPhase should not have "files" but
- # another abstract subclass of it should provide this, and concrete build
- # phase types that do have "files" lists should be derived from that new
- # abstract subclass. XCBuildPhase should only provide buildActionMask and
- # runOnlyForDeploymentPostprocessing, and not files or the various
- # file-related methods and attributes.
-
- _schema = XCObject._schema.copy()
- _schema.update({
- 'buildActionMask': [0, int, 0, 1, 0x7fffffff],
- 'files': [1, PBXBuildFile, 1, 1, []],
- 'runOnlyForDeploymentPostprocessing': [0, int, 0, 1, 0],
- })
-
- def __init__(self, properties=None, id=None, parent=None):
- # super
- XCObject.__init__(self, properties, id, parent)
-
- self._files_by_path = {}
- self._files_by_xcfilelikeelement = {}
- for pbxbuildfile in self._properties.get('files', []):
- self._AddBuildFileToDicts(pbxbuildfile)
-
- def FileGroup(self, path):
- # Subclasses must override this by returning a two-element tuple. The
- # first item in the tuple should be the PBXGroup to which "path" should be
- # added, either as a child or deeper descendant. The second item should
- # be a boolean indicating whether files should be added into hierarchical
- # groups or one single flat group.
- raise NotImplementedError(
- self.__class__.__name__ + ' must implement FileGroup')
-
- def _AddPathToDict(self, pbxbuildfile, path):
- """Adds path to the dict tracking paths belonging to this build phase.
-
- If the path is already a member of this build phase, raises an exception.
- """
-
- if path in self._files_by_path:
- raise ValueError('Found multiple build files with path ' + path)
- self._files_by_path[path] = pbxbuildfile
-
- def _AddBuildFileToDicts(self, pbxbuildfile, path=None):
- """Maintains the _files_by_path and _files_by_xcfilelikeelement dicts.
-
- If path is specified, then it is the path that is being added to the
- phase, and pbxbuildfile must contain either a PBXFileReference directly
- referencing that path, or it must contain a PBXVariantGroup that itself
- contains a PBXFileReference referencing the path.
-
- If path is not specified, either the PBXFileReference's path or the paths
- of all children of the PBXVariantGroup are taken as being added to the
- phase.
-
- If the path is already present in the phase, raises an exception.
-
- If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile
- are already present in the phase, referenced by a different PBXBuildFile
- object, raises an exception. This does not raise an exception when
- a PBXFileReference or PBXVariantGroup reappear and are referenced by the
- same PBXBuildFile that has already introduced them, because in the case
- of PBXVariantGroup objects, they may correspond to multiple paths that are
- not all added simultaneously. When this situation occurs, the path needs
- to be added to _files_by_path, but nothing needs to change in
- _files_by_xcfilelikeelement, and the caller should have avoided adding
- the PBXBuildFile if it is already present in the list of children.
- """
-
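-    # For example (hypothetical): a PBXBuildFile whose fileRef is a
-    # PBXVariantGroup named Main.nib may be added once per variant path
-    # (en.lproj/Main.nib, fr.lproj/Main.nib); _files_by_path then gains one
-    # entry per path, while _files_by_xcfilelikeelement keeps a single entry
-    # for the group.
-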
- xcfilelikeelement = pbxbuildfile._properties['fileRef']
-
- paths = []
- if path != None:
- # It's best when the caller provides the path.
- if isinstance(xcfilelikeelement, PBXVariantGroup):
- paths.append(path)
- else:
- # If the caller didn't provide a path, there can be either multiple
- # paths (PBXVariantGroup) or one.
- if isinstance(xcfilelikeelement, PBXVariantGroup):
- for variant in xcfilelikeelement._properties['children']:
- paths.append(variant.FullPath())
- else:
- paths.append(xcfilelikeelement.FullPath())
-
- # Add the paths first, because if something's going to raise, the
- # messages provided by _AddPathToDict are more useful owing to its
- # having access to a real pathname and not just an object's Name().
- for a_path in paths:
- self._AddPathToDict(pbxbuildfile, a_path)
-
- # If another PBXBuildFile references this XCFileLikeElement, there's a
- # problem.
- if xcfilelikeelement in self._files_by_xcfilelikeelement and \
- self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile:
- raise ValueError('Found multiple build files for ' + \
- xcfilelikeelement.Name())
- self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile
-
- def AppendBuildFile(self, pbxbuildfile, path=None):
- # Callers should use this instead of calling
- # AppendProperty('files', pbxbuildfile) directly because this function
- # maintains the object's dicts. Better yet, callers can just call AddFile
- # with a pathname and not worry about building their own PBXBuildFile
- # objects.
- self.AppendProperty('files', pbxbuildfile)
- self._AddBuildFileToDicts(pbxbuildfile, path)
-
- def AddFile(self, path, settings=None):
- (file_group, hierarchical) = self.FileGroup(path)
- file_ref = file_group.AddOrGetFileByPath(path, hierarchical)
-
- if file_ref in self._files_by_xcfilelikeelement and \
- isinstance(file_ref, PBXVariantGroup):
- # There's already a PBXBuildFile in this phase corresponding to the
- # PBXVariantGroup. path just provides a new variant that belongs to
- # the group. Add the path to the dict.
- pbxbuildfile = self._files_by_xcfilelikeelement[file_ref]
- self._AddBuildFileToDicts(pbxbuildfile, path)
- else:
- # Add a new PBXBuildFile to get file_ref into the phase.
- if settings is None:
- pbxbuildfile = PBXBuildFile({'fileRef': file_ref})
- else:
- pbxbuildfile = PBXBuildFile({'fileRef': file_ref, 'settings': settings})
- self.AppendBuildFile(pbxbuildfile, path)
-
-
-class PBXHeadersBuildPhase(XCBuildPhase):
- # No additions to the schema relative to XCBuildPhase.
-
- def Name(self):
- return 'Headers'
-
- def FileGroup(self, path):
- return self.PBXProjectAncestor().RootGroupForPath(path)
-
-
-class PBXResourcesBuildPhase(XCBuildPhase):
- # No additions to the schema relative to XCBuildPhase.
-
- def Name(self):
- return 'Resources'
-
- def FileGroup(self, path):
- return self.PBXProjectAncestor().RootGroupForPath(path)
-
-
-class PBXSourcesBuildPhase(XCBuildPhase):
- # No additions to the schema relative to XCBuildPhase.
-
- def Name(self):
- return 'Sources'
-
- def FileGroup(self, path):
- return self.PBXProjectAncestor().RootGroupForPath(path)
-
-
-class PBXFrameworksBuildPhase(XCBuildPhase):
- # No additions to the schema relative to XCBuildPhase.
-
- def Name(self):
- return 'Frameworks'
-
- def FileGroup(self, path):
- (root, ext) = posixpath.splitext(path)
- if ext != '':
- ext = ext[1:].lower()
- if ext == 'o':
- # .o files are added to Xcode Frameworks phases, but conceptually aren't
- # frameworks, they're more like sources or intermediates. Redirect them
- # to show up in one of those other groups.
- return self.PBXProjectAncestor().RootGroupForPath(path)
- else:
- return (self.PBXProjectAncestor().FrameworksGroup(), False)
-
-
-class PBXShellScriptBuildPhase(XCBuildPhase):
- _schema = XCBuildPhase._schema.copy()
- _schema.update({
- 'inputPaths': [1, str, 0, 1, []],
- 'name': [0, str, 0, 0],
- 'outputPaths': [1, str, 0, 1, []],
- 'shellPath': [0, str, 0, 1, '/bin/sh'],
- 'shellScript': [0, str, 0, 1],
- 'showEnvVarsInLog': [0, int, 0, 0],
- })
-
- def Name(self):
- if 'name' in self._properties:
- return self._properties['name']
-
- return 'ShellScript'
-
-
-class PBXCopyFilesBuildPhase(XCBuildPhase):
- _schema = XCBuildPhase._schema.copy()
- _schema.update({
- 'dstPath': [0, str, 0, 1],
- 'dstSubfolderSpec': [0, int, 0, 1],
- 'name': [0, str, 0, 0],
- })
-
- # path_tree_re matches "$(DIR)/path" or just "$(DIR)". Match group 1 is
- # "DIR", match group 3 is "path" or None.
- path_tree_re = re.compile('^\\$\\((.*)\\)(/(.*)|)$')
-
- # path_tree_to_subfolder maps names of Xcode variables to the associated
- # dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase object.
- path_tree_to_subfolder = {
- 'BUILT_FRAMEWORKS_DIR': 10, # Frameworks Directory
- 'BUILT_PRODUCTS_DIR': 16, # Products Directory
- # Other types that can be chosen via the Xcode UI.
- # TODO(mark): Map Xcode variable names to these.
- # : 1, # Wrapper
-      # : 6,      # Executables
- # : 7, # Resources
- # : 15, # Java Resources
- # : 11, # Shared Frameworks
- # : 12, # Shared Support
- # : 13, # PlugIns
- }
-
- def Name(self):
- if 'name' in self._properties:
- return self._properties['name']
-
- return 'CopyFiles'
-
- def FileGroup(self, path):
- return self.PBXProjectAncestor().RootGroupForPath(path)
-
- def SetDestination(self, path):
- """Set the dstSubfolderSpec and dstPath properties from path.
-
- path may be specified in the same notation used for XCHierarchicalElements,
- specifically, "$(DIR)/path".
- """
-
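-    # For example (hypothetical): SetDestination('$(BUILT_PRODUCTS_DIR)/lib')
-    # yields dstSubfolderSpec 16 and dstPath 'lib', while
-    # SetDestination('/usr/local/lib') yields dstSubfolderSpec 0 and dstPath
-    # 'usr/local/lib'.
-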
- path_tree_match = self.path_tree_re.search(path)
- if path_tree_match:
- # Everything else needs to be relative to an Xcode variable.
- path_tree = path_tree_match.group(1)
- relative_path = path_tree_match.group(3)
-
- if path_tree in self.path_tree_to_subfolder:
- subfolder = self.path_tree_to_subfolder[path_tree]
- if relative_path is None:
- relative_path = ''
- else:
- # The path starts with an unrecognized Xcode variable
- # name like $(SRCROOT). Xcode will still handle this
- # as an "absolute path" that starts with the variable.
- subfolder = 0
- relative_path = path
- elif path.startswith('/'):
- # Special case. Absolute paths are in dstSubfolderSpec 0.
- subfolder = 0
- relative_path = path[1:]
- else:
- raise ValueError('Can\'t use path %s in a %s' % \
- (path, self.__class__.__name__))
-
- self._properties['dstPath'] = relative_path
- self._properties['dstSubfolderSpec'] = subfolder
-
-
-class PBXBuildRule(XCObject):
- _schema = XCObject._schema.copy()
- _schema.update({
- 'compilerSpec': [0, str, 0, 1],
- 'filePatterns': [0, str, 0, 0],
- 'fileType': [0, str, 0, 1],
- 'isEditable': [0, int, 0, 1, 1],
- 'outputFiles': [1, str, 0, 1, []],
- 'script': [0, str, 0, 0],
- })
-
- def Name(self):
- # Not very inspired, but it's what Xcode uses.
- return self.__class__.__name__
-
- def Hashables(self):
- # super
- hashables = XCObject.Hashables(self)
-
- # Use the hashables of the weak objects that this object refers to.
- hashables.append(self._properties['fileType'])
- if 'filePatterns' in self._properties:
- hashables.append(self._properties['filePatterns'])
- return hashables
-
-
-class PBXContainerItemProxy(XCObject):
- # When referencing an item in this project file, containerPortal is the
- # PBXProject root object of this project file. When referencing an item in
- # another project file, containerPortal is a PBXFileReference identifying
- # the other project file.
- #
- # When serving as a proxy to an XCTarget (in this project file or another),
- # proxyType is 1. When serving as a proxy to a PBXFileReference (in another
- # project file), proxyType is 2. Type 2 is used for references to the
-  # products of the other project file's targets.
- #
- # Xcode is weird about remoteGlobalIDString. Usually, it's printed without
- # a comment, indicating that it's tracked internally simply as a string, but
- # sometimes it's printed with a comment (usually when the object is initially
- # created), indicating that it's tracked as a project file object at least
- # sometimes. This module always tracks it as an object, but contains a hack
- # to prevent it from printing the comment in the project file output. See
- # _XCKVPrint.
- _schema = XCObject._schema.copy()
- _schema.update({
- 'containerPortal': [0, XCContainerPortal, 0, 1],
- 'proxyType': [0, int, 0, 1],
- 'remoteGlobalIDString': [0, XCRemoteObject, 0, 1],
- 'remoteInfo': [0, str, 0, 1],
- })
-
- def __repr__(self):
- props = self._properties
- name = '%s.gyp:%s' % (props['containerPortal'].Name(), props['remoteInfo'])
- return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
-
- def Name(self):
- # Admittedly not the best name, but it's what Xcode uses.
- return self.__class__.__name__
-
- def Hashables(self):
- # super
- hashables = XCObject.Hashables(self)
-
- # Use the hashables of the weak objects that this object refers to.
- hashables.extend(self._properties['containerPortal'].Hashables())
- hashables.extend(self._properties['remoteGlobalIDString'].Hashables())
- return hashables
-
-
-class PBXTargetDependency(XCObject):
- # The "target" property accepts an XCTarget object, and obviously not
- # NoneType. But XCTarget is defined below, so it can't be put into the
- # schema yet. The definition of PBXTargetDependency can't be moved below
- # XCTarget because XCTarget's own schema references PBXTargetDependency.
- # Python doesn't deal well with this circular relationship, and doesn't have
- # a real way to do forward declarations. To work around, the type of
- # the "target" property is reset below, after XCTarget is defined.
- #
- # At least one of "name" and "target" is required.
- _schema = XCObject._schema.copy()
- _schema.update({
- 'name': [0, str, 0, 0],
- 'target': [0, None.__class__, 0, 0],
- 'targetProxy': [0, PBXContainerItemProxy, 1, 1],
- })
-
- def __repr__(self):
- name = self._properties.get('name') or self._properties['target'].Name()
- return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
-
- def Name(self):
- # Admittedly not the best name, but it's what Xcode uses.
- return self.__class__.__name__
-
- def Hashables(self):
- # super
- hashables = XCObject.Hashables(self)
-
- # Use the hashables of the weak objects that this object refers to.
- hashables.extend(self._properties['targetProxy'].Hashables())
- return hashables
-
-
-class PBXReferenceProxy(XCFileLikeElement):
- _schema = XCFileLikeElement._schema.copy()
- _schema.update({
- 'fileType': [0, str, 0, 1],
- 'path': [0, str, 0, 1],
- 'remoteRef': [0, PBXContainerItemProxy, 1, 1],
- })
-
-
-class XCTarget(XCRemoteObject):
-  # An XCTarget is really just an XCObject, the XCRemoteObject thing is just
-  # to allow an XCTarget to be used in the remoteGlobalIDString property of
-  # PBXContainerItemProxy.
- #
- # Setting a "name" property at instantiation may also affect "productName",
- # which may in turn affect the "PRODUCT_NAME" build setting in children of
- # "buildConfigurationList". See __init__ below.
- _schema = XCRemoteObject._schema.copy()
- _schema.update({
- 'buildConfigurationList': [0, XCConfigurationList, 1, 1,
- XCConfigurationList()],
- 'buildPhases': [1, XCBuildPhase, 1, 1, []],
- 'dependencies': [1, PBXTargetDependency, 1, 1, []],
- 'name': [0, str, 0, 1],
- 'productName': [0, str, 0, 1],
- })
-
- def __init__(self, properties=None, id=None, parent=None,
- force_outdir=None, force_prefix=None, force_extension=None):
- # super
- XCRemoteObject.__init__(self, properties, id, parent)
-
- # Set up additional defaults not expressed in the schema. If a "name"
- # property was supplied, set "productName" if it is not present. Also set
- # the "PRODUCT_NAME" build setting in each configuration, but only if
- # the setting is not present in any build configuration.
- if 'name' in self._properties:
- if not 'productName' in self._properties:
- self.SetProperty('productName', self._properties['name'])
-
- if 'productName' in self._properties:
- if 'buildConfigurationList' in self._properties:
- configs = self._properties['buildConfigurationList']
- if configs.HasBuildSetting('PRODUCT_NAME') == 0:
- configs.SetBuildSetting('PRODUCT_NAME',
- self._properties['productName'])
-
- def AddDependency(self, other):
- pbxproject = self.PBXProjectAncestor()
- other_pbxproject = other.PBXProjectAncestor()
- if pbxproject == other_pbxproject:
- # Add a dependency to another target in the same project file.
- container = PBXContainerItemProxy({'containerPortal': pbxproject,
- 'proxyType': 1,
- 'remoteGlobalIDString': other,
- 'remoteInfo': other.Name()})
- dependency = PBXTargetDependency({'target': other,
- 'targetProxy': container})
- self.AppendProperty('dependencies', dependency)
- else:
- # Add a dependency to a target in a different project file.
- other_project_ref = \
- pbxproject.AddOrGetProjectReference(other_pbxproject)[1]
- container = PBXContainerItemProxy({
- 'containerPortal': other_project_ref,
- 'proxyType': 1,
- 'remoteGlobalIDString': other,
- 'remoteInfo': other.Name(),
- })
- dependency = PBXTargetDependency({'name': other.Name(),
- 'targetProxy': container})
- self.AppendProperty('dependencies', dependency)
-
- # Proxy all of these through to the build configuration list.
-
- def ConfigurationNamed(self, name):
- return self._properties['buildConfigurationList'].ConfigurationNamed(name)
-
- def DefaultConfiguration(self):
- return self._properties['buildConfigurationList'].DefaultConfiguration()
-
- def HasBuildSetting(self, key):
- return self._properties['buildConfigurationList'].HasBuildSetting(key)
-
- def GetBuildSetting(self, key):
- return self._properties['buildConfigurationList'].GetBuildSetting(key)
-
- def SetBuildSetting(self, key, value):
- return self._properties['buildConfigurationList'].SetBuildSetting(key, \
- value)
-
- def AppendBuildSetting(self, key, value):
- return self._properties['buildConfigurationList'].AppendBuildSetting(key, \
- value)
-
- def DelBuildSetting(self, key):
- return self._properties['buildConfigurationList'].DelBuildSetting(key)
-
-
-# Redefine the type of the "target" property. See PBXTargetDependency._schema
-# above.
-PBXTargetDependency._schema['target'][1] = XCTarget
-
-
-class PBXNativeTarget(XCTarget):
- # buildPhases is overridden in the schema to be able to set defaults.
- #
- # NOTE: Contrary to most objects, it is advisable to set parent when
- # constructing PBXNativeTarget. A parent of an XCTarget must be a PBXProject
- # object. A parent reference is required for a PBXNativeTarget during
- # construction to be able to set up the target defaults for productReference,
- # because a PBXBuildFile object must be created for the target and it must
- # be added to the PBXProject's mainGroup hierarchy.
- _schema = XCTarget._schema.copy()
- _schema.update({
- 'buildPhases': [1, XCBuildPhase, 1, 1,
- [PBXSourcesBuildPhase(), PBXFrameworksBuildPhase()]],
- 'buildRules': [1, PBXBuildRule, 1, 1, []],
- 'productReference': [0, PBXFileReference, 0, 1],
- 'productType': [0, str, 0, 1],
- })
-
- # Mapping from Xcode product-types to settings. The settings are:
- # filetype : used for explicitFileType in the project file
- # prefix : the prefix for the file name
- # suffix : the suffix for the file name
- _product_filetypes = {
- 'com.apple.product-type.application': ['wrapper.application',
- '', '.app'],
- 'com.apple.product-type.application.watchapp': ['wrapper.application',
- '', '.app'],
- 'com.apple.product-type.watchkit-extension': ['wrapper.app-extension',
- '', '.appex'],
- 'com.apple.product-type.app-extension': ['wrapper.app-extension',
- '', '.appex'],
- 'com.apple.product-type.bundle': ['wrapper.cfbundle',
- '', '.bundle'],
- 'com.apple.product-type.framework': ['wrapper.framework',
- '', '.framework'],
- 'com.apple.product-type.library.dynamic': ['compiled.mach-o.dylib',
- 'lib', '.dylib'],
- 'com.apple.product-type.library.static': ['archive.ar',
- 'lib', '.a'],
- 'com.apple.product-type.tool': ['compiled.mach-o.executable',
- '', ''],
- 'com.apple.product-type.bundle.unit-test': ['wrapper.cfbundle',
- '', '.xctest'],
- 'com.googlecode.gyp.xcode.bundle': ['compiled.mach-o.dylib',
- '', '.so'],
- 'com.apple.product-type.kernel-extension': ['wrapper.kext',
- '', '.kext'],
- }
-
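-  # For example (hypothetical): a PBXNativeTarget with productType
-  # 'com.apple.product-type.library.static' and productName 'foo' gets a
-  # productReference whose path is 'libfoo.a' in BUILT_PRODUCTS_DIR.
-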
- def __init__(self, properties=None, id=None, parent=None,
- force_outdir=None, force_prefix=None, force_extension=None):
- # super
- XCTarget.__init__(self, properties, id, parent)
-
- if 'productName' in self._properties and \
- 'productType' in self._properties and \
- not 'productReference' in self._properties and \
- self._properties['productType'] in self._product_filetypes:
- products_group = None
- pbxproject = self.PBXProjectAncestor()
- if pbxproject != None:
- products_group = pbxproject.ProductsGroup()
-
- if products_group != None:
- (filetype, prefix, suffix) = \
- self._product_filetypes[self._properties['productType']]
- # Xcode does not have a distinct type for loadable modules that are
- # pure BSD targets (not in a bundle wrapper). GYP allows such modules
- # to be specified by setting a target type to loadable_module without
- # having mac_bundle set. These are mapped to the pseudo-product type
- # com.googlecode.gyp.xcode.bundle.
- #
- # By picking up this special type and converting it to a dynamic
- # library (com.apple.product-type.library.dynamic) with fix-ups,
- # single-file loadable modules can be produced.
- #
- # MACH_O_TYPE is changed to mh_bundle to produce the proper file type
- # (as opposed to mh_dylib). In order for linking to succeed,
- # DYLIB_CURRENT_VERSION and DYLIB_COMPATIBILITY_VERSION must be
- # cleared. They are meaningless for type mh_bundle.
- #
- # Finally, the .so extension is forcibly applied over the default
- # (.dylib), unless another forced extension is already selected.
- # .dylib is plainly wrong, and .bundle is used by loadable_modules in
- # bundle wrappers (com.apple.product-type.bundle). .so seems an odd
- # choice because it's used as the extension on many other systems that
- # don't distinguish between linkable shared libraries and non-linkable
- # loadable modules, but there's precedent: Python loadable modules on
- # Mac OS X use an .so extension.
- if self._properties['productType'] == 'com.googlecode.gyp.xcode.bundle':
- self._properties['productType'] = \
- 'com.apple.product-type.library.dynamic'
- self.SetBuildSetting('MACH_O_TYPE', 'mh_bundle')
- self.SetBuildSetting('DYLIB_CURRENT_VERSION', '')
- self.SetBuildSetting('DYLIB_COMPATIBILITY_VERSION', '')
- if force_extension is None:
- force_extension = suffix[1:]
-
- if self._properties['productType'] == \
-            'com.apple.product-type.bundle.unit-test':
- if force_extension is None:
- force_extension = suffix[1:]
-
- if force_extension is not None:
- # If it's a wrapper (bundle), set WRAPPER_EXTENSION.
- # Extension override.
- suffix = '.' + force_extension
- if filetype.startswith('wrapper.'):
- self.SetBuildSetting('WRAPPER_EXTENSION', force_extension)
- else:
- self.SetBuildSetting('EXECUTABLE_EXTENSION', force_extension)
-
- if filetype.startswith('compiled.mach-o.executable'):
- product_name = self._properties['productName']
- product_name += suffix
- suffix = ''
- self.SetProperty('productName', product_name)
- self.SetBuildSetting('PRODUCT_NAME', product_name)
-
- # Xcode handles most prefixes based on the target type, however there
- # are exceptions. If a "BSD Dynamic Library" target is added in the
- # Xcode UI, Xcode sets EXECUTABLE_PREFIX. This check duplicates that
- # behavior.
- if force_prefix is not None:
- prefix = force_prefix
- if filetype.startswith('wrapper.'):
- self.SetBuildSetting('WRAPPER_PREFIX', prefix)
- else:
- self.SetBuildSetting('EXECUTABLE_PREFIX', prefix)
-
- if force_outdir is not None:
- self.SetBuildSetting('TARGET_BUILD_DIR', force_outdir)
-
- # TODO(tvl): Remove the below hack.
- # http://code.google.com/p/gyp/issues/detail?id=122
-
- # Some targets include the prefix in the target_name. These targets
- # really should just add a product_name setting that doesn't include
- # the prefix. For example:
- # target_name = 'libevent', product_name = 'event'
- # This check cleans up for them.
- product_name = self._properties['productName']
- prefix_len = len(prefix)
- if prefix_len and (product_name[:prefix_len] == prefix):
- product_name = product_name[prefix_len:]
- self.SetProperty('productName', product_name)
- self.SetBuildSetting('PRODUCT_NAME', product_name)
-
- ref_props = {
- 'explicitFileType': filetype,
- 'includeInIndex': 0,
- 'path': prefix + product_name + suffix,
- 'sourceTree': 'BUILT_PRODUCTS_DIR',
- }
- file_ref = PBXFileReference(ref_props)
- products_group.AppendChild(file_ref)
- self.SetProperty('productReference', file_ref)
-
- def GetBuildPhaseByType(self, type):
- if not 'buildPhases' in self._properties:
- return None
-
- the_phase = None
- for phase in self._properties['buildPhases']:
- if isinstance(phase, type):
-        # Some phases may be present in multiples in a well-formed project file,
-        # but phases like PBXSourcesBuildPhase may only be present singly, and
-        # this function is intended as an aid to callers such as HeadersPhase
-        # and SourcesPhase.  Loop over the entire list of phases and assert if
-        # more than one of the desired type is found.
- assert the_phase is None
- the_phase = phase
-
- return the_phase
-
- def HeadersPhase(self):
- headers_phase = self.GetBuildPhaseByType(PBXHeadersBuildPhase)
- if headers_phase is None:
- headers_phase = PBXHeadersBuildPhase()
-
- # The headers phase should come before the resources, sources, and
- # frameworks phases, if any.
- insert_at = len(self._properties['buildPhases'])
- for index in xrange(0, len(self._properties['buildPhases'])):
- phase = self._properties['buildPhases'][index]
- if isinstance(phase, PBXResourcesBuildPhase) or \
- isinstance(phase, PBXSourcesBuildPhase) or \
- isinstance(phase, PBXFrameworksBuildPhase):
- insert_at = index
- break
-
- self._properties['buildPhases'].insert(insert_at, headers_phase)
- headers_phase.parent = self
-
- return headers_phase
-
- def ResourcesPhase(self):
- resources_phase = self.GetBuildPhaseByType(PBXResourcesBuildPhase)
- if resources_phase is None:
- resources_phase = PBXResourcesBuildPhase()
-
- # The resources phase should come before the sources and frameworks
- # phases, if any.
- insert_at = len(self._properties['buildPhases'])
- for index in xrange(0, len(self._properties['buildPhases'])):
- phase = self._properties['buildPhases'][index]
- if isinstance(phase, PBXSourcesBuildPhase) or \
- isinstance(phase, PBXFrameworksBuildPhase):
- insert_at = index
- break
-
- self._properties['buildPhases'].insert(insert_at, resources_phase)
- resources_phase.parent = self
-
- return resources_phase
-
- def SourcesPhase(self):
- sources_phase = self.GetBuildPhaseByType(PBXSourcesBuildPhase)
- if sources_phase is None:
- sources_phase = PBXSourcesBuildPhase()
- self.AppendProperty('buildPhases', sources_phase)
-
- return sources_phase
-
- def FrameworksPhase(self):
- frameworks_phase = self.GetBuildPhaseByType(PBXFrameworksBuildPhase)
- if frameworks_phase is None:
- frameworks_phase = PBXFrameworksBuildPhase()
- self.AppendProperty('buildPhases', frameworks_phase)
-
- return frameworks_phase
-
- def AddDependency(self, other):
- # super
- XCTarget.AddDependency(self, other)
-
- static_library_type = 'com.apple.product-type.library.static'
- shared_library_type = 'com.apple.product-type.library.dynamic'
- framework_type = 'com.apple.product-type.framework'
- if isinstance(other, PBXNativeTarget) and \
- 'productType' in self._properties and \
- self._properties['productType'] != static_library_type and \
- 'productType' in other._properties and \
- (other._properties['productType'] == static_library_type or \
- ((other._properties['productType'] == shared_library_type or \
- other._properties['productType'] == framework_type) and \
- ((not other.HasBuildSetting('MACH_O_TYPE')) or
- other.GetBuildSetting('MACH_O_TYPE') != 'mh_bundle'))):
-
- file_ref = other.GetProperty('productReference')
-
- pbxproject = self.PBXProjectAncestor()
- other_pbxproject = other.PBXProjectAncestor()
- if pbxproject != other_pbxproject:
- other_project_product_group = \
- pbxproject.AddOrGetProjectReference(other_pbxproject)[0]
- file_ref = other_project_product_group.GetChildByRemoteObject(file_ref)
-
- self.FrameworksPhase().AppendProperty('files',
- PBXBuildFile({'fileRef': file_ref}))
-
-
-class PBXAggregateTarget(XCTarget):
- pass
-
-
-class PBXProject(XCContainerPortal):
- # A PBXProject is really just an XCObject, the XCContainerPortal thing is
- # just to allow PBXProject to be used in the containerPortal property of
- # PBXContainerItemProxy.
- """
-
- Attributes:
- path: "sample.xcodeproj". TODO(mark) Document me!
- _other_pbxprojects: A dictionary, keyed by other PBXProject objects. Each
- value is a reference to the dict in the
- projectReferences list associated with the keyed
- PBXProject.
- """
-
- _schema = XCContainerPortal._schema.copy()
- _schema.update({
- 'attributes': [0, dict, 0, 0],
- 'buildConfigurationList': [0, XCConfigurationList, 1, 1,
- XCConfigurationList()],
- 'compatibilityVersion': [0, str, 0, 1, 'Xcode 3.2'],
- 'hasScannedForEncodings': [0, int, 0, 1, 1],
- 'mainGroup': [0, PBXGroup, 1, 1, PBXGroup()],
- 'projectDirPath': [0, str, 0, 1, ''],
- 'projectReferences': [1, dict, 0, 0],
- 'projectRoot': [0, str, 0, 1, ''],
- 'targets': [1, XCTarget, 1, 1, []],
- })
-
- def __init__(self, properties=None, id=None, parent=None, path=None):
- self.path = path
- self._other_pbxprojects = {}
- # super
- return XCContainerPortal.__init__(self, properties, id, parent)
-
- def Name(self):
- name = self.path
- if name[-10:] == '.xcodeproj':
- name = name[:-10]
- return posixpath.basename(name)
-
- def Path(self):
- return self.path
-
- def Comment(self):
- return 'Project object'
-
- def Children(self):
- # super
- children = XCContainerPortal.Children(self)
-
- # Add children that the schema doesn't know about. Maybe there's a more
- # elegant way around this, but this is the only case where we need to own
- # objects in a dictionary (that is itself in a list), and three lines for
- # a one-off isn't that big a deal.
- if 'projectReferences' in self._properties:
- for reference in self._properties['projectReferences']:
- children.append(reference['ProductGroup'])
-
- return children
-
- def PBXProjectAncestor(self):
- return self
-
- def _GroupByName(self, name):
- if not 'mainGroup' in self._properties:
- self.SetProperty('mainGroup', PBXGroup())
-
- main_group = self._properties['mainGroup']
- group = main_group.GetChildByName(name)
- if group is None:
- group = PBXGroup({'name': name})
- main_group.AppendChild(group)
-
- return group
-
- # SourceGroup and ProductsGroup are created by default in Xcode's own
- # templates.
- def SourceGroup(self):
- return self._GroupByName('Source')
-
- def ProductsGroup(self):
- return self._GroupByName('Products')
-
- # IntermediatesGroup is used to collect source-like files that are generated
- # by rules or script phases and are placed in intermediate directories such
- # as DerivedSources.
- def IntermediatesGroup(self):
- return self._GroupByName('Intermediates')
-
- # FrameworksGroup and ProjectsGroup are top-level groups used to collect
- # frameworks and projects.
- def FrameworksGroup(self):
- return self._GroupByName('Frameworks')
-
- def ProjectsGroup(self):
- return self._GroupByName('Projects')
-
- def RootGroupForPath(self, path):
- """Returns a PBXGroup child of this object to which path should be added.
-
- This method is intended to choose between SourceGroup and
- IntermediatesGroup on the basis of whether path is present in a source
- directory or an intermediates directory. For the purposes of this
- determination, any path located within a derived file directory such as
- PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates
- directory.
-
- The returned value is a two-element tuple. The first element is the
- PBXGroup, and the second element specifies whether that group should be
- organized hierarchically (True) or as a single flat list (False).
- """
-
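-    # For example (hypothetical): a path of '$(INTERMEDIATE_DIR)/gen.cc'
-    # lands in the Intermediates group, while a plain 'src/foo.cc' lands in
-    # the Source group; both are organized hierarchically.
-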
- # TODO(mark): make this a class variable and bind to self on call?
- # Also, this list is nowhere near exhaustive.
- # INTERMEDIATE_DIR and SHARED_INTERMEDIATE_DIR are used by
- # gyp.generator.xcode. There should probably be some way for that module
- # to push the names in, rather than having to hard-code them here.
- source_tree_groups = {
- 'DERIVED_FILE_DIR': (self.IntermediatesGroup, True),
- 'INTERMEDIATE_DIR': (self.IntermediatesGroup, True),
- 'PROJECT_DERIVED_FILE_DIR': (self.IntermediatesGroup, True),
- 'SHARED_INTERMEDIATE_DIR': (self.IntermediatesGroup, True),
- }
-
- (source_tree, path) = SourceTreeAndPathFromPath(path)
- if source_tree != None and source_tree in source_tree_groups:
- (group_func, hierarchical) = source_tree_groups[source_tree]
- group = group_func()
- return (group, hierarchical)
-
- # TODO(mark): make additional choices based on file extension.
-
- return (self.SourceGroup(), True)
-
- def AddOrGetFileInRootGroup(self, path):
- """Returns a PBXFileReference corresponding to path in the correct group
- according to RootGroupForPath's heuristics.
-
- If an existing PBXFileReference for path exists, it will be returned.
- Otherwise, one will be created and returned.
- """
-
- (group, hierarchical) = self.RootGroupForPath(path)
- return group.AddOrGetFileByPath(path, hierarchical)
-
- def RootGroupsTakeOverOnlyChildren(self, recurse=False):
- """Calls TakeOverOnlyChild for all groups in the main group."""
-
- for group in self._properties['mainGroup']._properties['children']:
- if isinstance(group, PBXGroup):
- group.TakeOverOnlyChild(recurse)
-
- def SortGroups(self):
- # Sort the children of the mainGroup (like "Source" and "Products")
- # according to their defined order.
- self._properties['mainGroup']._properties['children'] = \
- sorted(self._properties['mainGroup']._properties['children'],
- cmp=lambda x,y: x.CompareRootGroup(y))
-
- # Sort everything else by putting group before files, and going
- # alphabetically by name within sections of groups and files. SortGroup
- # is recursive.
- for group in self._properties['mainGroup']._properties['children']:
- if not isinstance(group, PBXGroup):
- continue
-
- if group.Name() == 'Products':
- # The Products group is a special case. Instead of sorting
- # alphabetically, sort things in the order of the targets that
- # produce the products. To do this, just build up a new list of
- # products based on the targets.
- products = []
- for target in self._properties['targets']:
- if not isinstance(target, PBXNativeTarget):
- continue
- product = target._properties['productReference']
- # Make sure that the product is already in the products group.
- assert product in group._properties['children']
- products.append(product)
-
- # Make sure that this process doesn't miss anything that was already
- # in the products group.
- assert len(products) == len(group._properties['children'])
- group._properties['children'] = products
- else:
- group.SortGroup()
-
- def AddOrGetProjectReference(self, other_pbxproject):
- """Add a reference to another project file (via PBXProject object) to this
- one.
-
- Returns [ProductGroup, ProjectRef]. ProductGroup is a PBXGroup object in
- this project file that contains a PBXReferenceProxy object for each
- product of each PBXNativeTarget in the other project file. ProjectRef is
- a PBXFileReference to the other project file.
-
- If this project file already references the other project file, the
- existing ProductGroup and ProjectRef are returned. The ProductGroup will
- still be updated if necessary.
- """
-
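-    # For example (hypothetical): calling this twice with the same
-    # other_pbxproject returns the same [ProductGroup, ProjectRef] pair; the
-    # second call only refreshes the product references.
-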
- if not 'projectReferences' in self._properties:
- self._properties['projectReferences'] = []
-
- product_group = None
- project_ref = None
-
- if not other_pbxproject in self._other_pbxprojects:
- # This project file isn't yet linked to the other one. Establish the
- # link.
- product_group = PBXGroup({'name': 'Products'})
-
- # ProductGroup is strong.
- product_group.parent = self
-
- # There's nothing unique about this PBXGroup, and if left alone, it will
- # wind up with the same set of hashables as all other PBXGroup objects
- # owned by the projectReferences list. Add the hashables of the
- # remote PBXProject that it's related to.
- product_group._hashables.extend(other_pbxproject.Hashables())
-
- # The other project reports its path as relative to the same directory
- # that this project's path is relative to. The other project's path
- # is not necessarily already relative to this project. Figure out the
- # pathname that this project needs to use to refer to the other one.
- this_path = posixpath.dirname(self.Path())
- projectDirPath = self.GetProperty('projectDirPath')
- if projectDirPath:
- if posixpath.isabs(projectDirPath[0]):
- this_path = projectDirPath
- else:
- this_path = posixpath.join(this_path, projectDirPath)
- other_path = gyp.common.RelativePath(other_pbxproject.Path(), this_path)
-
- # ProjectRef is weak (it's owned by the mainGroup hierarchy).
- project_ref = PBXFileReference({
- 'lastKnownFileType': 'wrapper.pb-project',
- 'path': other_path,
- 'sourceTree': 'SOURCE_ROOT',
- })
- self.ProjectsGroup().AppendChild(project_ref)
-
- ref_dict = {'ProductGroup': product_group, 'ProjectRef': project_ref}
- self._other_pbxprojects[other_pbxproject] = ref_dict
- self.AppendProperty('projectReferences', ref_dict)
-
- # Xcode seems to sort this list case-insensitively
- self._properties['projectReferences'] = \
- sorted(self._properties['projectReferences'], cmp=lambda x,y:
- cmp(x['ProjectRef'].Name().lower(),
- y['ProjectRef'].Name().lower()))
- else:
-      # The link already exists. Pull out the relevant data.
- project_ref_dict = self._other_pbxprojects[other_pbxproject]
- product_group = project_ref_dict['ProductGroup']
- project_ref = project_ref_dict['ProjectRef']
-
- self._SetUpProductReferences(other_pbxproject, product_group, project_ref)
-
- inherit_unique_symroot = self._AllSymrootsUnique(other_pbxproject, False)
- targets = other_pbxproject.GetProperty('targets')
- if all(self._AllSymrootsUnique(t, inherit_unique_symroot) for t in targets):
- dir_path = project_ref._properties['path']
- product_group._hashables.extend(dir_path)
-
- return [product_group, project_ref]
-
- def _AllSymrootsUnique(self, target, inherit_unique_symroot):
- # Returns True if all configurations have a unique 'SYMROOT' attribute.
-    # The value of inherit_unique_symroot decides whether a configuration is
-    # assumed to inherit a unique 'SYMROOT' attribute from its parent when it
-    # doesn't define an explicit value for 'SYMROOT'.
- symroots = self._DefinedSymroots(target)
-    for s in symroots:
- if (s is not None and not self._IsUniqueSymrootForTarget(s) or
- s is None and not inherit_unique_symroot):
- return False
- return True if symroots else inherit_unique_symroot
-
- def _DefinedSymroots(self, target):
- # Returns all values for the 'SYMROOT' attribute defined in all
- # configurations for this target. If any configuration doesn't define the
-    # 'SYMROOT' attribute, None is added to the returned set. If no
-    # configuration defines the 'SYMROOT' attribute, an empty set is
-    # returned.
- config_list = target.GetProperty('buildConfigurationList')
- symroots = set()
- for config in config_list.GetProperty('buildConfigurations'):
- setting = config.GetProperty('buildSettings')
- if 'SYMROOT' in setting:
- symroots.add(setting['SYMROOT'])
- else:
- symroots.add(None)
- if len(symroots) == 1 and None in symroots:
- return set()
- return symroots
-
- def _IsUniqueSymrootForTarget(self, symroot):
-    # Returns True if the given 'SYMROOT' value is considered unique to its
-    # target. A value is unique if the Xcode macro '$SRCROOT' appears in it
-    # in any form.
- uniquifier = ['$SRCROOT', '$(SRCROOT)']
- if any(x in symroot for x in uniquifier):
- return True
- return False
-
- def _SetUpProductReferences(self, other_pbxproject, product_group,
- project_ref):
- # TODO(mark): This only adds references to products in other_pbxproject
- # when they don't exist in this pbxproject. Perhaps it should also
- # remove references from this pbxproject that are no longer present in
- # other_pbxproject. Perhaps it should update various properties if they
- # change.
- for target in other_pbxproject._properties['targets']:
- if not isinstance(target, PBXNativeTarget):
- continue
-
- other_fileref = target._properties['productReference']
- if product_group.GetChildByRemoteObject(other_fileref) is None:
- # Xcode sets remoteInfo to the name of the target and not the name
- # of its product, despite this proxy being a reference to the product.
- container_item = PBXContainerItemProxy({
- 'containerPortal': project_ref,
- 'proxyType': 2,
- 'remoteGlobalIDString': other_fileref,
- 'remoteInfo': target.Name()
- })
- # TODO(mark): Does sourceTree get copied straight over from the other
- # project? Can the other project ever have lastKnownFileType here
- # instead of explicitFileType? (Use it if so?) Can path ever be
- # unset? (I don't think so.) Can other_fileref have name set, and
- # does it impact the PBXReferenceProxy if so? These are the questions
- # that perhaps will be answered one day.
- reference_proxy = PBXReferenceProxy({
- 'fileType': other_fileref._properties['explicitFileType'],
- 'path': other_fileref._properties['path'],
- 'sourceTree': other_fileref._properties['sourceTree'],
- 'remoteRef': container_item,
- })
-
- product_group.AppendChild(reference_proxy)
-
-  def SortRemoteProductReferences(self):
-    # For each remote project file, sort the associated ProductGroup in the
-    # same order that the targets are sorted in the remote project file.
-    # This is the sort order used by Xcode.
-
-    def CompareProducts(x, y, remote_products):
-      # x and y are PBXReferenceProxy objects.  Go through their associated
-      # PBXContainerItem to get the remote PBXFileReference, which will be
-      # present in the remote_products list.
-      x_remote = x._properties['remoteRef']._properties['remoteGlobalIDString']
-      y_remote = y._properties['remoteRef']._properties['remoteGlobalIDString']
-      x_index = remote_products.index(x_remote)
-      y_index = remote_products.index(y_remote)
-
-      # Use the order of each remote PBXFileReference in remote_products to
-      # determine the sort order.
-      return cmp(x_index, y_index)
-
-    for other_pbxproject, ref_dict in self._other_pbxprojects.iteritems():
-      # Build up a list of products in the remote project file, ordered the
-      # same as the targets that produce them.
-      remote_products = []
-      for target in other_pbxproject._properties['targets']:
-        if not isinstance(target, PBXNativeTarget):
-          continue
-        remote_products.append(target._properties['productReference'])
-
-      # Sort the PBXReferenceProxy children according to the list of remote
-      # products.
-      product_group = ref_dict['ProductGroup']
-      product_group._properties['children'] = sorted(
-          product_group._properties['children'],
-          cmp=lambda x, y, rp=remote_products: CompareProducts(x, y, rp))
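The cmp-based sort above is Python 2 only. For reference, the same ordering can be expressed with a key function, which also survives Python 3's removal of the cmp argument; this is a sketch of an equivalent under the same _properties layout, not what gyp ships:

    def remote_product_index(proxy, remote_products):
      # Position of the proxied remote PBXFileReference in the remote
      # project's target order.
      remote = proxy._properties['remoteRef']._properties['remoteGlobalIDString']
      return remote_products.index(remote)

    product_group._properties['children'].sort(
        key=lambda x: remote_product_index(x, remote_products))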
-
-
-class XCProjectFile(XCObject):
-  _schema = XCObject._schema.copy()
-  _schema.update({
-    'archiveVersion': [0, int, 0, 1, 1],
-    'classes': [0, dict, 0, 1, {}],
-    'objectVersion': [0, int, 0, 1, 46],
-    'rootObject': [0, PBXProject, 1, 1],
-  })
-
-  def ComputeIDs(self, recursive=True, overwrite=True, hash=None):
-    # Although XCProjectFile is implemented here as an XCObject, it's not a
-    # proper object in the Xcode sense, and it certainly doesn't have its own
-    # ID.  Pass through an attempt to update IDs to the real root object.
-    if recursive:
-      self._properties['rootObject'].ComputeIDs(recursive, overwrite, hash)
-
-  def Print(self, file=sys.stdout):
-    self.VerifyHasRequiredProperties()
-
-    # Add the special "objects" property, which will be caught and handled
-    # separately during printing.  This structure allows a fairly standard
-    # loop to do the normal printing.
-    self._properties['objects'] = {}
-    self._XCPrint(file, 0, '// !$*UTF8*$!\n')
-    if self._should_print_single_line:
-      self._XCPrint(file, 0, '{ ')
-    else:
-      self._XCPrint(file, 0, '{\n')
-    for property, value in sorted(self._properties.iteritems(),
-                                  cmp=lambda x, y: cmp(x, y)):
-      if property == 'objects':
-        self._PrintObjects(file)
-      else:
-        self._XCKVPrint(file, 1, property, value)
-    self._XCPrint(file, 0, '}\n')
-    del self._properties['objects']
-
-  def _PrintObjects(self, file):
-    if self._should_print_single_line:
-      self._XCPrint(file, 0, 'objects = {')
-    else:
-      self._XCPrint(file, 1, 'objects = {\n')
-
-    objects_by_class = {}
-    for object in self.Descendants():
-      if object == self:
-        continue
-      class_name = object.__class__.__name__
-      if class_name not in objects_by_class:
-        objects_by_class[class_name] = []
-      objects_by_class[class_name].append(object)
-
-    for class_name in sorted(objects_by_class):
-      self._XCPrint(file, 0, '\n')
-      self._XCPrint(file, 0, '/* Begin ' + class_name + ' section */\n')
-      for object in sorted(objects_by_class[class_name],
-                           cmp=lambda x, y: cmp(x.id, y.id)):
-        object.Print(file)
-      self._XCPrint(file, 0, '/* End ' + class_name + ' section */\n')
-
-    if self._should_print_single_line:
-      self._XCPrint(file, 0, '}; ')
-    else:
-      self._XCPrint(file, 1, '};\n')
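Taken together, Print walks the object graph grouped by class and emits the pbxproj plist. A hedged sketch of how a caller might serialize a project file, assuming a fully-populated PBXProject named root_project (hypothetical) and the constructor/property behavior implied by the _schema above:

    import sys

    project_file = XCProjectFile({'rootObject': root_project})
    project_file.ComputeIDs()       # delegates to the root PBXProject
    project_file.Print(sys.stdout)  # emits '// !$*UTF8*$!' then the plist body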
diff --git a/deps/gyp/pylib/gyp/xml_fix.py b/deps/gyp/pylib/gyp/xml_fix.py
deleted file mode 100644
index 5de848158d..0000000000
--- a/deps/gyp/pylib/gyp/xml_fix.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Applies a fix to CR LF TAB handling in xml.dom.
-
-Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
-Working around this: http://bugs.python.org/issue5752
-TODO(bradnelson): Consider dropping this when we drop XP support.
-"""
-
-
-import xml.dom.minidom
-
-
-def _Replacement_write_data(writer, data, is_attrib=False):
-  """Writes datachars to writer."""
-  data = data.replace("&", "&amp;").replace("<", "&lt;")
-  data = data.replace("\"", "&quot;").replace(">", "&gt;")
-  if is_attrib:
-    data = data.replace(
-        "\r", "&#xD;").replace(
-        "\n", "&#xA;").replace(
-        "\t", "&#x9;")
-  writer.write(data)
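With is_attrib=True the replacement escapes the attribute whitespace that minidom would otherwise pass through literally; a quick illustration of the function above:

    from StringIO import StringIO  # Python 2, as in the rest of this module

    out = StringIO()
    _Replacement_write_data(out, 'a\tb\r\nc', is_attrib=True)
    assert out.getvalue() == 'a&#x9;b&#xD;&#xA;c'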
-
-
-def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
-  # indent = current indentation
-  # addindent = indentation to add to higher levels
-  # newl = newline string
-  writer.write(indent + "<" + self.tagName)
-
-  attrs = self._get_attributes()
-  a_names = attrs.keys()
-  a_names.sort()
-
-  for a_name in a_names:
-    writer.write(" %s=\"" % a_name)
-    _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
-    writer.write("\"")
-  if self.childNodes:
-    writer.write(">%s" % newl)
-    for node in self.childNodes:
-      node.writexml(writer, indent + addindent, addindent, newl)
-    writer.write("%s</%s>%s" % (indent, self.tagName, newl))
-  else:
-    writer.write("/>%s" % newl)
-
-
-class XmlFix(object):
-  """Object to manage temporary patching of xml.dom.minidom."""
-
-  def __init__(self):
-    # Preserve current xml.dom.minidom functions.
-    self.write_data = xml.dom.minidom._write_data
-    self.writexml = xml.dom.minidom.Element.writexml
-    # Inject replacement versions of a function and a method.
-    xml.dom.minidom._write_data = _Replacement_write_data
-    xml.dom.minidom.Element.writexml = _Replacement_writexml
-
-  def Cleanup(self):
-    if self.write_data:
-      xml.dom.minidom._write_data = self.write_data
-      xml.dom.minidom.Element.writexml = self.writexml
-      self.write_data = None
-
-  def __del__(self):
-    self.Cleanup()
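Because XmlFix swaps out module-level attributes of xml.dom.minidom, the patch is process-wide until Cleanup runs. A minimal usage sketch bracketing a single writexml call with the class above:

    import sys
    import xml.dom.minidom

    fix = XmlFix()
    try:
      doc = xml.dom.minidom.parseString('<a x="1&#x9;2"/>')
      doc.documentElement.writexml(sys.stdout)  # tab re-escaped as &#x9;
    finally:
      fix.Cleanup()  # restores the original minidom functions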