author     Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>    2018-04-17 21:57:29 +0900
committer  Tristan Van Berkom <tristan.van.berkom@gmail.com>         2018-04-17 15:55:01 +0000
commit     5f3da731cbd75ddbfb1abb68e66ad3a736d4a28e (patch)
tree       955de2c93a4083e6b3747aab71bbc7abfe139a59 /buildstream
parent     fb6687b6e87284cbb93b576f19ff629aa233533e (diff)
download   buildstream-5f3da731cbd75ddbfb1abb68e66ad3a736d4a28e.tar.gz
_loader package: Splitting up the loader code into separate pieces
This also refactors the loader code to honor the private symbol naming policy. This is part of issue #285.
Diffstat (limited to 'buildstream')
-rw-r--r--   buildstream/_loader/loadelement.py   172
-rw-r--r--   buildstream/_loader/loader.py        619
-rw-r--r--   buildstream/_loader/types.py          63
3 files changed, 494 insertions, 360 deletions
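
To show how the split fits together, here is a minimal sketch of the imports loader.py now uses to pull in the split-out modules (both lines appear verbatim in the loader.py hunk below):

    from .types import Symbol, Dependency
    from .loadelement import LoadElement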
diff --git a/buildstream/_loader/loadelement.py b/buildstream/_loader/loadelement.py
new file mode 100644
index 000000000..b270fbef8
--- /dev/null
+++ b/buildstream/_loader/loadelement.py
@@ -0,0 +1,172 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2016 Codethink Limited
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+# Authors:
+# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
+
+# System imports
+from collections import Mapping
+
+# BuildStream toplevel imports
+from .._exceptions import LoadError, LoadErrorReason
+from .. import _yaml
+
+# Local package imports
+from .types import Symbol, Dependency
+
+
+# LoadElement():
+#
+# A transient object breaking down what is loaded allowing us to
+# do complex operations in multiple passes.
+#
+# Args:
+# node (dict): A YAML loaded dictionary
+# name (str): The element name
+# loader (Loader): The Loader object for this element
+#
+class LoadElement():
+
+ def __init__(self, node, filename, loader):
+
+ #
+ # Public members
+ #
+ self.node = node # The YAML node
+ self.name = filename # The element name
+ self.full_name = None # The element full name (with associated junction)
+ self.deps = None # The list of Dependency objects
+
+ #
+ # Private members
+ #
+ self._loader = loader # The Loader object
+ self._dep_cache = None # The dependency cache, to speed up depends()
+
+ #
+ # Initialization
+ #
+ if loader.project.junction:
+ # dependency is in subproject, qualify name
+ self.full_name = '{}:{}'.format(loader.project.junction.name, self.name)
+ else:
+ # dependency is in top-level project
+ self.full_name = self.name
+
+ # Ensure the root node is valid
+ _yaml.node_validate(self.node, [
+ 'kind', 'depends', 'sources', 'sandbox',
+ 'variables', 'environment', 'environment-nocache',
+ 'config', 'public', 'description',
+ ])
+
+ # Extract the Dependencies
+ self.deps = _extract_depends_from_node(self.node)
+
+ # depends():
+ #
+ # Checks if this element depends on another element, directly
+ # or indirectly.
+ #
+ # Args:
+ # other (LoadElement): Another LoadElement
+ #
+ # Returns:
+ # (bool): True if this LoadElement depends on 'other'
+ #
+ def depends(self, other):
+ self._ensure_depends_cache()
+ return self._dep_cache.get(other.full_name) is not None
+
+ ###########################################
+ # Private Methods #
+ ###########################################
+ def _ensure_depends_cache(self):
+
+ if self._dep_cache:
+ return
+
+ self._dep_cache = {}
+ for dep in self.deps:
+ elt = self._loader.get_element_for_dep(dep)
+
+ # Ensure the cache of the element we depend on
+ elt._ensure_depends_cache()
+
+ # We depend on this element
+ self._dep_cache[elt.full_name] = True
+
+ # And we depend on everything this element depends on
+ self._dep_cache.update(elt._dep_cache)
+
+
+# _extract_depends_from_node():
+#
+# Creates an array of Dependency objects from a given dict node 'node',
+# allows both strings and dicts for expressing the dependency and
+# throws a comprehensive LoadError in the case that the node is malformed.
+#
+# After extracting depends, the symbol is deleted from the node
+#
+# Args:
+# node (dict): A YAML loaded dictionary
+#
+# Returns:
+# (list): a list of Dependency objects
+#
+def _extract_depends_from_node(node):
+ depends = _yaml.node_get(node, list, Symbol.DEPENDS, default_value=[])
+ output_deps = []
+
+ for dep in depends:
+ dep_provenance = _yaml.node_get_provenance(node, key=Symbol.DEPENDS, indices=[depends.index(dep)])
+
+ if isinstance(dep, str):
+ dependency = Dependency(dep, provenance=dep_provenance)
+
+ elif isinstance(dep, Mapping):
+ _yaml.node_validate(dep, ['filename', 'type', 'junction'])
+
+ # Make type optional, for this we set it to None
+ dep_type = _yaml.node_get(dep, str, Symbol.TYPE, default_value=None)
+ if dep_type is None or dep_type == Symbol.ALL:
+ dep_type = None
+ elif dep_type not in [Symbol.BUILD, Symbol.RUNTIME]:
+ provenance = _yaml.node_get_provenance(dep, key=Symbol.TYPE)
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Dependency type '{}' is not 'build', 'runtime' or 'all'"
+ .format(provenance, dep_type))
+
+ filename = _yaml.node_get(dep, str, Symbol.FILENAME)
+ junction = _yaml.node_get(dep, str, Symbol.JUNCTION, default_value=None)
+ dependency = Dependency(filename,
+ dep_type=dep_type,
+ junction=junction,
+ provenance=dep_provenance)
+
+ else:
+ index = depends.index(dep)
+ p = _yaml.node_get_provenance(node, key=Symbol.DEPENDS, indices=[index])
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Dependency is not specified as a string or a dictionary".format(p))
+
+ output_deps.append(dependency)
+
+    # Now delete "depends", we don't want it anymore
+ del node[Symbol.DEPENDS]
+
+ return output_deps
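
The depends() / _ensure_depends_cache() pair in the new loadelement.py computes a memoized transitive-dependency set: each element's cache folds in the caches of its direct dependencies. A self-contained sketch of the same technique, using a plain dict of direct dependencies in place of LoadElement and the Loader (the element names and helper functions here are illustrative, not BuildStream API):

    # Direct dependencies, keyed by element name (made-up example data)
    DIRECT_DEPS = {
        'app.bst':    ['libfoo.bst', 'libbar.bst'],
        'libfoo.bst': ['base.bst'],
        'libbar.bst': ['base.bst'],
        'base.bst':   [],
    }

    _dep_cache = {}

    def ensure_depends_cache(name):
        # Build (and memoize) everything 'name' depends on, directly or
        # indirectly, by folding each direct dependency's own cache into ours.
        if name in _dep_cache:
            return _dep_cache[name]
        cache = {}
        for dep in DIRECT_DEPS[name]:
            cache[dep] = True                         # we depend on this element
            cache.update(ensure_depends_cache(dep))   # and on everything it depends on
        _dep_cache[name] = cache
        return cache

    def depends(name, other):
        # True if 'name' depends on 'other', directly or indirectly
        return ensure_depends_cache(name).get(other) is not None

    print(depends('app.bst', 'base.bst'))   # True
    print(depends('base.bst', 'app.bst'))   # False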
diff --git a/buildstream/_loader/loader.py b/buildstream/_loader/loader.py
index 245d43d3f..5c33d9a74 100644
--- a/buildstream/_loader/loader.py
+++ b/buildstream/_loader/loader.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
#
-# Copyright (C) 2016 Codethink Limited
+# Copyright (C) 2018 Codethink Limited
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
@@ -32,178 +32,32 @@ from ..element import Element
from .._profile import Topics, profile_start, profile_end
from .._platform import Platform
+from .types import Symbol, Dependency
+from .loadelement import LoadElement
from . import MetaElement
from . import MetaSource
-#################################################
-# Local Types #
-#################################################
+# Loader():
#
-# List of symbols we recognize
+# The Loader class does the heavy lifting of parsing target
+# bst files and ultimately transforming them into a list of MetaElements
+# with their own MetaSources, ready for instantiation by the core.
#
-class Symbol():
- FILENAME = "filename"
- KIND = "kind"
- DEPENDS = "depends"
- SOURCES = "sources"
- CONFIG = "config"
- VARIABLES = "variables"
- ENVIRONMENT = "environment"
- ENV_NOCACHE = "environment-nocache"
- PUBLIC = "public"
- TYPE = "type"
- BUILD = "build"
- RUNTIME = "runtime"
- ALL = "all"
- DIRECTORY = "directory"
- JUNCTION = "junction"
- SANDBOX = "sandbox"
-
-
-# A simple dependency object
-#
-class Dependency():
- def __init__(self, name,
- dep_type=None, junction=None, provenance=None):
- self.name = name
- self.dep_type = dep_type
- self.junction = junction
- self.provenance = provenance
-
-
-# A transient object breaking down what is loaded
-# allowing us to do complex operations in multiple
-# passes
-#
-class LoadElement():
-
- def __init__(self, data, filename, loader):
-
- self.data = data
- self.name = filename
- self.loader = loader
-
- if loader.project.junction:
- # dependency is in subproject, qualify name
- self.full_name = '{}:{}'.format(loader.project.junction.name, self.name)
- else:
- # dependency is in top-level project
- self.full_name = self.name
-
- # Ensure the root node is valid
- _yaml.node_validate(self.data, [
- 'kind', 'depends', 'sources', 'sandbox',
- 'variables', 'environment', 'environment-nocache',
- 'config', 'public', 'description',
- ])
-
- # Cache dependency tree to detect circular dependencies
- self.dep_cache = None
-
- # Dependencies
- self.deps = extract_depends_from_node(self.data)
-
- #############################################
- # Routines used by the Loader #
- #############################################
-
- # Checks if this element depends on another element, directly
- # or indirectly.
- #
- def depends(self, other):
-
- self.ensure_depends_cache()
- return self.dep_cache.get(other.full_name) is not None
-
- def ensure_depends_cache(self):
-
- if self.dep_cache:
- return
-
- self.dep_cache = {}
- for dep in self.deps:
- elt = self.loader.get_element_for_dep(dep)
-
- # Ensure the cache of the element we depend on
- elt.ensure_depends_cache()
-
- # We depend on this element
- self.dep_cache[elt.full_name] = True
-
- # And we depend on everything this element depends on
- self.dep_cache.update(elt.dep_cache)
-
-
-# Creates an array of dependency dicts from a given dict node 'data',
-# allows both strings and dicts for expressing the dependency and
-# throws a comprehensive LoadError in the case that the data is malformed.
-#
-# After extracting depends, they are removed from the data node
-#
-# Returns a normalized array of Dependency objects
-def extract_depends_from_node(data):
- depends = _yaml.node_get(data, list, Symbol.DEPENDS, default_value=[])
- output_deps = []
-
- for dep in depends:
- dep_provenance = _yaml.node_get_provenance(data, key=Symbol.DEPENDS, indices=[depends.index(dep)])
-
- if isinstance(dep, str):
- dependency = Dependency(dep, provenance=dep_provenance)
-
- elif isinstance(dep, Mapping):
- _yaml.node_validate(dep, ['filename', 'type', 'junction'])
-
- # Make type optional, for this we set it to None
- dep_type = _yaml.node_get(dep, str, Symbol.TYPE, default_value=None)
- if dep_type is None or dep_type == Symbol.ALL:
- dep_type = None
- elif dep_type not in [Symbol.BUILD, Symbol.RUNTIME]:
- provenance = _yaml.node_get_provenance(dep, key=Symbol.TYPE)
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Dependency type '{}' is not 'build', 'runtime' or 'all'"
- .format(provenance, dep_type))
-
- filename = _yaml.node_get(dep, str, Symbol.FILENAME)
-
- junction = _yaml.node_get(dep, str, Symbol.JUNCTION, default_value=None)
-
- dependency = Dependency(filename,
- dep_type=dep_type, junction=junction,
- provenance=dep_provenance)
-
- else:
- index = depends.index(dep)
- provenance = _yaml.node_get_provenance(data, key=Symbol.DEPENDS, indices=[index])
-
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: List '{}' element {:d} is not a list or dict"
- .format(provenance, Symbol.DEPENDS, index))
-
- output_deps.append(dependency)
-
- # Now delete "depends", we dont want it anymore
- del data[Symbol.DEPENDS]
-
- return output_deps
-
-
-#################################################
-# The Loader #
-#################################################
-#
-# The Loader class does the heavy lifting of parsing a target
-# bst file and creating a tree of LoadElements
+# Args:
+# context (Context): The Context object
+# project (Project): The toplevel Project object
+# filenames (list of str): Target, element-path relative bst filenames in the project
+# parent (Loader): A parent Loader object, in the case this is a junctioned Loader
+# tempdir (str): A directory to cleanup with the Loader, given to the loader by a parent
+# loader in the case that this loader is a subproject loader.
#
class Loader():
def __init__(self, context, project, filenames, *, parent=None, tempdir=None):
- basedir = project.element_path
-
# Ensure we have an absolute path for the base directory
- #
+ basedir = project.element_path
if not os.path.isabs(basedir):
basedir = os.path.abspath(basedir)
@@ -216,25 +70,24 @@ class Loader():
"path to the base project directory: {}"
.format(filename, basedir))
- self.context = context
- self.project = project
- self.options = project.options # Project options (OptionPool)
- self.basedir = basedir # Base project directory
- self.targets = filenames # Target bst elements
- self.tempdir = tempdir
-
- self.parent = parent
-
- self.platform = Platform.get_platform()
- self.artifacts = self.platform.artifactcache
+ #
+ # Public members
+ #
+ self.project = project # The associated Project
- self.meta_elements = {} # Dict of resolved meta elements by name
- self.elements = {} # Dict of elements
- self.loaders = {} # Dict of junction loaders
+ #
+ # Private members
+ #
+ self._context = context
+ self._options = project.options # Project options (OptionPool)
+ self._basedir = basedir # Base project directory
+ self._targets = filenames # Target bst elements
+ self._tempdir = tempdir # A directory to cleanup
+ self._parent = parent # The parent loader
- ########################################
- # Main Entry Point #
- ########################################
+ self._meta_elements = {} # Dict of resolved meta elements by name
+ self._elements = {} # Dict of elements
+ self._loaders = {} # Dict of junction loaders
# load():
#
@@ -252,9 +105,9 @@ class Loader():
# First pass, recursively load files and populate our table of LoadElements
#
- for target in self.targets:
+ for target in self._targets:
profile_start(Topics.LOAD_PROJECT, target)
- self.load_file(target, rewritable, ticker)
+ self._load_file(target, rewritable, ticker)
profile_end(Topics.LOAD_PROJECT, target)
#
@@ -264,186 +117,137 @@ class Loader():
# Set up a dummy element that depends on all top-level targets
# to resolve potential circular dependencies between them
DummyTarget = namedtuple('DummyTarget', ['name', 'full_name', 'deps'])
- dummy = DummyTarget(name='', full_name='', deps=[Dependency(e) for e in self.targets])
- self.elements[''] = dummy
+ dummy = DummyTarget(name='', full_name='', deps=[Dependency(e) for e in self._targets])
+ self._elements[''] = dummy
- profile_key = "_".join(t for t in self.targets)
+ profile_key = "_".join(t for t in self._targets)
profile_start(Topics.CIRCULAR_CHECK, profile_key)
- self.check_circular_deps('')
+ self._check_circular_deps('')
profile_end(Topics.CIRCULAR_CHECK, profile_key)
#
# Sort direct dependencies of elements by their dependency ordering
#
- for target in self.targets:
+ for target in self._targets:
profile_start(Topics.SORT_DEPENDENCIES, target)
- self.sort_dependencies(target)
+ self._sort_dependencies(target)
profile_end(Topics.SORT_DEPENDENCIES, target)
# Finally, wrap what we have into LoadElements and return the target
#
- return [self.collect_element(target) for target in self.targets]
+ return [self._collect_element(target) for target in self._targets]
- # get_loader():
- #
- # Return loader for specified junction
- #
- # Args:
- # filename (str): Junction name
+ # cleanup():
#
- # Raises: LoadError
+ # Remove temporary checkout directories of subprojects
#
- # Returns: A Loader or None if specified junction does not exist
- def get_loader(self, filename, *, rewritable=False, ticker=None, level=0):
- # return previously determined result
- if filename in self.loaders:
- loader = self.loaders[filename]
-
- if loader is None:
- # do not allow junctions with the same name in different
- # subprojects
- raise LoadError(LoadErrorReason.CONFLICTING_JUNCTION,
- "Conflicting junction {} in subprojects, define junction in {}"
- .format(filename, self.project.name))
-
- return loader
-
- if self.parent:
- # junctions in the parent take precedence over junctions defined
- # in subprojects
- loader = self.parent.get_loader(filename, rewritable=rewritable, ticker=ticker, level=level + 1)
- if loader:
- self.loaders[filename] = loader
- return loader
-
- try:
- load_element = self.load_file(filename, rewritable, ticker)
- except LoadError as e:
- if e.reason != LoadErrorReason.MISSING_FILE:
- # other load error
- raise
-
- if level == 0:
- # junction element not found in this or ancestor projects
- raise
- else:
- # mark junction as not available to allow detection of
- # conflicting junctions in subprojects
- self.loaders[filename] = None
- return None
-
- # meta junction element
- meta_element = self.collect_element(filename)
- if meta_element.kind != 'junction':
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}: Expected junction but element kind is {}".format(filename, meta_element.kind))
-
- element = Element._new_from_meta(meta_element, self.artifacts)
- element._preflight()
-
- for source in element.sources():
- # Handle the case where a subproject needs to be fetched
- #
- if source.get_consistency() == Consistency.RESOLVED:
- if self.context._fetch_subprojects:
- if ticker:
- ticker(filename, 'Fetching subproject from {} source'.format(source.get_kind()))
- source.fetch()
- else:
- detail = "Try fetching the project with `bst fetch {}`".format(filename)
- raise LoadError(LoadErrorReason.SUBPROJECT_FETCH_NEEDED,
- "Subproject fetch needed for junction: {}".format(filename),
- detail=detail)
-
- # Handle the case where a subproject has no ref
- #
- elif source.get_consistency() == Consistency.INCONSISTENT:
- detail = "Try tracking the junction element with `bst track {}`".format(filename)
- raise LoadError(LoadErrorReason.SUBPROJECT_INCONSISTENT,
- "Subproject has no ref for junction: {}".format(filename),
- detail=detail)
-
- # Stage sources
- os.makedirs(self.context.builddir, exist_ok=True)
- basedir = tempfile.mkdtemp(prefix="{}-".format(element.normal_name), dir=self.context.builddir)
- element._stage_sources_at(basedir, mount_workspaces=False)
+ def cleanup(self):
+ if self._parent and not self._tempdir:
+ # already done
+ return
- # Load the project
- project_dir = os.path.join(basedir, element.path)
- try:
- project = Project(project_dir, self.context, junction=element)
- except LoadError as e:
- if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
- raise LoadError(reason=LoadErrorReason.INVALID_JUNCTION,
- message="Could not find the project.conf file for {}. "
- "Expecting a project at path '{}'"
- .format(element, element.path or '.')) from e
- else:
- raise
+ # recurse
+ for loader in self._loaders.values():
+ # value may be None with nested junctions without overrides
+ if loader is not None:
+ loader.cleanup()
- loader = Loader(self.context, project, [], parent=self, tempdir=basedir)
+ if not self._parent:
+ # basedir of top-level loader is never a temporary directory
+ return
- self.loaders[filename] = loader
+ # safe guard to not accidentally delete directories outside builddir
+ if self._tempdir.startswith(self._context.builddir + os.sep):
+ if os.path.exists(self._tempdir):
+ shutil.rmtree(self._tempdir)
- return loader
+ # get_element_for_dep():
+ #
+ # Gets a cached LoadElement by Dependency object
+ #
+ # This is used by LoadElement
+ #
+ # Args:
+ # dep (Dependency): The dependency to search for
+ #
+ # Returns:
+ # (LoadElement): The cached LoadElement
+ #
+ def get_element_for_dep(self, dep):
+ loader = self._get_loader_for_dep(dep)
+ return loader._elements[dep.name]
- ########################################
- # Loading Files #
- ########################################
+ ###########################################
+ # Private Methods #
+ ###########################################
+ # _load_file():
+ #
# Recursively load bst files
#
- def load_file(self, filename, rewritable, ticker):
+ # Args:
+ # filename (str): The element-path relative bst file
+ # rewritable (bool): Whether we should load in round trippable mode
+ # ticker (callable): A callback to report loaded filenames to the frontend
+ #
+ # Returns:
+ # (LoadElement): A loaded LoadElement
+ #
+ def _load_file(self, filename, rewritable, ticker):
# Silently ignore already loaded files
- if filename in self.elements:
- return self.elements[filename]
+ if filename in self._elements:
+ return self._elements[filename]
# Call the ticker
if ticker:
ticker(filename)
# Load the data and process any conditional statements therein
- fullpath = os.path.join(self.basedir, filename)
- data = _yaml.load(fullpath, shortname=filename, copy_tree=rewritable)
- self.options.process_node(data)
+ fullpath = os.path.join(self._basedir, filename)
+ node = _yaml.load(fullpath, shortname=filename, copy_tree=rewritable)
+ self._options.process_node(node)
- element = LoadElement(data, filename, self)
+ element = LoadElement(node, filename, self)
- self.elements[filename] = element
+ self._elements[filename] = element
# Load all dependency files for the new LoadElement
for dep in element.deps:
if dep.junction:
- self.load_file(dep.junction, rewritable, ticker)
- loader = self.get_loader(dep.junction, rewritable=rewritable, ticker=ticker)
+ self._load_file(dep.junction, rewritable, ticker)
+ loader = self._get_loader(dep.junction, rewritable=rewritable, ticker=ticker)
else:
loader = self
- dep_element = loader.load_file(dep.name, rewritable, ticker)
+ dep_element = loader._load_file(dep.name, rewritable, ticker)
- if _yaml.node_get(dep_element.data, str, Symbol.KIND) == 'junction':
+ if _yaml.node_get(dep_element.node, str, Symbol.KIND) == 'junction':
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: Cannot depend on junction"
.format(dep.provenance))
return element
- ########################################
- # Checking Circular Dependencies #
- ########################################
+ # _check_circular_deps():
#
# Detect circular dependencies on LoadElements with
# dependencies already resolved.
#
- def check_circular_deps(self, element_name, check_elements=None, validated=None):
+ # Args:
+ # element_name (str): The element-path relative element name to check
+ #
+ # Raises:
+ # (LoadError): In case there was a circular dependency error
+ #
+ def _check_circular_deps(self, element_name, check_elements=None, validated=None):
if check_elements is None:
check_elements = {}
if validated is None:
validated = {}
- element = self.elements[element_name]
+ element = self._elements[element_name]
# element name must be unique across projects
# to be usable as key for the check_elements and validated dicts
@@ -461,16 +265,14 @@ class Loader():
# Push / Check each dependency / Pop
check_elements[element_name] = True
for dep in element.deps:
- loader = self.get_loader_for_dep(dep)
- loader.check_circular_deps(dep.name, check_elements, validated)
+ loader = self._get_loader_for_dep(dep)
+ loader._check_circular_deps(dep.name, check_elements, validated)
del check_elements[element_name]
# Eliminate duplicate paths
validated[element_name] = True
- ########################################
- # Element Sorting #
- ########################################
+ # _sort_dependencies():
#
# Sort dependencies of each element by their dependencies,
# so that direct dependencies which depend on other direct
@@ -479,11 +281,15 @@ class Loader():
#
# This avoids the need for performing multiple topological
# sorts throughout the build process.
- def sort_dependencies(self, element_name, visited=None):
+ #
+ # Args:
+ # element_name (str): The element-path relative element name to sort
+ #
+ def _sort_dependencies(self, element_name, visited=None):
if visited is None:
visited = {}
- element = self.elements[element_name]
+ element = self._elements[element_name]
# element name must be unique across projects
# to be usable as key for the visited dict
@@ -493,8 +299,8 @@ class Loader():
return
for dep in element.deps:
- loader = self.get_loader_for_dep(dep)
- loader.sort_dependencies(dep.name, visited=visited)
+ loader = self._get_loader_for_dep(dep)
+ loader._sort_dependencies(dep.name, visited=visited)
def dependency_cmp(dep_a, dep_b):
element_a = self.get_element_for_dep(dep_a)
@@ -542,30 +348,35 @@ class Loader():
visited[element_name] = True
- ########################################
- # Element Collection #
- ########################################
- # Collect the toplevel elements we have, resolve their deps and return !
+ # _collect_element()
+ #
+ # Collect the toplevel elements we have
+ #
+ # Args:
+ # element_name (str): The element-path relative element name to sort
+ #
+ # Returns:
+ # (MetaElement): A recursively loaded MetaElement
#
- def collect_element(self, element_name):
+ def _collect_element(self, element_name):
- element = self.elements[element_name]
+ element = self._elements[element_name]
# Return the already built one, if we already built it
- meta_element = self.meta_elements.get(element_name)
+ meta_element = self._meta_elements.get(element_name)
if meta_element:
return meta_element
- data = element.data
- elt_provenance = _yaml.node_get_provenance(data)
+ node = element.node
+ elt_provenance = _yaml.node_get_provenance(node)
meta_sources = []
- sources = _yaml.node_get(data, list, Symbol.SOURCES, default_value=[])
+ sources = _yaml.node_get(node, list, Symbol.SOURCES, default_value=[])
# Safe loop calling into _yaml.node_get() for each element ensures
# we have good error reporting
for i in range(len(sources)):
- source = _yaml.node_get(data, Mapping, Symbol.SOURCES, indices=[i])
+ source = _yaml.node_get(node, Mapping, Symbol.SOURCES, indices=[i])
provenance = _yaml.node_get_provenance(source)
kind = _yaml.node_get(source, str, Symbol.KIND)
del source[Symbol.KIND]
@@ -579,18 +390,18 @@ class Loader():
meta_source = MetaSource(element_name, index, kind, source, directory)
meta_sources.append(meta_source)
- kind = _yaml.node_get(data, str, Symbol.KIND)
+ kind = _yaml.node_get(node, str, Symbol.KIND)
meta_element = MetaElement(self.project, element_name, kind,
elt_provenance, meta_sources,
- _yaml.node_get(data, Mapping, Symbol.CONFIG, default_value={}),
- _yaml.node_get(data, Mapping, Symbol.VARIABLES, default_value={}),
- _yaml.node_get(data, Mapping, Symbol.ENVIRONMENT, default_value={}),
- _yaml.node_get(data, list, Symbol.ENV_NOCACHE, default_value=[]),
- _yaml.node_get(data, Mapping, Symbol.PUBLIC, default_value={}),
- _yaml.node_get(data, Mapping, Symbol.SANDBOX, default_value={}))
+ _yaml.node_get(node, Mapping, Symbol.CONFIG, default_value={}),
+ _yaml.node_get(node, Mapping, Symbol.VARIABLES, default_value={}),
+ _yaml.node_get(node, Mapping, Symbol.ENVIRONMENT, default_value={}),
+ _yaml.node_get(node, list, Symbol.ENV_NOCACHE, default_value=[]),
+ _yaml.node_get(node, Mapping, Symbol.PUBLIC, default_value={}),
+ _yaml.node_get(node, Mapping, Symbol.SANDBOX, default_value={}))
# Cache it now, make sure it's already there before recursing
- self.meta_elements[element_name] = meta_element
+ self._meta_elements[element_name] = meta_element
# Descend
for dep in element.deps:
@@ -598,8 +409,8 @@ class Loader():
raise LoadError(LoadErrorReason.INVALID_DATA,
"{}: Junctions do not support dependencies".format(dep.provenance))
- loader = self.get_loader_for_dep(dep)
- meta_dep = loader.collect_element(dep.name)
+ loader = self._get_loader_for_dep(dep)
+ meta_dep = loader._collect_element(dep.name)
if dep.dep_type != 'runtime':
meta_element.build_dependencies.append(meta_dep)
if dep.dep_type != 'build':
@@ -607,35 +418,123 @@ class Loader():
return meta_element
- def get_loader_for_dep(self, dep):
- if dep.junction:
- # junction dependency, delegate to appropriate loader
- return self.loaders[dep.junction]
- else:
- return self
+ # _get_loader():
+ #
+ # Return loader for specified junction
+ #
+ # Args:
+ # filename (str): Junction name
+ #
+ # Raises: LoadError
+ #
+ # Returns: A Loader or None if specified junction does not exist
+ def _get_loader(self, filename, *, rewritable=False, ticker=None, level=0):
+ # return previously determined result
+ if filename in self._loaders:
+ loader = self._loaders[filename]
- def get_element_for_dep(self, dep):
- return self.get_loader_for_dep(dep).elements[dep.name]
+ if loader is None:
+ # do not allow junctions with the same name in different
+ # subprojects
+ raise LoadError(LoadErrorReason.CONFLICTING_JUNCTION,
+ "Conflicting junction {} in subprojects, define junction in {}"
+ .format(filename, self.project.name))
- # cleanup():
- #
- # Remove temporary checkout directories of subprojects
- def cleanup(self):
- if self.parent and not self.tempdir:
- # already done
- return
+ return loader
- # recurse
- for loader in self.loaders.values():
- # value may be None with nested junctions without overrides
- if loader is not None:
- loader.cleanup()
+ if self._parent:
+ # junctions in the parent take precedence over junctions defined
+ # in subprojects
+ loader = self._parent._get_loader(filename, rewritable=rewritable, ticker=ticker, level=level + 1)
+ if loader:
+ self._loaders[filename] = loader
+ return loader
- if not self.parent:
- # basedir of top-level loader is never a temporary directory
- return
+ try:
+ load_element = self._load_file(filename, rewritable, ticker)
+ except LoadError as e:
+ if e.reason != LoadErrorReason.MISSING_FILE:
+ # other load error
+ raise
- # safe guard to not accidentally delete directories outside builddir
- if self.tempdir.startswith(self.context.builddir + os.sep):
- if os.path.exists(self.tempdir):
- shutil.rmtree(self.tempdir)
+ if level == 0:
+ # junction element not found in this or ancestor projects
+ raise
+ else:
+ # mark junction as not available to allow detection of
+ # conflicting junctions in subprojects
+ self._loaders[filename] = None
+ return None
+
+ # meta junction element
+ meta_element = self._collect_element(filename)
+ if meta_element.kind != 'junction':
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: Expected junction but element kind is {}".format(filename, meta_element.kind))
+
+ platform = Platform.get_platform()
+ element = Element._new_from_meta(meta_element, platform.artifactcache)
+ element._preflight()
+
+ for source in element.sources():
+ # Handle the case where a subproject needs to be fetched
+ #
+ if source.get_consistency() == Consistency.RESOLVED:
+ if self._context._fetch_subprojects:
+ if ticker:
+ ticker(filename, 'Fetching subproject from {} source'.format(source.get_kind()))
+ source.fetch()
+ else:
+ detail = "Try fetching the project with `bst fetch {}`".format(filename)
+ raise LoadError(LoadErrorReason.SUBPROJECT_FETCH_NEEDED,
+ "Subproject fetch needed for junction: {}".format(filename),
+ detail=detail)
+
+ # Handle the case where a subproject has no ref
+ #
+ elif source.get_consistency() == Consistency.INCONSISTENT:
+ detail = "Try tracking the junction element with `bst track {}`".format(filename)
+ raise LoadError(LoadErrorReason.SUBPROJECT_INCONSISTENT,
+ "Subproject has no ref for junction: {}".format(filename),
+ detail=detail)
+
+ # Stage sources
+ os.makedirs(self._context.builddir, exist_ok=True)
+ basedir = tempfile.mkdtemp(prefix="{}-".format(element.normal_name), dir=self._context.builddir)
+ element._stage_sources_at(basedir, mount_workspaces=False)
+
+ # Load the project
+ project_dir = os.path.join(basedir, element.path)
+ try:
+ project = Project(project_dir, self._context, junction=element)
+ except LoadError as e:
+ if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
+ raise LoadError(reason=LoadErrorReason.INVALID_JUNCTION,
+ message="Could not find the project.conf file for {}. "
+ "Expecting a project at path '{}'"
+ .format(element, element.path or '.')) from e
+ else:
+ raise
+
+ loader = Loader(self._context, project, [], parent=self, tempdir=basedir)
+
+ self._loaders[filename] = loader
+
+ return loader
+
+ # _get_loader_for_dep():
+ #
+ # Gets the appropriate Loader for a Dependency object
+ #
+ # Args:
+ # dep (Dependency): A Dependency object
+ #
+ # Returns:
+ # (Loader): The Loader object to use for this Dependency
+ #
+ def _get_loader_for_dep(self, dep):
+ if dep.junction:
+ # junction dependency, delegate to appropriate loader
+ return self._loaders[dep.junction]
+ else:
+ return self
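
The _check_circular_deps() method shown earlier in this file's diff walks the dependency graph with two dicts: check_elements records the current recursion path (pushed before descending, popped after), and validated records elements already proven acyclic so duplicate paths are not re-walked. A minimal, self-contained sketch of that scheme over a plain name-to-dependency-list dict (not the real Loader API; the error type and element names are illustrative):

    DEPS = {
        'a.bst': ['b.bst'],
        'b.bst': ['c.bst'],
        'c.bst': ['a.bst'],   # cycle: a -> b -> c -> a
    }

    def check_circular_deps(name, check_elements=None, validated=None):
        if check_elements is None:
            check_elements = {}
        if validated is None:
            validated = {}
        if name in check_elements:
            raise RuntimeError("Circular dependency detected at element: {}".format(name))
        if name in validated:
            return                        # already proven acyclic
        check_elements[name] = True       # push onto the current path
        for dep in DEPS.get(name, []):
            check_circular_deps(dep, check_elements, validated)
        del check_elements[name]          # pop off the current path
        validated[name] = True            # eliminate duplicate paths

    check_circular_deps('a.bst')          # raises RuntimeError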
diff --git a/buildstream/_loader/types.py b/buildstream/_loader/types.py
new file mode 100644
index 000000000..9d96894c0
--- /dev/null
+++ b/buildstream/_loader/types.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2018 Codethink Limited
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+# Authors:
+# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
+
+
+# Symbol():
+#
+# A simple object to denote the symbols we load with from YAML
+#
+class Symbol():
+ FILENAME = "filename"
+ KIND = "kind"
+ DEPENDS = "depends"
+ SOURCES = "sources"
+ CONFIG = "config"
+ VARIABLES = "variables"
+ ENVIRONMENT = "environment"
+ ENV_NOCACHE = "environment-nocache"
+ PUBLIC = "public"
+ TYPE = "type"
+ BUILD = "build"
+ RUNTIME = "runtime"
+ ALL = "all"
+ DIRECTORY = "directory"
+ JUNCTION = "junction"
+ SANDBOX = "sandbox"
+
+
+# Dependency()
+#
+# A simple object describing a dependency
+#
+# Args:
+# name (str): The element name
+# dep_type (str): The type of dependency, can be
+# Symbol.ALL, Symbol.BUILD, or Symbol.RUNTIME
+# junction (str): The element name of the junction, or None
+# provenance (Provenance): The YAML node provenance of where this
+# dependency was declared
+#
+class Dependency():
+ def __init__(self, name,
+ dep_type=None, junction=None, provenance=None):
+ self.name = name
+ self.dep_type = dep_type
+ self.junction = junction
+ self.provenance = provenance
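
Finally, a short usage sketch of the two types above, assuming the in-tree import path created by this patch (the element and junction names are made up):

    from buildstream._loader.types import Symbol, Dependency

    # A plain string dependency becomes a Dependency carrying only a name
    d1 = Dependency('base.bst')

    # A dict-style dependency also carries a type and, optionally, a junction
    d2 = Dependency('gtk.bst', dep_type=Symbol.BUILD, junction='freedesktop.bst')

    assert d1.dep_type is None              # None is treated the same as Symbol.ALL
    assert d2.junction == 'freedesktop.bst'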