summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorTristan Van Berkom <tristan.vanberkom@codethink.co.uk>2018-03-09 17:45:13 +0900
committerTristan Van Berkom <tristan.vanberkom@codethink.co.uk>2018-03-20 17:46:42 +0900
commit19cad981007d514cf15218b783ae05ed16cb511a (patch)
tree674684882a55e4b9901c6b466f95f7d42cd1a481
parente2392ce7ca22eb6bedfa828e6ad77ca13d8656d1 (diff)
downloadbuildstream-19cad981007d514cf15218b783ae05ed16cb511a.tar.gz
Fix #248 - Support project.refs in the core.
This adds a new Source.load_ref() API which is technically optional to implement, projects which make use of a project.refs file must only use source plugins which implement the new load_ref() method. * source.py: Added load_ref() API to load a ref from a specified node. This also adds _load_ref() and _save_ref() wrappers which handle the logistics of when to load and save a ref to which location. This also fixes _set_ref() to apply the ref to the node unconditionally, this must be done independently of whether the ref actually changed. o Modifications to the loading process such that Source now can have access to the element name and source index. o _pipeline.py: Delegate abstract loading of source refs to Source._load_ref() - Print a summarized warning about redundant source references - Assert that one cannot track cross-junction elements without project.refs. o _scheduler/trackqueue.py: Delegate saving refs to Source._save_ref()
-rw-r--r--buildstream/_loader.py5
-rw-r--r--buildstream/_metasource.py9
-rw-r--r--buildstream/_pipeline.py68
-rw-r--r--buildstream/_scheduler/trackqueue.py33
-rw-r--r--buildstream/source.py154
5 files changed, 235 insertions, 34 deletions
diff --git a/buildstream/_loader.py b/buildstream/_loader.py
index 2b91b34fd..9793581e0 100644
--- a/buildstream/_loader.py
+++ b/buildstream/_loader.py
@@ -572,9 +572,8 @@ class Loader():
directory = None
index = sources.index(source)
- source_name = "{}-{}".format(element_name, index)
-
- meta_source = MetaSource(source_name, kind, source, directory,
+ meta_source = MetaSource(element_name, index,
+ kind, source, directory,
provenance.node,
provenance.toplevel,
provenance.filename)
diff --git a/buildstream/_metasource.py b/buildstream/_metasource.py
index 8413ddbd8..9681b98f2 100644
--- a/buildstream/_metasource.py
+++ b/buildstream/_metasource.py
@@ -26,15 +26,18 @@ class MetaSource():
# An abstract object holding data suitable for constructing a Source
#
# Args:
- # name: The name of the source, for display purposes
+ # element_name: The name of the owning element
+ # element_index: The index of the source in the owning element's source list
# kind: The kind of the source
# config: The configuration data for the source
# origin_node: The original YAML dictionary node defining this source
# origin_toplevel: The toplevel YAML loaded from the original file
# origin_filename: The filename in which the node was loaded from
#
- def __init__(self, name, kind, config, directory, origin_node, origin_toplevel, origin_filename):
- self.name = name
+ def __init__(self, element_name, element_index, kind, config, directory,
+ origin_node, origin_toplevel, origin_filename):
+ self.element_name = element_name
+ self.element_index = element_index
self.kind = kind
self.config = config
self.directory = directory
diff --git a/buildstream/_pipeline.py b/buildstream/_pipeline.py
index 15fcf42fd..d4f65b497 100644
--- a/buildstream/_pipeline.py
+++ b/buildstream/_pipeline.py
@@ -37,6 +37,7 @@ from . import Scope
from . import _site
from . import utils
from ._platform import Platform
+from ._project import ProjectRefStorage
from ._artifactcache.artifactcache import ArtifactCacheSpec, configured_remote_artifact_cache_specs
from ._scheduler import SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
@@ -115,6 +116,7 @@ class Pipeline():
self.total_elements = 0
self.unused_workspaces = []
self._resolved_elements = {}
+ self._redundant_refs = []
# Load selected platform
Platform._create_instance(context, project)
@@ -131,6 +133,17 @@ class Pipeline():
resolved_elements = [self.resolve(meta_element)
for meta_element in meta_elements]
+ # Now warn about any redundant source references which may have
+ # been discovered in the resolve() phase.
+ if self._redundant_refs:
+ detail = "The following inline specified source references will be ignored:\n\n"
+ lines = [
+ "{}:{}".format(source._get_provenance(), ref)
+ for source, ref in self._redundant_refs
+ ]
+ detail += "\n".join(lines)
+ self.message(MessageType.WARN, "Ignoring redundant source references", detail=detail)
+
self.targets = resolved_elements[:len(targets)]
self.exceptions = resolved_elements[len(targets):]
@@ -242,6 +255,43 @@ class Pipeline():
detail += " " + element.name + "\n"
raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline")
+ # assert_junction_tracking()
+ #
+ # Raises an error if tracking is attempted on junctioned elements and
+ # a project.refs file is not enabled for the toplevel project.
+ #
+ # Args:
+ # elements (list of Element): The list of elements to be tracked
+ # build (bool): Whether this is being called for `bst build`, otherwise `bst track`
+ #
+ # The `build` argument is only useful for suggesting an appropriate
+ # alternative to the user
+ #
+ def assert_junction_tracking(self, elements, *, build):
+
+ # We can track anything if the toplevel project uses project.refs
+ #
+ if self.project._ref_storage == ProjectRefStorage.PROJECT_REFS:
+ return
+
+        # Ideally, we would want to report every cross junction element but not
+        # their dependencies, unless those cross junction elements' dependencies
+        # were also explicitly requested on the command line.
+        #
+        # But this is too hard, let's shoot for a simple error.
+ for element in elements:
+ element_project = element._get_project()
+ if element_project is not self.project:
+ suggestion = '--except'
+ if build:
+ suggestion = '--track-except'
+
+ detail = "Requested to track sources across junction boundaries\n" + \
+ "in a project which does not use separate source references.\n\n" + \
+ "Try using `{}` arguments to limit the scope of tracking.".format(suggestion)
+
+ raise PipelineError("Untrackable sources", detail=detail, reason="untrackable-sources")
+
# Generator function to iterate over only the elements
# which are required to build the pipeline target, omitting
# cached elements. The elements are yielded in a depth sorted
@@ -274,6 +324,9 @@ class Pipeline():
# Internal: Instantiates plugin-provided Element and Source instances
# from MetaElement and MetaSource objects
#
+ # This has a side effect of populating `self._redundant_refs` so
+ # we can later print a warning
+ #
def resolve(self, meta_element):
if meta_element in self._resolved_elements:
return self._resolved_elements[meta_element]
@@ -292,10 +345,13 @@ class Pipeline():
# resolve sources
for meta_source in meta_element.sources:
- element._add_source(
- meta_element.project._create_source(meta_source.kind,
- meta_source)
- )
+ source = meta_element.project._create_source(meta_source.kind, meta_source)
+ redundant_ref = source._load_ref()
+ element._add_source(source)
+
+ # Collect redundant refs for a warning message
+ if redundant_ref is not None:
+ self._redundant_refs.append((source, redundant_ref))
return element
@@ -322,6 +378,8 @@ class Pipeline():
track.enqueue(dependencies)
self.session_elements = len(dependencies)
+ self.assert_junction_tracking(dependencies, build=False)
+
self.message(MessageType.START, "Starting track")
elapsed, status = scheduler.run([track])
changed = len(track.processed_elements)
@@ -427,6 +485,8 @@ class Pipeline():
if track_first:
track_plan = self.get_elements_to_track(track_first)
+ self.assert_junction_tracking(track_plan, build=True)
+
if build_all:
plan = self.dependencies(Scope.ALL)
else:
diff --git a/buildstream/_scheduler/trackqueue.py b/buildstream/_scheduler/trackqueue.py
index c92239053..8f2da7b10 100644
--- a/buildstream/_scheduler/trackqueue.py
+++ b/buildstream/_scheduler/trackqueue.py
@@ -24,6 +24,7 @@ import os
# BuildStream toplevel imports
from ..plugin import _plugin_lookup
+from .. import SourceError
from .. import _yaml
# Local imports
@@ -58,28 +59,16 @@ class TrackQueue(Queue):
# Set the new refs in the main process one by one as they complete
for unique_id, new_ref in result:
source = _plugin_lookup(unique_id)
- if source._set_ref(new_ref, source._Source__origin_node):
-
- changed = True
- project = source._get_project()
- toplevel = source._Source__origin_toplevel
- filename = source._Source__origin_filename
- fullname = os.path.join(project.element_path, filename)
-
- # Here we are in master process, what to do if writing
- # to the disk fails for some reason ?
- try:
- _yaml.dump(toplevel, fullname)
- except OSError as e:
- # FIXME: We currently dont have a clear path to
- # fail the scheduler from the main process, so
- # this will just warn and BuildStream will exit
- # with a success code.
- #
- source.warn("Failed to update project file",
- detail="{}: Failed to rewrite "
- "tracked source to file {}: {}"
- .format(source, fullname, e))
+ try:
+ source._save_ref(new_ref)
+ except SourceError as e:
+                # FIXME: We currently don't have a clear path to
+ # fail the scheduler from the main process, so
+ # this will just warn and BuildStream will exit
+ # with a success code.
+ #
+ source.warn("Failed to update project file",
+ detail="{}".format(e))
context = element._get_context()
context._push_message_depth(True)
diff --git a/buildstream/source.py b/buildstream/source.py
index 61bc3b549..25476bc08 100644
--- a/buildstream/source.py
+++ b/buildstream/source.py
@@ -29,6 +29,7 @@ from contextlib import contextmanager
from . import Plugin
from . import _yaml, utils
from ._exceptions import BstError, ImplError, LoadError, LoadErrorReason, ErrorDomain
+from ._projectrefs import ProjectRefStorage
class Consistency():
@@ -80,8 +81,11 @@ class Source(Plugin):
def __init__(self, context, project, meta):
provenance = _yaml.node_get_provenance(meta.config)
- super().__init__(meta.name, context, project, provenance, "source")
+ super().__init__("{}-{}".format(meta.element_name, meta.element_index),
+ context, project, provenance, "source")
+ self.__element_name = meta.element_name # The name of the element owning this source
+ self.__element_index = meta.element_index # The index of the source in the owning element's source list
self.__directory = meta.directory # Staging relative directory
self.__origin_node = meta.origin_node # YAML node this Source was loaded from
self.__origin_toplevel = meta.origin_toplevel # Toplevel YAML node for the file
@@ -186,6 +190,16 @@ class Source(Plugin):
"""
raise ImplError("Source plugin '{}' does not implement get_consistency()".format(self.get_kind()))
+ def load_ref(self, node):
+ """Loads the *ref* for this Source from the specified *node*.
+
+ Args:
+ node (dict): The YAML node to load the ref from
+
+ *Since: 1.2*
+ """
+ raise ImplError("Source plugin '{}' does not implement load_ref()".format(self.get_kind()))
+
def get_ref(self):
"""Fetch the internal ref, however it is represented
@@ -438,13 +452,149 @@ class Source(Plugin):
# This comparison should work even for tuples and lists,
# but we're mostly concerned about simple strings anyway.
if current_ref != ref:
- self.set_ref(ref, node)
changed = True
+ # Set the ref regardless of whether it changed, the
+ # TrackQueue() will want to update a specific node with
+ # the ref, regardless of whether the original has changed.
+ self.set_ref(ref, node)
+
self.__tracking = False
return changed
+ # _load_ref():
+ #
+ # Loads the ref for the said source.
+ #
+ # Raises:
+ # (SourceError): If the source does not implement load_ref()
+ #
+ # Returns:
+    # (ref): A redundant ref specified inline for a project which uses project.refs
+ #
+ # This is partly a wrapper around `Source.load_ref()`, it will decide
+ # where to load the ref from depending on which project the source belongs
+ # to and whether that project uses a project.refs file.
+ #
+ # Note the return value is used to construct a summarized warning in the
+ # case that the toplevel project uses project.refs and also lists refs
+ # which will be ignored.
+ #
+ def _load_ref(self):
+ context = self._get_context()
+ project = self._get_project()
+ toplevel = context._get_toplevel_project()
+ redundant_ref = None
+
+ element_name = self.__element_name
+ element_idx = self.__element_index
+
+ def do_load_ref(node):
+ try:
+ self.load_ref(ref_node)
+ except ImplError as e:
+ raise SourceError("{}: Storing refs in project.refs is not supported by '{}' sources"
+ .format(self, self.get_kind()),
+ reason="unsupported-load-ref") from e
+
+ # If the main project overrides the ref, use the override
+ if project is not toplevel and toplevel._ref_storage == ProjectRefStorage.PROJECT_REFS:
+ ref_node = toplevel.refs.lookup_ref(project.name, element_name, element_idx)
+ if ref_node is not None:
+ do_load_ref(ref_node)
+
+ # If the project itself uses project.refs, clear the ref which
+ # was already loaded via Source.configure(), as this would
+ # violate the rule of refs being either in project.refs or in
+ # the elements themselves.
+ #
+ elif project._ref_storage == ProjectRefStorage.PROJECT_REFS:
+
+ # First warn if there is a ref already loaded, and reset it
+ redundant_ref = self.get_ref()
+ if redundant_ref is not None:
+ self.set_ref(None, {})
+
+ # Try to load the ref
+ ref_node = project.refs.lookup_ref(project.name, element_name, element_idx)
+ if ref_node is not None:
+ do_load_ref(ref_node)
+
+ return redundant_ref
+
+ # _save_ref()
+ #
+ # Persists the ref for this source. This will decide where to save the
+ # ref, or refuse to persist it, depending on active ref-storage project
+ # settings.
+ #
+ # Args:
+    # new_ref (any): The new reference to save
+ #
+ # Raises:
+ # (SourceError): In the case we encounter errors saving a file to disk
+ #
+ def _save_ref(self, new_ref):
+
+ context = self._get_context()
+ project = self._get_project()
+ toplevel = context._get_toplevel_project()
+
+ element_name = self.__element_name
+ element_idx = self.__element_index
+
+ #
+ # Step 1 - Obtain the node
+ #
+ if project is toplevel:
+ if toplevel._ref_storage == ProjectRefStorage.PROJECT_REFS:
+ node = toplevel.refs.lookup_ref(project.name, element_name, element_idx, write=True)
+ else:
+ node = self.__origin_node
+ else:
+ if toplevel._ref_storage == ProjectRefStorage.PROJECT_REFS:
+ node = toplevel.refs.lookup_ref(project.name, element_name, element_idx, write=True)
+ else:
+ node = {}
+
+ #
+ # Step 2 - Set the ref in memory, and determine changed state
+ #
+ changed = self._set_ref(new_ref, node)
+
+ def do_save_refs(refs):
+ try:
+ refs.save()
+ except OSError as e:
+ raise SourceError("{}: Error saving source reference to 'project.refs': {}"
+ .format(self, e),
+ reason="save-ref-error") from e
+
+ #
+ # Step 3 - Apply the change in project data
+ #
+ if project is toplevel:
+ if toplevel._ref_storage == ProjectRefStorage.PROJECT_REFS:
+ do_save_refs(toplevel.refs)
+ else:
+ # Save the ref in the originating file
+ #
+ toplevel_node = self.__origin_toplevel
+ filename = self.__origin_filename
+ fullname = os.path.join(toplevel.element_path, filename)
+ try:
+ _yaml.dump(toplevel_node, fullname)
+ except OSError as e:
+ raise SourceError("{}: Error saving source reference to '{}': {}"
+ .format(self, filename, e),
+ reason="save-ref-error") from e
+ else:
+ if toplevel._ref_storage == ProjectRefStorage.PROJECT_REFS:
+ do_save_refs(toplevel.refs)
+ else:
+ self.warn("{}: Not persisting new reference in junctioned project".format(self))
+
# Wrapper for track()
#
def _track(self):