diff options
author | Tristan Van Berkom <tristan.vanberkom@codethink.co.uk> | 2018-05-02 15:30:20 +0900 |
---|---|---|
committer | Tristan Van Berkom <tristan.vanberkom@codethink.co.uk> | 2018-05-08 03:59:38 +0900 |
commit | b8e15706a51272e4f4e116d9e373fd2581102868 (patch) | |
tree | 25518bb2f60418749a32418c3523a19c11e232e6 | |
parent | a542c81858d94e0be06216184f3df61600805138 (diff) | |
download | buildstream-b8e15706a51272e4f4e116d9e373fd2581102868.tar.gz |
_artifactcache: Added ArtifactCache.setup_remotes()
This removes some additional initialization code from Pipeline().
Some symbols have changed here; the initialization is now called
from Stream(), and a test case was also adjusted for this.
-rw-r--r-- | buildstream/_artifactcache/__init__.py | 1 | ||||
-rw-r--r-- | buildstream/_artifactcache/artifactcache.py | 176 | ||||
-rw-r--r-- | buildstream/_context.py | 4 | ||||
-rw-r--r-- | buildstream/_pipeline.py | 32 | ||||
-rw-r--r-- | buildstream/_project.py | 4 | ||||
-rw-r--r-- | buildstream/_stream.py | 12 | ||||
-rw-r--r-- | tests/artifactcache/config.py | 5 |
7 files changed, 131 insertions, 103 deletions
diff --git a/buildstream/_artifactcache/__init__.py b/buildstream/_artifactcache/__init__.py index a9a97b04a..66373fd75 100644 --- a/buildstream/_artifactcache/__init__.py +++ b/buildstream/_artifactcache/__init__.py @@ -19,4 +19,3 @@ # Tristan Van Berkom <tristan.vanberkom@codethink.co.uk> from .artifactcache import ArtifactCache, ArtifactCacheSpec -from .artifactcache import artifact_cache_specs_from_config_node, configured_remote_artifact_cache_specs diff --git a/buildstream/_artifactcache/artifactcache.py b/buildstream/_artifactcache/artifactcache.py index 8757094e6..352cdb4a9 100644 --- a/buildstream/_artifactcache/artifactcache.py +++ b/buildstream/_artifactcache/artifactcache.py @@ -23,6 +23,7 @@ import string from collections import Mapping, namedtuple from .._exceptions import ImplError, LoadError, LoadErrorReason +from .._message import Message, MessageType from .. import utils from .. import _yaml @@ -53,56 +54,6 @@ class ArtifactCacheSpec(namedtuple('ArtifactCacheSpec', 'url push')): return ArtifactCacheSpec(url, push) -# artifact_cache_specs_from_config_node() -# -# Parses the configuration of remote artifact caches from a config block. -# -# Args: -# config_node (dict): The config block, which may contain the 'artifacts' key -# -# Returns: -# A list of ArtifactCacheSpec instances. -# -# Raises: -# LoadError, if the config block contains invalid keys. 
-# -def artifact_cache_specs_from_config_node(config_node): - cache_specs = [] - - artifacts = config_node.get('artifacts', []) - if isinstance(artifacts, Mapping): - cache_specs.append(ArtifactCacheSpec._new_from_config_node(artifacts)) - elif isinstance(artifacts, list): - for spec_node in artifacts: - cache_specs.append(ArtifactCacheSpec._new_from_config_node(spec_node)) - else: - provenance = _yaml.node_get_provenance(config_node, key='artifacts') - raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA, - "%s: 'artifacts' must be a single 'url:' mapping, or a list of mappings" % - (str(provenance))) - return cache_specs - - -# configured_remote_artifact_cache_specs(): -# -# Return the list of configured artifact remotes for a given project, in priority -# order. This takes into account the user and project configuration. -# -# Args: -# context (Context): The BuildStream context -# project (Project): The BuildStream project -# -# Returns: -# A list of ArtifactCacheSpec instances describing the remote artifact caches. -# -def configured_remote_artifact_cache_specs(context, project): - project_overrides = context.get_overrides(project.name) - project_extra_specs = artifact_cache_specs_from_config_node(project_overrides) - - return list(utils._deduplicate( - project_extra_specs + project.artifact_cache_specs + context.artifact_cache_specs)) - - # An ArtifactCache manages artifacts. # # Args: @@ -154,21 +105,63 @@ class ArtifactCache(): # assume project and element names are not allowed to contain slashes return '{0}/{1}/{2}'.format(project.name, element_name, key) - # set_remotes(): + # setup_remotes(): # - # Set the list of remote caches. If project is None, the global list of - # remote caches will be set, which is used by all projects. If a project is - # specified, the per-project list of remote caches will be set. + # Sets up which remotes to use # # Args: - # remote_specs (list): List of ArtifactCacheSpec instances, in priority order. 
- # project (Project): The Project instance for project-specific remotes - def set_remotes(self, remote_specs, *, project=None): - if project is None: - # global remotes - self.global_remote_specs = remote_specs + # use_config (bool): Whether to use project configuration + # remote_url (str): Remote artifact cache URL + # + # This requires that all of the projects which are to be processed in the session + # have already been loaded and are observable in the Context. + # + def setup_remotes(self, *, use_config=False, remote_url=None): + + # Initialize remote artifact caches. We allow the commandline to override + # the user config in some cases (for example `bst push --remote=...`). + has_remote_caches = False + if remote_url: + self._set_remotes([ArtifactCacheSpec(remote_url, push=True)]) + has_remote_caches = True + if use_config: + for project in self.context.get_projects(): + artifact_caches = _configured_remote_artifact_cache_specs(self.context, project) + if artifact_caches: # artifact_caches is a list of ArtifactCacheSpec instances + self._set_remotes(artifact_caches, project=project) + has_remote_caches = True + if has_remote_caches: + self._initialize_remotes() + + # specs_from_config_node() + # + # Parses the configuration of remote artifact caches from a config block. + # + # Args: + # config_node (dict): The config block, which may contain the 'artifacts' key + # + # Returns: + # A list of ArtifactCacheSpec instances. + # + # Raises: + # LoadError, if the config block contains invalid keys. 
+ # + @staticmethod + def specs_from_config_node(config_node): + cache_specs = [] + + artifacts = config_node.get('artifacts', []) + if isinstance(artifacts, Mapping): + cache_specs.append(ArtifactCacheSpec._new_from_config_node(artifacts)) + elif isinstance(artifacts, list): + for spec_node in artifacts: + cache_specs.append(ArtifactCacheSpec._new_from_config_node(spec_node)) else: - self.project_remote_specs[project] = remote_specs + provenance = _yaml.node_get_provenance(config_node, key='artifacts') + raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA, + "%s: 'artifacts' must be a single 'url:' mapping, or a list of mappings" % + (str(provenance))) + return cache_specs ################################################ # Abstract methods for subclasses to implement # @@ -356,3 +349,64 @@ class ArtifactCache(): def link_key(self, element, oldkey, newkey): raise ImplError("Cache '{kind}' does not implement link_key()" .format(kind=type(self).__name__)) + + ################################################ + # Local Private Methods # + ################################################ + + # _message() + # + # Local message propagator + # + def _message(self, message_type, message, **kwargs): + args = dict(kwargs) + self.context.message( + Message(None, message_type, message, **args)) + + # _set_remotes(): + # + # Set the list of remote caches. If project is None, the global list of + # remote caches will be set, which is used by all projects. If a project is + # specified, the per-project list of remote caches will be set. + # + # Args: + # remote_specs (list): List of ArtifactCacheSpec instances, in priority order. 
+ # project (Project): The Project instance for project-specific remotes + def _set_remotes(self, remote_specs, *, project=None): + if project is None: + # global remotes + self.global_remote_specs = remote_specs + else: + self.project_remote_specs[project] = remote_specs + + # _initialize_remotes() + # + # An internal wrapper which calls the abstract method and + # reports takes care of messaging + # + def _initialize_remotes(self): + def remote_failed(url, error): + self._message(MessageType.WARN, "Failed to fetch remote refs from {}: {}".format(url, error)) + + with self.context.timed_activity("Initializing remote caches", silent_nested=True): + self.initialize_remotes(on_failure=remote_failed) + + +# _configured_remote_artifact_cache_specs(): +# +# Return the list of configured artifact remotes for a given project, in priority +# order. This takes into account the user and project configuration. +# +# Args: +# context (Context): The BuildStream context +# project (Project): The BuildStream project +# +# Returns: +# A list of ArtifactCacheSpec instances describing the remote artifact caches. +# +def _configured_remote_artifact_cache_specs(context, project): + project_overrides = context.get_overrides(project.name) + project_extra_specs = ArtifactCache.specs_from_config_node(project_overrides) + + return list(utils._deduplicate( + project_extra_specs + project.artifact_cache_specs + context.artifact_cache_specs)) diff --git a/buildstream/_context.py b/buildstream/_context.py index 4d5b2b87d..bf7f49515 100644 --- a/buildstream/_context.py +++ b/buildstream/_context.py @@ -29,7 +29,7 @@ from . 
import _yaml from ._exceptions import LoadError, LoadErrorReason, BstError from ._message import Message, MessageType from ._profile import Topics, profile_start, profile_end -from ._artifactcache import artifact_cache_specs_from_config_node +from ._artifactcache import ArtifactCache # Context() @@ -164,7 +164,7 @@ class Context(): setattr(self, directory, path) # Load artifact share configuration - self.artifact_cache_specs = artifact_cache_specs_from_config_node(defaults) + self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults) # Load logging config logging = _yaml.node_get(defaults, Mapping, 'logging') diff --git a/buildstream/_pipeline.py b/buildstream/_pipeline.py index 010e951b5..5327d3fb6 100644 --- a/buildstream/_pipeline.py +++ b/buildstream/_pipeline.py @@ -28,7 +28,6 @@ from ._loader import Loader from .element import Element from . import Scope, Consistency from ._project import ProjectRefStorage -from ._artifactcache.artifactcache import ArtifactCacheSpec, configured_remote_artifact_cache_specs # PipelineSelection() @@ -139,15 +138,11 @@ class Pipeline(): # Initialize the pipeline # # Args: - # use_configured_remote_caches (bool): Whether to contact configured remote artifact caches - # add_remote_cache (str): The URL for an additional remote artifact cache # track_element (list of Elements): List of elements specified by the frontend for tracking # track_cross_junctions (bool): Whether tracking is allowed to cross junction boundaries # track_selection (PipelineSelection): The selection algorithm for track elements # def initialize(self, - use_configured_remote_caches=False, - add_remote_cache=None, track_elements=None, track_cross_junctions=False, track_selection=PipelineSelection.ALL): @@ -155,21 +150,6 @@ class Pipeline(): # Preflight directly, before ever interrogating caches or anything. self._preflight() - # Initialize remote artifact caches. 
We allow the commandline to override - # the user config in some cases (for example `bst push --remote=...`). - has_remote_caches = False - if add_remote_cache: - self._artifacts.set_remotes([ArtifactCacheSpec(add_remote_cache, push=True)]) - has_remote_caches = True - if use_configured_remote_caches: - for project in self.context.get_projects(): - artifact_caches = configured_remote_artifact_cache_specs(self.context, project) - if artifact_caches: # artifact_caches is a list of ArtifactCacheSpec instances - self._artifacts.set_remotes(artifact_caches, project=project) - has_remote_caches = True - if has_remote_caches: - self._initialize_remote_caches() - # Work out what we're going track, if anything self._track_cross_junctions = track_cross_junctions if track_elements: @@ -355,18 +335,6 @@ class Pipeline(): for element in self.dependencies(Scope.ALL): element._preflight() - # _initialize_remote_caches() - # - # Initialize remote artifact caches, checking what - # artifacts are contained by the artifact cache remotes - # - def _initialize_remote_caches(self): - def remote_failed(url, error): - self._message(MessageType.WARN, "Failed to fetch remote refs from {}: {}".format(url, error)) - - with self.context.timed_activity("Initializing remote caches", silent_nested=True): - self._artifacts.initialize_remotes(on_failure=remote_failed) - # _resolve_cache_keys() # # Initially resolve the cache keys diff --git a/buildstream/_project.py b/buildstream/_project.py index d4afd7712..12074ab3a 100644 --- a/buildstream/_project.py +++ b/buildstream/_project.py @@ -29,7 +29,7 @@ from . 
import _yaml from ._profile import Topics, profile_start, profile_end from ._exceptions import LoadError, LoadErrorReason from ._options import OptionPool -from ._artifactcache import artifact_cache_specs_from_config_node +from ._artifactcache import ArtifactCache from ._elementfactory import ElementFactory from ._sourcefactory import SourceFactory from ._projectrefs import ProjectRefs, ProjectRefStorage @@ -296,7 +296,7 @@ class Project(): # # Load artifacts pull/push configuration for this project - self.artifact_cache_specs = artifact_cache_specs_from_config_node(config) + self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config) # Workspace configurations self.workspaces = Workspaces(self) diff --git a/buildstream/_stream.py b/buildstream/_stream.py index 09433147c..62d9f9804 100644 --- a/buildstream/_stream.py +++ b/buildstream/_stream.py @@ -822,9 +822,15 @@ class Stream(): rewritable=rewritable, fetch_subprojects=fetch_subprojects) - self._pipeline.initialize(use_configured_remote_caches=use_configured_remote_caches, - add_remote_cache=add_remote_cache, - track_elements=track_elements, + # After loading the projects, but before resolving cache keys, + # we need to initialize remote artifact caches where relevant + # + self._artifacts.setup_remotes(use_config=use_configured_remote_caches, + remote_url=add_remote_cache) + + # Now complete the initialization + # + self._pipeline.initialize(track_elements=track_elements, track_cross_junctions=track_cross_junctions, track_selection=track_selection) diff --git a/tests/artifactcache/config.py b/tests/artifactcache/config.py index 690e354f5..079e511ef 100644 --- a/tests/artifactcache/config.py +++ b/tests/artifactcache/config.py @@ -3,7 +3,8 @@ import pytest import itertools import os -from buildstream._artifactcache import ArtifactCacheSpec, configured_remote_artifact_cache_specs +from buildstream._artifactcache import ArtifactCacheSpec +from buildstream._artifactcache.artifactcache import 
_configured_remote_artifact_cache_specs from buildstream._context import Context from buildstream._project import Project from buildstream.utils import _deduplicate @@ -99,7 +100,7 @@ def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user project = Project(str(project_dir), context) # Use the helper from the artifactcache module to parse our configuration. - parsed_cache_specs = configured_remote_artifact_cache_specs(context, project) + parsed_cache_specs = _configured_remote_artifact_cache_specs(context, project) # Verify that it was correctly read. expected_cache_specs = list(_deduplicate(itertools.chain(override_caches, project_caches, user_caches))) |