author     Sam Thursfield <sam.thursfield@codethink.co.uk>  2017-11-22 14:58:09 +0000
committer  Sam Thursfield <sam.thursfield@codethink.co.uk>  2018-01-11 17:13:10 +0000
commit     d7959bd45493dc58ed6bc84aa2cf40b69b6854c6 (patch)
tree       cd5f2002291025e6473f82da11d81b6bc56f39df /buildstream/_pipeline.py
parent     7a9c19984193b1072be4dc9adcbb00bcf732b822 (diff)
download   buildstream-d7959bd45493dc58ed6bc84aa2cf40b69b6854c6.tar.gz
Add support for multiple remote caches
This extends the 'artifacts' configuration block such that a list of
`url` mappings can be given instead of a single entry. For example:

    artifacts:
    - url: http://example.com/artifacts1
    - url: ssh://ostree@example.com/artifacts2

The OSTreeCache class is updated to set up multiple remotes and query
remote refs from all of them. There are no automated tests for this yet.

Empty URLs ('') now raise an exception. They cause breakages internally
if we allow them through, and they can only occur if the user or our
tests are misconfiguring things somehow.

We report failure to fetch from the cache by printing a message to
stderr for now. This is because BuildStream's actual logging
functionality can't be used during frontend init -- see issue #168.
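The helper `configured_artifact_cache_urls` imported in the diff below is
not shown in this commit. A minimal sketch of what such a helper could look
like, assuming user-level configuration takes precedence over project
configuration and that each side exposes its configured cache URLs as a
plain list of strings; the `artifact_cache_urls` attributes and the use of
ValueError are illustrative assumptions, not code from this commit:

    # Hypothetical sketch -- the real helper lives in
    # buildstream/_artifactcache and its internals are not in this diff.
    def configured_artifact_cache_urls(context, project):
        # Assumption: user config (context) overrides project config, and
        # each exposes its configured cache URLs as a list of strings.
        urls = context.artifact_cache_urls or project.artifact_cache_urls
        # Per the commit message, empty URLs ('') now raise an exception
        # (the exact exception type BuildStream uses is not shown here).
        for url in urls:
            if not url:
                raise ValueError("Artifact cache URL must not be empty")
        return urls

With the example configuration above, this would return both URLs in
order, and the pipeline then hands the whole list to set_remotes().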
Diffstat (limited to 'buildstream/_pipeline.py')
-rw-r--r--  buildstream/_pipeline.py  32
1 file changed, 16 insertions(+), 16 deletions(-)
diff --git a/buildstream/_pipeline.py b/buildstream/_pipeline.py
index cb7e0ed5e..db4c7fa85 100644
--- a/buildstream/_pipeline.py
+++ b/buildstream/_pipeline.py
@@ -40,6 +40,7 @@ from . import Scope
from . import _site
from . import utils
from ._platform import Platform
+from ._artifactcache import configured_artifact_cache_urls
from ._scheduler import SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
@@ -145,8 +146,8 @@ class Pipeline():
self.initialize_workspaces()
- if use_remote_cache and self.artifacts.can_fetch():
- self.fetch_remote_refs()
+ if use_remote_cache:
+ self.initialize_remote_caches()
self.resolve_cache_keys(inconsistent)
@@ -174,14 +175,13 @@ class Pipeline():
self.project._set_workspace(element, source, workspace)
- def fetch_remote_refs(self):
- with self.timed_activity("Fetching remote refs", silent_nested=True):
- try:
- self.artifacts.initialize_remote()
- self.artifacts.fetch_remote_refs()
- except ArtifactError:
- self.message(MessageType.WARN, "Failed to fetch remote refs")
- self.artifacts.set_offline()
+ def initialize_remote_caches(self):
+ def remote_failed(url, error):
+ self.message(MessageType.WARN, "Failed to fetch remote refs from {}: {}\n".format(url, error))
+
+ with self.timed_activity("Initializing remote caches", silent_nested=True):
+ artifact_urls = configured_artifact_cache_urls(self.context, self.project)
+ self.artifacts.set_remotes(artifact_urls, on_failure=remote_failed)
def resolve_cache_keys(self, inconsistent):
if inconsistent:
@@ -446,12 +446,12 @@ class Pipeline():
if track_plan:
track = TrackQueue(save=save)
queues.append(track)
- if self.artifacts.can_fetch():
+ if self.artifacts.has_fetch_remotes():
pull = PullQueue()
queues.append(pull)
queues.append(fetch)
queues.append(build)
- if self.artifacts.can_push():
+ if self.artifacts.has_push_remotes():
push = PushQueue()
queues.append(push)
@@ -689,8 +689,8 @@ class Pipeline():
#
def pull(self, scheduler, elements):
- if not self.artifacts.can_fetch():
- raise PipelineError("Not configured for pulling artifacts")
+ if not self.artifacts.has_fetch_remotes():
+ raise PipelineError("Not artifact caches available for pulling artifacts")
plan = elements
self.assert_consistent(plan)
@@ -727,8 +727,8 @@ class Pipeline():
#
def push(self, scheduler, elements):
- if not self.artifacts.can_push():
- raise PipelineError("Not configured for pushing artifacts")
+ if not self.artifacts.has_push_remotes():
+ raise PipelineError("No artifact caches available for pushing artifacts")
plan = elements
self.assert_consistent(plan)
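The new `set_remotes(urls, on_failure=...)` interface used above replaces
the single-remote `initialize_remote()`/`fetch_remote_refs()` pair. A
minimal sketch of how an artifact cache could satisfy that interface,
assuming each remote is initialized independently and a failure only
triggers the callback instead of taking the whole cache offline; apart
from the three method names called by _pipeline.py, everything here is an
illustrative assumption:

    # Hypothetical sketch of the multi-remote interface consumed by
    # _pipeline.py; only set_remotes(), has_fetch_remotes() and
    # has_push_remotes() appear in this diff -- the rest is assumed.

    class ArtifactError(Exception):
        """Stand-in for BuildStream's real ArtifactError."""

    class MultiRemoteCache:
        def __init__(self):
            self._fetch_remotes = []
            self._push_remotes = []

        def set_remotes(self, urls, on_failure=None):
            for url in urls:
                try:
                    # Illustrative helper: open a connection, query refs, etc.
                    remote = self._initialize_remote(url)
                except ArtifactError as e:
                    # Report and skip this remote; the others stay usable.
                    if on_failure:
                        on_failure(url, str(e))
                    continue
                self._fetch_remotes.append(remote)
                if getattr(remote, 'writable', False):  # illustrative attribute
                    self._push_remotes.append(remote)

        def has_fetch_remotes(self):
            return bool(self._fetch_remotes)

        def has_push_remotes(self):
            return bool(self._push_remotes)

        def _initialize_remote(self, url):
            raise NotImplementedError("backend-specific, e.g. OSTree")

This mirrors the behaviour described in the commit message: a failing
remote produces a warning via remote_failed() rather than switching the
whole cache offline as the removed set_offline() path used to.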