author     Jürg Billeter <j@bitron.ch>  2017-11-28 15:48:24 +0000
committer  Jürg Billeter <j@bitron.ch>  2018-02-08 16:38:48 +0100
commit     4b3d83107d80c193b812ddb63b7e6628ba678c87
tree       4d7bcd23ffda2533baf1bb5ba4accd0c258dd42a
parent     ce1b5bf600461465d10a61835a2783dddc1ac59a
_artifactcache: Use project-specific remotes for subprojects
-rw-r--r--  buildstream/_artifactcache/artifactcache.py |  39
-rw-r--r--  buildstream/_artifactcache/ostreecache.py   | 110
-rw-r--r--  buildstream/_artifactcache/tarcache.py      |   4
-rw-r--r--  buildstream/_pipeline.py                    |  19
-rw-r--r--  buildstream/_platform/linux.py              |   2
-rw-r--r--  buildstream/_platform/unix.py               |   2
-rw-r--r--  buildstream/element.py                      |   4
7 files changed, 128 insertions(+), 52 deletions(-)
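
This commit replaces ArtifactCache's single remote_specs list with a global list plus a per-project mapping, and splits storing the configuration (set_remotes()) from contacting the remotes (initialize_remotes()). As a rough, standalone sketch of the new configuration model, assuming a stand-in ArtifactCacheSpec namedtuple and plain strings in place of real Project instances:

# Sketch of the new remote-spec model: a global list shared by all projects
# plus an optional per-project list, merged and deduplicated in priority
# order (mirrors utils._deduplicate() in the diff below). ArtifactCacheSpec
# and the string project keys are stand-ins, not BuildStream's real types.
from collections import namedtuple

ArtifactCacheSpec = namedtuple('ArtifactCacheSpec', ['url', 'push'])


class RemoteSpecs:
    def __init__(self):
        self.global_remote_specs = []
        self.project_remote_specs = {}   # project -> [ArtifactCacheSpec, ...]

    def set_remotes(self, remote_specs, *, project=None):
        if project is None:
            self.global_remote_specs = remote_specs
        else:
            self.project_remote_specs[project] = remote_specs

    def effective_specs(self, project):
        # Global specs first, then the project's own specs, duplicates dropped.
        merged = self.global_remote_specs + self.project_remote_specs.get(project, [])
        return list(dict.fromkeys(merged))


specs = RemoteSpecs()
specs.set_remotes([ArtifactCacheSpec('https://cache.example.com/artifacts', push=True)])
specs.set_remotes([ArtifactCacheSpec('https://junction.example.com/artifacts', push=False)],
                  project='subproject')
assert len(specs.effective_specs('subproject')) == 2
assert len(specs.effective_specs('toplevel')) == 1
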
diff --git a/buildstream/_artifactcache/artifactcache.py b/buildstream/_artifactcache/artifactcache.py
index 44a5756c8..785a50c98 100644
--- a/buildstream/_artifactcache/artifactcache.py
+++ b/buildstream/_artifactcache/artifactcache.py
@@ -101,30 +101,44 @@ def configured_remote_artifact_cache_specs(context, project):
#
# Args:
# context (Context): The BuildStream context
-# project (Project): The BuildStream project
#
class ArtifactCache():
- def __init__(self, context, project):
+ def __init__(self, context):
self.context = context
- self.project = project
os.makedirs(context.artifactdir, exist_ok=True)
self.extractdir = os.path.join(context.artifactdir, 'extract')
self._local = False
- self.remote_specs = []
+ self.global_remote_specs = []
+ self.project_remote_specs = {}
# set_remotes():
#
- # Set the list of remote caches, which is initially empty. This will
- # contact each remote cache.
+ # Set the list of remote caches. If project is None, the global list of
+ # remote caches will be set, which is used by all projects. If a project is
+ # specified, the per-project list of remote caches will be set.
#
# Args:
# remote_specs (list): List of ArtifactCacheSpec instances, in priority order.
+ # project (Project): The Project instance for project-specific remotes
+ def set_remotes(self, remote_specs, *, project=None):
+ if project is None:
+ # global remotes
+ self.global_remote_specs = remote_specs
+ else:
+ self.project_remote_specs[project] = remote_specs
+
+ # initialize_remotes():
+ #
+ # This will contact each remote cache.
+ #
+ # Args:
# on_failure (callable): Called if we fail to contact one of the caches.
- def set_remotes(self, remote_specs, on_failure=None):
- self.remote_specs = remote_specs
+ #
+ def initialize_remotes(self, *, on_failure=None):
+ pass
# contains():
#
@@ -180,16 +194,19 @@ class ArtifactCache():
# Returns: True if any remote repositories are configured, False otherwise
#
def has_fetch_remotes(self):
- return (len(self.remote_specs) > 0)
+ return False
# has_push_remotes():
#
# Check whether any remote repositories are available for pushing.
#
+ # Args:
+ # element (Element): The Element to check
+ #
# Returns: True if any remote repository is configured, False otherwise
#
- def has_push_remotes(self):
- return (any(spec for spec in self.remote_specs if spec.push) > 0)
+ def has_push_remotes(self, *, element=None):
+ return False
# remote_contains_key():
#
diff --git a/buildstream/_artifactcache/ostreecache.py b/buildstream/_artifactcache/ostreecache.py
index d0508bd93..68868e03b 100644
--- a/buildstream/_artifactcache/ostreecache.py
+++ b/buildstream/_artifactcache/ostreecache.py
@@ -65,28 +65,33 @@ def buildref(element, key):
#
class OSTreeCache(ArtifactCache):
- def __init__(self, context, project, enable_push):
- super().__init__(context, project)
+ def __init__(self, context, enable_push):
+ super().__init__(context)
self.enable_push = enable_push
ostreedir = os.path.join(context.artifactdir, 'ostree')
self.repo = _ostree.ensure(ostreedir, False)
- self.push_urls = []
- self.pull_urls = []
+ self.push_urls = {}
+ self.pull_urls = {}
self._remote_refs = {}
-
- def set_remotes(self, remote_specs, on_failure=None):
- self.remote_specs = remote_specs
-
- self._initialize_remotes(on_failure)
+ self._has_fetch_remotes = False
+ self._has_push_remotes = False
def has_fetch_remotes(self):
- return (len(self.pull_urls) > 0)
+ return self._has_fetch_remotes
- def has_push_remotes(self):
- return (len(self.push_urls) > 0)
+ def has_push_remotes(self, *, element=None):
+ if not self._has_push_remotes:
+ # No project has push remotes
+ return False
+ elif element is None:
+ # At least one (sub)project has push remotes
+ return True
+ else:
+ # Check whether the specified element's project has push remotes
+ return len(self.push_urls[element._get_project()]) > 0
# contains():
#
@@ -125,11 +130,15 @@ class OSTreeCache(ArtifactCache):
# Returns: True if the artifact is in the cache, False otherwise
#
def remote_contains_key(self, element, key):
- if len(self._remote_refs) == 0:
+ if not self._has_fetch_remotes:
+ return False
+
+ remote_refs = self._remote_refs[element._get_project()]
+ if len(remote_refs) == 0:
return False
ref = buildref(element, key)
- return ref in self._remote_refs
+ return ref in remote_refs
# remote_contains():
#
@@ -242,14 +251,17 @@ class OSTreeCache(ArtifactCache):
# progress (callable): The progress callback, if any
#
def pull(self, element, progress=None):
+ project = element._get_project()
+
+ remote_refs = self._remote_refs[project]
ref = buildref(element, element._get_strict_cache_key())
weak_ref = buildref(element, element._get_cache_key(strength=_KeyStrength.WEAK))
try:
- if ref in self._remote_refs:
+ if ref in remote_refs:
# fetch the artifact using the strong cache key
- _ostree.fetch(self.repo, remote=self._remote_refs[ref],
+ _ostree.fetch(self.repo, remote=remote_refs[ref],
ref=ref, progress=progress)
# resolve ref to checksum
@@ -257,9 +269,9 @@ class OSTreeCache(ArtifactCache):
# update weak ref by pointing it to this newly fetched artifact
_ostree.set_ref(self.repo, weak_ref, rev)
- elif weak_ref in self._remote_refs:
+ elif weak_ref in remote_refs:
# fetch the artifact using the weak cache key
- _ostree.fetch(self.repo, remote=self._remote_refs[weak_ref],
+ _ostree.fetch(self.repo, remote=remote_refs[weak_ref],
ref=weak_ref, progress=progress)
# resolve weak_ref to checksum
@@ -294,13 +306,17 @@ class OSTreeCache(ArtifactCache):
def push(self, element):
any_pushed = False
- if len(self.push_urls) == 0:
+ project = element._get_project()
+
+ push_urls = self.push_urls[project]
+
+ if len(push_urls) == 0:
raise ArtifactError("Push is not enabled for any of the configured remote artifact caches.")
ref = buildref(element, element._get_cache_key())
weak_ref = buildref(element, element._get_cache_key(strength=_KeyStrength.WEAK))
- for push_url in self.push_urls:
+ for push_url in push_urls:
any_pushed |= self._push_to_remote(push_url, element, ref, weak_ref)
return any_pushed
@@ -360,10 +376,15 @@ class OSTreeCache(ArtifactCache):
_ostree.configure_remote(repo, remote_name, pull_url)
return remote_name
- def _initialize_remotes(self, on_failure=None):
- self.push_url = None
- self.pull_urls = []
- self._remote_refs = {}
+ def initialize_remotes(self, *, on_failure=None):
+ remote_specs = self.global_remote_specs
+
+ for project in self.project_remote_specs:
+ remote_specs += self.project_remote_specs[project]
+
+ remote_specs = list(utils._deduplicate(remote_specs))
+
+ remote_results = {}
# Callback to initialize one remote in a 'multiprocessing' subprocess.
#
@@ -388,7 +409,7 @@ class OSTreeCache(ArtifactCache):
# possible to pickle local functions such as child_action().
#
q = multiprocessing.Queue()
- for remote in self.remote_specs:
+ for remote in remote_specs:
p = multiprocessing.Process(target=child_action, args=(remote.url, q))
try:
@@ -408,25 +429,54 @@ class OSTreeCache(ArtifactCache):
elif error:
raise ArtifactError(error)
else:
+ if remote.push and push_url:
+ self._has_push_remotes = True
+ if pull_url:
+ self._has_fetch_remotes = True
+
+ remote_results[remote.url] = (push_url, pull_url, remote_refs)
+
+ # Prepare push_urls, pull_urls, and remote_refs for each project
+ for project in self.context._get_projects():
+ remote_specs = self.global_remote_specs
+ if project in self.project_remote_specs:
+ remote_specs = list(utils._deduplicate(remote_specs + self.project_remote_specs[project]))
+
+ push_urls = []
+ pull_urls = []
+ _remote_refs = {}
+
+ for remote in remote_specs:
+ # Errors are already handled in the loop above,
+ # skip unreachable remotes here.
+ if remote.url not in remote_results:
+ continue
+
+ push_url, pull_url, remote_refs = remote_results[remote.url]
+
if remote.push:
if push_url:
- self.push_urls.append(push_url)
+ push_urls.append(push_url)
else:
raise ArtifactError("Push enabled but not supported by repo at: {}".format(remote.url))
# The specs are deduplicated when reading the config, but since
# each push URL can supply an arbitrary pull URL we must dedup
# those again here.
- if pull_url and pull_url not in self.pull_urls:
- self.pull_urls.append(pull_url)
+ if pull_url and pull_url not in pull_urls:
+ pull_urls.append(pull_url)
# Update our overall map of remote refs with any refs that are
# present in the new remote and were not already found in
# higher priority ones.
remote = self._ensure_remote(self.repo, pull_url)
for ref in remote_refs:
- if ref not in self._remote_refs:
- self._remote_refs[ref] = remote
+ if ref not in _remote_refs:
+ _remote_refs[ref] = remote
+
+ self.push_urls[project] = push_urls
+ self.pull_urls[project] = pull_urls
+ self._remote_refs[project] = _remote_refs
def _push_to_remote(self, push_url, element, ref, weak_ref):
with utils._tempdir(dir=self.context.artifactdir, prefix='push-repo-') as temp_repo_dir:
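
The reworked initialize_remotes() above boils down to two passes: contact each deduplicated remote exactly once, then assemble per-project push_urls, pull_urls and _remote_refs views from the cached results. A simplified sketch of that shape follows; the contact() callable and its (push_url, pull_url, refs) result are assumptions standing in for the multiprocessing/OSTree query in the real code:

# Two-pass shape of the new initialize_remotes(): query every unique remote
# once, then build per-project views in priority order. contact() is a
# stand-in for the multiprocessing/OSTree probe in the real code and may
# raise OSError for an unreachable remote.
def initialize_remotes(projects, global_specs, project_specs, contact, on_failure=None):
    # Pass 1: deduplicated union of all specs, each contacted exactly once.
    all_specs = list(dict.fromkeys(
        list(global_specs) + [s for specs in project_specs.values() for s in specs]))
    results = {}
    for spec in all_specs:
        try:
            results[spec.url] = contact(spec.url)   # -> (push_url, pull_url, refs)
        except OSError as e:
            if on_failure:
                on_failure(spec.url, str(e))

    # Pass 2: per-project maps, global specs first, project specs appended.
    push_urls, pull_urls, remote_refs = {}, {}, {}
    for project in projects:
        merged = list(dict.fromkeys(list(global_specs) + project_specs.get(project, [])))
        push_urls[project], pull_urls[project], remote_refs[project] = [], [], {}
        for spec in merged:
            if spec.url not in results:
                continue                             # unreachable remote, skip it
            push_url, pull_url, refs = results[spec.url]
            if spec.push and push_url:
                push_urls[project].append(push_url)
            if pull_url and pull_url not in pull_urls[project]:
                pull_urls[project].append(pull_url)
            for ref in refs:
                # Refs from higher-priority remotes win, as in the diff above.
                remote_refs[project].setdefault(ref, pull_url)
    return push_urls, pull_urls, remote_refs
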
diff --git a/buildstream/_artifactcache/tarcache.py b/buildstream/_artifactcache/tarcache.py
index 82487f077..84b446cd2 100644
--- a/buildstream/_artifactcache/tarcache.py
+++ b/buildstream/_artifactcache/tarcache.py
@@ -237,9 +237,9 @@ class Tar():
class TarCache(ArtifactCache):
- def __init__(self, context, project):
+ def __init__(self, context):
- super().__init__(context, project)
+ super().__init__(context)
self.tardir = os.path.join(context.artifactdir, 'tar')
os.makedirs(self.tardir, exist_ok=True)
diff --git a/buildstream/_pipeline.py b/buildstream/_pipeline.py
index 1d4cdf227..47caf28c3 100644
--- a/buildstream/_pipeline.py
+++ b/buildstream/_pipeline.py
@@ -146,13 +146,18 @@ class Pipeline():
# Initialize remote artifact caches. We allow the commandline to override
# the user config in some cases (for example `bst push --remote=...`).
- artifact_caches = []
+ has_remote_caches = False
if add_remote_cache:
- artifact_caches += [ArtifactCacheSpec(add_remote_cache, push=True)]
+ self.artifacts.set_remotes([ArtifactCacheSpec(add_remote_cache, push=True)])
+ has_remote_caches = True
if use_configured_remote_caches:
- artifact_caches += configured_remote_artifact_cache_specs(self.context, self.project)
- if len(artifact_caches) > 0:
- self.initialize_remote_caches(artifact_caches)
+ for project in self.context._get_projects():
+ artifact_caches = configured_remote_artifact_cache_specs(self.context, project)
+ if len(artifact_caches) > 0:
+ self.artifacts.set_remotes(artifact_caches, project=project)
+ has_remote_caches = True
+ if has_remote_caches:
+ self.initialize_remote_caches()
self.resolve_cache_keys(track_elements)
@@ -180,12 +185,12 @@ class Pipeline():
self.project._set_workspace(element, workspace)
- def initialize_remote_caches(self, artifact_cache_specs):
+ def initialize_remote_caches(self):
def remote_failed(url, error):
self.message(MessageType.WARN, "Failed to fetch remote refs from {}: {}".format(url, error))
with self.timed_activity("Initializing remote caches", silent_nested=True):
- self.artifacts.set_remotes(artifact_cache_specs, on_failure=remote_failed)
+ self.artifacts.initialize_remotes(on_failure=remote_failed)
def resolve_cache_keys(self, track_elements):
if track_elements:
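
With the split API, the pipeline now records command-line overrides as a global remote, records user-configured remotes per project, and touches the network only once at the end. A condensed sketch of that flow, with the collaborators passed in as stand-ins rather than the real Pipeline attributes:

# Condensed flow of the reworked Pipeline setup above: a command-line
# override becomes a global spec, user configuration becomes per-project
# specs, and the network is only touched once via initialize_remotes().
# All collaborators are parameters here; in BuildStream they live on the
# Pipeline and Context objects.
def setup_remote_caches(artifacts, projects, configured_specs_for, spec_cls,
                        add_remote_cache=None, use_configured_remote_caches=False,
                        on_failure=None):
    has_remote_caches = False
    if add_remote_cache:
        # e.g. `bst push --remote=...` takes priority over user config
        artifacts.set_remotes([spec_cls(add_remote_cache, push=True)])
        has_remote_caches = True
    if use_configured_remote_caches:
        for project in projects:
            specs = configured_specs_for(project)
            if specs:
                artifacts.set_remotes(specs, project=project)
                has_remote_caches = True
    if has_remote_caches:
        artifacts.initialize_remotes(on_failure=on_failure)
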
diff --git a/buildstream/_platform/linux.py b/buildstream/_platform/linux.py
index 265352561..5ca1878b9 100644
--- a/buildstream/_platform/linux.py
+++ b/buildstream/_platform/linux.py
@@ -36,7 +36,7 @@ class Linux(Platform):
self._user_ns_available = False
self.check_user_ns_available(context)
- self._artifact_cache = OSTreeCache(context, project, self._user_ns_available)
+ self._artifact_cache = OSTreeCache(context, self._user_ns_available)
def check_user_ns_available(self, context):
diff --git a/buildstream/_platform/unix.py b/buildstream/_platform/unix.py
index a34938a98..6d7b46374 100644
--- a/buildstream/_platform/unix.py
+++ b/buildstream/_platform/unix.py
@@ -32,7 +32,7 @@ class Unix(Platform):
def __init__(self, context, project):
super().__init__(context, project)
- self._artifact_cache = TarCache(context, project)
+ self._artifact_cache = TarCache(context)
# Not necessarily 100% reliable, but we want to fail early.
if os.geteuid() != 0:
diff --git a/buildstream/element.py b/buildstream/element.py
index 48ced1546..8d69f07b0 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -1167,6 +1167,10 @@ class Element(Plugin):
# (bool): True if this element should not be pushed
#
def _skip_push(self):
+ if not self.__artifacts.has_push_remotes(element=self):
+ # No push remotes for this element's project
+ return True
+
if not self._cached():
return True
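
On the element side, _skip_push() now asks the artifact cache whether this element's project has any push remotes before it looks at local cache state. A small illustrative sketch of that ordering, using minimal stand-ins for the real ArtifactCache and Element classes:

# Ordering of the new _skip_push() check: per-project push remotes are
# consulted first, then local cache state. Both classes are minimal
# stand-ins for the real ArtifactCache and Element.
class FakeCache:
    def __init__(self, push_urls_by_project):
        self._push_urls = push_urls_by_project

    def has_push_remotes(self, *, element=None):
        if element is None:
            return any(self._push_urls.values())
        return len(self._push_urls.get(element.project, [])) > 0


class FakeElement:
    def __init__(self, project, cached):
        self.project = project
        self.cached = cached

    def skip_push(self, artifacts):
        if not artifacts.has_push_remotes(element=self):
            return True      # no push remotes for this element's project
        if not self.cached:
            return True      # nothing to push yet
        return False


cache = FakeCache({'toplevel': ['https://cache.example.com'], 'subproject': []})
assert FakeElement('subproject', cached=True).skip_push(cache) is True
assert FakeElement('toplevel', cached=True).skip_push(cache) is False
assert FakeElement('toplevel', cached=False).skip_push(cache) is True
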