-rw-r--r--  buildstream/_artifactcache/__init__.py       |   2
-rw-r--r--  buildstream/_artifactcache/artifactcache.py  | 136
-rw-r--r--  buildstream/_artifactcache/ostreecache.py    | 214
-rw-r--r--  buildstream/_context.py                      |   9
-rw-r--r--  buildstream/_ostree.py                       |   2
-rw-r--r--  buildstream/_pipeline.py                     |  32
-rw-r--r--  buildstream/_project.py                      |   5
-rw-r--r--  buildstream/data/userconfig.yaml             |   8
-rw-r--r--  buildstream/utils.py                         |  30
-rw-r--r--  tests/frontend/pull.py                       |  33
-rw-r--r--  tests/frontend/push.py                       |  33
-rw-r--r--  tests/testutils/__init__.py                  |   2
-rw-r--r--  tests/testutils/artifactshare.py             |  34
13 files changed, 317 insertions(+), 223 deletions(-)
diff --git a/buildstream/_artifactcache/__init__.py b/buildstream/_artifactcache/__init__.py
index 991e84869..7dc0ce3be 100644
--- a/buildstream/_artifactcache/__init__.py
+++ b/buildstream/_artifactcache/__init__.py
@@ -18,4 +18,4 @@
# Authors:
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
-from .artifactcache import ArtifactCache
+from .artifactcache import ArtifactCache, artifact_cache_urls_from_config_node, configured_artifact_cache_urls
diff --git a/buildstream/_artifactcache/artifactcache.py b/buildstream/_artifactcache/artifactcache.py
index 1283b37ab..1d30b2582 100644
--- a/buildstream/_artifactcache/artifactcache.py
+++ b/buildstream/_artifactcache/artifactcache.py
@@ -21,12 +21,72 @@
import os
from collections import Mapping
-from .._exceptions import ImplError
+from .._exceptions import ImplError, LoadError, LoadErrorReason
from .. import utils
from .. import _yaml
-# An ArtifactCache manages artifacts
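+# artifact_cache_url_from_spec()
+#
+# Parses a single artifact cache URL from a 'url:' spec mapping, raising
+# LoadError if the URL is missing or empty.
+#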
+def artifact_cache_url_from_spec(spec):
+ _yaml.node_validate(spec, ['url'])
+ url = _yaml.node_get(spec, str, 'url')
+ if len(url) == 0:
+ provenance = _yaml.node_get_provenance(spec)
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: empty artifact cache URL".format(provenance))
+ return url
+
+
+# artifact_cache_urls_from_config_node()
+#
+# Parses the configuration of remote artifact caches from a config block.
+#
+# Args:
+# config_node (dict): The config block, which may contain the 'artifacts' key
+#
+# Returns:
+# A list of URLs pointing to remote artifact caches.
+#
+# Raises:
+# LoadError, if the config block contains invalid keys.
+#
+def artifact_cache_urls_from_config_node(config_node):
+ urls = []
+
+ artifacts = config_node.get('artifacts', [])
+ if isinstance(artifacts, Mapping):
+ urls.append(artifact_cache_url_from_spec(artifacts))
+ elif isinstance(artifacts, list):
+ for spec in artifacts:
+ urls.append(artifact_cache_url_from_spec(spec))
+ else:
+ provenance = _yaml.node_get_provenance(config_node, key='artifacts')
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}: 'artifacts' must be a single 'url:' mapping, or a list of mappings"
+ .format(provenance))
+ return urls
+
+
+# configured_artifact_cache_urls():
+#
+# Return the list of configured artifact remotes for a given project, in priority
+# order. This takes into account the user and project configuration.
+#
+# Args:
+# context (Context): The BuildStream context
+# project (Project): The BuildStream project
+#
+# Returns:
+# A list of URLs pointing to remote artifact caches.
+#
+def configured_artifact_cache_urls(context, project):
+ project_overrides = context._get_overrides(project.name)
+ project_extra_urls = artifact_cache_urls_from_config_node(project_overrides)
+
+ return list(utils._deduplicate(
+ project_extra_urls + project.artifact_urls + context.artifact_urls))
+
+
+# An ArtifactCache manages artifacts.
#
# Args:
# context (Context): The BuildStream context
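
As a reference for the parsing logic above, artifact_cache_urls_from_config_node() accepts either a single 'url:' mapping or a list of such mappings. A sketch of the two accepted forms (hostnames are hypothetical):

    artifacts:
      url: https://artifacts.example.com/cache

or, listing several caches in priority order:

    artifacts:
    - url: ssh://artifacts.example.com/cache
    - url: https://mirror.example.com/cache
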
@@ -36,42 +96,24 @@ class ArtifactCache():
def __init__(self, context, project):
self.context = context
+ self.project = project
os.makedirs(context.artifactdir, exist_ok=True)
self.extractdir = os.path.join(context.artifactdir, 'extract')
self._local = False
+ self.urls = []
- project_overrides = context._get_overrides(project.name)
- artifact_overrides = _yaml.node_get(project_overrides, Mapping, 'artifacts', default_value={})
- override_url = _yaml.node_get(artifact_overrides, str, 'url', default_value='') or None
-
- _yaml.node_validate(artifact_overrides, ['url'])
-
- if override_url:
- self.url = override_url
- elif project.artifact_url:
- self.url = project.artifact_url
- else:
- self.url = context.artifact_url
-
- if self.url:
- if self.url.startswith('/') or self.url.startswith('file://'):
- self._local = True
-
- self.remote = utils.url_directory_name(self.url)
- else:
- self.remote = None
-
- self._offline = False
-
- # initialize_remote():
+ # set_remotes():
#
- # Initialize any remote artifact cache, if needed. This may require network
- # access and could block for several seconds.
+ # Set the list of remote caches. The list is initially empty; calling
+ # this method may contact each remote cache to initialize it.
#
- def initialize_remote(self):
- pass
+ # Args:
+ # urls (list): List of artifact remote URLs, in priority order.
+ # on_failure (callable): Called if we fail to contact one of the caches.
+ def set_remotes(self, urls, on_failure=None):
+ self.urls = urls
# contains():
#
@@ -120,37 +162,28 @@ class ArtifactCache():
raise ImplError("Cache '{kind}' does not implement commit()"
.format(kind=type(self).__name__))
- # set_offline()
+ # has_fetch_remotes():
#
- # Do not attempt to pull or push artifacts.
+ # Check whether any remote repositories are available for fetching.
#
- def set_offline(self):
- self._offline = True
-
- # can_fetch():
+ # Returns: True if any remote repositories are configured, False otherwise
#
- # Check whether remote repository is available for fetching.
- #
- # Returns: True if remote repository is available, False otherwise
- #
- def can_fetch(self):
- return (not self._offline or self._local) and \
- self.remote is not None
+ def has_fetch_remotes(self):
+ return (len(self.urls) > 0)
- # can_push():
+ # has_push_remotes():
#
- # Check whether remote repository is available for pushing.
+ # Check whether any remote repositories are available for pushing.
#
- # Returns: True if remote repository is available, False otherwise
+ # Returns: True if any remote repository is configured, False otherwise
#
- def can_push(self):
- return (not self._offline or self._local) and \
- self.url is not None
+ def has_push_remotes(self):
+ return (len(self.urls) > 0)
# remote_contains_key():
#
# Check whether the artifact for the specified Element is already available
- # in the remote artifact cache.
+ # in any remote artifact cache.
#
# Args:
# element (Element): The Element to check
@@ -160,6 +193,3 @@ class ArtifactCache():
#
def remote_contains(self, element, strength=None):
return False
-
- def fetch_remote_refs(self):
- pass
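
The priority rule in configured_artifact_cache_urls() can be sketched with plain lists; dict.fromkeys() stands in for utils._deduplicate() here and the URLs are hypothetical:

    # Project-specific overrides from the user config take priority, then
    # project.conf, then the user's global configuration; duplicates keep
    # their first (highest-priority) position.
    override_urls = ['https://top.example.com/cache']
    project_urls = ['https://proj.example.com/cache', 'https://top.example.com/cache']
    user_urls = ['https://user.example.com/cache']

    combined = list(dict.fromkeys(override_urls + project_urls + user_urls))
    assert combined == ['https://top.example.com/cache',
                        'https://proj.example.com/cache',
                        'https://user.example.com/cache']
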
diff --git a/buildstream/_artifactcache/ostreecache.py b/buildstream/_artifactcache/ostreecache.py
index 78796dc6c..9987746b0 100644
--- a/buildstream/_artifactcache/ostreecache.py
+++ b/buildstream/_artifactcache/ostreecache.py
@@ -48,7 +48,7 @@ def buildref(element, key):
return '{0}/{1}/{2}'.format(project.name, element_name, key)
-# An ArtifactCache manages artifacts in an OSTree repository
+# An OSTreeCache manages artifacts in an OSTree repository
#
# Args:
# context (Context): The BuildStream context
@@ -70,35 +70,19 @@ class OSTreeCache(ArtifactCache):
self.repo = _ostree.ensure(ostreedir, False)
self.push_url = None
- self.pull_url = None
-
- self._remote_refs = None
-
- def initialize_remote(self):
- if self.url is not None:
- if self.url.startswith('ssh://'):
- self.push_url = self.url
- try:
- # Contact the remote cache.
- self.pull_url = initialize_push_connection(self.push_url)
- except PushException as e:
- raise ArtifactError("BuildStream did not connect succesfully "
- "to the shared cache: {}".format(e))
- elif self.url.startswith('http://') or self.url.startswith('https://'):
- self.push_url = None
- self.pull_url = self.url
- elif self._local:
- self.push_url = self.url
- self.pull_url = self.url
- else:
- raise ArtifactError("Unsupported URL scheme: {}".format(self.url))
+ self.pull_urls = []
+ self._remote_refs = {}
+
+ def set_remotes(self, urls, on_failure=None):
+ self.urls = urls
+
+ self._initialize_remotes(on_failure)
- _ostree.configure_remote(self.repo, self.remote, self.pull_url)
+ def has_fetch_remotes(self):
+ return (len(self.pull_urls) > 0)
- def can_push(self):
- if self.enable_push:
- return (not self._offline or self._local) and self.push_url is not None
- return False
+ def has_push_remotes(self):
+ return (self.push_url is not None)
# contains():
#
@@ -134,7 +118,7 @@ class OSTreeCache(ArtifactCache):
# Returns: True if the artifact is in the cache, False otherwise
#
def remote_contains_key(self, element, key):
- if not self._remote_refs:
+ if len(self._remote_refs) == 0:
return False
ref = buildref(element, key)
@@ -241,7 +225,7 @@ class OSTreeCache(ArtifactCache):
# pull():
#
- # Pull artifact from remote repository.
+ # Pull artifact from one of the configured remote repositories.
#
# Args:
# element (Element): The Element whose artifact is to be fetched
@@ -249,21 +233,13 @@ class OSTreeCache(ArtifactCache):
#
def pull(self, element, progress=None):
- if self._offline and not self._local:
- raise ArtifactError("Attempt to pull artifact while offline")
-
- if self.pull_url.startswith("/"):
- remote = "file://" + self.pull_url
- else:
- remote = self.remote
-
+ ref = buildref(element, element._get_cache_key())
weak_ref = buildref(element, element._get_cache_key(strength=_KeyStrength.WEAK))
try:
- if self.remote_contains(element, strength=_KeyStrength.STRONG):
+ if ref in self._remote_refs:
# fetch the artifact using the strong cache key
- ref = buildref(element, element._get_cache_key())
- _ostree.fetch(self.repo, remote=remote,
+ _ostree.fetch(self.repo, remote=self._remote_refs[ref],
ref=ref, progress=progress)
# resolve ref to checksum
@@ -271,9 +247,9 @@ class OSTreeCache(ArtifactCache):
# update weak ref by pointing it to this newly fetched artifact
_ostree.set_ref(self.repo, weak_ref, rev)
- elif self.remote_contains(element):
+ elif weak_ref in self._remote_refs:
# fetch the artifact using the weak cache key
- _ostree.fetch(self.repo, remote=remote,
+ _ostree.fetch(self.repo, remote=self._remote_refs[weak_ref],
ref=weak_ref, progress=progress)
# resolve weak_ref to checksum
@@ -292,35 +268,6 @@ class OSTreeCache(ArtifactCache):
raise ArtifactError("Failed to pull artifact for element {}: {}"
.format(element.name, e)) from e
- # fetch_remote_refs():
- #
- # Fetch list of artifacts from remote repository.
- #
- def fetch_remote_refs(self):
- if self.pull_url.startswith("/"):
- remote = "file://" + self.pull_url
- elif self.remote is not None:
- remote = self.remote
- else:
- raise ArtifactError("Attempt to fetch remote refs without any pull URL")
-
- def child_action(repo, remote, q):
- try:
- q.put((True, _ostree.list_remote_refs(self.repo, remote=remote)))
- except OSTreeError as e:
- q.put((False, e))
-
- q = multiprocessing.Queue()
- p = multiprocessing.Process(target=child_action, args=(self.repo, remote, q))
- p.start()
- ret, res = q.get()
- p.join()
-
- if ret:
- self._remote_refs = res
- else:
- raise ArtifactError("Failed to fetch remote refs") from res
-
# push():
#
# Push committed artifact to remote repository.
@@ -336,17 +283,14 @@ class OSTreeCache(ArtifactCache):
# (ArtifactError): if there was an error
def push(self, element):
- if self._offline and not self._local:
- raise ArtifactError("Attempt to push artifact while offline")
-
if self.push_url is None:
raise ArtifactError("The protocol in use does not support pushing.")
ref = buildref(element, element._get_cache_key_from_artifact())
weak_ref = buildref(element, element._get_cache_key(strength=_KeyStrength.WEAK))
- if self.push_url.startswith("/"):
+ if self.push_url.startswith("file://"):
# local repository
- push_repo = _ostree.ensure(self.push_url, True)
+ push_repo = _ostree.ensure(self.push_url[7:], True)
_ostree.fetch(push_repo, remote=self.repo.get_path().get_uri(), ref=ref)
_ostree.fetch(push_repo, remote=self.repo.get_path().get_uri(), ref=weak_ref)
@@ -377,9 +321,117 @@ class OSTreeCache(ArtifactCache):
return pushed
- # set_offline():
+ # _initialize_remote():
+ #
+ # Do protocol-specific initialization necessary to use a given OSTree
+ # remote.
+ #
+ # The SSH protocol that we use only supports pushing, so initializing an
+ # ssh:// remote involves contacting it to find out the corresponding pull URL.
+ #
+ # Args:
+ # url (str): URL of the remote
+ #
+ # Returns:
+ # (str, str): the pull URL and push URL for the remote
+ #
+ # Raises:
+ # ArtifactError: if there was an error
+ def _initialize_remote(self, url):
+ if url.startswith('ssh://'):
+ try:
+ push_url = url
+ pull_url = initialize_push_connection(url)
+ except PushException as e:
+ raise ArtifactError("BuildStream did not connect successfully "
+ "to the shared cache {}: {}".format(url, e))
+ elif url.startswith('/'):
+ push_url = pull_url = 'file://' + url
+ elif url.startswith('file://'):
+ push_url = pull_url = url
+ elif url.startswith('http://') or url.startswith('https://'):
+ push_url = None
+ pull_url = url
+ else:
+ raise ArtifactError("Unsupported URL: {}".format(url))
+
+ return push_url, pull_url
+
+ # _ensure_remote():
#
- # Do not attempt to pull or push artifacts.
+ # Ensure that our OSTree repo has a remote configured for the given URL.
+ # Note that SSH access to remotes is not handled by libostree itself.
#
- def set_offline(self):
- self._offline = True
+ # Args:
+ # repo (OSTree.Repo): an OSTree repository
+ # pull_url (str): the URL where libostree can pull from the remote
+ #
+ # Returns:
+ # (str): the name of the remote, which can be passed to various other
+ # operations implemented by the _ostree module.
+ #
+ # Raises:
+ # OSTreeError: if there was a problem reported by libostree
+ def _ensure_remote(self, repo, pull_url):
+ remote_name = utils.url_directory_name(pull_url)
+ _ostree.configure_remote(repo, remote_name, pull_url)
+ return remote_name
+
+ def _initialize_remotes(self, on_failure=None):
+ self.push_url = None
+ self.pull_urls = []
+ self._remote_refs = {}
+
+ # Callback to initialize one remote in a 'multiprocessing' subprocess.
+ #
+ # We cannot do this in the main process because of the way the tasks
+ # run by the main scheduler calls into libostree using
+ # fork()-without-exec() subprocesses. OSTree fetch operations in
+ # subprocesses hang if fetch operations were previously done in the
+ # main process.
+ #
+ def child_action(url, q):
+ try:
+ push_url, pull_url = self._initialize_remote(url)
+ remote = self._ensure_remote(self.repo, pull_url)
+ remote_refs = _ostree.list_remote_refs(self.repo, remote=remote)
+ q.put((None, push_url, pull_url, remote_refs))
+ except Exception as e:
+ # Exceptions aren't automatically propagated by
+ # multiprocessing, so we catch everything here. Note that
+ # GLib.Error subclasses can't be returned (as they don't
+ # 'pickle') and so they'll be ignored.
+ q.put((e, None, None, None))
+
+ # Kick off all the initialization jobs one by one.
+ #
+ # Note that we cannot use multiprocessing.Pool here because it's not
+ # possible to pickle local functions such as child_action().
+ #
+ q = multiprocessing.Queue()
+ for url in self.urls:
+ p = multiprocessing.Process(target=child_action, args=(url, q))
+ p.start()
+ exception, push_url, pull_url, remote_refs = q.get()
+ p.join()
+
+ if exception and on_failure:
+ on_failure(url, exception)
+ elif exception:
+ raise ArtifactError("Failed to initialize remote {}: {}"
+ .format(url, exception)) from exception
+ else:
+ # Use the first pushable remote we find for pushing.
+ if push_url and not self.push_url:
+ self.push_url = push_url
+
+ # Use all pullable remotes, in priority order
+ if pull_url and pull_url not in self.pull_urls:
+ self.pull_urls.append(pull_url)
+
+ # Update our overall map of remote refs with any refs that are
+ # present in the new remote and were not already found in
+ # higher priority ones.
+ remote = self._ensure_remote(self.repo, pull_url)
+ for ref in remote_refs:
+ if ref not in self._remote_refs:
+ self._remote_refs[ref] = remote
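
The Process-plus-Queue pattern in _initialize_remotes() generalizes to a small helper. This is a self-contained sketch, not BuildStream API; like the code above, it relies on the fork start method, since local functions cannot be pickled for multiprocessing.Pool:

    import multiprocessing

    def run_in_subprocess(func, *args):
        # Run func(*args) in a child process and relay either the result or
        # the exception back over a queue, mirroring child_action() above.
        # Note the exception must be picklable to survive the queue.
        def child_action(q):
            try:
                q.put((None, func(*args)))
            except Exception as e:
                q.put((e, None))

        q = multiprocessing.Queue()
        p = multiprocessing.Process(target=child_action, args=(q,))
        p.start()
        exception, result = q.get()
        p.join()

        if exception is not None:
            raise exception
        return result
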
diff --git a/buildstream/_context.py b/buildstream/_context.py
index 6d0a19bde..78ec9f8b2 100644
--- a/buildstream/_context.py
+++ b/buildstream/_context.py
@@ -29,6 +29,7 @@ from . import _yaml
from ._exceptions import LoadError, LoadErrorReason, BstError
from ._message import Message, MessageType
from ._profile import Topics, profile_start, profile_end
+from ._artifactcache import artifact_cache_urls_from_config_node
# Context()
@@ -61,8 +62,8 @@ class Context():
# The local binary artifact cache directory
self.artifactdir = None
- # The URL from which to push and pull prebuilt artifacts
- self.artifact_url = None
+ # The URLs from which to push and pull prebuilt artifacts
+ self.artifact_urls = []
# The directory to store build logs
self.logdir = None
@@ -161,9 +162,7 @@ class Context():
setattr(self, dir, path)
# Load artifact share configuration
- artifacts = _yaml.node_get(defaults, Mapping, 'artifacts')
- _yaml.node_validate(artifacts, ['url'])
- self.artifact_url = _yaml.node_get(artifacts, str, 'url', default_value='') or None
+ self.artifact_urls = artifact_cache_urls_from_config_node(defaults)
# Load logging config
logging = _yaml.node_get(defaults, Mapping, 'logging')
diff --git a/buildstream/_ostree.py b/buildstream/_ostree.py
index c9f0389d1..165556365 100644
--- a/buildstream/_ostree.py
+++ b/buildstream/_ostree.py
@@ -357,4 +357,4 @@ def list_remote_refs(repo, remote="origin"):
_, refs = repo.remote_list_refs(remote)
return refs
except GLib.GError as e:
- raise OSTreeError("Failed to fetch remote refs from '{}': {}".format(remote, e.message)) from e
+ raise OSTreeError(message=e.message) from e
diff --git a/buildstream/_pipeline.py b/buildstream/_pipeline.py
index cb7e0ed5e..db4c7fa85 100644
--- a/buildstream/_pipeline.py
+++ b/buildstream/_pipeline.py
@@ -40,6 +40,7 @@ from . import Scope
from . import _site
from . import utils
from ._platform import Platform
+from ._artifactcache import configured_artifact_cache_urls
from ._scheduler import SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
@@ -145,8 +146,8 @@ class Pipeline():
self.initialize_workspaces()
- if use_remote_cache and self.artifacts.can_fetch():
- self.fetch_remote_refs()
+ if use_remote_cache:
+ self.initialize_remote_caches()
self.resolve_cache_keys(inconsistent)
@@ -174,14 +175,13 @@ class Pipeline():
self.project._set_workspace(element, source, workspace)
- def fetch_remote_refs(self):
- with self.timed_activity("Fetching remote refs", silent_nested=True):
- try:
- self.artifacts.initialize_remote()
- self.artifacts.fetch_remote_refs()
- except ArtifactError:
- self.message(MessageType.WARN, "Failed to fetch remote refs")
- self.artifacts.set_offline()
+ def initialize_remote_caches(self):
+ def remote_failed(url, error):
+ self.message(MessageType.WARN, "Failed to fetch remote refs from {}: {}".format(url, error))
+
+ with self.timed_activity("Initializing remote caches", silent_nested=True):
+ artifact_urls = configured_artifact_cache_urls(self.context, self.project)
+ self.artifacts.set_remotes(artifact_urls, on_failure=remote_failed)
def resolve_cache_keys(self, inconsistent):
if inconsistent:
@@ -446,12 +446,12 @@ class Pipeline():
if track_plan:
track = TrackQueue(save=save)
queues.append(track)
- if self.artifacts.can_fetch():
+ if self.artifacts.has_fetch_remotes():
pull = PullQueue()
queues.append(pull)
queues.append(fetch)
queues.append(build)
- if self.artifacts.can_push():
+ if self.artifacts.has_push_remotes():
push = PushQueue()
queues.append(push)
@@ -689,8 +689,8 @@ class Pipeline():
#
def pull(self, scheduler, elements):
- if not self.artifacts.can_fetch():
- raise PipelineError("Not configured for pulling artifacts")
+ if not self.artifacts.has_fetch_remotes():
+ raise PipelineError("Not artifact caches available for pulling artifacts")
plan = elements
self.assert_consistent(plan)
@@ -727,8 +727,8 @@ class Pipeline():
#
def push(self, scheduler, elements):
- if not self.artifacts.can_push():
- raise PipelineError("Not configured for pushing artifacts")
+ if not self.artifacts.has_push_remotes():
+ raise PipelineError("No artifact caches available for pushing artifacts")
plan = elements
self.assert_consistent(plan)
diff --git a/buildstream/_project.py b/buildstream/_project.py
index 50f5d9c57..35873fb72 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -28,6 +28,7 @@ from . import _yaml
from ._profile import Topics, profile_start, profile_end
from ._exceptions import LoadError, LoadErrorReason
from ._options import OptionPool
+from ._artifactcache import artifact_cache_urls_from_config_node
# The base BuildStream format version
@@ -172,9 +173,7 @@ class Project():
#
# Load artifacts pull/push configuration for this project
- artifacts = _yaml.node_get(config, Mapping, 'artifacts', default_value={})
- _yaml.node_validate(artifacts, ['url'])
- self.artifact_url = _yaml.node_get(artifacts, str, 'url', default_value='') or None
+ self.artifact_urls = artifact_cache_urls_from_config_node(config)
# Workspace configurations
self._workspaces = self._load_workspace_config()
diff --git a/buildstream/data/userconfig.yaml b/buildstream/data/userconfig.yaml
index f43989dcc..edd74ef3f 100644
--- a/buildstream/data/userconfig.yaml
+++ b/buildstream/data/userconfig.yaml
@@ -48,14 +48,6 @@ scheduler:
#
on-error: quit
-#
-# Artifacts
-#
-artifacts:
-
- # A url from which to push and pull prebuilt artifacts.
- # Some protocols only support pushing.
- url: ''
#
# Logging
diff --git a/buildstream/utils.py b/buildstream/utils.py
index a811fe2f1..caa02135c 100644
--- a/buildstream/utils.py
+++ b/buildstream/utils.py
@@ -34,6 +34,7 @@ import stat
import string
import subprocess
import tempfile
+import itertools
from contextlib import contextmanager
import pkg_resources
@@ -994,3 +995,32 @@ def _glob2re(pat):
else:
res = res + re.escape(c)
return res + '\Z(?ms)'
+
+
+# _deduplicate()
+#
+# Remove duplicate entries in a list or other iterable.
+#
+# Copied verbatim from the unique_everseen() example at
+# https://docs.python.org/3/library/itertools.html#itertools-recipes
+#
+# Args:
+# iterable (iterable): What to deduplicate
+# key (callable): Optional function to map from list entry to value
+#
+# Returns:
+# (generator): Generator that produces a deduplicated version of 'iterable'
+#
+def _deduplicate(iterable, key=None):
+ seen = set()
+ seen_add = seen.add
+ if key is None:
+ for element in itertools.filterfalse(seen.__contains__, iterable):
+ seen_add(element)
+ yield element
+ else:
+ for element in iterable:
+ k = key(element)
+ if k not in seen:
+ seen_add(k)
+ yield element
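
A quick usage sketch of the new helper (URLs hypothetical), assuming _deduplicate() is imported from buildstream.utils: the first occurrence wins, which is what gives the concatenated URL lists their priority semantics:

    from buildstream.utils import _deduplicate

    urls = ['ssh://cache-a/repo', 'https://cache-b/repo', 'ssh://cache-a/repo']
    assert list(_deduplicate(urls)) == ['ssh://cache-a/repo', 'https://cache-b/repo']
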
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index 5f06e550d..18a4b4654 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -1,7 +1,7 @@
import os
import shutil
import pytest
-from tests.testutils import cli, create_artifact_share
+from tests.testutils import cli, create_artifact_share, configure_remote_caches
from tests.testutils.site import IS_LINUX
from buildstream import _yaml
@@ -29,9 +29,9 @@ def assert_shared(cli, share, project, element_name):
@pytest.mark.parametrize(
'override_url, project_url, user_url',
[
- pytest.param('', '', 'share.repo', id='user-config'),
- pytest.param('', 'share.repo', '/tmp/share/user', id='project-config'),
- pytest.param('share.repo', '/tmp/share/project', '/tmp/share/user', id='project-override-in-user-config'),
+ pytest.param(None, None, 'share.repo', id='user-config'),
+ pytest.param(None, 'share.repo', None, id='project-config'),
+ pytest.param('share.repo', None, None, id='project-override-in-user-config'),
])
@pytest.mark.datafiles(DATA_DIR)
def test_push_pull(cli, tmpdir, datafiles, override_url, project_url, user_url):
@@ -49,29 +49,8 @@ def test_push_pull(cli, tmpdir, datafiles, override_url, project_url, user_url):
project_url = share.repo if project_url == 'share.repo' else project_url
user_url = share.repo if user_url == 'share.repo' else user_url
- # Configure artifact share
- cli.configure({
- 'artifacts': {
- 'url': user_url,
- },
- 'projects': {
- 'test': {
- 'artifacts': {
- 'url': override_url,
- }
- }
- }
- })
-
- if project_url:
- project_conf_file = str(datafiles.join('project.conf'))
- project_config = _yaml.load(project_conf_file)
- project_config.update({
- 'artifacts': {
- 'url': project_url,
- }
- })
- _yaml.dump(_yaml.node_sanitize(project_config), filename=project_conf_file)
+ project_conf_file = str(datafiles.join('project.conf'))
+ configure_remote_caches(cli, project_conf_file, override_url, project_url, user_url)
# Now try bst push
result = cli.run(project=project, args=['push', 'import-bin.bst'])
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index b5eddf81c..9d897a8e5 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -1,6 +1,6 @@
import os
import pytest
-from tests.testutils import cli, create_artifact_share
+from tests.testutils import cli, create_artifact_share, configure_remote_caches
from tests.testutils.site import IS_LINUX
from buildstream import _yaml
@@ -28,9 +28,9 @@ def assert_shared(cli, share, project, element_name):
@pytest.mark.parametrize(
'override_url, project_url, user_url',
[
- pytest.param('', '', 'share.repo', id='user-config'),
- pytest.param('', 'share.repo', '/tmp/share/user', id='project-config'),
- pytest.param('share.repo', '/tmp/share/project', '/tmp/share/user', id='project-override-in-user-config'),
+ pytest.param(None, None, 'share.repo', id='user-config'),
+ pytest.param(None, 'share.repo', None, id='project-config'),
+ pytest.param('share.repo', None, None, id='project-override-in-user-config'),
])
@pytest.mark.datafiles(DATA_DIR)
def test_push(cli, tmpdir, datafiles, override_url, user_url, project_url):
@@ -48,29 +48,8 @@ def test_push(cli, tmpdir, datafiles, override_url, user_url, project_url):
project_url = share.repo if project_url == 'share.repo' else project_url
user_url = share.repo if user_url == 'share.repo' else user_url
- # Configure artifact share
- cli.configure({
- 'artifacts': {
- 'url': user_url,
- },
- 'projects': {
- 'test': {
- 'artifacts': {
- 'url': override_url,
- }
- }
- }
- })
-
- if project_url:
- project_conf_file = str(datafiles.join('project.conf'))
- project_config = _yaml.load(project_conf_file)
- project_config.update({
- 'artifacts': {
- 'url': project_url,
- }
- })
- _yaml.dump(_yaml.node_sanitize(project_config), filename=project_conf_file)
+ project_conf_file = str(datafiles.join('project.conf'))
+ configure_remote_caches(cli, project_conf_file, override_url, project_url, user_url)
# Now try bst push
result = cli.run(project=project, args=['push', 'target.bst'])
diff --git a/tests/testutils/__init__.py b/tests/testutils/__init__.py
index 9fc450a28..f0eb171c3 100644
--- a/tests/testutils/__init__.py
+++ b/tests/testutils/__init__.py
@@ -1,3 +1,3 @@
from .runcli import cli
from .repo import create_repo, ALL_REPO_KINDS
-from .artifactshare import create_artifact_share
+from .artifactshare import create_artifact_share, configure_remote_caches
diff --git a/tests/testutils/artifactshare.py b/tests/testutils/artifactshare.py
index 907ed7671..ebf38f34b 100644
--- a/tests/testutils/artifactshare.py
+++ b/tests/testutils/artifactshare.py
@@ -3,6 +3,8 @@ import pytest
import subprocess
import os
+from buildstream import _yaml
+
from .site import HAVE_OSTREE_CLI
@@ -106,3 +108,35 @@ class ArtifactShare():
def create_artifact_share(directory):
return ArtifactShare(directory)
+
+
+# Write out cache configuration into the user config and project config files.
+#
+# User config is set through a helper on the 'cli' object, while the
+# project.conf file is updated manually using the _yaml module.
+#
+def configure_remote_caches(cli, project_conf_file, override_url, project_url=None, user_url=None):
+ user_config = {}
+ if user_url is not None:
+ user_config['artifacts'] = {
+ 'url': user_url
+ }
+
+ if override_url is not None:
+ user_config['projects'] = {
+ 'test': {
+ 'artifacts': {
+ 'url': override_url,
+ }
+ }
+ }
+ cli.configure(user_config)
+
+ if project_url is not None:
+ project_config = _yaml.load(project_conf_file)
+ project_config.update({
+ 'artifacts': {
+ 'url': project_url,
+ }
+ })
+ _yaml.dump(_yaml.node_sanitize(project_config), filename=project_conf_file)
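
As a usage sketch, inside a test such as test_push() above, each parametrized case ends up writing exactly one URL, since None entries are skipped (values illustrative):

    project_conf_file = str(datafiles.join('project.conf'))
    configure_remote_caches(cli, project_conf_file,
                            override_url=None,
                            project_url=share.repo,
                            user_url=None)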