author     Tristan Maat <tristan.maat@codethink.co.uk>    2019-08-22 17:48:34 +0100
committer  Tristan Maat <tristan.maat@codethink.co.uk>    2019-09-06 15:55:10 +0100
commit     47a3f93d9795be6af849c112d4180f0ad50ca23b (patch)
tree       2d65dd2c24d9d6bd6795f0680811cf95ae3803e4 /tests/artifactcache
parent     e71621510de7c55aae4855f8bbb64eb2755346a8 (diff)
download   buildstream-47a3f93d9795be6af849c112d4180f0ad50ca23b.tar.gz
Allow splitting artifact caches
Artifact caches can now be split into separate storage and index remotes: the storage remote is expected to be a CASRemote, while the index remote is a BuildStream-specific remote with the extensions required to store BuildStream artifact protos.
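In user configuration the split is expressed by tagging each remote with a type. A minimal sketch of what this enables, written as the Python configuration dict that the new test_push_split test below constructs (the URLs are illustrative placeholders):

    user_config = {
        'artifacts': [
            # The index remote stores the BuildStream artifact protos
            {'url': 'https://index.example.com/cache1', 'push': True, 'type': 'index'},
            # The storage remote is a plain CAS remote holding the file content
            {'url': 'https://storage.example.com/cache1', 'push': True, 'type': 'storage'},
        ]
    }

A remote may instead be typed 'all' to serve both roles at once; the untyped specs in the existing tests keep working, which suggests 'all' is the default.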
Diffstat (limited to 'tests/artifactcache')
-rw-r--r--  tests/artifactcache/config.py             |  77
-rw-r--r--  tests/artifactcache/only-one/element.bst  |   1
-rw-r--r--  tests/artifactcache/push.py               | 100
3 files changed, 147 insertions(+), 31 deletions(-)
diff --git a/tests/artifactcache/config.py b/tests/artifactcache/config.py
index 2f235f38c..8b01a9ebe 100644
--- a/tests/artifactcache/config.py
+++ b/tests/artifactcache/config.py
@@ -6,7 +6,7 @@ import os
import pytest
-from buildstream._remote import RemoteSpec
+from buildstream._remote import RemoteSpec, RemoteType
from buildstream._artifactcache import ArtifactCache
from buildstream._project import Project
from buildstream.utils import _deduplicate
@@ -24,12 +24,28 @@ cache2 = RemoteSpec(url='https://example.com/cache2', push=False)
cache3 = RemoteSpec(url='https://example.com/cache3', push=False)
cache4 = RemoteSpec(url='https://example.com/cache4', push=False)
cache5 = RemoteSpec(url='https://example.com/cache5', push=False)
-cache6 = RemoteSpec(url='https://example.com/cache6', push=True)
+cache6 = RemoteSpec(url='https://example.com/cache6',
+ push=True,
+ type=RemoteType.ALL)
+cache7 = RemoteSpec(url='https://index.example.com/cache1',
+ push=True,
+ type=RemoteType.INDEX)
+cache8 = RemoteSpec(url='https://storage.example.com/cache1',
+ push=True,
+ type=RemoteType.STORAGE)
# Generate cache configuration fragments for the user config and project config files.
#
-def configure_remote_caches(override_caches, project_caches=None, user_caches=None):
+def configure_remote_caches(override_caches,
+ project_caches=None,
+ user_caches=None):
+ type_strings = {
+ RemoteType.INDEX: 'index',
+ RemoteType.STORAGE: 'storage',
+ RemoteType.ALL: 'all'
+ }
+
if project_caches is None:
project_caches = []
@@ -41,10 +57,15 @@ def configure_remote_caches(override_caches, project_caches=None, user_caches=None):
user_config['artifacts'] = {
'url': user_caches[0].url,
'push': user_caches[0].push,
+ 'type': type_strings[user_caches[0].type]
}
elif len(user_caches) > 1:
user_config['artifacts'] = [
- {'url': cache.url, 'push': cache.push} for cache in user_caches
+ {
+ 'url': cache.url,
+ 'push': cache.push,
+ 'type': type_strings[cache.type]
+ } for cache in user_caches
]
if len(override_caches) == 1:
@@ -53,6 +74,7 @@ def configure_remote_caches(override_caches, project_caches=None, user_caches=None):
'artifacts': {
'url': override_caches[0].url,
'push': override_caches[0].push,
+ 'type': type_strings[override_caches[0].type]
}
}
}
@@ -60,7 +82,11 @@ def configure_remote_caches(override_caches, project_caches=None, user_caches=None):
user_config['projects'] = {
'test': {
'artifacts': [
- {'url': cache.url, 'push': cache.push} for cache in override_caches
+ {
+ 'url': cache.url,
+ 'push': cache.push,
+ 'type': type_strings[cache.type]
+ } for cache in override_caches
]
}
}
@@ -72,12 +98,17 @@ def configure_remote_caches(override_caches, project_caches=None, user_caches=None):
'artifacts': {
'url': project_caches[0].url,
'push': project_caches[0].push,
+ 'type': type_strings[project_caches[0].type],
}
})
elif len(project_caches) > 1:
project_config.update({
'artifacts': [
- {'url': cache.url, 'push': cache.push} for cache in project_caches
+ {
+ 'url': cache.url,
+ 'push': cache.push,
+ 'type': type_strings[cache.type]
+ } for cache in project_caches
]
})
@@ -96,6 +127,7 @@ def configure_remote_caches(override_caches, project_caches=None, user_caches=None):
pytest.param([cache1], [cache2], [cache3], id='project-override-in-user-config'),
pytest.param([cache1, cache2], [cache3, cache4], [cache5, cache6], id='list-order'),
pytest.param([cache1, cache2, cache1], [cache2], [cache2, cache1], id='duplicates'),
+ pytest.param([cache7, cache8], [], [cache1], id='split-caches')
])
def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user_caches):
# Produce a fake user and project config with the cache configuration.
@@ -149,3 +181,36 @@ def test_missing_certs(cli, datafiles, config_key, config_value):
# This does not happen for a simple `bst show`.
result = cli.run(project=project, args=['artifact', 'pull', 'element.bst'])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
+
+
+# Assert that BuildStream complains when someone attempts to define
+# only one half of a split cache (an index remote without a storage remote, or vice versa).
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize(
+ 'override_caches, project_caches, user_caches',
+ [
+ # The leftmost cache is the highest priority one in all cases here.
+ pytest.param([], [], [cache7], id='index-user'),
+ pytest.param([], [], [cache8], id='storage-user'),
+ pytest.param([], [cache7], [], id='index-project'),
+ pytest.param([], [cache8], [], id='storage-project'),
+ pytest.param([cache7], [], [], id='index-override'),
+ pytest.param([cache8], [], [], id='storage-override'),
+ ])
+def test_only_one(cli, datafiles, override_caches, project_caches, user_caches):
+ project = os.path.join(datafiles.dirname, datafiles.basename, 'only-one')
+
+ # Produce a fake user and project config with the cache configuration.
+ user_config, project_config = configure_remote_caches(override_caches, project_caches, user_caches)
+ project_config['name'] = 'test'
+
+ cli.configure(user_config)
+
+ project_config_file = os.path.join(project, 'project.conf')
+ _yaml.roundtrip_dump(project_config, file=project_config_file)
+
+ # Use `pull` here to ensure we try to initialize the remotes, triggering the error
+ #
+ # This does not happen for a simple `bst show`.
+ result = cli.run(project=project, args=['artifact', 'pull', 'element.bst'])
+ result.assert_main_error(ErrorDomain.STREAM, None)
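Conversely, a sketch of the kind of configuration the new test_only_one test expects BuildStream to reject: one half of a split cache with no counterpart (URL again illustrative):

    user_config = {
        'artifacts': [
            # An index remote alone: the artifact protos would have no CAS
            # storage backing them
            {'url': 'https://index.example.com/cache1', 'push': True, 'type': 'index'}
        ]
    }

Running `bst artifact pull element.bst` against such a configuration is asserted above to fail in ErrorDomain.STREAM once the remotes are initialized.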
diff --git a/tests/artifactcache/only-one/element.bst b/tests/artifactcache/only-one/element.bst
new file mode 100644
index 000000000..3c29b4ea1
--- /dev/null
+++ b/tests/artifactcache/only-one/element.bst
@@ -0,0 +1 @@
+kind: autotools
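The push.py changes below rely on a create_split_share() helper from tests.testutils, added elsewhere in this commit. A sketch of its role, using only the calls that appear in the diff (artifact_name stands in for the value returned by cli.get_artifact_name()):

    with create_split_share(indexshare, storageshare) as (index, storage):
        # The index share serves the artifact proto...
        proto = index.get_artifact_proto(artifact_name)
        # ...while the storage share holds the CAS files the proto references
        assert storage.get_cas_files(proto) is not None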
diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index 20d9ccfec..364ac39f0 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -10,7 +10,7 @@ from buildstream._project import Project
from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
from buildstream.testing import cli # pylint: disable=unused-import
-from tests.testutils import create_artifact_share, dummy_context
+from tests.testutils import create_artifact_share, create_split_share, dummy_context
# Project directory
@@ -20,6 +20,39 @@ DATA_DIR = os.path.join(
)
+# Push the given element and return its artifact key for assertions.
+def _push(cli, cache_dir, project_dir, config_file, target):
+ with dummy_context(config=config_file) as context:
+ # Load the project manually
+ project = Project(project_dir, context)
+ project.ensure_fully_loaded()
+
+ # Assert that the element's artifact is cached
+ element = project.load_elements([target])[0]
+ element_key = cli.get_element_key(project_dir, target)
+ assert cli.artifact.is_cached(cache_dir, element, element_key)
+
+ # Create a local artifact cache handle
+ artifactcache = context.artifactcache
+
+ # Ensure the element's artifact member is initialised
+ # This is duplicated from Pipeline.resolve_elements()
+ # as this test does not use the cli frontend.
+ for e in element.dependencies(Scope.ALL):
+ # Determine initial element state.
+ e._update_state()
+
+ # Manually set up the CAS remotes
+ artifactcache.setup_remotes(use_config=True)
+ artifactcache.initialize_remotes()
+
+ assert artifactcache.has_push_remotes(plugin=element), \
+ "No remote configured for element {}".format(target)
+ assert element._push(), "Push operation failed"
+
+ return element_key
+
+
@pytest.mark.in_subprocess
@pytest.mark.datafiles(DATA_DIR)
def test_push(cli, tmpdir, datafiles):
@@ -50,36 +83,52 @@ def test_push(cli, tmpdir, datafiles):
# Write down the user configuration file
_yaml.roundtrip_dump(user_config, file=user_config_file)
+ element_key = _push(cli, rootcache_dir, project_dir, user_config_file, 'target.bst')
+ assert share.has_artifact(cli.get_artifact_name(project_dir, 'test', 'target.bst', cache_key=element_key))
- with dummy_context(config=user_config_file) as context:
- # Load the project manually
- project = Project(project_dir, context)
- project.ensure_fully_loaded()
- # Assert that the element's artifact is cached
- element = project.load_elements(['target.bst'])[0]
- element_key = cli.get_element_key(project_dir, 'target.bst')
- assert cli.artifact.is_cached(rootcache_dir, element, element_key)
+@pytest.mark.in_subprocess
+@pytest.mark.datafiles(DATA_DIR)
+def test_push_split(cli, tmpdir, datafiles):
+ project_dir = str(datafiles)
- # Create a local artifact cache handle
- artifactcache = context.artifactcache
+ # First build the project without the artifact cache configured
+ result = cli.run(project=project_dir, args=['build', 'target.bst'])
+ result.assert_success()
- # Ensure the element's artifact memeber is initialised
- # This is duplicated from Pipeline.resolve_elements()
- # as this test does not use the cli frontend.
- for e in element.dependencies(Scope.ALL):
- # Determine initial element state.
- e._update_state()
+ # Assert that we are now cached locally
+ assert cli.get_element_state(project_dir, 'target.bst') == 'cached'
- # Manually setup the CAS remotes
- artifactcache.setup_remotes(use_config=True)
- artifactcache.initialize_remotes()
+ indexshare = os.path.join(str(tmpdir), 'indexshare')
+ storageshare = os.path.join(str(tmpdir), 'storageshare')
- assert artifactcache.has_push_remotes(plugin=element), \
- "No remote configured for element target.bst"
- assert element._push(), "Push operation failed"
+ # Set up split index and storage artifact caches.
+ with create_split_share(indexshare, storageshare) as (index, storage):
+ rootcache_dir = os.path.join(str(tmpdir), 'cache')
+ user_config = {
+ 'scheduler': {
+ 'pushers': 1
+ },
+ 'artifacts': [{
+ 'url': index.repo,
+ 'push': True,
+ 'type': 'index'
+ }, {
+ 'url': storage.repo,
+ 'push': True,
+ 'type': 'storage'
+ }],
+ 'cachedir': rootcache_dir
+ }
+ config_path = str(tmpdir.join('buildstream.conf'))
+ _yaml.roundtrip_dump(user_config, file=config_path)
- assert share.has_artifact(cli.get_artifact_name(project_dir, 'test', 'target.bst', cache_key=element_key))
+ element_key = _push(cli, rootcache_dir, project_dir, config_path, 'target.bst')
+ proto = index.get_artifact_proto(cli.get_artifact_name(project_dir,
+ 'test',
+ 'target.bst',
+ cache_key=element_key))
+ assert storage.get_cas_files(proto) is not None
@pytest.mark.in_subprocess
@@ -88,7 +137,8 @@ def test_push_message(tmpdir, datafiles):
project_dir = str(datafiles)
# Set up an artifact cache.
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ artifactshare = os.path.join(str(tmpdir), 'artifactshare')
+ with create_artifact_share(artifactshare) as share:
# Configure artifact share
rootcache_dir = os.path.join(str(tmpdir), 'cache')
user_config_file = str(tmpdir.join('buildstream.conf'))