author    Sam Thursfield <sam.thursfield@codethink.co.uk>  2017-11-28 12:03:10 +0000
committer Sam Thursfield <sam.thursfield@codethink.co.uk>  2018-01-11 18:18:13 +0000
commit    1f39477605f5a8caba487a6ebcf3bf4559babe5f (patch)
tree      a8d583fc395c8eb304fc27deb2999e6ba2b6c2d9 /tests/frontend
parent    51c17e1147392f15580fb2dd925055ad8863ab3e (diff)
download  buildstream-1f39477605f5a8caba487a6ebcf3bf4559babe5f.tar.gz
tests: Exercise the new multiple cache support
This adds a new test for parsing artifact cache configuration, which calls the helper function from the 'artifactcache' module directly rather than trying to infer from blind push and pull commands whether we got the complex precedence rules exactly right.

The frontend push/pull tests therefore no longer need to be so thorough about testing precedence; instead they are expanded to assert that multiple caches work correctly.
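For reference, the two configuration shapes these tests exercise look as follows when passed to the test harness's cli.configure(). This is a minimal sketch; the share URLs are placeholders, not values from this commit:

    # Minimal sketch of the configuration shapes exercised below;
    # the URLs are placeholder values.

    # A single cache, as before:
    cli.configure({
        'artifacts': {'url': 'https://cache.example.com', 'push': True}
    })

    # Multiple caches, in priority order; 'push' marks which
    # remotes may be pushed to:
    cli.configure({
        'artifacts': [
            {'url': 'https://cache1.example.com', 'push': False},
            {'url': 'https://cache2.example.com', 'push': True},
        ]
    })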
Diffstat (limited to 'tests/frontend')
-rw-r--r--  tests/frontend/pull.py  | 161
-rw-r--r--  tests/frontend/push.py  |  79
2 files changed, 141 insertions(+), 99 deletions(-)
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index 18a4b4654..b0c923338 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -1,11 +1,9 @@
import os
import shutil
import pytest
-from tests.testutils import cli, create_artifact_share, configure_remote_caches
+from tests.testutils import cli, create_artifact_share
from tests.testutils.site import IS_LINUX
-from buildstream import _yaml
-
# Project directory
DATA_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
@@ -25,44 +23,37 @@ def assert_shared(cli, share, project, element_name):
.format(share.repo, element_name))
+# Assert that a given artifact is NOT in the share
+#
+def assert_not_shared(cli, share, project, element_name):
+ # NOTE: 'test' here is the name of the project
+ # specified in the project.conf we are testing with.
+ #
+ cache_key = cli.get_element_key(project, element_name)
+ if share.has_artifact('test', element_name, cache_key):
+ raise AssertionError("Artifact share at {} unexpectedly contains the element {}"
+ .format(share.repo, element_name))
+
+
@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
-@pytest.mark.parametrize(
- 'override_url, project_url, user_url',
- [
- pytest.param(None, None, 'share.repo', id='user-config'),
- pytest.param(None, 'share.repo', None, id='project-config'),
- pytest.param('share.repo', None, None, id='project-override-in-user-config'),
- ])
@pytest.mark.datafiles(DATA_DIR)
-def test_push_pull(cli, tmpdir, datafiles, override_url, project_url, user_url):
+def test_push_pull_all(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
- # First build it without the artifact cache configured
- result = cli.run(project=project, args=['build', 'import-bin.bst'])
- result.assert_success()
-
- # Assert that we are now cached locally
- assert cli.get_element_state(project, 'import-bin.bst') == 'cached'
-
- override_url = share.repo if override_url == 'share.repo' else override_url
- project_url = share.repo if project_url == 'share.repo' else project_url
- user_url = share.repo if user_url == 'share.repo' else user_url
-
- project_conf_file = str(datafiles.join('project.conf'))
- configure_remote_caches(cli, project_conf_file, override_url, project_url, user_url)
-
- # Now try bst push
- result = cli.run(project=project, args=['push', 'import-bin.bst'])
+ # First build the target element and push to the remote.
+ cli.configure({
+ 'artifacts': {'url': share.repo, 'push': True}
+ })
+ result = cli.run(project=project, args=['build', 'target.bst'])
result.assert_success()
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
- # And finally assert that the artifact is in the share
- assert_shared(cli, share, project, 'import-bin.bst')
-
- # Make sure we update the summary in our artifact share,
- # we dont have a real server around to do it
- #
+ # Assert that everything is now cached in the remote.
share.update_summary()
+ all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
+ for element_name in all_elements:
+ assert_shared(cli, share, project, element_name)
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
@@ -72,76 +63,94 @@ def test_push_pull(cli, tmpdir, datafiles, override_url, project_url, user_url):
# Assert that we are now in a downloadable state, nothing
# is cached locally anymore
- assert cli.get_element_state(project, 'import-bin.bst') == 'downloadable'
+ for element_name in all_elements:
+ assert cli.get_element_state(project, element_name) == 'downloadable'
# Now try bst pull
- result = cli.run(project=project, args=['pull', 'import-bin.bst'])
+ result = cli.run(project=project, args=['pull', '--deps', 'all', 'target.bst'])
result.assert_success()
# And assert that it's again in the local cache, without having built
- assert cli.get_element_state(project, 'import-bin.bst') == 'cached'
+ for element_name in all_elements:
+ assert cli.get_element_state(project, element_name) == 'cached'
@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
-def test_push_pull_all(cli, tmpdir, datafiles):
+def test_pull_secondary_cache(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
- # First build it without the artifact cache configured
+ share1 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1'))
+ share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
+
+ # Build the target and push it to share2 only.
+ cli.configure({
+ 'artifacts': [
+ {'url': share1.repo, 'push': False},
+ {'url': share2.repo, 'push': True},
+ ]
+ })
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_success()
+
+ share1.update_summary()
+ share2.update_summary()
+
+ assert_not_shared(cli, share1, project, 'target.bst')
+ assert_shared(cli, share2, project, 'target.bst')
+
+ # Delete the user's local artifact cache.
+ artifacts = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifacts)
+
+ # Assert that the element is 'downloadable', i.e. we found it in share2.
+ assert cli.get_element_state(project, 'target.bst') == 'downloadable'
+
+
+@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
+@pytest.mark.datafiles(DATA_DIR)
+def test_push_pull_specific_remote(cli, tmpdir, datafiles):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+
+ good_share = create_artifact_share(os.path.join(str(tmpdir), 'goodartifactshare'))
+ bad_share = create_artifact_share(os.path.join(str(tmpdir), 'badartifactshare'))
+
+ # Build the target so we have it cached locally only.
result = cli.run(project=project, args=['build', 'target.bst'])
result.assert_success()
- # Assert that we are now cached locally
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ state = cli.get_element_state(project, 'target.bst')
+ assert state == 'cached'
- # Configure artifact share
+ # Configure the default push location to be bad_share; we will assert that
+ # nothing actually gets pushed there.
cli.configure({
- #
- # FIXME: This test hangs "sometimes" if we allow
- # concurrent push.
- #
- # It's not too bad to ignore since we're
- # using the local artifact cache functionality
- # only, but it should probably be fixed.
- #
- 'scheduler': {
- 'pushers': 1
- },
- 'artifacts': {
- 'url': share.repo,
- }
+ 'artifacts': {'url': bad_share.repo, 'push': True},
})
- # Now try bst push
- result = cli.run(project=project, args=['push', '--deps', 'all', 'target.bst'])
+ # Now try `bst push` to the good_share.
+ result = cli.run(project=project, args=[
+ 'push', 'target.bst', '--remote', good_share.repo
+ ])
result.assert_success()
- # And finally assert that the artifact is in the share
- all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
- for element_name in all_elements:
- assert_shared(cli, share, project, element_name)
+ good_share.update_summary()
+ bad_share.update_summary()
- # Make sure we update the summary in our artifact share,
- # we dont have a real server around to do it
- #
- share.update_summary()
+ # Assert that all the artifacts are in the share we pushed
+ # to, and not the other.
+ assert_shared(cli, good_share, project, 'target.bst')
+ assert_not_shared(cli, bad_share, project, 'target.bst')
# Now we've pushed, delete the user's local artifact cache
- # directory and try to redownload it from the share
+ # directory and try to redownload it from the good_share.
#
artifacts = os.path.join(cli.directory, 'artifacts')
shutil.rmtree(artifacts)
- # Assert that we are now in a downloadable state, nothing
- # is cached locally anymore
- for element_name in all_elements:
- assert cli.get_element_state(project, element_name) == 'downloadable'
-
- # Now try bst pull
- result = cli.run(project=project, args=['pull', '--deps', 'all', 'target.bst'])
+ result = cli.run(project=project, args=['pull', 'target.bst', '--remote',
+ good_share.repo])
result.assert_success()
# And assert that it's again in the local cache, without having built
- for element_name in all_elements:
- assert cli.get_element_state(project, element_name) == 'cached'
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 9d897a8e5..e4f39de65 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -1,10 +1,10 @@
import os
+import shutil
import pytest
-from tests.testutils import cli, create_artifact_share, configure_remote_caches
+from buildstream._exceptions import ErrorDomain
+from tests.testutils import cli, create_artifact_share
from tests.testutils.site import IS_LINUX
-from buildstream import _yaml
-
# Project directory
DATA_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
@@ -24,39 +24,71 @@ def assert_shared(cli, share, project, element_name):
.format(share.repo, element_name))
-@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
-@pytest.mark.parametrize(
- 'override_url, project_url, user_url',
- [
- pytest.param(None, None, 'share.repo', id='user-config'),
- pytest.param(None, 'share.repo', None, id='project-config'),
- pytest.param('share.repo', None, None, id='project-override-in-user-config'),
- ])
+# Assert that a given artifact is NOT in the share
+#
+def assert_not_shared(cli, share, project, element_name):
+ # NOTE: 'test' here is the name of the project
+ # specified in the project.conf we are testing with.
+ #
+ cache_key = cli.get_element_key(project, element_name)
+ if share.has_artifact('test', element_name, cache_key):
+ raise AssertionError("Artifact share at {} unexpectedly contains the element {}"
+ .format(share.repo, element_name))
+
+
@pytest.mark.datafiles(DATA_DIR)
-def test_push(cli, tmpdir, datafiles, override_url, user_url, project_url):
+def test_push(cli, tmpdir, datafiles):
project = str(datafiles)
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
- # First build it without the artifact cache configured
+ # First build the project without the artifact cache configured
result = cli.run(project=project, args=['build', 'target.bst'])
result.assert_success()
# Assert that we are now cached locally
assert cli.get_element_state(project, 'target.bst') == 'cached'
- override_url = share.repo if override_url == 'share.repo' else override_url
- project_url = share.repo if project_url == 'share.repo' else project_url
- user_url = share.repo if user_url == 'share.repo' else user_url
+ # Set up two artifact shares.
+ share1 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1'))
+ share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
- project_conf_file = str(datafiles.join('project.conf'))
- configure_remote_caches(cli, project_conf_file, override_url, project_url, user_url)
+ # Try pushing with no remotes configured. This should fail.
+ result = cli.run(project=project, args=['push', 'target.bst'])
+ result.assert_main_error(ErrorDomain.PIPELINE, None)
- # Now try bst push
+ # Configure bst to pull but not push from a cache and run `bst push`.
+ # This should also fail.
+ cli.configure({
+ 'artifacts': {'url': share1.repo, 'push': False},
+ })
result = cli.run(project=project, args=['push', 'target.bst'])
- result.assert_success()
+ result.assert_main_error(ErrorDomain.PIPELINE, None)
- # And finally assert that the artifact is in the share
- assert_shared(cli, share, project, 'target.bst')
+ # Configure bst to push to one of the caches and run `bst push`. This works.
+ cli.configure({
+ 'artifacts': [
+ {'url': share1.repo, 'push': False},
+ {'url': share2.repo, 'push': True},
+ ]
+ })
+ result = cli.run(project=project, args=['push', 'target.bst'])
+
+ assert_not_shared(cli, share1, project, 'target.bst')
+ assert_shared(cli, share2, project, 'target.bst')
+
+ # Now try pushing to both (making sure to empty the cache we just pushed
+ # to).
+ shutil.rmtree(share2.directory)
+ share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
+ cli.configure({
+ 'artifacts': [
+ {'url': share1.repo, 'push': True},
+ {'url': share2.repo, 'push': True},
+ ]
+ })
+ result = cli.run(project=project, args=['push', 'target.bst'])
+
+ assert_shared(cli, share1, project, 'target.bst')
+ assert_shared(cli, share2, project, 'target.bst')
@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@@ -87,6 +119,7 @@ def test_push_all(cli, tmpdir, datafiles):
},
'artifacts': {
'url': share.repo,
+ 'push': True,
}
})
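The new parsing test the commit message refers to lives outside tests/frontend and is not part of this diff. As a standalone illustration only (this is not the real BuildStream 'artifactcache' helper, just a sketch of the configuration shape it has to handle), the 'artifacts' key accepts either a single mapping or an ordered list of mappings:

    # Standalone sketch, NOT the real BuildStream helper: normalize the
    # 'artifacts' configuration, which may be a single mapping or an
    # ordered list of mappings, into (url, push) tuples.
    def parse_artifact_config(artifacts):
        if isinstance(artifacts, dict):
            artifacts = [artifacts]
        return [(cache['url'], cache.get('push', False)) for cache in artifacts]

    # The single-mapping and list forms used by the tests above:
    assert parse_artifact_config(
        {'url': 'https://a.example.com', 'push': True}
    ) == [('https://a.example.com', True)]

    assert parse_artifact_config([
        {'url': 'https://a.example.com'},
        {'url': 'https://b.example.com', 'push': True},
    ]) == [('https://a.example.com', False), ('https://b.example.com', True)]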