diff options
Diffstat (limited to 'tests/frontend')
-rw-r--r-- | tests/frontend/push.py | 102 |
1 file changed, 102 insertions, 0 deletions
diff --git a/tests/frontend/push.py b/tests/frontend/push.py index 9c3947c2a..4f0fa3c19 100644 --- a/tests/frontend/push.py +++ b/tests/frontend/push.py @@ -98,6 +98,60 @@ def test_push(cli, tmpdir, datafiles): assert_shared(cli, share1, project, 'target.bst') assert_shared(cli, share2, project, 'target.bst') + +# Tests `bst artifact push $artifact_ref` +@pytest.mark.datafiles(DATA_DIR) +def test_push_artifact(cli, tmpdir, datafiles): + project = str(datafiles) + element = 'target.bst' + + # Configure a local cache + local_cache = os.path.join(str(tmpdir), 'cache') + cli.configure({'cachedir': local_cache}) + + with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share: + + # First build it without the artifact cache configured + result = cli.run(project=project, args=['build', element]) + result.assert_success() + + # Assert that the *artifact* is cached locally + cache_key = cli.get_element_key(project, element) + artifact_ref = os.path.join('test', os.path.splitext(element)[0], cache_key) + assert os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', artifact_ref)) + + # Configure artifact share + cli.configure({ + # + # FIXME: This test hangs "sometimes" if we allow + # concurrent push. + # + # It's not too bad to ignore since we're + # using the local artifact cache functionality + # only, but it should probably be fixed. 
+ # + 'scheduler': { + 'pushers': 1 + }, + 'artifacts': { + 'url': share.repo, + 'push': True, + } + }) + + # Now try bst artifact push the single artifact ref + result = cli.run(project=project, args=[ + 'artifact', 'push', artifact_ref + ]) + result.assert_success() + + # And finally assert that the artifact is in the share + # + # Note that assert shared tests that an element is shared by obtaining + # the artifact ref and asserting that the path exists in the share + assert_shared(cli, share, project, element) + + # Tests that: # # * `bst artifact push` fails if the element is not cached locally @@ -231,6 +285,54 @@ def test_push_all(cli, tmpdir, datafiles): assert_shared(cli, share, project, 'import-dev.bst') assert_shared(cli, share, project, 'compose-all.bst') +# Tests that `bst artifact push --deps all $artifact_ref` fails +@pytest.mark.datafiles(DATA_DIR) +def test_push_artifacts_all_deps_fails(cli, tmpdir, datafiles): + project = str(datafiles) + element = 'checkout-deps.bst' + + # Configure a local cache + local_cache = os.path.join(str(tmpdir), 'cache') + cli.configure({'cachedir': local_cache}) + + with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share: + + # First build it without the artifact cache configured + result = cli.run(project=project, args=['build', element]) + result.assert_success() + + # Assert that the *artifact* is cached locally + cache_key = cli.get_element_key(project, element) + artifact_ref = os.path.join('test', os.path.splitext(element)[0], cache_key) + assert os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', artifact_ref)) + + # Configure artifact share + cli.configure({ + # + # FIXME: This test hangs "sometimes" if we allow + # concurrent push. + # + # It's not too bad to ignore since we're + # using the local artifact cache functionality + # only, but it should probably be fixed. 
+ # + 'scheduler': { + 'pushers': 1 + }, + 'artifacts': { + 'url': share.repo, + 'push': True, + } + }) + + # Now try bst artifact push all the deps + result = cli.run(project=project, args=[ + 'artifact', 'push', '--deps', 'all', artifact_ref + ]) + result.assert_main_error(ErrorDomain.STREAM, None) + + assert "Error: '--deps all' is not supported for artifact refs" in result.stderr + # Tests that `bst build` won't push artifacts to the cache it just pulled from. # |