author     James Ennis <james.ennis@codethink.co.uk>    2019-08-07 10:50:55 +0100
committer  bst-marge-bot <marge-bot@buildstream.build>  2019-08-14 14:36:51 +0000
commit     940a4bbe69e59e9e7568348dffa6c0075d48d69a (patch)
tree       a9f509316629d04d15e32f37378dad064af8ab83
parent     b1b9d60b85ca0b6af06a1faac3a4380da372883c (diff)
_stream.py: Ensure push does not fail if artifact not cached (jennis/push_unbuilt_artifact)
A test for this has also been added to tests/frontend/push.py
-rw-r--r--  src/buildstream/_stream.py   41
-rw-r--r--  tests/frontend/push.py       85
2 files changed, 120 insertions(+), 6 deletions(-)
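
The patch below makes Stream._push() split the requested elements into cached and uncached sets before any push jobs are queued: with the default on-error behaviour the command fails on the first uncached element, while with --on-error continue it pushes whatever is cached and then fails with a summary. A minimal standalone sketch of that decision logic, not part of the patch itself, using plain stand-ins (PushError, name/is_cached pairs, print) in place of BuildStream's Element, StreamError and messaging internals:

# sketch_push_check.py: condensed model of the new pre-flight check (stand-ins only)

class PushError(Exception):
    """Stands in for BuildStream's StreamError in this sketch."""


def plan_push(elements, on_error="quit"):
    # elements: iterable of (name, is_cached) pairs
    # on_error: "quit"/"terminate" abort on the first uncached element,
    #           "continue" pushes what it can and fails at the end
    cached, uncached = [], []
    for name, is_cached in elements:
        if is_cached:
            cached.append(name)
        elif on_error != "continue":
            # Default behaviour: fail before any push job is queued
            raise PushError("Push failed: {} is not cached".format(name))
        else:
            uncached.append(name)

    # Only cached artifacts would be handed to the push queue
    print("pushing:", ", ".join(cached) or "(nothing)")

    # With --on-error continue the command still fails overall,
    # summarising everything that could not be pushed
    if uncached:
        raise PushError(
            "Error while pushing. The following elements were not pushed "
            "as they are not yet cached:\n\n\t{}\n".format("\n\t".join(uncached)))


if __name__ == "__main__":
    try:
        plan_push([("import-bin.bst", True), ("import-dev.bst", False)],
                  on_error="continue")
    except PushError as err:
        print(err)
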
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 41a90f8ea..c54fee1a7 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -40,7 +40,7 @@ from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, \
 from ._pipeline import Pipeline, PipelineSelection
 from ._profile import Topics, PROFILER
 from ._state import State
-from .types import _KeyStrength
+from .types import _KeyStrength, _SchedulerErrorAction
 from . import utils, _yaml, _site
 from . import Scope, Consistency
 
@@ -470,11 +470,40 @@ class Stream():
             self._add_queue(PullQueue(self._scheduler))
             self._enqueue_plan(require_buildtrees)
 
-        self._scheduler.clear_queues()
-        push_queue = ArtifactPushQueue(self._scheduler)
-        self._add_queue(push_queue)
-        self._enqueue_plan(elements, queue=push_queue)
-        self._run()
+        # Before we try to push the artifacts, ensure they're cached
+        cached_elements = []
+        uncached_elements = []
+        self._message(MessageType.INFO, "Verifying that elements are cached")
+        for element in elements:
+            if element._cached():
+                cached_elements.append(element)
+            else:
+                msg = "{} is not cached".format(element.name)
+                if self._context.sched_error_action != _SchedulerErrorAction.CONTINUE:
+                    raise StreamError("Push failed: " + msg)
+                else:
+                    self._message(MessageType.WARN, msg)
+                    uncached_elements.append(element)
+
+        if cached_elements:
+            self._scheduler.clear_queues()
+            push_queue = ArtifactPushQueue(self._scheduler)
+            self._add_queue(push_queue)
+            self._enqueue_plan(cached_elements, queue=push_queue)
+            self._run()
+
+        # If the user has selected to continue on error, fail the command
+        # and print a summary of artifacts which could not be pushed
+        #
+        # NOTE: Usually we check the _SchedulerErrorAction when a *job* has failed.
+        #       However, we cannot create a PushQueue job unless we intentionally
+        #       ready an uncached element in the PushQueue.
+        if self._context.sched_error_action == _SchedulerErrorAction.CONTINUE and uncached_elements:
+            names = [element.name for element in uncached_elements]
+            fail_str = "Error while pushing. The following elements were not pushed as they are " \
+                       "not yet cached:\n\n\t{}\n".format("\n\t".join(names))
+
+            raise StreamError(fail_str)
 
     # checkout()
     #
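
As the NOTE in the hunk above explains, an uncached element never produces a push job, so the check cannot reuse the scheduler's failed-job handling and instead reads the context's scheduler error action directly. The fragment below is an assumed, simplified model of how the --on-error CLI option (continue, quit or terminate) feeds that check; the enum body here is illustrative and not copied from src/buildstream/types.py:

# Assumed, simplified model of the --on-error setting (illustrative, not BuildStream's code)
from enum import Enum


class _SchedulerErrorAction(Enum):
    CONTINUE = "continue"    # bst --on-error=continue: keep going, fail at the end
    QUIT = "quit"            # complete ongoing jobs, then stop
    TERMINATE = "terminate"  # stop immediately


def fail_fast_on_uncached(sched_error_action):
    # Mirrors the check in the diff above: raise StreamError on the first
    # uncached element unless the user asked to continue on error
    return sched_error_action != _SchedulerErrorAction.CONTINUE


# `bst --on-error=continue artifact push --deps all target.bst` ends up with
# CONTINUE here, so uncached dependencies only produce warnings until the
# final summary error; the tests added below exercise exactly these two paths.
assert fail_fast_on_uncached(_SchedulerErrorAction.QUIT)
assert not fail_fast_on_uncached(_SchedulerErrorAction.CONTINUE)
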
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 93b7425b2..e92646154 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -98,6 +98,91 @@ def test_push(cli, tmpdir, datafiles):
             assert_shared(cli, share1, project, 'target.bst')
             assert_shared(cli, share2, project, 'target.bst')
 
+# Tests that:
+#
+# * `bst artifact push` fails if the element is not cached locally
+# * `bst artifact push` fails if multiple elements are not cached locally
+#
+@pytest.mark.datafiles(DATA_DIR)
+def test_push_fails(cli, tmpdir, datafiles):
+    project = str(datafiles)
+
+    # Set up the share
+    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+        # Configure bst to be able to push to the share
+        cli.configure({
+            'artifacts': [
+                {'url': share.repo, 'push': True},
+            ]
+        })
+
+        # First ensure that the target is *NOT* cached
+        assert cli.get_element_state(project, 'target.bst') != 'cached'
+
+        # Now try and push the target
+        result = cli.run(project=project, args=['artifact', 'push', 'target.bst'])
+        result.assert_main_error(ErrorDomain.STREAM, None)
+
+        assert "Push failed: target.bst is not cached" in result.stderr
+
+        # Now ensure that deps are also not cached
+        assert cli.get_element_state(project, 'import-bin.bst') != 'cached'
+        assert cli.get_element_state(project, 'import-dev.bst') != 'cached'
+        assert cli.get_element_state(project, 'compose-all.bst') != 'cached'
+
+
+# Tests that:
+#
+# * `bst artifact push` fails when one of the targets is not cached, but still pushes the others
+#
+@pytest.mark.datafiles(DATA_DIR)
+def test_push_fails_with_on_error_continue(cli, tmpdir, datafiles):
+    project = str(datafiles)
+
+    # Set up the share
+    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+
+        # First build the target (and its deps)
+        result = cli.run(project=project, args=['build', 'target.bst'])
+        assert cli.get_element_state(project, 'target.bst') == 'cached'
+        assert cli.get_element_state(project, 'import-dev.bst') == 'cached'
+
+        # Now delete the artifact of a dependency and ensure it is not in the cache
+        result = cli.run(project=project, args=['artifact', 'delete', 'import-dev.bst'])
+        assert cli.get_element_state(project, 'import-dev.bst') != 'cached'
+
+        # Configure bst to be able to push to the share
+        cli.configure({
+            'artifacts': [
+                {'url': share.repo, 'push': True},
+            ]
+        })
+
+        # Now try and push the target with its deps using --on-error continue
+        # and assert that push failed, but what could be pushed was pushed
+        result = cli.run(project=project,
+                         args=['--on-error=continue', 'artifact', 'push', '--deps', 'all', 'target.bst'])
+
+        # The overall process should return as failed
+        result.assert_main_error(ErrorDomain.STREAM, None)
+
+        # We should still have pushed what we could
+        assert_shared(cli, share, project, 'import-bin.bst')
+        assert_shared(cli, share, project, 'compose-all.bst')
+        assert_shared(cli, share, project, 'target.bst')
+
+        assert_not_shared(cli, share, project, 'import-dev.bst')
+        errors = [
+            "import-dev.bst is not cached",
+            (
+                "Error while pushing. The following elements were not pushed as they are not yet cached:\n"
+                "\n"
+                "\timport-dev.bst\n"
+            )
+        ]
+        for error in errors:
+            assert error in result.stderr
+
 
 # Tests that `bst artifact push --deps all` pushes all dependencies of the given element.
 #