summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorbst-marge-bot <marge-bot@buildstream.build>2019-08-14 15:18:55 +0000
committerbst-marge-bot <marge-bot@buildstream.build>2019-08-14 15:18:55 +0000
commitb94d80defe3b60cc90427494c454035400d34cce (patch)
treea9f509316629d04d15e32f37378dad064af8ab83
parent4a86daca7c916ba9d50511f7627f1a40da55549c (diff)
parent940a4bbe69e59e9e7568348dffa6c0075d48d69a (diff)
downloadbuildstream-b94d80defe3b60cc90427494c454035400d34cce.tar.gz
Merge branch 'jennis/push_unbuilt_artifact' into 'master'
Ensure push fails when trying to push an unbuilt element Closes #1095 See merge request BuildStream/buildstream!1524
-rw-r--r--src/buildstream/_stream.py55
-rw-r--r--tests/frontend/push.py85
2 files changed, 120 insertions, 20 deletions
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 36610434b..c54fee1a7 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -40,7 +40,7 @@ from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, \
from ._pipeline import Pipeline, PipelineSelection
from ._profile import Topics, PROFILER
from ._state import State
-from .types import _KeyStrength
+from .types import _KeyStrength, _SchedulerErrorAction
from . import utils, _yaml, _site
from . import Scope, Consistency
@@ -469,26 +469,41 @@ class Stream():
self._message(MessageType.INFO, "Attempting to fetch missing artifact buildtrees")
self._add_queue(PullQueue(self._scheduler))
self._enqueue_plan(require_buildtrees)
- else:
- # FIXME: This hack should be removed as a result of refactoring
- # Element._update_state()
- #
- # This workaround marks all dependencies of all selected elements as
- # "pulled" before trying to push.
- #
- # Instead of lying to the elements and telling them they have already
- # been pulled, we should have something more consistent with how other
- # state bits are handled; and explicitly tell the elements that they
- # need to be pulled with something like Element._schedule_pull().
- #
- for element in elements:
- element._pull_done()
- self._scheduler.clear_queues()
- push_queue = ArtifactPushQueue(self._scheduler)
- self._add_queue(push_queue)
- self._enqueue_plan(elements, queue=push_queue)
- self._run()
+ # Before we try to push the artifacts, ensure they're cached
+ cached_elements = []
+ uncached_elements = []
+ self._message(MessageType.INFO, "Verifying that elements are cached")
+ for element in elements:
+ if element._cached():
+ cached_elements.append(element)
+ else:
+ msg = "{} is not cached".format(element.name)
+ if self._context.sched_error_action != _SchedulerErrorAction.CONTINUE:
+ raise StreamError("Push failed: " + msg)
+ else:
+ self._message(MessageType.WARN, msg)
+ uncached_elements.append(element)
+
+ if cached_elements:
+ self._scheduler.clear_queues()
+ push_queue = ArtifactPushQueue(self._scheduler)
+ self._add_queue(push_queue)
+ self._enqueue_plan(cached_elements, queue=push_queue)
+ self._run()
+
+ # If the user has selected to continue on error, fail the command
+ # and print a summary of artifacts which could not be pushed
+ #
+ # NOTE: Usually we check the _SchedulerErrorAction when a *job* has failed.
+ # However, we cannot create a PushQueue job unless we intentionally
+ # ready an uncached element in the PushQueue.
+ if self._context.sched_error_action == _SchedulerErrorAction.CONTINUE and uncached_elements:
+ names = [element.name for element in uncached_elements]
+ fail_str = "Error while pushing. The following elements were not pushed as they are " \
+ "not yet cached:\n\n\t{}\n".format("\n\t".join(names))
+
+ raise StreamError(fail_str)
# checkout()
#
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 93b7425b2..e92646154 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -98,6 +98,91 @@ def test_push(cli, tmpdir, datafiles):
assert_shared(cli, share1, project, 'target.bst')
assert_shared(cli, share2, project, 'target.bst')
+# Tests that:
+#
+# * `bst artifact push` fails if the element is not cached locally
+# * `bst artifact push` fails if multiple elements are not cached locally
+#
+@pytest.mark.datafiles(DATA_DIR)
+def test_push_fails(cli, tmpdir, datafiles):
+ project = str(datafiles)
+
+ # Set up the share
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ # Configure bst to be able to push to the share
+ cli.configure({
+ 'artifacts': [
+ {'url': share.repo, 'push': True},
+ ]
+ })
+
+        # First ensure that the target is *NOT* cached
+ assert cli.get_element_state(project, 'target.bst') != 'cached'
+
+ # Now try and push the target
+ result = cli.run(project=project, args=['artifact', 'push', 'target.bst'])
+ result.assert_main_error(ErrorDomain.STREAM, None)
+
+ assert "Push failed: target.bst is not cached" in result.stderr
+
+ # Now ensure that deps are also not cached
+ assert cli.get_element_state(project, 'import-bin.bst') != 'cached'
+ assert cli.get_element_state(project, 'import-dev.bst') != 'cached'
+ assert cli.get_element_state(project, 'compose-all.bst') != 'cached'
+
+
+# Tests that:
+#
+# * `bst artifact push` fails when one of the targets is not cached, but still pushes the others
+#
+@pytest.mark.datafiles(DATA_DIR)
+def test_push_fails_with_on_error_continue(cli, tmpdir, datafiles):
+ project = str(datafiles)
+
+ # Set up the share
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+
+ # First build the target (and its deps)
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
+ assert cli.get_element_state(project, 'import-dev.bst') == 'cached'
+
+ # Now delete the artifact of a dependency and ensure it is not in the cache
+ result = cli.run(project=project, args=['artifact', 'delete', 'import-dev.bst'])
+ assert cli.get_element_state(project, 'import-dev.bst') != 'cached'
+
+ # Configure bst to be able to push to the share
+ cli.configure({
+ 'artifacts': [
+ {'url': share.repo, 'push': True},
+ ]
+ })
+
+ # Now try and push the target with its deps using --on-error continue
+ # and assert that push failed, but what could be pushed was pushed
+ result = cli.run(project=project,
+ args=['--on-error=continue', 'artifact', 'push', '--deps', 'all', 'target.bst'])
+
+ # The overall process should return as failed
+ result.assert_main_error(ErrorDomain.STREAM, None)
+
+ # We should still have pushed what we could
+ assert_shared(cli, share, project, 'import-bin.bst')
+ assert_shared(cli, share, project, 'compose-all.bst')
+ assert_shared(cli, share, project, 'target.bst')
+
+ assert_not_shared(cli, share, project, 'import-dev.bst')
+ errors = [
+ "import-dev.bst is not cached",
+ (
+ "Error while pushing. The following elements were not pushed as they are not yet cached:\n"
+ "\n"
+ "\timport-dev.bst\n"
+ )
+ ]
+ for error in errors:
+ assert error in result.stderr
+
# Tests that `bst artifact push --deps all` pushes all dependencies of the given element.
#