summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorPhillip Smyth <phillip.smyth@codethink.co.uk>2018-07-31 17:17:34 +0000
committerPhillip Smyth <phillip.smyth@codethink.co.uk>2018-07-31 17:17:34 +0000
commit4fc1f5d1e82ac5e2f22440e648a901a83f8eaa61 (patch)
treecfac197b99585814c15800b4535099085fdb21d2
parentfcb87b0afef504d77696268c619ffd61cc034f94 (diff)
parent88cd61ea10bff44191ebfd44e3b5cc22495ad102 (diff)
downloadbuildstream-4fc1f5d1e82ac5e2f22440e648a901a83f8eaa61.tar.gz
Merge branch 'richardmaw/cache-fail' into 'master'
Store failed builds in the cache Closes #76 See merge request BuildStream/buildstream!475
-rw-r--r--NEWS7
-rw-r--r--buildstream/_frontend/widget.py4
-rw-r--r--buildstream/_pipeline.py4
-rw-r--r--buildstream/_scheduler/queues/buildqueue.py38
-rw-r--r--buildstream/_scheduler/queues/queue.py4
-rw-r--r--buildstream/buildelement.py20
-rw-r--r--buildstream/element.py270
-rw-r--r--buildstream/scriptelement.py3
-rw-r--r--tests/integration/cachedfail.py160
9 files changed, 417 insertions, 93 deletions
diff --git a/NEWS b/NEWS
index 4410b8459..05466b8c0 100644
--- a/NEWS
+++ b/NEWS
@@ -11,6 +11,13 @@ buildstream 1.1.5
o Added new `remote` source plugin for downloading file blobs
+ o Failed builds are included in the cache as well.
+ `bst checkout` will provide anything in `%{install-root}`.
+ A build including cached failures will cause any dependent elements
+ to not be scheduled and fail during artifact assembly,
+ and display the retry prompt during an interactive session.
+
+
=================
buildstream 1.1.4
=================
diff --git a/buildstream/_frontend/widget.py b/buildstream/_frontend/widget.py
index 1bae73ca0..24e146c16 100644
--- a/buildstream/_frontend/widget.py
+++ b/buildstream/_frontend/widget.py
@@ -368,7 +368,9 @@ class LogLine(Widget):
if consistency == Consistency.INCONSISTENT:
line = p.fmt_subst(line, 'state', "no reference", fg='red')
else:
- if element._cached():
+ if element._cached_failure():
+ line = p.fmt_subst(line, 'state', "failed", fg='red')
+ elif element._cached_success():
line = p.fmt_subst(line, 'state', "cached", fg='magenta')
elif consistency == Consistency.RESOLVED:
line = p.fmt_subst(line, 'state', "fetch needed", fg='red')
diff --git a/buildstream/_pipeline.py b/buildstream/_pipeline.py
index 852abf7ff..800a331fd 100644
--- a/buildstream/_pipeline.py
+++ b/buildstream/_pipeline.py
@@ -489,7 +489,7 @@ class _Planner():
self.plan_element(dep, depth)
# Don't try to plan builds of elements that are cached already
- if not element._cached():
+ if not element._cached_success():
for dep in element.dependencies(Scope.BUILD, recurse=False):
self.plan_element(dep, depth + 1)
@@ -501,4 +501,4 @@ class _Planner():
self.plan_element(root, 0)
depth_sorted = sorted(self.depth_map.items(), key=itemgetter(1), reverse=True)
- return [item[0] for item in depth_sorted if plan_cached or not item[0]._cached()]
+ return [item[0] for item in depth_sorted if plan_cached or not item[0]._cached_success()]
diff --git a/buildstream/_scheduler/queues/buildqueue.py b/buildstream/_scheduler/queues/buildqueue.py
index 376ef5ae2..5967fbf76 100644
--- a/buildstream/_scheduler/queues/buildqueue.py
+++ b/buildstream/_scheduler/queues/buildqueue.py
@@ -18,8 +18,12 @@
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
# Jürg Billeter <juerg.billeter@codethink.co.uk>
+from datetime import timedelta
+
from . import Queue, QueueStatus
+from ..jobs import ElementJob
from ..resources import ResourceType
+from ..._message import MessageType
# A queue which assembles elements
@@ -30,6 +34,38 @@ class BuildQueue(Queue):
complete_name = "Built"
resources = [ResourceType.PROCESS]
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._tried = set()
+
+ def enqueue(self, elts):
+ to_queue = []
+
+ for element in elts:
+ if not element._cached_failure() or element in self._tried:
+ to_queue.append(element)
+ continue
+
+ # Bypass queue processing entirely the first time it's tried.
+ self._tried.add(element)
+ _, description, detail = element._get_build_result()
+ logfile = element._get_build_log()
+ self._message(element, MessageType.FAIL, description,
+ detail=detail, action_name=self.action_name,
+ elapsed=timedelta(seconds=0),
+ logfile=logfile)
+ job = ElementJob(self._scheduler, self.action_name,
+ logfile, element=element, queue=self,
+ resources=self.resources,
+ action_cb=self.process,
+ complete_cb=self._job_done,
+ max_retries=self._max_retries)
+ self._done_queue.append(job)
+ self.failed_elements.append(element)
+ self._scheduler._job_complete_callback(job, False)
+
+ return super().enqueue(to_queue)
+
def process(self, element):
element._assemble()
return element._get_unique_id()
@@ -43,7 +79,7 @@ class BuildQueue(Queue):
# Keep it in the queue.
return QueueStatus.WAIT
- if element._cached():
+ if element._cached_success():
return QueueStatus.SKIP
if not element._buildable():
diff --git a/buildstream/_scheduler/queues/queue.py b/buildstream/_scheduler/queues/queue.py
index 6c1583495..28da17711 100644
--- a/buildstream/_scheduler/queues/queue.py
+++ b/buildstream/_scheduler/queues/queue.py
@@ -296,6 +296,7 @@ class Queue():
# See the Job object for an explanation of the call signature
#
def _job_done(self, job, element, success, result):
+ element._update_state()
# Update values that need to be synchronized in the main task
# before calling any queue implementation
@@ -335,8 +336,9 @@ class Queue():
# No exception occurred, handle the success/failure state in the normal way
#
+ self._done_queue.append(job)
+
if success:
- self._done_queue.append(job)
if processed:
self.processed_elements.append(element)
else:
diff --git a/buildstream/buildelement.py b/buildstream/buildelement.py
index c0fef8cdb..d729eaa81 100644
--- a/buildstream/buildelement.py
+++ b/buildstream/buildelement.py
@@ -233,12 +233,14 @@ class BuildElement(Element):
return commands
def __run_command(self, sandbox, cmd, cmd_name):
- self.status("Running {}".format(cmd_name), detail=cmd)
-
- # Note the -e switch to 'sh' means to exit with an error
- # if any untested command fails.
- #
- exitcode = sandbox.run(['sh', '-c', '-e', cmd + '\n'],
- SandboxFlags.ROOT_READ_ONLY)
- if exitcode != 0:
- raise ElementError("Command '{}' failed with exitcode {}".format(cmd, exitcode))
+ with self.timed_activity("Running {}".format(cmd_name)):
+ self.status("Running {}".format(cmd_name), detail=cmd)
+
+ # Note the -e switch to 'sh' means to exit with an error
+ # if any untested command fails.
+ #
+ exitcode = sandbox.run(['sh', '-c', '-e', cmd + '\n'],
+ SandboxFlags.ROOT_READ_ONLY)
+ if exitcode != 0:
+ raise ElementError("Command '{}' failed with exitcode {}".format(cmd, exitcode),
+ collect=self.get_variable('install-root'))
diff --git a/buildstream/element.py b/buildstream/element.py
index 49cc934e1..4260d32a5 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -140,11 +140,14 @@ class ElementError(BstError):
message (str): The error message to report to the user
detail (str): A possibly multiline, more detailed error message
reason (str): An optional machine readable reason string, used for test cases
+ collect (str): An optional directory containing partial install contents
temporary (bool): An indicator to whether the error may occur if the operation was run again. (*Since: 1.2*)
"""
- def __init__(self, message, *, detail=None, reason=None, temporary=False):
+ def __init__(self, message, *, detail=None, reason=None, collect=None, temporary=False):
super().__init__(message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason, temporary=temporary)
+ self.collect = collect
+
class Element(Plugin):
"""Element()
@@ -216,6 +219,7 @@ class Element(Plugin):
self.__consistency = Consistency.INCONSISTENT # Cached overall consistency state
self.__cached = None # Whether we have a cached artifact
self.__strong_cached = None # Whether we have a cached artifact
+ self.__weak_cached = None # Whether we have a cached artifact
self.__assemble_scheduled = False # Element is scheduled to be assembled
self.__assemble_done = False # Element is assembled
self.__tracking_scheduled = False # Sources are scheduled to be tracked
@@ -227,6 +231,8 @@ class Element(Plugin):
self.__tainted = None # Whether the artifact is tainted and should not be shared
self.__required = False # Whether the artifact is required in the current session
self.__artifact_size = None # The size of data committed to the artifact cache
+ self.__build_result = None # The result of assembling this Element
+ self._build_log_path = None # The path of the build log for this Element
# hash tables of loaded artifact metadata, hashed by key
self.__metadata_keys = {} # Strong and weak keys for this key
@@ -951,7 +957,51 @@ class Element(Plugin):
# the artifact cache
#
def _cached(self):
- return self.__cached
+ return self.__is_cached(keystrength=None)
+
+ # _get_build_result():
+ #
+ # Returns:
+ # (bool): Whether the artifact of this element present in the artifact cache is of a success
+ # (str): Short description of the result
+ # (str): Detailed description of the result
+ #
+ def _get_build_result(self):
+ return self.__get_build_result(keystrength=None)
+
+ # __set_build_result():
+ #
+ # Sets the assembly result
+ #
+ # Args:
+ # success (bool): Whether the result is a success
+ # description (str): Short description of the result
+ # detail (str): Detailed description of the result
+ #
+ def __set_build_result(self, success, description, detail=None):
+ self.__build_result = (success, description, detail)
+
+ # _cached_success():
+ #
+ # Returns:
+ # (bool): Whether this element is already present in
+ # the artifact cache and the element assembled successfully
+ #
+ def _cached_success(self):
+ return self.__cached_success(keystrength=None)
+
+ # _cached_failure():
+ #
+ # Returns:
+ # (bool): Whether this element is already present in
+ # the artifact cache and the element did not assemble successfully
+ #
+ def _cached_failure(self):
+ if not self._cached():
+ return False
+
+ success, _, _ = self._get_build_result()
+ return not success
# _buildable():
#
@@ -968,7 +1018,7 @@ class Element(Plugin):
# if the pull job is still pending as the remote cache may have an artifact
# that matches the strict cache key, which is preferred over a locally
# cached artifact with a weak cache key match.
- if not dependency._cached() or not dependency._get_cache_key(strength=_KeyStrength.STRONG):
+ if not dependency._cached_success() or not dependency._get_cache_key(strength=_KeyStrength.STRONG):
return False
if not self.__assemble_scheduled:
@@ -1039,6 +1089,8 @@ class Element(Plugin):
self.__weak_cache_key = None
self.__strict_cache_key = None
self.__strong_cached = None
+ self.__weak_cached = None
+ self.__build_result = None
return
if self.__weak_cache_key is None:
@@ -1061,6 +1113,9 @@ class Element(Plugin):
# Weak cache key could not be calculated yet
return
+ if not self.__weak_cached:
+ self.__weak_cached = self.__artifacts.contains(self, self.__weak_cache_key)
+
if not context.get_strict():
# Full cache query in non-strict mode requires both the weak and
# strict cache keys. However, we need to determine as early as
@@ -1068,9 +1123,9 @@ class Element(Plugin):
# for workspaced elements. For this cache check the weak cache keys
# are sufficient. However, don't update the `cached` attributes
# until the full cache query below.
- cached = self.__artifacts.contains(self, self.__weak_cache_key)
if (not self.__assemble_scheduled and not self.__assemble_done and
- not cached and not self._pull_pending() and self._is_required()):
+ not self.__cached_success(keystrength=_KeyStrength.WEAK) and
+ not self._pull_pending() and self._is_required()):
self._schedule_assemble()
return
@@ -1090,9 +1145,12 @@ class Element(Plugin):
self.__cached = self.__artifacts.contains(self, key_for_cache_lookup)
if not self.__strong_cached:
self.__strong_cached = self.__artifacts.contains(self, self.__strict_cache_key)
+ if key_for_cache_lookup == self.__weak_cache_key:
+ if not self.__weak_cached:
+ self.__weak_cached = self.__artifacts.contains(self, self.__weak_cache_key)
if (not self.__assemble_scheduled and not self.__assemble_done and
- not self.__cached and not self._pull_pending() and self._is_required()):
+ not self._cached_success() and not self._pull_pending() and self._is_required()):
# Workspaced sources are considered unstable if a build is pending
# as the build will modify the contents of the workspace.
# Determine as early as possible if a build is pending to discard
@@ -1434,7 +1492,7 @@ class Element(Plugin):
def _assemble(self):
# Assert call ordering
- assert not self._cached()
+ assert not self._cached_success()
context = self._get_context()
with self._output_file() as output_file:
@@ -1457,6 +1515,7 @@ class Element(Plugin):
self.__dynamic_public = _yaml.node_copy(self.__public)
# Call the abstract plugin methods
+ collect = None
try:
# Step 1 - Configure
self.configure_sandbox(sandbox)
@@ -1466,6 +1525,7 @@ class Element(Plugin):
self.__prepare(sandbox)
# Step 4 - Assemble
collect = self.assemble(sandbox)
+ self.__set_build_result(success=True, description="succeeded")
except BstError as e:
# If an error occurred assembling an element in a sandbox,
# then tack on the sandbox directory to the error
@@ -1489,80 +1549,95 @@ class Element(Plugin):
self.warn("Failed to preserve workspace state for failed build sysroot: {}"
.format(e))
- raise
+ if isinstance(e, ElementError):
+ collect = e.collect # pylint: disable=no-member
- collectdir = os.path.join(sandbox_root, collect.lstrip(os.sep))
- if not os.path.exists(collectdir):
- raise ElementError(
- "Directory '{}' was not found inside the sandbox, "
- "unable to collect artifact contents"
- .format(collect))
-
- # At this point, we expect an exception was raised leading to
- # an error message, or we have good output to collect.
-
- # Create artifact directory structure
- assembledir = os.path.join(rootdir, 'artifact')
- filesdir = os.path.join(assembledir, 'files')
- logsdir = os.path.join(assembledir, 'logs')
- metadir = os.path.join(assembledir, 'meta')
- buildtreedir = os.path.join(assembledir, 'buildtree')
- os.mkdir(assembledir)
- os.mkdir(filesdir)
- os.mkdir(logsdir)
- os.mkdir(metadir)
- os.mkdir(buildtreedir)
-
- # Hard link files from collect dir to files directory
- utils.link_files(collectdir, filesdir)
-
- sandbox_build_dir = os.path.join(sandbox_root, self.get_variable('build-root').lstrip(os.sep))
- # Hard link files from build-root dir to buildtreedir directory
- if os.path.isdir(sandbox_build_dir):
- utils.link_files(sandbox_build_dir, buildtreedir)
-
- # Copy build log
- log_filename = context.get_log_filename()
- if log_filename:
- shutil.copyfile(log_filename, os.path.join(logsdir, 'build.log'))
-
- # Store public data
- _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
-
- # ensure we have cache keys
- self._assemble_done()
-
- # Store keys.yaml
- _yaml.dump(_yaml.node_sanitize({
- 'strong': self._get_cache_key(),
- 'weak': self._get_cache_key(_KeyStrength.WEAK),
- }), os.path.join(metadir, 'keys.yaml'))
-
- # Store dependencies.yaml
- _yaml.dump(_yaml.node_sanitize({
- e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
- }), os.path.join(metadir, 'dependencies.yaml'))
-
- # Store workspaced.yaml
- _yaml.dump(_yaml.node_sanitize({
- 'workspaced': True if self._get_workspace() else False
- }), os.path.join(metadir, 'workspaced.yaml'))
-
- # Store workspaced-dependencies.yaml
- _yaml.dump(_yaml.node_sanitize({
- 'workspaced-dependencies': [
- e.name for e in self.dependencies(Scope.BUILD)
- if e._get_workspace()
- ]
- }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
-
- with self.timed_activity("Caching artifact"):
- self.__artifact_size = utils._get_dir_size(assembledir)
- self.__artifacts.commit(self, assembledir, self.__get_cache_keys_for_commit())
+ self.__set_build_result(success=False, description=str(e), detail=e.detail)
+ raise
+ finally:
+ if collect is not None:
+ collectdir = os.path.join(sandbox_root, collect.lstrip(os.sep))
+
+ # Create artifact directory structure
+ assembledir = os.path.join(rootdir, 'artifact')
+ filesdir = os.path.join(assembledir, 'files')
+ logsdir = os.path.join(assembledir, 'logs')
+ metadir = os.path.join(assembledir, 'meta')
+ buildtreedir = os.path.join(assembledir, 'buildtree')
+ os.mkdir(assembledir)
+ if collect is not None and os.path.exists(collectdir):
+ os.mkdir(filesdir)
+ os.mkdir(logsdir)
+ os.mkdir(metadir)
+ os.mkdir(buildtreedir)
+
+ # Hard link files from collect dir to files directory
+ if collect is not None and os.path.exists(collectdir):
+ utils.link_files(collectdir, filesdir)
+
+ sandbox_build_dir = os.path.join(sandbox_root, self.get_variable('build-root').lstrip(os.sep))
+ # Hard link files from build-root dir to buildtreedir directory
+ if os.path.isdir(sandbox_build_dir):
+ utils.link_files(sandbox_build_dir, buildtreedir)
+
+ # Copy build log
+ log_filename = context.get_log_filename()
+ self._build_log_path = os.path.join(logsdir, 'build.log')
+ if log_filename:
+ shutil.copyfile(log_filename, self._build_log_path)
+
+ # Store public data
+ _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
+
+ # Store result
+ build_result_dict = {"success": self.__build_result[0], "description": self.__build_result[1]}
+ if self.__build_result[2] is not None:
+ build_result_dict["detail"] = self.__build_result[2]
+ _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
+
+ # ensure we have cache keys
+ self._assemble_done()
+
+ # Store keys.yaml
+ _yaml.dump(_yaml.node_sanitize({
+ 'strong': self._get_cache_key(),
+ 'weak': self._get_cache_key(_KeyStrength.WEAK),
+ }), os.path.join(metadir, 'keys.yaml'))
+
+ # Store dependencies.yaml
+ _yaml.dump(_yaml.node_sanitize({
+ e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
+ }), os.path.join(metadir, 'dependencies.yaml'))
+
+ # Store workspaced.yaml
+ _yaml.dump(_yaml.node_sanitize({
+ 'workspaced': True if self._get_workspace() else False
+ }), os.path.join(metadir, 'workspaced.yaml'))
+
+ # Store workspaced-dependencies.yaml
+ _yaml.dump(_yaml.node_sanitize({
+ 'workspaced-dependencies': [
+ e.name for e in self.dependencies(Scope.BUILD)
+ if e._get_workspace()
+ ]
+ }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
+
+ with self.timed_activity("Caching artifact"):
+ self.__artifact_size = utils._get_dir_size(assembledir)
+ self.__artifacts.commit(self, assembledir, self.__get_cache_keys_for_commit())
+
+ if collect is not None and not os.path.exists(collectdir):
+ raise ElementError(
+ "Directory '{}' was not found inside the sandbox, "
+ "unable to collect artifact contents"
+ .format(collect))
# Finally cleanup the build dir
cleanup_rootdir()
+ def _get_build_log(self):
+ return self._build_log_path
+
# _pull_pending()
#
# Check whether the artifact will be pulled.
@@ -1983,12 +2058,19 @@ class Element(Plugin):
if workspace:
workspace.prepared = True
+ def __is_cached(self, keystrength):
+ if keystrength is None:
+ return self.__cached
+
+ return self.__strong_cached if keystrength == _KeyStrength.STRONG else self.__weak_cached
+
# __assert_cached()
#
# Raises an error if the artifact is not cached.
#
- def __assert_cached(self):
- assert self._cached(), "{}: Missing artifact {}".format(self, self._get_brief_display_key())
+ def __assert_cached(self, keystrength=_KeyStrength.STRONG):
+ assert self.__is_cached(keystrength=keystrength), "{}: Missing artifact {}".format(
+ self, self._get_brief_display_key())
# __get_tainted():
#
@@ -2448,6 +2530,38 @@ class Element(Plugin):
metadir = os.path.join(artifact_base, 'meta')
self.__dynamic_public = _yaml.load(os.path.join(metadir, 'public.yaml'))
+ def __load_build_result(self, keystrength):
+ self.__assert_cached(keystrength=keystrength)
+ assert self.__build_result is None
+
+ artifact_base, _ = self.__extract(key=self.__weak_cache_key if keystrength is _KeyStrength.WEAK
+ else self.__strict_cache_key)
+
+ metadir = os.path.join(artifact_base, 'meta')
+ result_path = os.path.join(metadir, 'build-result.yaml')
+ if not os.path.exists(result_path):
+ self.__build_result = (True, "succeeded", None)
+ return
+
+ data = _yaml.load(result_path)
+ self.__build_result = (data["success"], data.get("description"), data.get("detail"))
+
+ def __get_build_result(self, keystrength):
+ if keystrength is None:
+ keystrength = _KeyStrength.STRONG if self._get_context().get_strict() else _KeyStrength.WEAK
+
+ if self.__build_result is None:
+ self.__load_build_result(keystrength)
+
+ return self.__build_result
+
+ def __cached_success(self, keystrength):
+ if not self.__is_cached(keystrength=keystrength):
+ return False
+
+ success, _, _ = self.__get_build_result(keystrength=keystrength)
+ return success
+
def __get_cache_keys_for_commit(self):
keys = []
diff --git a/buildstream/scriptelement.py b/buildstream/scriptelement.py
index 46afda807..145dc2648 100644
--- a/buildstream/scriptelement.py
+++ b/buildstream/scriptelement.py
@@ -277,7 +277,8 @@ class ScriptElement(Element):
exitcode = sandbox.run(['sh', '-c', '-e', cmd + '\n'],
SandboxFlags.ROOT_READ_ONLY if self.__root_read_only else 0)
if exitcode != 0:
- raise ElementError("Command '{}' failed with exitcode {}".format(cmd, exitcode))
+ raise ElementError("Command '{}' failed with exitcode {}".format(cmd, exitcode),
+ collect=self.__install_root)
# Return where the result can be collected from
return self.__install_root
diff --git a/tests/integration/cachedfail.py b/tests/integration/cachedfail.py
new file mode 100644
index 000000000..f4cabb32c
--- /dev/null
+++ b/tests/integration/cachedfail.py
@@ -0,0 +1,160 @@
+import os
+import pytest
+
+from buildstream import _yaml
+from buildstream._exceptions import ErrorDomain
+
+from tests.testutils import cli_integration as cli, create_artifact_share
+from tests.testutils.site import IS_LINUX
+
+
+pytestmark = pytest.mark.integration
+
+
+DATA_DIR = os.path.join(
+ os.path.dirname(os.path.realpath(__file__)),
+ "project"
+)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_build_checkout_cached_fail(cli, tmpdir, datafiles):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ element_path = os.path.join(project, 'elements', 'element.bst')
+ workspace = os.path.join(cli.directory, 'workspace')
+ checkout = os.path.join(cli.directory, 'checkout')
+
+ # Write out our test target
+ element = {
+ 'kind': 'script',
+ 'depends': [
+ {
+ 'filename': 'base.bst',
+ 'type': 'build',
+ },
+ ],
+ 'config': {
+ 'commands': [
+ 'touch %{install-root}/foo',
+ 'false',
+ ],
+ },
+ }
+ _yaml.dump(element, element_path)
+
+ # Try to build it, this should result in a failure that contains the content
+ result = cli.run(project=project, args=['build', 'element.bst'])
+ result.assert_main_error(ErrorDomain.STREAM, None)
+
+ # Assert that it's cached in a failed artifact
+ assert cli.get_element_state(project, 'element.bst') == 'failed'
+
+ # Now check it out
+ result = cli.run(project=project, args=[
+ 'checkout', 'element.bst', checkout
+ ])
+ result.assert_success()
+
+ # Check that the checkout contains the file created before failure
+ filename = os.path.join(checkout, 'foo')
+ assert os.path.exists(filename)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_build_depend_on_cached_fail(cli, tmpdir, datafiles):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ dep_path = os.path.join(project, 'elements', 'dep.bst')
+ target_path = os.path.join(project, 'elements', 'target.bst')
+ workspace = os.path.join(cli.directory, 'workspace')
+ checkout = os.path.join(cli.directory, 'checkout')
+
+ dep = {
+ 'kind': 'script',
+ 'depends': [
+ {
+ 'filename': 'base.bst',
+ 'type': 'build',
+ },
+ ],
+ 'config': {
+ 'commands': [
+ 'touch %{install-root}/foo',
+ 'false',
+ ],
+ },
+ }
+ _yaml.dump(dep, dep_path)
+ target = {
+ 'kind': 'script',
+ 'depends': [
+ {
+ 'filename': 'base.bst',
+ 'type': 'build',
+ },
+ {
+ 'filename': 'dep.bst',
+ 'type': 'build',
+ },
+ ],
+ 'config': {
+ 'commands': [
+ 'test -e /foo',
+ ],
+ },
+ }
+ _yaml.dump(target, target_path)
+
+ # Try to build it, this should result in caching a failure to build dep
+ result = cli.run(project=project, args=['build', 'dep.bst'])
+ result.assert_main_error(ErrorDomain.STREAM, None)
+
+ # Assert that it's cached in a failed artifact
+ assert cli.get_element_state(project, 'dep.bst') == 'failed'
+
+ # Now we should fail because we've a cached fail of dep
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_main_error(ErrorDomain.STREAM, None)
+
+ # Assert that it's not yet built, since one of its dependencies isn't ready.
+ assert cli.get_element_state(project, 'target.bst') == 'waiting'
+
+
+@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("on_error", ("continue",))
+def test_push_cached_fail(cli, tmpdir, datafiles, on_error):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ element_path = os.path.join(project, 'elements', 'element.bst')
+ workspace = os.path.join(cli.directory, 'workspace')
+ checkout = os.path.join(cli.directory, 'checkout')
+
+ # Write out our test target
+ element = {
+ 'kind': 'script',
+ 'depends': [
+ {
+ 'filename': 'base.bst',
+ 'type': 'build',
+ },
+ ],
+ 'config': {
+ 'commands': [
+ 'false',
+ ],
+ },
+ }
+ _yaml.dump(element, element_path)
+
+ with create_artifact_share(os.path.join(str(tmpdir), 'remote')) as share:
+ cli.configure({
+ 'artifacts': {'url': share.repo, 'push': True},
+ })
+
+ # Build the element, continuing to finish active jobs on error.
+ result = cli.run(project=project, args=['--on-error={}'.format(on_error), 'build', 'element.bst'])
+ result.assert_main_error(ErrorDomain.STREAM, None)
+
+ # This element should have failed
+ assert cli.get_element_state(project, 'element.bst') == 'failed'
+ # This element should have been pushed to the remote
+ assert share.has_artifact('test', 'element.bst', cli.get_element_key(project, 'element.bst'))