summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--  src/buildstream/_artifact.py       7
-rw-r--r--  src/buildstream/_frontend/cli.py   2
-rw-r--r--  src/buildstream/_loader/loader.py  2
-rw-r--r--  src/buildstream/_stream.py        98
4 files changed, 59 insertions, 50 deletions
diff --git a/src/buildstream/_artifact.py b/src/buildstream/_artifact.py
index 4e7fa4911..493ca5d26 100644
--- a/src/buildstream/_artifact.py
+++ b/src/buildstream/_artifact.py
@@ -37,13 +37,13 @@ from .types import Scope
from .storage._casbaseddirectory import CasBasedDirectory
-# An Artifact class to abtract artifact operations
+# An Artifact class to abstract artifact operations
# from the Element class
#
# Args:
# element (Element): The Element object
# context (Context): The BuildStream context
-# strong_key (str): The elements strong cache key, dependant on context
+# strong_key (str): The elements strong cache key, dependent on context
# weak_key (str): The elements weak cache key
#
class Artifact():
@@ -75,7 +75,6 @@ class Artifact():
#
def get_files(self):
files_digest = self._get_field_digest("files")
-
return CasBasedDirectory(self._cas, digest=files_digest)
# get_buildtree():
@@ -338,7 +337,7 @@ class Artifact():
# Check whether the artifact corresponding to the stored cache key is
# available. This also checks whether all required parts of the artifact
# are available, which may depend on command and configuration. The cache
- # key used for querying is dependant on the current context.
+ # key used for querying is dependent on the current context.
#
# Returns:
# (bool): Whether artifact is in local cache
diff --git a/src/buildstream/_frontend/cli.py b/src/buildstream/_frontend/cli.py
index bf92161bf..12b9439cf 100644
--- a/src/buildstream/_frontend/cli.py
+++ b/src/buildstream/_frontend/cli.py
@@ -948,7 +948,7 @@ def workspace_list(app):
#############################################################
# Artifact Commands #
#############################################################
-@cli.group(short_help="Manipulate cached artifacts")
+@cli.group(short_help="Manipulate cached artifacts.")
def artifact():
"""Manipulate cached artifacts"""
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index 0ec9b9e17..4b66288ca 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -116,7 +116,7 @@ class Loader():
target_elements.append(element)
#
- # Now that we've resolve the dependencies, scan them for circular dependencies
+ # Now that we've resolved the dependencies, scan them for circular dependencies
#
# Set up a dummy element that depends on all top-level targets
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 3713c87a7..cbd635af7 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -31,7 +31,8 @@ import tempfile
from contextlib import contextmanager, suppress
from fnmatch import fnmatch
-from ._artifactelement import verify_artifact_ref
+from ._artifact import Artifact
+from ._artifactelement import verify_artifact_ref, ArtifactElement
from ._exceptions import StreamError, ImplError, BstError, ArtifactElementError, ArtifactError
from ._message import Message, MessageType
from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, \
@@ -521,8 +522,7 @@ class Stream():
# if pulling we need to ensure dependency artifacts are also pulled
selection = PipelineSelection.RUN if pull else PipelineSelection.NONE
- elements, _ = self._load((target,), (), selection=selection, use_artifact_config=True)
-
+ elements, _ = self._load((target,), (), selection=selection, use_artifact_config=True, load_refs=True)
target = elements[-1]
self._check_location_writable(location, force=force, tar=tar)
@@ -536,48 +536,58 @@ class Stream():
self._run()
# Stage deps into a temporary sandbox first
- try:
- with target._prepare_sandbox(scope=scope, directory=None,
- integrate=integrate) as sandbox:
-
- # Copy or move the sandbox to the target directory
- sandbox_vroot = sandbox.get_virtual_directory()
-
- if not tar:
- with target.timed_activity("Checking out files in '{}'"
- .format(location)):
- try:
- if hardlinks:
- self._checkout_hardlinks(sandbox_vroot, location)
- else:
- sandbox_vroot.export_files(location)
- except OSError as e:
- raise StreamError("Failed to checkout files: '{}'"
- .format(e)) from e
- else:
- if location == '-':
- mode = 'w|' + compression
- with target.timed_activity("Creating tarball"):
- # Save the stdout FD to restore later
- saved_fd = os.dup(sys.stdout.fileno())
- try:
- with os.fdopen(sys.stdout.fileno(), 'wb') as fo:
- with tarfile.open(fileobj=fo, mode=mode) as tf:
- sandbox_vroot.export_to_tar(tf, '.')
- finally:
- # No matter what, restore stdout for further use
- os.dup2(saved_fd, sys.stdout.fileno())
- os.close(saved_fd)
+ if isinstance(target, ArtifactElement):
+ try:
+ key = target._get_cache_key()
+ artifact = Artifact(target, self._context, strong_key=key)
+ virdir = artifact.get_files()
+ self._export_artifact(tar, location, compression, target, hardlinks, virdir)
+            except AttributeError as e:
+                raise ArtifactError("Artifact reference '{}' seems to be invalid. "
+                                    "Note that an Element name can also be used.".format(artifact)) from e
+ else:
+ try:
+ with target._prepare_sandbox(scope=scope, directory=None,
+ integrate=integrate) as sandbox:
+ # Copy or move the sandbox to the target directory
+ virdir = sandbox.get_virtual_directory()
+ self._export_artifact(tar, location, compression, target, hardlinks, virdir)
+ except BstError as e:
+ raise StreamError("Error while staging dependencies into a sandbox"
+ ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
+
+ def _export_artifact(self, tar, location, compression, target, hardlinks, virdir):
+ if not tar:
+ with target.timed_activity("Checking out files in '{}'"
+ .format(location)):
+ try:
+ if hardlinks:
+ self._checkout_hardlinks(virdir, location)
else:
- mode = 'w:' + compression
- with target.timed_activity("Creating tarball '{}'"
- .format(location)):
- with tarfile.open(location, mode=mode) as tf:
- sandbox_vroot.export_to_tar(tf, '.')
-
- except BstError as e:
- raise StreamError("Error while staging dependencies into a sandbox"
- ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
+ virdir.export_files(location)
+ except OSError as e:
+ raise StreamError("Failed to checkout files: '{}'"
+ .format(e)) from e
+ else:
+ if location == '-':
+ mode = 'w|' + compression
+ with target.timed_activity("Creating tarball"):
+ # Save the stdout FD to restore later
+ saved_fd = os.dup(sys.stdout.fileno())
+ try:
+ with os.fdopen(sys.stdout.fileno(), 'wb') as fo:
+ with tarfile.open(fileobj=fo, mode=mode) as tf:
+ virdir.export_to_tar(tf, '.')
+ finally:
+ # No matter what, restore stdout for further use
+ os.dup2(saved_fd, sys.stdout.fileno())
+ os.close(saved_fd)
+ else:
+ mode = 'w:' + compression
+ with target.timed_activity("Creating tarball '{}'"
+ .format(location)):
+ with tarfile.open(location, mode=mode) as tf:
+ virdir.export_to_tar(tf, '.')
# artifact_log()
#