Diffstat (limited to 'buildstream/_artifact.py'):
 buildstream/_artifact.py | 311 +++++++++++++++++--------------------
 1 file changed, 143 insertions(+), 168 deletions(-)
diff --git a/buildstream/_artifact.py b/buildstream/_artifact.py
index 6cf51ee2d..2240300c7 100644
--- a/buildstream/_artifact.py
+++ b/buildstream/_artifact.py
@@ -29,11 +29,11 @@ artifact composite interaction away from Element class
"""
import os
-import shutil
+import tempfile
+from ._protos.buildstream.v2.artifact_pb2 import Artifact as ArtifactProto
from . import _yaml
from . import utils
-from ._exceptions import ArtifactError
from .types import Scope
from .storage._casbaseddirectory import CasBasedDirectory
@@ -49,12 +49,18 @@ from .storage._casbaseddirectory import CasBasedDirectory
#
class Artifact():
+ version = 0
+
def __init__(self, element, context, *, strong_key=None, weak_key=None):
self._element = element
self._context = context
self._artifacts = context.artifactcache
self._cache_key = strong_key
self._weak_cache_key = weak_key
+ self._artifactdir = context.artifactdir
+ self._cas = context.get_cascache()
+ self._tmpdir = context.tmpdir
+ self._proto = None
self._metadata_keys = None # Strong and weak key tuple extracted from the artifact
self._metadata_dependencies = None # Dictionary of dependency strong keys from the artifact
@@ -69,7 +75,9 @@ class Artifact():
# (Directory): The virtual directory object
#
def get_files(self):
- return self._get_subdirectory("files")
+ files_digest = self._get_field_digest("files")
+
+ return CasBasedDirectory(self._cas, digest=files_digest)
# get_buildtree():
#
@@ -79,7 +87,9 @@ class Artifact():
# (Directory): The virtual directory object
#
def get_buildtree(self):
- return self._get_subdirectory("buildtree")
+ buildtree_digest = self._get_field_digest("buildtree")
+
+ return CasBasedDirectory(self._cas, digest=buildtree_digest)
# get_extract_key():
#
@@ -100,7 +110,6 @@ class Artifact():
# sandbox_build_dir (Directory): Virtual Directory object for the sandbox build-root
# collectvdir (Directory): Virtual Directory object from within the sandbox for collection
# buildresult (tuple): bool, short desc and detailed desc of result
- # keys (list): list of keys for the artifact commit metadata
# publicdata (dict): dict of public data to commit to artifact metadata
#
# Returns:
@@ -110,80 +119,78 @@ class Artifact():
context = self._context
element = self._element
+ size = 0
- assemblevdir = CasBasedDirectory(cas_cache=self._artifacts.cas)
- logsvdir = assemblevdir.descend("logs", create=True)
- metavdir = assemblevdir.descend("meta", create=True)
+ filesvdir = None
+ buildtreevdir = None
- # Create artifact directory structure
- assembledir = os.path.join(rootdir, 'artifact')
- logsdir = os.path.join(assembledir, 'logs')
- metadir = os.path.join(assembledir, 'meta')
- os.mkdir(assembledir)
- os.mkdir(logsdir)
- os.mkdir(metadir)
+ artifact = ArtifactProto()
- if collectvdir is not None:
- filesvdir = assemblevdir.descend("files", create=True)
- filesvdir.import_files(collectvdir)
+ artifact.version = self.version
- if sandbox_build_dir:
- buildtreevdir = assemblevdir.descend("buildtree", create=True)
- buildtreevdir.import_files(sandbox_build_dir)
+ # Store result
+ artifact.build_success = buildresult[0]
+ artifact.build_error = buildresult[1]
+ artifact.build_error_details = "" if not buildresult[2] else buildresult[2]
- # Write some logs out to normal directories: logsdir and metadir
- # Copy build log
- log_filename = context.get_log_filename()
- element._build_log_path = os.path.join(logsdir, 'build.log')
- if log_filename:
- shutil.copyfile(log_filename, element._build_log_path)
+ # Store keys
+ artifact.strong_key = self._cache_key
+ artifact.weak_key = self._weak_cache_key
+
+ artifact.was_workspaced = bool(element._get_workspace())
+
+ # Store files
+ if collectvdir:
+ filesvdir = CasBasedDirectory(cas_cache=self._cas)
+ filesvdir.import_files(collectvdir)
+ artifact.files.CopyFrom(filesvdir._get_digest())
+ size += filesvdir.get_size()
# Store public data
- _yaml.dump(_yaml.node_sanitize(publicdata), os.path.join(metadir, 'public.yaml'))
+ with tempfile.NamedTemporaryFile(dir=self._tmpdir) as tmp:
+ _yaml.dump(_yaml.node_sanitize(publicdata), tmp.name)
+ public_data_digest = self._cas.add_object(path=tmp.name, link_directly=True)
+ artifact.public_data.CopyFrom(public_data_digest)
+ size += public_data_digest.size_bytes
+
+ # Store build dependencies
+ for e in element.dependencies(Scope.BUILD):
+ new_build = artifact.build_deps.add()
+ new_build.element_name = e.name
+ new_build.cache_key = e._get_cache_key()
+ new_build.was_workspaced = bool(e._get_workspace())
+
+ # Store log file
+ log_filename = context.get_log_filename()
+ if log_filename:
+ digest = self._cas.add_object(path=log_filename)
+ element._build_log_path = self._cas.objpath(digest)
+ log = artifact.logs.add()
+ log.name = os.path.basename(log_filename)
+ log.digest.CopyFrom(digest)
+ size += log.digest.size_bytes
+
+ # Store build tree
+ if sandbox_build_dir:
+ buildtreevdir = CasBasedDirectory(cas_cache=self._cas)
+ buildtreevdir.import_files(sandbox_build_dir)
+ artifact.buildtree.CopyFrom(buildtreevdir._get_digest())
+ size += buildtreevdir.get_size()
- # Store result
- build_result_dict = {"success": buildresult[0], "description": buildresult[1]}
- if buildresult[2] is not None:
- build_result_dict["detail"] = buildresult[2]
- _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
-
- # Store keys.yaml
- _yaml.dump(_yaml.node_sanitize({
- 'strong': self._cache_key,
- 'weak': self._weak_cache_key,
- }), os.path.join(metadir, 'keys.yaml'))
-
- # Store dependencies.yaml
- _yaml.dump(_yaml.node_sanitize({
- e.name: e._get_cache_key() for e in element.dependencies(Scope.BUILD)
- }), os.path.join(metadir, 'dependencies.yaml'))
-
- # Store workspaced.yaml
- _yaml.dump(_yaml.node_sanitize({
- 'workspaced': bool(element._get_workspace())
- }), os.path.join(metadir, 'workspaced.yaml'))
-
- # Store workspaced-dependencies.yaml
- _yaml.dump(_yaml.node_sanitize({
- 'workspaced-dependencies': [
- e.name for e in element.dependencies(Scope.BUILD)
- if e._get_workspace()
- ]
- }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
-
- metavdir.import_files(metadir)
- logsvdir.import_files(logsdir)
-
- artifact_size = assemblevdir.get_size()
+ os.makedirs(os.path.dirname(os.path.join(
+ self._artifactdir, element.get_artifact_name())), exist_ok=True)
keys = utils._deduplicate([self._cache_key, self._weak_cache_key])
- self._artifacts.commit(element, assemblevdir, keys)
+ for key in keys:
+ path = os.path.join(self._artifactdir, element.get_artifact_name(key=key))
+ with open(path, mode='w+b') as f:
+ f.write(artifact.SerializeToString())
- return artifact_size
+ return size
# cached_buildtree()
#
# Check if artifact is cached with expected buildtree. A
- # buildtree will not be present if the res tof the partial artifact
+ # buildtree will not be present if the rest of the partial artifact
# is not cached.
#
# Returns:
@@ -193,14 +200,12 @@ class Artifact():
#
def cached_buildtree(self):
- element = self._element
-
- key = self.get_extract_key()
- if not self._artifacts.contains_subdir_artifact(element, key, 'buildtree'):
+ buildtree_digest = self._get_field_digest("buildtree")
+ if buildtree_digest:
+ return self._cas.contains_directory(buildtree_digest, with_files=True)
+ else:
return False
- return True
-
# buildtree_exists()
#
# Check if artifact was created with a buildtree. This does not check
@@ -211,8 +216,8 @@ class Artifact():
#
def buildtree_exists(self):
- artifact_vdir = self._get_directory()
- return artifact_vdir._exists('buildtree')
+ artifact = self._get_proto()
+ return bool(str(artifact.buildtree))
# load_public_data():
#
@@ -224,8 +229,8 @@ class Artifact():
def load_public_data(self):
# Load the public data from the artifact
- meta_vdir = self._get_subdirectory('meta')
- meta_file = meta_vdir._objpath('public.yaml')
+ artifact = self._get_proto()
+ meta_file = self._cas.objpath(artifact.public_data)
data = _yaml.load(meta_file, shortname='public.yaml')
return data
@@ -241,20 +246,10 @@ class Artifact():
#
def load_build_result(self):
- meta_vdir = self._get_subdirectory('meta')
-
- meta_file = meta_vdir._objpath('build-result.yaml')
- if not os.path.exists(meta_file):
- build_result = (True, "succeeded", None)
- return build_result
-
- data = _yaml.load(meta_file, shortname='build-result.yaml')
-
- success = _yaml.node_get(data, bool, 'success')
- description = _yaml.node_get(data, str, 'description', default_value=None)
- detail = _yaml.node_get(data, str, 'detail', default_value=None)
-
- build_result = (success, description, detail)
+ artifact = self._get_proto()
+ build_result = (artifact.build_success,
+ artifact.build_error,
+ artifact.build_error_details)
return build_result
@@ -271,14 +266,11 @@ class Artifact():
if self._metadata_keys is not None:
return self._metadata_keys
- # Extract the metadata dir
- meta_vdir = self._get_subdirectory('meta')
+ # Extract proto
+ artifact = self._get_proto()
- # Parse the expensive yaml now and cache the result
- meta_file = meta_vdir._objpath('keys.yaml')
- meta = _yaml.load(meta_file, shortname='keys.yaml')
- strong_key = _yaml.node_get(meta, str, 'strong')
- weak_key = _yaml.node_get(meta, str, 'weak')
+ strong_key = artifact.strong_key
+ weak_key = artifact.weak_key
self._metadata_keys = (strong_key, weak_key)
@@ -296,14 +288,10 @@ class Artifact():
if self._metadata_dependencies is not None:
return self._metadata_dependencies
- # Extract the metadata dir
- meta_vdir = self._get_subdirectory('meta')
-
- # Parse the expensive yaml now and cache the result
- meta_file = meta_vdir._objpath('dependencies.yaml')
- meta = _yaml.load(meta_file, shortname='dependencies.yaml')
+ # Extract proto
+ artifact = self._get_proto()
- self._metadata_dependencies = meta
+ self._metadata_dependencies = {dep.element_name: dep.cache_key for dep in artifact.build_deps}
return self._metadata_dependencies
@@ -319,14 +307,10 @@ class Artifact():
if self._metadata_workspaced is not None:
return self._metadata_workspaced
- # Extract the metadata dir
- meta_vdir = self._get_subdirectory('meta')
-
- # Parse the expensive yaml now and cache the result
- meta_file = meta_vdir._objpath('workspaced.yaml')
- meta = _yaml.load(meta_file, shortname='workspaced.yaml')
+ # Extract proto
+ artifact = self._get_proto()
- self._metadata_workspaced = _yaml.node_get(meta, bool, 'workspaced')
+ self._metadata_workspaced = artifact.was_workspaced
return self._metadata_workspaced
@@ -342,15 +326,11 @@ class Artifact():
if self._metadata_workspaced_dependencies is not None:
return self._metadata_workspaced_dependencies
- # Extract the metadata dir
- meta_vdir = self._get_subdirectory('meta')
+ # Extract proto
+ artifact = self._get_proto()
- # Parse the expensive yaml now and cache the result
- meta_file = meta_vdir._objpath('workspaced-dependencies.yaml')
- meta = _yaml.load(meta_file, shortname='workspaced-dependencies.yaml')
-
- self._metadata_workspaced_dependencies = _yaml.node_sanitize(_yaml.node_get(meta, list,
- 'workspaced-dependencies'))
+ self._metadata_workspaced_dependencies = [dep.element_name for dep in artifact.build_deps
+ if dep.was_workspaced]
return self._metadata_workspaced_dependencies
@@ -369,30 +349,21 @@ class Artifact():
def cached(self):
context = self._context
- try:
- vdir = self._get_directory()
- except ArtifactError:
- # Either ref or top-level artifact directory missing
- return False
+ artifact = self._get_proto()
- # Check whether all metadata is available
- metadigest = vdir._get_child_digest('meta')
- if not self._artifacts.cas.contains_directory(metadigest, with_files=True):
+ if not artifact:
return False
- # Additional checks only relevant if artifact was created with 'files' subdirectory
- if vdir._exists('files'):
- # Determine whether directories are required
- require_directories = context.require_artifact_directories
- # Determine whether file contents are required as well
- require_files = context.require_artifact_files or self._element._artifact_files_required()
+ # Determine whether directories are required
+ require_directories = context.require_artifact_directories
+ # Determine whether file contents are required as well
+ require_files = (context.require_artifact_files or
+ self._element._artifact_files_required())
- filesdigest = vdir._get_child_digest('files')
-
- # Check whether 'files' subdirectory is available, with or without file contents
- if (require_directories and
- not self._artifacts.cas.contains_directory(filesdigest, with_files=require_files)):
- return False
+ # Check whether 'files' subdirectory is available, with or without file contents
+ if (require_directories and str(artifact.files) and
+ not self._cas.contains_directory(artifact.files, with_files=require_files)):
+ return False
return True
@@ -408,46 +379,50 @@ class Artifact():
if not self._element._cached():
return False
- log_vdir = self._get_subdirectory('logs')
+ artifact = self._get_proto()
- logsdigest = log_vdir._get_digest()
- return self._artifacts.cas.contains_directory(logsdigest, with_files=True)
+ for logfile in artifact.logs:
+ if not self._cas.contains(logfile.digest.hash):
+ return False
- # _get_directory():
- #
- # Get a virtual directory for the artifact contents
- #
- # Args:
- # key (str): The key for the artifact to extract,
- # or None for the default key
+ return True
+
+ # _get_proto()
#
# Returns:
- # (Directory): The virtual directory object
+ # (Artifact): Artifact proto
#
- def _get_directory(self, key=None):
+ def _get_proto(self):
+ # Check if we've already cached the proto object
+ if self._proto is not None:
+ return self._proto
- element = self._element
+ key = self.get_extract_key()
- if key is None:
- key = self.get_extract_key()
+ proto_path = os.path.join(self._artifactdir,
+ self._element.get_artifact_name(key=key))
+ artifact = ArtifactProto()
+ try:
+ with open(proto_path, mode='r+b') as f:
+ artifact.ParseFromString(f.read())
+ except FileNotFoundError:
+ return None
- return self._artifacts.get_artifact_directory(element, key)
+ os.utime(proto_path)
+ # Cache the proto object
+ self._proto = artifact
- # _get_subdirectory():
- #
- # Get a virtual directory for the artifact subdir contents
- #
- # Args:
- # subdir (str): The specific artifact subdir
- # key (str): The key for the artifact to extract,
- # or None for the default key
+ return self._proto
+
+ # _get_field_digest()
#
# Returns:
- # (Directory): The virtual subdirectory object
+ # (Digest): The digest of the specified field
#
- def _get_subdirectory(self, subdir, key=None):
-
- artifact_vdir = self._get_directory(key)
- sub_vdir = artifact_vdir.descend(subdir)
+ def _get_field_digest(self, field):
+ artifact_proto = self._get_proto()
+ digest = getattr(artifact_proto, field)
+ if not str(digest):
+ return None
- return sub_vdir
+ return digest
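
Note on the new on-disk format: an artifact ref is no longer a
directory tree (files/, logs/, meta/*.yaml) but a single serialized
Artifact proto written to <artifactdir>/<artifact name>, once per
deduplicated cache key; the files, buildtree, logs and public data
stay in CAS and are referenced from the proto by digest. Below is a
minimal sketch of reading such a ref back, mirroring _get_proto()
above; the helper name load_artifact_proto is hypothetical, and the
absolute import path is assumed from the relative import in the diff:

    import os

    from buildstream._protos.buildstream.v2.artifact_pb2 import Artifact as ArtifactProto

    def load_artifact_proto(artifactdir, artifact_name):
        # The ref file lives at <artifactdir>/<artifact name>, where
        # artifact_name is the string element.get_artifact_name()
        # returns for a given cache key.
        path = os.path.join(artifactdir, artifact_name)
        proto = ArtifactProto()
        try:
            with open(path, mode='rb') as f:
                # Standard protobuf deserialization of the ref file,
                # as written by commit() with SerializeToString().
                proto.ParseFromString(f.read())
        except FileNotFoundError:
            # A missing ref means the artifact is not cached;
            # _get_proto() above returns None in the same case.
            return None
        return proto

Because the proto holds only keys and digests, writing one ref per key
(strong and weak) duplicates a small proto rather than the artifact
contents, and metadata queries parse a single proto instead of several
YAML files.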