author    bst-marge-bot <marge-bot@buildstream.build>  2019-03-12 21:16:08 +0000
committer bst-marge-bot <marge-bot@buildstream.build>  2019-03-12 21:16:08 +0000
commit   2c3dfa4e1b92ead066b3fa25fe7c3037b4ba6c58 (patch)
tree     750061e5b634c6b012673644078c5f40004d3ffb
parent   e274adf868dc538d6ae678d2f5a9e4d636475c9a (diff)
parent   bc96d007991a44c8fcf6fbe46fcf4e3eaa0cc728 (diff)
download buildstream-laurence/update-readme.tar.gz
Merge branch 'tpollard/908' into 'master' (laurence/update-readme)

Artifact 'abstraction' class

Closes #908

See merge request BuildStream/buildstream!1175
-rw-r--r--  buildstream/_artifact.py  464
-rw-r--r--  buildstream/element.py    311
2 files changed, 544 insertions, 231 deletions
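
Before the diff itself, a brief orientation: this merge introduces an Artifact class so that Element no longer composes artifact directories by hand. The sketch below is illustrative only and not part of the patch; `element` and `context` are assumed to be a resolved Element and its BuildStream Context from an existing session, and the remaining names mirror the argument documentation in _artifact.py.

    # Illustrative sketch (not from the patch): `element` and `context`
    # are assumed to come from an existing BuildStream session.
    from buildstream._artifact import Artifact

    artifact = Artifact(element, context)

    # Virtual directories for the artifact contents; each call also
    # returns the cache key used to resolve the artifact
    files_vdir, key = artifact.get_files()
    buildtree_vdir, _ = artifact.get_buildtree()

    # After assembly, commit the artifact to the cache; the arguments
    # are documented on cache() below and the artifact size is returned
    artifact_size = artifact.cache(rootdir, sandbox_build_dir, collectvdir,
                                   buildresult, keys, publicdata)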
diff --git a/buildstream/_artifact.py b/buildstream/_artifact.py
new file mode 100644
index 000000000..45ea53408
--- /dev/null
+++ b/buildstream/_artifact.py
@@ -0,0 +1,464 @@
+#
+# Copyright (C) 2019 Codethink Limited
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+# Authors:
+# Tom Pollard <tom.pollard@codethink.co.uk>
+# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
+
+"""
+Artifact
+=========
+
+Implementation of the Artifact class, which aims to 'abstract' direct
+artifact composite interaction away from the Element class.
+
+"""
+
+import os
+import shutil
+
+from . import _yaml
+from . import Scope
+from .types import _KeyStrength
+from .storage._casbaseddirectory import CasBasedDirectory
+
+
+# An Artifact class to abstract artifact operations
+# from the Element class
+#
+# Args:
+# element (Element): The Element object
+# context (Context): The BuildStream context
+#
+class Artifact():
+
+ def __init__(self, element, context):
+ self._element = element
+ self._context = context
+ self._artifacts = context.artifactcache
+
+ # get_files():
+ #
+ # Get a virtual directory for the artifact files content
+ #
+ # Args:
+ # key (str): The key for the artifact to extract,
+ # or None for the default key
+ #
+ # Returns:
+ # (Directory): The virtual directory object
+ # (str): The chosen key
+ #
+ def get_files(self, key=None):
+ subdir = "files"
+
+ return self._get_subdirectory(subdir, key)
+
+ # get_buildtree():
+ #
+ # Get a virtual directory for the artifact buildtree content
+ #
+ # Args:
+ # key (str): The key for the artifact to extract,
+ # or None for the default key
+ #
+ # Returns:
+ # (Directory): The virtual directory object
+ # (str): The chosen key
+ #
+ def get_buildtree(self, key=None):
+ subdir = "buildtree"
+
+ return self._get_subdirectory(subdir, key)
+
+ # get_extract_key():
+ #
+ # Get the key used to extract the artifact
+ #
+ # Returns:
+ # (str): The key
+ #
+ def get_extract_key(self):
+
+ element = self._element
+ context = self._context
+
+        # Use the strong cache key, falling back to the weak key if the context allows it
+ key_strength = _KeyStrength.STRONG
+ key = element._get_cache_key(strength=key_strength)
+ if not context.get_strict() and not key:
+ key = element._get_cache_key(strength=_KeyStrength.WEAK)
+
+ return key
+
+ # cache():
+ #
+ # Create the artifact and commit to cache
+ #
+ # Args:
+    #     rootdir (str): An absolute path to the temp rootdir for artifact construction
+    #     sandbox_build_dir (Directory): Virtual Directory object for the sandbox build-root
+    #     collectvdir (Directory): Virtual Directory object from within the sandbox for collection
+    #     buildresult (tuple): bool success, short description and detailed description of the result
+ # keys (list): list of keys for the artifact commit metadata
+ # publicdata (dict): dict of public data to commit to artifact metadata
+ #
+ # Returns:
+ # (int): The size of the newly cached artifact
+ #
+ def cache(self, rootdir, sandbox_build_dir, collectvdir, buildresult, keys, publicdata):
+
+ context = self._context
+ element = self._element
+
+ assemblevdir = CasBasedDirectory(cas_cache=self._artifacts.cas)
+ logsvdir = assemblevdir.descend("logs", create=True)
+ metavdir = assemblevdir.descend("meta", create=True)
+ buildtreevdir = assemblevdir.descend("buildtree", create=True)
+
+ # Create artifact directory structure
+ assembledir = os.path.join(rootdir, 'artifact')
+ logsdir = os.path.join(assembledir, 'logs')
+ metadir = os.path.join(assembledir, 'meta')
+ os.mkdir(assembledir)
+ os.mkdir(logsdir)
+ os.mkdir(metadir)
+
+ if collectvdir is not None:
+ filesvdir = assemblevdir.descend("files", create=True)
+ filesvdir.import_files(collectvdir)
+
+        # cache_buildtrees defaults to 'always', so the default
+        # behaviour is to attempt to cache buildtrees. If only the
+        # buildtrees of failed artifacts are to be cached, the build
+        # result is queried first. Element types without a build-root
+        # dir will be cached with an empty buildtreedir regardless of
+        # this configuration, as there will be no applicable sandbox_build_dir.
+
+ if sandbox_build_dir:
+ buildtreevdir.import_files(sandbox_build_dir)
+
+ # Write some logs out to normal directories: logsdir and metadir
+ # Copy build log
+ log_filename = context.get_log_filename()
+ element._build_log_path = os.path.join(logsdir, 'build.log')
+ if log_filename:
+ shutil.copyfile(log_filename, element._build_log_path)
+
+ # Store public data
+ _yaml.dump(_yaml.node_sanitize(publicdata), os.path.join(metadir, 'public.yaml'))
+
+ # Store result
+ build_result_dict = {"success": buildresult[0], "description": buildresult[1]}
+ if buildresult[2] is not None:
+ build_result_dict["detail"] = buildresult[2]
+ _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
+
+ # Store keys.yaml
+ _yaml.dump(_yaml.node_sanitize({
+ 'strong': element._get_cache_key(),
+ 'weak': element._get_cache_key(_KeyStrength.WEAK),
+ }), os.path.join(metadir, 'keys.yaml'))
+
+ # Store dependencies.yaml
+ _yaml.dump(_yaml.node_sanitize({
+ e.name: e._get_cache_key() for e in element.dependencies(Scope.BUILD)
+ }), os.path.join(metadir, 'dependencies.yaml'))
+
+ # Store workspaced.yaml
+ _yaml.dump(_yaml.node_sanitize({
+ 'workspaced': bool(element._get_workspace())
+ }), os.path.join(metadir, 'workspaced.yaml'))
+
+ # Store workspaced-dependencies.yaml
+ _yaml.dump(_yaml.node_sanitize({
+ 'workspaced-dependencies': [
+ e.name for e in element.dependencies(Scope.BUILD)
+ if e._get_workspace()
+ ]
+ }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
+
+ metavdir.import_files(metadir)
+ logsvdir.import_files(logsdir)
+
+ artifact_size = assemblevdir.get_size()
+ self._artifacts.commit(element, assemblevdir, keys)
+
+ return artifact_size
+
+ # cached_buildtree()
+ #
+    # Check if the artifact is cached with the expected buildtree. A
+    # buildtree will not be present if the rest of the partial artifact
+    # is not cached.
+ #
+ # Returns:
+ # (bool): True if artifact cached with buildtree, False if
+ # element not cached or missing expected buildtree.
+ # Note this only confirms if a buildtree is present,
+ # not its contents.
+ #
+ def cached_buildtree(self):
+
+ context = self._context
+ element = self._element
+
+ if not element._cached():
+ return False
+
+ key_strength = _KeyStrength.STRONG if context.get_strict() else _KeyStrength.WEAK
+ if not self._artifacts.contains_subdir_artifact(element, element._get_cache_key(strength=key_strength),
+ 'buildtree'):
+ return False
+
+ return True
+
+ # load_public_data():
+ #
+ # Loads the public data from the cached artifact
+ #
+ # Returns:
+    #    (dict): The artifact's cached public data
+ #
+ def load_public_data(self):
+
+ element = self._element
+ assert element._cached()
+
+ # Load the public data from the artifact
+ artifact_vdir, _ = self._get_directory()
+ meta_file = artifact_vdir._objpath(['meta', 'public.yaml'])
+ data = _yaml.load(meta_file, shortname='meta/public.yaml')
+
+ return data
+
+ # load_build_result():
+ #
+ # Load the build result from the cached artifact
+ #
+ # Args:
+ # key (str): The key for the artifact to extract
+ #
+ # Returns:
+    #    (bool): Whether the cached artifact of this element represents a successful build
+ # (str): Short description of the result
+ # (str): Detailed description of the result
+ #
+ def load_build_result(self, key):
+
+ assert key is not None
+ artifact_vdir, _ = self._get_directory(key)
+
+ meta_file = artifact_vdir._objpath(['meta', 'build-result.yaml'])
+ if not os.path.exists(meta_file):
+ build_result = (True, "succeeded", None)
+ return build_result
+
+ data = _yaml.load(meta_file, shortname='meta/build-result.yaml')
+ build_result = (data["success"], data.get("description"), data.get("detail"))
+
+ return build_result
+
+ # get_metadata_keys():
+ #
+ # Retrieve the strong and weak keys from the given artifact.
+ #
+ # Args:
+ # key (str): The artifact key, or None for the default key
+    #     metadata_keys (dict): The element's cached strong/weak
+ # metadata keys, empty if not yet cached
+ #
+ # Returns:
+ # (str): The strong key
+ # (str): The weak key
+ # (dict): The key dict, None if not updated
+ #
+ def get_metadata_keys(self, key, metadata_keys):
+
+ # Now extract it and possibly derive the key
+ artifact_vdir, key = self._get_directory(key)
+
+ # Now try the cache, once we're sure about the key
+ if key in metadata_keys:
+ return (metadata_keys[key]['strong'],
+ metadata_keys[key]['weak'], None)
+
+ # Parse the expensive yaml now and cache the result
+ meta_file = artifact_vdir._objpath(['meta', 'keys.yaml'])
+ meta = _yaml.load(meta_file, shortname='meta/keys.yaml')
+ strong_key = meta['strong']
+ weak_key = meta['weak']
+
+ assert key in (strong_key, weak_key)
+
+ metadata_keys[strong_key] = meta
+ metadata_keys[weak_key] = meta
+
+ return (strong_key, weak_key, metadata_keys)
+
+ # get_metadata_dependencies():
+ #
+ # Retrieve the hash of dependency keys from the given artifact.
+ #
+ # Args:
+ # key (str): The artifact key, or None for the default key
+    #     metadata_dependencies (dict): The element's cached dependency metadata keys,
+    #                                   empty if not yet cached
+    #     metadata_keys (dict): The element's cached strong/weak
+ # metadata keys, empty if not yet cached
+ #
+ # Returns:
+ # (dict): A dictionary of element names and their keys
+    #    (dict): The dependencies key dict, None if not updated
+    #    (dict): The element's key dict, None if not updated
+ #
+ def get_metadata_dependencies(self, key, metadata_dependencies, metadata_keys):
+
+ # Extract it and possibly derive the key
+ artifact_vdir, key = self._get_directory(key)
+
+ # Now try the cache, once we're sure about the key
+ if key in metadata_dependencies:
+ return (metadata_dependencies[key], None, None)
+
+ # Parse the expensive yaml now and cache the result
+ meta_file = artifact_vdir._objpath(['meta', 'dependencies.yaml'])
+ meta = _yaml.load(meta_file, shortname='meta/dependencies.yaml')
+
+ # Cache it under both strong and weak keys
+ strong_key, weak_key, metadata_keys = self.get_metadata_keys(key, metadata_keys)
+ metadata_dependencies[strong_key] = meta
+ metadata_dependencies[weak_key] = meta
+
+ return (meta, metadata_dependencies, metadata_keys)
+
+ # get_metadata_workspaced():
+ #
+    # Retrieve whether the given artifact was workspaced.
+ #
+ # Args:
+ # key (str): The artifact key, or None for the default key
+    #     metadata_workspaced (dict): The element's cached boolean metadata
+    #                                 of whether it's workspaced, empty if
+    #                                 not yet cached
+    #     metadata_keys (dict): The element's cached strong/weak
+ # metadata keys, empty if not yet cached
+ #
+ # Returns:
+ # (bool): Whether the given artifact was workspaced
+ # (dict): The workspaced key dict, None if not updated
+    #    (dict): The element's key dict, None if not updated
+ #
+ def get_metadata_workspaced(self, key, metadata_workspaced, metadata_keys):
+
+ # Extract it and possibly derive the key
+ artifact_vdir, key = self._get_directory(key)
+
+ # Now try the cache, once we're sure about the key
+ if key in metadata_workspaced:
+ return (metadata_workspaced[key], None, None)
+
+ # Parse the expensive yaml now and cache the result
+ meta_file = artifact_vdir._objpath(['meta', 'workspaced.yaml'])
+ meta = _yaml.load(meta_file, shortname='meta/workspaced.yaml')
+ workspaced = meta['workspaced']
+
+ # Cache it under both strong and weak keys
+ strong_key, weak_key, metadata_keys = self.get_metadata_keys(key, metadata_keys)
+ metadata_workspaced[strong_key] = workspaced
+ metadata_workspaced[weak_key] = workspaced
+
+ return (workspaced, metadata_workspaced, metadata_keys)
+
+ # get_metadata_workspaced_dependencies():
+ #
+    # Retrieve the list of workspaced dependencies from the given artifact.
+ #
+ # Args:
+ # key (str): The artifact key, or None for the default key
+    #     metadata_workspaced_dependencies (dict): The element's cached metadata of
+ # which dependencies are workspaced,
+ # empty if not yet cached
+    #     metadata_keys (dict): The element's cached strong/weak
+ # metadata keys, empty if not yet cached
+ #
+ # Returns:
+ # (list): List of which dependencies are workspaced
+    #    (dict): The workspaced dependencies key dict, None if not updated
+    #    (dict): The element's key dict, None if not updated
+ #
+ def get_metadata_workspaced_dependencies(self, key, metadata_workspaced_dependencies,
+ metadata_keys):
+
+ # Extract it and possibly derive the key
+ artifact_vdir, key = self._get_directory(key)
+
+ # Now try the cache, once we're sure about the key
+ if key in metadata_workspaced_dependencies:
+ return (metadata_workspaced_dependencies[key], None, None)
+
+ # Parse the expensive yaml now and cache the result
+ meta_file = artifact_vdir._objpath(['meta', 'workspaced-dependencies.yaml'])
+ meta = _yaml.load(meta_file, shortname='meta/workspaced-dependencies.yaml')
+ workspaced = meta['workspaced-dependencies']
+
+ # Cache it under both strong and weak keys
+ strong_key, weak_key, metadata_keys = self.get_metadata_keys(key, metadata_keys)
+ metadata_workspaced_dependencies[strong_key] = workspaced
+ metadata_workspaced_dependencies[weak_key] = workspaced
+ return (workspaced, metadata_workspaced_dependencies, metadata_keys)
+
+ # _get_directory():
+ #
+ # Get a virtual directory for the artifact contents
+ #
+ # Args:
+ # key (str): The key for the artifact to extract,
+ # or None for the default key
+ #
+ # Returns:
+ # (Directory): The virtual directory object
+ # (str): The chosen key
+ #
+ def _get_directory(self, key=None):
+
+ element = self._element
+
+ if key is None:
+ key = self.get_extract_key()
+
+ return (self._artifacts.get_artifact_directory(element, key), key)
+
+ # _get_subdirectory():
+ #
+ # Get a virtual directory for the artifact subdir contents
+ #
+ # Args:
+ # subdir (str): The specific artifact subdir
+ # key (str): The key for the artifact to extract,
+ # or None for the default key
+ #
+ # Returns:
+ # (Directory): The virtual subdirectory object
+ # (str): The chosen key
+ #
+ def _get_subdirectory(self, subdir, key=None):
+
+ artifact_vdir, key = self._get_directory(key)
+ sub_vdir = artifact_vdir.descend(subdir)
+
+ return (sub_vdir, key)
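
One convention worth noting before the element.py changes: each get_metadata_*() helper above takes the Element's memoization dicts as arguments and returns None in place of any dict it did not update, so the caller only replaces its memoized copy when something changed. A hedged sketch of that calling convention, using the attribute names that appear in element.py below:

    # Sketch of the caller-side protocol (mirrored in element.py below):
    # the helper returns None for any memo dict it did not update.
    strong_key, weak_key, metadata_keys = artifact.get_metadata_keys(
        key, self.__metadata_keys)
    if metadata_keys:
        # meta/keys.yaml was parsed and the dict freshly populated, so
        # replace the memoized copy; otherwise the old dict is current
        self.__metadata_keys = metadata_keys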
diff --git a/buildstream/element.py b/buildstream/element.py
index 47ca04c28..b9643aee9 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -82,7 +82,6 @@ import contextlib
from contextlib import contextmanager
from functools import partial
import tempfile
-import shutil
import string
from . import _yaml
@@ -101,10 +100,10 @@ from ._platform import Platform
from .sandbox._config import SandboxConfig
from .sandbox._sandboxremote import SandboxRemote
from .types import _KeyStrength, CoreWarnings
+from ._artifact import Artifact
from .storage.directory import Directory
from .storage._filebaseddirectory import FileBasedDirectory
-from .storage._casbaseddirectory import CasBasedDirectory
from .storage.directory import VirtualDirectoryError
@@ -225,6 +224,7 @@ class Element(Plugin):
self.__required = False # Whether the artifact is required in the current session
self.__build_result = None # The result of assembling this Element (success, description, detail)
self._build_log_path = None # The path of the build log for this Element
+ self.__artifact = Artifact(self, context) # Artifact class for direct artifact composite interaction
self.__batch_prepare_assemble = False # Whether batching across prepare()/assemble() is configured
self.__batch_prepare_assemble_flags = 0 # Sandbox flags for batching across prepare()/assemble()
@@ -668,8 +668,7 @@ class Element(Plugin):
self.__assert_cached()
with self.timed_activity("Staging {}/{}".format(self.name, self._get_brief_display_key())):
- artifact_vdir, _ = self.__get_artifact_directory()
- files_vdir = artifact_vdir.descend('files')
+ files_vdir, _ = self.__artifact.get_files()
# Hard link it into the staging area
#
@@ -1479,19 +1478,18 @@ class Element(Plugin):
if not (mount_workspaces and self.__can_build_incrementally()):
with self.timed_activity("Staging local files at {}"
.format(workspace.get_absolute_path())):
- workspace.stage(temp_staging_directory)
+ workspace.stage(import_dir)
# Check if we have a cached buildtree to use
elif usebuildtree:
- artifact_vdir, _ = self.__get_artifact_directory()
- import_dir = artifact_vdir.descend('buildtree')
+ import_dir, _ = self.__artifact.get_buildtree()
if import_dir.is_empty():
detail = "Element type either does not expect a buildtree or it was explicitly cached without one."
self.warn("WARNING: {} Artifact contains an empty buildtree".format(self.name), detail=detail)
else:
# No workspace or cached buildtree, stage source directly
for source in self.sources():
- source._stage(temp_staging_directory)
+ source._stage(import_dir)
vdirectory.import_files(import_dir)
@@ -1693,109 +1691,46 @@ class Element(Plugin):
cleanup_rootdir()
def _cache_artifact(self, rootdir, sandbox, collect):
- with self.timed_activity("Caching artifact"):
- if collect is not None:
- try:
- sandbox_vroot = sandbox.get_virtual_directory()
- collectvdir = sandbox_vroot.descend(*collect.lstrip(os.sep).split(os.sep))
- except VirtualDirectoryError:
- # No collect directory existed
- collectvdir = None
- context = self._get_context()
+ context = self._get_context()
+ buildresult = self.__build_result
+ publicdata = self.__dynamic_public
+ sandbox_vroot = sandbox.get_virtual_directory()
+ collectvdir = None
+ sandbox_build_dir = None
- assemblevdir = CasBasedDirectory(cas_cache=context.artifactcache.cas)
- logsvdir = assemblevdir.descend("logs", create=True)
- metavdir = assemblevdir.descend("meta", create=True)
- buildtreevdir = assemblevdir.descend("buildtree", create=True)
-
- # Create artifact directory structure
- assembledir = os.path.join(rootdir, 'artifact')
- logsdir = os.path.join(assembledir, 'logs')
- metadir = os.path.join(assembledir, 'meta')
- os.mkdir(assembledir)
- os.mkdir(logsdir)
- os.mkdir(metadir)
-
- if collect is not None and collectvdir is not None:
- filesvdir = assemblevdir.descend("files", create=True)
- filesvdir.import_files(collectvdir)
-
- cache_buildtrees = context.cache_buildtrees
- build_success = self.__build_result[0]
-
- # cache_buildtrees defaults to 'always', as such the
- # default behaviour is to attempt to cache them. If only
- # caching failed artifact buildtrees, then query the build
- # result. Element types without a build-root dir will be cached
- # with an empty buildtreedir regardless of this configuration.
-
- if cache_buildtrees == 'always' or (cache_buildtrees == 'failure' and not build_success):
- sandbox_vroot = sandbox.get_virtual_directory()
- try:
- sandbox_build_dir = sandbox_vroot.descend(
- *self.get_variable('build-root').lstrip(os.sep).split(os.sep))
- buildtreevdir.import_files(sandbox_build_dir)
- except VirtualDirectoryError:
- # Directory could not be found. Pre-virtual
- # directory behaviour was to continue silently
- # if the directory could not be found.
- pass
-
- # Write some logs out to normal directories: logsdir and metadir
- # Copy build log
- log_filename = context.get_log_filename()
- self._build_log_path = os.path.join(logsdir, 'build.log')
- if log_filename:
- shutil.copyfile(log_filename, self._build_log_path)
-
- # Store public data
- _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
-
- # Store result
- build_result_dict = {"success": self.__build_result[0], "description": self.__build_result[1]}
- if self.__build_result[2] is not None:
- build_result_dict["detail"] = self.__build_result[2]
- _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
-
- # ensure we have cache keys
- self._assemble_done()
-
- # Store keys.yaml
- _yaml.dump(_yaml.node_sanitize({
- 'strong': self._get_cache_key(),
- 'weak': self._get_cache_key(_KeyStrength.WEAK),
- }), os.path.join(metadir, 'keys.yaml'))
-
- # Store dependencies.yaml
- _yaml.dump(_yaml.node_sanitize({
- e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
- }), os.path.join(metadir, 'dependencies.yaml'))
-
- # Store workspaced.yaml
- _yaml.dump(_yaml.node_sanitize({
- 'workspaced': bool(self._get_workspace())
- }), os.path.join(metadir, 'workspaced.yaml'))
-
- # Store workspaced-dependencies.yaml
- _yaml.dump(_yaml.node_sanitize({
- 'workspaced-dependencies': [
- e.name for e in self.dependencies(Scope.BUILD)
- if e._get_workspace()
- ]
- }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
+ cache_buildtrees = context.cache_buildtrees
+ build_success = buildresult[0]
+
+ if cache_buildtrees == 'always' or (cache_buildtrees == 'failure' and not build_success):
+ try:
+ sandbox_build_dir = sandbox_vroot.descend(
+ *self.get_variable('build-root').lstrip(os.sep).split(os.sep))
+ except VirtualDirectoryError:
+ # Directory could not be found. Pre-virtual
+ # directory behaviour was to continue silently
+ # if the directory could not be found.
+ pass
+
+ if collect is not None:
+ try:
+ collectvdir = sandbox_vroot.descend(*collect.lstrip(os.sep).split(os.sep))
+ except VirtualDirectoryError:
+ pass
- metavdir.import_files(metadir)
- logsvdir.import_files(logsdir)
+ # ensure we have cache keys
+ self._assemble_done()
+ keys = self.__get_cache_keys_for_commit()
- artifact_size = assemblevdir.get_size()
- self.__artifacts.commit(self, assemblevdir, self.__get_cache_keys_for_commit())
+ with self.timed_activity("Caching artifact"):
+ artifact_size = self.__artifact.cache(rootdir, sandbox_build_dir, collectvdir,
+ buildresult, keys, publicdata)
- if collect is not None and collectvdir is None:
- raise ElementError(
- "Directory '{}' was not found inside the sandbox, "
- "unable to collect artifact contents"
- .format(collect))
+ if collect is not None and collectvdir is None:
+ raise ElementError(
+ "Directory '{}' was not found inside the sandbox, "
+ "unable to collect artifact contents"
+ .format(collect))
return artifact_size
@@ -2093,17 +2028,7 @@ class Element(Plugin):
# not its contents.
#
def _cached_buildtree(self):
- context = self._get_context()
-
- if not self._cached():
- return False
-
- key_strength = _KeyStrength.STRONG if context.get_strict() else _KeyStrength.WEAK
- if not self.__artifacts.contains_subdir_artifact(self, self._get_cache_key(strength=key_strength),
- 'buildtree'):
- return False
-
- return True
+ return self.__artifact.cached_buildtree()
# _fetch()
#
@@ -2657,8 +2582,7 @@ class Element(Plugin):
def __compute_splits(self, include=None, exclude=None, orphans=True):
filter_func = self.__split_filter_func(include=include, exclude=exclude, orphans=orphans)
- artifact_vdir, _ = self.__get_artifact_directory()
- files_vdir = artifact_vdir.descend('files')
+ files_vdir, _ = self.__artifact.get_files()
element_files = files_vdir.list_relative_paths()
@@ -2684,44 +2608,6 @@ class Element(Plugin):
self.__whitelist_regex = re.compile(expression)
return self.__whitelist_regex.match(os.path.join(os.sep, path))
- # __get_extract_key():
- #
- # Get the key used to extract the artifact
- #
- # Returns:
- # (str): The key
- #
- def __get_extract_key(self):
-
- context = self._get_context()
- key = self.__strict_cache_key
-
- # Use weak cache key, if artifact is missing for strong cache key
- # and the context allows use of weak cache keys
- if not context.get_strict() and not self.__artifacts.contains(self, key):
- key = self._get_cache_key(strength=_KeyStrength.WEAK)
-
- return key
-
- # __get_artifact_directory():
- #
- # Get a virtual directory for the artifact contents
- #
- # Args:
- # key (str): The key for the artifact to extract,
- # or None for the default key
- #
- # Returns:
- # (Directory): The virtual directory object
- # (str): The chosen key
- #
- def __get_artifact_directory(self, key=None):
-
- if key is None:
- key = self.__get_extract_key()
-
- return (self.__artifacts.get_artifact_directory(self, key), key)
-
# __get_artifact_metadata_keys():
#
# Retrieve the strong and weak keys from the given artifact.
@@ -2735,24 +2621,14 @@ class Element(Plugin):
#
def __get_artifact_metadata_keys(self, key=None):
- # Now extract it and possibly derive the key
- artifact_vdir, key = self.__get_artifact_directory(key)
-
- # Now try the cache, once we're sure about the key
- if key in self.__metadata_keys:
- return (self.__metadata_keys[key]['strong'],
- self.__metadata_keys[key]['weak'])
+ metadata_keys = self.__metadata_keys
- # Parse the expensive yaml now and cache the result
- meta_file = artifact_vdir._objpath(['meta', 'keys.yaml'])
- meta = _yaml.load(meta_file, shortname='meta/keys.yaml')
- strong_key = meta['strong']
- weak_key = meta['weak']
+ strong_key, weak_key, metadata_keys = self.__artifact.get_metadata_keys(key, metadata_keys)
- assert key in (strong_key, weak_key)
+ # Update keys if needed
+ if metadata_keys:
+ self.__metadata_keys = metadata_keys
- self.__metadata_keys[strong_key] = meta
- self.__metadata_keys[weak_key] = meta
return (strong_key, weak_key)
# __get_artifact_metadata_dependencies():
@@ -2767,21 +2643,16 @@ class Element(Plugin):
#
def __get_artifact_metadata_dependencies(self, key=None):
- # Extract it and possibly derive the key
- artifact_vdir, key = self.__get_artifact_directory(key)
+ metadata = [self.__metadata_dependencies, self.__metadata_keys]
+ meta, meta_deps, meta_keys = self.__artifact.get_metadata_dependencies(key, *metadata)
- # Now try the cache, once we're sure about the key
- if key in self.__metadata_dependencies:
- return self.__metadata_dependencies[key]
+ # Update deps if needed
+ if meta_deps:
+ self.__metadata_dependencies = meta_deps
+        # Update keys if needed; if deps were not updated, keys were not either
+ if meta_keys:
+ self.__metadata_keys = meta_keys
- # Parse the expensive yaml now and cache the result
- meta_file = artifact_vdir._objpath(['meta', 'dependencies.yaml'])
- meta = _yaml.load(meta_file, shortname='meta/dependencies.yaml')
-
- # Cache it under both strong and weak keys
- strong_key, weak_key = self.__get_artifact_metadata_keys(key)
- self.__metadata_dependencies[strong_key] = meta
- self.__metadata_dependencies[weak_key] = meta
return meta
# __get_artifact_metadata_workspaced():
@@ -2794,24 +2665,19 @@ class Element(Plugin):
# Returns:
# (bool): Whether the given artifact was workspaced
#
- def __get_artifact_metadata_workspaced(self, key=None):
- # Extract it and possibly derive the key
- artifact_vdir, key = self.__get_artifact_directory(key)
+ def __get_artifact_metadata_workspaced(self, key=None):
- # Now try the cache, once we're sure about the key
- if key in self.__metadata_workspaced:
- return self.__metadata_workspaced[key]
+ metadata = [self.__metadata_workspaced, self.__metadata_keys]
+ workspaced, meta_workspaced, meta_keys = self.__artifact.get_metadata_workspaced(key, *metadata)
- # Parse the expensive yaml now and cache the result
- meta_file = artifact_vdir._objpath(['meta', 'workspaced.yaml'])
- meta = _yaml.load(meta_file, shortname='meta/workspaced.yaml')
- workspaced = meta['workspaced']
+ # Update workspaced if needed
+ if meta_workspaced:
+ self.__metadata_workspaced = meta_workspaced
+        # Update keys if needed; if workspaced was not updated, keys were not either
+ if meta_keys:
+ self.__metadata_keys = meta_keys
- # Cache it under both strong and weak keys
- strong_key, weak_key = self.__get_artifact_metadata_keys(key)
- self.__metadata_workspaced[strong_key] = workspaced
- self.__metadata_workspaced[weak_key] = workspaced
return workspaced
# __get_artifact_metadata_workspaced_dependencies():
@@ -2826,22 +2692,17 @@ class Element(Plugin):
#
def __get_artifact_metadata_workspaced_dependencies(self, key=None):
- # Extract it and possibly derive the key
- artifact_vdir, key = self.__get_artifact_directory(key)
+ metadata = [self.__metadata_workspaced_dependencies, self.__metadata_keys]
+ workspaced, meta_workspaced_deps,\
+ meta_keys = self.__artifact.get_metadata_workspaced_dependencies(key, *metadata)
- # Now try the cache, once we're sure about the key
- if key in self.__metadata_workspaced_dependencies:
- return self.__metadata_workspaced_dependencies[key]
+ # Update workspaced if needed
+ if meta_workspaced_deps:
+ self.__metadata_workspaced_dependencies = meta_workspaced_deps
+        # Update keys if needed; if workspaced deps were not updated, keys were not either
+ if meta_keys:
+ self.__metadata_keys = meta_keys
- # Parse the expensive yaml now and cache the result
- meta_file = artifact_vdir._objpath(['meta', 'workspaced-dependencies.yaml'])
- meta = _yaml.load(meta_file, shortname='meta/workspaced-dependencies.yaml')
- workspaced = meta['workspaced-dependencies']
-
- # Cache it under both strong and weak keys
- strong_key, weak_key = self.__get_artifact_metadata_keys(key)
- self.__metadata_workspaced_dependencies[strong_key] = workspaced
- self.__metadata_workspaced_dependencies[weak_key] = workspaced
return workspaced
# __load_public_data():
@@ -2849,32 +2710,20 @@ class Element(Plugin):
# Loads the public data from the cached artifact
#
def __load_public_data(self):
- self.__assert_cached()
assert self.__dynamic_public is None
- # Load the public data from the artifact
- artifact_vdir, _ = self.__get_artifact_directory()
- meta_file = artifact_vdir._objpath(['meta', 'public.yaml'])
- self.__dynamic_public = _yaml.load(meta_file, shortname='meta/public.yaml')
+ self.__dynamic_public = self.__artifact.load_public_data()
def __load_build_result(self, keystrength):
self.__assert_cached(keystrength=keystrength)
assert self.__build_result is None
- if keystrength is _KeyStrength.WEAK:
- key = self.__weak_cache_key
- else:
- key = self.__strict_cache_key
-
- artifact_vdir, _ = self.__get_artifact_directory(key)
-
- if not artifact_vdir._exists(['meta', 'build-result.yaml']):
- self.__build_result = (True, "succeeded", None)
- return
+        # _get_cache_key with _KeyStrength.STRONG returns self.__cache_key, which can be `None`,
+        # leading to a failed assertion from get_artifact_directory() using get_artifact_name(),
+        # so explicitly pass self.__strict_cache_key
+ key = self.__weak_cache_key if keystrength is _KeyStrength.WEAK else self.__strict_cache_key
- result_path = artifact_vdir._objpath(['meta', 'build-result.yaml'])
- data = _yaml.load(result_path)
- self.__build_result = (data["success"], data.get("description"), data.get("detail"))
+ self.__build_result = self.__artifact.load_build_result(key)
def __get_build_result(self, keystrength):
if keystrength is None: