author    bst-marge-bot <marge-bot@buildstream.build>  2019-04-17 17:05:48 +0000
committer bst-marge-bot <marge-bot@buildstream.build>  2019-04-17 17:05:48 +0000
commit    9a20bbf96b9ee72ab227a7212678a3792f31cde5 (patch)
tree      1fed2ddbca36b3b346a228b4bb4ed7bf523f2cd8
parent    316a0e493dc661f2f22d3f1277ee4319cfe652d9 (diff)
parent    c2673f4a42c3925abb58dfc91196bb37ab5456af (diff)
Merge branch 'tpollard/955' into 'master'
Follow-up from "Artifact 'abstraction' class"

Closes #955

See merge request BuildStream/buildstream!1271
-rw-r--r--  buildstream/_artifact.py   226
-rw-r--r--  buildstream/element.py     211
2 files changed, 142 insertions(+), 295 deletions(-)
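At a glance, this merge moves artifact-metadata bookkeeping from Element into the Artifact class itself: an Artifact is now constructed with its strong and weak cache keys, and memoizes parsed metadata internally instead of threading key dictionaries through every call. A hedged before/after sketch of the call pattern (illustrative, not a verbatim excerpt from either revision):

```python
# Before: callers supplied the key and the Element-owned memo dict on
# every call, and received updated dicts back.
#   artifact = Artifact(element, context)
#   strong, weak, keys = artifact.get_metadata_keys(key, metadata_keys)
#
# After: the keys are bound at construction and the memo tables live on
# the Artifact, so the accessors take no arguments.
#   artifact = Artifact(element, context, strong_key=strong, weak_key=weak)
#   strong, weak = artifact.get_metadata_keys()
```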
diff --git a/buildstream/_artifact.py b/buildstream/_artifact.py
index ba9c626fc..4d9e4bf08 100644
--- a/buildstream/_artifact.py
+++ b/buildstream/_artifact.py
@@ -32,8 +32,9 @@ import os
import shutil
from . import _yaml
+from . import utils
from ._exceptions import ArtifactError
-from .types import Scope, _KeyStrength
+from .types import Scope
from .storage._casbaseddirectory import CasBasedDirectory
@@ -43,47 +44,49 @@ from .storage._casbaseddirectory import CasBasedDirectory
# Args:
# element (Element): The Element object
# context (Context): The BuildStream context
+# strong_key (str): The element's strong cache key, dependent on context
+# weak_key (str): The element's weak cache key
#
class Artifact():
- def __init__(self, element, context):
+ def __init__(self, element, context, *, strong_key=None, weak_key=None):
self._element = element
self._context = context
self._artifacts = context.artifactcache
+ self._cache_key = strong_key
+ self._weak_cache_key = weak_key
+
+ # hash tables of loaded artifact metadata, hashed by key
+ self._metadata_keys = {} # Strong and weak keys for this key
+ self._metadata_dependencies = {} # Dictionary of dependency strong keys
+ self._metadata_workspaced = {} # Boolean of whether it's workspaced
+ self._metadata_workspaced_dependencies = {} # List of which dependencies are workspaced
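The constructor change is the heart of the refactor. A minimal standalone sketch of the same pattern, with illustrative names and only a subset of the memo tables, assuming nothing beyond what the hunk above shows:

```python
class ArtifactSketch:
    """Illustrative stand-in for the new constructor shape, not the real class."""

    def __init__(self, element, context, *, strong_key=None, weak_key=None):
        self._element = element
        self._context = context
        self._cache_key = strong_key        # may be None until resolved later
        self._weak_cache_key = weak_key
        # Metadata memo tables, previously Element-level attributes, keyed by
        # whichever cache key the artifact was read under.
        self._metadata_keys = {}
        self._metadata_dependencies = {}

# Keyword-only keys keep call sites explicit about which key is being bound:
a = ArtifactSketch(element=None, context=None, weak_key="key-w")
assert a._cache_key is None and a._weak_cache_key == "key-w"
```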
# get_files():
#
# Get a virtual directory for the artifact files content
#
- # Args:
- # key (str): The key for the artifact to extract,
- # or None for the default key
- #
# Returns:
# (Directory): The virtual directory object
# (str): The chosen key
#
- def get_files(self, key=None):
+ def get_files(self):
subdir = "files"
- return self._get_subdirectory(subdir, key)
+ return self._get_subdirectory(subdir)
# get_buildtree():
#
# Get a virtual directory for the artifact buildtree content
#
- # Args:
- # key (str): The key for the artifact to extract,
- # or None for the default key
- #
# Returns:
# (Directory): The virtual directory object
# (str): The chosen key
#
- def get_buildtree(self, key=None):
+ def get_buildtree(self):
subdir = "buildtree"
- return self._get_subdirectory(subdir, key)
+ return self._get_subdirectory(subdir)
# get_extract_key():
#
@@ -93,17 +96,7 @@ class Artifact():
# (str): The key
#
def get_extract_key(self):
-
- element = self._element
- context = self._context
-
- # Use weak cache key, if context allows use of weak cache keys
- key_strength = _KeyStrength.STRONG
- key = element._get_cache_key(strength=key_strength)
- if not context.get_strict() and not key:
- key = element._get_cache_key(strength=_KeyStrength.WEAK)
-
- return key
+ return self._cache_key or self._weak_cache_key
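The one-liner relies on `or` short-circuiting over None: prefer the strong key, and fall back to the weak key when the strong key is unset, which reproduces the old strict/non-strict branch given how the keys are populated. A quick standalone check (hypothetical key strings):

```python
def get_extract_key(strong_key, weak_key):
    # `x or y` returns y when x is falsy; cache keys are either a
    # non-empty hex string or None, so this picks the first set key.
    return strong_key or weak_key

assert get_extract_key("key-strong", "key-weak") == "key-strong"
assert get_extract_key(None, "key-weak") == "key-weak"
```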
# cache():
#
@@ -120,7 +113,7 @@ class Artifact():
# Returns:
# (int): The size of the newly cached artifact
#
- def cache(self, rootdir, sandbox_build_dir, collectvdir, buildresult, keys, publicdata):
+ def cache(self, rootdir, sandbox_build_dir, collectvdir, buildresult, publicdata):
context = self._context
element = self._element
@@ -163,8 +156,8 @@ class Artifact():
# Store keys.yaml
_yaml.dump(_yaml.node_sanitize({
- 'strong': element._get_cache_key(),
- 'weak': element._get_cache_key(_KeyStrength.WEAK),
+ 'strong': self._cache_key,
+ 'weak': self._weak_cache_key,
}), os.path.join(metadir, 'keys.yaml'))
# Store dependencies.yaml
@@ -189,6 +182,7 @@ class Artifact():
logsvdir.import_files(logsdir)
artifact_size = assemblevdir.get_size()
+ keys = utils._deduplicate([self._cache_key, self._weak_cache_key])
self._artifacts.commit(element, assemblevdir, keys)
return artifact_size
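cache() now derives its ref list internally instead of taking a keys argument. Assuming utils._deduplicate is an order-preserving de-duplication helper (the behaviour this call site needs), the intent is that the artifact is committed once per distinct key even when the strong and weak keys coincide. A sketch of that assumed behaviour, not the library's actual implementation:

```python
def _deduplicate_sketch(sequence):
    # Order-preserving de-duplication: keep only the first occurrence.
    # set.add() returns None (falsy), so it runs purely for its side effect.
    seen = set()
    return [item for item in sequence if not (item in seen or seen.add(item))]

assert _deduplicate_sketch(["key-a", "key-a"]) == ["key-a"]
assert _deduplicate_sketch(["key-a", "key-b"]) == ["key-a", "key-b"]
```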
@@ -201,21 +195,15 @@ class Artifact():
#
# Returns:
# (bool): True if artifact cached with buildtree, False if
- # element not cached or missing expected buildtree.
- # Note this only confirms if a buildtree is present,
- # not its contents.
+ # missing expected buildtree. Note this only confirms
+ # if a buildtree is present, not its contents.
#
def cached_buildtree(self):
- context = self._context
element = self._element
- if not element._cached():
- return False
-
- key_strength = _KeyStrength.STRONG if context.get_strict() else _KeyStrength.WEAK
- if not self._artifacts.contains_subdir_artifact(element, element._get_cache_key(strength=key_strength),
- 'buildtree'):
+ key = self.get_extract_key()
+ if not self._artifacts.contains_subdir_artifact(element, key, 'buildtree'):
return False
return True
@@ -230,9 +218,6 @@ class Artifact():
#
def buildtree_exists(self):
- if not self._element._cached():
- return False
-
artifact_vdir, _ = self._get_directory()
return artifact_vdir._exists('buildtree')
@@ -245,13 +230,10 @@ class Artifact():
#
def load_public_data(self):
- element = self._element
- assert element._cached()
-
# Load the public data from the artifact
- artifact_vdir, _ = self._get_directory()
- meta_file = artifact_vdir._objpath('meta', 'public.yaml')
- data = _yaml.load(meta_file, shortname='meta/public.yaml')
+ meta_vdir, _ = self._get_subdirectory('meta')
+ meta_file = meta_vdir._objpath('public.yaml')
+ data = _yaml.load(meta_file, shortname='public.yaml')
return data
@@ -259,25 +241,21 @@ class Artifact():
#
# Load the build result from the cached artifact
#
- # Args:
- # key (str): The key for the artifact to extract
- #
# Returns:
# (bool): Whether this element's artifact in the artifact cache represents a success
# (str): Short description of the result
# (str): Detailed description of the result
#
- def load_build_result(self, key):
+ def load_build_result(self):
- assert key is not None
- artifact_vdir, _ = self._get_directory(key)
+ meta_vdir, _ = self._get_subdirectory('meta')
- meta_file = artifact_vdir._objpath('meta', 'build-result.yaml')
+ meta_file = meta_vdir._objpath('build-result.yaml')
if not os.path.exists(meta_file):
build_result = (True, "succeeded", None)
return build_result
- data = _yaml.load(meta_file, shortname='meta/build-result.yaml')
+ data = _yaml.load(meta_file, shortname='build-result.yaml')
success = _yaml.node_get(data, bool, 'success')
description = _yaml.node_get(data, str, 'description', default_value=None)
@@ -291,170 +269,133 @@ class Artifact():
#
# Retrieve the strong and weak keys from the given artifact.
#
- # Args:
- # key (str): The artifact key, or None for the default key
- # metadata_keys (dict): The elements cached strong/weak
- # metadata keys, empty if not yet cached
- #
# Returns:
# (str): The strong key
# (str): The weak key
- # (dict): The key dict, None if not updated
#
- def get_metadata_keys(self, key, metadata_keys):
+ def get_metadata_keys(self):
# Now extract it and possibly derive the key
- artifact_vdir, key = self._get_directory(key)
+ meta_vdir, key = self._get_subdirectory('meta')
# Now try the cache, once we're sure about the key
- if key in metadata_keys:
- return (metadata_keys[key]['strong'],
- metadata_keys[key]['weak'], None)
+ if key in self._metadata_keys:
+ return (self._metadata_keys[key]['strong'], self._metadata_keys[key]['weak'])
# Parse the expensive yaml now and cache the result
- meta_file = artifact_vdir._objpath('meta', 'keys.yaml')
- meta = _yaml.load(meta_file, shortname='meta/keys.yaml')
+ meta_file = meta_vdir._objpath('keys.yaml')
+ meta = _yaml.load(meta_file, shortname='keys.yaml')
strong_key = _yaml.node_get(meta, str, 'strong')
weak_key = _yaml.node_get(meta, str, 'weak')
assert key in (strong_key, weak_key)
- metadata_keys[strong_key] = _yaml.node_sanitize(meta)
- metadata_keys[weak_key] = _yaml.node_sanitize(meta)
+ self._metadata_keys[strong_key] = _yaml.node_sanitize(meta)
+ self._metadata_keys[weak_key] = _yaml.node_sanitize(meta)
- return (strong_key, weak_key, metadata_keys)
+ return (strong_key, weak_key)
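All four get_metadata_* accessors now share the same memoization shape: resolve the key, check the instance table, parse the YAML once, then store the result under both the strong and the weak key so a later lookup by either key hits the memo. Distilled into a hypothetical helper (not part of the patch):

```python
def memoized_metadata(memo, key, parse_yaml, get_keys):
    if key in memo:                  # fast path: already parsed under this key
        return memo[key]
    value = parse_yaml()             # slow path: load and parse the YAML file
    strong_key, weak_key = get_keys()
    memo[strong_key] = value         # cache under both keys, since a later
    memo[weak_key] = value           # caller may resolve either one first
    return value

memo = {}
value = memoized_metadata(memo, "key-w",
                          parse_yaml=lambda: {"strong": "key-s", "weak": "key-w"},
                          get_keys=lambda: ("key-s", "key-w"))
assert memo["key-s"] is memo["key-w"] is value
```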
# get_metadata_dependencies():
#
# Retrieve the hash of dependency keys from the given artifact.
#
- # Args:
- # key (str): The artifact key, or None for the default key
- # metadata_dependencies (dict): The elements cached dependency metadata keys,
- # empty if not yet cached
- # metadata_keys (dict): The elements cached strong/weak
- # metadata keys, empty if not yet cached
- #
# Returns:
# (dict): A dictionary of element names and their keys
- # (dict): The depedencies key dict, None if not updated
- # (dict): The elements key dict, None if not updated
#
- def get_metadata_dependencies(self, key, metadata_dependencies, metadata_keys):
+ def get_metadata_dependencies(self):
# Extract it and possibly derive the key
- artifact_vdir, key = self._get_directory(key)
+ meta_vdir, key = self._get_subdirectory('meta')
# Now try the cache, once we're sure about the key
- if key in metadata_dependencies:
- return (metadata_dependencies[key], None, None)
+ if key in self._metadata_dependencies:
+ return self._metadata_dependencies[key]
# Parse the expensive yaml now and cache the result
- meta_file = artifact_vdir._objpath('meta', 'dependencies.yaml')
- meta = _yaml.load(meta_file, shortname='meta/dependencies.yaml')
+ meta_file = meta_vdir._objpath('dependencies.yaml')
+ meta = _yaml.load(meta_file, shortname='dependencies.yaml')
# Cache it under both strong and weak keys
- strong_key, weak_key, metadata_keys = self.get_metadata_keys(key, metadata_keys)
- metadata_dependencies[strong_key] = _yaml.node_sanitize(meta)
- metadata_dependencies[weak_key] = _yaml.node_sanitize(meta)
+ strong_key, weak_key = self.get_metadata_keys()
+ self._metadata_dependencies[strong_key] = _yaml.node_sanitize(meta)
+ self._metadata_dependencies[weak_key] = _yaml.node_sanitize(meta)
- return (meta, metadata_dependencies, metadata_keys)
+ return meta
# get_metadata_workspaced():
#
# Retrieve whether the given artifact was workspaced.
#
- # Args:
- # key (str): The artifact key, or None for the default key
- # meta_data_workspaced (dict): The elements cached boolean metadata
- # of whether it's workspaced, empty if
- # not yet cached
- # metadata_keys (dict): The elements cached strong/weak
- # metadata keys, empty if not yet cached
- #
# Returns:
# (bool): Whether the given artifact was workspaced
- # (dict): The workspaced key dict, None if not updated
- # (dict): The elements key dict, None if not updated
#
- def get_metadata_workspaced(self, key, metadata_workspaced, metadata_keys):
+ def get_metadata_workspaced(self):
# Extract it and possibly derive the key
- artifact_vdir, key = self._get_directory(key)
+ meta_vdir, key = self._get_subdirectory('meta')
# Now try the cache, once we're sure about the key
- if key in metadata_workspaced:
- return (metadata_workspaced[key], None, None)
+ if key in self._metadata_workspaced:
+ return self._metadata_workspaced[key]
# Parse the expensive yaml now and cache the result
- meta_file = artifact_vdir._objpath('meta', 'workspaced.yaml')
- meta = _yaml.load(meta_file, shortname='meta/workspaced.yaml')
+ meta_file = meta_vdir._objpath('workspaced.yaml')
+ meta = _yaml.load(meta_file, shortname='workspaced.yaml')
+
workspaced = _yaml.node_get(meta, bool, 'workspaced')
# Cache it under both strong and weak keys
- strong_key, weak_key, metadata_keys = self.get_metadata_keys(key, metadata_keys)
- metadata_workspaced[strong_key] = workspaced
- metadata_workspaced[weak_key] = workspaced
+ strong_key, weak_key = self.get_metadata_keys()
+ self._metadata_workspaced[strong_key] = workspaced
+ self._metadata_workspaced[weak_key] = workspaced
- return (workspaced, metadata_workspaced, metadata_keys)
+ return workspaced
# get_metadata_workspaced_dependencies():
#
# Retrieve the list of workspaced dependencies from the given artifact.
#
- # Args:
- # key (str): The artifact key, or None for the default key
- # metadata_workspaced_dependencies (dict): The elements cached metadata of
- # which dependencies are workspaced,
- # empty if not yet cached
- # metadata_keys (dict): The elements cached strong/weak
- # metadata keys, empty if not yet cached
- #
# Returns:
# (list): List of which dependencies are workspaced
- # (dict): The workspaced depedencies key dict, None if not updated
- # (dict): The elements key dict, None if not updated
#
- def get_metadata_workspaced_dependencies(self, key, metadata_workspaced_dependencies,
- metadata_keys):
+ def get_metadata_workspaced_dependencies(self):
# Extract it and possibly derive the key
- artifact_vdir, key = self._get_directory(key)
+ meta_vdir, key = self._get_subdirectory('meta')
# Now try the cache, once we're sure about the key
- if key in metadata_workspaced_dependencies:
- return (metadata_workspaced_dependencies[key], None, None)
+ if key in self._metadata_workspaced_dependencies:
+ return self._metadata_workspaced_dependencies[key]
# Parse the expensive yaml now and cache the result
- meta_file = artifact_vdir._objpath('meta', 'workspaced-dependencies.yaml')
- meta = _yaml.load(meta_file, shortname='meta/workspaced-dependencies.yaml')
+ meta_file = meta_vdir._objpath('workspaced-dependencies.yaml')
+ meta = _yaml.load(meta_file, shortname='workspaced-dependencies.yaml')
workspaced = _yaml.node_sanitize(_yaml.node_get(meta, list, 'workspaced-dependencies'))
# Cache it under both strong and weak keys
- strong_key, weak_key, metadata_keys = self.get_metadata_keys(key, metadata_keys)
- metadata_workspaced_dependencies[strong_key] = workspaced
- metadata_workspaced_dependencies[weak_key] = workspaced
- return (workspaced, metadata_workspaced_dependencies, metadata_keys)
+ strong_key, weak_key = self.get_metadata_keys()
+ self._metadata_workspaced_dependencies[strong_key] = workspaced
+ self._metadata_workspaced_dependencies[weak_key] = workspaced
+
+ return workspaced
# cached():
#
- # Check whether the artifact corresponding to the specified cache key is
+ # Check whether the artifact corresponding to the stored cache key is
# available. This also checks whether all required parts of the artifact
- # are available, which may depend on command and configuration.
+ # are available, which may depend on command and configuration. The cache
+ # key used for querying is dependent on the current context.
#
# This is used by _update_state() to set __strong_cached and __weak_cached.
#
- # Args:
- # key (str): The artifact key
- #
# Returns:
# (bool): Whether artifact is in local cache
#
- def cached(self, key):
+ def cached(self):
context = self._context
try:
- vdir, _ = self._get_directory(key)
+ vdir, _ = self._get_directory()
except ArtifactError:
# Either ref or top-level artifact directory missing
return False
@@ -484,20 +425,17 @@ class Artifact():
#
# Check if the artifact is cached with log files.
#
- # Args:
- # key (str): The artifact key
- #
# Returns:
# (bool): True if artifact is cached with logs, False if
# element not cached or missing logs.
#
- def cached_logs(self, key=None):
+ def cached_logs(self):
if not self._element._cached():
return False
- vdir, _ = self._get_directory(key)
+ log_vdir, _ = self._get_subdirectory('logs')
- logsdigest = vdir._get_child_digest('logs')
+ logsdigest = log_vdir._get_digest()
return self._artifacts.cas.contains_directory(logsdigest, with_files=True)
# _get_directory():
diff --git a/buildstream/element.py b/buildstream/element.py
index bc8f25cf8..95081b940 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -229,19 +229,14 @@ class Element(Plugin):
self.__required = False # Whether the artifact is required in the current session
self.__artifact_files_required = False # Whether artifact files are required in the local cache
self.__build_result = None # The result of assembling this Element (success, description, detail)
- self._build_log_path = None # The path of the build log for this Element
- self.__artifact = Artifact(self, context) # Artifact class for direct artifact composite interaction
+ self._build_log_path = None # The path of the build log for this Element
+ self.__artifact = None # Artifact class for direct artifact composite interaction
+ self.__strict_artifact = None # Artifact for strict cache key
self.__batch_prepare_assemble = False # Whether batching across prepare()/assemble() is configured
self.__batch_prepare_assemble_flags = 0 # Sandbox flags for batching across prepare()/assemble()
self.__batch_prepare_assemble_collect = None # Collect dir for batching across prepare()/assemble()
- # hash tables of loaded artifact metadata, hashed by key
- self.__metadata_keys = {} # Strong and weak keys for this key
- self.__metadata_dependencies = {} # Dictionary of dependency strong keys
- self.__metadata_workspaced = {} # Boolean of whether it's workspaced
- self.__metadata_workspaced_dependencies = {} # List of which dependencies are workspaced
-
# Ensure we have loaded this class's defaults
self.__init_defaults(plugin_conf)
@@ -740,10 +735,11 @@ class Element(Plugin):
files_written = {}
old_dep_keys = None
workspace = self._get_workspace()
+ context = self._get_context()
if self.__can_build_incrementally() and workspace.last_successful:
- # Workspaces do not need to work with the special node types
- old_dep_keys = self.__get_artifact_metadata_dependencies(workspace.last_successful)
+ last_successful = Artifact(self, context, strong_key=workspace.last_successful)
+ old_dep_keys = last_successful.get_metadata_dependencies()
for dep in self.dependencies(scope):
# If we are workspaced, and we therefore perform an
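For incremental workspace builds, the old private wrapper on Element is replaced by constructing a short-lived Artifact bound to the last successful strong key and querying it directly. No weak_key is passed: the recorded strong key is sufficient to locate the cached metadata, and get_extract_key() picks it up first. The call pattern, as comments (names as in the diff, not a runnable excerpt):

```python
# Illustrative call pattern for reading an old build's dependency keys:
#
#   last_successful = Artifact(self, context, strong_key=workspace.last_successful)
#   old_dep_keys = last_successful.get_metadata_dependencies()
```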
@@ -1055,7 +1051,10 @@ class Element(Plugin):
# (str): Detailed description of the result
#
def _get_build_result(self):
- return self.__get_build_result(keystrength=None)
+ if self.__build_result is None:
+ self.__load_build_result()
+
+ return self.__build_result
# __set_build_result():
#
@@ -1076,7 +1075,11 @@ class Element(Plugin):
# the artifact cache and the element assembled successfully
#
def _cached_success(self):
- return self.__cached_success(keystrength=None)
+ if not self._cached():
+ return False
+
+ success, _, _ = self._get_build_result()
+ return success
# _cached_failure():
#
@@ -1180,6 +1183,8 @@ class Element(Plugin):
self.__strong_cached = None
self.__weak_cached = None
self.__build_result = None
+ self.__artifact = None
+ self.__strict_artifact = None
return
if self.__weak_cache_key is None:
@@ -1203,7 +1208,10 @@ class Element(Plugin):
return
if not context.get_strict():
- self.__weak_cached = self.__artifact.cached(self.__weak_cache_key)
+ # We've calculated the weak_key, so instantiate artifact instance member
+ self.__artifact = Artifact(self, context, weak_key=self.__weak_cache_key)
+ # and update the weak cached state (required early for workspaces)
+ self.__weak_cached = self.__artifact.cached()
if not context.get_strict():
# Full cache query in non-strict mode requires both the weak and
@@ -1213,7 +1221,7 @@ class Element(Plugin):
# are sufficient. However, don't update the `cached` attributes
# until the full cache query below.
if (not self.__assemble_scheduled and not self.__assemble_done and
- not self.__cached_success(keystrength=_KeyStrength.WEAK) and
+ not self._cached_success() and
not self._pull_pending()):
# For uncached workspaced elements, assemble is required
# even if we only need the cache key
@@ -1226,17 +1234,28 @@ class Element(Plugin):
e.__strict_cache_key for e in self.dependencies(Scope.BUILD)
]
self.__strict_cache_key = self._calculate_cache_key(dependencies)
+
if self.__strict_cache_key is None:
# Strict cache key could not be calculated yet
return
- # Query caches now that the weak and strict cache keys are available
- key_for_cache_lookup = self.__strict_cache_key if context.get_strict() else self.__weak_cache_key
+ self.__strict_artifact = Artifact(self, context, strong_key=self.__strict_cache_key,
+ weak_key=self.__weak_cache_key)
+
+ # In strict mode, the strong cache key always matches the strict cache key
+ if context.get_strict():
+ self.__cache_key = self.__strict_cache_key
+ self.__artifact = self.__strict_artifact
+
+ # Query caches now that the weak and strict cache keys are available.
+ # strong_cached in non-strict mode is only relevant when querying
+ # whether a 'better' artifact could be pulled, which is redundant if we
+ # already have it cached locally with a strict_key. As such, strong_cached
+ # is only checked against the 'strict' artifact.
if not self.__strong_cached:
- self.__strong_cached = self.__artifact.cached(self.__strict_cache_key)
- if key_for_cache_lookup == self.__weak_cache_key:
- if not self.__weak_cached:
- self.__weak_cached = self.__artifact.cached(self.__weak_cache_key)
+ self.__strong_cached = self.__strict_artifact.cached()
+ if not self.__weak_cached and not context.get_strict():
+ self.__weak_cached = self.__artifact.cached()
if (not self.__assemble_scheduled and not self.__assemble_done and
not self._cached_success() and not self._pull_pending()):
@@ -1251,16 +1270,14 @@ class Element(Plugin):
self._schedule_assemble()
return
+ # __cache_key can be None here only in non-strict mode
if self.__cache_key is None:
- # Calculate strong cache key
- if context.get_strict():
- self.__cache_key = self.__strict_cache_key
- elif self._pull_pending():
+ if self._pull_pending():
# Effective strong cache key is unknown until after the pull
pass
elif self._cached():
# Load the strong cache key from the artifact
- strong_key, _ = self.__get_artifact_metadata_keys()
+ strong_key, _ = self.__artifact.get_metadata_keys()
self.__cache_key = strong_key
elif self.__assemble_scheduled or self.__assemble_done:
# Artifact will or has been built, not downloaded
@@ -1273,6 +1290,9 @@ class Element(Plugin):
# Strong cache key could not be calculated yet
return
+ # Now we have the strong cache key, update the Artifact
+ self.__artifact._cache_key = self.__cache_key
+
if not self.__ready_for_runtime and self.__cache_key is not None:
self.__ready_for_runtime = all(
dep.__ready_for_runtime for dep in self.__runtime_dependencies)
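The restructured tail of _update_state resolves the strong cache key through several branches; the following condensed decision function is illustrative only, compressing the diff's branches (including the strict-mode short-circuit from the earlier hunk) into one place under the assumption that a pending local build resolves to the strict key:

```python
def resolve_strong_key(strict, strict_key, pull_pending, artifact, assembling):
    if strict:
        return strict_key            # strict mode: strong key == strict key
    if pull_pending:
        return None                  # unknown until after the pull completes
    if artifact is not None and artifact.cached():
        strong, _ = artifact.get_metadata_keys()
        return strong                # read from the cached artifact's metadata
    if assembling:
        return strict_key            # will or has been built locally
    return None                      # cannot be determined yet
```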
@@ -1792,11 +1812,10 @@ class Element(Plugin):
# ensure we have cache keys
self._assemble_done()
- keys = self.__get_cache_keys_for_commit()
with self.timed_activity("Caching artifact"):
artifact_size = self.__artifact.cache(rootdir, sandbox_build_dir, collectvdir,
- buildresult, keys, publicdata)
+ buildresult, publicdata)
if collect is not None and collectvdir is None:
raise ElementError(
@@ -2132,6 +2151,9 @@ class Element(Plugin):
# not its contents.
#
def _cached_buildtree(self):
+ if not self._cached():
+ return False
+
return self.__artifact.cached_buildtree()
# _buildtree_exists()
@@ -2140,9 +2162,13 @@ class Element(Plugin):
# whether the buildtree is present in the local cache.
#
# Returns:
- # (bool): True if artifact was created with buildtree
+ # (bool): True if artifact was created with buildtree, False if
+ # element not cached or not created with a buildtree.
#
def _buildtree_exists(self):
+ if not self._cached():
+ return False
+
return self.__artifact.buildtree_exists()
# _cached_logs()
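The cached-at-all guard moves from Artifact up into Element for both buildtree queries, so the Artifact methods may assume a cached artifact exists while Element answers the cheap question first. The division of labour in isolation (duck-typed sketch, not the real methods):

```python
def cached_buildtree(element, artifact):
    # Element-level guard: nothing cached means no buildtree at all.
    if not element._cached():
        return False
    # Artifact-level check: is the 'buildtree' subdirectory present?
    return artifact.cached_buildtree()
```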
@@ -2364,10 +2390,10 @@ class Element(Plugin):
if recalculate or self.__tainted is None:
# Whether this artifact has a workspace
- workspaced = self.__get_artifact_metadata_workspaced()
+ workspaced = self.__artifact.get_metadata_workspaced()
# Whether this artifact's dependencies have workspaces
- workspaced_dependencies = self.__get_artifact_metadata_workspaced_dependencies()
+ workspaced_dependencies = self.__artifact.get_metadata_workspaced_dependencies()
# Other conditions should be or-ed
self.__tainted = (workspaced or workspaced_dependencies or
@@ -2774,138 +2800,21 @@ class Element(Plugin):
self.__whitelist_regex = re.compile(expression)
return self.__whitelist_regex.match(os.path.join(os.sep, path))
- # __get_artifact_metadata_keys():
- #
- # Retrieve the strong and weak keys from the given artifact.
- #
- # Args:
- # key (str): The artifact key, or None for the default key
- #
- # Returns:
- # (str): The strong key
- # (str): The weak key
- #
- def __get_artifact_metadata_keys(self, key=None):
-
- metadata_keys = self.__metadata_keys
-
- strong_key, weak_key, metadata_keys = self.__artifact.get_metadata_keys(key, metadata_keys)
-
- # Update keys if needed
- if metadata_keys:
- self.__metadata_keys = metadata_keys
-
- return (strong_key, weak_key)
-
- # __get_artifact_metadata_dependencies():
- #
- # Retrieve the hash of dependency strong keys from the given artifact.
- #
- # Args:
- # key (str): The artifact key, or None for the default key
- #
- # Returns:
- # (dict): A dictionary of element names and their strong keys
- #
- def __get_artifact_metadata_dependencies(self, key=None):
-
- metadata = [self.__metadata_dependencies, self.__metadata_keys]
- meta, meta_deps, meta_keys = self.__artifact.get_metadata_dependencies(key, *metadata)
-
- # Update deps if needed
- if meta_deps:
- self.__metadata_dependencies = meta_deps
- # Update keys if needed, no need to check if deps not updated
- if meta_keys:
- self.__metadata_keys = meta_keys
-
- return meta
-
- # __get_artifact_metadata_workspaced():
- #
- # Retrieve the hash of dependency strong keys from the given artifact.
- #
- # Args:
- # key (str): The artifact key, or None for the default key
- #
- # Returns:
- # (bool): Whether the given artifact was workspaced
- #
-
- def __get_artifact_metadata_workspaced(self, key=None):
-
- metadata = [self.__metadata_workspaced, self.__metadata_keys]
- workspaced, meta_workspaced, meta_keys = self.__artifact.get_metadata_workspaced(key, *metadata)
-
- # Update workspaced if needed
- if meta_workspaced:
- self.__metadata_workspaced = meta_workspaced
- # Update keys if needed, no need to check if workspaced not updated
- if meta_keys:
- self.__metadata_keys = meta_keys
-
- return workspaced
-
- # __get_artifact_metadata_workspaced_dependencies():
- #
- # Retrieve the hash of dependency strong keys from the given artifact.
- #
- # Args:
- # key (str): The artifact key, or None for the default key
- #
- # Returns:
- # (list): List of which dependencies are workspaced
- #
- def __get_artifact_metadata_workspaced_dependencies(self, key=None):
-
- metadata = [self.__metadata_workspaced_dependencies, self.__metadata_keys]
- workspaced, meta_workspaced_deps,\
- meta_keys = self.__artifact.get_metadata_workspaced_dependencies(key, *metadata)
-
- # Update workspaced if needed
- if meta_workspaced_deps:
- self.__metadata_workspaced_dependencies = meta_workspaced_deps
- # Update keys if needed, no need to check if workspaced not updated
- if meta_keys:
- self.__metadata_keys = meta_keys
-
- return workspaced
-
# __load_public_data():
#
# Loads the public data from the cached artifact
#
def __load_public_data(self):
+ self.__assert_cached()
assert self.__dynamic_public is None
self.__dynamic_public = self.__artifact.load_public_data()
- def __load_build_result(self, keystrength):
- self.__assert_cached(keystrength=keystrength)
+ def __load_build_result(self):
+ self.__assert_cached()
assert self.__build_result is None
- # _get_cache_key with _KeyStrength.STRONG returns self.__cache_key, which can be `None`
- # leading to a failed assertion from get_artifact_directory() using get_artifact_name(),
- # so explicility pass self.__strict_cache_key
- key = self.__weak_cache_key if keystrength is _KeyStrength.WEAK else self.__strict_cache_key
-
- self.__build_result = self.__artifact.load_build_result(key)
-
- def __get_build_result(self, keystrength):
- if keystrength is None:
- keystrength = _KeyStrength.STRONG if self._get_context().get_strict() else _KeyStrength.WEAK
-
- if self.__build_result is None:
- self.__load_build_result(keystrength)
-
- return self.__build_result
-
- def __cached_success(self, keystrength):
- if not self.__is_cached(keystrength=keystrength):
- return False
-
- success, _, _ = self.__get_build_result(keystrength=keystrength)
- return success
+ self.__build_result = self.__artifact.load_build_result()
def __get_cache_keys_for_commit(self):
keys = []