diff options

-rw-r--r--  buildstream/_artifactcache/artifactcache.py |  3 +-
-rw-r--r--  buildstream/_artifactcache/ostreecache.py    | 11 +----
-rw-r--r--  buildstream/_artifactcache/tarcache.py       |  8 +---
-rw-r--r--  buildstream/element.py                       | 22 ++++++---
4 files changed, 22 insertions(+), 22 deletions(-)
diff --git a/buildstream/_artifactcache/artifactcache.py b/buildstream/_artifactcache/artifactcache.py
index aef2cde1d..927876dfb 100644
--- a/buildstream/_artifactcache/artifactcache.py
+++ b/buildstream/_artifactcache/artifactcache.py
@@ -164,6 +164,7 @@ class ArtifactCache():
     #
     # Args:
     #     element (Element): The Element to extract
+    #     key (str): The cache key to use
     #
     # Raises:
     #     ArtifactError: In cases there was an OSError, or if the artifact
@@ -171,7 +172,7 @@ class ArtifactCache():
     #
     # Returns: path to extracted artifact
     #
-    def extract(self, element):
+    def extract(self, element, key):
         raise ImplError("Cache '{kind}' does not implement extract()"
                         .format(kind=type(self).__name__))
 
diff --git a/buildstream/_artifactcache/ostreecache.py b/buildstream/_artifactcache/ostreecache.py
index f50d6ae99..cae4f7269 100644
--- a/buildstream/_artifactcache/ostreecache.py
+++ b/buildstream/_artifactcache/ostreecache.py
@@ -213,6 +213,7 @@ class OSTreeCache(ArtifactCache):
     #
     # Args:
     #     element (Element): The Element to extract
+    #     key (str): The cache key to use
     #
     # Raises:
     #     ArtifactError: In cases there was an OSError, or if the artifact
@@ -220,18 +221,12 @@ class OSTreeCache(ArtifactCache):
     #
     # Returns: path to extracted artifact
     #
-    def extract(self, element):
-        ref = buildref(element, element._get_strict_cache_key())
+    def extract(self, element, key):
+        ref = buildref(element, key)
 
         # resolve ref to checksum
         rev = _ostree.checksum(self.repo, ref)
 
-        # resolve weak cache key, if artifact is missing for strong cache key
-        # and the context allows use of weak cache keys
-        if not rev and not element._get_strict():
-            ref = buildref(element, element._get_cache_key(strength=_KeyStrength.WEAK))
-            rev = _ostree.checksum(self.repo, ref)
-
         if not rev:
             raise ArtifactError("Artifact missing for {}".format(ref))
 
diff --git a/buildstream/_artifactcache/tarcache.py b/buildstream/_artifactcache/tarcache.py
index d1d6c035a..d0d019649 100644
--- a/buildstream/_artifactcache/tarcache.py
+++ b/buildstream/_artifactcache/tarcache.py
@@ -283,18 +283,12 @@ class TarCache(ArtifactCache):
     #
     # Implements artifactcache.extract().
     #
-    def extract(self, element):
-        key = element._get_strict_cache_key()
+    def extract(self, element, key):
         ref = buildref(element, key)
         path = tarpath(element, key)
 
         if not os.path.isfile(os.path.join(self.tardir, path)):
-            key = element._get_cache_key(strength=_KeyStrength.WEAK)
-            ref = buildref(element, key)
-            path = tarpath(element, key)
-
-            if not os.path.isfile(os.path.join(self.tardir, path)):
-                raise ArtifactError("Artifact missing for {}".format(ref))
+            raise ArtifactError("Artifact missing for {}".format(ref))
 
         # If the destination already exists, the artifact has been extracted
 
diff --git a/buildstream/element.py b/buildstream/element.py
index 3338f388b..0d67f8dae 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -421,7 +421,7 @@ class Element(Plugin):
         with self.timed_activity("Staging {}/{}".format(self.name, self._get_display_key())):
 
             # Get the extracted artifact
-            artifact = os.path.join(self.__artifacts.extract(self), 'files')
+            artifact = os.path.join(self.__extract(), 'files')
 
             # Hard link it into the staging area
             #
@@ -1251,7 +1251,7 @@ class Element(Plugin):
         if self.__workspaced_artifact is None:
             self._assert_cached()
 
-            metadir = os.path.join(self.__artifacts.extract(self), 'meta')
+            metadir = os.path.join(self.__extract(), 'meta')
             meta = _yaml.load(os.path.join(metadir, 'artifact.yaml'))
             if 'workspaced' in meta:
                 self.__workspaced_artifact = meta['workspaced']
@@ -1265,7 +1265,7 @@ class Element(Plugin):
         if self.__workspaced_dependencies_artifact is None:
             self._assert_cached()
 
-            metadir = os.path.join(self.__artifacts.extract(self), 'meta')
+            metadir = os.path.join(self.__extract(), 'meta')
             meta = _yaml.load(os.path.join(metadir, 'artifact.yaml'))
             if 'workspaced_dependencies' in meta:
                 self.__workspaced_dependencies_artifact = meta['workspaced_dependencies']
@@ -1573,7 +1573,7 @@ class Element(Plugin):
             pass
         elif self._cached():
             # Load the strong cache key from the artifact
-            metadir = os.path.join(self.__artifacts.extract(self), 'meta')
+            metadir = os.path.join(self.__extract(), 'meta')
             meta = _yaml.load(os.path.join(metadir, 'artifact.yaml'))
             self.__cache_key = meta['keys']['strong']
         elif self._buildable():
@@ -1773,7 +1773,7 @@ class Element(Plugin):
         }
 
     def __compute_splits(self, include=None, exclude=None, orphans=True):
-        basedir = os.path.join(self.__artifacts.extract(self), 'files')
+        basedir = os.path.join(self.__extract(), 'files')
 
         # No splitting requested, just report complete artifact
         if orphans and not (include or exclude):
@@ -1823,12 +1823,22 @@ class Element(Plugin):
             if include_file and not exclude_file:
                 yield filename.lstrip(os.sep)
 
+    def __extract(self):
+        key = self.__strict_cache_key
+
+        # Use weak cache key, if artifact is missing for strong cache key
+        # and the context allows use of weak cache keys
+        if not self._get_strict() and not self.__artifacts.contains(self, key):
+            key = self._get_cache_key(strength=_KeyStrength.WEAK)
+
+        return self.__artifacts.extract(self, key)
+
     def _load_public_data(self):
         self._assert_cached()
         assert(self.__dynamic_public is None)
 
         # Load the public data from the artifact
-        metadir = os.path.join(self.__artifacts.extract(self), 'meta')
+        metadir = os.path.join(self.__extract(), 'meta')
         self.__dynamic_public = _yaml.load(os.path.join(metadir, 'public.yaml'))
 
     def _subst_string(self, value):