author     Jürg Billeter <j@bitron.ch>    2019-07-24 15:19:56 +0200
committer  Jürg Billeter <j@bitron.ch>    2019-08-20 07:41:23 +0200
commit     d811298494c4cac7503904be79c3a2431c172bc6 (patch)
tree       4f5928be402b492a573998d3247dbbc7a9f8cda5
parent     63a219791e1bf2a5e2031bfd5849ffc28a8e8e26 (diff)
download   buildstream-d811298494c4cac7503904be79c3a2431c172bc6.tar.gz
_artifactcache.py: Remove unused code tracking required elements
-rw-r--r--    src/buildstream/_artifactcache.py    69
1 file changed, 0 insertions(+), 69 deletions(-)
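In short, the deleted code let the artifact cache pin artifacts needed by the current session: callers marked elements as required, and the cache bumped the mtime of each resolved artifact ref so that a concurrent cleanup in another BuildStream instance would treat those artifacts as recently used. A minimal standalone sketch of that mtime-bump pattern (pin_refs, artifactdir and refs are hypothetical stand-ins, not the BuildStream API):

import os

# Minimal sketch (hypothetical names, not the BuildStream API) of the
# "bump the mtime to pin an artifact" pattern used by the removed
# mark_required_elements() below.
def pin_refs(artifactdir, refs):
    for ref in refs:
        try:
            # Touching the ref updates its mtime, so an LRU-style
            # cleanup running concurrently sees it as recently used.
            os.utime(os.path.join(artifactdir, ref))
        except FileNotFoundError:
            # Not cached yet; nothing to pin.
            pass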
diff --git a/src/buildstream/_artifactcache.py b/src/buildstream/_artifactcache.py
index 736a98b27..adb3eb298 100644
--- a/src/buildstream/_artifactcache.py
+++ b/src/buildstream/_artifactcache.py
@@ -21,7 +21,6 @@ import os
 import grpc
 
 from ._basecache import BaseCache
-from .types import _KeyStrength
 from ._exceptions import ArtifactError, CASError, CASCacheError
 from ._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc, \
     artifact_pb2, artifact_pb2_grpc
@@ -96,84 +95,16 @@ class ArtifactCache(BaseCache):
     def __init__(self, context):
         super().__init__(context)
 
-        self._required_elements = set()  # The elements required for this session
-
         # create artifact directory
         self.artifactdir = context.artifactdir
         os.makedirs(self.artifactdir, exist_ok=True)
 
-    # mark_required_elements():
-    #
-    # Mark elements whose artifacts are required for the current run.
-    #
-    # Artifacts whose elements are in this list will be locked by the artifact
-    # cache and not touched for the duration of the current pipeline.
-    #
-    # Args:
-    #     elements (iterable): A set of elements to mark as required
-    #
-    def mark_required_elements(self, elements):
-
-        # We risk calling this function with a generator, so we'd
-        # better consume it first.
-        #
-        elements = list(elements)
-
-        # Mark the elements as required. We cannot be sure that we know
-        # the cache keys yet, so we only check that later when deleting.
-        #
-        self._required_elements.update(elements)
-
-        # For the cache keys which were resolved so far, we bump
-        # their mtimes.
-        #
-        # This is just in case we have concurrent instances of
-        # BuildStream running with the same artifact cache; it will
-        # reduce the likelihood of one instance deleting artifacts
-        # which are required by the other.
-        for element in elements:
-            strong_key = element._get_cache_key(strength=_KeyStrength.STRONG)
-            weak_key = element._get_cache_key(strength=_KeyStrength.WEAK)
-            for key in (strong_key, weak_key):
-                if key:
-                    ref = element.get_artifact_name(key)
-
-                    try:
-                        self.update_mtime(ref)
-                    except ArtifactError:
-                        pass
-
     def update_mtime(self, ref):
         try:
             os.utime(os.path.join(self.artifactdir, ref))
         except FileNotFoundError as e:
             raise ArtifactError("Couldn't find artifact: {}".format(ref)) from e
 
-    # unrequired_artifacts()
-    #
-    # Returns an iterator over artifacts that are not required in the build plan
-    #
-    # Returns:
-    #     (iter): Iterator over tuples of (float, str) where float is the mtime
-    #             and str is the artifact ref
-    #
-    def unrequired_artifacts(self):
-        required_artifacts = set(map(lambda x: x.get_artifact_name(),
-                                     self._required_elements))
-        for (mtime, artifact) in self._list_refs_mtimes(self.artifactdir):
-            if artifact not in required_artifacts:
-                yield (mtime, artifact)
-
-    def required_artifacts(self):
-        # Build a set of the cache keys which are required
-        # based on the required elements at cleanup time
-        #
-        # We lock both strong and weak keys - deleting one but not the
-        # other won't save space, but would be a user inconvenience.
-        for element in self._required_elements:
-            yield element._get_cache_key(strength=_KeyStrength.STRONG)
-            yield element._get_cache_key(strength=_KeyStrength.WEAK)
-
     # preflight():
     #
     # Preflight check.
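For reference, the deleted unrequired_artifacts() generator was the producer side of an LRU-style expiry: a cleanup pass sorts (mtime, ref) pairs oldest-first and deletes unrequired artifacts until the cache fits its quota. A rough sketch of such a consumer (lru_cleanup, max_refs and the os.walk scan are hypothetical stand-ins for BuildStream's actual cleanup and _list_refs_mtimes()):

import os

# Hypothetical sketch of the LRU-style cleanup that consumed the deleted
# unrequired_artifacts() generator; not BuildStream's actual cleanup code.
def lru_cleanup(artifactdir, required_refs, max_refs):
    # Gather (mtime, ref) pairs, mirroring what _list_refs_mtimes()
    # provided to the deleted code.
    entries = []
    for root, _, files in os.walk(artifactdir):
        for name in files:
            path = os.path.join(root, name)
            ref = os.path.relpath(path, artifactdir)
            entries.append((os.path.getmtime(path), ref))

    # Delete the oldest unrequired refs until the cache fits the quota.
    entries.sort()  # oldest mtime first
    excess = len(entries) - max_refs
    for mtime, ref in entries:
        if excess <= 0:
            break
        if ref in required_refs:
            continue  # pinned by the current session, never delete
        os.remove(os.path.join(artifactdir, ref))
        excess -= 1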