summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
author    Jürg Billeter <j@bitron.ch>  2019-04-01 11:57:41 +0200
committer Jürg Billeter <j@bitron.ch>  2019-04-12 11:17:33 +0200
commit    417a9fb231abac5c85e57b8216fe2d6012740045 (patch)
tree      ebbd57eeb5b7519f0f369a9d2312075d632a4b91
parent    6168faf1a8bc66994c8f5b67ea2871f596451416 (diff)
download  buildstream-417a9fb231abac5c85e57b8216fe2d6012740045.tar.gz
cascache.py: Make _required_blobs() public
-rw-r--r--    buildstream/_cas/cascache.py    59
1 file changed, 32 insertions, 27 deletions
diff --git a/buildstream/_cas/cascache.py b/buildstream/_cas/cascache.py
index eae3ef04d..3a3662aa8 100644
--- a/buildstream/_cas/cascache.py
+++ b/buildstream/_cas/cascache.py
@@ -276,7 +276,7 @@ class CASCache():
self._fetch_directory(remote, tree)
# Fetch files, excluded_subdirs determined in pullqueue
- required_blobs = self._required_blobs(tree, excluded_subdirs=excluded_subdirs)
+ required_blobs = self.required_blobs_for_directory(tree, excluded_subdirs=excluded_subdirs)
missing_blobs = self.local_missing_blobs(required_blobs)
if missing_blobs:
self.fetch_blobs(remote, missing_blobs)
@@ -647,7 +647,7 @@ class CASCache():
# Returns: List of missing Digest objects
#
def remote_missing_blobs_for_directory(self, remote, digest):
- required_blobs = self._required_blobs(digest)
+ required_blobs = self.required_blobs_for_directory(digest)
missing_blobs = dict()
# Limit size of FindMissingBlobs request
@@ -685,6 +685,36 @@ class CASCache():
missing_blobs.append(digest)
return missing_blobs
+ # required_blobs_for_directory():
+ #
+ # Generator that returns the Digests of all blobs in the tree specified by
+ # the Digest of the toplevel Directory object.
+ #
+ def required_blobs_for_directory(self, directory_digest, *, excluded_subdirs=None):
+ if not excluded_subdirs:
+ excluded_subdirs = []
+
+ # parse directory, and recursively add blobs
+ d = remote_execution_pb2.Digest()
+ d.hash = directory_digest.hash
+ d.size_bytes = directory_digest.size_bytes
+ yield d
+
+ directory = remote_execution_pb2.Directory()
+
+ with open(self.objpath(directory_digest), 'rb') as f:
+ directory.ParseFromString(f.read())
+
+ for filenode in directory.files:
+ d = remote_execution_pb2.Digest()
+ d.hash = filenode.digest.hash
+ d.size_bytes = filenode.digest.size_bytes
+ yield d
+
+ for dirnode in directory.directories:
+ if dirnode.name not in excluded_subdirs:
+ yield from self.required_blobs_for_directory(dirnode.digest)
+
################################################
# Local Private Methods #
################################################
@@ -881,31 +911,6 @@ class CASCache():
for dirnode in directory.directories:
self._reachable_refs_dir(reachable, dirnode.digest, update_mtime=update_mtime, check_exists=check_exists)
- def _required_blobs(self, directory_digest, *, excluded_subdirs=None):
- if not excluded_subdirs:
- excluded_subdirs = []
-
- # parse directory, and recursively add blobs
- d = remote_execution_pb2.Digest()
- d.hash = directory_digest.hash
- d.size_bytes = directory_digest.size_bytes
- yield d
-
- directory = remote_execution_pb2.Directory()
-
- with open(self.objpath(directory_digest), 'rb') as f:
- directory.ParseFromString(f.read())
-
- for filenode in directory.files:
- d = remote_execution_pb2.Digest()
- d.hash = filenode.digest.hash
- d.size_bytes = filenode.digest.size_bytes
- yield d
-
- for dirnode in directory.directories:
- if dirnode.name not in excluded_subdirs:
- yield from self._required_blobs(dirnode.digest)
-
# _temporary_object():
#
# Returns: