author    Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>  2018-09-08 19:03:11 +0900
committer Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>  2018-09-10 16:53:56 +0900
commit    791f7ddad656ce67022225a43c28927e3887b3a9 (patch)
tree      2714ce8c72124139860d9c399720eeabcdb502de
parent    a3825ba6896914dc19199b400bb2acc6250acae9 (diff)
_artifactcache/artifactcache.py: Sealing away the estimated size
Previously, the API contract was to expose the estimated_size variable on the
ArtifactCache instance for all to see; however, it is only relevant to the
ArtifactCache abstract class code. Subclasses were informed to update the
estimated_size variable in their calculate_cache_size() implementation.

To untangle this and hide away the estimated size, this commit does the following:

  o Introduces the ArtifactCache.compute_cache_size() API for external callers

  o ArtifactCache.compute_cache_size() calls the abstract method for the
    CasCache subclass to implement

  o ArtifactCache.compute_cache_size() updates the private estimated_size variable

  o All direct callers of ArtifactCache.calculate_cache_size() have been updated
    to use the ArtifactCache.compute_cache_size() method instead, which takes care
    of updating anything local to the ArtifactCache abstract class code
    (the estimated_size)
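To make the new contract easier to see at a glance, here is a minimal sketch of the
pattern this patch introduces, reduced to the two methods involved. The DummyCache
subclass and its hard-coded size are hypothetical stand-ins for CASCache measuring
its cas directory, not part of the patch itself:

    class ArtifactCache:
        def __init__(self):
            # Now private: only the abstract class code touches the estimate.
            self._estimated_size = None

        # Public entry point for external callers: computes the real size via
        # the abstract method and refreshes the private estimate as a side effect.
        def compute_cache_size(self):
            cache_size = self.calculate_cache_size()
            self._estimated_size = cache_size
            return cache_size

        # Abstract method: subclasses only report the real size and no longer
        # update any estimate themselves.
        def calculate_cache_size(self):
            raise NotImplementedError("subclass must implement calculate_cache_size()")

    class DummyCache(ArtifactCache):
        # Hypothetical subclass; stands in for CASCache's real directory measurement.
        def calculate_cache_size(self):
            return 4096

    cache = DummyCache()
    assert cache.compute_cache_size() == 4096   # external callers use compute_cache_size()
    assert cache._estimated_size == 4096        # the estimate was kept up to date
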
-rw-r--r--  buildstream/_artifactcache/artifactcache.py   63
-rw-r--r--  buildstream/_artifactcache/cascache.py          1
-rw-r--r--  buildstream/_scheduler/jobs/cachesizejob.py     2
-rw-r--r--  buildstream/_scheduler/jobs/elementjob.py       3
4 files changed, 41 insertions(+), 28 deletions(-)
diff --git a/buildstream/_artifactcache/artifactcache.py b/buildstream/_artifactcache/artifactcache.py
index 8d7ace2a6..e9a11e23d 100644
--- a/buildstream/_artifactcache/artifactcache.py
+++ b/buildstream/_artifactcache/artifactcache.py
@@ -81,12 +81,9 @@ ArtifactCacheSpec.__new__.__defaults__ = (None, None, None)
class ArtifactCache():
def __init__(self, context):
self.context = context
- self.required_artifacts = set()
self.extractdir = os.path.join(context.artifactdir, 'extract')
self.tmpdir = os.path.join(context.artifactdir, 'tmp')
- self.estimated_size = None
-
self.global_remote_specs = []
self.project_remote_specs = {}
@@ -94,6 +91,9 @@ class ArtifactCache():
self.cache_quota = None
self.cache_lower_threshold = None
+ self._required_artifacts = set()
+ self._estimated_size = None
+
os.makedirs(self.extractdir, exist_ok=True)
os.makedirs(self.tmpdir, exist_ok=True)
@@ -211,8 +211,8 @@ class ArtifactCache():
weak_key = element._get_cache_key(strength=_KeyStrength.WEAK)
for key in (strong_key, weak_key):
- if key and key not in self.required_artifacts:
- self.required_artifacts.add(key)
+ if key and key not in self._required_artifacts:
+ self._required_artifacts.add(key)
# We also update the usage times of any artifacts
# we will be using, which helps preventing a
@@ -230,7 +230,7 @@ class ArtifactCache():
def clean(self):
artifacts = self.list_artifacts()
- while self.calculate_cache_size() >= self.cache_quota - self.cache_lower_threshold:
+ while self.compute_cache_size() >= self.cache_quota - self.cache_lower_threshold:
try:
to_remove = artifacts.pop(0)
except IndexError:
@@ -244,7 +244,7 @@ class ArtifactCache():
"Please increase the cache-quota in {}."
.format(self.context.config_origin or default_conf))
- if self.calculate_cache_size() > self.cache_quota:
+ if self.compute_cache_size() > self.cache_quota:
raise ArtifactError("Cache too full. Aborting.",
detail=detail,
reason="cache-too-full")
@@ -252,13 +252,29 @@ class ArtifactCache():
break
key = to_remove.rpartition('/')[2]
- if key not in self.required_artifacts:
+ if key not in self._required_artifacts:
size = self.remove(to_remove)
if size:
self.cache_size -= size
# This should be O(1) if implemented correctly
- return self.calculate_cache_size()
+ return self.compute_cache_size()
+
+ # compute_cache_size()
+ #
+ # Computes the real artifact cache size by calling
+ # the abstract calculate_cache_size() method.
+ #
+ # Returns:
+ # (int): The size of the artifact cache.
+ #
+ def compute_cache_size(self):
+ cache_size = self.calculate_cache_size()
+
+ # Keep the estimated size updated here
+ self._estimated_size = cache_size
+
+ return cache_size
# get_approximate_cache_size()
#
@@ -284,29 +300,29 @@ class ArtifactCache():
def get_approximate_cache_size(self):
# If we don't currently have an estimate, figure out the real
# cache size.
- if self.estimated_size is None:
+ if self._estimated_size is None:
stored_size = self._read_cache_size()
if stored_size is not None:
- self.estimated_size = stored_size
+ self._estimated_size = stored_size
else:
- self.estimated_size = self.calculate_cache_size()
+ self.compute_cache_size()
- return self.estimated_size
+ return self._estimated_size
# add_artifact_size()
#
# Adds the reported size of a newly cached artifact to the
- # overall ArtifactCache.estimated_size.
+ # overall estimated size.
#
# Args:
# artifact_size (int): The size to add.
#
def add_artifact_size(self, artifact_size):
- if not self.estimated_size:
- self.estimated_size = self.calculate_cache_size()
+ if not self._estimated_size:
+ self.compute_cache_size()
- self.estimated_size += artifact_size
- self._write_cache_size(self.estimated_size)
+ self._estimated_size += artifact_size
+ self._write_cache_size(self._estimated_size)
# set_cache_size()
#
@@ -319,11 +335,11 @@ class ArtifactCache():
# cache_size (int): The size to set.
#
def set_cache_size(self, cache_size):
- self.estimated_size = cache_size
+ self._estimated_size = cache_size
# set_cache_size is called in cleanup, where it may set the cache to None
- if self.estimated_size is not None:
- self._write_cache_size(self.estimated_size)
+ if self._estimated_size is not None:
+ self._write_cache_size(self._estimated_size)
################################################
# Abstract methods for subclasses to implement #
@@ -515,11 +531,8 @@ class ArtifactCache():
#
# Return the real artifact cache size.
#
- # Implementations should also use this to update estimated_size.
- #
# Returns:
- #
- # (int) The size of the artifact cache.
+ # (int): The size of the artifact cache.
#
def calculate_cache_size(self):
raise ImplError("Cache '{kind}' does not implement calculate_cache_size()"
diff --git a/buildstream/_artifactcache/cascache.py b/buildstream/_artifactcache/cascache.py
index ce2b874da..e34f3ed7e 100644
--- a/buildstream/_artifactcache/cascache.py
+++ b/buildstream/_artifactcache/cascache.py
@@ -532,7 +532,6 @@ class CASCache(ArtifactCache):
def calculate_cache_size(self):
if self.cache_size is None:
self.cache_size = utils._get_dir_size(self.casdir)
- self.estimated_size = self.cache_size
return self.cache_size
diff --git a/buildstream/_scheduler/jobs/cachesizejob.py b/buildstream/_scheduler/jobs/cachesizejob.py
index a3d247fe3..be682e3de 100644
--- a/buildstream/_scheduler/jobs/cachesizejob.py
+++ b/buildstream/_scheduler/jobs/cachesizejob.py
@@ -27,7 +27,7 @@ class CacheSizeJob(Job):
self._cache = Platform._instance.artifactcache
def child_process(self):
- return self._cache.calculate_cache_size()
+ return self._cache.compute_cache_size()
def parent_complete(self, success, result):
self._cache.set_cache_size(result)
diff --git a/buildstream/_scheduler/jobs/elementjob.py b/buildstream/_scheduler/jobs/elementjob.py
index fcad20ce4..36527794e 100644
--- a/buildstream/_scheduler/jobs/elementjob.py
+++ b/buildstream/_scheduler/jobs/elementjob.py
@@ -110,7 +110,8 @@ class ElementJob(Job):
workspace = self._element._get_workspace()
artifact_size = self._element._get_artifact_size()
- cache_size = self._element._get_artifact_cache().calculate_cache_size()
+ artifacts = self._element._get_artifact_cache()
+ cache_size = artifacts.compute_cache_size()
if workspace is not None:
data['workspace'] = workspace.to_dict()