author    Jürg Billeter <j@bitron.ch>  2019-02-19 18:42:56 +0000
committer Jürg Billeter <j@bitron.ch>  2019-02-19 18:42:56 +0000
commit    36a60abe062d4c8514cffc374ecd2c926adac190 (patch)
tree      6906d6b82a485ff592aca671c4ab51561e16ea5a
parent    52c0c185d964bf696e320be97663c412e020b427 (diff)
parent    5e10e2e81bee10bab56ed5b8190f8332170d7096 (diff)
download  buildstream-36a60abe062d4c8514cffc374ecd2c926adac190.tar.gz
Merge branch 'raoul/870-root-cache-dir' into 'master'

root cache directory

Closes #870

See merge request BuildStream/buildstream!1100
-rw-r--r--  buildstream/_artifactcache.py                291
-rw-r--r--  buildstream/_cas/__init__.py                   2
-rw-r--r--  buildstream/_cas/cascache.py                 290
-rw-r--r--  buildstream/_context.py                       83
-rw-r--r--  buildstream/_frontend/status.py                2
-rw-r--r--  buildstream/_frontend/widget.py                4
-rw-r--r--  buildstream/_scheduler/jobs/cachesizejob.py    6
-rw-r--r--  buildstream/_scheduler/jobs/cleanupjob.py      8
-rw-r--r--  buildstream/data/userconfig.yaml               7
-rw-r--r--  buildstream/element.py                         2
-rw-r--r--  buildstream/plugintestutils/runcli.py         24
-rwxr-xr-x  conftest.py                                   12
-rwxr-xr-x  doc/bst2html.py                                5
-rw-r--r--  doc/sessions/running-commands.run              2
-rw-r--r--  tests/artifactcache/cache_size.py             10
-rw-r--r--  tests/artifactcache/expiry.py                 12
-rw-r--r--  tests/artifactcache/junctions.py               4
-rw-r--r--  tests/artifactcache/pull.py                   30
-rw-r--r--  tests/artifactcache/push.py                   32
-rw-r--r--  tests/frontend/pull.py                        32
-rw-r--r--  tests/integration/artifact.py                 27
-rw-r--r--  tests/integration/pullbuildtrees.py            9
-rw-r--r--  tests/integration/shellbuildtrees.py          18
-rw-r--r--  tests/integration/source-determinism.py       12
-rw-r--r--  tests/internals/context.py                     6
-rw-r--r--  tests/internals/pluginloading.py               2
-rw-r--r--  tests/testutils/artifactshare.py               1
27 files changed, 499 insertions(+), 434 deletions(-)
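
At a glance, this merge replaces the separate artifactdir and builddir settings with a single cachedir root, and moves all cache size accounting out of ArtifactCache into a new CASQuota class. A rough sketch of the directory layout that Context.load() now derives, assuming the directory names from the hunks below (derive_cache_layout is an illustrative helper, not BuildStream API):

    import os

    # Illustrative helper: derive the fixed sub-directories that
    # Context.load() now creates under the single 'cachedir' root.
    def derive_cache_layout(cachedir):
        return {
            'extractdir': os.path.join(cachedir, 'extract'),
            'tmpdir': os.path.join(cachedir, 'tmp'),
            'casdir': os.path.join(cachedir, 'cas'),
            'builddir': os.path.join(cachedir, 'build'),
        }

    print(derive_cache_layout(os.path.expanduser('~/.cache/buildstream')))
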
diff --git a/buildstream/_artifactcache.py b/buildstream/_artifactcache.py
index b72b20fda..b73304fac 100644
--- a/buildstream/_artifactcache.py
+++ b/buildstream/_artifactcache.py
@@ -22,12 +22,12 @@ import os
from collections.abc import Mapping
from .types import _KeyStrength
-from ._exceptions import ArtifactError, CASError, LoadError, LoadErrorReason
+from ._exceptions import ArtifactError, CASError
from ._message import Message, MessageType
from . import utils
from . import _yaml
-from ._cas import CASRemote, CASRemoteSpec
+from ._cas import CASRemote, CASRemoteSpec, CASCacheUsage
from .storage._casbaseddirectory import CasBasedDirectory
@@ -46,39 +46,6 @@ class ArtifactCacheSpec(CASRemoteSpec):
pass
-# ArtifactCacheUsage
-#
-# A simple object to report the current artifact cache
-# usage details.
-#
-# Note that this uses the user configured cache quota
-# rather than the internal quota with protective headroom
-# removed, to provide a more sensible value to display to
-# the user.
-#
-# Args:
-# artifacts (ArtifactCache): The artifact cache to get the status of
-#
-class ArtifactCacheUsage():
-
- def __init__(self, artifacts):
- context = artifacts.context
- self.quota_config = context.config_cache_quota # Configured quota
- self.quota_size = artifacts._cache_quota_original # Resolved cache quota in bytes
- self.used_size = artifacts.get_cache_size() # Size used by artifacts in bytes
- self.used_percent = 0 # Percentage of the quota used
- if self.quota_size is not None:
- self.used_percent = int(self.used_size * 100 / self.quota_size)
-
- # Formattable into a human readable string
- #
- def __str__(self):
- return "{} / {} ({}%)" \
- .format(utils._pretty_size(self.used_size, dec_places=1),
- self.quota_config,
- self.used_percent)
-
-
# An ArtifactCache manages artifacts.
#
# Args:
@@ -87,19 +54,17 @@ class ArtifactCacheUsage():
class ArtifactCache():
def __init__(self, context):
self.context = context
- self.extractdir = os.path.join(context.artifactdir, 'extract')
+ self.extractdir = context.extractdir
self.cas = context.get_cascache()
+ self.casquota = context.get_casquota()
+ self.casquota._calculate_cache_quota()
self.global_remote_specs = []
self.project_remote_specs = {}
self._required_elements = set() # The elements required for this session
- self._cache_size = None # The current cache size, sometimes it's an estimate
- self._cache_quota = None # The cache quota
- self._cache_quota_original = None # The cache quota as specified by the user, in bytes
- self._cache_quota_headroom = None # The headroom in bytes before reaching the quota or full disk
- self._cache_lower_threshold = None # The target cache size for a cleanup
+
self._remotes_setup = False # Check to prevent double-setup of remotes
# Per-project list of _CASRemote instances.
@@ -110,8 +75,6 @@ class ArtifactCache():
os.makedirs(self.extractdir, exist_ok=True)
- self._calculate_cache_quota()
-
# setup_remotes():
#
# Sets up which remotes to use
@@ -235,7 +198,7 @@ class ArtifactCache():
space_saved = 0
# Start off with an announcement with as much info as possible
- volume_size, volume_avail = self._get_cache_volume_size()
+ volume_size, volume_avail = self.casquota._get_cache_volume_size()
self._message(MessageType.STATUS, "Starting cache cleanup",
detail=("Elements required by the current build plan: {}\n" +
"User specified quota: {} ({})\n" +
@@ -243,8 +206,8 @@ class ArtifactCache():
"Cache volume: {} total, {} available")
.format(len(self._required_elements),
context.config_cache_quota,
- utils._pretty_size(self._cache_quota_original, dec_places=2),
- utils._pretty_size(self.get_cache_size(), dec_places=2),
+ utils._pretty_size(self.casquota._cache_quota, dec_places=2),
+ utils._pretty_size(self.casquota.get_cache_size(), dec_places=2),
utils._pretty_size(volume_size, dec_places=2),
utils._pretty_size(volume_avail, dec_places=2)))
@@ -261,9 +224,11 @@ class ArtifactCache():
])
# Do a real computation of the cache size once, just in case
- self.compute_cache_size()
+ self.casquota.compute_cache_size()
+ usage = CASCacheUsage(self.casquota)
+ self._message(MessageType.STATUS, "Cache usage recomputed: {}".format(usage))
- while self.get_cache_size() >= self._cache_lower_threshold:
+ while self.casquota.get_cache_size() >= self.casquota._cache_lower_threshold:
try:
to_remove = artifacts.pop(0)
except IndexError:
@@ -280,7 +245,7 @@ class ArtifactCache():
"Please increase the cache-quota in {} and/or make more disk space."
.format(removed_ref_count,
utils._pretty_size(space_saved, dec_places=2),
- utils._pretty_size(self.get_cache_size(), dec_places=2),
+ utils._pretty_size(self.casquota.get_cache_size(), dec_places=2),
len(self._required_elements),
(context.config_origin or default_conf)))
@@ -306,7 +271,7 @@ class ArtifactCache():
to_remove))
# Remove the size from the removed size
- self.set_cache_size(self._cache_size - size)
+ self.casquota.set_cache_size(self.casquota._cache_size - size)
# User callback
#
@@ -322,29 +287,12 @@ class ArtifactCache():
"Cache usage is now: {}")
.format(removed_ref_count,
utils._pretty_size(space_saved, dec_places=2),
- utils._pretty_size(self.get_cache_size(), dec_places=2)))
-
- return self.get_cache_size()
-
- # compute_cache_size()
- #
- # Computes the real artifact cache size by calling
- # the abstract calculate_cache_size() method.
- #
- # Returns:
- # (int): The size of the artifact cache.
- #
- def compute_cache_size(self):
- old_cache_size = self._cache_size
- new_cache_size = self.cas.calculate_cache_size()
-
- if old_cache_size != new_cache_size:
- self._cache_size = new_cache_size
+ utils._pretty_size(self.casquota.get_cache_size(), dec_places=2)))
- usage = ArtifactCacheUsage(self)
- self._message(MessageType.STATUS, "Cache usage recomputed: {}".format(usage))
+ return self.casquota.get_cache_size()
- return self._cache_size
+ def full(self):
+ return self.casquota.full()
# add_artifact_size()
#
@@ -355,71 +303,10 @@ class ArtifactCache():
# artifact_size (int): The size to add.
#
def add_artifact_size(self, artifact_size):
- cache_size = self.get_cache_size()
+ cache_size = self.casquota.get_cache_size()
cache_size += artifact_size
- self.set_cache_size(cache_size)
-
- # get_cache_size()
- #
- # Fetches the cached size of the cache, this is sometimes
- # an estimate and periodically adjusted to the real size
- # when a cache size calculation job runs.
- #
- # When it is an estimate, the value is either correct, or
- # it is greater than the actual cache size.
- #
- # Returns:
- # (int) An approximation of the artifact cache size, in bytes.
- #
- def get_cache_size(self):
-
- # If we don't currently have an estimate, figure out the real cache size.
- if self._cache_size is None:
- stored_size = self._read_cache_size()
- if stored_size is not None:
- self._cache_size = stored_size
- else:
- self.compute_cache_size()
-
- return self._cache_size
-
- # set_cache_size()
- #
- # Forcefully set the overall cache size.
- #
- # This is used to update the size in the main process after
- # having calculated in a cleanup or a cache size calculation job.
- #
- # Args:
- # cache_size (int): The size to set.
- #
- def set_cache_size(self, cache_size):
-
- assert cache_size is not None
-
- self._cache_size = cache_size
- self._write_cache_size(self._cache_size)
-
- # full()
- #
- # Checks if the artifact cache is full, either
- # because the user configured quota has been exceeded
- # or because the underlying disk is almost full.
- #
- # Returns:
- # (bool): True if the artifact cache is full
- #
- def full(self):
-
- if self.get_cache_size() > self._cache_quota:
- return True
-
- _, volume_avail = self._get_cache_volume_size()
- if volume_avail < self._cache_quota_headroom:
- return True
-
- return False
+ self.casquota.set_cache_size(cache_size)
# preflight():
#
@@ -885,142 +772,6 @@ class ArtifactCache():
with self.context.timed_activity("Initializing remote caches", silent_nested=True):
self.initialize_remotes(on_failure=remote_failed)
- # _write_cache_size()
- #
- # Writes the given size of the artifact to the cache's size file
- #
- # Args:
- # size (int): The size of the artifact cache to record
- #
- def _write_cache_size(self, size):
- assert isinstance(size, int)
- size_file_path = os.path.join(self.context.artifactdir, CACHE_SIZE_FILE)
- with utils.save_file_atomic(size_file_path, "w") as f:
- f.write(str(size))
-
- # _read_cache_size()
- #
- # Reads and returns the size of the artifact cache that's stored in the
- # cache's size file
- #
- # Returns:
- # (int): The size of the artifact cache, as recorded in the file
- #
- def _read_cache_size(self):
- size_file_path = os.path.join(self.context.artifactdir, CACHE_SIZE_FILE)
-
- if not os.path.exists(size_file_path):
- return None
-
- with open(size_file_path, "r") as f:
- size = f.read()
-
- try:
- num_size = int(size)
- except ValueError as e:
- raise ArtifactError("Size '{}' parsed from '{}' was not an integer".format(
- size, size_file_path)) from e
-
- return num_size
-
- # _calculate_cache_quota()
- #
- # Calculates and sets the cache quota and lower threshold based on the
- # quota set in Context.
- # It checks that the quota is both a valid expression, and that there is
- # enough disk space to satisfy that quota
- #
- def _calculate_cache_quota(self):
- # Headroom intended to give BuildStream a bit of leeway.
- # This acts as the minimum size of cache_quota and also
- # is taken from the user requested cache_quota.
- #
- if 'BST_TEST_SUITE' in os.environ:
- self._cache_quota_headroom = 0
- else:
- self._cache_quota_headroom = 2e9
-
- try:
- cache_quota = utils._parse_size(self.context.config_cache_quota,
- self.context.artifactdir)
- except utils.UtilError as e:
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "{}\nPlease specify the value in bytes or as a % of full disk space.\n"
- "\nValid values are, for example: 800M 10G 1T 50%\n"
- .format(str(e))) from e
-
- total_size, available_space = self._get_cache_volume_size()
- cache_size = self.get_cache_size()
-
- # Ensure system has enough storage for the cache_quota
- #
- # If cache_quota is none, set it to the maximum it could possibly be.
- #
- # Also check that cache_quota is at least as large as our headroom.
- #
- if cache_quota is None: # Infinity, set to max system storage
- cache_quota = cache_size + available_space
- if cache_quota < self._cache_quota_headroom: # Check minimum
- raise LoadError(LoadErrorReason.INVALID_DATA,
- "Invalid cache quota ({}): ".format(utils._pretty_size(cache_quota)) +
- "BuildStream requires a minimum cache quota of 2G.")
- elif cache_quota > total_size:
- # A quota greater than the total disk size is certianly an error
- raise ArtifactError("Your system does not have enough available " +
- "space to support the cache quota specified.",
- detail=("You have specified a quota of {quota} total disk space.\n" +
- "The filesystem containing {local_cache_path} only " +
- "has {total_size} total disk space.")
- .format(
- quota=self.context.config_cache_quota,
- local_cache_path=self.context.artifactdir,
- total_size=utils._pretty_size(total_size)),
- reason='insufficient-storage-for-quota')
- elif cache_quota > cache_size + available_space:
- # The quota does not fit in the available space, this is a warning
- if '%' in self.context.config_cache_quota:
- available = (available_space / total_size) * 100
- available = '{}% of total disk space'.format(round(available, 1))
- else:
- available = utils._pretty_size(available_space)
-
- self._message(MessageType.WARN,
- "Your system does not have enough available " +
- "space to support the cache quota specified.",
- detail=("You have specified a quota of {quota} total disk space.\n" +
- "The filesystem containing {local_cache_path} only " +
- "has {available_size} available.")
- .format(quota=self.context.config_cache_quota,
- local_cache_path=self.context.artifactdir,
- available_size=available))
-
- # Place a slight headroom (2e9 (2GB) on the cache_quota) into
- # cache_quota to try and avoid exceptions.
- #
- # Of course, we might still end up running out during a build
- # if we end up writing more than 2G, but hey, this stuff is
- # already really fuzzy.
- #
- self._cache_quota_original = cache_quota
- self._cache_quota = cache_quota - self._cache_quota_headroom
- self._cache_lower_threshold = self._cache_quota / 2
-
- # _get_cache_volume_size()
- #
- # Get the available space and total space for the volume on
- # which the artifact cache is located.
- #
- # Returns:
- # (int): The total number of bytes on the volume
- # (int): The number of available bytes on the volume
- #
- # NOTE: We use this stub to allow the test cases
- # to override what an artifact cache thinks
- # about it's disk size and available bytes.
- #
- def _get_cache_volume_size(self):
- return utils._get_volume_size(self.context.artifactdir)
-
# _configured_remote_artifact_cache_specs():
#
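
The net effect of the _artifactcache.py changes above is that ArtifactCache no longer owns any size state; it forwards all accounting to the CASQuota object it obtains from the context. A minimal sketch of that delegation, using simplified stand-in classes rather than the real ones (whose constructors take a Context):

    # Simplified stand-ins; the real classes live in
    # buildstream/_cas/cascache.py and buildstream/_artifactcache.py.
    class FakeCASQuota:
        def __init__(self):
            self._cache_size = 0
            self._cache_quota = 2 * 10**9

        def get_cache_size(self):
            return self._cache_size

        def set_cache_size(self, size):
            self._cache_size = size

        def full(self):
            return self._cache_size > self._cache_quota

    class FakeArtifactCache:
        def __init__(self, casquota):
            self.casquota = casquota

        # Mirrors ArtifactCache.add_artifact_size() after the refactor:
        # read the current size from the quota object and write it back.
        def add_artifact_size(self, artifact_size):
            cache_size = self.casquota.get_cache_size() + artifact_size
            self.casquota.set_cache_size(cache_size)

        # Mirrors the new ArtifactCache.full(), a pure forwarder.
        def full(self):
            return self.casquota.full()

    artifacts = FakeArtifactCache(FakeCASQuota())
    artifacts.add_artifact_size(512 * 10**6)
    assert not artifacts.full()
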
diff --git a/buildstream/_cas/__init__.py b/buildstream/_cas/__init__.py
index a88e41371..46bd9567f 100644
--- a/buildstream/_cas/__init__.py
+++ b/buildstream/_cas/__init__.py
@@ -17,5 +17,5 @@
# Authors:
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
-from .cascache import CASCache
+from .cascache import CASCache, CASQuota, CASCacheUsage
from .casremote import CASRemote, CASRemoteSpec
diff --git a/buildstream/_cas/cascache.py b/buildstream/_cas/cascache.py
index 792bf3eb9..fe25efce6 100644
--- a/buildstream/_cas/cascache.py
+++ b/buildstream/_cas/cascache.py
@@ -32,17 +32,53 @@ from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
from .._protos.buildstream.v2 import buildstream_pb2
from .. import utils
-from .._exceptions import CASCacheError
+from .._exceptions import CASCacheError, LoadError, LoadErrorReason
+from .._message import Message, MessageType
from .casremote import BlobNotFound, _CASBatchRead, _CASBatchUpdate
_BUFFER_SIZE = 65536
+CACHE_SIZE_FILE = "cache_size"
+
+
+# CASCacheUsage
+#
+# A simple object to report the current CAS cache usage details.
+#
+# Note that this uses the user configured cache quota
+# rather than the internal quota with protective headroom
+# removed, to provide a more sensible value to display to
+# the user.
+#
+# Args:
+# casquota (CASQuota): The CAS quota object to get the status of
+#
+class CASCacheUsage():
+
+ def __init__(self, casquota):
+ self.quota_config = casquota._config_cache_quota # Configured quota
+ self.quota_size = casquota._cache_quota_original # Resolved cache quota in bytes
+ self.used_size = casquota.get_cache_size() # Size used by artifacts in bytes
+ self.used_percent = 0 # Percentage of the quota used
+ if self.quota_size is not None:
+ self.used_percent = int(self.used_size * 100 / self.quota_size)
+
+ # Formattable into a human readable string
+ #
+ def __str__(self):
+ return "{} / {} ({}%)" \
+ .format(utils._pretty_size(self.used_size, dec_places=1),
+ self.quota_config,
+ self.used_percent)
+
+
# A CASCache manages a CAS repository as specified in the Remote Execution API.
#
# Args:
# path (str): The root directory for the CAS repository
+# cache_quota (int): User configured cache quota
#
class CASCache():
@@ -459,16 +495,6 @@ class CASCache():
except FileNotFoundError as e:
raise CASCacheError("Attempt to access unavailable ref: {}".format(e)) from e
- # calculate_cache_size()
- #
- # Return the real disk usage of the CAS cache.
- #
- # Returns:
- # (int): The size of the cache.
- #
- def calculate_cache_size(self):
- return utils._get_dir_size(self.casdir)
-
# list_refs():
#
# List refs in Least Recently Modified (LRM) order.
@@ -1043,6 +1069,248 @@ class CASCache():
batch.send()
+class CASQuota:
+ def __init__(self, context):
+ self.cas = context.get_cascache()
+ self.casdir = self.cas.casdir
+ self._config_cache_quota = context.config_cache_quota
+ self._config_cache_quota_string = context.config_cache_quota_string
+ self._cache_size = None # The current cache size, sometimes it's an estimate
+ self._cache_quota = None # The cache quota
+ self._cache_quota_original = None # The cache quota as specified by the user, in bytes
+ self._cache_quota_headroom = None # The headroom in bytes before reaching the quota or full disk
+ self._cache_lower_threshold = None # The target cache size for a cleanup
+ self.available_space = None
+
+ self._message = context.message
+
+ self._calculate_cache_quota()
+
+ # compute_cache_size()
+ #
+ # Computes the real artifact cache size by calling
+ # the abstract calculate_cache_size() method.
+ #
+ # Returns:
+ # (int): The size of the artifact cache.
+ #
+ def compute_cache_size(self):
+ old_cache_size = self._cache_size
+ new_cache_size = self.calculate_cache_size()
+
+ if old_cache_size != new_cache_size:
+ self._cache_size = new_cache_size
+
+ return self._cache_size
+
+ # calculate_cache_size()
+ #
+ # Return the real disk usage of the CAS cache.
+ #
+ # Returns:
+ # (int): The size of the cache.
+ #
+ def calculate_cache_size(self):
+ return utils._get_dir_size(self.casdir)
+
+ # get_cache_size()
+ #
+ # Fetches the cached size of the cache, this is sometimes
+ # an estimate and periodically adjusted to the real size
+ # when a cache size calculation job runs.
+ #
+ # When it is an estimate, the value is either correct, or
+ # it is greater than the actual cache size.
+ #
+ # Returns:
+ # (int) An approximation of the artifact cache size, in bytes.
+ #
+ def get_cache_size(self):
+
+ # If we don't currently have an estimate, figure out the real cache size.
+ if self._cache_size is None:
+ stored_size = self._read_cache_size()
+ if stored_size is not None:
+ self._cache_size = stored_size
+ else:
+ self._cache_size = self.compute_cache_size()
+
+ return self._cache_size
+
+ # set_cache_size()
+ #
+ # Forcefully set the overall cache size.
+ #
+ # This is used to update the size in the main process after
+ # having calculated in a cleanup or a cache size calculation job.
+ #
+ # Args:
+ # cache_size (int): The size to set.
+ #
+ def set_cache_size(self, cache_size):
+
+ assert cache_size is not None
+
+ self._cache_size = cache_size
+ self._write_cache_size(self._cache_size)
+
+ # full()
+ #
+ # Checks if the artifact cache is full, either
+ # because the user configured quota has been exceeded
+ # or because the underlying disk is almost full.
+ #
+ # Returns:
+ # (bool): True if the artifact cache is full
+ #
+ def full(self):
+
+ if self.get_cache_size() > self._cache_quota:
+ return True
+
+ _, volume_avail = self._get_cache_volume_size()
+ if volume_avail < self._cache_quota_headroom:
+ return True
+
+ return False
+
+ ################################################
+ # Local Private Methods #
+ ################################################
+
+ # _read_cache_size()
+ #
+ # Reads and returns the size of the artifact cache that's stored in the
+ # cache's size file
+ #
+ # Returns:
+ # (int): The size of the artifact cache, as recorded in the file
+ #
+ def _read_cache_size(self):
+ size_file_path = os.path.join(self.casdir, CACHE_SIZE_FILE)
+
+ if not os.path.exists(size_file_path):
+ return None
+
+ with open(size_file_path, "r") as f:
+ size = f.read()
+
+ try:
+ num_size = int(size)
+ except ValueError as e:
+ raise CASCacheError("Size '{}' parsed from '{}' was not an integer".format(
+ size, size_file_path)) from e
+
+ return num_size
+
+ # _write_cache_size()
+ #
+ # Writes the given size of the artifact to the cache's size file
+ #
+ # Args:
+ # size (int): The size of the artifact cache to record
+ #
+ def _write_cache_size(self, size):
+ assert isinstance(size, int)
+ size_file_path = os.path.join(self.casdir, CACHE_SIZE_FILE)
+ with utils.save_file_atomic(size_file_path, "w") as f:
+ f.write(str(size))
+
+ # _get_cache_volume_size()
+ #
+ # Get the available space and total space for the volume on
+ # which the artifact cache is located.
+ #
+ # Returns:
+ # (int): The total number of bytes on the volume
+ # (int): The number of available bytes on the volume
+ #
+ # NOTE: We use this stub to allow the test cases
+ # to override what an artifact cache thinks
+ # about its disk size and available bytes.
+ #
+ def _get_cache_volume_size(self):
+ return utils._get_volume_size(self.casdir)
+
+ # _calculate_cache_quota()
+ #
+ # Calculates and sets the cache quota and lower threshold based on the
+ # quota set in Context.
+ # It checks that the quota is both a valid expression, and that there is
+ # enough disk space to satisfy that quota
+ #
+ def _calculate_cache_quota(self):
+ # Headroom intended to give BuildStream a bit of leeway.
+ # This acts as the minimum size of cache_quota and also
+ # is taken from the user requested cache_quota.
+ #
+ if 'BST_TEST_SUITE' in os.environ:
+ self._cache_quota_headroom = 0
+ else:
+ self._cache_quota_headroom = 2e9
+
+ total_size, available_space = self._get_cache_volume_size()
+ cache_size = self.get_cache_size()
+ self.available_space = available_space
+
+ # Ensure system has enough storage for the cache_quota
+ #
+ # If cache_quota is none, set it to the maximum it could possibly be.
+ #
+ # Also check that cache_quota is at least as large as our headroom.
+ #
+ cache_quota = self._config_cache_quota
+ if cache_quota is None: # Infinity, set to max system storage
+ cache_quota = cache_size + available_space
+ if cache_quota < self._cache_quota_headroom: # Check minimum
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "Invalid cache quota ({}): ".format(utils._pretty_size(cache_quota)) +
+ "BuildStream requires a minimum cache quota of 2G.")
+ elif cache_quota > total_size:
+ # A quota greater than the total disk size is certainly an error
+ raise CASCacheError("Your system does not have enough available " +
+ "space to support the cache quota specified.",
+ detail=("You have specified a quota of {quota} total disk space.\n" +
+ "The filesystem containing {local_cache_path} only " +
+ "has {total_size} total disk space.")
+ .format(
+ quota=self._config_cache_quota,
+ local_cache_path=self.casdir,
+ total_size=utils._pretty_size(total_size)),
+ reason='insufficient-storage-for-quota')
+
+ elif cache_quota > cache_size + available_space:
+ # The quota does not fit in the available space, this is a warning
+ if '%' in self._config_cache_quota_string:
+ available = (available_space / total_size) * 100
+ available = '{}% of total disk space'.format(round(available, 1))
+ else:
+ available = utils._pretty_size(available_space)
+
+ self._message(Message(
+ None,
+ MessageType.WARN,
+ "Your system does not have enough available " +
+ "space to support the cache quota specified.",
+ detail=("You have specified a quota of {quota} total disk space.\n" +
+ "The filesystem containing {local_cache_path} only " +
+ "has {available_size} available.")
+ .format(quota=self._config_cache_quota,
+ local_cache_path=self.casdir,
+ available_size=available)))
+
+ # Place a slight headroom (2e9 (2GB) on the cache_quota) into
+ # cache_quota to try and avoid exceptions.
+ #
+ # Of course, we might still end up running out during a build
+ # if we end up writing more than 2G, but hey, this stuff is
+ # already really fuzzy.
+ #
+ self._cache_quota_original = cache_quota
+ self._cache_quota = cache_quota - self._cache_quota_headroom
+ self._cache_lower_threshold = self._cache_quota / 2
+
+
def _grouper(iterable, n):
while True:
try:
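
The quota arithmetic carried over into CASQuota is worth spelling out: the resolved user quota keeps 2 GB of protective headroom subtracted from it internally, a cleanup targets half of that internal quota, and full() also fires when the volume itself is within the headroom of running out of space. A small sketch with illustrative numbers, assuming the 2e9 headroom from _calculate_cache_quota() above:

    # Illustrative numbers only: how CASQuota derives its thresholds
    # from a user quota already resolved to bytes.
    HEADROOM = int(2e9)  # 2 GB of protective headroom

    def derive_thresholds(cache_quota_original):
        cache_quota = cache_quota_original - HEADROOM
        lower_threshold = cache_quota / 2  # target size after a cleanup
        return cache_quota, lower_threshold

    def is_full(cache_size, cache_quota, volume_avail):
        # Mirrors CASQuota.full(): over the internal quota, or the
        # volume is within the headroom of filling up entirely.
        return cache_size > cache_quota or volume_avail < HEADROOM

    quota, threshold = derive_thresholds(int(10e9))  # a 10G user quota
    print(quota, threshold)
    print(is_full(9 * 10**9, quota, volume_avail=50 * 10**9))  # True: over quota
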
diff --git a/buildstream/_context.py b/buildstream/_context.py
index 2fbf415fb..75edac39d 100644
--- a/buildstream/_context.py
+++ b/buildstream/_context.py
@@ -30,8 +30,8 @@ from . import _yaml
from ._exceptions import LoadError, LoadErrorReason, BstError
from ._message import Message, MessageType
from ._profile import Topics, profile_start, profile_end
-from ._artifactcache import ArtifactCache, ArtifactCacheUsage
-from ._cas import CASCache
+from ._artifactcache import ArtifactCache
+from ._cas import CASCache, CASQuota, CASCacheUsage
from ._workspaces import Workspaces, WorkspaceProjectCache
from .plugin import _plugin_lookup
from .sandbox import SandboxRemote
@@ -58,18 +58,27 @@ class Context():
# Filename indicating which configuration file was used, or None for the defaults
self.config_origin = None
+ # The directory under which other directories are based
+ self.cachedir = None
+
# The directory where various sources are stored
self.sourcedir = None
# The directory where build sandboxes will be created
self.builddir = None
+ # The directory for CAS
+ self.casdir = None
+
+ # Extract directory
+ self.extractdir = None
+
+ # The directory for temporary files
+ self.tmpdir = None
+
# Default root location for workspaces
self.workspacedir = None
- # The local binary artifact cache directory
- self.artifactdir = None
-
# The locations from which to push and pull prebuilt artifacts
self.artifact_cache_specs = None
@@ -118,6 +127,9 @@ class Context():
# Size of the artifact cache in bytes
self.config_cache_quota = None
+ # User specified cache quota, used for display messages
+ self.config_cache_quota_string = None
+
# Whether or not to attempt to pull build trees globally
self.pull_buildtrees = None
@@ -142,6 +154,7 @@ class Context():
self._log_handle = None
self._log_filename = None
self._cascache = None
+ self._casquota = None
self._directory = directory
# load()
@@ -179,13 +192,22 @@ class Context():
user_config = _yaml.load(config)
_yaml.composite(defaults, user_config)
+ # Error out on obsolete configuration options
+ if defaults.get('builddir'):
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "builddir is obsolete, use cachedir")
+
+ if defaults.get('artifactdir'):
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "artifactdir is obsolete")
+
_yaml.node_validate(defaults, [
- 'sourcedir', 'builddir', 'artifactdir', 'logdir',
- 'scheduler', 'artifacts', 'logging', 'projects',
- 'cache', 'prompt', 'workspacedir', 'remote-execution'
+ 'cachedir', 'sourcedir', 'builddir', 'logdir', 'scheduler',
+ 'artifacts', 'logging', 'projects', 'cache', 'prompt',
+ 'workspacedir', 'remote-execution',
])
- for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
+ for directory in ['cachedir', 'sourcedir', 'logdir', 'workspacedir']:
# Allow the ~ tilde expansion and any environment variables in
# path specification in the config files.
#
@@ -195,14 +217,34 @@ class Context():
path = os.path.normpath(path)
setattr(self, directory, path)
+ # Add the directories not set by the user
+ self.extractdir = os.path.join(self.cachedir, 'extract')
+ self.tmpdir = os.path.join(self.cachedir, 'tmp')
+ self.casdir = os.path.join(self.cachedir, 'cas')
+ self.builddir = os.path.join(self.cachedir, 'build')
+
+ # Move old artifact cas to cas if it exists and create symlink
+ old_casdir = os.path.join(self.cachedir, 'artifacts', 'cas')
+ if (os.path.exists(old_casdir) and not os.path.islink(old_casdir) and
+ not os.path.exists(self.casdir)):
+ os.rename(old_casdir, self.casdir)
+ os.symlink(self.casdir, old_casdir)
+
# Load quota configuration
- # We need to find the first existing directory in the path of
- # our artifactdir - the artifactdir may not have been created
- # yet.
+ # We need to find the first existing directory in the path of our
+ # cachedir - the cachedir may not have been created yet.
cache = _yaml.node_get(defaults, Mapping, 'cache')
_yaml.node_validate(cache, ['quota', 'pull-buildtrees', 'cache-buildtrees'])
- self.config_cache_quota = _yaml.node_get(cache, str, 'quota')
+ self.config_cache_quota_string = _yaml.node_get(cache, str, 'quota')
+ try:
+ self.config_cache_quota = utils._parse_size(self.config_cache_quota_string,
+ self.casdir)
+ except utils.UtilError as e:
+ raise LoadError(LoadErrorReason.INVALID_DATA,
+ "{}\nPlease specify the value in bytes or as a % of full disk space.\n"
+ "\nValid values are, for example: 800M 10G 1T 50%\n"
+ .format(str(e))) from e
# Load artifact share configuration
self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)
@@ -262,15 +304,15 @@ class Context():
return self._artifactcache
- # get_artifact_cache_usage()
+ # get_cache_usage()
#
# Fetches the current usage of the artifact cache
#
# Returns:
- # (ArtifactCacheUsage): The current status
+ # (CASCacheUsage): The current status
#
- def get_artifact_cache_usage(self):
- return ArtifactCacheUsage(self.artifactcache)
+ def get_cache_usage(self):
+ return CASCacheUsage(self.get_casquota())
# add_project():
#
@@ -640,9 +682,14 @@ class Context():
def get_cascache(self):
if self._cascache is None:
- self._cascache = CASCache(self.artifactdir)
+ self._cascache = CASCache(self.cachedir)
return self._cascache
+ def get_casquota(self):
+ if self._casquota is None:
+ self._casquota = CASQuota(self)
+ return self._casquota
+
# _node_get_option_str()
#
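
Besides deriving the new directories, Context.load() now performs a one-time migration of an existing cache: an old artifacts/cas store is renamed into place as cas, and a symlink is left behind so the old path keeps resolving. A standalone sketch of that logic, assuming the directory names from the hunk above (migrate_casdir is an illustrative name):

    import os

    # Sketch of the one-time migration: move 'artifacts/cas' to 'cas'
    # if the new location does not exist yet, then symlink the old
    # location to the new one.
    def migrate_casdir(cachedir):
        casdir = os.path.join(cachedir, 'cas')
        old_casdir = os.path.join(cachedir, 'artifacts', 'cas')
        if (os.path.exists(old_casdir) and not os.path.islink(old_casdir)
                and not os.path.exists(casdir)):
            os.rename(old_casdir, casdir)
            os.symlink(casdir, old_casdir)
        return casdir
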
diff --git a/buildstream/_frontend/status.py b/buildstream/_frontend/status.py
index 70f233357..91f47221a 100644
--- a/buildstream/_frontend/status.py
+++ b/buildstream/_frontend/status.py
@@ -404,7 +404,7 @@ class _StatusHeader():
#
# ~~~~~~ cache: 69% ~~~~~~
#
- usage = self._context.get_artifact_cache_usage()
+ usage = self._context.get_cache_usage()
usage_percent = '{}%'.format(usage.used_percent)
size = 21
diff --git a/buildstream/_frontend/widget.py b/buildstream/_frontend/widget.py
index 2920d657d..d1df06284 100644
--- a/buildstream/_frontend/widget.py
+++ b/buildstream/_frontend/widget.py
@@ -486,7 +486,7 @@ class LogLine(Widget):
values["Session Start"] = starttime.strftime('%A, %d-%m-%Y at %H:%M:%S')
values["Project"] = "{} ({})".format(project.name, project.directory)
values["Targets"] = ", ".join([t.name for t in stream.targets])
- values["Cache Usage"] = "{}".format(context.get_artifact_cache_usage())
+ values["Cache Usage"] = "{}".format(context.get_cache_usage())
text += self._format_values(values)
# User configurations
@@ -495,10 +495,10 @@ class LogLine(Widget):
values = OrderedDict()
values["Configuration File"] = \
"Default Configuration" if not context.config_origin else context.config_origin
+ values["Cache Directory"] = context.cachedir
values["Log Files"] = context.logdir
values["Source Mirrors"] = context.sourcedir
values["Build Area"] = context.builddir
- values["Artifact Cache"] = context.artifactdir
values["Strict Build Plan"] = "Yes" if context.get_strict() else "No"
values["Maximum Fetch Tasks"] = context.sched_fetchers
values["Maximum Build Tasks"] = context.sched_builders
diff --git a/buildstream/_scheduler/jobs/cachesizejob.py b/buildstream/_scheduler/jobs/cachesizejob.py
index a96b92353..5f27b7fc1 100644
--- a/buildstream/_scheduler/jobs/cachesizejob.py
+++ b/buildstream/_scheduler/jobs/cachesizejob.py
@@ -25,14 +25,14 @@ class CacheSizeJob(Job):
self._complete_cb = complete_cb
context = self._scheduler.context
- self._artifacts = context.artifactcache
+ self._casquota = context.get_casquota()
def child_process(self):
- return self._artifacts.compute_cache_size()
+ return self._casquota.compute_cache_size()
def parent_complete(self, status, result):
if status == JobStatus.OK:
- self._artifacts.set_cache_size(result)
+ self._casquota.set_cache_size(result)
if self._complete_cb:
self._complete_cb(status, result)
diff --git a/buildstream/_scheduler/jobs/cleanupjob.py b/buildstream/_scheduler/jobs/cleanupjob.py
index a1d49f339..e016d4cd7 100644
--- a/buildstream/_scheduler/jobs/cleanupjob.py
+++ b/buildstream/_scheduler/jobs/cleanupjob.py
@@ -25,27 +25,27 @@ class CleanupJob(Job):
self._complete_cb = complete_cb
context = self._scheduler.context
+ self._casquota = context.get_casquota()
self._artifacts = context.artifactcache
def child_process(self):
def progress():
self.send_message('update-cache-size',
- self._artifacts.get_cache_size())
+ self._casquota.get_cache_size())
return self._artifacts.clean(progress)
def handle_message(self, message_type, message):
-
# Update the cache size in the main process as we go,
# this provides better feedback in the UI.
if message_type == 'update-cache-size':
- self._artifacts.set_cache_size(message)
+ self._casquota.set_cache_size(message)
return True
return False
def parent_complete(self, status, result):
if status == JobStatus.OK:
- self._artifacts.set_cache_size(result)
+ self._casquota.set_cache_size(result)
if self._complete_cb:
self._complete_cb(status, result)
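
The cleanup job above illustrates the two-process protocol: the child sends interim 'update-cache-size' messages while it works, the parent applies each one to the CASQuota so the UI stays current, and the final result is applied on completion. A toy sketch of that flow, with a plain multiprocessing queue standing in for BuildStream's job messaging machinery:

    import multiprocessing

    def child(queue):
        for interim_size in (900, 700, 500):        # pretend cleanup progress
            queue.put(('update-cache-size', interim_size))
        queue.put(('done', 500))                    # final result

    def parent(queue, casquota):
        while True:
            message_type, payload = queue.get()
            casquota.set_cache_size(payload)        # both paths set the size
            if message_type == 'done':
                break

    class FakeCASQuota:
        def set_cache_size(self, size):
            print('cache size is now', size)

    if __name__ == '__main__':
        q = multiprocessing.Queue()
        p = multiprocessing.Process(target=child, args=(q,))
        p.start()
        parent(q, FakeCASQuota())
        p.join()
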
diff --git a/buildstream/data/userconfig.yaml b/buildstream/data/userconfig.yaml
index f17dac88c..d27e56ef2 100644
--- a/buildstream/data/userconfig.yaml
+++ b/buildstream/data/userconfig.yaml
@@ -13,11 +13,8 @@
# Location to store sources
sourcedir: ${XDG_CACHE_HOME}/buildstream/sources
-# Location to perform builds
-builddir: ${XDG_CACHE_HOME}/buildstream/build
-
-# Location to store local binary artifacts
-artifactdir: ${XDG_CACHE_HOME}/buildstream/artifacts
+# Root location for other directories in the cache
+cachedir: ${XDG_CACHE_HOME}/buildstream
# Location to store build logs
logdir: ${XDG_CACHE_HOME}/buildstream/logs
diff --git a/buildstream/element.py b/buildstream/element.py
index d5ec5c436..5c06065b4 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -1448,7 +1448,7 @@ class Element(Plugin):
# It's advantageous to have this temporary directory on
# the same file system as the rest of our cache.
with self.timed_activity("Staging sources", silent_nested=True), \
- utils._tempdir(dir=context.artifactdir, prefix='staging-temp') as temp_staging_directory:
+ utils._tempdir(dir=context.tmpdir, prefix='staging-temp') as temp_staging_directory:
import_dir = temp_staging_directory
diff --git a/buildstream/plugintestutils/runcli.py b/buildstream/plugintestutils/runcli.py
index fb7c23c6c..83fdff721 100644
--- a/buildstream/plugintestutils/runcli.py
+++ b/buildstream/plugintestutils/runcli.py
@@ -277,10 +277,10 @@ class Cli():
*, cache_dir=None):
# Read configuration to figure out where artifacts are stored
if not cache_dir:
- default = os.path.join(project, 'cache', 'artifacts')
+ default = os.path.join(project, 'cache')
if self.config is not None:
- cache_dir = self.config.get('artifactdir', default)
+ cache_dir = self.config.get('cachedir', default)
else:
cache_dir = default
@@ -582,11 +582,21 @@ def cli_integration(tmpdir, integration_cache):
# We want to cache sources for integration tests more permanently,
# to avoid downloading the huge base-sdk repeatedly
fixture.configure({
+ 'cachedir': integration_cache.cachedir,
'sourcedir': integration_cache.sources,
- 'artifactdir': integration_cache.artifacts
})
- return fixture
+ yield fixture
+
+ # Remove the following folders if necessary
+ try:
+ shutil.rmtree(os.path.join(integration_cache.cachedir, 'build'))
+ except FileNotFoundError:
+ pass
+ try:
+ shutil.rmtree(os.path.join(integration_cache.cachedir, 'tmp'))
+ except FileNotFoundError:
+ pass
@contextmanager
@@ -626,10 +636,8 @@ def configured(directory, config=None):
if not config.get('sourcedir', False):
config['sourcedir'] = os.path.join(directory, 'sources')
- if not config.get('builddir', False):
- config['builddir'] = os.path.join(directory, 'build')
- if not config.get('artifactdir', False):
- config['artifactdir'] = os.path.join(directory, 'artifacts')
+ if not config.get('cachedir', False):
+ config['cachedir'] = directory
if not config.get('logdir', False):
config['logdir'] = os.path.join(directory, 'logs')
diff --git a/conftest.py b/conftest.py
index 6fc24c2d6..7f8da3633 100755
--- a/conftest.py
+++ b/conftest.py
@@ -53,16 +53,16 @@ def pytest_runtest_setup(item):
class IntegrationCache():
def __init__(self, cache):
- cache = os.path.abspath(cache)
+ self.root = os.path.abspath(cache)
os.makedirs(cache, exist_ok=True)
# Use the same sources every time
- self.sources = os.path.join(cache, 'sources')
+ self.sources = os.path.join(self.root, 'sources')
# Create a temp directory for the duration of the test for
# the artifacts directory
try:
- self.artifacts = tempfile.mkdtemp(dir=cache, prefix='artifacts-')
+ self.cachedir = tempfile.mkdtemp(dir=self.root, prefix='cache-')
except OSError as e:
raise AssertionError("Unable to create test directory !") from e
@@ -84,7 +84,11 @@ def integration_cache(request):
# Clean up the artifacts after each test run - we only want to
# cache sources between runs
try:
- shutil.rmtree(cache.artifacts)
+ shutil.rmtree(cache.cachedir)
+ except FileNotFoundError:
+ pass
+ try:
+ shutil.rmtree(os.path.join(cache.root, 'cas'))
except FileNotFoundError:
pass
diff --git a/doc/bst2html.py b/doc/bst2html.py
index af35efe24..2f4012695 100755
--- a/doc/bst2html.py
+++ b/doc/bst2html.py
@@ -194,10 +194,9 @@ def workdir(source_cache=None):
bst_config_file = os.path.join(tempdir, 'buildstream.conf')
config = {
+ 'cachedir': tempdir,
'sourcedir': source_cache,
- 'artifactdir': os.path.join(tempdir, 'artifacts'),
'logdir': os.path.join(tempdir, 'logs'),
- 'builddir': os.path.join(tempdir, 'build'),
}
_yaml.dump(config, bst_config_file)
@@ -411,12 +410,10 @@ def run_session(description, tempdir, source_cache, palette, config_file, force)
# Encode and save the output if that was asked for
output = _yaml.node_get(command, str, 'output', default_value=None)
if output is not None:
-
# Convert / Generate a nice <div>
converted = generate_html(command_out, directory, config_file,
source_cache, tempdir, palette,
command_str, command_fake_output is not None)
-
# Save it
filename = os.path.join(desc_dir, output)
filename = os.path.realpath(filename)
diff --git a/doc/sessions/running-commands.run b/doc/sessions/running-commands.run
index ce8eccd2f..41d165799 100644
--- a/doc/sessions/running-commands.run
+++ b/doc/sessions/running-commands.run
@@ -2,7 +2,7 @@
commands:
# Make it fetch first
- directory: ../examples/running-commands
- command: fetch hello.bst
+ command: source fetch hello.bst
# Capture a show output
- directory: ../examples/running-commands
diff --git a/tests/artifactcache/cache_size.py b/tests/artifactcache/cache_size.py
index 88f8eaddf..dcfc13424 100644
--- a/tests/artifactcache/cache_size.py
+++ b/tests/artifactcache/cache_size.py
@@ -50,15 +50,15 @@ def test_cache_size_write(cli, tmpdir):
create_project(project_dir)
# Artifact cache must be in a known place
- artifactdir = os.path.join(project_dir, "artifacts")
- cli.configure({"artifactdir": artifactdir})
+ casdir = os.path.join(project_dir, "cas")
+ cli.configure({"cachedir": project_dir})
# Build, to populate the cache
res = cli.run(project=project_dir, args=["build", "test.bst"])
res.assert_success()
# Inspect the artifact cache
- sizefile = os.path.join(artifactdir, CACHE_SIZE_FILE)
+ sizefile = os.path.join(casdir, CACHE_SIZE_FILE)
assert os.path.isfile(sizefile)
with open(sizefile, "r") as f:
size_data = f.read()
@@ -81,11 +81,11 @@ def test_quota_over_1024T(cli, tmpdir):
_yaml.dump({'name': 'main'}, str(project.join("project.conf")))
volume_space_patch = mock.patch(
- "buildstream._artifactcache.ArtifactCache._get_cache_volume_size",
+ "buildstream._cas.CASQuota._get_cache_volume_size",
autospec=True,
return_value=(1025 * TiB, 1025 * TiB)
)
with volume_space_patch:
result = cli.run(project, args=["build", "file.bst"])
- result.assert_main_error(ErrorDomain.ARTIFACT, 'insufficient-storage-for-quota')
+ result.assert_main_error(ErrorDomain.CAS, 'insufficient-storage-for-quota')
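
Note how the tests now patch the volume-size probe on CASQuota rather than ArtifactCache. A self-contained sketch of constructing such a patch; the commented usage assumes the cli, project, and ErrorDomain fixtures from the test above:

    from unittest import mock

    TiB = 1024 ** 4

    # The patch target moved from
    # buildstream._artifactcache.ArtifactCache._get_cache_volume_size
    # to the new CASQuota class.
    volume_space_patch = mock.patch(
        "buildstream._cas.CASQuota._get_cache_volume_size",
        autospec=True,
        return_value=(1025 * TiB, 1025 * TiB),
    )

    # with volume_space_patch:
    #     result = cli.run(project, args=["build", "file.bst"])
    #     result.assert_main_error(ErrorDomain.CAS, 'insufficient-storage-for-quota')
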
diff --git a/tests/artifactcache/expiry.py b/tests/artifactcache/expiry.py
index d92e68f0b..8ece6295c 100644
--- a/tests/artifactcache/expiry.py
+++ b/tests/artifactcache/expiry.py
@@ -341,7 +341,7 @@ def test_never_delete_required_track(cli, datafiles, tmpdir):
("200%", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
# Not enough space on disk even if you cleaned up
- ("11K", ErrorDomain.ARTIFACT, 'insufficient-storage-for-quota'),
+ ("11K", ErrorDomain.CAS, 'insufficient-storage-for-quota'),
# Not enough space for these caches
("7K", 'warning', 'Your system does not have enough available'),
@@ -355,7 +355,7 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, err_domain, err_reas
cli.configure({
'cache': {
'quota': quota,
- }
+ },
})
# We patch how we get space information
@@ -373,13 +373,13 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, err_domain, err_reas
total_space = 10000
volume_space_patch = mock.patch(
- "buildstream._artifactcache.ArtifactCache._get_cache_volume_size",
+ "buildstream.utils._get_volume_size",
autospec=True,
return_value=(total_space, free_space),
)
cache_size_patch = mock.patch(
- "buildstream._artifactcache.ArtifactCache.get_cache_size",
+ "buildstream._cas.CASQuota.get_cache_size",
autospec=True,
return_value=0,
)
@@ -417,7 +417,7 @@ def test_extract_expiry(cli, datafiles, tmpdir):
res.assert_success()
# Get a snapshot of the extracts in advance
- extractdir = os.path.join(project, 'cache', 'artifacts', 'extract', 'test', 'target')
+ extractdir = os.path.join(project, 'cache', 'extract', 'test', 'target')
extracts = os.listdir(extractdir)
assert(len(extracts) == 1)
extract = os.path.join(extractdir, extracts[0])
@@ -436,7 +436,7 @@ def test_extract_expiry(cli, datafiles, tmpdir):
# Now we should have a directory for the cached target2.bst, which
# replaced target.bst in the cache, we should not have a directory
# for the target.bst
- refsdir = os.path.join(project, 'cache', 'artifacts', 'cas', 'refs', 'heads')
+ refsdir = os.path.join(project, 'cache', 'cas', 'refs', 'heads')
refsdirtest = os.path.join(refsdir, 'test')
refsdirtarget = os.path.join(refsdirtest, 'target')
refsdirtarget2 = os.path.join(refsdirtest, 'target2')
diff --git a/tests/artifactcache/junctions.py b/tests/artifactcache/junctions.py
index d5de16282..693a3eb1f 100644
--- a/tests/artifactcache/junctions.py
+++ b/tests/artifactcache/junctions.py
@@ -70,8 +70,8 @@ def test_push_pull(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
- artifacts = os.path.join(cli.directory, 'artifacts')
- shutil.rmtree(artifacts)
+ cas = os.path.join(cli.directory, 'cas')
+ shutil.rmtree(cas)
# Assert that nothing is cached locally anymore
state = cli.get_element_state(project, 'target.bst')
diff --git a/tests/artifactcache/pull.py b/tests/artifactcache/pull.py
index 4ab35f066..3113061ca 100644
--- a/tests/artifactcache/pull.py
+++ b/tests/artifactcache/pull.py
@@ -57,7 +57,7 @@ def test_pull(cli, tmpdir, datafiles):
# Set up an artifact cache.
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
# Configure artifact share
- artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
+ cache_dir = os.path.join(str(tmpdir), 'cache')
user_config_file = str(tmpdir.join('buildstream.conf'))
user_config = {
'scheduler': {
@@ -66,7 +66,8 @@ def test_pull(cli, tmpdir, datafiles):
'artifacts': {
'url': share.repo,
'push': True,
- }
+ },
+ 'cachedir': cache_dir
}
# Write down the user configuration file
@@ -93,7 +94,6 @@ def test_pull(cli, tmpdir, datafiles):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
- context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
context.set_message_handler(message_handler)
# Load the project and CAS cache
@@ -111,7 +111,7 @@ def test_pull(cli, tmpdir, datafiles):
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
process = multiprocessing.Process(target=_queue_wrapper,
args=(_test_pull, queue, user_config_file, project_dir,
- artifact_dir, 'target.bst', element_key))
+ cache_dir, 'target.bst', element_key))
try:
# Keep SIGINT blocked in the child process
@@ -128,12 +128,14 @@ def test_pull(cli, tmpdir, datafiles):
assert cas.contains(element, element_key)
-def _test_pull(user_config_file, project_dir, artifact_dir,
+def _test_pull(user_config_file, project_dir, cache_dir,
element_name, element_key, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
- context.artifactdir = artifact_dir
+ context.cachedir = cache_dir
+ context.casdir = os.path.join(cache_dir, 'cas')
+ context.tmpdir = os.path.join(cache_dir, 'tmp')
context.set_message_handler(message_handler)
# Load the project manually
@@ -166,7 +168,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
# Set up an artifact cache.
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
# Configure artifact share
- artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
+ rootcache_dir = os.path.join(str(tmpdir), 'cache')
user_config_file = str(tmpdir.join('buildstream.conf'))
user_config = {
'scheduler': {
@@ -175,7 +177,8 @@ def test_pull_tree(cli, tmpdir, datafiles):
'artifacts': {
'url': share.repo,
'push': True,
- }
+ },
+ 'cachedir': rootcache_dir
}
# Write down the user configuration file
@@ -196,7 +199,6 @@ def test_pull_tree(cli, tmpdir, datafiles):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
- context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
context.set_message_handler(message_handler)
# Load the project and CAS cache
@@ -219,7 +221,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
process = multiprocessing.Process(target=_queue_wrapper,
args=(_test_push_tree, queue, user_config_file, project_dir,
- artifact_dir, artifact_digest))
+ artifact_digest))
try:
# Keep SIGINT blocked in the child process
@@ -247,7 +249,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
# Use subprocess to avoid creation of gRPC threads in main BuildStream process
process = multiprocessing.Process(target=_queue_wrapper,
args=(_test_pull_tree, queue, user_config_file, project_dir,
- artifact_dir, tree_digest))
+ tree_digest))
try:
# Keep SIGINT blocked in the child process
@@ -269,11 +271,10 @@ def test_pull_tree(cli, tmpdir, datafiles):
assert os.path.exists(cas.objpath(directory_digest))
-def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
+def _test_push_tree(user_config_file, project_dir, artifact_digest, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
- context.artifactdir = artifact_dir
context.set_message_handler(message_handler)
# Load the project manually
@@ -305,11 +306,10 @@ def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest
queue.put("No remote configured")
-def _test_pull_tree(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
+def _test_pull_tree(user_config_file, project_dir, artifact_digest, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
- context.artifactdir = artifact_dir
context.set_message_handler(message_handler)
# Load the project manually
diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index 3b2e1be09..0a39f5344 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -51,7 +51,7 @@ def test_push(cli, tmpdir, datafiles):
# Set up an artifact cache.
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
# Configure artifact share
- artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
+ rootcache_dir = os.path.join(str(tmpdir), 'cache')
user_config_file = str(tmpdir.join('buildstream.conf'))
user_config = {
'scheduler': {
@@ -60,7 +60,8 @@ def test_push(cli, tmpdir, datafiles):
'artifacts': {
'url': share.repo,
'push': True,
- }
+ },
+ 'cachedir': rootcache_dir
}
# Write down the user configuration file
@@ -69,7 +70,6 @@ def test_push(cli, tmpdir, datafiles):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
- context.artifactdir = artifact_dir
context.set_message_handler(message_handler)
# Load the project manually
@@ -89,7 +89,7 @@ def test_push(cli, tmpdir, datafiles):
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
process = multiprocessing.Process(target=_queue_wrapper,
args=(_test_push, queue, user_config_file, project_dir,
- artifact_dir, 'target.bst', element_key))
+ 'target.bst', element_key))
try:
# Keep SIGINT blocked in the child process
@@ -106,12 +106,10 @@ def test_push(cli, tmpdir, datafiles):
assert share.has_artifact('test', 'target.bst', element_key)
-def _test_push(user_config_file, project_dir, artifact_dir,
- element_name, element_key, queue):
+def _test_push(user_config_file, project_dir, element_name, element_key, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
- context.artifactdir = artifact_dir
context.set_message_handler(message_handler)
# Load the project manually
@@ -152,7 +150,7 @@ def test_push_directory(cli, tmpdir, datafiles):
# Set up an artifact cache.
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
# Configure artifact share
- artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
+ rootcache_dir = os.path.join(str(tmpdir), 'cache')
user_config_file = str(tmpdir.join('buildstream.conf'))
user_config = {
'scheduler': {
@@ -161,7 +159,8 @@ def test_push_directory(cli, tmpdir, datafiles):
'artifacts': {
'url': share.repo,
'push': True,
- }
+ },
+ 'cachedir': rootcache_dir
}
# Write down the user configuration file
@@ -170,7 +169,6 @@ def test_push_directory(cli, tmpdir, datafiles):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
- context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
context.set_message_handler(message_handler)
# Load the project and CAS cache
@@ -198,7 +196,7 @@ def test_push_directory(cli, tmpdir, datafiles):
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
process = multiprocessing.Process(target=_queue_wrapper,
args=(_test_push_directory, queue, user_config_file,
- project_dir, artifact_dir, artifact_digest))
+ project_dir, artifact_digest))
try:
# Keep SIGINT blocked in the child process
@@ -216,11 +214,10 @@ def test_push_directory(cli, tmpdir, datafiles):
assert share.has_object(artifact_digest)
-def _test_push_directory(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
+def _test_push_directory(user_config_file, project_dir, artifact_digest, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
- context.artifactdir = artifact_dir
context.set_message_handler(message_handler)
# Load the project manually
@@ -254,6 +251,7 @@ def test_push_message(cli, tmpdir, datafiles):
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
# Configure artifact share
artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
+ rootcache_dir = os.path.join(str(tmpdir), 'cache')
user_config_file = str(tmpdir.join('buildstream.conf'))
user_config = {
'scheduler': {
@@ -262,7 +260,8 @@ def test_push_message(cli, tmpdir, datafiles):
'artifacts': {
'url': share.repo,
'push': True,
- }
+ },
+ 'cachedir': rootcache_dir
}
# Write down the user configuration file
@@ -273,7 +272,7 @@ def test_push_message(cli, tmpdir, datafiles):
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
process = multiprocessing.Process(target=_queue_wrapper,
args=(_test_push_message, queue, user_config_file,
- project_dir, artifact_dir))
+ project_dir))
try:
# Keep SIGINT blocked in the child process
@@ -292,11 +291,10 @@ def test_push_message(cli, tmpdir, datafiles):
assert share.has_object(message_digest)
-def _test_push_message(user_config_file, project_dir, artifact_dir, queue):
+def _test_push_message(user_config_file, project_dir, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
- context.artifactdir = artifact_dir
context.set_message_handler(message_handler)
# Load the project manually
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index 2555355d3..1326beccc 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -64,8 +64,8 @@ def test_push_pull_all(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
- artifacts = os.path.join(cli.directory, 'artifacts')
- shutil.rmtree(artifacts)
+ cas = os.path.join(cli.directory, 'cas')
+ shutil.rmtree(cas)
# Assert that nothing is cached locally anymore
states = cli.get_element_states(project, all_elements)
@@ -114,7 +114,7 @@ def test_push_pull_default_targets(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
- artifacts = os.path.join(cli.directory, 'artifacts')
+ artifacts = os.path.join(cli.directory, 'cas')
shutil.rmtree(artifacts)
# Assert that nothing is cached locally anymore
@@ -156,8 +156,8 @@ def test_pull_secondary_cache(cli, tmpdir, datafiles):
assert_shared(cli, share2, project, 'target.bst')
# Delete the user's local artifact cache.
- artifacts = os.path.join(cli.directory, 'artifacts')
- shutil.rmtree(artifacts)
+ cas = os.path.join(cli.directory, 'cas')
+ shutil.rmtree(cas)
# Assert that the element is not cached anymore.
assert cli.get_element_state(project, 'target.bst') != 'cached'
@@ -210,8 +210,8 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the good_share.
#
- artifacts = os.path.join(cli.directory, 'artifacts')
- shutil.rmtree(artifacts)
+ cas = os.path.join(cli.directory, 'cas')
+ shutil.rmtree(cas)
result = cli.run(project=project, args=['artifact', 'pull', 'target.bst', '--remote',
good_share.repo])
@@ -251,8 +251,8 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
- artifacts = os.path.join(cli.directory, 'artifacts')
- shutil.rmtree(artifacts)
+ cas = os.path.join(cli.directory, 'cas')
+ shutil.rmtree(cas)
# Assert that nothing is cached locally anymore
for element_name in all_elements:
@@ -301,8 +301,8 @@ def test_push_pull_track_non_strict(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
- artifacts = os.path.join(cli.directory, 'artifacts')
- shutil.rmtree(artifacts)
+ cas = os.path.join(cli.directory, 'cas')
+ shutil.rmtree(cas)
# Assert that nothing is cached locally anymore
for element_name in all_elements:
@@ -337,7 +337,7 @@ def test_push_pull_cross_junction(cli, tmpdir, datafiles):
result.assert_success()
assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
- cache_dir = os.path.join(project, 'cache', 'artifacts')
+ cache_dir = os.path.join(project, 'cache', 'cas')
shutil.rmtree(cache_dir)
assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'buildable'
@@ -372,8 +372,8 @@ def test_pull_missing_blob(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
- artifacts = os.path.join(cli.directory, 'artifacts')
- shutil.rmtree(artifacts)
+ cas = os.path.join(cli.directory, 'cas')
+ shutil.rmtree(cas)
# Assert that nothing is cached locally anymore
for element_name in all_elements:
@@ -510,8 +510,8 @@ def test_pull_access_rights(caplog, cli, tmpdir, datafiles):
shutil.rmtree(checkout)
- artifacts = os.path.join(cli.directory, 'artifacts')
- shutil.rmtree(artifacts)
+ casdir = os.path.join(cli.directory, 'cas')
+ shutil.rmtree(casdir)
result = cli.run(project=project, args=['artifact', 'pull', 'compose-all.bst'])
result.assert_success()
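
With the root cache directory change, these frontend pull tests wipe the local cache by removing the 'cas' subdirectory beneath the cli's cache rather than the old 'artifacts' directory. A minimal sketch of the repeated pattern, assuming the `cli.directory` root used throughout this file (the helper name `wipe_local_cas` is illustrative, not part of this merge):

    import os
    import shutil

    def wipe_local_cas(cli):
        # Remove the CAS object store beneath the cli's cache directory;
        # afterwards nothing is cached locally until artifacts are pulled again.
        shutil.rmtree(os.path.join(cli.directory, 'cas'))
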
diff --git a/tests/integration/artifact.py b/tests/integration/artifact.py
index c36bc6aa9..35cad2599 100644
--- a/tests/integration/artifact.py
+++ b/tests/integration/artifact.py
@@ -53,7 +53,7 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3:
cli.configure({
'artifacts': {'url': share1.repo, 'push': True},
- 'artifactdir': os.path.join(str(tmpdir), 'artifacts')
+ 'cachedir': str(tmpdir)
})
# Build autotools element with cache-buildtrees set via the
@@ -69,20 +69,22 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
# to not cache buildtrees
cache_key = cli.get_element_key(project, element_name)
elementdigest = share1.has_artifact('test', element_name, cache_key)
- buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
+ buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello',
elementdigest.hash, 'buildtree')
assert os.path.isdir(buildtreedir)
assert not os.listdir(buildtreedir)
# Delete the local cached artifacts, and assert that when pulled with --pull-buildtrees
# it was cached in share1 as expected with an empty buildtree dir
- shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
assert cli.get_element_state(project, element_name) != 'cached'
result = cli.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
assert element_name in result.get_pulled_elements()
assert os.path.isdir(buildtreedir)
assert not os.listdir(buildtreedir)
- shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
# Assert that the default pull behaviour of not including buildtrees behaves as
# expected for the artifact in share1, which was purposely cached with an empty one. As such the
@@ -91,13 +93,14 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
result = cli.run(project=project, args=['artifact', 'pull', element_name])
assert element_name in result.get_pulled_elements()
assert not os.path.isdir(buildtreedir)
- shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
# Repeat building the artifacts, this time with the default behaviour of caching buildtrees,
# as such the buildtree dir should not be empty
cli.configure({
'artifacts': {'url': share2.repo, 'push': True},
- 'artifactdir': os.path.join(str(tmpdir), 'artifacts')
+ 'cachedir': str(tmpdir)
})
result = cli.run(project=project, args=['build', element_name])
assert result.exit_code == 0
@@ -106,27 +109,29 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
# Cache key will be the same; however, the digest hash will have changed as expected, so reconstruct paths
elementdigest = share2.has_artifact('test', element_name, cache_key)
- buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
+ buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello',
elementdigest.hash, 'buildtree')
assert os.path.isdir(buildtreedir)
assert os.listdir(buildtreedir)  # os.listdir() never returns None; assert non-empty instead
# Delete the local cached artifacts, and assert that when pulled with --pull-buildtrees
# it was cached in share2 as expected with a populated buildtree dir
- shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
assert cli.get_element_state(project, element_name) != 'cached'
result = cli.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
assert element_name in result.get_pulled_elements()
assert os.path.isdir(buildtreedir)
assert os.listdir(buildtreedir)  # os.listdir() never returns None; assert non-empty instead
- shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
# Verify that the user config option for cache-buildtrees works the same as the
# cli main option does. Point to share3, which does not have the artifacts cached,
# to force a build
cli.configure({
'artifacts': {'url': share3.repo, 'push': True},
- 'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
+ 'cachedir': str(tmpdir),
'cache': {'cache-buildtrees': 'never'}
})
result = cli.run(project=project, args=['build', element_name])
@@ -134,7 +139,7 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
assert cli.get_element_state(project, element_name) == 'cached'
cache_key = cli.get_element_key(project, element_name)
elementdigest = share3.has_artifact('test', element_name, cache_key)
- buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
+ buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello',
elementdigest.hash, 'buildtree')
assert os.path.isdir(buildtreedir)
assert not os.listdir(buildtreedir)
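
A note on the path changes above: the 'extract' staging area now lives directly under the root cache directory rather than under 'artifacts', so the buildtree assertions drop one path component. A sketch of the construction these tests rely on, with the digest coming from `share.has_artifact()` (the helper function is illustrative, not part of this merge):

    import os

    def buildtree_path(cachedir, project, element, digest):
        # <cachedir>/extract/<project>/<element>/<digest hash>/buildtree
        return os.path.join(cachedir, 'extract', project, element,
                            digest.hash, 'buildtree')

    # e.g.: buildtree_path(str(tmpdir), 'test', 'autotools-amhello', elementdigest)
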
diff --git a/tests/integration/pullbuildtrees.py b/tests/integration/pullbuildtrees.py
index 24fac7e00..538ed8c37 100644
--- a/tests/integration/pullbuildtrees.py
+++ b/tests/integration/pullbuildtrees.py
@@ -21,10 +21,11 @@ DATA_DIR = os.path.join(
# to false, which is the default user context. The cache has to be
# cleared as just forcefully removing the refpath leaves dangling objects.
def default_state(cli, tmpdir, share):
- shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
cli.configure({
'artifacts': {'url': share.repo, 'push': False},
- 'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
+ 'cachedir': str(tmpdir),
'cache': {'pull-buildtrees': False},
})
@@ -45,7 +46,7 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3:
cli2.configure({
'artifacts': {'url': share1.repo, 'push': True},
- 'artifactdir': os.path.join(str(tmpdir), 'artifacts')
+ 'cachedir': str(tmpdir),
})
# Build autotools element, check it's pushed, delete local
@@ -74,7 +75,7 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
result = cli2.run(project=project, args=['artifact', 'pull', element_name])
assert element_name in result.get_pulled_elements()
elementdigest = share1.has_artifact('test', element_name, cli2.get_element_key(project, element_name))
- buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
+ buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello',
elementdigest.hash, 'buildtree')
assert not os.path.isdir(buildtreedir)
result = cli2.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
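
The `default_state()` helper above now removes both the 'cas' and 'extract' directories, since, as its comment notes, deleting only the refs would leave dangling objects in the store. A sketch of that reset, assuming the layout used by these tests (that refs and objects both live under 'cas' is an assumption about CASCache internals, not shown in this diff):

    import os
    import shutil

    def clear_cache(cachedir):
        # Drop the whole CAS store (refs and objects together) and the
        # extracted artifact trees; removing refs alone would strand objects.
        for subdir in ('cas', 'extract'):
            path = os.path.join(cachedir, subdir)
            if os.path.isdir(path):
                shutil.rmtree(path)
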
diff --git a/tests/integration/shellbuildtrees.py b/tests/integration/shellbuildtrees.py
index 4d9d24e26..3fd761f05 100644
--- a/tests/integration/shellbuildtrees.py
+++ b/tests/integration/shellbuildtrees.py
@@ -62,7 +62,7 @@ def test_buildtree_staged_warn_empty_cached(cli_integration, tmpdir, datafiles):
# Switch to a temp artifact cache dir to ensure the artifact is rebuilt,
# caching an empty buildtree
cli_integration.configure({
- 'artifactdir': os.path.join(os.path.join(str(tmpdir), 'artifacts'))
+ 'cachedir': str(tmpdir)
})
res = cli_integration.run(project=project, args=['--cache-buildtrees', 'never', 'build', element_name])
@@ -139,7 +139,7 @@ def test_buildtree_from_failure_option_never(cli_integration, tmpdir, datafiles)
# Switch to a temp artifact cache dir to ensure the artifact is rebuilt,
# caching an empty buildtree
cli_integration.configure({
- 'artifactdir': os.path.join(os.path.join(str(tmpdir), 'artifacts'))
+ 'cachedir': str(tmpdir)
})
res = cli_integration.run(project=project, args=['--cache-buildtrees', 'never', 'build', element_name])
@@ -163,7 +163,7 @@ def test_buildtree_from_failure_option_failure(cli_integration, tmpdir, datafile
# default behaviour (which is always) as the buildtree will explicitly have been
# cached with content.
cli_integration.configure({
- 'artifactdir': os.path.join(os.path.join(str(tmpdir), 'artifacts'))
+ 'cachedir': str(tmpdir)
})
res = cli_integration.run(project=project, args=['--cache-buildtrees', 'failure', 'build', element_name])
@@ -195,10 +195,7 @@ def test_buildtree_pulled(cli, tmpdir, datafiles):
assert cli.get_element_state(project, element_name) == 'cached'
# Discard the cache
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- 'artifactdir': os.path.join(cli.directory, 'artifacts2')
- })
+ shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'cas'))
assert cli.get_element_state(project, element_name) != 'cached'
# Pull from cache, ensuring cli options is set to pull the buildtree
@@ -231,10 +228,7 @@ def test_buildtree_options(cli, tmpdir, datafiles):
assert share.has_artifact('test', element_name, cli.get_element_key(project, element_name))
# Discard the cache
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- 'artifactdir': os.path.join(cli.directory, 'artifacts2')
- })
+ shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'cas'))
assert cli.get_element_state(project, element_name) != 'cached'
# Pull from cache, but do not include buildtrees.
@@ -274,7 +268,7 @@ def test_buildtree_options(cli, tmpdir, datafiles):
])
assert 'Attempting to fetch missing artifact buildtree' in res.stderr
assert 'Hi' in res.output
- shutil.rmtree(os.path.join(os.path.join(cli.directory, 'artifacts2')))
+ shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'cas'))
assert cli.get_element_state(project, element_name) != 'cached'
# Check it's not loading the shell at all with always set for the buildtree, when the
diff --git a/tests/integration/source-determinism.py b/tests/integration/source-determinism.py
index f7b2bf2ac..0b69346cd 100644
--- a/tests/integration/source-determinism.py
+++ b/tests/integration/source-determinism.py
@@ -31,7 +31,7 @@ def create_test_directory(*path, mode=0o644):
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("kind", list(ALL_REPO_KINDS) + ['local'])
@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
-def test_deterministic_source_umask(cli, tmpdir, datafiles, kind, integration_cache):
+def test_deterministic_source_umask(cli, tmpdir, datafiles, kind):
project = str(datafiles)
element_name = 'list.bst'
element_path = os.path.join(project, 'elements', element_name)
@@ -94,9 +94,7 @@ def test_deterministic_source_umask(cli, tmpdir, datafiles, kind, integration_ca
return f.read()
finally:
os.umask(old_umask)
- cache_dir = integration_cache.artifacts
- cli.remove_artifact_from_cache(project, element_name,
- cache_dir=cache_dir)
+ cli.remove_artifact_from_cache(project, element_name)
assert get_value_for_umask(0o022) == get_value_for_umask(0o077)
@@ -104,7 +102,7 @@ def test_deterministic_source_umask(cli, tmpdir, datafiles, kind, integration_ca
@pytest.mark.integration
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
-def test_deterministic_source_local(cli, tmpdir, datafiles, integration_cache):
+def test_deterministic_source_local(cli, tmpdir, datafiles):
"""Only user rights should be considered for local source.
"""
project = str(datafiles)
@@ -156,8 +154,6 @@ def test_deterministic_source_local(cli, tmpdir, datafiles, integration_cache):
with open(os.path.join(checkoutdir, 'ls-l'), 'r') as f:
return f.read()
finally:
- cache_dir = integration_cache.artifacts
- cli.remove_artifact_from_cache(project, element_name,
- cache_dir=cache_dir)
+ cli.remove_artifact_from_cache(project, element_name)
assert get_value_for_mask(0o7777) == get_value_for_mask(0o0700)
diff --git a/tests/internals/context.py b/tests/internals/context.py
index 5a4b37ac5..17d950f85 100644
--- a/tests/internals/context.py
+++ b/tests/internals/context.py
@@ -43,7 +43,7 @@ def test_context_load(context_fixture):
context.load(config=os.devnull)
assert(context.sourcedir == os.path.join(cache_home, 'buildstream', 'sources'))
assert(context.builddir == os.path.join(cache_home, 'buildstream', 'build'))
- assert(context.artifactdir == os.path.join(cache_home, 'buildstream', 'artifacts'))
+ assert(context.cachedir == os.path.join(cache_home, 'buildstream'))
assert(context.logdir == os.path.join(cache_home, 'buildstream', 'logs'))
@@ -57,7 +57,7 @@ def test_context_load_envvar(context_fixture):
context.load(config=os.devnull)
assert(context.sourcedir == os.path.join('/', 'some', 'path', 'buildstream', 'sources'))
assert(context.builddir == os.path.join('/', 'some', 'path', 'buildstream', 'build'))
- assert(context.artifactdir == os.path.join('/', 'some', 'path', 'buildstream', 'artifacts'))
+ assert(context.cachedir == os.path.join('/', 'some', 'path', 'buildstream'))
assert(context.logdir == os.path.join('/', 'some', 'path', 'buildstream', 'logs'))
# Reset the environment variable
@@ -79,7 +79,7 @@ def test_context_load_user_config(context_fixture, datafiles):
assert(context.sourcedir == os.path.expanduser('~/pony'))
assert(context.builddir == os.path.join(cache_home, 'buildstream', 'build'))
- assert(context.artifactdir == os.path.join(cache_home, 'buildstream', 'artifacts'))
+ assert(context.cachedir == os.path.join(cache_home, 'buildstream'))
assert(context.logdir == os.path.join(cache_home, 'buildstream', 'logs'))
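
These assertions pin down the new layout: `Context` exposes a single `cachedir` rooted at `<cache_home>/buildstream`, the old `artifactdir` attribute is gone, and the other directories resolve beneath the root. A quick sketch using only the relationships asserted above (internal `Context` API, default configuration):

    import os
    from buildstream._context import Context

    context = Context()
    context.load(config=os.devnull)
    # cachedir is the root; build and log dirs hang off it by default
    assert context.builddir == os.path.join(context.cachedir, 'build')
    assert context.logdir == os.path.join(context.cachedir, 'logs')
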
diff --git a/tests/internals/pluginloading.py b/tests/internals/pluginloading.py
index 574e59f12..607d253ee 100644
--- a/tests/internals/pluginloading.py
+++ b/tests/internals/pluginloading.py
@@ -16,7 +16,7 @@ def create_pipeline(tmpdir, basedir, target):
context = Context()
context.load(config=os.devnull)
context.deploydir = os.path.join(str(tmpdir), 'deploy')
- context.artifactdir = os.path.join(str(tmpdir), 'artifact')
+ context.casdir = os.path.join(str(tmpdir), 'cas')
project = Project(basedir, context)
def dummy_handler(message, context):
diff --git a/tests/testutils/artifactshare.py b/tests/testutils/artifactshare.py
index 6b03d8d36..8abc0fa2c 100644
--- a/tests/testutils/artifactshare.py
+++ b/tests/testutils/artifactshare.py
@@ -46,7 +46,6 @@ class ArtifactShare():
# in tests as a remote artifact push/pull configuration
#
self.repodir = os.path.join(self.directory, 'repo')
-
os.makedirs(self.repodir)
self.cas = CASCache(self.repodir)
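
Finally, the share utility itself: `ArtifactShare` keeps its backing store in a 'repo' subdirectory served by a `CASCache`. The usage pattern exercised throughout the tests above looks roughly like this (a sketch based on the calls visible in this diff; the import path and element name are assumptions):

    import os
    from tests.testutils import create_artifact_share  # import path assumed

    def test_push_sketch(cli, tmpdir, datafiles):
        project = str(datafiles)
        with create_artifact_share(os.path.join(str(tmpdir), 'share')) as share:
            cli.configure({
                'artifacts': {'url': share.repo, 'push': True},
                'cachedir': str(tmpdir),
            })
            result = cli.run(project=project, args=['build', 'target.bst'])
            result.assert_success()
            assert share.has_artifact('test', 'target.bst',
                                      cli.get_element_key(project, 'target.bst'))
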