Diffstat (limited to 'tests/artifactcache/expiry.py')
-rw-r--r--  tests/artifactcache/expiry.py  259
1 file changed, 113 insertions(+), 146 deletions(-)
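
Every test in the diff below creates throwaway elements with create_element_size (imported from tests.testutils) and then checks which artifacts survive the configured cache quota. That helper is not part of this diff; as orientation, here is a minimal, hypothetical sketch of what it plausibly does, assuming an 'import' element wrapping a locally generated blob of the requested size (the real helper may differ in element kind and on-disk layout):

    # Hypothetical sketch of tests.testutils.create_element_size; the real
    # helper may use a different element kind or layout.
    import os

    import yaml  # assumption: a PyYAML-style dumper; BuildStream ships its own _yaml module


    def create_element_size(name, project_dir, element_path, dependencies, size):
        os.makedirs(os.path.join(project_dir, element_path), exist_ok=True)

        # Generate `size` bytes of incompressible data for the element to
        # import, so the resulting artifact occupies roughly `size` bytes
        # in the cache.
        data_path = name + "_data"
        with open(os.path.join(project_dir, data_path), "wb") as f:
            f.write(os.urandom(size))

        element = {
            "kind": "import",
            "sources": [{"kind": "local", "path": data_path}],
            "depends": dependencies,
        }
        with open(os.path.join(project_dir, element_path, name), "w") as f:
            yaml.safe_dump(element, f)
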
diff --git a/tests/artifactcache/expiry.py b/tests/artifactcache/expiry.py
index 9ede1a8d3..83577f0c6 100644
--- a/tests/artifactcache/expiry.py
+++ b/tests/artifactcache/expiry.py
@@ -32,10 +32,7 @@ from buildstream.testing import cli # pylint: disable=unused-import
from tests.testutils import create_element_size, wait_for_cache_granularity
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "expiry"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "expiry")
def get_cache_usage(directory):
@@ -59,115 +56,109 @@ def get_cache_usage(directory):
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_expires(cli, datafiles):
project = str(datafiles)
- element_path = 'elements'
+ element_path = "elements"
- cli.configure({
- 'cache': {
- 'quota': 10000000,
- }
- })
+ cli.configure({"cache": {"quota": 10000000,}})
# Create an element that uses almost the entire cache (an empty
# ostree cache starts at roughly 10KiB, so we need a bit of a
# buffer)
- create_element_size('target.bst', project, element_path, [], 6000000)
- res = cli.run(project=project, args=['build', 'target.bst'])
+ create_element_size("target.bst", project, element_path, [], 6000000)
+ res = cli.run(project=project, args=["build", "target.bst"])
res.assert_success()
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ assert cli.get_element_state(project, "target.bst") == "cached"
# Our cache should now be almost full. Let's create another
# artifact and see if we can cause buildstream to delete the old
# one.
- create_element_size('target2.bst', project, element_path, [], 6000000)
- res = cli.run(project=project, args=['build', 'target2.bst'])
+ create_element_size("target2.bst", project, element_path, [], 6000000)
+ res = cli.run(project=project, args=["build", "target2.bst"])
res.assert_success()
# Check that the correct element remains in the cache
- states = cli.get_element_states(project, ['target.bst', 'target2.bst'])
- assert states['target.bst'] != 'cached'
- assert states['target2.bst'] == 'cached'
+ states = cli.get_element_states(project, ["target.bst", "target2.bst"])
+ assert states["target.bst"] != "cached"
+ assert states["target2.bst"] == "cached"
# Ensure that we don't end up deleting the whole cache (or worse) if
# we try to store an artifact that is too large to fit in the quota.
-@pytest.mark.parametrize('size', [
- # Test an artifact that is obviously too large
- (500000),
- # Test an artifact that might be too large due to slight overhead
- # of storing stuff in ostree
- (399999)
-])
+@pytest.mark.parametrize(
+ "size",
+ [
+ # Test an artifact that is obviously too large
+ (500000),
+ # Test an artifact that might be too large due to slight overhead
+ # of storing stuff in ostree
+ (399999),
+ ],
+)
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_too_large(cli, datafiles, size):
project = str(datafiles)
- element_path = 'elements'
+ element_path = "elements"
- cli.configure({
- 'cache': {
- 'quota': 400000
- }
- })
+ cli.configure({"cache": {"quota": 400000}})
# Create an element whose artifact is too large
- create_element_size('target.bst', project, element_path, [], size)
- res = cli.run(project=project, args=['build', 'target.bst'])
+ create_element_size("target.bst", project, element_path, [], size)
+ res = cli.run(project=project, args=["build", "target.bst"])
res.assert_main_error(ErrorDomain.STREAM, None)
- res.assert_task_error(ErrorDomain.CAS, 'cache-too-full')
+ res.assert_task_error(ErrorDomain.CAS, "cache-too-full")
@pytest.mark.datafiles(DATA_DIR)
def test_expiry_order(cli, datafiles):
project = str(datafiles)
- element_path = 'elements'
- checkout = os.path.join(project, 'workspace')
+ element_path = "elements"
+ checkout = os.path.join(project, "workspace")
- cli.configure({
- 'cache': {
- 'quota': 9000000
- }
- })
+ cli.configure({"cache": {"quota": 9000000}})
# Create an artifact
- create_element_size('dep.bst', project, element_path, [], 2000000)
- res = cli.run(project=project, args=['build', 'dep.bst'])
+ create_element_size("dep.bst", project, element_path, [], 2000000)
+ res = cli.run(project=project, args=["build", "dep.bst"])
res.assert_success()
# Create another artifact
- create_element_size('unrelated.bst', project, element_path, [], 2000000)
- res = cli.run(project=project, args=['build', 'unrelated.bst'])
+ create_element_size("unrelated.bst", project, element_path, [], 2000000)
+ res = cli.run(project=project, args=["build", "unrelated.bst"])
res.assert_success()
# And build something else
- create_element_size('target.bst', project, element_path, [], 2000000)
- res = cli.run(project=project, args=['build', 'target.bst'])
+ create_element_size("target.bst", project, element_path, [], 2000000)
+ res = cli.run(project=project, args=["build", "target.bst"])
res.assert_success()
- create_element_size('target2.bst', project, element_path, [], 2000000)
- res = cli.run(project=project, args=['build', 'target2.bst'])
+ create_element_size("target2.bst", project, element_path, [], 2000000)
+ res = cli.run(project=project, args=["build", "target2.bst"])
res.assert_success()
wait_for_cache_granularity()
# Now extract dep.bst
- res = cli.run(project=project, args=['artifact', 'checkout', 'dep.bst', '--directory', checkout])
+ res = cli.run(project=project, args=["artifact", "checkout", "dep.bst", "--directory", checkout])
res.assert_success()
# Finally, build something that will cause the cache to overflow
- create_element_size('expire.bst', project, element_path, [], 2000000)
- res = cli.run(project=project, args=['build', 'expire.bst'])
+ create_element_size("expire.bst", project, element_path, [], 2000000)
+ res = cli.run(project=project, args=["build", "expire.bst"])
res.assert_success()
# While dep.bst was the first element to be created, it should not
# have been removed.
# Note that buildstream will reduce the cache to 50% of the
# original size - we therefore remove multiple elements.
- check_elements = [
- 'unrelated.bst', 'target.bst', 'target2.bst', 'dep.bst', 'expire.bst'
- ]
+ check_elements = ["unrelated.bst", "target.bst", "target2.bst", "dep.bst", "expire.bst"]
states = cli.get_element_states(project, check_elements)
- assert (tuple(states[element] for element in check_elements) ==
- ('buildable', 'buildable', 'buildable', 'cached', 'cached', ))
+ assert tuple(states[element] for element in check_elements) == (
+ "buildable",
+ "buildable",
+ "buildable",
+ "cached",
+ "cached",
+ )
# Ensure that we don't accidentally remove an artifact from something
@@ -176,28 +167,24 @@ def test_expiry_order(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_keep_dependencies(cli, datafiles):
project = str(datafiles)
- element_path = 'elements'
+ element_path = "elements"
- cli.configure({
- 'cache': {
- 'quota': 10000000
- }
- })
+ cli.configure({"cache": {"quota": 10000000}})
# Create a pretty big dependency
- create_element_size('dependency.bst', project, element_path, [], 5000000)
- res = cli.run(project=project, args=['build', 'dependency.bst'])
+ create_element_size("dependency.bst", project, element_path, [], 5000000)
+ res = cli.run(project=project, args=["build", "dependency.bst"])
res.assert_success()
# Now create some other unrelated artifact
- create_element_size('unrelated.bst', project, element_path, [], 4000000)
- res = cli.run(project=project, args=['build', 'unrelated.bst'])
+ create_element_size("unrelated.bst", project, element_path, [], 4000000)
+ res = cli.run(project=project, args=["build", "unrelated.bst"])
res.assert_success()
# Check that the correct element remains in the cache
- states = cli.get_element_states(project, ['dependency.bst', 'unrelated.bst'])
- assert states['dependency.bst'] == 'cached'
- assert states['unrelated.bst'] == 'cached'
+ states = cli.get_element_states(project, ["dependency.bst", "unrelated.bst"])
+ assert states["dependency.bst"] == "cached"
+ assert states["unrelated.bst"] == "cached"
# We try to build an element which depends on the LRU artifact,
# and could therefore fail if we didn't make sure dependencies
@@ -207,54 +194,45 @@ def test_keep_dependencies(cli, datafiles):
# duplicating artifacts (bad!) we need to make this equal in size
# or smaller than half the size of its dependencies.
#
- create_element_size('target.bst', project,
- element_path, ['dependency.bst'], 2000000)
- res = cli.run(project=project, args=['build', 'target.bst'])
+ create_element_size("target.bst", project, element_path, ["dependency.bst"], 2000000)
+ res = cli.run(project=project, args=["build", "target.bst"])
res.assert_success()
- states = cli.get_element_states(project, ['target.bst', 'unrelated.bst'])
- assert states['target.bst'] == 'cached'
- assert states['dependency.bst'] == 'cached'
- assert states['unrelated.bst'] != 'cached'
+ states = cli.get_element_states(project, ["target.bst", "unrelated.bst"])
+ assert states["target.bst"] == "cached"
+ assert states["dependency.bst"] == "cached"
+ assert states["unrelated.bst"] != "cached"
# Assert that we never delete a dependency required for a build tree
@pytest.mark.datafiles(DATA_DIR)
def test_never_delete_required(cli, datafiles):
project = str(datafiles)
- element_path = 'elements'
-
- cli.configure({
- 'cache': {
- 'quota': 10000000
- },
- 'scheduler': {
- 'fetchers': 1,
- 'builders': 1
- }
- })
+ element_path = "elements"
+
+ cli.configure({"cache": {"quota": 10000000}, "scheduler": {"fetchers": 1, "builders": 1}})
# Create a linear build tree
- create_element_size('dep1.bst', project, element_path, [], 8000000)
- create_element_size('dep2.bst', project, element_path, ['dep1.bst'], 8000000)
- create_element_size('dep3.bst', project, element_path, ['dep2.bst'], 8000000)
- create_element_size('target.bst', project, element_path, ['dep3.bst'], 8000000)
+ create_element_size("dep1.bst", project, element_path, [], 8000000)
+ create_element_size("dep2.bst", project, element_path, ["dep1.bst"], 8000000)
+ create_element_size("dep3.bst", project, element_path, ["dep2.bst"], 8000000)
+ create_element_size("target.bst", project, element_path, ["dep3.bst"], 8000000)
# Build dep1.bst, which should fit into the cache.
- res = cli.run(project=project, args=['build', 'dep1.bst'])
+ res = cli.run(project=project, args=["build", "dep1.bst"])
res.assert_success()
# We try to build this pipeline, but it's too big for the
# cache. Since all elements are required, the build should fail.
- res = cli.run(project=project, args=['build', 'target.bst'])
+ res = cli.run(project=project, args=["build", "target.bst"])
res.assert_main_error(ErrorDomain.STREAM, None)
- res.assert_task_error(ErrorDomain.CAS, 'cache-too-full')
+ res.assert_task_error(ErrorDomain.CAS, "cache-too-full")
- states = cli.get_element_states(project, ['target.bst'])
- assert states['dep1.bst'] == 'cached'
- assert states['dep2.bst'] != 'cached'
- assert states['dep3.bst'] != 'cached'
- assert states['target.bst'] != 'cached'
+ states = cli.get_element_states(project, ["target.bst"])
+ assert states["dep1.bst"] == "cached"
+ assert states["dep2.bst"] != "cached"
+ assert states["dep3.bst"] != "cached"
+ assert states["target.bst"] != "cached"
# Ensure that only valid cache quotas make it through the loading
@@ -267,32 +245,33 @@ def test_never_delete_required(cli, datafiles):
#
# If err_domain is 'success', then err_reason is unused.
#
-@pytest.mark.parametrize("quota,err_domain,err_reason", [
- # Valid configurations
- ("1", 'success', None),
- ("1K", 'success', None),
- ("50%", 'success', None),
- ("infinity", 'success', None),
- ("0", 'success', None),
- # Invalid configurations
- ("-1", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
- ("pony", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
- ("200%", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
-])
+@pytest.mark.parametrize(
+ "quota,err_domain,err_reason",
+ [
+ # Valid configurations
+ ("1", "success", None),
+ ("1K", "success", None),
+ ("50%", "success", None),
+ ("infinity", "success", None),
+ ("0", "success", None),
+ # Invalid configurations
+ ("-1", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
+ ("pony", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
+ ("200%", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
+ ],
+)
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_cache_quota(cli, datafiles, quota, err_domain, err_reason):
project = str(datafiles)
- os.makedirs(os.path.join(project, 'elements'))
+ os.makedirs(os.path.join(project, "elements"))
- cli.configure({
- 'cache': {
- 'quota': quota,
- },
- })
+ cli.configure(
+ {"cache": {"quota": quota,},}
+ )
- res = cli.run(project=project, args=['workspace', 'list'])
+ res = cli.run(project=project, args=["workspace", "list"])
- if err_domain == 'success':
+ if err_domain == "success":
res.assert_success()
else:
res.assert_main_error(err_domain, err_reason)
@@ -304,59 +283,47 @@ def test_invalid_cache_quota(cli, datafiles, quota, err_domain, err_reason):
@pytest.mark.datafiles(DATA_DIR)
def test_cleanup_first(cli, datafiles):
project = str(datafiles)
- element_path = 'elements'
+ element_path = "elements"
- cli.configure({
- 'cache': {
- 'quota': 10000000,
- }
- })
+ cli.configure({"cache": {"quota": 10000000,}})
# Create an element that uses almost the entire cache (an empty
# ostree cache starts at roughly 10KiB, so we need a bit of a
# buffer)
- create_element_size('target.bst', project, element_path, [], 8000000)
- res = cli.run(project=project, args=['build', 'target.bst'])
+ create_element_size("target.bst", project, element_path, [], 8000000)
+ res = cli.run(project=project, args=["build", "target.bst"])
res.assert_success()
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ assert cli.get_element_state(project, "target.bst") == "cached"
# Now configure with a smaller quota, create a situation
# where the cache must be cleaned up before building anything else.
#
# Fix the fetchers and builders just to ensure a predictable
# sequence of events (although it does not affect this test)
- cli.configure({
- 'cache': {
- 'quota': 5000000,
- },
- 'scheduler': {
- 'fetchers': 1,
- 'builders': 1
- }
- })
+ cli.configure({"cache": {"quota": 5000000,}, "scheduler": {"fetchers": 1, "builders": 1}})
# Our cache is now more than full; BuildStream must clean up
# before it can build target2.bst
- create_element_size('target2.bst', project, element_path, [], 4000000)
- res = cli.run(project=project, args=['build', 'target2.bst'])
+ create_element_size("target2.bst", project, element_path, [], 4000000)
+ res = cli.run(project=project, args=["build", "target2.bst"])
res.assert_success()
# Check that the correct element remains in the cache
- states = cli.get_element_states(project, ['target.bst', 'target2.bst'])
- assert states['target.bst'] != 'cached'
- assert states['target2.bst'] == 'cached'
+ states = cli.get_element_states(project, ["target.bst", "target2.bst"])
+ assert states["target.bst"] != "cached"
+ assert states["target2.bst"] == "cached"
@pytest.mark.datafiles(DATA_DIR)
def test_cache_usage_monitor(cli, tmpdir, datafiles):
project = str(datafiles)
- element_path = 'elements'
+ element_path = "elements"
assert get_cache_usage(cli.directory) == 0
ELEMENT_SIZE = 1000000
- create_element_size('target.bst', project, element_path, [], ELEMENT_SIZE)
- res = cli.run(project=project, args=['build', 'target.bst'])
+ create_element_size("target.bst", project, element_path, [], ELEMENT_SIZE)
+ res = cli.run(project=project, args=["build", "target.bst"])
res.assert_success()
assert get_cache_usage(cli.directory) >= ELEMENT_SIZE
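
The quota strings exercised by test_invalid_cache_quota above ('1', '1K', '50%', 'infinity', and '0' accepted; '-1', 'pony', and '200%' rejected) outline the grammar BuildStream accepts for the cache.quota setting. A rough, hypothetical parser sketch consistent with those cases (not BuildStream's actual implementation):

    # Hypothetical quota-spec parser matching the accept/reject cases in
    # test_invalid_cache_quota; BuildStream's real parser may differ in detail.
    import math

    _SUFFIXES = {"K": 2 ** 10, "M": 2 ** 20, "G": 2 ** 30, "T": 2 ** 40}


    def parse_quota(spec, disk_size):
        if spec == "infinity":
            return math.inf
        if spec.endswith("%"):
            # Percentages are taken relative to the size of the filesystem
            # holding the cache (assumption).
            percent = int(spec[:-1])
            if not 0 < percent <= 100:
                raise ValueError("quota percentage out of range: {}".format(spec))
            return disk_size * percent // 100
        multiplier = _SUFFIXES.get(spec[-1], 1)
        number = spec[:-1] if spec[-1] in _SUFFIXES else spec
        value = int(number)  # raises ValueError for 'pony' and similar junk
        if value < 0:
            raise ValueError("negative quota: {}".format(spec))
        return value * multiplier

Note that the parametrize table treats '0' as valid configuration; the sketch accordingly accepts it as a literal zero-byte quota.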