author    Tristan Maat <tristan.maat@codethink.co.uk>  2018-07-17 13:07:50 +0100
committer Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>  2018-07-18 15:07:24 +0900
commit    6ff12e5e91bacd64159547fd853c2153d951e1fe (patch)
tree      a98131e519039fdcfe97b7cf4b6d2648af570030
parent    dc17de387961ba54c4742ba504ba7314c7da16aa (diff)
download  buildstream-6ff12e5e91bacd64159547fd853c2153d951e1fe.tar.gz
tests/artifactcache/expiry.py: Add expiry tests
-rw-r--r--  tests/artifactcache/expiry.py            264
-rw-r--r--  tests/artifactcache/expiry/project.conf   14
2 files changed, 278 insertions, 0 deletions
diff --git a/tests/artifactcache/expiry.py b/tests/artifactcache/expiry.py
new file mode 100644
index 000000000..4c741054b
--- /dev/null
+++ b/tests/artifactcache/expiry.py
@@ -0,0 +1,264 @@
+import os
+
+import pytest
+
+from buildstream import _yaml
+from buildstream._exceptions import ErrorDomain, LoadErrorReason
+
+from tests.testutils import cli
+
+
+DATA_DIR = os.path.join(
+ os.path.dirname(os.path.realpath(__file__)),
+ "expiry"
+)
+
+
+def create_element(name, path, dependencies, size):
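+    """Create an import element whose artifact holds `size` random bytes.
+
+    The element file `name` and its `name + '_data'` payload are written
+    into the directory `path`, and `dependencies` is a list of element
+    names that the new element depends on.
+    """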
+ os.makedirs(path, exist_ok=True)
+
+ # Create a file to be included in this element's artifact
+ with open(os.path.join(path, name + '_data'), 'wb+') as f:
+ f.write(os.urandom(size))
+
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ {
+ 'kind': 'local',
+ 'path': os.path.join(path, name + '_data')
+ }
+ ],
+ 'depends': dependencies
+ }
+ _yaml.dump(element, os.path.join(path, name))
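+
+
+# For illustration (a sketch, not verbatim output): calling
+# create_element('target.bst', element_path, ['dep.bst'], 1024)
+# writes 1024 random bytes to <element_path>/target.bst_data and an
+# element declaration along these lines, with the source path
+# expanded:
+#
+#   kind: import
+#   sources:
+#   - kind: local
+#     path: target.bst_data
+#   depends:
+#   - dep.bst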
+
+
+# Ensure that the cache successfully removes an old artifact if we do
+# not have enough space left.
+@pytest.mark.datafiles(DATA_DIR)
+def test_artifact_expires(cli, datafiles, tmpdir):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ element_path = os.path.join(project, 'elements')
+
+ cli.configure({
+ 'cache': {
+ 'quota': 10000000,
+ }
+ })
+
+    # Create an element that uses almost the entire cache (an empty
+    # ostree cache starts at roughly 10KiB, so we need to leave a
+    # little headroom)
+ create_element('target.bst', element_path, [], 6000000)
+ res = cli.run(project=project, args=['build', 'target.bst'])
+ res.assert_success()
+
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
+
+    # Our cache should now be almost full. Let's create another
+    # artifact and see if we can cause BuildStream to expire the
+    # old one.
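+    # (Two 6,000,000 byte artifacts cannot both fit in a 10,000,000
+    # byte quota, so caching target2.bst must expire target.bst.)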
+ create_element('target2.bst', element_path, [], 6000000)
+ res = cli.run(project=project, args=['build', 'target2.bst'])
+ res.assert_success()
+
+ # Check that the correct element remains in the cache
+ assert cli.get_element_state(project, 'target.bst') != 'cached'
+ assert cli.get_element_state(project, 'target2.bst') == 'cached'
+
+
+# Ensure that we don't end up deleting the whole cache (or worse) if
+# we try to store an artifact that is too large to fit in the quota.
+@pytest.mark.parametrize('size', [
+ # Test an artifact that is obviously too large
+ (500000),
+    # Test an artifact that might be too large due to the slight
+    # overhead of ostree storage
+ (399999)
+])
+@pytest.mark.datafiles(DATA_DIR)
+def test_artifact_too_large(cli, datafiles, tmpdir, size):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ element_path = os.path.join(project, 'elements')
+
+ cli.configure({
+ 'cache': {
+ 'quota': 400000
+ }
+ })
+
+ # Create an element whose artifact is too large
+ create_element('target.bst', element_path, [], size)
+ res = cli.run(project=project, args=['build', 'target.bst'])
+ res.assert_main_error(ErrorDomain.STREAM, None)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_expiry_order(cli, datafiles, tmpdir):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ element_path = os.path.join(project, 'elements')
+ checkout = os.path.join(project, 'workspace')
+
+ cli.configure({
+ 'cache': {
+ 'quota': 9000000
+ }
+ })
+
+ # Create an artifact
+ create_element('dep.bst', element_path, [], 2000000)
+ res = cli.run(project=project, args=['build', 'dep.bst'])
+ res.assert_success()
+
+ # Create another artifact
+ create_element('unrelated.bst', element_path, [], 2000000)
+ res = cli.run(project=project, args=['build', 'unrelated.bst'])
+ res.assert_success()
+
+ # And build something else
+ create_element('target.bst', element_path, [], 2000000)
+ res = cli.run(project=project, args=['build', 'target.bst'])
+ res.assert_success()
+
+ create_element('target2.bst', element_path, [], 2000000)
+ res = cli.run(project=project, args=['build', 'target2.bst'])
+ res.assert_success()
+
+    # Now checkout dep.bst, making it the most recently used artifact
+ res = cli.run(project=project, args=['checkout', 'dep.bst', checkout])
+ res.assert_success()
+
+ # Finally, build something that will cause the cache to overflow
+ create_element('expire.bst', element_path, [], 2000000)
+ res = cli.run(project=project, args=['build', 'expire.bst'])
+ res.assert_success()
+
+    # While dep.bst was the first element to be built, the checkout
+    # above made it the most recently used artifact, so it should not
+    # have been removed.
+    # Note that BuildStream reduces the cache to 50% of the quota when
+    # it overflows; with five ~2,000,000 byte artifacts against a
+    # 9,000,000 byte quota, the three least recently used artifacts
+    # are expired.
+
+ assert (tuple(cli.get_element_state(project, element) for element in
+ ('unrelated.bst', 'target.bst', 'target2.bst', 'dep.bst', 'expire.bst')) ==
+ ('buildable', 'buildable', 'buildable', 'cached', 'cached', ))
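+
+
+# The expiry behaviour asserted above can be sketched roughly as
+# follows. This is an illustration of the policy the test expects,
+# not BuildStream's actual implementation: artifacts are evicted in
+# least-recently-used order until usage drops to the 50% target.
+def _sketch_expire_lru(artifacts, target):
+    # artifacts: (name, size) pairs, least recently used first
+    usage = sum(size for _, size in artifacts)
+    kept = []
+    for name, size in artifacts:
+        if usage > target:
+            usage -= size  # evict this artifact
+        else:
+            kept.append((name, size))
+    return kept
+
+
+# With the five ~2,000,000 byte artifacts above and a 4,500,000 byte
+# target (half of the 9,000,000 quota), only dep.bst and expire.bst
+# survive.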
+
+
+# Ensure that we don't accidentally remove an artifact from something
+# in the current build pipeline, because that would be embarrassing,
+# wouldn't it?
+@pytest.mark.datafiles(DATA_DIR)
+def test_keep_dependencies(cli, datafiles, tmpdir):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ element_path = os.path.join(project, 'elements')
+
+ cli.configure({
+ 'cache': {
+ 'quota': 10000000
+ }
+ })
+
+ # Create a pretty big dependency
+ create_element('dependency.bst', element_path, [], 5000000)
+ res = cli.run(project=project, args=['build', 'dependency.bst'])
+ res.assert_success()
+
+ # Now create some other unrelated artifact
+ create_element('unrelated.bst', element_path, [], 4000000)
+ res = cli.run(project=project, args=['build', 'unrelated.bst'])
+ res.assert_success()
+
+ # Check that the correct element remains in the cache
+ assert cli.get_element_state(project, 'dependency.bst') == 'cached'
+ assert cli.get_element_state(project, 'unrelated.bst') == 'cached'
+
+    # We now build an element which depends on the LRU artifact
+    # (dependency.bst), which could fail if we didn't make sure that
+    # the dependencies of the current build plan are never removed.
+    #
+    # Since some artifact caches may implement weak cache keys by
+    # duplicating artifacts (bad!), we make this element at most half
+    # the size of its dependencies.
+    #
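+    # (dependency.bst is 5,000,000 bytes, so target.bst at 2,000,000
+    # bytes stays safely under the 2,500,000 byte bound.)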
+ create_element('target.bst', element_path, ['dependency.bst'], 2000000)
+ res = cli.run(project=project, args=['build', 'target.bst'])
+ res.assert_success()
+
+ assert cli.get_element_state(project, 'unrelated.bst') != 'cached'
+ assert cli.get_element_state(project, 'dependency.bst') == 'cached'
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
+
+
+# Assert that we never delete a dependency required for a build tree
+@pytest.mark.datafiles(DATA_DIR)
+def test_never_delete_dependencies(cli, datafiles, tmpdir):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ element_path = os.path.join(project, 'elements')
+
+ cli.configure({
+ 'cache': {
+ 'quota': 10000000
+ }
+ })
+
+ # Create a build tree
+ create_element('dependency.bst', element_path, [], 8000000)
+ create_element('related.bst', element_path, ['dependency.bst'], 8000000)
+ create_element('target.bst', element_path, ['related.bst'], 8000000)
+ create_element('target2.bst', element_path, ['target.bst'], 8000000)
+
+    # We try to build this pipeline, but it's too big for the cache:
+    # four 8,000,000 byte artifacts cannot fit in a 10,000,000 byte
+    # quota. Since every element is required for the build, the build
+    # should fail instead of expiring any of them.
+ res = cli.run(project=project, args=['build', 'target2.bst'])
+ res.assert_main_error(ErrorDomain.STREAM, None)
+
+ assert cli.get_element_state(project, 'dependency.bst') == 'cached'
+
+ # This is *technically* above the cache limit. BuildStream accepts
+ # some fuzziness, since it's hard to assert that we don't create
+ # an artifact larger than the cache quota. We would have to remove
+ # the artifact after-the-fact, but since it is required for the
+ # current build and nothing broke yet, it's nicer to keep it
+ # around.
+ #
+    # This scenario is quite unlikely, and the cache overflow will be
+    # resolved as soon as the user frees up some space anyway.
+ #
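+    # (Here, dependency.bst and related.bst together occupy roughly
+    # 8,000,000 + 8,000,000 = 16,000,000 bytes against a 10,000,000
+    # byte quota.)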
+ assert cli.get_element_state(project, 'related.bst') == 'cached'
+
+ assert cli.get_element_state(project, 'target.bst') != 'cached'
+ assert cli.get_element_state(project, 'target2.bst') != 'cached'
+
+
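+# A cache quota may be expressed in bytes ('1'), with a binary size
+# suffix ('1K'), as a percentage ('50%', presumably of available disk
+# space), or as 'infinity'. The sketch below illustrates the kind of
+# validation the following test expects; it is not BuildStream's
+# actual parser, and the binary unit sizes are an assumption made
+# for illustration.
+def _sketch_parse_quota(value):
+    if value == 'infinity':
+        return None  # unlimited
+    if value.endswith('%'):
+        percent = int(value[:-1])
+        if not 0 <= percent <= 100:
+            raise ValueError(value)  # rejects '200%'
+        return percent
+    units = {'K': 1024, 'M': 1024 ** 2, 'G': 1024 ** 3}
+    if value and value[-1] in units:
+        return int(value[:-1]) * units[value[-1]]
+    if not value.isdigit():
+        raise ValueError(value)  # rejects '-1' and 'pony'
+    return int(value)
+
+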
+# Ensure that only valid cache quotas make it through the loading
+# process.
+@pytest.mark.parametrize("quota,success", [
+ ("1", True),
+ ("1K", True),
+ ("50%", True),
+ ("infinity", True),
+ ("0", True),
+ ("-1", False),
+ ("pony", False),
+ ("200%", False)
+])
+@pytest.mark.datafiles(DATA_DIR)
+def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, success):
+ project = os.path.join(datafiles.dirname, datafiles.basename)
+ element_path = os.path.join(project, 'elements')
+
+ cli.configure({
+ 'cache': {
+ 'quota': quota,
+ }
+ })
+
+ res = cli.run(project=project, args=['workspace', 'list'])
+ if success:
+ res.assert_success()
+ else:
+ res.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
diff --git a/tests/artifactcache/expiry/project.conf b/tests/artifactcache/expiry/project.conf
new file mode 100644
index 000000000..18db7dab7
--- /dev/null
+++ b/tests/artifactcache/expiry/project.conf
@@ -0,0 +1,14 @@
+# Project config for cache expiry test
+name: test
+element-path: elements
+aliases:
+ project_dir: file://{project_dir}
+options:
+ linux:
+ type: bool
+ description: Whether to expect a linux platform
+ default: True
+split-rules:
+ test:
+ - |
+ /tests/*