Diffstat (limited to 'tests')
-rw-r--r--  tests/artifactcache/junctions.py                   |  98
-rw-r--r--  tests/artifactcache/tar.py                         |  82
-rw-r--r--  tests/cachekey/project/elements/build1.expected    |   2
-rw-r--r--  tests/cachekey/project/elements/build2.expected    |   2
-rw-r--r--  tests/cachekey/project/elements/compose1.expected  |   2
-rw-r--r--  tests/cachekey/project/elements/compose2.expected  |   2
-rw-r--r--  tests/cachekey/project/elements/compose3.expected  |   2
-rw-r--r--  tests/cachekey/project/elements/compose4.expected  |   2
-rw-r--r--  tests/cachekey/project/elements/compose5.expected  |   2
-rw-r--r--  tests/cachekey/project/elements/import1.expected   |   2
-rw-r--r--  tests/cachekey/project/elements/import2.expected   |   2
-rw-r--r--  tests/cachekey/project/elements/import3.expected   |   2
-rw-r--r--  tests/cachekey/project/elements/script1.expected   |   2
-rw-r--r--  tests/cachekey/project/sources/bzr1.expected       |   2
-rw-r--r--  tests/cachekey/project/sources/git1.expected       |   2
-rw-r--r--  tests/cachekey/project/sources/git2.expected       |   2
-rw-r--r--  tests/cachekey/project/sources/local1.expected     |   2
-rw-r--r--  tests/cachekey/project/sources/local2.expected     |   2
-rw-r--r--  tests/cachekey/project/sources/ostree1.expected    |   2
-rw-r--r--  tests/cachekey/project/sources/patch1.expected     |   2
-rw-r--r--  tests/cachekey/project/sources/patch2.expected     |   2
-rw-r--r--  tests/cachekey/project/sources/patch3.expected     |   2
-rw-r--r--  tests/cachekey/project/sources/tar1.expected       |   2
-rw-r--r--  tests/cachekey/project/sources/tar2.expected       |   2
-rw-r--r--  tests/cachekey/project/sources/zip1.expected       |   2
-rw-r--r--  tests/cachekey/project/sources/zip2.expected       |   2
-rw-r--r--  tests/cachekey/project/target.expected             |   2
-rw-r--r--  tests/frontend/pull.py                             | 377
-rw-r--r--  tests/frontend/push.py                             | 501
-rw-r--r--  tests/integration/workspace.py                     |   1
-rw-r--r--  tests/testutils/artifactshare.py                   |  98
-rw-r--r--  tests/testutils/runcli.py                          |   7
32 files changed, 557 insertions(+), 657 deletions(-)
diff --git a/tests/artifactcache/junctions.py b/tests/artifactcache/junctions.py
index 12423f937..378d007a0 100644
--- a/tests/artifactcache/junctions.py
+++ b/tests/artifactcache/junctions.py
@@ -2,7 +2,6 @@ import os
import shutil
import pytest
from tests.testutils import cli, create_artifact_share
-from tests.testutils.site import IS_LINUX
from buildstream import _yaml
@@ -37,60 +36,53 @@ def project_set_artifacts(project, url):
_yaml.dump(_yaml.node_sanitize(project_config), filename=project_conf_file)
-@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_push_pull(cli, tmpdir, datafiles):
project = os.path.join(str(datafiles), 'foo')
base_project = os.path.join(str(project), 'base')
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare-foo'))
- base_share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare-base'))
-
- # First build it without the artifact cache configured
- result = cli.run(project=project, args=['build', 'target.bst'])
- assert result.exit_code == 0
-
- # Assert that we are now cached locally
- state = cli.get_element_state(project, 'target.bst')
- assert state == 'cached'
- state = cli.get_element_state(base_project, 'target.bst')
- assert state == 'cached'
-
- project_set_artifacts(project, share.repo)
- project_set_artifacts(base_project, base_share.repo)
-
- # Now try bst push
- result = cli.run(project=project, args=['push', '--deps', 'all', 'target.bst'])
- assert result.exit_code == 0
-
- # And finally assert that the artifacts are in the right shares
- assert_shared(cli, share, 'foo', project, 'target.bst')
- assert_shared(cli, base_share, 'base', base_project, 'target.bst')
-
- # Make sure we update the summary in our artifact shares,
- # we dont have a real server around to do it
- #
- share.update_summary()
- base_share.update_summary()
-
- # Now we've pushed, delete the user's local artifact cache
- # directory and try to redownload it from the share
- #
- artifacts = os.path.join(cli.directory, 'artifacts')
- shutil.rmtree(artifacts)
-
- # Assert that nothing is cached locally anymore
- state = cli.get_element_state(project, 'target.bst')
- assert state != 'cached'
- state = cli.get_element_state(base_project, 'target.bst')
- assert state != 'cached'
-
- # Now try bst pull
- result = cli.run(project=project, args=['pull', '--deps', 'all', 'target.bst'])
- assert result.exit_code == 0
-
- # And assert that they are again in the local cache, without having built
- state = cli.get_element_state(project, 'target.bst')
- assert state == 'cached'
- state = cli.get_element_state(base_project, 'target.bst')
- assert state == 'cached'
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare-foo')) as share,\
+ create_artifact_share(os.path.join(str(tmpdir), 'artifactshare-base')) as base_share:
+
+ # First build it without the artifact cache configured
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ assert result.exit_code == 0
+
+ # Assert that we are now cached locally
+ state = cli.get_element_state(project, 'target.bst')
+ assert state == 'cached'
+ state = cli.get_element_state(base_project, 'target.bst')
+ assert state == 'cached'
+
+ project_set_artifacts(project, share.repo)
+ project_set_artifacts(base_project, base_share.repo)
+
+ # Now try bst push
+ result = cli.run(project=project, args=['push', '--deps', 'all', 'target.bst'])
+ assert result.exit_code == 0
+
+ # And finally assert that the artifacts are in the right shares
+ assert_shared(cli, share, 'foo', project, 'target.bst')
+ assert_shared(cli, base_share, 'base', base_project, 'target.bst')
+
+ # Now we've pushed, delete the user's local artifact cache
+ # directory and try to redownload it from the share
+ #
+ artifacts = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifacts)
+
+ # Assert that nothing is cached locally anymore
+ state = cli.get_element_state(project, 'target.bst')
+ assert state != 'cached'
+ state = cli.get_element_state(base_project, 'target.bst')
+ assert state != 'cached'
+
+ # Now try bst pull
+ result = cli.run(project=project, args=['pull', '--deps', 'all', 'target.bst'])
+ assert result.exit_code == 0
+
+ # And assert that they are again in the local cache, without having built
+ state = cli.get_element_state(project, 'target.bst')
+ assert state == 'cached'
+ state = cli.get_element_state(base_project, 'target.bst')
+ assert state == 'cached'
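The call sites above now wrap both shares in a single with statement, and the share.update_summary() calls are gone, which suggests create_artifact_share() has become a context manager that owns the share's whole lifetime. A minimal self-contained sketch of that shape, using a hypothetical _Share stand-in (the real helper lives in tests/testutils/artifactshare.py, which this same commit changes):

    import os
    import shutil
    from contextlib import contextmanager


    class _Share:
        # Stand-in for the real share object; the tests above only read
        # the .repo attribute as the cache URL they configure bst with.
        def __init__(self, directory):
            self.directory = directory
            self.repo = directory
            os.makedirs(directory, exist_ok=True)

        def close(self):
            # Tear down whatever the share set up (the real helper would
            # also stop its server process here).
            shutil.rmtree(self.directory, ignore_errors=True)


    @contextmanager
    def create_artifact_share(directory):
        share = _Share(directory)
        try:
            yield share          # the test body runs here
        finally:
            share.close()        # cleanup happens even if the test fails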
diff --git a/tests/artifactcache/tar.py b/tests/artifactcache/tar.py
deleted file mode 100644
index ef39be31c..000000000
--- a/tests/artifactcache/tar.py
+++ /dev/null
@@ -1,82 +0,0 @@
-import os
-import tarfile
-import tempfile
-from contextlib import ExitStack
-
-import pytest
-
-from buildstream._artifactcache.tarcache import _Tar
-from buildstream import utils, ProgramNotFoundError
-
-
-# Test that it 'works' - this may be equivalent to test_archive_no_tar()
-# on some systems.
-def test_archive_default():
- with ExitStack() as stack:
- src = stack.enter_context(tempfile.TemporaryDirectory())
- tar_dir = stack.enter_context(tempfile.TemporaryDirectory())
- scratch = stack.enter_context(tempfile.TemporaryDirectory())
- test_file = stack.enter_context(open(os.path.join(src, 'test'), 'a'))
- test_file.write('Test')
-
- _Tar.archive(os.path.join(tar_dir, 'test.tar'), '.', src)
-
- with tarfile.open(os.path.join(tar_dir, 'test.tar')) as tar:
- tar.extractall(path=scratch)
-
- assert os.listdir(scratch) == os.listdir(src)
-
-
-def test_archive_no_tar():
- # Modify the path to exclude 'tar'
- old_path = os.environ.get('PATH')
- os.environ['PATH'] = ''
-
- # Ensure we can't find 'tar' or 'gtar'
- try:
- for tar in ['gtar', 'tar']:
- with pytest.raises(ProgramNotFoundError):
- utils.get_host_tool(tar)
-
- # Run the same test as before, this time 'tar' should not be available
- test_archive_default()
-
- # Reset the environment
- finally:
- os.environ['PATH'] = old_path
-
-
-# Same thing as test_archive_default()
-def test_extract_default():
- with ExitStack() as stack:
- src = stack.enter_context(tempfile.TemporaryDirectory())
- tar_dir = stack.enter_context(tempfile.TemporaryDirectory())
- scratch = stack.enter_context(tempfile.TemporaryDirectory())
- test_file = stack.enter_context(open(os.path.join(src, 'test'), 'a'))
- test_file.write('Test')
-
- with tarfile.open(os.path.join(tar_dir, 'test.tar'), 'a:') as tar:
- tar.add(src, 'contents')
-
- _Tar.extract(os.path.join(tar_dir, 'test.tar'), scratch)
-
- assert os.listdir(os.path.join(scratch, 'contents')) == os.listdir(src)
-
-
-def test_extract_no_tar():
- # Modify the path to exclude 'tar'
- old_path = os.environ.get('PATH')
- os.environ['PATH'] = ''
-
- # Ensure we can't find 'tar' or 'gtar'
- for tar in ['gtar', 'tar']:
- with pytest.raises(ProgramNotFoundError):
- utils.get_host_tool(tar)
-
- # Run the same test as before, this time 'tar' should not be available
- try:
- test_extract_default()
-
- # Reset the environment
- finally:
- os.environ['PATH'] = old_path
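The deleted tests above emptied PATH by hand and restored it in a finally block to prove that 'tar' and 'gtar' cannot be found. For reference, unittest.mock.patch.dict gives the same isolation and restores the environment automatically; a small sketch using shutil.which as a stand-in for utils.get_host_tool (which raises ProgramNotFoundError rather than returning None):

    import os
    import shutil
    from unittest import mock


    def host_tool_is_missing(name):
        # With PATH emptied for the duration of the block, lookup must fail;
        # patch.dict restores the original environment on exit.
        with mock.patch.dict(os.environ, {'PATH': ''}):
            return shutil.which(name) is None


    assert host_tool_is_missing('tar')
    assert host_tool_is_missing('gtar')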
diff --git a/tests/cachekey/project/elements/build1.expected b/tests/cachekey/project/elements/build1.expected
index ab8adf225..7c5af6054 100644
--- a/tests/cachekey/project/elements/build1.expected
+++ b/tests/cachekey/project/elements/build1.expected
@@ -1 +1 @@
-93594f53df6c599598ea9c1d5101a8f7e57bbd82cac521494ce680e6f84de67d \ No newline at end of file
+3db51572837956b28ffbc4aabdce659b4a1d91dcbb8b75954210346959ed5fa9 \ No newline at end of file
diff --git a/tests/cachekey/project/elements/build2.expected b/tests/cachekey/project/elements/build2.expected
index 94990176a..e1bd91218 100644
--- a/tests/cachekey/project/elements/build2.expected
+++ b/tests/cachekey/project/elements/build2.expected
@@ -1 +1 @@
-3ae596efed1126d440780ef33d2144a06cb7215a778c4f59b12a2f77fa0ee3b2 \ No newline at end of file
+bcde6fc389b7d8bb7788989b68f68653ab8ed658117012c0611f218f4a585d38 \ No newline at end of file
diff --git a/tests/cachekey/project/elements/compose1.expected b/tests/cachekey/project/elements/compose1.expected
index e912fbe62..86a2a2f2a 100644
--- a/tests/cachekey/project/elements/compose1.expected
+++ b/tests/cachekey/project/elements/compose1.expected
@@ -1 +1 @@
-d67fccd867504706010f9f36b07cd35b3129e9d79ae287c3dc2bf9ec03e309ea \ No newline at end of file
+6736bbcc055e1801a19288d3a64b622e0b9223164f8ad2ce842b18a4eaa0cfb9 \ No newline at end of file
diff --git a/tests/cachekey/project/elements/compose2.expected b/tests/cachekey/project/elements/compose2.expected
index 4c3b901b1..a811cc421 100644
--- a/tests/cachekey/project/elements/compose2.expected
+++ b/tests/cachekey/project/elements/compose2.expected
@@ -1 +1 @@
-743eaac4f261d389d2c12fb9c8605eb70d5e42c8a0bccadef9f651dd137cedde \ No newline at end of file
+9294428a0b5c0d44fdb3ab0f883ee87f9e62d51f96c7de1e5e81ed5e3934d403 \ No newline at end of file
diff --git a/tests/cachekey/project/elements/compose3.expected b/tests/cachekey/project/elements/compose3.expected
index 85843a190..ce28c853a 100644
--- a/tests/cachekey/project/elements/compose3.expected
+++ b/tests/cachekey/project/elements/compose3.expected
@@ -1 +1 @@
-5b401864d1d91809f59c258d37f78b410b244fcb20cab4bd0c1da17257515643 \ No newline at end of file
+4f1569b9a6317280e6299f9f7f706a6adcc89603030cde51d529dd6dfe2851be \ No newline at end of file
diff --git a/tests/cachekey/project/elements/compose4.expected b/tests/cachekey/project/elements/compose4.expected
index 38060ae5a..8d95a3d87 100644
--- a/tests/cachekey/project/elements/compose4.expected
+++ b/tests/cachekey/project/elements/compose4.expected
@@ -1 +1 @@
-450664eb37302835e3289b95dfb38cab0b24e6c30c4b7b59a5dc1b5a7f1f01e0 \ No newline at end of file
+4c83744bec21c8c38bce2d48396b8df1eb4df7b2f155424016bd012743efd808 \ No newline at end of file
diff --git a/tests/cachekey/project/elements/compose5.expected b/tests/cachekey/project/elements/compose5.expected
index 2f6307ca9..183534aa4 100644
--- a/tests/cachekey/project/elements/compose5.expected
+++ b/tests/cachekey/project/elements/compose5.expected
@@ -1 +1 @@
-fedaf8a315f8a9fb94d11c6f74a409188ff9397eac710e5ba6d9532162bd6973 \ No newline at end of file
+97385aa2192ef0295dd2601e78491d8bdf6b74e98938d0f8011747c2caf3a5c6 \ No newline at end of file
diff --git a/tests/cachekey/project/elements/import1.expected b/tests/cachekey/project/elements/import1.expected
index 4669ed485..387da88b7 100644
--- a/tests/cachekey/project/elements/import1.expected
+++ b/tests/cachekey/project/elements/import1.expected
@@ -1 +1 @@
-20582fab199a8d110fd65b5616f45bc08ae3eccc7bfe8b94ba987f3986b69ce5 \ No newline at end of file
+99c8f61d415de3a6c96e48299fda5554bf4bbaf56bb4b5acd85861ab37ede0c3 \ No newline at end of file
diff --git a/tests/cachekey/project/elements/import2.expected b/tests/cachekey/project/elements/import2.expected
index 2b071ac53..0893dde2a 100644
--- a/tests/cachekey/project/elements/import2.expected
+++ b/tests/cachekey/project/elements/import2.expected
@@ -1 +1 @@
-4fcc04697288b0fdc0785b7350c308c3b40177d2ad0ec47ee4e59afbbe7634a9 \ No newline at end of file
+5f5884c5e4bb7066eede3a135e49753ec06b757a30983513a7a4e0cdd2a8f402 \ No newline at end of file
diff --git a/tests/cachekey/project/elements/import3.expected b/tests/cachekey/project/elements/import3.expected
index 538daae37..6d0fe864a 100644
--- a/tests/cachekey/project/elements/import3.expected
+++ b/tests/cachekey/project/elements/import3.expected
@@ -1 +1 @@
-203a3749724d461a237f22ff261870616cedfe34bfb59603c935fd05644059b3 \ No newline at end of file
+e11f93ec629bc3556e15bd374e67a0b5e34350e1e9b1d1f98f8de984a27bbead \ No newline at end of file
diff --git a/tests/cachekey/project/elements/script1.expected b/tests/cachekey/project/elements/script1.expected
index cf1213901..e8d5b24c4 100644
--- a/tests/cachekey/project/elements/script1.expected
+++ b/tests/cachekey/project/elements/script1.expected
@@ -1 +1 @@
-93de2701d76db777a560e1e531883b7922b07683d4e7c14ea26b0500946f2c62 \ No newline at end of file
+d8388b756de5c8441375ba32cedd9560a65a8f9a85e41038837d342c8fb10004 \ No newline at end of file
diff --git a/tests/cachekey/project/sources/bzr1.expected b/tests/cachekey/project/sources/bzr1.expected
index 0e2a851e5..ca11c959a 100644
--- a/tests/cachekey/project/sources/bzr1.expected
+++ b/tests/cachekey/project/sources/bzr1.expected
@@ -1 +1 @@
-8509b1e54cc11bc2681425a11498037ad3841295c26fec86ff61a6b09d83e10a \ No newline at end of file
+519ee88fcca7fea091245713ec68baa048e3d876ea22559d4b2035d3d2ab2494 \ No newline at end of file
diff --git a/tests/cachekey/project/sources/git1.expected b/tests/cachekey/project/sources/git1.expected
index 07fc21cfd..85dc88500 100644
--- a/tests/cachekey/project/sources/git1.expected
+++ b/tests/cachekey/project/sources/git1.expected
@@ -1 +1 @@
-c1931acaea82971f1fc243dbe035a228c6103d52e09e618c7eda85f141c726cc \ No newline at end of file
+a5424aa7cc25f0ada9ac1245b33d55d078559ae6c50b10bea3db9acb964b058c \ No newline at end of file
diff --git a/tests/cachekey/project/sources/git2.expected b/tests/cachekey/project/sources/git2.expected
index b08e08cf7..9a643c000 100644
--- a/tests/cachekey/project/sources/git2.expected
+++ b/tests/cachekey/project/sources/git2.expected
@@ -1 +1 @@
-6d1ee891d29e0af504ed59ccd46c653b74946d3778d7e941f4d8b6e68cf3ca50 \ No newline at end of file
+93bf7344c118664f0d7f2b8e5a6731b2a95de6df83ba7fa2a2ab28227b0b3e8b \ No newline at end of file
diff --git a/tests/cachekey/project/sources/local1.expected b/tests/cachekey/project/sources/local1.expected
index 4669ed485..387da88b7 100644
--- a/tests/cachekey/project/sources/local1.expected
+++ b/tests/cachekey/project/sources/local1.expected
@@ -1 +1 @@
-20582fab199a8d110fd65b5616f45bc08ae3eccc7bfe8b94ba987f3986b69ce5 \ No newline at end of file
+99c8f61d415de3a6c96e48299fda5554bf4bbaf56bb4b5acd85861ab37ede0c3 \ No newline at end of file
diff --git a/tests/cachekey/project/sources/local2.expected b/tests/cachekey/project/sources/local2.expected
index 4a0796ec2..598fe73ba 100644
--- a/tests/cachekey/project/sources/local2.expected
+++ b/tests/cachekey/project/sources/local2.expected
@@ -1 +1 @@
-527685945072d971075edf6e4a06ce7146ef1cd023da0001c6e1613d525c76aa \ No newline at end of file
+780a7e62bbe5bc0f975ec6cd749de6a85f9080d3628f16f881605801597916a7 \ No newline at end of file
diff --git a/tests/cachekey/project/sources/ostree1.expected b/tests/cachekey/project/sources/ostree1.expected
index 5b4bf12e9..0e8e83014 100644
--- a/tests/cachekey/project/sources/ostree1.expected
+++ b/tests/cachekey/project/sources/ostree1.expected
@@ -1 +1 @@
-b78e79c5ba297cf5cb41d6eaa5f4ca170216c967b84935364d30938021202341 \ No newline at end of file
+9b06b6e0c213a5475d2b0fcfee537c41dbec579e6109e95f7e7aeb0488f079f6 \ No newline at end of file
diff --git a/tests/cachekey/project/sources/patch1.expected b/tests/cachekey/project/sources/patch1.expected
index a04b8fd40..d7cf73c34 100644
--- a/tests/cachekey/project/sources/patch1.expected
+++ b/tests/cachekey/project/sources/patch1.expected
@@ -1 +1 @@
-84830ad8577e5fa5a9dab14ce3f995b4dc16699aebc33122aa2dc5fade34528d \ No newline at end of file
+d5b0f1fa5b4e3e7aa617de303125268c7a7461e415ecf1eccc8aee2cda56897e \ No newline at end of file
diff --git a/tests/cachekey/project/sources/patch2.expected b/tests/cachekey/project/sources/patch2.expected
index 3fafb87b8..56a92dc8e 100644
--- a/tests/cachekey/project/sources/patch2.expected
+++ b/tests/cachekey/project/sources/patch2.expected
@@ -1 +1 @@
-1d137c65e7f2f9c8a0a74a46461dfe9ba5c675d53a1ff96a4bf15f0889891883 \ No newline at end of file
+6decb6b49e48a5869b2a438254c911423275662aff73348cd95e64148011c097 \ No newline at end of file
diff --git a/tests/cachekey/project/sources/patch3.expected b/tests/cachekey/project/sources/patch3.expected
index 6a62b7049..f1257bb31 100644
--- a/tests/cachekey/project/sources/patch3.expected
+++ b/tests/cachekey/project/sources/patch3.expected
@@ -1 +1 @@
-fd1f209c8f44fd629fb5201d6f299c47567b64828235b470b2ff8ff6edba4478 \ No newline at end of file
+ab91e0ab9e167c4e9d31480c96a6a91a47ff27246f4eeff4ce6b671cbd865901 \ No newline at end of file
diff --git a/tests/cachekey/project/sources/tar1.expected b/tests/cachekey/project/sources/tar1.expected
index 5b52a4c00..ab0bd56ea 100644
--- a/tests/cachekey/project/sources/tar1.expected
+++ b/tests/cachekey/project/sources/tar1.expected
@@ -1 +1 @@
-003d5c53c81ab4bf7e375c4e9704bdbc260473fecb334c9f78ed24ec5c1a908e \ No newline at end of file
+ccb35d04789b0d83fd93a6c2f8688c4abfe20f5bc77420f63054893450b2a832 \ No newline at end of file
diff --git a/tests/cachekey/project/sources/tar2.expected b/tests/cachekey/project/sources/tar2.expected
index d823bdee0..03241f460 100644
--- a/tests/cachekey/project/sources/tar2.expected
+++ b/tests/cachekey/project/sources/tar2.expected
@@ -1 +1 @@
-f501ed7c8df19071712634049fed1a1fb22fbeb6f27973595bc8139e56c6c446 \ No newline at end of file
+441c80ed92c77df8247344337f470ac7ab7fe91d2fe3900b498708b0faeac4b5 \ No newline at end of file
diff --git a/tests/cachekey/project/sources/zip1.expected b/tests/cachekey/project/sources/zip1.expected
index 64c0655b4..a3ac93ecf 100644
--- a/tests/cachekey/project/sources/zip1.expected
+++ b/tests/cachekey/project/sources/zip1.expected
@@ -1 +1 @@
-6a3c3a788c6a6ddae204a013d0622b6c352a91ff31cdf6d652b96ad0ac5eda52 \ No newline at end of file
+be47de64162c9cce0322d0af327092c7afc3a890ba9d6ef92eef016dcced5bae \ No newline at end of file
diff --git a/tests/cachekey/project/sources/zip2.expected b/tests/cachekey/project/sources/zip2.expected
index 64bb77289..49bd45fd0 100644
--- a/tests/cachekey/project/sources/zip2.expected
+++ b/tests/cachekey/project/sources/zip2.expected
@@ -1 +1 @@
-50a555bf892822b8f5e4d59b940ba4359afe8e6d01dff013d918a3befd9c3d8f \ No newline at end of file
+bedd330938f9405e2febcf1de8428b7180eb62ab73f8e31e49871874ae351735 \ No newline at end of file
diff --git a/tests/cachekey/project/target.expected b/tests/cachekey/project/target.expected
index dcb6a6642..4f4c7c1f8 100644
--- a/tests/cachekey/project/target.expected
+++ b/tests/cachekey/project/target.expected
@@ -1 +1 @@
-0de68ec99d39b12857a5350ebfdc7f49fdde9a3457a31b2330896307fb503f7b \ No newline at end of file
+a408b3e4b6ba4d6a6338bd3153728be89a18b74b13bde554411a4371fda487bc \ No newline at end of file
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index c43cc83e3..a41c3498a 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -2,7 +2,6 @@ import os
import shutil
import pytest
from tests.testutils import cli, create_artifact_share
-from tests.testutils.site import IS_LINUX
from . import generate_junction
@@ -42,43 +41,42 @@ def assert_not_shared(cli, share, project, element_name):
# * `bst build` pushes all build elements to configured 'push' cache
# * `bst pull --deps all` downloads everything from cache after local deletion
#
-@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_push_pull_all(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
-
- # First build the target element and push to the remote.
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True}
- })
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_success()
- assert cli.get_element_state(project, 'target.bst') == 'cached'
-
- # Assert that everything is now cached in the remote.
- share.update_summary()
- all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
- for element_name in all_elements:
- assert_shared(cli, share, project, element_name)
-
- # Now we've pushed, delete the user's local artifact cache
- # directory and try to redownload it from the share
- #
- artifacts = os.path.join(cli.directory, 'artifacts')
- shutil.rmtree(artifacts)
- # Assert that nothing is cached locally anymore
- for element_name in all_elements:
- assert cli.get_element_state(project, element_name) != 'cached'
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+
+ # First build the target element and push to the remote.
+ cli.configure({
+ 'artifacts': {'url': share.repo, 'push': True}
+ })
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_success()
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
+
+ # Assert that everything is now cached in the remote.
+ all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
+ for element_name in all_elements:
+ assert_shared(cli, share, project, element_name)
+
+ # Now we've pushed, delete the user's local artifact cache
+ # directory and try to redownload it from the share
+ #
+ artifacts = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifacts)
- # Now try bst pull
- result = cli.run(project=project, args=['pull', '--deps', 'all', 'target.bst'])
- result.assert_success()
+ # Assert that nothing is cached locally anymore
+ for element_name in all_elements:
+ assert cli.get_element_state(project, element_name) != 'cached'
- # And assert that it's again in the local cache, without having built
- for element_name in all_elements:
- assert cli.get_element_state(project, element_name) == 'cached'
+ # Now try bst pull
+ result = cli.run(project=project, args=['pull', '--deps', 'all', 'target.bst'])
+ result.assert_success()
+
+ # And assert that it's again in the local cache, without having built
+ for element_name in all_elements:
+ assert cli.get_element_state(project, element_name) == 'cached'
# Tests that:
@@ -86,44 +84,40 @@ def test_push_pull_all(cli, tmpdir, datafiles):
# * `bst build` pushes all build elements ONLY to configured 'push' cache
# * `bst pull` finds artifacts that are available only in the secondary cache
#
-@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_pull_secondary_cache(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
- share1 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1'))
- share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
-
- # Build the target and push it to share2 only.
- cli.configure({
- 'artifacts': [
- {'url': share1.repo, 'push': False},
- {'url': share2.repo, 'push': True},
- ]
- })
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_success()
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1')) as share1,\
+ create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as share2:
- share1.update_summary()
- share2.update_summary()
+ # Build the target and push it to share2 only.
+ cli.configure({
+ 'artifacts': [
+ {'url': share1.repo, 'push': False},
+ {'url': share2.repo, 'push': True},
+ ]
+ })
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_success()
- assert_not_shared(cli, share1, project, 'target.bst')
- assert_shared(cli, share2, project, 'target.bst')
+ assert_not_shared(cli, share1, project, 'target.bst')
+ assert_shared(cli, share2, project, 'target.bst')
- # Delete the user's local artifact cache.
- artifacts = os.path.join(cli.directory, 'artifacts')
- shutil.rmtree(artifacts)
+ # Delete the user's local artifact cache.
+ artifacts = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifacts)
- # Assert that the element is not cached anymore.
- assert cli.get_element_state(project, 'target.bst') != 'cached'
+ # Assert that the element is not cached anymore.
+ assert cli.get_element_state(project, 'target.bst') != 'cached'
- # Now try bst pull
- result = cli.run(project=project, args=['pull', 'target.bst'])
- result.assert_success()
+ # Now try bst pull
+ result = cli.run(project=project, args=['pull', 'target.bst'])
+ result.assert_success()
- # And assert that it's again in the local cache, without having built,
- # i.e. we found it in share2.
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ # And assert that it's again in the local cache, without having built,
+ # i.e. we found it in share2.
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
# Tests that:
@@ -131,182 +125,175 @@ def test_pull_secondary_cache(cli, tmpdir, datafiles):
# * `bst push --remote` pushes to the given remote, not one from the config
# * `bst pull --remote` pulls from the given remote
#
-@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_push_pull_specific_remote(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
- good_share = create_artifact_share(os.path.join(str(tmpdir), 'goodartifactshare'))
- bad_share = create_artifact_share(os.path.join(str(tmpdir), 'badartifactshare'))
+ with create_artifact_share(os.path.join(str(tmpdir), 'goodartifactshare')) as good_share,\
+ create_artifact_share(os.path.join(str(tmpdir), 'badartifactshare')) as bad_share:
- # Build the target so we have it cached locally only.
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_success()
+ # Build the target so we have it cached locally only.
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_success()
- state = cli.get_element_state(project, 'target.bst')
- assert state == 'cached'
+ state = cli.get_element_state(project, 'target.bst')
+ assert state == 'cached'
- # Configure the default push location to be bad_share; we will assert that
- # nothing actually gets pushed there.
- cli.configure({
- 'artifacts': {'url': bad_share.repo, 'push': True},
- })
+ # Configure the default push location to be bad_share; we will assert that
+ # nothing actually gets pushed there.
+ cli.configure({
+ 'artifacts': {'url': bad_share.repo, 'push': True},
+ })
- # Now try `bst push` to the good_share.
- result = cli.run(project=project, args=[
- 'push', 'target.bst', '--remote', good_share.repo
- ])
- result.assert_success()
+ # Now try `bst push` to the good_share.
+ result = cli.run(project=project, args=[
+ 'push', 'target.bst', '--remote', good_share.repo
+ ])
+ result.assert_success()
- good_share.update_summary()
- bad_share.update_summary()
+ # Assert that all the artifacts are in the share we pushed
+ # to, and not the other.
+ assert_shared(cli, good_share, project, 'target.bst')
+ assert_not_shared(cli, bad_share, project, 'target.bst')
- # Assert that all the artifacts are in the share we pushed
- # to, and not the other.
- assert_shared(cli, good_share, project, 'target.bst')
- assert_not_shared(cli, bad_share, project, 'target.bst')
-
- # Now we've pushed, delete the user's local artifact cache
- # directory and try to redownload it from the good_share.
- #
- artifacts = os.path.join(cli.directory, 'artifacts')
- shutil.rmtree(artifacts)
+ # Now we've pushed, delete the user's local artifact cache
+ # directory and try to redownload it from the good_share.
+ #
+ artifacts = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifacts)
- result = cli.run(project=project, args=['pull', 'target.bst', '--remote',
- good_share.repo])
- result.assert_success()
+ result = cli.run(project=project, args=['pull', 'target.bst', '--remote',
+ good_share.repo])
+ result.assert_success()
- # And assert that it's again in the local cache, without having built
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ # And assert that it's again in the local cache, without having built
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
# Tests that:
#
# * In non-strict mode, dependency changes don't block artifact reuse
#
-@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_push_pull_non_strict(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
- workspace = os.path.join(str(tmpdir), 'workspace')
-
- # First build the target element and push to the remote.
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- 'projects': {
- 'test': {'strict': False}
- }
- })
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_success()
- assert cli.get_element_state(project, 'target.bst') == 'cached'
-
- # Assert that everything is now cached in the remote.
- share.update_summary()
- all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
- for element_name in all_elements:
- assert_shared(cli, share, project, element_name)
-
- # Now we've pushed, delete the user's local artifact cache
- # directory and try to redownload it from the share
- #
- artifacts = os.path.join(cli.directory, 'artifacts')
- shutil.rmtree(artifacts)
- # Assert that nothing is cached locally anymore
- for element_name in all_elements:
- assert cli.get_element_state(project, element_name) != 'cached'
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ workspace = os.path.join(str(tmpdir), 'workspace')
+
+ # First build the target element and push to the remote.
+ cli.configure({
+ 'artifacts': {'url': share.repo, 'push': True},
+ 'projects': {
+ 'test': {'strict': False}
+ }
+ })
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_success()
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
+
+ # Assert that everything is now cached in the remote.
+ all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
+ for element_name in all_elements:
+ assert_shared(cli, share, project, element_name)
+
+ # Now we've pushed, delete the user's local artifact cache
+ # directory and try to redownload it from the share
+ #
+ artifacts = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifacts)
- # Add a file to force change in strict cache key of import-bin.bst
- with open(os.path.join(str(project), 'files', 'bin-files', 'usr', 'bin', 'world'), 'w') as f:
- f.write('world')
+ # Assert that nothing is cached locally anymore
+ for element_name in all_elements:
+ assert cli.get_element_state(project, element_name) != 'cached'
- # Assert that the workspaced element requires a rebuild
- assert cli.get_element_state(project, 'import-bin.bst') == 'buildable'
- # Assert that the target is still waiting due to --no-strict
- assert cli.get_element_state(project, 'target.bst') == 'waiting'
+ # Add a file to force change in strict cache key of import-bin.bst
+ with open(os.path.join(str(project), 'files', 'bin-files', 'usr', 'bin', 'world'), 'w') as f:
+ f.write('world')
- # Now try bst pull
- result = cli.run(project=project, args=['pull', '--deps', 'all', 'target.bst'])
- result.assert_success()
+ # Assert that the workspaced element requires a rebuild
+ assert cli.get_element_state(project, 'import-bin.bst') == 'buildable'
+ # Assert that the target is still waiting due to --no-strict
+ assert cli.get_element_state(project, 'target.bst') == 'waiting'
- # And assert that the target is again in the local cache, without having built
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ # Now try bst pull
+ result = cli.run(project=project, args=['pull', '--deps', 'all', 'target.bst'])
+ result.assert_success()
+
+ # And assert that the target is again in the local cache, without having built
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
# Regression test for https://gitlab.com/BuildStream/buildstream/issues/202
-@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_push_pull_track_non_strict(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
-
- # First build the target element and push to the remote.
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- 'projects': {
- 'test': {'strict': False}
- }
- })
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_success()
- assert cli.get_element_state(project, 'target.bst') == 'cached'
-
- # Assert that everything is now cached in the remote.
- share.update_summary()
- all_elements = {'target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst'}
- for element_name in all_elements:
- assert_shared(cli, share, project, element_name)
-
- # Now we've pushed, delete the user's local artifact cache
- # directory and try to redownload it from the share
- #
- artifacts = os.path.join(cli.directory, 'artifacts')
- shutil.rmtree(artifacts)
- # Assert that nothing is cached locally anymore
- for element_name in all_elements:
- assert cli.get_element_state(project, element_name) != 'cached'
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+
+ # First build the target element and push to the remote.
+ cli.configure({
+ 'artifacts': {'url': share.repo, 'push': True},
+ 'projects': {
+ 'test': {'strict': False}
+ }
+ })
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_success()
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
+
+ # Assert that everything is now cached in the remote.
+ all_elements = {'target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst'}
+ for element_name in all_elements:
+ assert_shared(cli, share, project, element_name)
+
+ # Now we've pushed, delete the user's local artifact cache
+ # directory and try to redownload it from the share
+ #
+ artifacts = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifacts)
+
+ # Assert that nothing is cached locally anymore
+ for element_name in all_elements:
+ assert cli.get_element_state(project, element_name) != 'cached'
+
+ # Now try bst build with tracking and pulling.
+ # Tracking will be skipped for target.bst as it doesn't have any sources.
+ # With the non-strict build plan target.bst immediately enters the pull queue.
+ # However, pulling has to be deferred until the dependencies have been
+ # tracked as the strict cache key needs to be calculated before querying
+ # the caches.
+ result = cli.run(project=project, args=['build', '--track-all', '--all', 'target.bst'])
+ result.assert_success()
+ assert set(result.get_pulled_elements()) == all_elements
- # Now try bst build with tracking and pulling.
- # Tracking will be skipped for target.bst as it doesn't have any sources.
- # With the non-strict build plan target.bst immediately enters the pull queue.
- # However, pulling has to be deferred until the dependencies have been
- # tracked as the strict cache key needs to be calculated before querying
- # the caches.
- result = cli.run(project=project, args=['build', '--track-all', '--all', 'target.bst'])
- result.assert_success()
- assert set(result.get_pulled_elements()) == all_elements
-
-@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_push_pull_cross_junction(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ subproject_path = os.path.join(project, 'files', 'sub-project')
+ junction_path = os.path.join(project, 'elements', 'junction.bst')
+
+ generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)
- # First build the target element and push to the remote.
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True}
- })
- result = cli.run(project=project, args=['build', 'junction.bst:import-etc.bst'])
- result.assert_success()
- assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
+ # First build the target element and push to the remote.
+ cli.configure({
+ 'artifacts': {'url': share.repo, 'push': True}
+ })
+ result = cli.run(project=project, args=['build', 'junction.bst:import-etc.bst'])
+ result.assert_success()
+ assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
- cache_dir = os.path.join(project, 'cache', 'artifacts')
- shutil.rmtree(cache_dir)
+ cache_dir = os.path.join(project, 'cache', 'artifacts')
+ shutil.rmtree(cache_dir)
- share.update_summary()
- assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'buildable'
+ assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'buildable'
- # Now try bst pull
- result = cli.run(project=project, args=['pull', 'junction.bst:import-etc.bst'])
- result.assert_success()
+ # Now try bst pull
+ result = cli.run(project=project, args=['pull', 'junction.bst:import-etc.bst'])
+ result.assert_success()
- # And assert that it's again in the local cache, without having built
- assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
+ # And assert that it's again in the local cache, without having built
+ assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
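The two-share tests above chain their context managers with backslash-continued with statements. An equivalent pattern, and the one the deleted tar.py tests already used, is contextlib.ExitStack, which scales to any number of shares without line continuations. A runnable sketch with a dummy stand-in for create_artifact_share():

    import os
    import tempfile
    from contextlib import ExitStack, contextmanager


    @contextmanager
    def dummy_share(directory):
        # Minimal stand-in for create_artifact_share(); see the sketch further up.
        os.makedirs(directory, exist_ok=True)
        yield directory


    with tempfile.TemporaryDirectory() as tmpdir, ExitStack() as stack:
        shares = [
            stack.enter_context(dummy_share(os.path.join(tmpdir, name)))
            for name in ('artifactshare1', 'artifactshare2')
        ]
        # ... the test body would use shares[0] and shares[1] here; ExitStack
        # unwinds both context managers in reverse order when the block ends.
        assert all(os.path.isdir(s) for s in shares)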
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index ca46b0447..e8f7d7d04 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -1,12 +1,10 @@
import os
-import shutil
import pytest
from collections import namedtuple
from unittest.mock import MagicMock
from buildstream._exceptions import ErrorDomain
from tests.testutils import cli, create_artifact_share, create_element_size
-from tests.testutils.site import IS_LINUX
from . import configure_project, generate_junction
@@ -58,256 +56,247 @@ def test_push(cli, tmpdir, datafiles):
assert cli.get_element_state(project, 'target.bst') == 'cached'
# Set up two artifact shares.
- share1 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1'))
- share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
-
- # Try pushing with no remotes configured. This should fail.
- result = cli.run(project=project, args=['push', 'target.bst'])
- result.assert_main_error(ErrorDomain.STREAM, None)
-
- # Configure bst to pull but not push from a cache and run `bst push`.
- # This should also fail.
- cli.configure({
- 'artifacts': {'url': share1.repo, 'push': False},
- })
- result = cli.run(project=project, args=['push', 'target.bst'])
- result.assert_main_error(ErrorDomain.STREAM, None)
-
- # Configure bst to push to one of the caches and run `bst push`. This works.
- cli.configure({
- 'artifacts': [
- {'url': share1.repo, 'push': False},
- {'url': share2.repo, 'push': True},
- ]
- })
- result = cli.run(project=project, args=['push', 'target.bst'])
-
- assert_not_shared(cli, share1, project, 'target.bst')
- assert_shared(cli, share2, project, 'target.bst')
-
- # Now try pushing to both (making sure to empty the cache we just pushed
- # to).
- shutil.rmtree(share2.directory)
- share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
- cli.configure({
- 'artifacts': [
- {'url': share1.repo, 'push': True},
- {'url': share2.repo, 'push': True},
- ]
- })
- result = cli.run(project=project, args=['push', 'target.bst'])
-
- assert_shared(cli, share1, project, 'target.bst')
- assert_shared(cli, share2, project, 'target.bst')
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1')) as share1:
+
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as share2:
+
+ # Try pushing with no remotes configured. This should fail.
+ result = cli.run(project=project, args=['push', 'target.bst'])
+ result.assert_main_error(ErrorDomain.STREAM, None)
+
+ # Configure bst to pull but not push from a cache and run `bst push`.
+ # This should also fail.
+ cli.configure({
+ 'artifacts': {'url': share1.repo, 'push': False},
+ })
+ result = cli.run(project=project, args=['push', 'target.bst'])
+ result.assert_main_error(ErrorDomain.STREAM, None)
+
+ # Configure bst to push to one of the caches and run `bst push`. This works.
+ cli.configure({
+ 'artifacts': [
+ {'url': share1.repo, 'push': False},
+ {'url': share2.repo, 'push': True},
+ ]
+ })
+ result = cli.run(project=project, args=['push', 'target.bst'])
+
+ assert_not_shared(cli, share1, project, 'target.bst')
+ assert_shared(cli, share2, project, 'target.bst')
+
+ # Now try pushing to both
+
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as share2:
+ cli.configure({
+ 'artifacts': [
+ {'url': share1.repo, 'push': True},
+ {'url': share2.repo, 'push': True},
+ ]
+ })
+ result = cli.run(project=project, args=['push', 'target.bst'])
+
+ assert_shared(cli, share1, project, 'target.bst')
+ assert_shared(cli, share2, project, 'target.bst')
# Tests that `bst push --deps all` pushes all dependencies of the given element.
#
-@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_push_all(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
-
- # First build it without the artifact cache configured
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_success()
-
- # Assert that we are now cached locally
- assert cli.get_element_state(project, 'target.bst') == 'cached'
- # Configure artifact share
- cli.configure({
- #
- # FIXME: This test hangs "sometimes" if we allow
- # concurrent push.
- #
- # It's not too bad to ignore since we're
- # using the local artifact cache functionality
- # only, but it should probably be fixed.
- #
- 'scheduler': {
- 'pushers': 1
- },
- 'artifacts': {
- 'url': share.repo,
- 'push': True,
- }
- })
-
- # Now try bst push all the deps
- result = cli.run(project=project, args=[
- 'push', 'target.bst',
- '--deps', 'all'
- ])
- result.assert_success()
-
- # And finally assert that all the artifacts are in the share
- assert_shared(cli, share, project, 'target.bst')
- assert_shared(cli, share, project, 'import-bin.bst')
- assert_shared(cli, share, project, 'import-dev.bst')
- assert_shared(cli, share, project, 'compose-all.bst')
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+
+ # First build it without the artifact cache configured
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_success()
+
+ # Assert that we are now cached locally
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
+
+ # Configure artifact share
+ cli.configure({
+ #
+ # FIXME: This test hangs "sometimes" if we allow
+ # concurrent push.
+ #
+ # It's not too bad to ignore since we're
+ # using the local artifact cache functionality
+ # only, but it should probably be fixed.
+ #
+ 'scheduler': {
+ 'pushers': 1
+ },
+ 'artifacts': {
+ 'url': share.repo,
+ 'push': True,
+ }
+ })
+
+ # Now try bst push all the deps
+ result = cli.run(project=project, args=[
+ 'push', 'target.bst',
+ '--deps', 'all'
+ ])
+ result.assert_success()
+
+ # And finally assert that all the artifacts are in the share
+ assert_shared(cli, share, project, 'target.bst')
+ assert_shared(cli, share, project, 'import-bin.bst')
+ assert_shared(cli, share, project, 'import-dev.bst')
+ assert_shared(cli, share, project, 'compose-all.bst')
# Tests that `bst build` won't push artifacts to the cache it just pulled from.
#
# Regression test for https://gitlab.com/BuildStream/buildstream/issues/233.
-@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_push_after_pull(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
# Set up two artifact shares.
- share1 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1'))
- share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1')) as share1,\
+ create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as share2:
- # Set the scene: share1 has the artifact, share2 does not.
- #
- cli.configure({
- 'artifacts': {'url': share1.repo, 'push': True},
- })
+ # Set the scene: share1 has the artifact, share2 does not.
+ #
+ cli.configure({
+ 'artifacts': {'url': share1.repo, 'push': True},
+ })
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_success()
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_success()
- share1.update_summary()
- cli.remove_artifact_from_cache(project, 'target.bst')
+ cli.remove_artifact_from_cache(project, 'target.bst')
- assert_shared(cli, share1, project, 'target.bst')
- assert_not_shared(cli, share2, project, 'target.bst')
- assert cli.get_element_state(project, 'target.bst') != 'cached'
+ assert_shared(cli, share1, project, 'target.bst')
+ assert_not_shared(cli, share2, project, 'target.bst')
+ assert cli.get_element_state(project, 'target.bst') != 'cached'
- # Now run the build again. Correct `bst build` behaviour is to download the
- # artifact from share1 but not push it back again.
- #
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_success()
- assert result.get_pulled_elements() == ['target.bst']
- assert result.get_pushed_elements() == []
-
- # Delete the artifact locally again.
- cli.remove_artifact_from_cache(project, 'target.bst')
-
- # Now we add share2 into the mix as a second push remote. This time,
- # `bst build` should push to share2 after pulling from share1.
- cli.configure({
- 'artifacts': [
- {'url': share1.repo, 'push': True},
- {'url': share2.repo, 'push': True},
- ]
- })
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_success()
- assert result.get_pulled_elements() == ['target.bst']
- assert result.get_pushed_elements() == ['target.bst']
+ # Now run the build again. Correct `bst build` behaviour is to download the
+ # artifact from share1 but not push it back again.
+ #
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_success()
+ assert result.get_pulled_elements() == ['target.bst']
+ assert result.get_pushed_elements() == []
+
+ # Delete the artifact locally again.
+ cli.remove_artifact_from_cache(project, 'target.bst')
+
+ # Now we add share2 into the mix as a second push remote. This time,
+ # `bst build` should push to share2 after pulling from share1.
+ cli.configure({
+ 'artifacts': [
+ {'url': share1.repo, 'push': True},
+ {'url': share2.repo, 'push': True},
+ ]
+ })
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_success()
+ assert result.get_pulled_elements() == ['target.bst']
+ assert result.get_pushed_elements() == ['target.bst']
# Ensure that when an artifact's size exceeds available disk space
# the least recently pushed artifact is deleted in order to make room for
# the incoming artifact.
+@pytest.mark.xfail
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_expires(cli, datafiles, tmpdir):
project = os.path.join(datafiles.dirname, datafiles.basename)
element_path = os.path.join(project, 'elements')
# Create an artifact share (remote artifact cache) in the tmpdir/artifactshare
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
-
- # Mock the os.statvfs() call to return a named tuple which emulates an
- # os.statvfs_result object
- statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize')
- os.statvfs = MagicMock(return_value=statvfs_result(f_blocks=int(10e9),
- f_bfree=(int(12e6) + int(2e9)),
- f_bsize=1))
-
- # Configure bst to push to the cache
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- })
-
- # Create and build an element of 5 MB
- create_element_size('element1.bst', element_path, [], int(5e6)) # [] => no deps
- result = cli.run(project=project, args=['build', 'element1.bst'])
- result.assert_success()
-
- # Create and build an element of 5 MB
- create_element_size('element2.bst', element_path, [], int(5e6)) # [] => no deps
- result = cli.run(project=project, args=['build', 'element2.bst'])
- result.assert_success()
-
- # update the share
- share.update_summary()
-
- # check that element's 1 and 2 are cached both locally and remotely
- assert cli.get_element_state(project, 'element1.bst') == 'cached'
- assert_shared(cli, share, project, 'element1.bst')
- assert cli.get_element_state(project, 'element2.bst') == 'cached'
- assert_shared(cli, share, project, 'element2.bst')
-
- # update mocked available disk space now that two 5 MB artifacts have been added
- os.statvfs = MagicMock(return_value=statvfs_result(f_blocks=int(10e9),
- f_bfree=(int(2e6) + int(2e9)),
- f_bsize=1))
-
- # Create and build another element of 5 MB (This will exceed the free disk space available)
- create_element_size('element3.bst', element_path, [], int(5e6))
- result = cli.run(project=project, args=['build', 'element3.bst'])
- result.assert_success()
-
- # update the share
- share.update_summary()
-
- # Ensure it is cached both locally and remotely
- assert cli.get_element_state(project, 'element3.bst') == 'cached'
- assert_shared(cli, share, project, 'element3.bst')
-
- # Ensure element1 has been removed from the share
- assert_not_shared(cli, share, project, 'element1.bst')
- # Ensure that elemen2 remains
- assert_shared(cli, share, project, 'element2.bst')
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+
+ # Mock the os.statvfs() call to return a named tuple which emulates an
+ # os.statvfs_result object
+ statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize')
+ os.statvfs = MagicMock(return_value=statvfs_result(f_blocks=int(10e9),
+ f_bfree=(int(12e6) + int(2e9)),
+ f_bsize=1))
+
+ # Configure bst to push to the cache
+ cli.configure({
+ 'artifacts': {'url': share.repo, 'push': True},
+ })
+
+ # Create and build an element of 5 MB
+ create_element_size('element1.bst', element_path, [], int(5e6)) # [] => no deps
+ result = cli.run(project=project, args=['build', 'element1.bst'])
+ result.assert_success()
+
+ # Create and build an element of 5 MB
+ create_element_size('element2.bst', element_path, [], int(5e6)) # [] => no deps
+ result = cli.run(project=project, args=['build', 'element2.bst'])
+ result.assert_success()
+
+ # check that elements 1 and 2 are cached both locally and remotely
+ assert cli.get_element_state(project, 'element1.bst') == 'cached'
+ assert_shared(cli, share, project, 'element1.bst')
+ assert cli.get_element_state(project, 'element2.bst') == 'cached'
+ assert_shared(cli, share, project, 'element2.bst')
+
+ # update mocked available disk space now that two 5 MB artifacts have been added
+ os.statvfs = MagicMock(return_value=statvfs_result(f_blocks=int(10e9),
+ f_bfree=(int(2e6) + int(2e9)),
+ f_bsize=1))
+
+ # Create and build another element of 5 MB (This will exceed the free disk space available)
+ create_element_size('element3.bst', element_path, [], int(5e6))
+ result = cli.run(project=project, args=['build', 'element3.bst'])
+ result.assert_success()
+
+ # Ensure it is cached both locally and remotely
+ assert cli.get_element_state(project, 'element3.bst') == 'cached'
+ assert_shared(cli, share, project, 'element3.bst')
+
+ # Ensure element1 has been removed from the share
+ assert_not_shared(cli, share, project, 'element1.bst')
+ # Ensure that element2 remains
+ assert_shared(cli, share, project, 'element2.bst')
# Test that a large artifact, whose size exceeds the quota, is not pushed
# to the remote share
+@pytest.mark.xfail
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_too_large(cli, datafiles, tmpdir):
project = os.path.join(datafiles.dirname, datafiles.basename)
element_path = os.path.join(project, 'elements')
# Create an artifact share (remote cache) in tmpdir/artifactshare
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
-
- # Mock a file system with 5 MB total space
- statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize')
- os.statvfs = MagicMock(return_value=statvfs_result(f_blocks=int(5e6) + int(2e9),
- f_bfree=(int(5e6) + int(2e9)),
- f_bsize=1))
-
- # Configure bst to push to the remote cache
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- })
-
- # Create and push a 3MB element
- create_element_size('small_element.bst', element_path, [], int(3e6))
- result = cli.run(project=project, args=['build', 'small_element.bst'])
- result.assert_success()
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
- # Create and try to push a 6MB element.
- create_element_size('large_element.bst', element_path, [], int(6e6))
- result = cli.run(project=project, args=['build', 'large_element.bst'])
- result.assert_success()
+ # Mock a file system with 5 MB total space
+ statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize')
+ os.statvfs = MagicMock(return_value=statvfs_result(f_blocks=int(5e6) + int(2e9),
+ f_bfree=(int(5e6) + int(2e9)),
+ f_bsize=1))
+
+ # Configure bst to push to the remote cache
+ cli.configure({
+ 'artifacts': {'url': share.repo, 'push': True},
+ })
- # update the cache
- share.update_summary()
+ # Create and push a 3MB element
+ create_element_size('small_element.bst', element_path, [], int(3e6))
+ result = cli.run(project=project, args=['build', 'small_element.bst'])
+ result.assert_success()
- # Ensure that the small artifact is still in the share
- assert cli.get_element_state(project, 'small_element.bst') == 'cached'
- assert_shared(cli, share, project, 'small_element.bst')
+ # Create and try to push a 6MB element.
+ create_element_size('large_element.bst', element_path, [], int(6e6))
+ result = cli.run(project=project, args=['build', 'large_element.bst'])
+ result.assert_success()
- # Ensure that the artifact is cached locally but NOT remotely
- assert cli.get_element_state(project, 'large_element.bst') == 'cached'
- assert_not_shared(cli, share, project, 'large_element.bst')
+ # Ensure that the small artifact is still in the share
+ assert cli.get_element_state(project, 'small_element.bst') == 'cached'
+ assert_shared(cli, share, project, 'small_element.bst')
+
+ # Ensure that the artifact is cached locally but NOT remotely
+ assert cli.get_element_state(project, 'large_element.bst') == 'cached'
+ assert_not_shared(cli, share, project, 'large_element.bst')
# Test that when an element is pulled recently, it is not considered the LRU element.
@@ -321,64 +310,60 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
element_path = os.path.join(project, 'elements')
# Create an artifact share (remote cache) in tmpdir/artifactshare
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
-
- # Mock a file system with 12 MB free disk space
- statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize')
- os.statvfs = MagicMock(return_value=statvfs_result(f_blocks=int(10e9) + int(2e9),
- f_bfree=(int(12e6) + int(2e9)),
- f_bsize=1))
-
- # Configure bst to push to the cache
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- })
-
- # Create and build 2 elements, each of 5 MB.
- create_element_size('element1.bst', element_path, [], int(5e6))
- result = cli.run(project=project, args=['build', 'element1.bst'])
- result.assert_success()
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
- create_element_size('element2.bst', element_path, [], int(5e6))
- result = cli.run(project=project, args=['build', 'element2.bst'])
- result.assert_success()
+ # Mock a file system with 12 MB free disk space
+ statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize')
+ os.statvfs = MagicMock(return_value=statvfs_result(f_blocks=int(10e9) + int(2e9),
+ f_bfree=(int(12e6) + int(2e9)),
+ f_bsize=1))
- share.update_summary()
+ # Configure bst to push to the cache
+ cli.configure({
+ 'artifacts': {'url': share.repo, 'push': True},
+ })
- # Ensure they are cached locally
- assert cli.get_element_state(project, 'element1.bst') == 'cached'
- assert cli.get_element_state(project, 'element2.bst') == 'cached'
+ # Create and build 2 elements, each 5 MB in size.
+ create_element_size('element1.bst', element_path, [], int(5e6))
+ result = cli.run(project=project, args=['build', 'element1.bst'])
+ result.assert_success()
- # Ensure that they have been pushed to the cache
- assert_shared(cli, share, project, 'element1.bst')
- assert_shared(cli, share, project, 'element2.bst')
+ create_element_size('element2.bst', element_path, [], int(5e6))
+ result = cli.run(project=project, args=['build', 'element2.bst'])
+ result.assert_success()
- # Remove element1 from the local cache
- cli.remove_artifact_from_cache(project, 'element1.bst')
- assert cli.get_element_state(project, 'element1.bst') != 'cached'
+ # Ensure they are cached locally
+ assert cli.get_element_state(project, 'element1.bst') == 'cached'
+ assert cli.get_element_state(project, 'element2.bst') == 'cached'
- # Pull the element1 from the remote cache (this should update its mtime)
- result = cli.run(project=project, args=['pull', 'element1.bst', '--remote',
- share.repo])
- result.assert_success()
+ # Ensure that they have been pushed to the cache
+ assert_shared(cli, share, project, 'element1.bst')
+ assert_shared(cli, share, project, 'element2.bst')
- # Ensure element1 is cached locally
- assert cli.get_element_state(project, 'element1.bst') == 'cached'
+ # Remove element1 from the local cache
+ cli.remove_artifact_from_cache(project, 'element1.bst')
+ assert cli.get_element_state(project, 'element1.bst') != 'cached'
- # Create and build the element3 (of 5 MB)
- create_element_size('element3.bst', element_path, [], int(5e6))
- result = cli.run(project=project, args=['build', 'element3.bst'])
- result.assert_success()
+ # Pull element1 from the remote cache (this should update its mtime)
+ result = cli.run(project=project, args=['pull', 'element1.bst', '--remote',
+ share.repo])
+ result.assert_success()
+
+ # Ensure element1 is cached locally
+ assert cli.get_element_state(project, 'element1.bst') == 'cached'
- share.update_summary()
+ # Create and build element3 (5 MB)
+ create_element_size('element3.bst', element_path, [], int(5e6))
+ result = cli.run(project=project, args=['build', 'element3.bst'])
+ result.assert_success()
- # Make sure it's cached locally and remotely
- assert cli.get_element_state(project, 'element3.bst') == 'cached'
- assert_shared(cli, share, project, 'element3.bst')
+ # Make sure it's cached locally and remotely
+ assert cli.get_element_state(project, 'element3.bst') == 'cached'
+ assert_shared(cli, share, project, 'element3.bst')
- # Ensure that element2 was deleted from the share and element1 remains
- assert_not_shared(cli, share, project, 'element2.bst')
- assert_shared(cli, share, project, 'element1.bst')
+ # Ensure that element2 was deleted from the share and element1 remains
+ assert_not_shared(cli, share, project, 'element2.bst')
+ assert_shared(cli, share, project, 'element1.bst')
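The behaviour exercised here is least-recently-used eviction keyed on ref timestamps: pulling element1 refreshes its mtime, so element2 becomes the oldest candidate when element3 overflows the quota. A rough sketch of that selection rule, assuming refs are plain files whose mtime is bumped on pull (a hypothetical stand-in; the real eviction code is not part of this diff):

    import os

    def pick_expiry_victim(ref_paths):
        # The LRU ref is the one with the oldest mtime; after the pull
        # above, element1 is fresh again and element2 becomes the victim.
        return min(ref_paths, key=os.path.getmtime)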
@pytest.mark.datafiles(DATA_DIR)
@@ -394,11 +379,11 @@ def test_push_cross_junction(cli, tmpdir, datafiles):
assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- })
- result = cli.run(project=project, args=['push', 'junction.bst:import-etc.bst'])
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ cli.configure({
+ 'artifacts': {'url': share.repo, 'push': True},
+ })
+ result = cli.run(project=project, args=['push', 'junction.bst:import-etc.bst'])
- cache_key = cli.get_element_key(project, 'junction.bst:import-etc.bst')
- assert share.has_artifact('subtest', 'import-etc.bst', cache_key)
+ cache_key = cli.get_element_key(project, 'junction.bst:import-etc.bst')
+ assert share.has_artifact('subtest', 'import-etc.bst', cache_key)
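Taken together, these hunks move every test from the old create/update_summary pairing to a context-managed share. A sketch of the resulting pattern (cli, tmpdir and project stand in for the pytest fixtures above, and target.bst is a placeholder element):

    import os
    from tests.testutils import create_artifact_share

    def example_push(cli, tmpdir, project):
        # The with block guarantees ArtifactShare.close() runs, terminating
        # the server subprocess and removing its directory, even when an
        # assertion fails halfway through the test.
        with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
            cli.configure({'artifacts': {'url': share.repo, 'push': True}})
            result = cli.run(project=project, args=['push', 'target.bst'])
            result.assert_success()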
diff --git a/tests/integration/workspace.py b/tests/integration/workspace.py
index 6eae1efc8..102d053fc 100644
--- a/tests/integration/workspace.py
+++ b/tests/integration/workspace.py
@@ -216,7 +216,6 @@ def test_updated_dependency_nested(cli, tmpdir, datafiles):
@pytest.mark.integration
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not IS_LINUX, reason='Incremental builds are not supported by the unix platform')
def test_incremental_configure_commands_run_only_once(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
workspace = os.path.join(cli.directory, 'workspace')
diff --git a/tests/testutils/artifactshare.py b/tests/testutils/artifactshare.py
index 8664c69d0..6b9117b48 100644
--- a/tests/testutils/artifactshare.py
+++ b/tests/testutils/artifactshare.py
@@ -2,10 +2,18 @@ import string
import pytest
import subprocess
import os
+import shutil
+import signal
-from buildstream import _yaml
+from contextlib import contextmanager
+from multiprocessing import Process, Queue
+import pytest_cov
-from .site import HAVE_OSTREE_CLI
+from buildstream import _yaml
+from buildstream._artifactcache.cascache import CASCache
+from buildstream._artifactcache.casserver import create_server
+from buildstream._context import Context
+from buildstream._exceptions import ArtifactError
# ArtifactShare()
@@ -20,11 +28,6 @@ class ArtifactShare():
def __init__(self, directory):
- # We need the ostree CLI for tests which use this
- #
- if not HAVE_OSTREE_CLI:
- pytest.skip("ostree cli is not available")
-
# The working directory for the artifact share (in case it
# needs to do something outside of its backend's storage folder).
#
@@ -35,34 +38,42 @@ class ArtifactShare():
# Unless this gets more complicated, just use this directly
# in tests as a remote artifact push/pull configuration
#
- self.repo = os.path.join(self.directory, 'repo')
+ self.repodir = os.path.join(self.directory, 'repo')
- os.makedirs(self.repo)
+ os.makedirs(self.repodir)
- self.init()
- self.update_summary()
+ context = Context()
+ context.artifactdir = self.repodir
- # init():
- #
- # Initializes the artifact share
- #
- # Returns:
- # (smth): A new ref corresponding to this commit, which can
- # be passed as the ref in the Repo.source_config() API.
- #
- def init(self):
- subprocess.call(['ostree', 'init',
- '--repo', self.repo,
- '--mode', 'archive-z2'])
+ self.cas = CASCache(context)
+
+ q = Queue()
+
+ self.process = Process(target=self.run, args=(q,))
+ self.process.start()
- # update_summary():
+ # Retrieve port from server subprocess
+ port = q.get()
+
+ self.repo = 'http://localhost:{}'.format(port)
+
+ # run():
#
- # Ensure that the summary is up to date
+ # Run the artifact server.
#
- def update_summary(self):
- subprocess.call(['ostree', 'summary',
- '--update',
- '--repo', self.repo])
+ def run(self, q):
+ pytest_cov.embed.cleanup_on_sigterm()
+
+ server = create_server(self.repodir, enable_push=True)
+ port = server.add_insecure_port('localhost:0')
+
+ server.start()
+
+ # Send port to parent
+ q.put(port)
+
+ # Block until the parent terminates us with a signal
+ signal.pause()
# has_artifact():
#
@@ -77,8 +88,8 @@ class ArtifactShare():
# (bool): True if the artifact exists in the share, otherwise false.
def has_artifact(self, project_name, element_name, cache_key):
- # NOTE: This should be kept in line with our ostree
- # based artifact cache code, the below is the
+ # NOTE: This should be kept in line with our
+ # artifact cache code; the below is the
# same algo for creating an artifact reference
#
@@ -93,18 +104,31 @@ class ArtifactShare():
])
artifact_key = '{0}/{1}/{2}'.format(project_name, element_name, cache_key)
- if not subprocess.call(['ostree', 'rev-parse',
- '--repo', self.repo,
- artifact_key]):
+ try:
+ # resolve_ref() raises ArtifactError if the ref is not in the cache
+ self.cas.resolve_ref(artifact_key)
return True
+ except ArtifactError:
+ return False
- return False
+ # close():
+ #
+ # Remove the artifact share.
+ #
+ def close(self):
+ self.process.terminate()
+ self.process.join()
+
+ shutil.rmtree(self.directory)
# create_artifact_share()
#
# Create an ArtifactShare for use in a test case
#
+@contextmanager
def create_artifact_share(directory):
-
- return ArtifactShare(directory)
+ share = ArtifactShare(directory)
+ try:
+ yield share
+ finally:
+ share.close()
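The heart of the rewritten ArtifactShare is the parent/child handshake: the child binds the server to port 0, reports the kernel-assigned port back over a multiprocessing.Queue, and parks in signal.pause() until close() delivers SIGTERM. A generic sketch of that handshake, with a plain TCP socket standing in for the CAS gRPC server:

    import signal
    import socket
    from multiprocessing import Process, Queue

    def serve(q):
        sock = socket.socket()
        sock.bind(('localhost', 0))   # port 0: the OS picks any free port
        sock.listen()
        q.put(sock.getsockname()[1])  # hand the chosen port to the parent
        signal.pause()                # Unix-only: block until SIGTERM

    if __name__ == '__main__':
        q = Queue()
        child = Process(target=serve, args=(q,))
        child.start()
        port = q.get()                # blocks until the child has bound
        print('share would be at http://localhost:{}'.format(port))
        child.terminate()             # mirrors ArtifactShare.close()
        child.join()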
diff --git a/tests/testutils/runcli.py b/tests/testutils/runcli.py
index 658e38874..96d4ea457 100644
--- a/tests/testutils/runcli.py
+++ b/tests/testutils/runcli.py
@@ -19,8 +19,6 @@ import pytest
#
from _pytest.capture import MultiCapture, FDCapture
-from tests.testutils.site import IS_LINUX
-
# Import the main cli entrypoint
from buildstream._frontend import cli as bst_cli
from buildstream import _yaml
@@ -203,10 +201,7 @@ class Cli():
def remove_artifact_from_cache(self, project, element_name):
cache_dir = os.path.join(project, 'cache', 'artifacts')
- if IS_LINUX:
- cache_dir = os.path.join(cache_dir, 'ostree', 'refs', 'heads')
- else:
- cache_dir = os.path.join(cache_dir, 'tar')
+ cache_dir = os.path.join(cache_dir, 'cas', 'refs', 'heads')
cache_dir = os.path.splitext(os.path.join(cache_dir, 'test', element_name))[0]
shutil.rmtree(cache_dir)
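For reference, the new code path assumes CAS refs live under cache/artifacts/cas/refs/heads/<project>/<element> with the .bst suffix stripped, so removing that directory drops every cached key for the element. A sketch of the path computation, with the 'test' project name the helper hard-codes:

    import os

    def cas_ref_dir(project_dir, element_name):
        # cache/artifacts/cas/refs/heads/test/<element>, minus .bst;
        # 'test' matches the project name hard-coded in the helper above.
        cache_dir = os.path.join(project_dir, 'cache', 'artifacts',
                                 'cas', 'refs', 'heads')
        return os.path.splitext(os.path.join(cache_dir, 'test', element_name))[0]

    # cas_ref_dir('/src/proj', 'element1.bst')
    #   -> '/src/proj/cache/artifacts/cas/refs/heads/test/element1'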