Diffstat (limited to 'tests/artifactcache/push.py')
-rw-r--r-- | tests/artifactcache/push.py | 105
1 file changed, 42 insertions, 63 deletions
diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index 62c443d61..238d5f7ef 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -14,10 +14,7 @@ from tests.testutils import create_artifact_share, create_split_share, dummy_con
 
 
 # Project directory
-DATA_DIR = os.path.join(
-    os.path.dirname(os.path.realpath(__file__)),
-    "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
 
 
 # Push the given element and return its artifact key for assertions.
@@ -28,8 +25,8 @@ def _push(cli, cache_dir, project_dir, config_file, target):
         project.ensure_fully_loaded()
 
         # Assert that the element's artifact is cached
-        element = project.load_elements(['target.bst'])[0]
-        element_key = cli.get_element_key(project_dir, 'target.bst')
+        element = project.load_elements(["target.bst"])[0]
+        element_key = cli.get_element_key(project_dir, "target.bst")
         assert cli.artifact.is_cached(cache_dir, element, element_key)
 
         # Create a local artifact cache handle
@@ -46,8 +43,7 @@ def _push(cli, cache_dir, project_dir, config_file, target):
         artifactcache.setup_remotes(use_config=True)
         artifactcache.initialize_remotes()
 
-        assert artifactcache.has_push_remotes(plugin=element), \
-            "No remote configured for element target.bst"
+        assert artifactcache.has_push_remotes(plugin=element), "No remote configured for element target.bst"
         assert element._push(), "Push operation failed"
 
         return element_key
@@ -58,32 +54,27 @@ def test_push(cli, tmpdir, datafiles):
     project_dir = str(datafiles)
 
     # First build the project without the artifact cache configured
-    result = cli.run(project=project_dir, args=['build', 'target.bst'])
+    result = cli.run(project=project_dir, args=["build", "target.bst"])
     result.assert_success()
 
     # Assert that we are now cached locally
-    assert cli.get_element_state(project_dir, 'target.bst') == 'cached'
+    assert cli.get_element_state(project_dir, "target.bst") == "cached"
 
     # Set up an artifact cache.
-    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
         # Configure artifact share
-        rootcache_dir = os.path.join(str(tmpdir), 'cache')
-        user_config_file = str(tmpdir.join('buildstream.conf'))
+        rootcache_dir = os.path.join(str(tmpdir), "cache")
+        user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
-            'scheduler': {
-                'pushers': 1
-            },
-            'artifacts': {
-                'url': share.repo,
-                'push': True,
-            },
-            'cachedir': rootcache_dir
+            "scheduler": {"pushers": 1},
+            "artifacts": {"url": share.repo, "push": True,},
+            "cachedir": rootcache_dir,
         }
 
         # Write down the user configuration file
         _yaml.roundtrip_dump(user_config, file=user_config_file)
 
-        element_key = _push(cli, rootcache_dir, project_dir, user_config_file, 'target.bst')
-        assert share.get_artifact(cli.get_artifact_name(project_dir, 'test', 'target.bst', cache_key=element_key))
+        element_key = _push(cli, rootcache_dir, project_dir, user_config_file, "target.bst")
+        assert share.get_artifact(cli.get_artifact_name(project_dir, "test", "target.bst", cache_key=element_key))
 
 
 @pytest.mark.datafiles(DATA_DIR)
@@ -91,41 +82,33 @@ def test_push_split(cli, tmpdir, datafiles):
     project_dir = str(datafiles)
 
     # First build the project without the artifact cache configured
-    result = cli.run(project=project_dir, args=['build', 'target.bst'])
+    result = cli.run(project=project_dir, args=["build", "target.bst"])
    result.assert_success()
 
     # Assert that we are now cached locally
-    assert cli.get_element_state(project_dir, 'target.bst') == 'cached'
+    assert cli.get_element_state(project_dir, "target.bst") == "cached"
 
-    indexshare = os.path.join(str(tmpdir), 'indexshare')
-    storageshare = os.path.join(str(tmpdir), 'storageshare')
+    indexshare = os.path.join(str(tmpdir), "indexshare")
+    storageshare = os.path.join(str(tmpdir), "storageshare")
 
     # Set up an artifact cache.
     with create_split_share(indexshare, storageshare) as (index, storage):
-        rootcache_dir = os.path.join(str(tmpdir), 'cache')
+        rootcache_dir = os.path.join(str(tmpdir), "cache")
         user_config = {
-            'scheduler': {
-                'pushers': 1
-            },
-            'artifacts': [{
-                'url': index.repo,
-                'push': True,
-                'type': 'index'
-            }, {
-                'url': storage.repo,
-                'push': True,
-                'type': 'storage'
-            }],
-            'cachedir': rootcache_dir
+            "scheduler": {"pushers": 1},
+            "artifacts": [
+                {"url": index.repo, "push": True, "type": "index"},
+                {"url": storage.repo, "push": True, "type": "storage"},
+            ],
+            "cachedir": rootcache_dir,
        }
-        config_path = str(tmpdir.join('buildstream.conf'))
+        config_path = str(tmpdir.join("buildstream.conf"))
         _yaml.roundtrip_dump(user_config, file=config_path)
 
-        element_key = _push(cli, rootcache_dir, project_dir, config_path, 'target.bst')
-        proto = index.get_artifact_proto(cli.get_artifact_name(project_dir,
-                                                               'test',
-                                                               'target.bst',
-                                                               cache_key=element_key))
+        element_key = _push(cli, rootcache_dir, project_dir, config_path, "target.bst")
+        proto = index.get_artifact_proto(
+            cli.get_artifact_name(project_dir, "test", "target.bst", cache_key=element_key)
+        )
 
         assert storage.get_cas_files(proto) is not None
@@ -134,20 +117,15 @@ def test_push_message(tmpdir, datafiles):
     project_dir = str(datafiles)
 
     # Set up an artifact cache.
-    artifactshare = os.path.join(str(tmpdir), 'artifactshare')
+    artifactshare = os.path.join(str(tmpdir), "artifactshare")
     with create_artifact_share(artifactshare) as share:
         # Configure artifact share
-        rootcache_dir = os.path.join(str(tmpdir), 'cache')
-        user_config_file = str(tmpdir.join('buildstream.conf'))
+        rootcache_dir = os.path.join(str(tmpdir), "cache")
+        user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
-            'scheduler': {
-                'pushers': 1
-            },
-            'artifacts': {
-                'url': share.repo,
-                'push': True,
-            },
-            'cachedir': rootcache_dir
+            "scheduler": {"pushers": 1},
+            "artifacts": {"url": share.repo, "push": True,},
+            "cachedir": rootcache_dir,
        }
 
         # Write down the user configuration file
@@ -166,15 +144,16 @@ def test_push_message(tmpdir, datafiles):
         artifactcache.initialize_remotes()
         assert artifactcache.has_push_remotes()
 
-        command = remote_execution_pb2.Command(arguments=['/usr/bin/gcc', '--help'],
-                                               working_directory='/buildstream-build',
-                                               output_directories=['/buildstream-install'])
+        command = remote_execution_pb2.Command(
+            arguments=["/usr/bin/gcc", "--help"],
+            working_directory="/buildstream-build",
+            output_directories=["/buildstream-install"],
+        )
 
         # Push the message object
         command_digest = artifactcache.push_message(project, command)
         message_hash, message_size = command_digest.hash, command_digest.size_bytes
         assert message_hash and message_size
 
-        message_digest = remote_execution_pb2.Digest(hash=message_hash,
-                                                     size_bytes=message_size)
+        message_digest = remote_execution_pb2.Digest(hash=message_hash, size_bytes=message_size)
         assert share.has_object(message_digest)
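For readers skimming the diff: the single-remote tests (test_push, test_push_message) build an in-memory user_config dict and serialize it to buildstream.conf with _yaml.roundtrip_dump() before pushing. Below is a minimal standalone sketch of that same pattern, assuming the buildstream package's internal _yaml module is importable as shown; the share URL and directories are hypothetical placeholders, not values taken from the tests.

import os
import tempfile

from buildstream import _yaml  # same serialization helper the tests call

# Hypothetical stand-ins for share.repo and the pytest tmpdir fixture
share_url = "http://localhost:8080/artifactshare"
workdir = tempfile.mkdtemp()
rootcache_dir = os.path.join(workdir, "cache")
user_config_file = os.path.join(workdir, "buildstream.conf")

# Same shape as the reformatted dict in test_push: one push-enabled
# artifact remote plus a private cache directory for the test run.
user_config = {
    "scheduler": {"pushers": 1},
    "artifacts": {"url": share_url, "push": True},
    "cachedir": rootcache_dir,
}

# Write out the configuration file that the tests pass to _push() as config_file
_yaml.roundtrip_dump(user_config, file=user_config_file)

The split variant in test_push_split differs only in the "artifacts" value, which becomes a list of two remotes tagged "type": "index" and "type": "storage", so the artifact proto is fetched from the index share while the CAS files are checked on the storage share.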