author     Jürg Billeter <j@bitron.ch>                     2019-03-13 16:34:34 +0000
committer  bst-marge-bot <marge-bot@buildstream.build>     2019-03-27 16:24:32 +0000
commit     c0315a4a9a96a735aec991d931e745499c1109e5 (patch)
tree       b4935b289d419c771e02489873d2c83a15d427e9
parent     efd07ed838d2d2fe02af5206e92c0c60d36aa965 (diff)
download   buildstream-c0315a4a9a96a735aec991d931e745499c1109e5.tar.gz
tests/artifactcache/push.py: Remove push_directory() test
push_directory() is unused and will be removed.
-rw-r--r--   tests/artifactcache/push.py   108
1 file changed, 0 insertions, 108 deletions
diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index 69f3fbfbb..56af50a0d 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -136,114 +136,6 @@ def _test_push(user_config_file, project_dir, element_name, element_key, queue):
@pytest.mark.datafiles(DATA_DIR)
-def test_push_directory(cli, tmpdir, datafiles):
- project_dir = str(datafiles)
-
- # First build the project without the artifact cache configured
- result = cli.run(project=project_dir, args=['build', 'target.bst'])
- result.assert_success()
-
- # Assert that we are now cached locally
- assert cli.get_element_state(project_dir, 'target.bst') == 'cached'
-
- # Set up an artifact cache.
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
- # Configure artifact share
- rootcache_dir = os.path.join(str(tmpdir), 'cache')
- user_config_file = str(tmpdir.join('buildstream.conf'))
- user_config = {
- 'scheduler': {
- 'pushers': 1
- },
- 'artifacts': {
- 'url': share.repo,
- 'push': True,
- },
- 'cachedir': rootcache_dir
- }
-
- # Write down the user configuration file
- _yaml.dump(_yaml.node_sanitize(user_config), filename=user_config_file)
-
- # Fake minimal context
- context = Context()
- context.load(config=user_config_file)
- context.set_message_handler(message_handler)
-
- # Load the project and CAS cache
- project = Project(project_dir, context)
- project.ensure_fully_loaded()
- artifactcache = context.artifactcache
- cas = artifactcache.cas
-
- # Assert that the element's artifact is cached
- element = project.load_elements(['target.bst'])[0]
- element_key = cli.get_element_key(project_dir, 'target.bst')
- assert artifactcache.contains(element, element_key)
-
- # Manually setup the CAS remote
- artifactcache.setup_remotes(use_config=True)
- artifactcache.initialize_remotes()
- assert artifactcache.has_push_remotes(plugin=element)
-
- # Recreate the CasBasedDirectory object from the cached artifact
- artifact_ref = element.get_artifact_name(element_key)
- artifact_digest = cas.resolve_ref(artifact_ref)
-
- queue = multiprocessing.Queue()
- # Use subprocess to avoid creation of gRPC threads in main BuildStream process
- # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
- process = multiprocessing.Process(target=_queue_wrapper,
- args=(_test_push_directory, queue, user_config_file,
- project_dir, artifact_digest))
-
- try:
- # Keep SIGINT blocked in the child process
- with _signals.blocked([signal.SIGINT], ignore=False):
- process.start()
-
- directory_hash = queue.get()
- process.join()
- except KeyboardInterrupt:
- utils._kill_process_tree(process.pid)
- raise
-
- assert directory_hash
- assert artifact_digest.hash == directory_hash
- assert share.has_object(artifact_digest)
-
-
-def _test_push_directory(user_config_file, project_dir, artifact_digest, queue):
- # Fake minimal context
- context = Context()
- context.load(config=user_config_file)
- context.set_message_handler(message_handler)
-
- # Load the project manually
- project = Project(project_dir, context)
- project.ensure_fully_loaded()
-
- # Create a local CAS cache handle
- cas = context.artifactcache
-
- # Manually setup the CAS remote
- cas.setup_remotes(use_config=True)
- cas.initialize_remotes()
-
- if cas.has_push_remotes():
- # Create a CasBasedDirectory from local CAS cache content
- directory = CasBasedDirectory(context.artifactcache.cas, digest=artifact_digest)
-
- # Push the CasBasedDirectory object
- cas.push_directory(project, directory)
-
- digest = directory._get_digest()
- queue.put(digest.hash)
- else:
- queue.put("No remote configured")
-
-
-@pytest.mark.datafiles(DATA_DIR)
def test_push_message(tmpdir, datafiles):
project_dir = str(datafiles)
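
For reference, the removed test ran the push in a multiprocessing.Process and read the result back over a multiprocessing.Queue, so that gRPC threads were never created in the main BuildStream process (see https://github.com/grpc/grpc/blob/master/doc/fork_support.md). Below is a minimal, self-contained sketch of that pattern; the _worker() function is a hypothetical stand-in for _test_push_directory(), and BuildStream's _signals.blocked() SIGINT handling is omitted since it has no direct stdlib equivalent:

    import multiprocessing

    def _worker(queue, value):
        # Stand-in for work that must not run in the parent process,
        # e.g. anything that would spawn gRPC threads.
        queue.put(value * 2)

    if __name__ == '__main__':
        queue = multiprocessing.Queue()
        process = multiprocessing.Process(target=_worker, args=(queue, 21))

        try:
            process.start()
            # Drain the queue before join(), as the removed test did;
            # joining first can deadlock if the child's result is still
            # buffered in the queue's feeder thread.
            result = queue.get()
            process.join()
        except KeyboardInterrupt:
            # The real test called utils._kill_process_tree(process.pid) here;
            # terminate() is the stdlib-only stand-in.
            process.terminate()
            raise

        assert result == 42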