author    bst-marge-bot <marge-bot@buildstream.build>  2019-07-16 14:27:12 +0000
committer bst-marge-bot <marge-bot@buildstream.build>  2019-07-16 14:27:12 +0000
commit    5c9753b3bd6bc135a5952f407a6f107bd777fb01 (patch)
tree      d6e63f2e28c5635e612c8ba50b1c319be25ee37f
parent    0f1938dd6b263fcf754221815986b3cee5cde9c9 (diff)
parent    7e9d42e4f110601642551939706f0d723f28a34d (diff)
Merge branch 'juerg/context' into 'master'

Make Context class a Python context manager

See merge request BuildStream/buildstream!1465
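A minimal usage sketch of the pattern this merge enables (illustrative, not part of the commit itself). With __enter__/__exit__ defined, callers scope the lifetime of a Context with a with-statement instead of instantiating it bare:

    from buildstream._context import Context

    with Context() as context:
        context.load()  # load the default user configuration
        # ... use context.artifactcache, context.messenger, etc. here ...
    # leaving the block runs __exit__(); since it returns None, exceptions propagate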
-rw-r--r--  src/buildstream/_context.py            14
-rw-r--r--  src/buildstream/_frontend/app.py      255
-rw-r--r--  src/buildstream/_frontend/cli.py       46
-rw-r--r--  tests/artifactcache/config.py          20
-rw-r--r--  tests/artifactcache/pull.py           225
-rw-r--r--  tests/artifactcache/push.py           143
-rw-r--r--  tests/format/include_composition.py   163
-rw-r--r--  tests/internals/context.py              9
-rw-r--r--  tests/internals/loader.py              49
-rw-r--r--  tests/internals/pluginloading.py       41
-rw-r--r--  tests/sourcecache/fetch.py            160
-rw-r--r--  tests/sourcecache/push.py              55
-rw-r--r--  tests/sourcecache/staging.py          110
-rw-r--r--  tests/testutils/__init__.py             1
-rw-r--r--  tests/testutils/context.py             46

15 files changed, 659 insertions(+), 678 deletions(-)
diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index 286ed6d3d..1e45d776d 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -164,6 +164,20 @@ class Context():
self._cascache = None
self._casquota = None
+ # __enter__()
+ #
+ # Called when entering the with-statement context.
+ #
+ def __enter__(self):
+ return self
+
+ # __exit__()
+ #
+ # Called when exiting the with-statement context.
+ #
+ def __exit__(self, exc_type, exc_value, traceback):
+ return None
+
# load()
#
# Loads the configuration files
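Note that the __exit__() added above returns None, so this context manager never suppresses exceptions raised inside the with-block; a short illustrative sketch (the error below is hypothetical):

    with Context() as context:
        raise RuntimeError("boom")  # propagates out of the with-block, since __exit__ returns None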
diff --git a/src/buildstream/_frontend/app.py b/src/buildstream/_frontend/app.py
index f04e9595e..372ade191 100644
--- a/src/buildstream/_frontend/app.py
+++ b/src/buildstream/_frontend/app.py
@@ -169,149 +169,152 @@ class App():
self._session_name = session_name
- #
- # Load the Context
- #
- try:
- self.context = Context()
- self.context.load(config)
- except BstError as e:
- self._error_exit(e, "Error loading user configuration")
-
- # Override things in the context from our command line options,
- # the command line when used, trumps the config files.
- #
- override_map = {
- 'strict': '_strict_build_plan',
- 'debug': 'log_debug',
- 'verbose': 'log_verbose',
- 'error_lines': 'log_error_lines',
- 'message_lines': 'log_message_lines',
- 'on_error': 'sched_error_action',
- 'fetchers': 'sched_fetchers',
- 'builders': 'sched_builders',
- 'pushers': 'sched_pushers',
- 'max_jobs': 'build_max_jobs',
- 'network_retries': 'sched_network_retries',
- 'pull_buildtrees': 'pull_buildtrees',
- 'cache_buildtrees': 'cache_buildtrees'
- }
- for cli_option, context_attr in override_map.items():
- option_value = self._main_options.get(cli_option)
- if option_value is not None:
- setattr(self.context, context_attr, option_value)
- try:
- Platform.get_platform()
- except BstError as e:
- self._error_exit(e, "Error instantiating platform")
-
- # Create the stream right away, we'll need to pass it around.
- self.stream = Stream(self.context, self._session_start,
- session_start_callback=self.session_start_cb,
- interrupt_callback=self._interrupt_handler,
- ticker_callback=self._tick)
-
- self._state = self.stream.get_state()
+ # Instantiate Context
+ with Context() as context:
+ self.context = context
- # Register callbacks with the State
- self._state.register_task_failed_callback(self._job_failed)
+ #
+ # Load the configuration
+ #
+ try:
+ self.context.load(config)
+ except BstError as e:
+ self._error_exit(e, "Error loading user configuration")
+
+ # Override things in the context from our command line options,
+ # the command line when used, trumps the config files.
+ #
+ override_map = {
+ 'strict': '_strict_build_plan',
+ 'debug': 'log_debug',
+ 'verbose': 'log_verbose',
+ 'error_lines': 'log_error_lines',
+ 'message_lines': 'log_message_lines',
+ 'on_error': 'sched_error_action',
+ 'fetchers': 'sched_fetchers',
+ 'builders': 'sched_builders',
+ 'pushers': 'sched_pushers',
+ 'max_jobs': 'build_max_jobs',
+ 'network_retries': 'sched_network_retries',
+ 'pull_buildtrees': 'pull_buildtrees',
+ 'cache_buildtrees': 'cache_buildtrees'
+ }
+ for cli_option, context_attr in override_map.items():
+ option_value = self._main_options.get(cli_option)
+ if option_value is not None:
+ setattr(self.context, context_attr, option_value)
+ try:
+ Platform.get_platform()
+ except BstError as e:
+ self._error_exit(e, "Error instantiating platform")
+
+ # Create the stream right away, we'll need to pass it around.
+ self.stream = Stream(self.context, self._session_start,
+ session_start_callback=self.session_start_cb,
+ interrupt_callback=self._interrupt_handler,
+ ticker_callback=self._tick)
+
+ self._state = self.stream.get_state()
+
+ # Register callbacks with the State
+ self._state.register_task_failed_callback(self._job_failed)
+
+ # Create the logger right before setting the message handler
+ self.logger = LogLine(self.context, self._state,
+ self._content_profile,
+ self._format_profile,
+ self._success_profile,
+ self._error_profile,
+ self._detail_profile,
+ indent=INDENT)
+
+ # Propagate pipeline feedback to the user
+ self.context.messenger.set_message_handler(self._message_handler)
+
+ # Preflight the artifact cache after initializing logging,
+ # this can cause messages to be emitted.
+ try:
+ self.context.artifactcache.preflight()
+ except BstError as e:
+ self._error_exit(e, "Error instantiating artifact cache")
- # Create the logger right before setting the message handler
- self.logger = LogLine(self.context, self._state,
- self._content_profile,
- self._format_profile,
- self._success_profile,
- self._error_profile,
- self._detail_profile,
- indent=INDENT)
+ # Now that we have a logger and message handler,
+ # we can override the global exception hook.
+ sys.excepthook = self._global_exception_handler
- # Propagate pipeline feedback to the user
- self.context.messenger.set_message_handler(self._message_handler)
+ # Initialize the parts of Stream that have side-effects
+ self.stream.init()
- # Preflight the artifact cache after initializing logging,
- # this can cause messages to be emitted.
- try:
- self.context.artifactcache.preflight()
- except BstError as e:
- self._error_exit(e, "Error instantiating artifact cache")
+ # Create our status printer, only available in interactive
+ self._status = Status(self.context, self._state,
+ self._content_profile, self._format_profile,
+ self._success_profile, self._error_profile,
+ self.stream, colors=self.colors)
- # Now that we have a logger and message handler,
- # we can override the global exception hook.
- sys.excepthook = self._global_exception_handler
-
- # Initialize the parts of Stream that have side-effects
- self.stream.init()
-
- # Create our status printer, only available in interactive
- self._status = Status(self.context, self._state,
- self._content_profile, self._format_profile,
- self._success_profile, self._error_profile,
- self.stream, colors=self.colors)
+ # Mark the beginning of the session
+ if session_name:
+ self._message(MessageType.START, session_name)
- # Mark the beginning of the session
- if session_name:
- self._message(MessageType.START, session_name)
+ #
+ # Load the Project
+ #
+ try:
+ self.project = Project(directory, self.context, cli_options=self._main_options['option'],
+ default_mirror=self._main_options.get('default_mirror'),
+ fetch_subprojects=self.stream.fetch_subprojects)
- #
- # Load the Project
- #
- try:
- self.project = Project(directory, self.context, cli_options=self._main_options['option'],
- default_mirror=self._main_options.get('default_mirror'),
- fetch_subprojects=self.stream.fetch_subprojects)
-
- self.stream.set_project(self.project)
- except LoadError as e:
-
- # Help users that are new to BuildStream by suggesting 'init'.
- # We don't want to slow down users that just made a mistake, so
- # don't stop them with an offer to create a project for them.
- if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
- click.echo("No project found. You can create a new project like so:", err=True)
- click.echo("", err=True)
- click.echo(" bst init", err=True)
+ self.stream.set_project(self.project)
+ except LoadError as e:
- self._error_exit(e, "Error loading project")
+ # Help users that are new to BuildStream by suggesting 'init'.
+ # We don't want to slow down users that just made a mistake, so
+ # don't stop them with an offer to create a project for them.
+ if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
+ click.echo("No project found. You can create a new project like so:", err=True)
+ click.echo("", err=True)
+ click.echo(" bst init", err=True)
- except BstError as e:
- self._error_exit(e, "Error loading project")
+ self._error_exit(e, "Error loading project")
- # Run the body of the session here, once everything is loaded
- try:
- yield
- except BstError as e:
+ except BstError as e:
+ self._error_exit(e, "Error loading project")
- # Print a nice summary if this is a session
- if session_name:
- elapsed = self.stream.elapsed_time
+ # Run the body of the session here, once everything is loaded
+ try:
+ yield
+ except BstError as e:
- if isinstance(e, StreamError) and e.terminated: # pylint: disable=no-member
- self._message(MessageType.WARN, session_name + ' Terminated', elapsed=elapsed)
- else:
- self._message(MessageType.FAIL, session_name, elapsed=elapsed)
+ # Print a nice summary if this is a session
+ if session_name:
+ elapsed = self.stream.elapsed_time
- # Notify session failure
- self._notify("{} failed".format(session_name), e)
+ if isinstance(e, StreamError) and e.terminated: # pylint: disable=no-member
+ self._message(MessageType.WARN, session_name + ' Terminated', elapsed=elapsed)
+ else:
+ self._message(MessageType.FAIL, session_name, elapsed=elapsed)
- if self._started:
- self._print_summary()
+ # Notify session failure
+ self._notify("{} failed".format(session_name), e)
- # Exit with the error
- self._error_exit(e)
- except RecursionError:
- click.echo("RecursionError: Dependency depth is too large. Maximum recursion depth exceeded.",
- err=True)
- sys.exit(-1)
+ if self._started:
+ self._print_summary()
- else:
- # No exceptions occurred, print session time and summary
- if session_name:
- self._message(MessageType.SUCCESS, session_name, elapsed=self.stream.elapsed_time)
- if self._started:
- self._print_summary()
+ # Exit with the error
+ self._error_exit(e)
+ except RecursionError:
+ click.echo("RecursionError: Dependency depth is too large. Maximum recursion depth exceeded.",
+ err=True)
+ sys.exit(-1)
- # Notify session success
- self._notify("{} succeeded".format(session_name), "")
+ else:
+ # No exceptions occurred, print session time and summary
+ if session_name:
+ self._message(MessageType.SUCCESS, session_name, elapsed=self.stream.elapsed_time)
+ if self._started:
+ self._print_summary()
+
+ # Notify session success
+ self._notify("{} succeeded".format(session_name), "")
# init_project()
#
diff --git a/src/buildstream/_frontend/cli.py b/src/buildstream/_frontend/cli.py
index 8fd834825..d4686e4fc 100644
--- a/src/buildstream/_frontend/cli.py
+++ b/src/buildstream/_frontend/cli.py
@@ -117,30 +117,30 @@ def complete_target(args, incomplete):
def complete_artifact(orig_args, args, incomplete):
from .._context import Context
- ctx = Context()
-
- config = None
- if orig_args:
- for i, arg in enumerate(orig_args):
- if arg in ('-c', '--config'):
- try:
- config = orig_args[i + 1]
- except IndexError:
- pass
- if args:
- for i, arg in enumerate(args):
- if arg in ('-c', '--config'):
- try:
- config = args[i + 1]
- except IndexError:
- pass
- ctx.load(config)
-
- # element targets are valid artifact names
- complete_list = complete_target(args, incomplete)
- complete_list.extend(ref for ref in ctx.artifactcache.list_artifacts() if ref.startswith(incomplete))
+ with Context() as ctx:
- return complete_list
+ config = None
+ if orig_args:
+ for i, arg in enumerate(orig_args):
+ if arg in ('-c', '--config'):
+ try:
+ config = orig_args[i + 1]
+ except IndexError:
+ pass
+ if args:
+ for i, arg in enumerate(args):
+ if arg in ('-c', '--config'):
+ try:
+ config = args[i + 1]
+ except IndexError:
+ pass
+ ctx.load(config)
+
+ # element targets are valid artifact names
+ complete_list = complete_target(args, incomplete)
+ complete_list.extend(ref for ref in ctx.artifactcache.list_artifacts() if ref.startswith(incomplete))
+
+ return complete_list
def override_completions(orig_args, cmd, cmd_param, args, incomplete):
diff --git a/tests/artifactcache/config.py b/tests/artifactcache/config.py
index db51d196c..08d6f74bb 100644
--- a/tests/artifactcache/config.py
+++ b/tests/artifactcache/config.py
@@ -7,7 +7,6 @@ import os
import pytest
from buildstream._artifactcache import ArtifactCacheSpec, ArtifactCache
-from buildstream._context import Context
from buildstream._project import Project
from buildstream.utils import _deduplicate
from buildstream import _yaml
@@ -15,6 +14,8 @@ from buildstream._exceptions import ErrorDomain, LoadErrorReason
from buildstream.testing.runcli import cli # pylint: disable=unused-import
+from tests.testutils import dummy_context
+
DATA_DIR = os.path.dirname(os.path.realpath(__file__))
cache1 = ArtifactCacheSpec(url='https://example.com/cache1', push=True)
@@ -107,17 +108,16 @@ def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user
project_config_file = str(project_dir.join('project.conf'))
_yaml.roundtrip_dump(project_config, file=project_config_file)
- context = Context()
- context.load(config=user_config_file)
- project = Project(str(project_dir), context)
- project.ensure_fully_loaded()
+ with dummy_context(config=user_config_file) as context:
+ project = Project(str(project_dir), context)
+ project.ensure_fully_loaded()
- # Use the helper from the artifactcache module to parse our configuration.
- parsed_cache_specs = ArtifactCache._configured_remote_cache_specs(context, project)
+ # Use the helper from the artifactcache module to parse our configuration.
+ parsed_cache_specs = ArtifactCache._configured_remote_cache_specs(context, project)
- # Verify that it was correctly read.
- expected_cache_specs = list(_deduplicate(itertools.chain(override_caches, project_caches, user_caches)))
- assert parsed_cache_specs == expected_cache_specs
+ # Verify that it was correctly read.
+ expected_cache_specs = list(_deduplicate(itertools.chain(override_caches, project_caches, user_caches)))
+ assert parsed_cache_specs == expected_cache_specs
# Assert that if either the client key or client cert is specified
diff --git a/tests/artifactcache/pull.py b/tests/artifactcache/pull.py
index 72f3103f0..6003cea41 100644
--- a/tests/artifactcache/pull.py
+++ b/tests/artifactcache/pull.py
@@ -8,12 +8,11 @@ import signal
import pytest
from buildstream import _yaml, _signals, utils
-from buildstream._context import Context
from buildstream._project import Project
from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
from buildstream.testing import cli # pylint: disable=unused-import
-from tests.testutils import create_artifact_share
+from tests.testutils import create_artifact_share, dummy_context
# Project directory
@@ -23,11 +22,6 @@ DATA_DIR = os.path.join(
)
-# Handle messages from the pipeline
-def message_handler(message, is_silenced):
- pass
-
-
# Since parent processes wait for queue events, we need
# to put something on it if the called process raises an
# exception.
@@ -92,73 +86,66 @@ def test_pull(cli, tmpdir, datafiles):
# Assert that we are not cached locally anymore
assert cli.get_element_state(project_dir, 'target.bst') != 'cached'
- # Fake minimal context
- context = Context()
- context.load(config=user_config_file)
- context.messenger.set_message_handler(message_handler)
-
- # Load the project
- project = Project(project_dir, context)
- project.ensure_fully_loaded()
+ with dummy_context(config=user_config_file) as context:
+ # Load the project
+ project = Project(project_dir, context)
+ project.ensure_fully_loaded()
- # Assert that the element's artifact is **not** cached
- element = project.load_elements(['target.bst'])[0]
- element_key = cli.get_element_key(project_dir, 'target.bst')
- assert not cli.artifact.is_cached(cache_dir, element, element_key)
+ # Assert that the element's artifact is **not** cached
+ element = project.load_elements(['target.bst'])[0]
+ element_key = cli.get_element_key(project_dir, 'target.bst')
+ assert not cli.artifact.is_cached(cache_dir, element, element_key)
- queue = multiprocessing.Queue()
- # Use subprocess to avoid creation of gRPC threads in main BuildStream process
- # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
- process = multiprocessing.Process(target=_queue_wrapper,
- args=(_test_pull, queue, user_config_file, project_dir,
- cache_dir, 'target.bst', element_key))
+ queue = multiprocessing.Queue()
+ # Use subprocess to avoid creation of gRPC threads in main BuildStream process
+ # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
+ process = multiprocessing.Process(target=_queue_wrapper,
+ args=(_test_pull, queue, user_config_file, project_dir,
+ cache_dir, 'target.bst', element_key))
- try:
- # Keep SIGINT blocked in the child process
- with _signals.blocked([signal.SIGINT], ignore=False):
- process.start()
+ try:
+ # Keep SIGINT blocked in the child process
+ with _signals.blocked([signal.SIGINT], ignore=False):
+ process.start()
- error = queue.get()
- process.join()
- except KeyboardInterrupt:
- utils._kill_process_tree(process.pid)
- raise
+ error = queue.get()
+ process.join()
+ except KeyboardInterrupt:
+ utils._kill_process_tree(process.pid)
+ raise
- assert not error
- assert cli.artifact.is_cached(cache_dir, element, element_key)
+ assert not error
+ assert cli.artifact.is_cached(cache_dir, element, element_key)
def _test_pull(user_config_file, project_dir, cache_dir,
element_name, element_key, queue):
- # Fake minimal context
- context = Context()
- context.load(config=user_config_file)
- context.cachedir = cache_dir
- context.casdir = os.path.join(cache_dir, 'cas')
- context.tmpdir = os.path.join(cache_dir, 'tmp')
- context.messenger.set_message_handler(message_handler)
-
- # Load the project manually
- project = Project(project_dir, context)
- project.ensure_fully_loaded()
-
- # Create a local artifact cache handle
- artifactcache = context.artifactcache
-
- # Load the target element
- element = project.load_elements([element_name])[0]
-
- # Manually setup the CAS remote
- artifactcache.setup_remotes(use_config=True)
-
- if artifactcache.has_push_remotes(plugin=element):
- # Push the element's artifact
- if not artifactcache.pull(element, element_key):
- queue.put("Pull operation failed")
+ with dummy_context(config=user_config_file) as context:
+ context.cachedir = cache_dir
+ context.casdir = os.path.join(cache_dir, 'cas')
+ context.tmpdir = os.path.join(cache_dir, 'tmp')
+
+ # Load the project manually
+ project = Project(project_dir, context)
+ project.ensure_fully_loaded()
+
+ # Create a local artifact cache handle
+ artifactcache = context.artifactcache
+
+ # Load the target element
+ element = project.load_elements([element_name])[0]
+
+ # Manually setup the CAS remote
+ artifactcache.setup_remotes(use_config=True)
+
+ if artifactcache.has_push_remotes(plugin=element):
+ # Push the element's artifact
+ if not artifactcache.pull(element, element_key):
+ queue.put("Pull operation failed")
+ else:
+ queue.put(None)
else:
- queue.put(None)
- else:
- queue.put("No remote configured for element {}".format(element_name))
+ queue.put("No remote configured for element {}".format(element_name))
@pytest.mark.datafiles(DATA_DIR)
@@ -195,23 +182,19 @@ def test_pull_tree(cli, tmpdir, datafiles):
# Assert that we shared/pushed the cached artifact
assert share.has_artifact(cli.get_artifact_name(project_dir, 'test', 'target.bst'))
- # Fake minimal context
- context = Context()
- context.load(config=user_config_file)
- context.messenger.set_message_handler(message_handler)
-
- # Load the project and CAS cache
- project = Project(project_dir, context)
- project.ensure_fully_loaded()
- cas = context.get_cascache()
+ with dummy_context(config=user_config_file) as context:
+ # Load the project and CAS cache
+ project = Project(project_dir, context)
+ project.ensure_fully_loaded()
+ cas = context.get_cascache()
- # Assert that the element's artifact is cached
- element = project.load_elements(['target.bst'])[0]
- element_key = cli.get_element_key(project_dir, 'target.bst')
- assert cli.artifact.is_cached(rootcache_dir, element, element_key)
+ # Assert that the element's artifact is cached
+ element = project.load_elements(['target.bst'])[0]
+ element_key = cli.get_element_key(project_dir, 'target.bst')
+ assert cli.artifact.is_cached(rootcache_dir, element, element_key)
- # Retrieve the Directory object from the cached artifact
- artifact_digest = cli.artifact.get_digest(rootcache_dir, element, element_key)
+ # Retrieve the Directory object from the cached artifact
+ artifact_digest = cli.artifact.get_digest(rootcache_dir, element, element_key)
queue = multiprocessing.Queue()
# Use subprocess to avoid creation of gRPC threads in main BuildStream process
@@ -270,59 +253,51 @@ def test_pull_tree(cli, tmpdir, datafiles):
def _test_push_tree(user_config_file, project_dir, artifact_digest, queue):
- # Fake minimal context
- context = Context()
- context.load(config=user_config_file)
- context.messenger.set_message_handler(message_handler)
-
- # Load the project manually
- project = Project(project_dir, context)
- project.ensure_fully_loaded()
+ with dummy_context(config=user_config_file) as context:
+ # Load the project manually
+ project = Project(project_dir, context)
+ project.ensure_fully_loaded()
- # Create a local artifact cache and cas handle
- artifactcache = context.artifactcache
- cas = context.get_cascache()
+ # Create a local artifact cache and cas handle
+ artifactcache = context.artifactcache
+ cas = context.get_cascache()
- # Manually setup the CAS remote
- artifactcache.setup_remotes(use_config=True)
+ # Manually setup the CAS remote
+ artifactcache.setup_remotes(use_config=True)
- if artifactcache.has_push_remotes():
- directory = remote_execution_pb2.Directory()
+ if artifactcache.has_push_remotes():
+ directory = remote_execution_pb2.Directory()
- with open(cas.objpath(artifact_digest), 'rb') as f:
- directory.ParseFromString(f.read())
+ with open(cas.objpath(artifact_digest), 'rb') as f:
+ directory.ParseFromString(f.read())
- # Build the Tree object while we are still cached
- tree = remote_execution_pb2.Tree()
- tree_maker(cas, tree, directory)
+ # Build the Tree object while we are still cached
+ tree = remote_execution_pb2.Tree()
+ tree_maker(cas, tree, directory)
- # Push the Tree as a regular message
- tree_digest = artifactcache.push_message(project, tree)
+ # Push the Tree as a regular message
+ tree_digest = artifactcache.push_message(project, tree)
- queue.put((tree_digest.hash, tree_digest.size_bytes))
- else:
- queue.put("No remote configured")
+ queue.put((tree_digest.hash, tree_digest.size_bytes))
+ else:
+ queue.put("No remote configured")
def _test_pull_tree(user_config_file, project_dir, artifact_digest, queue):
- # Fake minimal context
- context = Context()
- context.load(config=user_config_file)
- context.messenger.set_message_handler(message_handler)
-
- # Load the project manually
- project = Project(project_dir, context)
- project.ensure_fully_loaded()
-
- # Create a local artifact cache handle
- artifactcache = context.artifactcache
-
- # Manually setup the CAS remote
- artifactcache.setup_remotes(use_config=True)
-
- if artifactcache.has_push_remotes():
- # Pull the artifact using the Tree object
- directory_digest = artifactcache.pull_tree(project, artifact_digest)
- queue.put((directory_digest.hash, directory_digest.size_bytes))
- else:
- queue.put("No remote configured")
+ with dummy_context(config=user_config_file) as context:
+ # Load the project manually
+ project = Project(project_dir, context)
+ project.ensure_fully_loaded()
+
+ # Create a local artifact cache handle
+ artifactcache = context.artifactcache
+
+ # Manually setup the CAS remote
+ artifactcache.setup_remotes(use_config=True)
+
+ if artifactcache.has_push_remotes():
+ # Pull the artifact using the Tree object
+ directory_digest = artifactcache.pull_tree(project, artifact_digest)
+ queue.put((directory_digest.hash, directory_digest.size_bytes))
+ else:
+ queue.put("No remote configured")
diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index da658f76b..81d75023d 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -8,11 +8,11 @@ import signal
import pytest
from buildstream import _yaml, _signals, utils, Scope
-from buildstream._context import Context
from buildstream._project import Project
from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
from buildstream.testing import cli # pylint: disable=unused-import
-from tests.testutils import create_artifact_share
+
+from tests.testutils import create_artifact_share, dummy_context
# Project directory
@@ -22,11 +22,6 @@ DATA_DIR = os.path.join(
)
-# Handle messages from the pipeline
-def message_handler(message, is_silenced):
- pass
-
-
# Since parent processes wait for queue events, we need
# to put something on it if the called process raises an
# exception.
@@ -68,19 +63,15 @@ def test_push(cli, tmpdir, datafiles):
# Write down the user configuration file
_yaml.roundtrip_dump(user_config, file=user_config_file)
- # Fake minimal context
- context = Context()
- context.load(config=user_config_file)
- context.messenger.set_message_handler(message_handler)
-
- # Load the project manually
- project = Project(project_dir, context)
- project.ensure_fully_loaded()
+ with dummy_context(config=user_config_file) as context:
+ # Load the project manually
+ project = Project(project_dir, context)
+ project.ensure_fully_loaded()
- # Assert that the element's artifact is cached
- element = project.load_elements(['target.bst'])[0]
- element_key = cli.get_element_key(project_dir, 'target.bst')
- assert cli.artifact.is_cached(rootcache_dir, element, element_key)
+ # Assert that the element's artifact is cached
+ element = project.load_elements(['target.bst'])[0]
+ element_key = cli.get_element_key(project_dir, 'target.bst')
+ assert cli.artifact.is_cached(rootcache_dir, element, element_key)
queue = multiprocessing.Queue()
# Use subprocess to avoid creation of gRPC threads in main BuildStream process
@@ -105,40 +96,36 @@ def test_push(cli, tmpdir, datafiles):
def _test_push(user_config_file, project_dir, element_name, queue):
- # Fake minimal context
- context = Context()
- context.load(config=user_config_file)
- context.messenger.set_message_handler(message_handler)
-
- # Load the project manually
- project = Project(project_dir, context)
- project.ensure_fully_loaded()
-
- # Create a local artifact cache handle
- artifactcache = context.artifactcache
-
- # Load the target element
- element = project.load_elements([element_name])[0]
-
- # Ensure the element's artifact member is initialised
- # This is duplicated from Pipeline.resolve_elements()
- # as this test does not use the cli frontend.
- for e in element.dependencies(Scope.ALL):
- # Determine initial element state.
- e._update_state()
-
- # Manually setup the CAS remotes
- artifactcache.setup_remotes(use_config=True)
- artifactcache.initialize_remotes()
-
- if artifactcache.has_push_remotes(plugin=element):
- # Push the element's artifact
- if not element._push():
- queue.put("Push operation failed")
+ with dummy_context(config=user_config_file) as context:
+ # Load the project manually
+ project = Project(project_dir, context)
+ project.ensure_fully_loaded()
+
+ # Create a local artifact cache handle
+ artifactcache = context.artifactcache
+
+ # Load the target element
+ element = project.load_elements([element_name])[0]
+
+ # Ensure the element's artifact member is initialised
+ # This is duplicated from Pipeline.resolve_elements()
+ # as this test does not use the cli frontend.
+ for e in element.dependencies(Scope.ALL):
+ # Determine initial element state.
+ e._update_state()
+
+ # Manually setup the CAS remotes
+ artifactcache.setup_remotes(use_config=True)
+ artifactcache.initialize_remotes()
+
+ if artifactcache.has_push_remotes(plugin=element):
+ # Push the element's artifact
+ if not element._push():
+ queue.put("Push operation failed")
+ else:
+ queue.put(None)
else:
- queue.put(None)
- else:
- queue.put("No remote configured for element {}".format(element_name))
+ queue.put("No remote configured for element {}".format(element_name))
@pytest.mark.datafiles(DATA_DIR)
@@ -189,31 +176,27 @@ def test_push_message(tmpdir, datafiles):
def _test_push_message(user_config_file, project_dir, queue):
- # Fake minimal context
- context = Context()
- context.load(config=user_config_file)
- context.messenger.set_message_handler(message_handler)
-
- # Load the project manually
- project = Project(project_dir, context)
- project.ensure_fully_loaded()
-
- # Create a local artifact cache handle
- artifactcache = context.artifactcache
-
- # Manually setup the artifact remote
- artifactcache.setup_remotes(use_config=True)
- artifactcache.initialize_remotes()
-
- if artifactcache.has_push_remotes():
- # Create an example message object
- command = remote_execution_pb2.Command(arguments=['/usr/bin/gcc', '--help'],
- working_directory='/buildstream-build',
- output_directories=['/buildstream-install'])
-
- # Push the message object
- command_digest = artifactcache.push_message(project, command)
-
- queue.put((command_digest.hash, command_digest.size_bytes))
- else:
- queue.put("No remote configured")
+ with dummy_context(config=user_config_file) as context:
+ # Load the project manually
+ project = Project(project_dir, context)
+ project.ensure_fully_loaded()
+
+ # Create a local artifact cache handle
+ artifactcache = context.artifactcache
+
+ # Manually setup the artifact remote
+ artifactcache.setup_remotes(use_config=True)
+ artifactcache.initialize_remotes()
+
+ if artifactcache.has_push_remotes():
+ # Create an example message object
+ command = remote_execution_pb2.Command(arguments=['/usr/bin/gcc', '--help'],
+ working_directory='/buildstream-build',
+ output_directories=['/buildstream-install'])
+
+ # Push the message object
+ command_digest = artifactcache.push_message(project, command)
+
+ queue.put((command_digest.hash, command_digest.size_bytes))
+ else:
+ queue.put("No remote configured")
diff --git a/tests/format/include_composition.py b/tests/format/include_composition.py
index f764b16a6..ec48d82a2 100644
--- a/tests/format/include_composition.py
+++ b/tests/format/include_composition.py
@@ -1,146 +1,151 @@
import os
-from buildstream._context import Context
+
+from contextlib import contextmanager
+
from buildstream._project import Project
from buildstream._includes import Includes
from buildstream import _yaml
+from tests.testutils import dummy_context
+
+@contextmanager
def make_includes(basedir):
_yaml.roundtrip_dump({'name': 'test'}, os.path.join(basedir, 'project.conf'))
- context = Context()
- project = Project(basedir, context)
- loader = project.loader
- return Includes(loader)
+ with dummy_context() as context:
+ project = Project(basedir, context)
+ loader = project.loader
+ yield Includes(loader)
def test_main_has_priority(tmpdir):
- includes = make_includes(str(tmpdir))
+ with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump({'(@)': ['a.yml'], 'test': ['main']},
- str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump({'(@)': ['a.yml'], 'test': ['main']},
+ str(tmpdir.join('main.yml')))
- main = _yaml.load(str(tmpdir.join('main.yml')))
+ main = _yaml.load(str(tmpdir.join('main.yml')))
- _yaml.roundtrip_dump({'test': ['a']}, str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump({'test': ['a']}, str(tmpdir.join('a.yml')))
- includes.process(main)
+ includes.process(main)
- assert main.get_sequence('test').as_str_list() == ['main']
+ assert main.get_sequence('test').as_str_list() == ['main']
def test_include_cannot_append(tmpdir):
- includes = make_includes(str(tmpdir))
+ with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump({'(@)': ['a.yml'], 'test': ['main']},
- str(tmpdir.join('main.yml')))
- main = _yaml.load(str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump({'(@)': ['a.yml'], 'test': ['main']},
+ str(tmpdir.join('main.yml')))
+ main = _yaml.load(str(tmpdir.join('main.yml')))
- _yaml.roundtrip_dump({'test': {'(>)': ['a']}},
- str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump({'test': {'(>)': ['a']}},
+ str(tmpdir.join('a.yml')))
- includes.process(main)
+ includes.process(main)
- assert main.get_sequence('test').as_str_list() == ['main']
+ assert main.get_sequence('test').as_str_list() == ['main']
def test_main_can_append(tmpdir):
- includes = make_includes(str(tmpdir))
+ with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump({'(@)': ['a.yml'], 'test': {'(>)': ['main']}},
- str(tmpdir.join('main.yml')))
- main = _yaml.load(str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump({'(@)': ['a.yml'], 'test': {'(>)': ['main']}},
+ str(tmpdir.join('main.yml')))
+ main = _yaml.load(str(tmpdir.join('main.yml')))
- _yaml.roundtrip_dump({'test': ['a']}, str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump({'test': ['a']}, str(tmpdir.join('a.yml')))
- includes.process(main)
+ includes.process(main)
- assert main.get_sequence('test').as_str_list() == ['a', 'main']
+ assert main.get_sequence('test').as_str_list() == ['a', 'main']
def test_sibling_cannot_append_backward(tmpdir):
- includes = make_includes(str(tmpdir))
+ with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml']},
- str(tmpdir.join('main.yml')))
- main = _yaml.load(str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml']},
+ str(tmpdir.join('main.yml')))
+ main = _yaml.load(str(tmpdir.join('main.yml')))
- _yaml.roundtrip_dump({'test': {'(>)': ['a']}},
- str(tmpdir.join('a.yml')))
- _yaml.roundtrip_dump({'test': ['b']},
- str(tmpdir.join('b.yml')))
+ _yaml.roundtrip_dump({'test': {'(>)': ['a']}},
+ str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump({'test': ['b']},
+ str(tmpdir.join('b.yml')))
- includes.process(main)
+ includes.process(main)
- assert main.get_sequence('test').as_str_list() == ['b']
+ assert main.get_sequence('test').as_str_list() == ['b']
def test_sibling_can_append_forward(tmpdir):
- includes = make_includes(str(tmpdir))
+ with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml']},
- str(tmpdir.join('main.yml')))
- main = _yaml.load(str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml']},
+ str(tmpdir.join('main.yml')))
+ main = _yaml.load(str(tmpdir.join('main.yml')))
- _yaml.roundtrip_dump({'test': ['a']},
- str(tmpdir.join('a.yml')))
- _yaml.roundtrip_dump({'test': {'(>)': ['b']}},
- str(tmpdir.join('b.yml')))
+ _yaml.roundtrip_dump({'test': ['a']},
+ str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump({'test': {'(>)': ['b']}},
+ str(tmpdir.join('b.yml')))
- includes.process(main)
+ includes.process(main)
- assert main.get_sequence('test').as_str_list() == ['a', 'b']
+ assert main.get_sequence('test').as_str_list() == ['a', 'b']
def test_lastest_sibling_has_priority(tmpdir):
- includes = make_includes(str(tmpdir))
+ with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml']},
- str(tmpdir.join('main.yml')))
- main = _yaml.load(str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml']},
+ str(tmpdir.join('main.yml')))
+ main = _yaml.load(str(tmpdir.join('main.yml')))
- _yaml.roundtrip_dump({'test': ['a']},
- str(tmpdir.join('a.yml')))
- _yaml.roundtrip_dump({'test': ['b']},
- str(tmpdir.join('b.yml')))
+ _yaml.roundtrip_dump({'test': ['a']},
+ str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump({'test': ['b']},
+ str(tmpdir.join('b.yml')))
- includes.process(main)
+ includes.process(main)
- assert main.get_sequence('test').as_str_list() == ['b']
+ assert main.get_sequence('test').as_str_list() == ['b']
def test_main_keeps_keys(tmpdir):
- includes = make_includes(str(tmpdir))
+ with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump({'(@)': ['a.yml'], 'something': 'else'},
- str(tmpdir.join('main.yml')))
- main = _yaml.load(str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump({'(@)': ['a.yml'], 'something': 'else'},
+ str(tmpdir.join('main.yml')))
+ main = _yaml.load(str(tmpdir.join('main.yml')))
- _yaml.roundtrip_dump({'test': ['a']}, str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump({'test': ['a']}, str(tmpdir.join('a.yml')))
- includes.process(main)
+ includes.process(main)
- assert main.get_sequence('test').as_str_list() == ['a']
- assert main.get_str('something') == 'else'
+ assert main.get_sequence('test').as_str_list() == ['a']
+ assert main.get_str('something') == 'else'
def test_overwrite_directive_on_later_composite(tmpdir):
- includes = make_includes(str(tmpdir))
+ with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml'], 'test': {'(=)': ['Overwritten']}},
- str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml'], 'test': {'(=)': ['Overwritten']}},
+ str(tmpdir.join('main.yml')))
- main = _yaml.load(str(tmpdir.join('main.yml')))
+ main = _yaml.load(str(tmpdir.join('main.yml')))
- # a.yml
- _yaml.roundtrip_dump({'test': ['some useless', 'list', 'to be overwritten'],
- 'foo': 'should not be present'},
- str(tmpdir.join('a.yml')))
+ # a.yml
+ _yaml.roundtrip_dump({'test': ['some useless', 'list', 'to be overwritten'],
+ 'foo': 'should not be present'},
+ str(tmpdir.join('a.yml')))
- # b.yaml isn't going to have a 'test' node to overwrite
- _yaml.roundtrip_dump({'foo': 'should be present'},
- str(tmpdir.join('b.yml')))
+ # b.yaml isn't going to have a 'test' node to overwrite
+ _yaml.roundtrip_dump({'foo': 'should be present'},
+ str(tmpdir.join('b.yml')))
- includes.process(main)
+ includes.process(main)
- assert main.get_sequence('test').as_str_list() == ['Overwritten']
- assert main.get_str('foo') == 'should be present'
+ assert main.get_sequence('test').as_str_list() == ['Overwritten']
+ assert main.get_str('foo') == 'should be present'
diff --git a/tests/internals/context.py b/tests/internals/context.py
index da7b307d8..ddd558b6c 100644
--- a/tests/internals/context.py
+++ b/tests/internals/context.py
@@ -21,10 +21,11 @@ def context_fixture():
else:
cache_home = os.path.expanduser('~/.cache')
- return {
- 'xdg-cache': cache_home,
- 'context': Context()
- }
+ with Context() as context:
+ yield {
+ 'xdg-cache': cache_home,
+ 'context': context
+ }
#######################################
diff --git a/tests/internals/loader.py b/tests/internals/loader.py
index a4ebdb9ac..9af2bf161 100644
--- a/tests/internals/loader.py
+++ b/tests/internals/loader.py
@@ -1,11 +1,13 @@
+from contextlib import contextmanager
import os
import pytest
from buildstream._exceptions import LoadError, LoadErrorReason
-from buildstream._context import Context
from buildstream._project import Project
from buildstream._loader import MetaElement
+from tests.testutils import dummy_context
+
DATA_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
@@ -13,16 +15,11 @@ DATA_DIR = os.path.join(
)
-def dummy_handler(message, is_silenced):
- pass
-
-
+@contextmanager
def make_loader(basedir):
- context = Context()
- context.load(config=os.devnull)
- context.messenger.set_message_handler(dummy_handler)
- project = Project(basedir, context)
- return project.loader
+ with dummy_context() as context:
+ project = Project(basedir, context)
+ yield project.loader
##############################################################
@@ -32,21 +29,18 @@ def make_loader(basedir):
def test_one_file(datafiles):
basedir = str(datafiles)
- loader = make_loader(basedir)
+ with make_loader(basedir) as loader:
+ element = loader.load(['elements/onefile.bst'])[0]
- element = loader.load(['elements/onefile.bst'])[0]
-
- assert isinstance(element, MetaElement)
- assert element.kind == 'pony'
+ assert isinstance(element, MetaElement)
+ assert element.kind == 'pony'
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'onefile'))
def test_missing_file(datafiles):
basedir = str(datafiles)
- loader = make_loader(basedir)
-
- with pytest.raises(LoadError) as exc:
+ with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
loader.load(['elements/missing.bst'])
assert exc.value.reason == LoadErrorReason.MISSING_FILE
@@ -56,9 +50,7 @@ def test_missing_file(datafiles):
def test_invalid_reference(datafiles):
basedir = str(datafiles)
- loader = make_loader(basedir)
-
- with pytest.raises(LoadError) as exc:
+ with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
loader.load(['elements/badreference.bst'])
assert exc.value.reason == LoadErrorReason.INVALID_YAML
@@ -68,9 +60,7 @@ def test_invalid_reference(datafiles):
def test_invalid_yaml(datafiles):
basedir = str(datafiles)
- loader = make_loader(basedir)
-
- with pytest.raises(LoadError) as exc:
+ with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
loader.load(['elements/badfile.bst'])
assert exc.value.reason == LoadErrorReason.INVALID_YAML
@@ -82,8 +72,7 @@ def test_fail_fullpath_target(datafiles):
basedir = str(datafiles)
fullpath = os.path.join(basedir, 'elements', 'onefile.bst')
- with pytest.raises(LoadError) as exc:
- loader = make_loader(basedir)
+ with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
loader.load([fullpath])
assert exc.value.reason == LoadErrorReason.INVALID_DATA
@@ -93,9 +82,7 @@ def test_fail_fullpath_target(datafiles):
def test_invalid_key(datafiles):
basedir = str(datafiles)
- loader = make_loader(basedir)
-
- with pytest.raises(LoadError) as exc:
+ with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
loader.load(['elements/invalidkey.bst'])
assert exc.value.reason == LoadErrorReason.INVALID_DATA
@@ -105,9 +92,7 @@ def test_invalid_key(datafiles):
def test_invalid_directory_load(datafiles):
basedir = str(datafiles)
- loader = make_loader(basedir)
-
- with pytest.raises(LoadError) as exc:
+ with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
loader.load(['elements/'])
assert exc.value.reason == LoadErrorReason.LOADING_DIRECTORY
diff --git a/tests/internals/pluginloading.py b/tests/internals/pluginloading.py
index 9093680f4..4b6baf229 100644
--- a/tests/internals/pluginloading.py
+++ b/tests/internals/pluginloading.py
@@ -1,56 +1,53 @@
+from contextlib import contextmanager
import os
import pytest
-from buildstream._context import Context
from buildstream._project import Project
from buildstream._exceptions import LoadError, LoadErrorReason
from buildstream._pipeline import Pipeline
+from tests.testutils import dummy_context
+
DATA_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'pluginloading',
)
+@contextmanager
def create_pipeline(tmpdir, basedir, target):
- context = Context()
- context.load(config=os.devnull)
- context.deploydir = os.path.join(str(tmpdir), 'deploy')
- context.casdir = os.path.join(str(tmpdir), 'cas')
- project = Project(basedir, context)
-
- def dummy_handler(message, is_silenced):
- pass
+ with dummy_context() as context:
+ context.deploydir = os.path.join(str(tmpdir), 'deploy')
+ context.casdir = os.path.join(str(tmpdir), 'cas')
+ project = Project(basedir, context)
- context.messenger.set_message_handler(dummy_handler)
-
- pipeline = Pipeline(context, project, None)
- targets, = pipeline.load([(target,)])
- return targets
+ pipeline = Pipeline(context, project, None)
+ targets, = pipeline.load([(target,)])
+ yield targets
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'customsource'))
def test_customsource(datafiles, tmpdir):
basedir = str(datafiles)
- targets = create_pipeline(tmpdir, basedir, 'simple.bst')
- assert targets[0].get_kind() == "autotools"
+ with create_pipeline(tmpdir, basedir, 'simple.bst') as targets:
+ assert targets[0].get_kind() == "autotools"
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'customelement'))
def test_customelement(datafiles, tmpdir):
basedir = str(datafiles)
- targets = create_pipeline(tmpdir, basedir, 'simple.bst')
- assert targets[0].get_kind() == "foo"
+ with create_pipeline(tmpdir, basedir, 'simple.bst') as targets:
+ assert targets[0].get_kind() == "foo"
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'badversionsource'))
def test_badversionsource(datafiles, tmpdir):
basedir = str(datafiles)
- with pytest.raises(LoadError) as exc:
- create_pipeline(tmpdir, basedir, 'simple.bst')
+ with pytest.raises(LoadError) as exc, create_pipeline(tmpdir, basedir, 'simple.bst'):
+ pass
assert exc.value.reason == LoadErrorReason.UNSUPPORTED_PLUGIN
@@ -59,7 +56,7 @@ def test_badversionsource(datafiles, tmpdir):
def test_badversionelement(datafiles, tmpdir):
basedir = str(datafiles)
- with pytest.raises(LoadError) as exc:
- create_pipeline(tmpdir, basedir, 'simple.bst')
+ with pytest.raises(LoadError) as exc, create_pipeline(tmpdir, basedir, 'simple.bst'):
+ pass
assert exc.value.reason == LoadErrorReason.UNSUPPORTED_PLUGIN
diff --git a/tests/sourcecache/fetch.py b/tests/sourcecache/fetch.py
index 300f0c84c..99c00f120 100644
--- a/tests/sourcecache/fetch.py
+++ b/tests/sourcecache/fetch.py
@@ -24,18 +24,14 @@ import shutil
import pytest
from buildstream._exceptions import ErrorDomain
-from buildstream._context import Context
from buildstream._project import Project
from buildstream import _yaml
from buildstream.testing import cli # pylint: disable=unused-import
from buildstream.testing import create_repo
-from tests.testutils import create_artifact_share
-
-DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
+from tests.testutils import create_artifact_share, dummy_context
-def message_handler(message, is_silenced):
- pass
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
@@ -69,55 +65,52 @@ def test_source_fetch(cli, tmpdir, datafiles):
}
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
- context = Context()
- context.load(config=user_config_file)
- context.messenger.set_message_handler(message_handler)
+ with dummy_context(config=user_config_file) as context:
+ project = Project(project_dir, context)
+ project.ensure_fully_loaded()
- project = Project(project_dir, context)
- project.ensure_fully_loaded()
+ element = project.load_elements(['fetch.bst'])[0]
+ assert not element._source_cached()
+ source = list(element.sources())[0]
- element = project.load_elements(['fetch.bst'])[0]
- assert not element._source_cached()
- source = list(element.sources())[0]
+ cas = context.get_cascache()
+ assert not cas.contains(source._get_source_name())
- cas = context.get_cascache()
- assert not cas.contains(source._get_source_name())
+ # Just check that we sensibly fetch and build the element
+ res = cli.run(project=project_dir, args=['build', 'fetch.bst'])
+ res.assert_success()
- # Just check that we sensibly fetch and build the element
- res = cli.run(project=project_dir, args=['build', 'fetch.bst'])
- res.assert_success()
+ assert os.listdir(os.path.join(str(tmpdir), 'cache', 'sources', 'git')) != []
- assert os.listdir(os.path.join(str(tmpdir), 'cache', 'sources', 'git')) != []
+ # get root digest of source
+ sourcecache = context.sourcecache
+ digest = sourcecache.export(source)._get_digest()
- # get root digest of source
- sourcecache = context.sourcecache
- digest = sourcecache.export(source)._get_digest()
+ # Move source in local cas to repo
+ shutil.rmtree(os.path.join(str(tmpdir), 'sourceshare', 'repo', 'cas'))
+ shutil.move(
+ os.path.join(str(tmpdir), 'cache', 'source_protos'),
+ os.path.join(str(tmpdir), 'sourceshare', 'repo'))
+ shutil.move(
+ os.path.join(str(tmpdir), 'cache', 'cas'),
+ os.path.join(str(tmpdir), 'sourceshare', 'repo'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'sources'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'artifacts'))
- # Move source in local cas to repo
- shutil.rmtree(os.path.join(str(tmpdir), 'sourceshare', 'repo', 'cas'))
- shutil.move(
- os.path.join(str(tmpdir), 'cache', 'source_protos'),
- os.path.join(str(tmpdir), 'sourceshare', 'repo'))
- shutil.move(
- os.path.join(str(tmpdir), 'cache', 'cas'),
- os.path.join(str(tmpdir), 'sourceshare', 'repo'))
- shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'sources'))
- shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'artifacts'))
+ # check the share has the object
+ assert share.has_object(digest)
- # check the share has the object
- assert share.has_object(digest)
+ state = cli.get_element_state(project_dir, 'fetch.bst')
+ assert state == 'fetch needed'
- state = cli.get_element_state(project_dir, 'fetch.bst')
- assert state == 'fetch needed'
+ # Now fetch the source and check
+ res = cli.run(project=project_dir, args=['source', 'fetch', 'fetch.bst'])
+ res.assert_success()
+ assert "Pulled source" in res.stderr
- # Now fetch the source and check
- res = cli.run(project=project_dir, args=['source', 'fetch', 'fetch.bst'])
- res.assert_success()
- assert "Pulled source" in res.stderr
-
- # check that we have the source in the cas now and it's not fetched
- assert element._source_cached()
- assert os.listdir(os.path.join(str(tmpdir), 'cache', 'sources', 'git')) == []
+ # check that we have the source in the cas now and it's not fetched
+ assert element._source_cached()
+ assert os.listdir(os.path.join(str(tmpdir), 'cache', 'sources', 'git')) == []
@pytest.mark.datafiles(DATA_DIR)
@@ -151,32 +144,29 @@ def test_fetch_fallback(cli, tmpdir, datafiles):
}
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
- context = Context()
- context.load(config=user_config_file)
- context.messenger.set_message_handler(message_handler)
-
- project = Project(project_dir, context)
- project.ensure_fully_loaded()
+ with dummy_context(config=user_config_file) as context:
+ project = Project(project_dir, context)
+ project.ensure_fully_loaded()
- element = project.load_elements(['fetch.bst'])[0]
- assert not element._source_cached()
- source = list(element.sources())[0]
+ element = project.load_elements(['fetch.bst'])[0]
+ assert not element._source_cached()
+ source = list(element.sources())[0]
- cas = context.get_cascache()
- assert not cas.contains(source._get_source_name())
- assert not os.path.exists(os.path.join(cache_dir, 'sources'))
+ cas = context.get_cascache()
+ assert not cas.contains(source._get_source_name())
+ assert not os.path.exists(os.path.join(cache_dir, 'sources'))
- # Now check if it falls back to the source fetch method.
- res = cli.run(project=project_dir, args=['source', 'fetch', 'fetch.bst'])
- res.assert_success()
- brief_key = source._get_brief_display_key()
- assert ("Remote source service ({}) does not have source {} cached"
- .format(share.repo, brief_key)) in res.stderr
- assert ("SUCCESS Fetching from {}"
- .format(repo.source_config(ref=ref)['url'])) in res.stderr
+ # Now check if it falls back to the source fetch method.
+ res = cli.run(project=project_dir, args=['source', 'fetch', 'fetch.bst'])
+ res.assert_success()
+ brief_key = source._get_brief_display_key()
+ assert ("Remote source service ({}) does not have source {} cached"
+ .format(share.repo, brief_key)) in res.stderr
+ assert ("SUCCESS Fetching from {}"
+ .format(repo.source_config(ref=ref)['url'])) in res.stderr
- # Check that the source is both in the source dir and the local CAS
- assert element._source_cached()
+ # Check that the source is both in the source dir and the local CAS
+ assert element._source_cached()
@pytest.mark.datafiles(DATA_DIR)
@@ -209,22 +199,20 @@ def test_pull_fail(cli, tmpdir, datafiles):
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# get the source object
- context = Context()
- context.load(config=user_config_file)
- context.messenger.set_message_handler(message_handler)
- project = Project(project_dir, context)
- project.ensure_fully_loaded()
-
- element = project.load_elements(['push.bst'])[0]
- assert not element._source_cached()
- source = list(element.sources())[0]
-
- # remove files and check that it doesn't build
- shutil.rmtree(repo.repo)
-
- # Should fail in stream, with a plugin tasks causing the error
- res = cli.run(project=project_dir, args=['build', 'push.bst'])
- res.assert_main_error(ErrorDomain.STREAM, None)
- res.assert_task_error(ErrorDomain.PLUGIN, None)
- assert "Remote source service ({}) does not have source {} cached".format(
- share.repo, source._get_brief_display_key()) in res.stderr
+ with dummy_context(config=user_config_file) as context:
+ project = Project(project_dir, context)
+ project.ensure_fully_loaded()
+
+ element = project.load_elements(['push.bst'])[0]
+ assert not element._source_cached()
+ source = list(element.sources())[0]
+
+ # remove files and check that it doesn't build
+ shutil.rmtree(repo.repo)
+
+ # Should fail in stream, with a plugin task causing the error
+ res = cli.run(project=project_dir, args=['build', 'push.bst'])
+ res.assert_main_error(ErrorDomain.STREAM, None)
+ res.assert_task_error(ErrorDomain.PLUGIN, None)
+ assert "Remote source service ({}) does not have source {} cached".format(
+ share.repo, source._get_brief_display_key()) in res.stderr
diff --git a/tests/sourcecache/push.py b/tests/sourcecache/push.py
index e9c72d47c..b0fae616e 100644
--- a/tests/sourcecache/push.py
+++ b/tests/sourcecache/push.py
@@ -23,14 +23,13 @@ import os
import shutil
import pytest
-from buildstream._context import Context
from buildstream._exceptions import ErrorDomain
from buildstream._project import Project
from buildstream import _yaml
from buildstream.testing import cli # pylint: disable=unused-import
from buildstream.testing import create_repo
-from tests.testutils import create_artifact_share
+from tests.testutils import create_artifact_share, dummy_context
DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@@ -70,33 +69,31 @@ def test_source_push(cli, tmpdir, datafiles):
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# get the source object
- context = Context()
- context.load(config=user_config_file)
- context.messenger.set_message_handler(message_handler)
- project = Project(project_dir, context)
- project.ensure_fully_loaded()
-
- element = project.load_elements(['push.bst'])[0]
- assert not element._source_cached()
- source = list(element.sources())[0]
-
- # check we don't have it in the current cache
- cas = context.get_cascache()
- assert not cas.contains(source._get_source_name())
-
- # build the element, this should fetch and then push the source to the
- # remote
- res = cli.run(project=project_dir, args=['build', 'push.bst'])
- res.assert_success()
- assert "Pushed source" in res.stderr
-
- # check that we've got the remote locally now
- sourcecache = context.sourcecache
- assert sourcecache.contains(source)
-
- # check that's the remote CAS now has it
- digest = sourcecache.export(source)._get_digest()
- assert share.has_object(digest)
+ with dummy_context(config=user_config_file) as context:
+ project = Project(project_dir, context)
+ project.ensure_fully_loaded()
+
+ element = project.load_elements(['push.bst'])[0]
+ assert not element._source_cached()
+ source = list(element.sources())[0]
+
+ # check we don't have it in the current cache
+ cas = context.get_cascache()
+ assert not cas.contains(source._get_source_name())
+
+ # build the element, this should fetch and then push the source to the
+ # remote
+ res = cli.run(project=project_dir, args=['build', 'push.bst'])
+ res.assert_success()
+ assert "Pushed source" in res.stderr
+
+ # check that we've got the remote locally now
+ sourcecache = context.sourcecache
+ assert sourcecache.contains(source)
+
+ # check that the remote CAS now has it
+ digest = sourcecache.export(source)._get_digest()
+ assert share.has_object(digest)
@pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/sourcecache/staging.py b/tests/sourcecache/staging.py
index c15bed215..186a4bd9f 100644
--- a/tests/sourcecache/staging.py
+++ b/tests/sourcecache/staging.py
@@ -25,20 +25,17 @@ import os
import shutil
import pytest
-from buildstream._context import Context
from buildstream._project import Project
from buildstream.testing.runcli import cli # pylint: disable=unused-import
+
+from tests.testutils import dummy_context
from tests.testutils.element_generators import create_element_size
DATA_DIR = os.path.dirname(os.path.realpath(__file__))
-def dummy_message_handler(message, is_silenced):
- pass
-
-
# walk that removes the root directory from roots
def relative_walk(rootdir):
for root, dirnames, filenames in os.walk(rootdir):
@@ -55,36 +52,32 @@ def test_source_staged(tmpdir, cli, datafiles):
'cachedir': cachedir
})
- # set up minimal context
- context = Context()
- context.load()
-
- # load project and sourcecache
- project = Project(project_dir, context)
- project.ensure_fully_loaded()
- context.cachedir = cachedir
- context.messenger.set_message_handler(dummy_message_handler)
- sourcecache = context.sourcecache
- cas = context.get_cascache()
-
res = cli.run(project=project_dir, args=["build", "import-bin.bst"])
res.assert_success()
- # now check that the source is in the refs file, this is pretty messy but
- # seems to be the only way to get the sources?
- element = project.load_elements(["import-bin.bst"])[0]
- source = list(element.sources())[0]
- assert element._source_cached()
- assert sourcecache.contains(source)
+ with dummy_context() as context:
+ context.cachedir = cachedir
+ # load project and sourcecache
+ project = Project(project_dir, context)
+ project.ensure_fully_loaded()
+ sourcecache = context.sourcecache
+ cas = context.get_cascache()
- # Extract the file and check it's the same as the one we imported
- digest = sourcecache.export(source)._get_digest()
- extractdir = os.path.join(str(tmpdir), "extract")
- cas.checkout(extractdir, digest)
- dir1 = extractdir
- dir2 = os.path.join(project_dir, "files", "bin-files")
+ # now check that the source is in the refs file, this is pretty messy but
+ # seems to be the only way to get the sources?
+ element = project.load_elements(["import-bin.bst"])[0]
+ source = list(element.sources())[0]
+ assert element._source_cached()
+ assert sourcecache.contains(source)
- assert list(relative_walk(dir1)) == list(relative_walk(dir2))
+ # Extract the file and check it's the same as the one we imported
+ digest = sourcecache.export(source)._get_digest()
+ extractdir = os.path.join(str(tmpdir), "extract")
+ cas.checkout(extractdir, digest)
+ dir1 = extractdir
+ dir2 = os.path.join(project_dir, "files", "bin-files")
+
+ assert list(relative_walk(dir1)) == list(relative_walk(dir2))
# Check sources are staged during a fetch
@@ -97,33 +90,29 @@ def test_source_fetch(tmpdir, cli, datafiles):
'cachedir': cachedir
})
- # set up minimal context
- context = Context()
- context.load()
-
- # load project and sourcecache
- project = Project(project_dir, context)
- project.ensure_fully_loaded()
- context.cachedir = cachedir
- context.messenger.set_message_handler(dummy_message_handler)
- cas = context.get_cascache()
- sourcecache = context.sourcecache
-
res = cli.run(project=project_dir, args=["source", "fetch", "import-dev.bst"])
res.assert_success()
- element = project.load_elements(["import-dev.bst"])[0]
- source = list(element.sources())[0]
- assert element._source_cached()
+ with dummy_context() as context:
+ context.cachedir = cachedir
+ # load project and sourcecache
+ project = Project(project_dir, context)
+ project.ensure_fully_loaded()
+ cas = context.get_cascache()
+ sourcecache = context.sourcecache
+
+ element = project.load_elements(["import-dev.bst"])[0]
+ source = list(element.sources())[0]
+ assert element._source_cached()
- # check that the directory structures are idetical
- digest = sourcecache.export(source)._get_digest()
- extractdir = os.path.join(str(tmpdir), "extract")
- cas.checkout(extractdir, digest)
- dir1 = extractdir
- dir2 = os.path.join(project_dir, "files", "dev-files")
+ # check that the directory structures are identical
+ digest = sourcecache.export(source)._get_digest()
+ extractdir = os.path.join(str(tmpdir), "extract")
+ cas.checkout(extractdir, digest)
+ dir1 = extractdir
+ dir2 = os.path.join(project_dir, "files", "dev-files")
- assert list(relative_walk(dir1)) == list(relative_walk(dir2))
+ assert list(relative_walk(dir1)) == list(relative_walk(dir2))
# Check that with sources only in the CAS build successfully completes
@@ -141,18 +130,15 @@ def test_staged_source_build(tmpdir, datafiles, cli):
create_element_size('target.bst', project_dir, element_path, [], 10000)
- # get the source object
- context = Context()
- context.load()
- project = Project(project_dir, context)
- project.ensure_fully_loaded()
- context.cachedir = cachedir
- context.messenger.set_message_handler(dummy_message_handler)
+ with dummy_context() as context:
+ context.cachedir = cachedir
+ project = Project(project_dir, context)
+ project.ensure_fully_loaded()
- element = project.load_elements(["import-dev.bst"])[0]
+ element = project.load_elements(["import-dev.bst"])[0]
- # check consistency of the source
- assert not element._source_cached()
+ # check consistency of the source
+ assert not element._source_cached()
res = cli.run(project=project_dir, args=['build', 'target.bst'])
res.assert_success()
diff --git a/tests/testutils/__init__.py b/tests/testutils/__init__.py
index 9913e880d..25fa6d763 100644
--- a/tests/testutils/__init__.py
+++ b/tests/testutils/__init__.py
@@ -24,6 +24,7 @@
#
from .artifactshare import create_artifact_share, assert_shared, assert_not_shared
+from .context import dummy_context
from .element_generators import create_element_size, update_element_size
from .junction import generate_junction
from .runner_integration import wait_for_cache_granularity
diff --git a/tests/testutils/context.py b/tests/testutils/context.py
new file mode 100644
index 000000000..899bad247
--- /dev/null
+++ b/tests/testutils/context.py
@@ -0,0 +1,46 @@
+#
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+
+import os
+
+from contextlib import contextmanager
+
+from buildstream._context import Context
+
+
+# Handle messages from the pipeline
+def _dummy_message_handler(message, is_silenced):
+ pass
+
+
+# dummy_context()
+#
+# Context manager to create minimal context for tests.
+#
+# Args:
+# config (filename): The configuration file, if any
+#
+@contextmanager
+def dummy_context(*, config=None):
+ with Context() as context:
+ if not config:
+ config = os.devnull
+
+ context.load(config=config)
+
+ context.messenger.set_message_handler(_dummy_message_handler)
+
+ yield context
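
A sketch of how a test might use the new helper, mirroring the call sites converted above (the test name and datafiles fixture here are illustrative):

    from buildstream._project import Project
    from tests.testutils import dummy_context

    def test_example(datafiles):
        basedir = str(datafiles)
        with dummy_context() as context:          # loads os.devnull as the config by default
            project = Project(basedir, context)   # project bound to the minimal context
            project.ensure_fully_loaded()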