From 863d9c6b28aae5b13849edde4c89ad186d4fa0f3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=BCrg=20Billeter?=
Date: Tue, 9 Jul 2019 15:30:40 +0200
Subject: _context.py: Make Context class a Python context manager

This will allow reliable cleanup.
---
 src/buildstream/_context.py | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index 286ed6d3d..1e45d776d 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -164,6 +164,20 @@ class Context():
         self._cascache = None
         self._casquota = None
 
+    # __enter__()
+    #
+    # Called when entering the with-statement context.
+    #
+    def __enter__(self):
+        return self
+
+    # __exit__()
+    #
+    # Called when exiting the with-statement context.
+    #
+    def __exit__(self, exc_type, exc_value, traceback):
+        return None
+
     # load()
     #
     # Loads the configuration files
-- 
cgit v1.2.1


From 3bd487e06e340efabe45ab54646e03b9610ae584 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=BCrg=20Billeter?=
Date: Tue, 9 Jul 2019 15:40:31 +0200
Subject: app.py: Use Context as context manager

---
 src/buildstream/_frontend/app.py | 255 ++++++++++++++++++++-------------------
 1 file changed, 129 insertions(+), 126 deletions(-)

diff --git a/src/buildstream/_frontend/app.py b/src/buildstream/_frontend/app.py
index f04e9595e..372ade191 100644
--- a/src/buildstream/_frontend/app.py
+++ b/src/buildstream/_frontend/app.py
@@ -169,149 +169,152 @@ class App():
 
         self._session_name = session_name
 
-        #
-        # Load the Context
-        #
-        try:
-            self.context = Context()
-            self.context.load(config)
-        except BstError as e:
-            self._error_exit(e, "Error loading user configuration")
-
-        # Override things in the context from our command line options,
-        # the command line when used, trumps the config files.
-        #
-        override_map = {
-            'strict': '_strict_build_plan',
-            'debug': 'log_debug',
-            'verbose': 'log_verbose',
-            'error_lines': 'log_error_lines',
-            'message_lines': 'log_message_lines',
-            'on_error': 'sched_error_action',
-            'fetchers': 'sched_fetchers',
-            'builders': 'sched_builders',
-            'pushers': 'sched_pushers',
-            'max_jobs': 'build_max_jobs',
-            'network_retries': 'sched_network_retries',
-            'pull_buildtrees': 'pull_buildtrees',
-            'cache_buildtrees': 'cache_buildtrees'
-        }
-        for cli_option, context_attr in override_map.items():
-            option_value = self._main_options.get(cli_option)
-            if option_value is not None:
-                setattr(self.context, context_attr, option_value)
-        try:
-            Platform.get_platform()
-        except BstError as e:
-            self._error_exit(e, "Error instantiating platform")
-
-        # Create the stream right away, we'll need to pass it around.
-        self.stream = Stream(self.context, self._session_start,
-                             session_start_callback=self.session_start_cb,
-                             interrupt_callback=self._interrupt_handler,
-                             ticker_callback=self._tick)
-
-        self._state = self.stream.get_state()
+        # Instantiate Context
+        with Context() as context:
+            self.context = context
 
-        # Register callbacks with the State
-        self._state.register_task_failed_callback(self._job_failed)
+            #
+            # Load the configuration
+            #
+            try:
+                self.context.load(config)
+            except BstError as e:
+                self._error_exit(e, "Error loading user configuration")
+
+            # Override things in the context from our command line options;
+            # the command line, when used, trumps the config files.
+            #
+            override_map = {
+                'strict': '_strict_build_plan',
+                'debug': 'log_debug',
+                'verbose': 'log_verbose',
+                'error_lines': 'log_error_lines',
+                'message_lines': 'log_message_lines',
+                'on_error': 'sched_error_action',
+                'fetchers': 'sched_fetchers',
+                'builders': 'sched_builders',
+                'pushers': 'sched_pushers',
+                'max_jobs': 'build_max_jobs',
+                'network_retries': 'sched_network_retries',
+                'pull_buildtrees': 'pull_buildtrees',
+                'cache_buildtrees': 'cache_buildtrees'
+            }
+            for cli_option, context_attr in override_map.items():
+                option_value = self._main_options.get(cli_option)
+                if option_value is not None:
+                    setattr(self.context, context_attr, option_value)
+            try:
+                Platform.get_platform()
+            except BstError as e:
+                self._error_exit(e, "Error instantiating platform")
+
+            # Create the stream right away, we'll need to pass it around.
+            self.stream = Stream(self.context, self._session_start,
+                                 session_start_callback=self.session_start_cb,
+                                 interrupt_callback=self._interrupt_handler,
+                                 ticker_callback=self._tick)
+
+            self._state = self.stream.get_state()
+
+            # Register callbacks with the State
+            self._state.register_task_failed_callback(self._job_failed)
+
+            # Create the logger right before setting the message handler
+            self.logger = LogLine(self.context, self._state,
+                                  self._content_profile,
+                                  self._format_profile,
+                                  self._success_profile,
+                                  self._error_profile,
+                                  self._detail_profile,
+                                  indent=INDENT)
+
+            # Propagate pipeline feedback to the user
+            self.context.messenger.set_message_handler(self._message_handler)
+
+            # Preflight the artifact cache after initializing logging,
+            # this can cause messages to be emitted.
+            try:
+                self.context.artifactcache.preflight()
+            except BstError as e:
+                self._error_exit(e, "Error instantiating artifact cache")
 
-        # Create the logger right before setting the message handler
-        self.logger = LogLine(self.context, self._state,
-                              self._content_profile,
-                              self._format_profile,
-                              self._success_profile,
-                              self._error_profile,
-                              self._detail_profile,
-                              indent=INDENT)
+            # Now that we have a logger and message handler,
+            # we can override the global exception hook.
+            sys.excepthook = self._global_exception_handler
 
-        # Propagate pipeline feedback to the user
-        self.context.messenger.set_message_handler(self._message_handler)
+            # Initialize the parts of Stream that have side-effects
+            self.stream.init()
 
-        # Preflight the artifact cache after initializing logging,
-        # this can cause messages to be emitted.
-        try:
-            self.context.artifactcache.preflight()
-        except BstError as e:
-            self._error_exit(e, "Error instantiating artifact cache")
+            # Create our status printer, only available in interactive
+            self._status = Status(self.context, self._state,
+                                  self._content_profile, self._format_profile,
+                                  self._success_profile, self._error_profile,
+                                  self.stream, colors=self.colors)
 
-        # Now that we have a logger and message handler,
-        # we can override the global exception hook.
-        sys.excepthook = self._global_exception_handler
-
-        # Initialize the parts of Stream that have side-effects
-        self.stream.init()
-
-        # Create our status printer, only available in interactive
-        self._status = Status(self.context, self._state,
-                              self._content_profile, self._format_profile,
-                              self._success_profile, self._error_profile,
-                              self.stream, colors=self.colors)
+            # Mark the beginning of the session
+            if session_name:
+                self._message(MessageType.START, session_name)
 
-        # Mark the beginning of the session
-        if session_name:
-            self._message(MessageType.START, session_name)
+            #
+            # Load the Project
+            #
+            try:
+                self.project = Project(directory, self.context, cli_options=self._main_options['option'],
+                                       default_mirror=self._main_options.get('default_mirror'),
+                                       fetch_subprojects=self.stream.fetch_subprojects)
 
-        #
-        # Load the Project
-        #
-        try:
-            self.project = Project(directory, self.context, cli_options=self._main_options['option'],
-                                   default_mirror=self._main_options.get('default_mirror'),
-                                   fetch_subprojects=self.stream.fetch_subprojects)
-
-            self.stream.set_project(self.project)
-        except LoadError as e:
-
-            # Help users that are new to BuildStream by suggesting 'init'.
-            # We don't want to slow down users that just made a mistake, so
-            # don't stop them with an offer to create a project for them.
-            if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
-                click.echo("No project found. You can create a new project like so:", err=True)
-                click.echo("", err=True)
-                click.echo("    bst init", err=True)
+                self.stream.set_project(self.project)
+            except LoadError as e:
 
-            self._error_exit(e, "Error loading project")
+                # Help users that are new to BuildStream by suggesting 'init'.
+                # We don't want to slow down users that just made a mistake, so
+                # don't stop them with an offer to create a project for them.
+                if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
+                    click.echo("No project found. You can create a new project like so:", err=True)
+                    click.echo("", err=True)
+                    click.echo("    bst init", err=True)
 
-        except BstError as e:
-            self._error_exit(e, "Error loading project")
+                self._error_exit(e, "Error loading project")
 
-        # Run the body of the session here, once everything is loaded
-        try:
-            yield
-        except BstError as e:
+            except BstError as e:
+                self._error_exit(e, "Error loading project")
 
-            # Print a nice summary if this is a session
-            if session_name:
-                elapsed = self.stream.elapsed_time
+            # Run the body of the session here, once everything is loaded
+            try:
+                yield
+            except BstError as e:
 
-                if isinstance(e, StreamError) and e.terminated:  # pylint: disable=no-member
-                    self._message(MessageType.WARN, session_name + ' Terminated', elapsed=elapsed)
-                else:
-                    self._message(MessageType.FAIL, session_name, elapsed=elapsed)
+                # Print a nice summary if this is a session
+                if session_name:
+                    elapsed = self.stream.elapsed_time
 
-                # Notify session failure
-                self._notify("{} failed".format(session_name), e)
+                    if isinstance(e, StreamError) and e.terminated:  # pylint: disable=no-member
+                        self._message(MessageType.WARN, session_name + ' Terminated', elapsed=elapsed)
+                    else:
+                        self._message(MessageType.FAIL, session_name, elapsed=elapsed)
 
-            if self._started:
-                self._print_summary()
+                    # Notify session failure
+                    self._notify("{} failed".format(session_name), e)
 
-            # Exit with the error
-            self._error_exit(e)
-        except RecursionError:
-            click.echo("RecursionError: Dependency depth is too large. Maximum recursion depth exceeded.",
-                       err=True)
-            sys.exit(-1)
+                if self._started:
+                    self._print_summary()
 
-        else:
-            # No exceptions occurred, print session time and summary
-            if session_name:
-                self._message(MessageType.SUCCESS, session_name, elapsed=self.stream.elapsed_time)
-                if self._started:
-                    self._print_summary()
+                # Exit with the error
+                self._error_exit(e)
+            except RecursionError:
+                click.echo("RecursionError: Dependency depth is too large. Maximum recursion depth exceeded.",
+                           err=True)
+                sys.exit(-1)
 
-            # Notify session success
-            self._notify("{} succeeded".format(session_name), "")
+            else:
+                # No exceptions occurred, print session time and summary
+                if session_name:
+                    self._message(MessageType.SUCCESS, session_name, elapsed=self.stream.elapsed_time)
+                    if self._started:
+                        self._print_summary()
+
+                    # Notify session success
+                    self._notify("{} succeeded".format(session_name), "")
 
     # init_project()
     #
-- 
cgit v1.2.1


From 20ba24b5c4233f02c9a9c973edf584589f920fc1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=BCrg=20Billeter?=
Date: Tue, 9 Jul 2019 17:03:13 +0200
Subject: cli.py: Use Context as context manager

---
 src/buildstream/_frontend/cli.py | 46 ++++++++++++++++++++--------------------
 1 file changed, 23 insertions(+), 23 deletions(-)

diff --git a/src/buildstream/_frontend/cli.py b/src/buildstream/_frontend/cli.py
index 8fd834825..d4686e4fc 100644
--- a/src/buildstream/_frontend/cli.py
+++ b/src/buildstream/_frontend/cli.py
@@ -117,30 +117,30 @@ def complete_target(args, incomplete):
 def complete_artifact(orig_args, args, incomplete):
     from .._context import Context
 
-    ctx = Context()
-
-    config = None
-    if orig_args:
-        for i, arg in enumerate(orig_args):
-            if arg in ('-c', '--config'):
-                try:
-                    config = orig_args[i + 1]
-                except IndexError:
-                    pass
-    if args:
-        for i, arg in enumerate(args):
-            if arg in ('-c', '--config'):
-                try:
-                    config = args[i + 1]
-                except IndexError:
-                    pass
-    ctx.load(config)
-
-    # element targets are valid artifact names
-    complete_list = complete_target(args, incomplete)
-    complete_list.extend(ref for ref in ctx.artifactcache.list_artifacts() if ref.startswith(incomplete))
+    with Context() as ctx:
 
-    return complete_list
+        config = None
+        if orig_args:
+            for i, arg in enumerate(orig_args):
+                if arg in ('-c', '--config'):
+                    try:
+                        config = orig_args[i + 1]
+                    except IndexError:
+                        pass
+        if args:
+            for i, arg in enumerate(args):
+                if arg in ('-c', '--config'):
+                    try:
+                        config = args[i + 1]
+                    except IndexError:
+                        pass
+        ctx.load(config)
+
+        # element targets are valid artifact names
+        complete_list = complete_target(args, incomplete)
+        complete_list.extend(ref for ref in ctx.artifactcache.list_artifacts() if ref.startswith(incomplete))
+
+        return complete_list
 
 
 def override_completions(orig_args, cmd, cmd_param, args, incomplete):
-- 
cgit v1.2.1


From 2fc76fca4d48e8dfcfa815f06aafe8f406619dc4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=BCrg=20Billeter?=
Date: Tue, 9 Jul 2019 15:52:11 +0200
Subject: tests/testutils: Add dummy_context() helper

---
 tests/testutils/__init__.py |  1 +
 tests/testutils/context.py  | 46 +++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 47 insertions(+)
 create mode 100644 tests/testutils/context.py

diff --git a/tests/testutils/__init__.py b/tests/testutils/__init__.py
index 9913e880d..25fa6d763 100644
--- a/tests/testutils/__init__.py
+++ b/tests/testutils/__init__.py
@@ -24,6 +24,7 @@
 #
 from .artifactshare import create_artifact_share, assert_shared, assert_not_shared
+from .context import dummy_context
 from .element_generators import create_element_size, update_element_size
 from .junction import generate_junction
 from .runner_integration import wait_for_cache_granularity

diff --git a/tests/testutils/context.py b/tests/testutils/context.py
new file mode 100644
index 000000000..899bad247
--- /dev/null
+++ b/tests/testutils/context.py
@@ -0,0 +1,46 @@
+#
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+
+import os
+
+from contextlib import contextmanager
+
+from buildstream._context import Context
+
+
+# Handle messages from the pipeline
+def _dummy_message_handler(message, is_silenced):
+    pass
+
+
+# dummy_context()
+#
+# Context manager to create minimal context for tests.
+#
+# Args:
+#     config (filename): The configuration file, if any
+#
+@contextmanager
+def dummy_context(*, config=None):
+    with Context() as context:
+        if not config:
+            config = os.devnull
+
+        context.load(config=config)
+
+        context.messenger.set_message_handler(_dummy_message_handler)
+
+        yield context
-- 
cgit v1.2.1


From b7e7f6e6e72c13cea04596d47669c5ee63f58988 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=BCrg=20Billeter?=
Date: Tue, 9 Jul 2019 15:55:18 +0200
Subject: tests/artifactcache/config.py: Use dummy_context()

---
 tests/artifactcache/config.py | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/tests/artifactcache/config.py b/tests/artifactcache/config.py
index db51d196c..08d6f74bb 100644
--- a/tests/artifactcache/config.py
+++ b/tests/artifactcache/config.py
@@ -7,7 +7,6 @@ import os
 import pytest
 
 from buildstream._artifactcache import ArtifactCacheSpec, ArtifactCache
-from buildstream._context import Context
 from buildstream._project import Project
 from buildstream.utils import _deduplicate
 from buildstream import _yaml
@@ -15,6 +14,8 @@ from buildstream._exceptions import ErrorDomain, LoadErrorReason
 
 from buildstream.testing.runcli import cli  # pylint: disable=unused-import
 
+from tests.testutils import dummy_context
+
 DATA_DIR = os.path.dirname(os.path.realpath(__file__))
 
 cache1 = ArtifactCacheSpec(url='https://example.com/cache1', push=True)
@@ -107,17 +108,16 @@ def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user_caches):
     project_config_file = str(project_dir.join('project.conf'))
     _yaml.roundtrip_dump(project_config, file=project_config_file)
 
-    context = Context()
-    context.load(config=user_config_file)
-    project = Project(str(project_dir), context)
-    project.ensure_fully_loaded()
+    with dummy_context(config=user_config_file) as context:
+        project = Project(str(project_dir), context)
+        project.ensure_fully_loaded()
 
-    # Use the helper from the artifactcache module to parse our configuration.
-    parsed_cache_specs = ArtifactCache._configured_remote_cache_specs(context, project)
+        # Use the helper from the artifactcache module to parse our configuration.
+        parsed_cache_specs = ArtifactCache._configured_remote_cache_specs(context, project)
 
-    # Verify that it was correctly read.
-    expected_cache_specs = list(_deduplicate(itertools.chain(override_caches, project_caches, user_caches)))
-    assert parsed_cache_specs == expected_cache_specs
+        # Verify that it was correctly read.
+        expected_cache_specs = list(_deduplicate(itertools.chain(override_caches, project_caches, user_caches)))
+        assert parsed_cache_specs == expected_cache_specs
 
 
 # Assert that if either the client key or client cert is specified
-- 
cgit v1.2.1


From bfef9e23807ab920ae402d743d5ea96ef857ca68 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=BCrg=20Billeter?=
Date: Tue, 9 Jul 2019 16:04:48 +0200
Subject: tests/artifactcache/pull.py: Use dummy_context()

---
 tests/artifactcache/pull.py | 225 ++++++++++++++++++++------------------------
 1 file changed, 100 insertions(+), 125 deletions(-)

diff --git a/tests/artifactcache/pull.py b/tests/artifactcache/pull.py
index 72f3103f0..6003cea41 100644
--- a/tests/artifactcache/pull.py
+++ b/tests/artifactcache/pull.py
@@ -8,12 +8,11 @@ import signal
 import pytest
 
 from buildstream import _yaml, _signals, utils
-from buildstream._context import Context
 from buildstream._project import Project
 from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 from buildstream.testing import cli  # pylint: disable=unused-import
 
-from tests.testutils import create_artifact_share
+from tests.testutils import create_artifact_share, dummy_context
 
 
 # Project directory
@@ -23,11 +22,6 @@ DATA_DIR = os.path.join(
 )
 
 
-# Handle messages from the pipeline
-def message_handler(message, is_silenced):
-    pass
-
-
 # Since parent processes wait for queue events, we need
 # to put something on it if the called process raises an
 # exception.
@@ -92,73 +86,66 @@ def test_pull(cli, tmpdir, datafiles):
     # Assert that we are not cached locally anymore
     assert cli.get_element_state(project_dir, 'target.bst') != 'cached'
 
-    # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.messenger.set_message_handler(message_handler)
-
-    # Load the project
-    project = Project(project_dir, context)
-    project.ensure_fully_loaded()
+    with dummy_context(config=user_config_file) as context:
+        # Load the project
+        project = Project(project_dir, context)
+        project.ensure_fully_loaded()
 
-    # Assert that the element's artifact is **not** cached
-    element = project.load_elements(['target.bst'])[0]
-    element_key = cli.get_element_key(project_dir, 'target.bst')
-    assert not cli.artifact.is_cached(cache_dir, element, element_key)
+        # Assert that the element's artifact is **not** cached
+        element = project.load_elements(['target.bst'])[0]
+        element_key = cli.get_element_key(project_dir, 'target.bst')
+        assert not cli.artifact.is_cached(cache_dir, element, element_key)
 
-    queue = multiprocessing.Queue()
-    # Use subprocess to avoid creation of gRPC threads in main BuildStream process
-    # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
-    process = multiprocessing.Process(target=_queue_wrapper,
-                                      args=(_test_pull, queue, user_config_file, project_dir,
-                                            cache_dir, 'target.bst', element_key))
+        queue = multiprocessing.Queue()
+        # Use subprocess to avoid creation of gRPC threads in main BuildStream process
+        # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
+        process = multiprocessing.Process(target=_queue_wrapper,
+                                          args=(_test_pull, queue, user_config_file, project_dir,
                                                cache_dir, 'target.bst', element_key))
 
-    try:
-        # Keep SIGINT blocked in the child process
-        with _signals.blocked([signal.SIGINT], ignore=False):
-            process.start()
+        try:
+            # Keep SIGINT blocked in the child process
+            with _signals.blocked([signal.SIGINT], ignore=False):
+                process.start()
 
-        error = queue.get()
-        process.join()
-    except KeyboardInterrupt:
-        utils._kill_process_tree(process.pid)
-        raise
+            error = queue.get()
+            process.join()
+        except KeyboardInterrupt:
+            utils._kill_process_tree(process.pid)
+            raise
 
-    assert not error
-    assert cli.artifact.is_cached(cache_dir, element, element_key)
+        assert not error
+        assert cli.artifact.is_cached(cache_dir, element, element_key)
 
 
 def _test_pull(user_config_file, project_dir, cache_dir,
               element_name, element_key, queue):
-    # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.cachedir = cache_dir
-    context.casdir = os.path.join(cache_dir, 'cas')
-    context.tmpdir = os.path.join(cache_dir, 'tmp')
-    context.messenger.set_message_handler(message_handler)
-
-    # Load the project manually
-    project = Project(project_dir, context)
-    project.ensure_fully_loaded()
-
-    # Create a local artifact cache handle
-    artifactcache = context.artifactcache
-
-    # Load the target element
-    element = project.load_elements([element_name])[0]
-
-    # Manually setup the CAS remote
-    artifactcache.setup_remotes(use_config=True)
-
-    if artifactcache.has_push_remotes(plugin=element):
-        # Push the element's artifact
-        if not artifactcache.pull(element, element_key):
-            queue.put("Pull operation failed")
+    with dummy_context(config=user_config_file) as context:
+        context.cachedir = cache_dir
+        context.casdir = os.path.join(cache_dir, 'cas')
+        context.tmpdir = os.path.join(cache_dir, 'tmp')
+
+        # Load the project manually
+        project = Project(project_dir, context)
+        project.ensure_fully_loaded()
+
+        # Create a local artifact cache handle
+        artifactcache = context.artifactcache
+
+        # Load the target element
+        element = project.load_elements([element_name])[0]
+
+        # Manually setup the CAS remote
+        artifactcache.setup_remotes(use_config=True)
+
+        if artifactcache.has_push_remotes(plugin=element):
+            # Pull the element's artifact
+            if not artifactcache.pull(element, element_key):
+                queue.put("Pull operation failed")
+            else:
+                queue.put(None)
         else:
-            queue.put(None)
-    else:
-        queue.put("No remote configured for element {}".format(element_name))
+            queue.put("No remote configured for element {}".format(element_name))
 
 
 @pytest.mark.datafiles(DATA_DIR)
@@ -195,23 +182,19 @@ def test_pull_tree(cli, tmpdir, datafiles):
     # Assert that we shared/pushed the cached artifact
     assert share.has_artifact(cli.get_artifact_name(project_dir, 'test', 'target.bst'))
 
-    # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.messenger.set_message_handler(message_handler)
-
-    # Load the project and CAS cache
-    project = Project(project_dir, context)
-    project.ensure_fully_loaded()
-    cas = context.get_cascache()
+    with dummy_context(config=user_config_file) as context:
+        # Load the project and CAS cache
+        project = Project(project_dir, context)
+        project.ensure_fully_loaded()
+        cas = context.get_cascache()
 
-    # Assert that the element's artifact is cached
-    element = project.load_elements(['target.bst'])[0]
-    element_key = cli.get_element_key(project_dir, 'target.bst')
-    assert cli.artifact.is_cached(rootcache_dir, element, element_key)
+        # Assert that the element's artifact is cached
+        element = project.load_elements(['target.bst'])[0]
+        element_key = cli.get_element_key(project_dir, 'target.bst')
+        assert cli.artifact.is_cached(rootcache_dir, element, element_key)
 
-    # Retrieve the Directory object from the cached artifact
-    artifact_digest = cli.artifact.get_digest(rootcache_dir, element, element_key)
+        # Retrieve the Directory object from the cached artifact
+        artifact_digest = cli.artifact.get_digest(rootcache_dir, element, element_key)
 
         queue = multiprocessing.Queue()
         # Use subprocess to avoid creation of gRPC threads in main BuildStream process
@@ -270,59 +253,51 @@ def test_pull_tree(cli, tmpdir, datafiles):
 
 def _test_push_tree(user_config_file, project_dir, artifact_digest, queue):
-    # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.messenger.set_message_handler(message_handler)
-
-    # Load the project manually
-    project = Project(project_dir, context)
-    project.ensure_fully_loaded()
+    with dummy_context(config=user_config_file) as context:
+        # Load the project manually
+        project = Project(project_dir, context)
+        project.ensure_fully_loaded()
 
-    # Create a local artifact cache and cas handle
-    artifactcache = context.artifactcache
-    cas = context.get_cascache()
+        # Create a local artifact cache and cas handle
+        artifactcache = context.artifactcache
+        cas = context.get_cascache()
 
-    # Manually setup the CAS remote
-    artifactcache.setup_remotes(use_config=True)
+        # Manually setup the CAS remote
+        artifactcache.setup_remotes(use_config=True)
 
-    if artifactcache.has_push_remotes():
-        directory = remote_execution_pb2.Directory()
+        if artifactcache.has_push_remotes():
+            directory = remote_execution_pb2.Directory()
 
-        with open(cas.objpath(artifact_digest), 'rb') as f:
-            directory.ParseFromString(f.read())
+            with open(cas.objpath(artifact_digest), 'rb') as f:
+                directory.ParseFromString(f.read())
 
-        # Build the Tree object while we are still cached
-        tree = remote_execution_pb2.Tree()
-        tree_maker(cas, tree, directory)
+            # Build the Tree object while we are still cached
+            tree = remote_execution_pb2.Tree()
+            tree_maker(cas, tree, directory)
 
-        # Push the Tree as a regular message
-        tree_digest = artifactcache.push_message(project, tree)
+            # Push the Tree as a regular message
+            tree_digest = artifactcache.push_message(project, tree)
 
-        queue.put((tree_digest.hash, tree_digest.size_bytes))
-    else:
-        queue.put("No remote configured")
+            queue.put((tree_digest.hash, tree_digest.size_bytes))
+        else:
+            queue.put("No remote configured")
 
 
 def _test_pull_tree(user_config_file, project_dir, artifact_digest, queue):
-    # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.messenger.set_message_handler(message_handler)
-
-    # Load the project manually
-    project = Project(project_dir, context)
-    project.ensure_fully_loaded()
-
-    # Create a local artifact cache handle
-    artifactcache = context.artifactcache
-
-    # Manually setup the CAS remote
-    artifactcache.setup_remotes(use_config=True)
-
-    if artifactcache.has_push_remotes():
-        # Pull the artifact using the Tree object
-        directory_digest = artifactcache.pull_tree(project, artifact_digest)
-        queue.put((directory_digest.hash, directory_digest.size_bytes))
-    else:
-        queue.put("No remote configured")
+    with dummy_context(config=user_config_file) as context:
+        # Load the project manually
+        project = Project(project_dir, context)
+        project.ensure_fully_loaded()
+
+        # Create a local artifact cache handle
+        artifactcache = context.artifactcache
+
+        # Manually setup the CAS remote
+        artifactcache.setup_remotes(use_config=True)
+
+        if artifactcache.has_push_remotes():
+            # Pull the artifact using the Tree object
+            directory_digest = artifactcache.pull_tree(project, artifact_digest)
+            queue.put((directory_digest.hash, directory_digest.size_bytes))
+        else:
+            queue.put("No remote configured")
-- 
cgit v1.2.1


From f62b5f26adcdee8ab7cbf739cc5d2388ba25aefe Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=BCrg=20Billeter?=
Date: Tue, 9 Jul 2019 16:43:31 +0200
Subject: tests/artifactcache/push.py: Use dummy_context()

---
 tests/artifactcache/push.py | 143 +++++++++++++++++++------------------------
 1 file changed, 63 insertions(+), 80 deletions(-)

diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index da658f76b..81d75023d 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -8,11 +8,11 @@ import signal
 import pytest
 
 from buildstream import _yaml, _signals, utils, Scope
-from buildstream._context import Context
 from buildstream._project import Project
 from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 from buildstream.testing import cli  # pylint: disable=unused-import
-from tests.testutils import create_artifact_share
+
+from tests.testutils import create_artifact_share, dummy_context
 
 
 # Project directory
@@ -22,11 +22,6 @@ DATA_DIR = os.path.join(
 )
 
 
-# Handle messages from the pipeline
-def message_handler(message, is_silenced):
-    pass
-
-
 # Since parent processes wait for queue events, we need
 # to put something on it if the called process raises an
 # exception.
@@ -68,19 +63,15 @@ def test_push(cli, tmpdir, datafiles):
     # Write down the user configuration file
     _yaml.roundtrip_dump(user_config, file=user_config_file)
 
-    # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.messenger.set_message_handler(message_handler)
-
-    # Load the project manually
-    project = Project(project_dir, context)
-    project.ensure_fully_loaded()
+    with dummy_context(config=user_config_file) as context:
+        # Load the project manually
+        project = Project(project_dir, context)
+        project.ensure_fully_loaded()
 
-    # Assert that the element's artifact is cached
-    element = project.load_elements(['target.bst'])[0]
-    element_key = cli.get_element_key(project_dir, 'target.bst')
-    assert cli.artifact.is_cached(rootcache_dir, element, element_key)
+        # Assert that the element's artifact is cached
+        element = project.load_elements(['target.bst'])[0]
+        element_key = cli.get_element_key(project_dir, 'target.bst')
+        assert cli.artifact.is_cached(rootcache_dir, element, element_key)
 
     queue = multiprocessing.Queue()
     # Use subprocess to avoid creation of gRPC threads in main BuildStream process
@@ -105,40 +96,36 @@ def test_push(cli, tmpdir, datafiles):
 
 def _test_push(user_config_file, project_dir, element_name, queue):
-    # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.messenger.set_message_handler(message_handler)
-
-    # Load the project manually
-    project = Project(project_dir, context)
-    project.ensure_fully_loaded()
-
-    # Create a local artifact cache handle
-    artifactcache = context.artifactcache
-
-    # Load the target element
-    element = project.load_elements([element_name])[0]
-
-    # Ensure the element's artifact memeber is initialised
-    # This is duplicated from Pipeline.resolve_elements()
-    # as this test does not use the cli frontend.
-    for e in element.dependencies(Scope.ALL):
-        # Determine initial element state.
-        e._update_state()
-
-    # Manually setup the CAS remotes
-    artifactcache.setup_remotes(use_config=True)
-    artifactcache.initialize_remotes()
-
-    if artifactcache.has_push_remotes(plugin=element):
-        # Push the element's artifact
-        if not element._push():
-            queue.put("Push operation failed")
+    with dummy_context(config=user_config_file) as context:
+        # Load the project manually
+        project = Project(project_dir, context)
+        project.ensure_fully_loaded()
+
+        # Create a local artifact cache handle
+        artifactcache = context.artifactcache
+
+        # Load the target element
+        element = project.load_elements([element_name])[0]
+
+        # Ensure the element's artifact member is initialised
+        # This is duplicated from Pipeline.resolve_elements()
+        # as this test does not use the cli frontend.
+        for e in element.dependencies(Scope.ALL):
+            # Determine initial element state.
+            e._update_state()
+
+        # Manually setup the CAS remotes
+        artifactcache.setup_remotes(use_config=True)
+        artifactcache.initialize_remotes()
+
+        if artifactcache.has_push_remotes(plugin=element):
+            # Push the element's artifact
+            if not element._push():
+                queue.put("Push operation failed")
+            else:
+                queue.put(None)
         else:
-            queue.put(None)
-    else:
-        queue.put("No remote configured for element {}".format(element_name))
+            queue.put("No remote configured for element {}".format(element_name))
 
 
 @pytest.mark.datafiles(DATA_DIR)
@@ -189,31 +176,27 @@ def test_push_message(tmpdir, datafiles):
 
 def _test_push_message(user_config_file, project_dir, queue):
-    # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.messenger.set_message_handler(message_handler)
-
-    # Load the project manually
-    project = Project(project_dir, context)
-    project.ensure_fully_loaded()
-
-    # Create a local artifact cache handle
-    artifactcache = context.artifactcache
-
-    # Manually setup the artifact remote
-    artifactcache.setup_remotes(use_config=True)
-    artifactcache.initialize_remotes()
-
-    if artifactcache.has_push_remotes():
-        # Create an example message object
-        command = remote_execution_pb2.Command(arguments=['/usr/bin/gcc', '--help'],
-                                               working_directory='/buildstream-build',
-                                               output_directories=['/buildstream-install'])
-
-        # Push the message object
-        command_digest = artifactcache.push_message(project, command)
-
-        queue.put((command_digest.hash, command_digest.size_bytes))
-    else:
-        queue.put("No remote configured")
+    with dummy_context(config=user_config_file) as context:
+        # Load the project manually
+        project = Project(project_dir, context)
+        project.ensure_fully_loaded()
+
+        # Create a local artifact cache handle
+        artifactcache = context.artifactcache
+
+        # Manually setup the artifact remote
+        artifactcache.setup_remotes(use_config=True)
+        artifactcache.initialize_remotes()
+
+        if artifactcache.has_push_remotes():
+            # Create an example message object
+            command = remote_execution_pb2.Command(arguments=['/usr/bin/gcc', '--help'],
+                                                   working_directory='/buildstream-build',
+                                                   output_directories=['/buildstream-install'])
+
+            # Push the message object
+            command_digest = artifactcache.push_message(project, command)
+
+            queue.put((command_digest.hash, command_digest.size_bytes))
+        else:
+            queue.put("No remote configured")
-- 
cgit v1.2.1


From 0bfad0b0eac1400573f06b134583365f620f1a8e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=BCrg=20Billeter?=
Date: Tue, 9 Jul 2019 16:49:13 +0200
Subject: tests/format/include_composition.py: Use dummy_context()

---
 tests/format/include_composition.py | 163 +++++++++++++++++++-----------------
 1 file changed, 84 insertions(+), 79 deletions(-)

diff --git a/tests/format/include_composition.py b/tests/format/include_composition.py
index f764b16a6..ec48d82a2 100644
--- a/tests/format/include_composition.py
+++ b/tests/format/include_composition.py
@@ -1,146 +1,151 @@
 import os
-from buildstream._context import Context
+
+from contextlib import contextmanager
+
 from buildstream._project import Project
 from buildstream._includes import Includes
 from buildstream import _yaml
 
+from tests.testutils import dummy_context
+
 
+@contextmanager
 def make_includes(basedir):
     _yaml.roundtrip_dump({'name': 'test'}, os.path.join(basedir, 'project.conf'))
-    context = Context()
-    project = Project(basedir, context)
-    loader = project.loader
-    return Includes(loader)
+    with dummy_context() as context:
+        project = Project(basedir, context)
+        loader = project.loader
+        yield Includes(loader)
 
 
 def test_main_has_priority(tmpdir):
-    includes = make_includes(str(tmpdir))
+    with make_includes(str(tmpdir)) as includes:
 
-    _yaml.roundtrip_dump({'(@)': ['a.yml'], 'test': ['main']},
-                         str(tmpdir.join('main.yml')))
+        _yaml.roundtrip_dump({'(@)': ['a.yml'], 'test': ['main']},
+                             str(tmpdir.join('main.yml')))
 
-    main = _yaml.load(str(tmpdir.join('main.yml')))
+        main = _yaml.load(str(tmpdir.join('main.yml')))
 
-    _yaml.roundtrip_dump({'test': ['a']}, str(tmpdir.join('a.yml')))
+        _yaml.roundtrip_dump({'test': ['a']}, str(tmpdir.join('a.yml')))
 
-    includes.process(main)
+        includes.process(main)
 
-    assert main.get_sequence('test').as_str_list() == ['main']
+        assert main.get_sequence('test').as_str_list() == ['main']
 
 
 def test_include_cannot_append(tmpdir):
-    includes = make_includes(str(tmpdir))
+    with make_includes(str(tmpdir)) as includes:
 
-    _yaml.roundtrip_dump({'(@)': ['a.yml'], 'test': ['main']},
-                         str(tmpdir.join('main.yml')))
-    main = _yaml.load(str(tmpdir.join('main.yml')))
+        _yaml.roundtrip_dump({'(@)': ['a.yml'], 'test': ['main']},
+                             str(tmpdir.join('main.yml')))
+        main = _yaml.load(str(tmpdir.join('main.yml')))
 
-    _yaml.roundtrip_dump({'test': {'(>)': ['a']}},
-                         str(tmpdir.join('a.yml')))
+        _yaml.roundtrip_dump({'test': {'(>)': ['a']}},
+                             str(tmpdir.join('a.yml')))
 
-    includes.process(main)
+        includes.process(main)
 
-    assert main.get_sequence('test').as_str_list() == ['main']
+        assert main.get_sequence('test').as_str_list() == ['main']
 
 
 def test_main_can_append(tmpdir):
-    includes = make_includes(str(tmpdir))
+    with make_includes(str(tmpdir)) as includes:
 
-    _yaml.roundtrip_dump({'(@)': ['a.yml'], 'test': {'(>)': ['main']}},
-                         str(tmpdir.join('main.yml')))
-    main = _yaml.load(str(tmpdir.join('main.yml')))
+        _yaml.roundtrip_dump({'(@)': ['a.yml'], 'test': {'(>)': ['main']}},
+                             str(tmpdir.join('main.yml')))
+        main = _yaml.load(str(tmpdir.join('main.yml')))
 
-    _yaml.roundtrip_dump({'test': ['a']}, str(tmpdir.join('a.yml')))
+        _yaml.roundtrip_dump({'test': ['a']}, str(tmpdir.join('a.yml')))
 
-    includes.process(main)
+        includes.process(main)
 
-    assert main.get_sequence('test').as_str_list() == ['a', 'main']
+        assert main.get_sequence('test').as_str_list() == ['a', 'main']
 
 
 def test_sibling_cannot_append_backward(tmpdir):
-    includes = make_includes(str(tmpdir))
+    with make_includes(str(tmpdir)) as includes:
 
-    _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml']},
-                         str(tmpdir.join('main.yml')))
-    main = _yaml.load(str(tmpdir.join('main.yml')))
+        _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml']},
+                             str(tmpdir.join('main.yml')))
+        main = _yaml.load(str(tmpdir.join('main.yml')))
 
-    _yaml.roundtrip_dump({'test': {'(>)': ['a']}},
-                         str(tmpdir.join('a.yml')))
-    _yaml.roundtrip_dump({'test': ['b']},
-                         str(tmpdir.join('b.yml')))
+        _yaml.roundtrip_dump({'test': {'(>)': ['a']}},
+                             str(tmpdir.join('a.yml')))
+        _yaml.roundtrip_dump({'test': ['b']},
+                             str(tmpdir.join('b.yml')))
 
-    includes.process(main)
+        includes.process(main)
 
-    assert main.get_sequence('test').as_str_list() == ['b']
+        assert main.get_sequence('test').as_str_list() == ['b']
 
 
 def test_sibling_can_append_forward(tmpdir):
-    includes = make_includes(str(tmpdir))
+    with make_includes(str(tmpdir)) as includes:
 
-    _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml']},
-                         str(tmpdir.join('main.yml')))
-    main = _yaml.load(str(tmpdir.join('main.yml')))
+        _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml']},
+                             str(tmpdir.join('main.yml')))
+        main = _yaml.load(str(tmpdir.join('main.yml')))
 
-    _yaml.roundtrip_dump({'test': ['a']},
-                         str(tmpdir.join('a.yml')))
-    _yaml.roundtrip_dump({'test': {'(>)': ['b']}},
-                         str(tmpdir.join('b.yml')))
+        _yaml.roundtrip_dump({'test': ['a']},
+                             str(tmpdir.join('a.yml')))
+        _yaml.roundtrip_dump({'test': {'(>)': ['b']}},
+                             str(tmpdir.join('b.yml')))
 
-    includes.process(main)
+        includes.process(main)
 
-    assert main.get_sequence('test').as_str_list() == ['a', 'b']
+        assert main.get_sequence('test').as_str_list() == ['a', 'b']
 
 
 def test_lastest_sibling_has_priority(tmpdir):
-    includes = make_includes(str(tmpdir))
+    with make_includes(str(tmpdir)) as includes:
 
-    _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml']},
-                         str(tmpdir.join('main.yml')))
-    main = _yaml.load(str(tmpdir.join('main.yml')))
+        _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml']},
+                             str(tmpdir.join('main.yml')))
+        main = _yaml.load(str(tmpdir.join('main.yml')))
 
-    _yaml.roundtrip_dump({'test': ['a']},
-                         str(tmpdir.join('a.yml')))
-    _yaml.roundtrip_dump({'test': ['b']},
-                         str(tmpdir.join('b.yml')))
+        _yaml.roundtrip_dump({'test': ['a']},
+                             str(tmpdir.join('a.yml')))
+        _yaml.roundtrip_dump({'test': ['b']},
+                             str(tmpdir.join('b.yml')))
 
-    includes.process(main)
+        includes.process(main)
 
-    assert main.get_sequence('test').as_str_list() == ['b']
+        assert main.get_sequence('test').as_str_list() == ['b']
 
 
 def test_main_keeps_keys(tmpdir):
-    includes = make_includes(str(tmpdir))
+    with make_includes(str(tmpdir)) as includes:
 
-    _yaml.roundtrip_dump({'(@)': ['a.yml'], 'something': 'else'},
-                         str(tmpdir.join('main.yml')))
-    main = _yaml.load(str(tmpdir.join('main.yml')))
+        _yaml.roundtrip_dump({'(@)': ['a.yml'], 'something': 'else'},
+                             str(tmpdir.join('main.yml')))
+        main = _yaml.load(str(tmpdir.join('main.yml')))
 
-    _yaml.roundtrip_dump({'test': ['a']}, str(tmpdir.join('a.yml')))
+        _yaml.roundtrip_dump({'test': ['a']}, str(tmpdir.join('a.yml')))
 
-    includes.process(main)
+        includes.process(main)
 
-    assert main.get_sequence('test').as_str_list() == ['a']
-    assert main.get_str('something') == 'else'
+        assert main.get_sequence('test').as_str_list() == ['a']
+        assert main.get_str('something') == 'else'
 
 
 def test_overwrite_directive_on_later_composite(tmpdir):
-    includes = make_includes(str(tmpdir))
+    with make_includes(str(tmpdir)) as includes:
 
-    _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml'], 'test': {'(=)': ['Overwritten']}},
-                         str(tmpdir.join('main.yml')))
+        _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml'], 'test': {'(=)': ['Overwritten']}},
+                             str(tmpdir.join('main.yml')))
 
-    main = _yaml.load(str(tmpdir.join('main.yml')))
+        main = _yaml.load(str(tmpdir.join('main.yml')))
 
-    # a.yml
-    _yaml.roundtrip_dump({'test': ['some useless', 'list', 'to be overwritten'],
-                          'foo': 'should not be present'},
-                         str(tmpdir.join('a.yml')))
+        # a.yml
+        _yaml.roundtrip_dump({'test': ['some useless', 'list', 'to be overwritten'],
+                              'foo': 'should not be present'},
+                             str(tmpdir.join('a.yml')))
 
-    # b.yaml isn't going to have a 'test' node to overwrite
-    _yaml.roundtrip_dump({'foo': 'should be present'},
-                         str(tmpdir.join('b.yml')))
+        # b.yaml isn't going to have a 'test' node to overwrite
+        _yaml.roundtrip_dump({'foo': 'should be present'},
+                             str(tmpdir.join('b.yml')))
 
-    includes.process(main)
+        includes.process(main)
 
-    assert main.get_sequence('test').as_str_list() == ['Overwritten']
-    assert main.get_str('foo') == 'should be present'
+        assert main.get_sequence('test').as_str_list() == ['Overwritten']
+        assert main.get_str('foo') == 'should be present'
-- 
cgit v1.2.1


From b0248da3b4b4d4d9a59636e442ffa6d647fe3dca Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=BCrg=20Billeter?=
Date: Tue, 9 Jul 2019 16:51:01 +0200
Subject: tests/internals/context.py: Use dummy_context()

---
 tests/internals/context.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/tests/internals/context.py b/tests/internals/context.py
index da7b307d8..ddd558b6c 100644
--- a/tests/internals/context.py
+++ b/tests/internals/context.py
@@ -21,10 +21,11 @@ def context_fixture():
     else:
         cache_home = os.path.expanduser('~/.cache')
 
-    return {
-        'xdg-cache': cache_home,
-        'context': Context()
-    }
+    with Context() as context:
+        yield {
+            'xdg-cache': cache_home,
+            'context': context
+        }
 
 
 #######################################
-- 
cgit v1.2.1


From 4f1d1b6ceb0a4eb19ad9f1f8c0f5dcd08eba28e4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=BCrg=20Billeter?=
Date: Tue, 9 Jul 2019 16:56:22 +0200
Subject: tests/internals/loader.py: Use dummy_context()

---
 tests/internals/loader.py | 49 ++++++++++++++++++------------------------------
 1 file changed, 17 insertions(+), 32 deletions(-)

diff --git a/tests/internals/loader.py b/tests/internals/loader.py
index a4ebdb9ac..9af2bf161 100644
--- a/tests/internals/loader.py
+++ b/tests/internals/loader.py
@@ -1,11 +1,13 @@
+from contextlib import contextmanager
 import os
 import pytest
 
 from buildstream._exceptions import LoadError, LoadErrorReason
-from buildstream._context import Context
 from buildstream._project import Project
 from buildstream._loader import MetaElement
 
+from tests.testutils import dummy_context
+
 
 DATA_DIR = os.path.join(
     os.path.dirname(os.path.realpath(__file__)),
@@ -13,16 +15,11 @@ DATA_DIR = os.path.join(
 )
 
 
-def dummy_handler(message, is_silenced):
-    pass
-
-
+@contextmanager
 def make_loader(basedir):
-    context = Context()
-    context.load(config=os.devnull)
-    context.messenger.set_message_handler(dummy_handler)
-    project = Project(basedir, context)
-    return project.loader
+    with dummy_context() as context:
+        project = Project(basedir, context)
+        yield project.loader
 
 
 ##############################################################
@@ -32,21 +29,18 @@ def test_one_file(datafiles):
     basedir = str(datafiles)
 
-    loader = make_loader(basedir)
+    with make_loader(basedir) as loader:
+        element = loader.load(['elements/onefile.bst'])[0]
 
-    element = loader.load(['elements/onefile.bst'])[0]
-
-    assert isinstance(element, MetaElement)
-    assert element.kind == 'pony'
+        assert isinstance(element, MetaElement)
+        assert element.kind == 'pony'
 
 
 @pytest.mark.datafiles(os.path.join(DATA_DIR, 'onefile'))
 def test_missing_file(datafiles):
     basedir = str(datafiles)
 
-    loader = make_loader(basedir)
-
-    with pytest.raises(LoadError) as exc:
+    with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
         loader.load(['elements/missing.bst'])
 
     assert exc.value.reason == LoadErrorReason.MISSING_FILE
@@ -56,9 +50,7 @@ def test_invalid_reference(datafiles):
     basedir = str(datafiles)
 
-    loader = make_loader(basedir)
-
-    with pytest.raises(LoadError) as exc:
+    with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
         loader.load(['elements/badreference.bst'])
 
     assert exc.value.reason == LoadErrorReason.INVALID_YAML
@@ -68,9 +60,7 @@ def test_invalid_yaml(datafiles):
     basedir = str(datafiles)
 
-    loader = make_loader(basedir)
-
-    with pytest.raises(LoadError) as exc:
+    with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
         loader.load(['elements/badfile.bst'])
 
     assert exc.value.reason == LoadErrorReason.INVALID_YAML
@@ -82,8 +72,7 @@ def test_fail_fullpath_target(datafiles):
     basedir = str(datafiles)
     fullpath = os.path.join(basedir, 'elements', 'onefile.bst')
 
-    with pytest.raises(LoadError) as exc:
-        loader = make_loader(basedir)
+    with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
         loader.load([fullpath])
 
     assert exc.value.reason == LoadErrorReason.INVALID_DATA
@@ -93,9 +82,7 @@ def test_invalid_key(datafiles):
     basedir = str(datafiles)
 
-    loader = make_loader(basedir)
-
-    with pytest.raises(LoadError) as exc:
+    with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
         loader.load(['elements/invalidkey.bst'])
 
     assert exc.value.reason == LoadErrorReason.INVALID_DATA
@@ -105,9 +92,7 @@ def test_invalid_directory_load(datafiles):
     basedir = str(datafiles)
 
-    loader = make_loader(basedir)
-
-    with pytest.raises(LoadError) as exc:
+    with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
         loader.load(['elements/'])
 
     assert exc.value.reason == LoadErrorReason.LOADING_DIRECTORY
-- 
cgit v1.2.1


From 858c51902e603726e70d076a96cbd174f376295c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=BCrg=20Billeter?=
Date: Tue, 9 Jul 2019 17:14:35 +0200
Subject: tests/internals/pluginloading.py: Use dummy_context()

---
 tests/internals/pluginloading.py | 41 +++++++++++++++++++---------------------
 1 file changed, 19 insertions(+), 22 deletions(-)

diff --git a/tests/internals/pluginloading.py b/tests/internals/pluginloading.py
index 9093680f4..4b6baf229 100644
--- a/tests/internals/pluginloading.py
+++ b/tests/internals/pluginloading.py
@@ -1,56 +1,53 @@
+from contextlib import contextmanager
 import os
 import pytest
 
-from buildstream._context import Context
 from buildstream._project import Project
 from buildstream._exceptions import LoadError, LoadErrorReason
 from buildstream._pipeline import Pipeline
 
+from tests.testutils import dummy_context
+
 
 DATA_DIR = os.path.join(
     os.path.dirname(os.path.realpath(__file__)),
     'pluginloading',
 )
 
 
+@contextmanager
 def create_pipeline(tmpdir, basedir, target):
-    context = Context()
-    context.load(config=os.devnull)
-    context.deploydir = os.path.join(str(tmpdir), 'deploy')
-    context.casdir = os.path.join(str(tmpdir), 'cas')
-    project = Project(basedir, context)
-
-    def dummy_handler(message, is_silenced):
-        pass
+    with dummy_context() as context:
+        context.deploydir = os.path.join(str(tmpdir), 'deploy')
+        context.casdir = os.path.join(str(tmpdir), 'cas')
+        project = Project(basedir, context)
 
-    context.messenger.set_message_handler(dummy_handler)
-
-    pipeline = Pipeline(context, project, None)
-    targets, = pipeline.load([(target,)])
-    return targets
+        pipeline = Pipeline(context, project, None)
+        targets, = pipeline.load([(target,)])
+        yield targets
 
 
 @pytest.mark.datafiles(os.path.join(DATA_DIR, 'customsource'))
 def test_customsource(datafiles, tmpdir):
     basedir = str(datafiles)
-    targets = create_pipeline(tmpdir, basedir, 'simple.bst')
-    assert targets[0].get_kind() == "autotools"
+    with create_pipeline(tmpdir, basedir, 'simple.bst') as targets:
+        assert targets[0].get_kind() == "autotools"
 
 
 @pytest.mark.datafiles(os.path.join(DATA_DIR, 'customelement'))
 def test_customelement(datafiles, tmpdir):
     basedir = str(datafiles)
-    targets = create_pipeline(tmpdir, basedir, 'simple.bst')
-    assert targets[0].get_kind() == "foo"
+    with create_pipeline(tmpdir, basedir, 'simple.bst') as targets:
+        assert targets[0].get_kind() == "foo"
 
 
 @pytest.mark.datafiles(os.path.join(DATA_DIR, 'badversionsource'))
 def test_badversionsource(datafiles, tmpdir):
     basedir = str(datafiles)
 
-    with pytest.raises(LoadError) as exc:
-        create_pipeline(tmpdir, basedir, 'simple.bst')
+    with pytest.raises(LoadError) as exc, create_pipeline(tmpdir, basedir, 'simple.bst'):
+        pass
 
     assert exc.value.reason == LoadErrorReason.UNSUPPORTED_PLUGIN
 
@@ -59,7 +56,7 @@ def test_badversionelement(datafiles, tmpdir):
     basedir = str(datafiles)
 
-    with pytest.raises(LoadError) as exc:
-        create_pipeline(tmpdir, basedir, 'simple.bst')
+    with pytest.raises(LoadError) as exc, create_pipeline(tmpdir, basedir, 'simple.bst'):
+        pass
 
     assert exc.value.reason == LoadErrorReason.UNSUPPORTED_PLUGIN
-- 
cgit v1.2.1


From 36737b18cedc754d7b8b3c2f513bb44f5c30ee43 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=BCrg=20Billeter?=
Date: Tue, 9 Jul 2019 17:18:34 +0200
Subject: tests/sourcecache/fetch.py: Use dummy_context()

---
 tests/sourcecache/fetch.py | 160 +++++++++++++++++++++------------------------
 1 file changed, 74 insertions(+), 86 deletions(-)

diff --git a/tests/sourcecache/fetch.py b/tests/sourcecache/fetch.py
index 300f0c84c..015ed8af9 100644
--- a/tests/sourcecache/fetch.py
+++ b/tests/sourcecache/fetch.py
@@ -24,18 +24,14 @@ import shutil
 import pytest
 
 from buildstream._exceptions import ErrorDomain
-from buildstream._context import Context
 from buildstream._project import Project
 from buildstream import _yaml
 from buildstream.testing import cli  # pylint: disable=unused-import
 from buildstream.testing import create_repo
-from tests.testutils import create_artifact_share
-
-DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
+from tests.testutils import create_artifact_share, dummy_context
 
-
-def message_handler(message, is_silenced):
-    pass
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
 
 
 @pytest.mark.datafiles(DATA_DIR)
@@ -69,55 +65,52 @@ def test_source_fetch(cli, tmpdir, datafiles):
     }
     _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
 
-    context = Context()
-    context.load(config=user_config_file)
-    context.messenger.set_message_handler(message_handler)
+    with dummy_context(config=user_config_file) as context:
+        project = Project(project_dir, context)
+        project.ensure_fully_loaded()
 
-    project = Project(project_dir, context)
-    project.ensure_fully_loaded()
+        element = project.load_elements(['fetch.bst'])[0]
+        assert not element._source_cached()
+        source = list(element.sources())[0]
 
-    element = project.load_elements(['fetch.bst'])[0]
-    assert not element._source_cached()
-    source = list(element.sources())[0]
+        cas = context.get_cascache()
+        assert not cas.contains(source._get_source_name())
 
-    cas = context.get_cascache()
-    assert not cas.contains(source._get_source_name())
+        # Just check that we sensibly fetch and build the element
+        res = cli.run(project=project_dir, args=['build', 'fetch.bst'])
+        res.assert_success()
 
-    # Just check that we sensibly fetch and build the element
-    res = cli.run(project=project_dir, args=['build', 'fetch.bst'])
-    res.assert_success()
+        assert os.listdir(os.path.join(str(tmpdir), 'cache', 'sources', 'git')) != []
 
-    assert os.listdir(os.path.join(str(tmpdir), 'cache', 'sources', 'git')) != []
+        # get root digest of source
+        sourcecache = context.sourcecache
+        digest = sourcecache.export(source)._get_digest()
 
-    # get root digest of source
-    sourcecache = context.sourcecache
-    digest = sourcecache.export(source)._get_digest()
+        # Move source in local cas to repo
+        shutil.rmtree(os.path.join(str(tmpdir), 'sourceshare', 'repo', 'cas'))
+        shutil.move(
+            os.path.join(str(tmpdir), 'cache', 'source_protos'),
+            os.path.join(str(tmpdir), 'sourceshare', 'repo'))
+        shutil.move(
+            os.path.join(str(tmpdir), 'cache', 'cas'),
+            os.path.join(str(tmpdir), 'sourceshare', 'repo'))
+        shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'sources'))
+        shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'artifacts'))
 
-    # Move source in local cas to repo
-    shutil.rmtree(os.path.join(str(tmpdir), 'sourceshare', 'repo', 'cas'))
-    shutil.move(
-        os.path.join(str(tmpdir), 'cache', 'source_protos'),
-        os.path.join(str(tmpdir), 'sourceshare', 'repo'))
-    shutil.move(
-        os.path.join(str(tmpdir), 'cache', 'cas'),
-        os.path.join(str(tmpdir), 'sourceshare', 'repo'))
-    shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'sources'))
-    shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'artifacts'))
+        # check the share has the object
+        assert share.has_object(digest)
 
-    # check the share has the object
-    assert share.has_object(digest)
+        state = cli.get_element_state(project_dir, 'fetch.bst')
+        assert state == 'fetch needed'
 
-    state = cli.get_element_state(project_dir, 'fetch.bst')
-    assert state == 'fetch needed'
+        # Now fetch the source and check
+        res = cli.run(project=project_dir, args=['source', 'fetch', 'fetch.bst'])
+        res.assert_success()
+        assert "Pulled source" in res.stderr
 
-    # Now fetch the source and check
-    res = cli.run(project=project_dir, args=['source', 'fetch', 'fetch.bst'])
-    res.assert_success()
-    assert "Pulled source" in res.stderr
-
-    # check that we have the source in the cas now and it's not fetched
-    assert element._source_cached()
-    assert os.listdir(os.path.join(str(tmpdir), 'cache', 'sources', 'git')) == []
+        # check that we have the source in the cas now and it's not fetched
+        assert element._source_cached()
+        assert os.listdir(os.path.join(str(tmpdir), 'cache', 'sources', 'git')) == []
 
 
 @pytest.mark.datafiles(DATA_DIR)
@@ -151,32 +144,29 @@ def test_fetch_fallback(cli, tmpdir, datafiles):
     }
     _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
 
-    context = Context()
-    context.load(config=user_config_file)
-    context.messenger.set_message_handler(message_handler)
-
-    project = Project(project_dir, context)
-    project.ensure_fully_loaded()
+    with dummy_context(config=user_config_file) as context:
+        project = Project(project_dir, context)
+        project.ensure_fully_loaded()
 
-    element = project.load_elements(['fetch.bst'])[0]
-    assert not element._source_cached()
-    source = list(element.sources())[0]
+        element = project.load_elements(['fetch.bst'])[0]
+        assert not element._source_cached()
+        source = list(element.sources())[0]
 
-    cas = context.get_cascache()
-    assert not cas.contains(source._get_source_name())
-    assert not os.path.exists(os.path.join(cache_dir, 'sources'))
+        cas = context.get_cascache()
+        assert not cas.contains(source._get_source_name())
+        assert not os.path.exists(os.path.join(cache_dir, 'sources'))
 
-    # Now check if it falls back to the source fetch method.
-    res = cli.run(project=project_dir, args=['source', 'fetch', 'fetch.bst'])
-    res.assert_success()
-    brief_key = source._get_brief_display_key()
-    assert ("Remote source service ({}) does not have source {} cached"
-            .format(share.repo, brief_key)) in res.stderr
-    assert ("SUCCESS Fetching from {}"
-            .format(repo.source_config(ref=ref)['url'])) in res.stderr
+        # Now check if it falls back to the source fetch method.
+ res = cli.run(project=project_dir, args=['source', 'fetch', 'fetch.bst']) + res.assert_success() + brief_key = source._get_brief_display_key() + assert ("Remote source service ({}) does not have source {} cached" + .format(share.repo, brief_key)) in res.stderr + assert ("SUCCESS Fetching from {}" + .format(repo.source_config(ref=ref)['url'])) in res.stderr - # Check that the source in both in the source dir and the local CAS - assert element._source_cached() + # Check that the source in both in the source dir and the local CAS + assert element._source_cached() @pytest.mark.datafiles(DATA_DIR) @@ -209,22 +199,20 @@ def test_pull_fail(cli, tmpdir, datafiles): _yaml.roundtrip_dump(element, os.path.join(element_path, element_name)) # get the source object - context = Context() - context.load(config=user_config_file) - context.messenger.set_message_handler(message_handler) - project = Project(project_dir, context) - project.ensure_fully_loaded() - - element = project.load_elements(['push.bst'])[0] - assert not element._source_cached() - source = list(element.sources())[0] - - # remove files and check that it doesn't build - shutil.rmtree(repo.repo) - - # Should fail in stream, with a plugin tasks causing the error - res = cli.run(project=project_dir, args=['build', 'push.bst']) - res.assert_main_error(ErrorDomain.STREAM, None) - res.assert_task_error(ErrorDomain.PLUGIN, None) - assert "Remote source service ({}) does not have source {} cached".format( - share.repo, source._get_brief_display_key()) in res.stderr + with dummy_context(config=user_config_file) as context: + project = Project(project_dir, context) + project.ensure_fully_loaded() + + element = project.load_elements(['push.bst'])[0] + assert not element._source_cached() + source = list(element.sources())[0] + + # remove files and check that it doesn't build + shutil.rmtree(repo.repo) + + # Should fail in stream, with a plugin tasks causing the error + res = cli.run(project=project_dir, args=['build', 'push.bst']) + res.assert_main_error(ErrorDomain.STREAM, None) + res.assert_task_error(ErrorDomain.PLUGIN, None) + assert "Remote source service ({}) does not have source {} cached".format( + share.repo, source._get_brief_display_key()) in res.stderr -- cgit v1.2.1 From fdba3062724675a4d55f93898d1604100d6e12b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BCrg=20Billeter?= Date: Tue, 9 Jul 2019 17:22:17 +0200 Subject: tests/sourcecache/push.py: Use dummy_context() --- tests/sourcecache/push.py | 55 ++++++++++++++++++++++------------------------- 1 file changed, 26 insertions(+), 29 deletions(-) diff --git a/tests/sourcecache/push.py b/tests/sourcecache/push.py index e9c72d47c..3fe6d78f7 100644 --- a/tests/sourcecache/push.py +++ b/tests/sourcecache/push.py @@ -23,14 +23,13 @@ import os import shutil import pytest -from buildstream._context import Context from buildstream._exceptions import ErrorDomain from buildstream._project import Project from buildstream import _yaml from buildstream.testing import cli # pylint: disable=unused-import from buildstream.testing import create_repo -from tests.testutils import create_artifact_share +from tests.testutils import create_artifact_share, dummy_context DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project") @@ -70,33 +69,31 @@ def test_source_push(cli, tmpdir, datafiles): _yaml.roundtrip_dump(element, os.path.join(element_path, element_name)) # get the source object - context = Context() - context.load(config=user_config_file) - 
-        project = Project(project_dir, context)
-        project.ensure_fully_loaded()
-
-        element = project.load_elements(['push.bst'])[0]
-        assert not element._source_cached()
-        source = list(element.sources())[0]
-
-        # check we don't have it in the current cache
-        cas = context.get_cascache()
-        assert not cas.contains(source._get_source_name())
-
-        # build the element, this should fetch and then push the source to the
-        # remote
-        res = cli.run(project=project_dir, args=['build', 'push.bst'])
-        res.assert_success()
-        assert "Pushed source" in res.stderr
-
-        # check that we've got the remote locally now
-        sourcecache = context.sourcecache
-        assert sourcecache.contains(source)
-
-        # check that's the remote CAS now has it
-        digest = sourcecache.export(source)._get_digest()
-        assert share.has_object(digest)
+        with dummy_context(config=user_config_file) as context:
+            project = Project(project_dir, context)
+            project.ensure_fully_loaded()
+
+            element = project.load_elements(['push.bst'])[0]
+            assert not element._source_cached()
+            source = list(element.sources())[0]
+
+            # check we don't have it in the current cache
+            cas = context.get_cascache()
+            assert not cas.contains(source._get_source_name())
+
+            # build the element, this should fetch and then push the source to the
+            # remote
+            res = cli.run(project=project_dir, args=['build', 'push.bst'])
+            res.assert_success()
+            assert "Pushed source" in res.stderr
+
+            # check that we've got the remote locally now
+            sourcecache = context.sourcecache
+            assert sourcecache.contains(source)
+
+            # check that's the remote CAS now has it
+            digest = sourcecache.export(source)._get_digest()
+            assert share.has_object(digest)


 @pytest.mark.datafiles(DATA_DIR)
--
cgit v1.2.1


From 9a7bf4b3d2978f79026e9b72873a6215f6d25c0c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=BCrg=20Billeter?=
Date: Tue, 9 Jul 2019 17:27:33 +0200
Subject: tests/sourcecache/staging.py: Use dummy_context()

---
 tests/sourcecache/staging.py | 110 +++++++++++++++++++------------------------
 1 file changed, 48 insertions(+), 62 deletions(-)

diff --git a/tests/sourcecache/staging.py b/tests/sourcecache/staging.py
index c15bed215..6f812ee9c 100644
--- a/tests/sourcecache/staging.py
+++ b/tests/sourcecache/staging.py
@@ -25,20 +25,17 @@
 import os
 import shutil

 import pytest

-from buildstream._context import Context
 from buildstream._project import Project
 from buildstream.testing.runcli import cli  # pylint: disable=unused-import
+
+from tests.testutils import dummy_context
 from tests.testutils.element_generators import create_element_size

 DATA_DIR = os.path.dirname(os.path.realpath(__file__))


-def dummy_message_handler(message, is_silenced):
-    pass
-
-
 # walk that removes the root directory from roots
 def relative_walk(rootdir):
     for root, dirnames, filenames in os.walk(rootdir):
@@ -55,36 +52,32 @@ def test_source_staged(tmpdir, cli, datafiles):
         'cachedir': cachedir
     })

-    # set up minimal context
-    context = Context()
-    context.load()
-
-    # load project and sourcecache
-    project = Project(project_dir, context)
-    project.ensure_fully_loaded()
-    context.cachedir = cachedir
-    context.messenger.set_message_handler(dummy_message_handler)
-    sourcecache = context.sourcecache
-    cas = context.get_cascache()
-
     res = cli.run(project=project_dir, args=["build", "import-bin.bst"])
     res.assert_success()

-    # now check that the source is in the refs file, this is pretty messy but
-    # seems to be the only way to get the sources?
-    element = project.load_elements(["import-bin.bst"])[0]
-    source = list(element.sources())[0]
-    assert element._source_cached()
-    assert sourcecache.contains(source)
+    with dummy_context() as context:
+        context.cachedir = cachedir
+        # load project and sourcecache
+        project = Project(project_dir, context)
+        project.ensure_fully_loaded()
+        sourcecache = context.sourcecache
+        cas = context.get_cascache()

-    # Extract the file and check it's the same as the one we imported
-    digest = sourcecache.export(source)._get_digest()
-    extractdir = os.path.join(str(tmpdir), "extract")
-    cas.checkout(extractdir, digest)
-    dir1 = extractdir
-    dir2 = os.path.join(project_dir, "files", "bin-files")
+        # now check that the source is in the refs file, this is pretty messy but
+        # seems to be the only way to get the sources?
+        element = project.load_elements(["import-bin.bst"])[0]
+        source = list(element.sources())[0]
+        assert element._source_cached()
+        assert sourcecache.contains(source)

-    assert list(relative_walk(dir1)) == list(relative_walk(dir2))
+        # Extract the file and check it's the same as the one we imported
+        digest = sourcecache.export(source)._get_digest()
+        extractdir = os.path.join(str(tmpdir), "extract")
+        cas.checkout(extractdir, digest)
+        dir1 = extractdir
+        dir2 = os.path.join(project_dir, "files", "bin-files")
+
+        assert list(relative_walk(dir1)) == list(relative_walk(dir2))


 # Check sources are staged during a fetch
@@ -97,33 +90,29 @@ def test_source_fetch(tmpdir, cli, datafiles):
         'cachedir': cachedir
     })

-    # set up minimal context
-    context = Context()
-    context.load()
-
-    # load project and sourcecache
-    project = Project(project_dir, context)
-    project.ensure_fully_loaded()
-    context.cachedir = cachedir
-    context.messenger.set_message_handler(dummy_message_handler)
-    cas = context.get_cascache()
-    sourcecache = context.sourcecache
-
     res = cli.run(project=project_dir, args=["source", "fetch", "import-dev.bst"])
     res.assert_success()

-    element = project.load_elements(["import-dev.bst"])[0]
-    source = list(element.sources())[0]
-    assert element._source_cached()
+    with dummy_context() as context:
+        context.cachedir = cachedir
+        # load project and sourcecache
+        project = Project(project_dir, context)
+        project.ensure_fully_loaded()
+        cas = context.get_cascache()
+        sourcecache = context.sourcecache
+
+        element = project.load_elements(["import-dev.bst"])[0]
+        source = list(element.sources())[0]
+        assert element._source_cached()

-    # check that the directory structures are idetical
-    digest = sourcecache.export(source)._get_digest()
-    extractdir = os.path.join(str(tmpdir), "extract")
-    cas.checkout(extractdir, digest)
-    dir1 = extractdir
-    dir2 = os.path.join(project_dir, "files", "dev-files")
+        # check that the directory structures are idetical
+        digest = sourcecache.export(source)._get_digest()
+        extractdir = os.path.join(str(tmpdir), "extract")
+        cas.checkout(extractdir, digest)
+        dir1 = extractdir
+        dir2 = os.path.join(project_dir, "files", "dev-files")

-    assert list(relative_walk(dir1)) == list(relative_walk(dir2))
+        assert list(relative_walk(dir1)) == list(relative_walk(dir2))


 # Check that with sources only in the CAS build successfully completes
@@ -141,18 +130,15 @@ def test_staged_source_build(tmpdir, datafiles, cli):

     create_element_size('target.bst', project_dir, element_path, [], 10000)

-    # get the source object
-    context = Context()
-    context.load()
-    project = Project(project_dir, context)
-    project.ensure_fully_loaded()
-    context.cachedir = cachedir
-    context.messenger.set_message_handler(dummy_message_handler)
+    with dummy_context() as context:
+        context.cachedir = cachedir
+        project = Project(project_dir, context)
+        project.ensure_fully_loaded()

-    element = project.load_elements(["import-dev.bst"])[0]
+        element = project.load_elements(["import-dev.bst"])[0]

-    # check consistency of the source
-    assert not element._source_cached()
+        # check consistency of the source
+        assert not element._source_cached()

     res = cli.run(project=project_dir, args=['build', 'target.bst'])
     res.assert_success()
--
cgit v1.2.1


From 7e9d42e4f110601642551939706f0d723f28a34d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=BCrg=20Billeter?=
Date: Tue, 16 Jul 2019 13:35:34 +0000
Subject: tests/sourcecache: Fix typos in comments

Spotted by Darius.
---
 tests/sourcecache/fetch.py   | 2 +-
 tests/sourcecache/push.py    | 2 +-
 tests/sourcecache/staging.py | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/tests/sourcecache/fetch.py b/tests/sourcecache/fetch.py
index 015ed8af9..99c00f120 100644
--- a/tests/sourcecache/fetch.py
+++ b/tests/sourcecache/fetch.py
@@ -210,7 +210,7 @@ def test_pull_fail(cli, tmpdir, datafiles):
             # remove files and check that it doesn't build
             shutil.rmtree(repo.repo)

-            # Should fail in stream, with a plugin tasks causing the error
+            # Should fail in stream, with a plugin task causing the error
             res = cli.run(project=project_dir, args=['build', 'push.bst'])
             res.assert_main_error(ErrorDomain.STREAM, None)
             res.assert_task_error(ErrorDomain.PLUGIN, None)
diff --git a/tests/sourcecache/push.py b/tests/sourcecache/push.py
index 3fe6d78f7..b0fae616e 100644
--- a/tests/sourcecache/push.py
+++ b/tests/sourcecache/push.py
@@ -91,7 +91,7 @@ def test_source_push(cli, tmpdir, datafiles):
             sourcecache = context.sourcecache
             assert sourcecache.contains(source)

-            # check that's the remote CAS now has it
+            # check that the remote CAS now has it
             digest = sourcecache.export(source)._get_digest()
             assert share.has_object(digest)
diff --git a/tests/sourcecache/staging.py b/tests/sourcecache/staging.py
index 6f812ee9c..186a4bd9f 100644
--- a/tests/sourcecache/staging.py
+++ b/tests/sourcecache/staging.py
@@ -105,7 +105,7 @@ def test_source_fetch(tmpdir, cli, datafiles):
         source = list(element.sources())[0]
         assert element._source_cached()

-        # check that the directory structures are idetical
+        # check that the directory structures are identical
         digest = sourcecache.export(source)._get_digest()
         extractdir = os.path.join(str(tmpdir), "extract")
         cas.checkout(extractdir, digest)
--
cgit v1.2.1
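The dummy_context() helper that these patches import from tests.testutils is not itself shown in the series. A minimal sketch of what such a helper might look like, inferred only from how the converted tests call it: the name and import path appear in the diffs above, but the body below is an assumption. It presumes Context.load() accepts config=None (both context.load() and context.load(config=user_config_file) appear in the removed test code) and that a no-op message handler is acceptable for tests, mirroring the dummy_message_handler that the staging.py patch deletes.

    from contextlib import contextmanager

    from buildstream._context import Context


    def _dummy_message_handler(message, is_silenced):
        # Tests using dummy_context() do not inspect pipeline feedback,
        # so messages are simply discarded.
        pass


    @contextmanager
    def dummy_context(*, config=None):
        # Context is now a context manager (see the first patch in this
        # series), so entering it here gives every test reliable cleanup.
        # The load()/set_message_handler() boilerplate that each test used
        # to repeat inline is done once, centrally.
        with Context() as context:
            context.load(config=config)
            context.messenger.set_message_handler(_dummy_message_handler)
            yield context

Under these assumptions a test simply writes "with dummy_context(config=user_config_file) as context:" exactly as in the hunks above, and the Context's __exit__ cleanup runs even when an assertion fails partway through the block.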