-rw-r--r--  src/buildstream/_artifact.py                |   2
-rw-r--r--  src/buildstream/_basecache.py               |   4
-rw-r--r--  src/buildstream/_cas/cascache.py            |   2
-rw-r--r--  src/buildstream/_context.py                 | 135
-rw-r--r--  src/buildstream/_frontend/app.py            |   4
-rw-r--r--  src/buildstream/_loader/loader.py           |   2
-rw-r--r--  src/buildstream/_messenger.py               |   6
-rw-r--r--  src/buildstream/_pipeline.py                |   8
-rw-r--r--  src/buildstream/_project.py                 |   8
-rw-r--r--  src/buildstream/_scheduler/jobs/job.py      |  10
-rw-r--r--  src/buildstream/_scheduler/queues/queue.py  |   2
-rw-r--r--  src/buildstream/_stream.py                  |  10
-rw-r--r--  src/buildstream/plugin.py                   |  12
-rw-r--r--  src/buildstream/sandbox/_sandboxremote.py   |   6
-rw-r--r--  src/buildstream/sandbox/sandbox.py          |   4
-rw-r--r--  src/buildstream/source.py                   |   6
-rw-r--r--  tests/artifactcache/pull.py                 |  10
-rw-r--r--  tests/artifactcache/push.py                 |   6
-rw-r--r--  tests/internals/loader.py                   |   2
-rw-r--r--  tests/internals/pluginloading.py            |   2
-rw-r--r--  tests/sourcecache/fetch.py                  |   6
-rw-r--r--  tests/sourcecache/push.py                   |   2
-rw-r--r--  tests/sourcecache/staging.py                |   6
23 files changed, 62 insertions, 193 deletions
diff --git a/src/buildstream/_artifact.py b/src/buildstream/_artifact.py
index ec80e6417..02adb3404 100644
--- a/src/buildstream/_artifact.py
+++ b/src/buildstream/_artifact.py
@@ -162,7 +162,7 @@ class Artifact():
new_build.was_workspaced = bool(e._get_workspace())
# Store log file
- log_filename = context.get_log_filename()
+ log_filename = context.messenger.get_log_filename()
if log_filename:
digest = self._cas.add_object(path=log_filename)
element._build_log_path = self._cas.objpath(digest)
diff --git a/src/buildstream/_basecache.py b/src/buildstream/_basecache.py
index 49930d4ca..a29973158 100644
--- a/src/buildstream/_basecache.py
+++ b/src/buildstream/_basecache.py
@@ -244,7 +244,7 @@ class BaseCache():
#
def _message(self, message_type, message, **kwargs):
args = dict(kwargs)
- self.context.message(
+ self.context.messenger.message(
Message(None, message_type, message, **args))
# _set_remotes():
@@ -272,7 +272,7 @@ class BaseCache():
def remote_failed(url, error):
self._message(MessageType.WARN, "Failed to initialize remote {}: {}".format(url, error))
- with self.context.timed_activity("Initializing remote caches", silent_nested=True):
+ with self.context.messenger.timed_activity("Initializing remote caches", silent_nested=True):
self.initialize_remotes(on_failure=remote_failed)
# _list_refs_mtimes()
diff --git a/src/buildstream/_cas/cascache.py b/src/buildstream/_cas/cascache.py
index 53f6029db..771e31208 100644
--- a/src/buildstream/_cas/cascache.py
+++ b/src/buildstream/_cas/cascache.py
@@ -1052,7 +1052,7 @@ class CASQuota:
self._cache_lower_threshold = None # The target cache size for a cleanup
self.available_space = None
- self._message = context.message
+ self._message = context.messenger.message
self._remove_callbacks = [] # Callbacks to remove unrequired refs and their remove method
self._list_refs_callbacks = [] # Callbacks to all refs
diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index 8d8509f27..52e4c3db9 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -19,7 +19,6 @@
import os
import shutil
-from contextlib import contextmanager
from . import utils
from . import _cachekey
from . import _site
@@ -145,6 +144,8 @@ class Context():
# Make sure the XDG vars are set in the environment before loading anything
self._init_xdg()
+ self.messenger = Messenger()
+
# Private variables
self._cache_key = None
self._artifactcache = None
@@ -155,7 +156,6 @@ class Context():
self._workspace_project_cache = WorkspaceProjectCache()
self._cascache = None
self._casquota = None
- self._messenger = Messenger()
# load()
#
@@ -433,116 +433,6 @@ class Context():
return self._cache_key
- # set_message_handler()
- #
- # Sets the handler for any status messages propagated through
- # the context.
- #
- # The handler should have the signature:
- #
- # def handler(
- # message: _message.Message, # The message to send.
- # is_silenced: bool, # Whether messages are currently being silenced.
- # ) -> None
- #
- def set_message_handler(self, handler):
- self._messenger.set_message_handler(handler)
-
- # silent_messages():
- #
- # Returns:
- # (bool): Whether messages are currently being silenced
- #
- def silent_messages(self):
- return self._messenger.silent_messages()
-
- # message():
- #
- # Proxies a message back to the caller, this is the central
- # point through which all messages pass.
- #
- # Args:
- # message: A Message object
- #
- def message(self, message):
- self._messenger.message(message)
-
- # silence()
- #
- # A context manager to silence messages, this behaves in
- # the same way as the `silent_nested` argument of the
- # Context._timed_activity() context manager: especially
- # important messages will not be silenced.
- #
- @contextmanager
- def silence(self):
- with self._messenger.silence():
- yield
-
- # timed_activity()
- #
- # Context manager for performing timed activities and logging those
- #
- # Args:
- # context (Context): The invocation context object
- # activity_name (str): The name of the activity
- # detail (str): An optional detailed message, can be multiline output
- # silent_nested (bool): If specified, nested messages will be silenced
- #
- @contextmanager
- def timed_activity(self, activity_name, *, unique_id=None, detail=None, silent_nested=False):
- with self._messenger.timed_activity(
- activity_name, unique_id=unique_id, detail=detail, silent_nested=silent_nested):
- yield
-
- # recorded_messages()
- #
- # Records all messages in a log file while the context manager
- # is active.
- #
- # In addition to automatically writing all messages to the
- # specified logging file, an open file handle for process stdout
- # and stderr will be available via the Context.get_log_handle() API,
- # and the full logfile path will be available via the
- # Context.get_log_filename() API.
- #
- # Args:
- # filename (str): A logging directory relative filename,
- # the pid and .log extension will be automatically
- # appended
- #
- # Yields:
- # (str): The fully qualified log filename
- #
- @contextmanager
- def recorded_messages(self, filename):
- with self._messenger.recorded_messages(filename, logdir=self.logdir) as messages:
- yield messages
-
- # get_log_handle()
- #
- # Fetches the active log handle, this will return the active
- # log file handle when the Context.recorded_messages() context
- # manager is active
- #
- # Returns:
- # (file): The active logging file handle, or None
- #
- def get_log_handle(self):
- return self._messenger.get_log_handle()
-
- # get_log_filename()
- #
- # Fetches the active log filename, this will return the active
- # log filename when the Context.recorded_messages() context
- # manager is active
- #
- # Returns:
- # (str): The active logging filename, or None
- #
- def get_log_filename(self):
- return self._messenger.get_log_filename()
-
# set_artifact_directories_optional()
#
# This indicates that the current context (command or configuration)
@@ -562,27 +452,6 @@ class Context():
def set_artifact_files_optional(self):
self.require_artifact_files = False
- # _record_message()
- #
- # Records the message if recording is enabled
- #
- # Args:
- # message (Message): The message to record
- #
- def _record_message(self, message):
- self._messenger._record_message(message)
-
- # _push_message_depth() / _pop_message_depth()
- #
- # For status messages, send the depth of timed
- # activities inside a given task through the message
- #
- def _push_message_depth(self, silent_nested):
- self._messenger._push_message_depth(silent_nested)
-
- def _pop_message_depth(self):
- self._messenger._pop_message_depth()
-
# Force the resolved XDG variables into the environment,
# this is so that they can be used directly to specify
# preferred locations of things from user configuration
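With the proxy methods removed from Context, callers reach the messaging API through
the new public messenger attribute instead. A minimal before/after sketch, assuming
the internal import paths implied by the files in this diff; the handler signature
comes from the removed Context.set_message_handler() comment above:

    import os

    from buildstream._context import Context
    from buildstream._message import Message, MessageType

    def handler(message, is_silenced):
        # A trivial handler; a real frontend does its own rendering
        pass

    context = Context()
    context.load(config=os.devnull)
    context.messenger.set_message_handler(handler)

    # Previously: context.message(...) / context.timed_activity(...)
    context.messenger.message(Message(None, MessageType.INFO, "hello"))
    with context.messenger.timed_activity("Doing work", silent_nested=True):
        pass  # nested messages are silenced inside this block
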
diff --git a/src/buildstream/_frontend/app.py b/src/buildstream/_frontend/app.py
index 52d5f2d47..9550fea40 100644
--- a/src/buildstream/_frontend/app.py
+++ b/src/buildstream/_frontend/app.py
@@ -213,7 +213,7 @@ class App():
indent=INDENT)
# Propagate pipeline feedback to the user
- self.context.set_message_handler(self._message_handler)
+ self.context.messenger.set_message_handler(self._message_handler)
# Preflight the artifact cache after initializing logging,
# this can cause messages to be emitted.
@@ -459,7 +459,7 @@ class App():
#
def _message(self, message_type, message, **kwargs):
args = dict(kwargs)
- self.context.message(
+ self.context.messenger.message(
Message(None, message_type, message, **args))
# Exception handler
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index 5f98b127c..b221c48d0 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -721,7 +721,7 @@ class Loader():
raise LoadError(warning_token, brief)
message = Message(None, MessageType.WARN, brief)
- self._context.message(message)
+ self._context.messenger.message(message)
# Print warning messages if any of the specified elements have invalid names.
#
diff --git a/src/buildstream/_messenger.py b/src/buildstream/_messenger.py
index 069ca124a..cb5051595 100644
--- a/src/buildstream/_messenger.py
+++ b/src/buildstream/_messenger.py
@@ -52,12 +52,12 @@ class Messenger():
def set_message_handler(self, handler):
self._message_handler = handler
- # silent_messages():
+ # _silent_messages():
#
# Returns:
# (bool): Whether messages are currently being silenced
#
- def silent_messages(self):
+ def _silent_messages(self):
for silent in self._message_depth:
if silent:
return True
@@ -85,7 +85,7 @@ class Messenger():
# to the frontend)
assert self._message_handler
- self._message_handler(message, is_silenced=self.silent_messages())
+ self._message_handler(message, is_silenced=self._silent_messages())
# silence()
#
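Note that silent_messages() becomes the private _silent_messages(), so code outside
the Messenger should rely on the is_silenced flag already passed to the registered
handler rather than querying the silenced state directly. A small handler sketch,
assuming Message exposes message_type and message attributes:

    def handler(message, is_silenced):
        if is_silenced:
            return  # skip output emitted under silence()/silent_nested
        print("[{}] {}".format(message.message_type, message.message))

    context.messenger.set_message_handler(handler)
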
diff --git a/src/buildstream/_pipeline.py b/src/buildstream/_pipeline.py
index 0758cf5ff..4352df56c 100644
--- a/src/buildstream/_pipeline.py
+++ b/src/buildstream/_pipeline.py
@@ -123,7 +123,7 @@ class Pipeline():
# targets (list of Element): The list of toplevel element targets
#
def resolve_elements(self, targets):
- with self._context.timed_activity("Resolving cached state", silent_nested=True):
+ with self._context.messenger.timed_activity("Resolving cached state", silent_nested=True):
for element in self.dependencies(targets, Scope.ALL):
# Preflight
@@ -355,7 +355,7 @@ class Pipeline():
def assert_consistent(self, elements):
inconsistent = []
inconsistent_workspaced = []
- with self._context.timed_activity("Checking sources"):
+ with self._context.messenger.timed_activity("Checking sources"):
for element in elements:
if element._get_consistency() == Consistency.INCONSISTENT:
if element._get_workspace():
@@ -391,7 +391,7 @@ class Pipeline():
#
def assert_sources_cached(self, elements):
uncached = []
- with self._context.timed_activity("Checking sources"):
+ with self._context.messenger.timed_activity("Checking sources"):
for element in elements:
if element._get_consistency() < Consistency.CACHED and \
not element._source_cached():
@@ -466,7 +466,7 @@ class Pipeline():
#
def _message(self, message_type, message, **kwargs):
args = dict(kwargs)
- self._context.message(
+ self._context.messenger.message(
Message(None, message_type, message, **args))
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index 114d25054..5f433c090 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -445,10 +445,10 @@ class Project():
# (list): A list of loaded Element
#
def load_elements(self, targets, *, rewritable=False):
- with self._context.timed_activity("Loading elements", silent_nested=True):
+ with self._context.messenger.timed_activity("Loading elements", silent_nested=True):
meta_elements = self.loader.load(targets, rewritable=rewritable, ticker=None)
- with self._context.timed_activity("Resolving elements"):
+ with self._context.messenger.timed_activity("Resolving elements"):
elements = [
Element._new_from_meta(meta)
for meta in meta_elements
@@ -466,7 +466,7 @@ class Project():
for source, ref in redundant_refs
]
detail += "\n".join(lines)
- self._context.message(
+ self._context.messenger.message(
Message(None, MessageType.WARN, "Ignoring redundant source references", detail=detail))
return elements
@@ -694,7 +694,7 @@ class Project():
# Deprecation check
if fail_on_overlap is not None:
- self._context.message(
+ self._context.messenger.message(
Message(
None,
MessageType.WARN,
diff --git a/src/buildstream/_scheduler/jobs/job.py b/src/buildstream/_scheduler/jobs/job.py
index e970481ab..00e4a0c10 100644
--- a/src/buildstream/_scheduler/jobs/job.py
+++ b/src/buildstream/_scheduler/jobs/job.py
@@ -347,7 +347,7 @@ class Job():
if "unique_id" in kwargs:
unique_id = kwargs["unique_id"]
del kwargs["unique_id"]
- self._scheduler.context.message(
+ self._scheduler.context.messenger.message(
Message(unique_id, message_type, message, **kwargs))
#######################################################
@@ -470,7 +470,7 @@ class Job():
if envelope.message_type is _MessageType.LOG_MESSAGE:
# Propagate received messages from children
# back through the context.
- self._scheduler.context.message(envelope.message)
+ self._scheduler.context.messenger.message(envelope.message)
elif envelope.message_type is _MessageType.ERROR:
# For regression tests only, save the last error domain / reason
# reported from a child task in the main process, this global state
@@ -592,7 +592,7 @@ class ChildJob():
if "unique_id" in kwargs:
unique_id = kwargs["unique_id"]
del kwargs["unique_id"]
- self._context.message(
+ self._context.messenger.message(
Message(unique_id, message_type, message, **kwargs))
# send_message()
@@ -673,7 +673,7 @@ class ChildJob():
# Set the global message handler in this child
# process to forward messages to the parent process
self._queue = queue
- self._context.set_message_handler(self._child_message_handler)
+ self._context.messenger.set_message_handler(self._child_message_handler)
starttime = datetime.datetime.now()
stopped_time = None
@@ -690,7 +690,7 @@ class ChildJob():
# Time, log and and run the action function
#
with _signals.suspendable(stop_time, resume_time), \
- self._context.recorded_messages(self._logfile) as filename:
+ self._context.messenger.recorded_messages(self._logfile, self._context.logdir) as filename:
self.message(MessageType.START, self.action_name, logfile=filename)
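On the Messenger, recorded_messages() takes the logging directory as an explicit
argument instead of reading it from Context, which is why the child job above passes
self._context.logdir through. A rough sketch of the pattern, using a made-up
logging-directory-relative filename:

    # 'myproject/build/hello' is a hypothetical relative name; the pid and a
    # .log extension are appended automatically.
    with context.messenger.recorded_messages("myproject/build/hello", context.logdir) as filename:
        # While recording is active the open handle and full path are available
        log_handle = context.messenger.get_log_handle()
        log_filename = context.messenger.get_log_filename()
        assert log_filename == filename
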
diff --git a/src/buildstream/_scheduler/queues/queue.py b/src/buildstream/_scheduler/queues/queue.py
index f2cefd5d2..49a5381c1 100644
--- a/src/buildstream/_scheduler/queues/queue.py
+++ b/src/buildstream/_scheduler/queues/queue.py
@@ -332,7 +332,7 @@ class Queue():
def _message(self, element, message_type, brief, **kwargs):
context = element._get_context()
message = Message(element._unique_id, message_type, brief, **kwargs)
- context.message(message)
+ context.messenger.message(message)
def _element_log_path(self, element):
project = element._get_project()
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 195be55ba..c6d748f91 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -617,7 +617,7 @@ class Stream():
# Prune the artifact cache
if ref_removed and not no_prune:
- with self._context.timed_activity("Pruning artifact cache"):
+ with self._context.messenger.timed_activity("Pruning artifact cache"):
self._artifacts.prune()
if not ref_removed:
@@ -789,8 +789,8 @@ class Stream():
# Remove workspace directory if prompted
if remove_dir:
- with self._context.timed_activity("Removing workspace directory {}"
- .format(workspace.get_absolute_path())):
+ with self._context.messenger.timed_activity("Removing workspace directory {}"
+ .format(workspace.get_absolute_path())):
try:
shutil.rmtree(workspace.get_absolute_path())
except OSError as e:
@@ -1195,7 +1195,7 @@ class Stream():
#
def _message(self, message_type, message, **kwargs):
args = dict(kwargs)
- self._context.message(
+ self._context.messenger.message(
Message(None, message_type, message, **args))
# _add_queue()
@@ -1434,7 +1434,7 @@ class Stream():
# Collect the sources in the given sandbox into a tarfile
def _collect_sources(self, directory, tar_name, element_name, compression):
- with self._context.timed_activity("Creating tarball {}".format(tar_name)):
+ with self._context.messenger.timed_activity("Creating tarball {}".format(tar_name)):
if compression == "none":
permissions = "w:"
else:
diff --git a/src/buildstream/plugin.py b/src/buildstream/plugin.py
index a12ff61ec..de969c267 100644
--- a/src/buildstream/plugin.py
+++ b/src/buildstream/plugin.py
@@ -657,10 +657,10 @@ class Plugin():
# This will raise SourceError on its own
self.call(... command which takes time ...)
"""
- with self.__context.timed_activity(activity_name,
- unique_id=self._unique_id,
- detail=detail,
- silent_nested=silent_nested):
+ with self.__context.messenger.timed_activity(activity_name,
+ unique_id=self._unique_id,
+ detail=detail,
+ silent_nested=silent_nested):
yield
def call(self, *popenargs, fail=None, fail_temporarily=False, **kwargs):
@@ -798,7 +798,7 @@ class Plugin():
#
@contextmanager
def _output_file(self):
- log = self.__context.get_log_handle()
+ log = self.__context.messenger.get_log_handle()
if log is None:
with open(os.devnull, "w") as output:
yield output
@@ -870,7 +870,7 @@ class Plugin():
def __message(self, message_type, brief, **kwargs):
message = Message(self._unique_id, message_type, brief, **kwargs)
- self.__context.message(message)
+ self.__context.messenger.message(message)
def __note_command(self, output, *popenargs, **kwargs):
workdir = kwargs.get('cwd', os.getcwd())
diff --git a/src/buildstream/sandbox/_sandboxremote.py b/src/buildstream/sandbox/_sandboxremote.py
index d90b164bc..075a69a2b 100644
--- a/src/buildstream/sandbox/_sandboxremote.py
+++ b/src/buildstream/sandbox/_sandboxremote.py
@@ -106,7 +106,7 @@ class SandboxRemote(Sandbox):
self.operation_name = None
def info(self, msg):
- self._get_context().message(Message(None, MessageType.INFO, msg))
+ self._get_context().messenger.message(Message(None, MessageType.INFO, msg))
@staticmethod
def specs_from_config_node(config_node, basedir=None):
@@ -226,8 +226,8 @@ class SandboxRemote(Sandbox):
# Set up signal handler to trigger cancel_operation on SIGTERM
operation = None
- with self._get_context().timed_activity("Waiting for the remote build to complete"), \
- _signals.terminator(partial(self.cancel_operation, channel)):
+ with self._get_context().messenger.timed_activity("Waiting for the remote build to complete"), \
+ _signals.terminator(partial(self.cancel_operation, channel)):
operation = __run_remote_command(stub, execute_request=request)
if operation is None:
return None
diff --git a/src/buildstream/sandbox/sandbox.py b/src/buildstream/sandbox/sandbox.py
index c96ccb57b..a651fb783 100644
--- a/src/buildstream/sandbox/sandbox.py
+++ b/src/buildstream/sandbox/sandbox.py
@@ -628,7 +628,7 @@ class _SandboxBatch():
def execute_group(self, group):
if group.label:
context = self.sandbox._get_context()
- cm = context.timed_activity(group.label, unique_id=self.sandbox._get_plugin_id())
+ cm = context.messenger.timed_activity(group.label, unique_id=self.sandbox._get_plugin_id())
else:
cm = contextlib.suppress()
@@ -640,7 +640,7 @@ class _SandboxBatch():
context = self.sandbox._get_context()
message = Message(self.sandbox._get_plugin_id(), MessageType.STATUS,
'Running command', detail=command.label)
- context.message(message)
+ context.messenger.message(message)
exitcode = self.sandbox._run(command.command, self.flags, cwd=command.cwd, env=command.env)
if exitcode != 0:
diff --git a/src/buildstream/source.py b/src/buildstream/source.py
index 9fc9cf17d..b5c8f9a63 100644
--- a/src/buildstream/source.py
+++ b/src/buildstream/source.py
@@ -698,7 +698,7 @@ class Source(Plugin):
# Source consistency interrogations are silent.
context = self._get_context()
- with context.silence():
+ with context.messenger.silence():
self.__consistency = self.get_consistency() # pylint: disable=assignment-from-no-return
# Give the Source an opportunity to validate the cached
@@ -1150,7 +1150,7 @@ class Source(Plugin):
# Silence the STATUS messages which might happen as a result
# of checking the source fetchers.
- with context.silence():
+ with context.messenger.silence():
source_fetchers = self.get_source_fetchers()
# Use the source fetchers if they are provided
@@ -1165,7 +1165,7 @@ class Source(Plugin):
while True:
- with context.silence():
+ with context.messenger.silence():
try:
fetcher = next(source_fetchers)
except StopIteration:
diff --git a/tests/artifactcache/pull.py b/tests/artifactcache/pull.py
index b42af9e6e..a4ea74633 100644
--- a/tests/artifactcache/pull.py
+++ b/tests/artifactcache/pull.py
@@ -95,7 +95,7 @@ def test_pull(cli, tmpdir, datafiles):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
- context.set_message_handler(message_handler)
+ context.messenger.set_message_handler(message_handler)
# Load the project
project = Project(project_dir, context)
@@ -136,7 +136,7 @@ def _test_pull(user_config_file, project_dir, cache_dir,
context.cachedir = cache_dir
context.casdir = os.path.join(cache_dir, 'cas')
context.tmpdir = os.path.join(cache_dir, 'tmp')
- context.set_message_handler(message_handler)
+ context.messenger.set_message_handler(message_handler)
# Load the project manually
project = Project(project_dir, context)
@@ -198,7 +198,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
- context.set_message_handler(message_handler)
+ context.messenger.set_message_handler(message_handler)
# Load the project and CAS cache
project = Project(project_dir, context)
@@ -273,7 +273,7 @@ def _test_push_tree(user_config_file, project_dir, artifact_digest, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
- context.set_message_handler(message_handler)
+ context.messenger.set_message_handler(message_handler)
# Load the project manually
project = Project(project_dir, context)
@@ -308,7 +308,7 @@ def _test_pull_tree(user_config_file, project_dir, artifact_digest, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
- context.set_message_handler(message_handler)
+ context.messenger.set_message_handler(message_handler)
# Load the project manually
project = Project(project_dir, context)
diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index 996091fca..a54c1df09 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -71,7 +71,7 @@ def test_push(cli, tmpdir, datafiles):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
- context.set_message_handler(message_handler)
+ context.messenger.set_message_handler(message_handler)
# Load the project manually
project = Project(project_dir, context)
@@ -108,7 +108,7 @@ def _test_push(user_config_file, project_dir, element_name, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
- context.set_message_handler(message_handler)
+ context.messenger.set_message_handler(message_handler)
# Load the project manually
project = Project(project_dir, context)
@@ -194,7 +194,7 @@ def _test_push_message(user_config_file, project_dir, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
- context.set_message_handler(message_handler)
+ context.messenger.set_message_handler(message_handler)
# Load the project manually
project = Project(project_dir, context)
diff --git a/tests/internals/loader.py b/tests/internals/loader.py
index f0b4f2622..a4ebdb9ac 100644
--- a/tests/internals/loader.py
+++ b/tests/internals/loader.py
@@ -20,7 +20,7 @@ def dummy_handler(message, is_silenced):
def make_loader(basedir):
context = Context()
context.load(config=os.devnull)
- context.set_message_handler(dummy_handler)
+ context.messenger.set_message_handler(dummy_handler)
project = Project(basedir, context)
return project.loader
diff --git a/tests/internals/pluginloading.py b/tests/internals/pluginloading.py
index fa0b9afe8..9093680f4 100644
--- a/tests/internals/pluginloading.py
+++ b/tests/internals/pluginloading.py
@@ -22,7 +22,7 @@ def create_pipeline(tmpdir, basedir, target):
def dummy_handler(message, is_silenced):
pass
- context.set_message_handler(dummy_handler)
+ context.messenger.set_message_handler(dummy_handler)
pipeline = Pipeline(context, project, None)
targets, = pipeline.load([(target,)])
diff --git a/tests/sourcecache/fetch.py b/tests/sourcecache/fetch.py
index d7145d7f4..3fc9d96a6 100644
--- a/tests/sourcecache/fetch.py
+++ b/tests/sourcecache/fetch.py
@@ -71,7 +71,7 @@ def test_source_fetch(cli, tmpdir, datafiles):
context = Context()
context.load(config=user_config_file)
- context.set_message_handler(message_handler)
+ context.messenger.set_message_handler(message_handler)
project = Project(project_dir, context)
project.ensure_fully_loaded()
@@ -146,7 +146,7 @@ def test_fetch_fallback(cli, tmpdir, datafiles):
context = Context()
context.load(config=user_config_file)
- context.set_message_handler(message_handler)
+ context.messenger.set_message_handler(message_handler)
project = Project(project_dir, context)
project.ensure_fully_loaded()
@@ -204,7 +204,7 @@ def test_pull_fail(cli, tmpdir, datafiles):
# get the source object
context = Context()
context.load(config=user_config_file)
- context.set_message_handler(message_handler)
+ context.messenger.set_message_handler(message_handler)
project = Project(project_dir, context)
project.ensure_fully_loaded()
diff --git a/tests/sourcecache/push.py b/tests/sourcecache/push.py
index af4e262f0..6282b6e60 100644
--- a/tests/sourcecache/push.py
+++ b/tests/sourcecache/push.py
@@ -72,7 +72,7 @@ def test_source_push(cli, tmpdir, datafiles):
# get the source object
context = Context()
context.load(config=user_config_file)
- context.set_message_handler(message_handler)
+ context.messenger.set_message_handler(message_handler)
project = Project(project_dir, context)
project.ensure_fully_loaded()
diff --git a/tests/sourcecache/staging.py b/tests/sourcecache/staging.py
index cffd0bb58..9dc431bda 100644
--- a/tests/sourcecache/staging.py
+++ b/tests/sourcecache/staging.py
@@ -63,7 +63,7 @@ def test_source_staged(tmpdir, cli, datafiles):
project = Project(project_dir, context)
project.ensure_fully_loaded()
context.cachedir = cachedir
- context.set_message_handler(dummy_message_handler)
+ context.messenger.set_message_handler(dummy_message_handler)
sourcecache = context.sourcecache
cas = context.get_cascache()
@@ -106,7 +106,7 @@ def test_source_fetch(tmpdir, cli, datafiles):
project = Project(project_dir, context)
project.ensure_fully_loaded()
context.cachedir = cachedir
- context.set_message_handler(dummy_message_handler)
+ context.messenger.set_message_handler(dummy_message_handler)
cas = context.get_cascache()
res = cli.run(project=project_dir, args=["source", "fetch", "import-dev.bst"])
@@ -148,7 +148,7 @@ def test_staged_source_build(tmpdir, datafiles, cli):
project = Project(project_dir, context)
project.ensure_fully_loaded()
context.cachedir = cachedir
- context.set_message_handler(dummy_message_handler)
+ context.messenger.set_message_handler(dummy_message_handler)
element = project.load_elements(["import-dev.bst"])[0]