author    Tom Pollard <tom.pollard@codethink.co.uk>  2019-07-31 16:31:47 +0100
committer bst-marge-bot <marge-bot@buildstream.build>  2019-08-08 13:52:36 +0000
commit    0026e37918998addb61d7cefcbb9b5f7f6f7b4f4 (patch)
tree      a756eaf5ba545882f8ec69204759ac22b8112d23
parent    1701aa1239f472317edfc6f675378dc91b1fcd61 (diff)
download  buildstream-tpollard/messageobject.tar.gz
_message.py: Use element_name & element_key instead of unique_id
Adding the element full name and display key to all element-related messages removes the need to look up the plugin table via a plugin unique_id just to retrieve the same values for logging and frontend widget display. Relying on plugin table state is also incompatible with running the frontend in a separate process, as the table would then exist in multiple states.

The element full name is now displayed instead of the unique_id, for example in the debugging widget. It is also displayed in place of 'name' (i.e. including any prepended junction name) to be more informative.
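As an illustration only, below is a minimal, self-contained sketch of the message shape this patch introduces; it is not BuildStream's actual implementation, and the render() helper, the example element name and the display-key tuple are assumptions. The point is that element_name and element_key travel on the Message itself, so the frontend can log and render them without a Plugin._lookup() on a unique_id.

import datetime
import os


class Message():
    # Sketch of the reworked constructor: element_name and element_key are
    # optional keyword-only arguments instead of a leading unique_id.
    def __init__(self, message_type, message, *,
                 element_name=None, element_key=None, detail=None):
        self.message_type = message_type
        self.message = message
        self.element_name = element_name  # Full element name of the issuing plugin
        self.element_key = element_key    # Display key of the issuing element
        self.detail = detail
        self.pid = os.getpid()
        self.creation_time = datetime.datetime.now()


def render(message):
    # Frontend-side rendering reads the name straight off the message,
    # with no plugin-table lookup.
    name = message.element_name or "core activity"
    return "{: <30} {: <7}: {}".format(name, message.message_type, message.message)


if __name__ == "__main__":
    msg = Message("STATUS", "Running command",
                  element_name="junction.bst:hello.bst",
                  element_key=("full-cache-key", "abbrev", False))
    print(render(msg))

In the actual patch the display key is set either by the issuing plugin or filled in by the child job's message handler before the message reaches the frontend.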
-rw-r--r--src/buildstream/_basecache.py2
-rw-r--r--src/buildstream/_cas/cascache.py9
-rw-r--r--src/buildstream/_frontend/app.py23
-rw-r--r--src/buildstream/_frontend/widget.py40
-rw-r--r--src/buildstream/_loader/loader.py2
-rw-r--r--src/buildstream/_message.py9
-rw-r--r--src/buildstream/_messenger.py39
-rw-r--r--src/buildstream/_pipeline.py2
-rw-r--r--src/buildstream/_project.py3
-rw-r--r--src/buildstream/_scheduler/jobs/elementjob.py12
-rw-r--r--src/buildstream/_scheduler/jobs/job.py117
-rw-r--r--src/buildstream/_scheduler/queues/queue.py2
-rw-r--r--src/buildstream/_scheduler/scheduler.py11
-rw-r--r--src/buildstream/_state.py9
-rw-r--r--src/buildstream/_stream.py2
-rw-r--r--src/buildstream/plugin.py13
-rw-r--r--src/buildstream/sandbox/_sandboxremote.py2
-rw-r--r--src/buildstream/sandbox/sandbox.py23
-rw-r--r--src/buildstream/source.py7
-rw-r--r--tests/frontend/logging.py2
20 files changed, 173 insertions, 156 deletions
diff --git a/src/buildstream/_basecache.py b/src/buildstream/_basecache.py
index 5d93562fd..739d14b6a 100644
--- a/src/buildstream/_basecache.py
+++ b/src/buildstream/_basecache.py
@@ -248,7 +248,7 @@ class BaseCache():
def _message(self, message_type, message, **kwargs):
args = dict(kwargs)
self.context.messenger.message(
- Message(None, message_type, message, **args))
+ Message(message_type, message, **args))
# _set_remotes():
#
diff --git a/src/buildstream/_cas/cascache.py b/src/buildstream/_cas/cascache.py
index 535d9199e..b5089e36c 100644
--- a/src/buildstream/_cas/cascache.py
+++ b/src/buildstream/_cas/cascache.py
@@ -1262,7 +1262,6 @@ class CASQuota:
available = utils._pretty_size(available_space)
self._message(Message(
- None,
MessageType.WARN,
"Your system does not have enough available " +
"space to support the cache quota specified.",
@@ -1308,7 +1307,7 @@ class CASQuota:
# Start off with an announcement with as much info as possible
volume_size, volume_avail = self._get_cache_volume_size()
self._message(Message(
- None, MessageType.STATUS, "Starting cache cleanup",
+ MessageType.STATUS, "Starting cache cleanup",
detail=("Elements required by the current build plan:\n" + "{}\n" +
"User specified quota: {} ({})\n" +
"Cache usage: {}\n" +
@@ -1324,7 +1323,7 @@ class CASQuota:
# Do a real computation of the cache size once, just in case
self.compute_cache_size()
usage = CASCacheUsage(self)
- self._message(Message(None, MessageType.STATUS,
+ self._message(Message(MessageType.STATUS,
"Cache usage recomputed: {}".format(usage)))
# Collect digests and their remove method
@@ -1344,7 +1343,7 @@ class CASQuota:
space_saved += size
self._message(Message(
- None, MessageType.STATUS,
+ MessageType.STATUS,
"Freed {: <7} {}".format(
utils._pretty_size(size, dec_places=2),
ref)))
@@ -1387,7 +1386,7 @@ class CASQuota:
# Informational message about the side effects of the cleanup
self._message(Message(
- None, MessageType.INFO, "Cleanup completed",
+ MessageType.INFO, "Cleanup completed",
detail=("Removed {} refs and saving {} disk space.\n" +
"Cache usage is now: {}")
.format(removed_ref_count,
diff --git a/src/buildstream/_frontend/app.py b/src/buildstream/_frontend/app.py
index a5588e672..826e797e1 100644
--- a/src/buildstream/_frontend/app.py
+++ b/src/buildstream/_frontend/app.py
@@ -31,7 +31,6 @@ from .. import Scope
# Import various buildstream internals
from .._context import Context
-from ..plugin import Plugin
from .._project import Project
from .._exceptions import BstError, StreamError, LoadError, LoadErrorReason, AppError
from .._message import Message, MessageType, unconditional_messages
@@ -474,7 +473,7 @@ class App():
def _message(self, message_type, message, **kwargs):
args = dict(kwargs)
self.context.messenger.message(
- Message(None, message_type, message, **args))
+ Message(message_type, message, **args))
# Exception handler
#
@@ -559,25 +558,22 @@ class App():
# Args:
# action_name (str): The name of the action being performed,
# same as the task group, if it exists
- # full_name (str): The name of this specific task, e.g. the element name
- # unique_id (int): If an element job failed, the unique ID of the element.
+ # full_name (str): The name of this specific task, e.g. the element full name
+ # element (Element): If an element job failed, the Element instance
#
- def _job_failed(self, action_name, full_name, unique_id=None):
+ def _job_failed(self, action_name, full_name, element=None):
# Dont attempt to handle a failure if the user has already opted to
# terminate
if not self.stream.terminated:
- if unique_id:
+ if element:
# look-up queue
for q in self.stream.queues:
if q.action_name == action_name:
queue = q
assert queue, "Job action {} does not have a corresponding queue".format(action_name)
- # look-up element
- element = Plugin._lookup(unique_id)
-
# Get the last failure message for additional context
- failure = self._fail_messages.get(element._unique_id)
+ failure = self._fail_messages.get(full_name)
# XXX This is dangerous, sometimes we get the job completed *before*
# the failure message reaches us ??
@@ -585,11 +581,12 @@ class App():
self._status.clear()
click.echo("\n\n\nBUG: Message handling out of sync, " +
"unable to retrieve failure message for element {}\n\n\n\n\n"
- .format(element), err=True)
+ .format(full_name), err=True)
else:
self._handle_failure(element, queue, failure)
else:
+ # Not an element_job, we don't handle the failure
click.echo("\nTerminating all jobs\n", err=True)
self.stream.terminate()
@@ -739,8 +736,8 @@ class App():
return
# Hold on to the failure messages
- if message.message_type in [MessageType.FAIL, MessageType.BUG] and message.unique_id is not None:
- self._fail_messages[message.unique_id] = message
+ if message.message_type in [MessageType.FAIL, MessageType.BUG] and message.element_name is not None:
+ self._fail_messages[message.element_name] = message
# Send to frontend if appropriate
if is_silenced and (message.message_type not in unconditional_messages):
diff --git a/src/buildstream/_frontend/widget.py b/src/buildstream/_frontend/widget.py
index fbde249a9..31f69a539 100644
--- a/src/buildstream/_frontend/widget.py
+++ b/src/buildstream/_frontend/widget.py
@@ -27,11 +27,10 @@ from ruamel import yaml
import click
from .profile import Profile
-from .. import Element, Consistency, Scope
+from .. import Consistency, Scope
from .. import __version__ as bst_version
from .._exceptions import ImplError
from .._message import MessageType
-from ..plugin import Plugin
# These messages are printed a bit differently
@@ -110,12 +109,12 @@ class WallclockTime(Widget):
class Debug(Widget):
def render(self, message):
- unique_id = 0 if message.unique_id is None else message.unique_id
+ element_name = "n/a" if message.element_name is None else message.element_name
text = self.format_profile.fmt('pid:')
text += self.content_profile.fmt("{: <5}".format(message.pid))
- text += self.format_profile.fmt(" id:")
- text += self.content_profile.fmt("{:0>3}".format(unique_id))
+ text += self.format_profile.fmt("element name:")
+ text += self.content_profile.fmt("{: <30}".format(element_name))
return text
@@ -181,11 +180,9 @@ class ElementName(Widget):
def render(self, message):
action_name = message.action_name
- element_id = message.task_id or message.unique_id
- if element_id is not None:
- plugin = Plugin._lookup(element_id)
- name = plugin._get_full_name()
- name = '{: <30}'.format(name)
+ element_name = message.element_name
+ if element_name is not None:
+ name = '{: <30}'.format(element_name)
else:
name = 'core activity'
name = '{: <30}'.format(name)
@@ -215,18 +212,16 @@ class CacheKey(Widget):
def render(self, message):
- element_id = message.task_id or message.unique_id
if not self._key_length:
return ""
- if element_id is None:
+ if message.element_name is None:
return ' ' * self._key_length
missing = False
key = ' ' * self._key_length
- plugin = Plugin._lookup(element_id)
- if isinstance(plugin, Element):
- _, key, missing = plugin._get_display_key()
+ if message.element_key:
+ _, key, missing = message.element_key
if message.message_type in ERROR_MESSAGES:
text = self._err_profile.fmt(key)
@@ -557,12 +552,12 @@ class LogLine(Widget):
if self._failure_messages:
values = OrderedDict()
- for element, messages in sorted(self._failure_messages.items(), key=lambda x: x[0].name):
+ for element_name, messages in sorted(self._failure_messages.items()):
for group in self._state.task_groups.values():
# Exclude the failure messages if the job didn't ultimately fail
# (e.g. succeeded on retry)
- if element.name in group.failed_tasks:
- values[element.name] = ''.join(self._render(v) for v in messages)
+ if element_name in group.failed_tasks:
+ values[element_name] = ''.join(self._render(v) for v in messages)
if values:
text += self.content_profile.fmt("Failure Summary\n", bold=True)
@@ -616,10 +611,9 @@ class LogLine(Widget):
def render(self, message):
# Track logfiles for later use
- element_id = message.task_id or message.unique_id
- if message.message_type in ERROR_MESSAGES and element_id is not None:
- plugin = Plugin._lookup(element_id)
- self._failure_messages[plugin].append(message)
+ element_name = message.element_name
+ if message.message_type in ERROR_MESSAGES and element_name is not None:
+ self._failure_messages[element_name].append(message)
return self._render(message)
@@ -666,7 +660,7 @@ class LogLine(Widget):
if message.detail:
# Identify frontend messages, we never abbreviate these
- frontend_message = not (message.task_id or message.unique_id)
+ frontend_message = not message.element_name
# Split and truncate message detail down to message_lines lines
lines = message.detail.splitlines(True)
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index 4b66288ca..061d28bf0 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -679,7 +679,7 @@ class Loader():
if self.project._warning_is_fatal(warning_token):
raise LoadError(brief, warning_token)
- message = Message(None, MessageType.WARN, brief)
+ message = Message(MessageType.WARN, brief)
self._context.messenger.message(message)
# Print warning messages if any of the specified elements have invalid names.
diff --git a/src/buildstream/_message.py b/src/buildstream/_message.py
index 7f1a939d2..195eba679 100644
--- a/src/buildstream/_message.py
+++ b/src/buildstream/_message.py
@@ -54,8 +54,9 @@ unconditional_messages = [
#
class Message():
- def __init__(self, unique_id, message_type, message,
- task_id=None,
+ def __init__(self, message_type, message, *,
+ element_name=None,
+ element_key=None,
detail=None,
action_name=None,
elapsed=None,
@@ -64,14 +65,14 @@ class Message():
scheduler=False):
self.message_type = message_type # Message type
self.message = message # The message string
+ self.element_name = element_name # The instance element name of the issuing plugin
+ self.element_key = element_key # The display key of the issuing plugin element
self.detail = detail # An additional detail string
self.action_name = action_name # Name of the task queue (fetch, refresh, build, etc)
self.elapsed = elapsed # The elapsed time, in timed messages
self.logfile = logfile # The log file path where commands took place
self.sandbox = sandbox # The error that caused this message used a sandbox
self.pid = os.getpid() # The process pid
- self.unique_id = unique_id # The plugin object ID issueing the message
- self.task_id = task_id # The plugin object ID of the task
self.scheduler = scheduler # Whether this is a scheduler level message
self.creation_time = datetime.datetime.now()
if message_type in (MessageType.SUCCESS, MessageType.FAIL):
diff --git a/src/buildstream/_messenger.py b/src/buildstream/_messenger.py
index d768abf0c..be5f12c0a 100644
--- a/src/buildstream/_messenger.py
+++ b/src/buildstream/_messenger.py
@@ -25,7 +25,6 @@ from . import _signals
from . import utils
from ._exceptions import BstError
from ._message import Message, MessageType
-from .plugin import Plugin
_RENDER_INTERVAL = datetime.timedelta(seconds=1)
@@ -149,16 +148,16 @@ class Messenger():
#
# Args:
# activity_name (str): The name of the activity
- # unique_id (int): Optionally, the unique id of the plugin related to the message
+ # element_name (str): Optionally, the element full name of the plugin related to the message
# detail (str): An optional detailed message, can be multiline output
# silent_nested (bool): If True, all but _message.unconditional_messages are silenced
#
@contextmanager
- def timed_activity(self, activity_name, *, unique_id=None, detail=None, silent_nested=False):
+ def timed_activity(self, activity_name, *, element_name=None, detail=None, silent_nested=False):
with self._timed_suspendable() as timedata:
try:
# Push activity depth for status messages
- message = Message(unique_id, MessageType.START, activity_name, detail=detail)
+ message = Message(MessageType.START, activity_name, detail=detail, element_name=element_name)
self.message(message)
with self.silence(actually_silence=silent_nested):
yield
@@ -167,12 +166,12 @@ class Messenger():
# Note the failure in status messages and reraise, the scheduler
# expects an error when there is an error.
elapsed = datetime.datetime.now() - timedata.start_time
- message = Message(unique_id, MessageType.FAIL, activity_name, elapsed=elapsed)
+ message = Message(MessageType.FAIL, activity_name, elapsed=elapsed, element_name=element_name)
self.message(message)
raise
elapsed = datetime.datetime.now() - timedata.start_time
- message = Message(unique_id, MessageType.SUCCESS, activity_name, elapsed=elapsed)
+ message = Message(MessageType.SUCCESS, activity_name, elapsed=elapsed, element_name=element_name)
self.message(message)
# simple_task()
@@ -181,7 +180,7 @@ class Messenger():
#
# Args:
# activity_name (str): The name of the activity
- # unique_id (int): Optionally, the unique id of the plugin related to the message
+ # element_name (str): Optionally, the element full name of the plugin related to the message
# full_name (str): Optionally, the distinguishing name of the activity, e.g. element name
# silent_nested (bool): If True, all but _message.unconditional_messages are silenced
#
@@ -189,10 +188,10 @@ class Messenger():
# Task: A Task object that represents this activity, principally used to report progress
#
@contextmanager
- def simple_task(self, activity_name, *, unique_id=None, full_name=None, silent_nested=False):
+ def simple_task(self, activity_name, *, element_name=None, full_name=None, silent_nested=False):
# Bypass use of State when none exists (e.g. tests)
if not self._state:
- with self.timed_activity(activity_name, unique_id=unique_id, silent_nested=silent_nested):
+ with self.timed_activity(activity_name, element_name=element_name, silent_nested=silent_nested):
yield
return
@@ -201,7 +200,7 @@ class Messenger():
with self._timed_suspendable() as timedata:
try:
- message = Message(unique_id, MessageType.START, activity_name)
+ message = Message(MessageType.START, activity_name, element_name=element_name)
self.message(message)
task = self._state.add_task(activity_name, full_name)
@@ -215,7 +214,7 @@ class Messenger():
except BstError:
elapsed = datetime.datetime.now() - timedata.start_time
- message = Message(unique_id, MessageType.FAIL, activity_name, elapsed=elapsed)
+ message = Message(MessageType.FAIL, activity_name, elapsed=elapsed, element_name=element_name)
self.message(message)
raise
finally:
@@ -232,7 +231,8 @@ class Messenger():
detail = "{} subtasks processed".format(task.current_progress)
else:
detail = None
- message = Message(unique_id, MessageType.SUCCESS, activity_name, elapsed=elapsed, detail=detail)
+ message = Message(MessageType.SUCCESS, activity_name, elapsed=elapsed, detail=detail,
+ element_name=element_name)
self.message(message)
# recorded_messages()
@@ -336,13 +336,12 @@ class Messenger():
EMPTYTIME = "--:--:--"
template = "[{timecode: <8}] {type: <7}"
- # If this message is associated with a plugin, print what
- # we know about the plugin.
- plugin_name = ""
- if message.unique_id:
- template += " {plugin}"
- plugin = Plugin._lookup(message.unique_id)
- plugin_name = plugin.name
+ # If this message is associated with an element or source plugin, print the
+ # full element name of the instance.
+ element_name = ""
+ if message.element_name:
+ template += " {element_name}"
+ element_name = message.element_name
template += ": {message}"
@@ -359,7 +358,7 @@ class Messenger():
timecode = "{0:02d}:{1:02d}:{2:02d}".format(hours, minutes, seconds)
text = template.format(timecode=timecode,
- plugin=plugin_name,
+ element_name=element_name,
type=message.message_type.upper(),
message=message.message,
detail=detail)
diff --git a/src/buildstream/_pipeline.py b/src/buildstream/_pipeline.py
index af60ffde5..b6896b3de 100644
--- a/src/buildstream/_pipeline.py
+++ b/src/buildstream/_pipeline.py
@@ -485,7 +485,7 @@ class Pipeline():
def _message(self, message_type, message, **kwargs):
args = dict(kwargs)
self._context.messenger.message(
- Message(None, message_type, message, **args))
+ Message(message_type, message, **args))
# _Planner()
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index dff101582..d3bbc3939 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -459,7 +459,7 @@ class Project():
]
detail += "\n".join(lines)
self._context.messenger.message(
- Message(None, MessageType.WARN, "Ignoring redundant source references", detail=detail))
+ Message(MessageType.WARN, "Ignoring redundant source references", detail=detail))
return elements
@@ -687,7 +687,6 @@ class Project():
if not fail_on_overlap.is_none():
self._context.messenger.message(
Message(
- None,
MessageType.WARN,
"Use of fail-on-overlap within project.conf " +
"is deprecated. Consider using fatal-warnings instead."
diff --git a/src/buildstream/_scheduler/jobs/elementjob.py b/src/buildstream/_scheduler/jobs/elementjob.py
index 138448685..246eb75c6 100644
--- a/src/buildstream/_scheduler/jobs/elementjob.py
+++ b/src/buildstream/_scheduler/jobs/elementjob.py
@@ -69,17 +69,13 @@ class ElementJob(Job):
super().__init__(*args, **kwargs)
self.set_name(element._get_full_name())
self.queue = queue
- self._element = element
+ self._element = element # Set the Element pertaining to the job
self._action_cb = action_cb # The action callable function
self._complete_cb = complete_cb # The complete callable function
- # Set the ID for logging purposes
- self.set_message_unique_id(element._unique_id)
- self.set_task_id(element._unique_id)
-
- @property
- def element(self):
- return self._element
+ # Set the plugin element name & key for logging purposes
+ self.set_message_element_name(self.name)
+ self.set_message_element_key(self._element._get_display_key())
def parent_complete(self, status, result):
self._complete_cb(self, self._element, status, self._result)
diff --git a/src/buildstream/_scheduler/jobs/job.py b/src/buildstream/_scheduler/jobs/job.py
index 2e6c7bb32..32c5559fb 100644
--- a/src/buildstream/_scheduler/jobs/job.py
+++ b/src/buildstream/_scheduler/jobs/job.py
@@ -147,8 +147,9 @@ class Job():
self._terminated = False # Whether this job has been explicitly terminated
self._logfile = logfile
- self._message_unique_id = None
- self._task_id = None
+ self._message_element_name = None # The plugin instance element name for messaging
+ self._message_element_key = None # The element key for messaging
+ self._element = None # The Element() passed to the Job() constructor, if applicable
# set_name()
#
@@ -174,8 +175,8 @@ class Job():
self._logfile,
self._max_retries,
self._tries,
- self._message_unique_id,
- self._task_id,
+ self._message_element_name,
+ self._message_element_key
)
# Make sure that picklability doesn't break, by exercising it during
@@ -311,56 +312,62 @@ class Job():
os.kill(self._process.pid, signal.SIGCONT)
self._suspended = False
- # set_message_unique_id()
+ # set_message_element_name()
#
- # This is called by Job subclasses to set the plugin ID
- # issuing the message (if an element is related to the Job).
+ # This is called by Job subclasses to set the name of the plugin instance
+ # element issuing the message (if an element is related to the Job).
#
# Args:
- # unique_id (int): The id to be supplied to the Message() constructor
+ # element_name (str): The element_name to be supplied to the Message() constructor
#
- def set_message_unique_id(self, unique_id):
- self._message_unique_id = unique_id
+ def set_message_element_name(self, element_name):
+ self._message_element_name = element_name
- # set_task_id()
+ # set_message_element_key()
#
- # This is called by Job subclasses to set a plugin ID
- # associated with the task at large (if any element is related
- # to the task).
- #
- # This will only be used in the child process running the task.
- #
- # The task ID helps keep messages in the frontend coherent
- # in the case that multiple plugins log in the context of
- # a single task (e.g. running integration commands should appear
- # in the frontend for the element being built, not the element
- # running the integration commands).
+ # This is called by Job subclasses to set the element
+ # key for the issuing message (if an element is related to the Job).
#
# Args:
- # task_id (int): The plugin identifier for this task
+ # element_key (tuple): The element_key tuple to be supplied to the Message() constructor
#
- def set_task_id(self, task_id):
- self._task_id = task_id
+ def set_message_element_key(self, element_key):
+ self._message_element_key = element_key
# message():
#
# Logs a message, this will be logged in the task's logfile and
# conditionally also be sent to the frontend.
#
+ # XXX: Note that no calls to message() currently override the default
+ # name & key (previously unique_id), so the override could potentially be removed.
+ #
# Args:
# message_type (MessageType): The type of message to send
# message (str): The message
# kwargs: Remaining Message() constructor arguments, note that you can
- # override 'unique_id' this way.
+ # override 'element_name' and 'element_key' this way.
#
- def message(self, message_type, message, **kwargs):
+ def message(self, message_type, message, element_name=None, element_key=None, **kwargs):
kwargs['scheduler'] = True
- unique_id = self._message_unique_id
- if "unique_id" in kwargs:
- unique_id = kwargs["unique_id"]
- del kwargs["unique_id"]
+ # If default name & key values not provided, set as given job attributes
+ if element_name is None:
+ element_name = self._message_element_name
+ if element_key is None:
+ element_key = self._message_element_key
self._scheduler.context.messenger.message(
- Message(unique_id, message_type, message, **kwargs))
+ Message(message_type, message, element_name=element_name, element_key=element_key, **kwargs))
+
+ # get_element()
+ #
+ # Get the Element() related to the job, if the job type (i.e. ElementJob)
+ # provides one; otherwise None.
+ #
+ # Returns:
+ # (Element): The Element() instance pertaining to the Job, else None.
+ #
+ def get_element(self):
+ return self._element
#######################################################
# Abstract Methods #
@@ -573,13 +580,16 @@ class Job():
# that should be used - should contain {pid}.
# max_retries (int): The maximum number of retries.
# tries (int): The number of retries so far.
-# message_unique_id (int): None, or the id to be supplied to the Message() constructor.
-# task_id (int): None, or the plugin identifier for this job.
+# message_element_name (str): None, or the plugin instance element name
+# to be supplied to the Message() constructor.
+# message_element_key (tuple): None, or the element display key tuple
+# to be supplied to the Message() constructor.
#
class ChildJob():
def __init__(
- self, action_name, messenger, logdir, logfile, max_retries, tries, message_unique_id, task_id):
+ self, action_name, messenger, logdir, logfile, max_retries, tries,
+ message_element_name, message_element_key):
self.action_name = action_name
@@ -588,8 +598,8 @@ class ChildJob():
self._logfile = logfile
self._max_retries = max_retries
self._tries = tries
- self._message_unique_id = message_unique_id
- self._task_id = task_id
+ self._message_element_name = message_element_name
+ self._message_element_key = message_element_key
self._queue = None
@@ -598,20 +608,26 @@ class ChildJob():
# Logs a message, this will be logged in the task's logfile and
# conditionally also be sent to the frontend.
#
+ # XXX: Note that no calls to message() currently override the default
+ # name & key (previously unique_id), so the override could potentially be removed.
+ #
# Args:
# message_type (MessageType): The type of message to send
# message (str): The message
- # kwargs: Remaining Message() constructor arguments, note that you can
- # override 'unique_id' this way.
+ # kwargs: Remaining Message() constructor arguments, note
+ # element_key is set in _child_message_handler
+ # for frontend display if not already set or explicitly
+ # overridden here.
#
- def message(self, message_type, message, **kwargs):
+ def message(self, message_type, message, element_name=None, element_key=None, **kwargs):
kwargs['scheduler'] = True
- unique_id = self._message_unique_id
- if "unique_id" in kwargs:
- unique_id = kwargs["unique_id"]
- del kwargs["unique_id"]
- self._messenger.message(
- Message(unique_id, message_type, message, **kwargs))
+ # If default name & key values not provided, set as given job attributes
+ if element_name is None:
+ element_name = self._message_element_name
+ if element_key is None:
+ element_key = self._message_element_key
+ self._messenger.message(Message(message_type, message, element_name=element_name,
+ element_key=element_key, **kwargs))
# send_message()
#
@@ -844,6 +860,8 @@ class ChildJob():
# frontend's main message handler in the context of a child task
# and performs local logging to the local log file before sending
# the message back to the parent process for further propagation.
+ # For an element ChildJob, the related element display key is added
+ # to the message for widget rendering, if not already set.
#
# Args:
# message (Message): The message to log
@@ -852,7 +870,12 @@ class ChildJob():
def _child_message_handler(self, message, is_silenced):
message.action_name = self.action_name
- message.task_id = self._task_id
+
+ # If no key has been set at this point, and the element job has
+ # a related key, set it. This is needed for messages going
+ # straight to the message handler from the child process.
+ if message.element_key is None and self._message_element_key:
+ message.element_key = self._message_element_key
# Send to frontend if appropriate
if is_silenced and (message.message_type not in unconditional_messages):
diff --git a/src/buildstream/_scheduler/queues/queue.py b/src/buildstream/_scheduler/queues/queue.py
index 79f1fa44f..8c81ce21d 100644
--- a/src/buildstream/_scheduler/queues/queue.py
+++ b/src/buildstream/_scheduler/queues/queue.py
@@ -341,7 +341,7 @@ class Queue():
# a message for the element they are processing
def _message(self, element, message_type, brief, **kwargs):
context = element._get_context()
- message = Message(element._unique_id, message_type, brief, **kwargs)
+ message = Message(message_type, brief, element_name=element._get_full_name(), **kwargs)
context.messenger.message(message)
def _element_log_path(self, element):
diff --git a/src/buildstream/_scheduler/scheduler.py b/src/buildstream/_scheduler/scheduler.py
index 2dea1d48b..9d7cf5d09 100644
--- a/src/buildstream/_scheduler/scheduler.py
+++ b/src/buildstream/_scheduler/scheduler.py
@@ -28,7 +28,7 @@ from contextlib import contextmanager
# Local imports
from .resources import Resources, ResourceType
-from .jobs import JobStatus, CacheSizeJob, CleanupJob, ElementJob
+from .jobs import JobStatus, CacheSizeJob, CleanupJob
from .._profile import Topics, PROFILER
@@ -253,10 +253,11 @@ class Scheduler():
self._state.remove_task(job.action_name, job.name)
if status == JobStatus.FAIL:
- unique_id = None
- if isinstance(job, ElementJob):
- unique_id = job._element._unique_id
- self._state.fail_task(job.action_name, job.name, unique_id)
+ # If it's an element job, we want to compare against the failure messages
+ # and send the Element() instance. Note this will change if the frontend
+ # is run in a separate process, for pickling reasons.
+ element = job.get_element()
+ self._state.fail_task(job.action_name, job.name, element=element)
# Now check for more jobs
self._sched()
diff --git a/src/buildstream/_state.py b/src/buildstream/_state.py
index 388ed8151..a4f767de8 100644
--- a/src/buildstream/_state.py
+++ b/src/buildstream/_state.py
@@ -208,8 +208,7 @@ class State():
# full_name (str): The full name of the task, distinguishing
# it from other tasks with the same action name
# e.g. an element's name.
- # unique_id (int): (optionally) the element's unique ID, if the failure
- # came from an element
+ # element (Element): (optionally) The Element instance, if an element job failed.
#
def register_task_failed_callback(self, callback):
self._task_failed_cbs.append(callback)
@@ -324,11 +323,11 @@ class State():
# full_name (str): The full name of the task, distinguishing
# it from other tasks with the same action name
# e.g. an element's name.
- # unique_id (int): (optionally) the element's unique ID, if the failure came from an element
+ # element (Element): (optionally) The element instance, if an element job failed
#
- def fail_task(self, action_name, full_name, unique_id=None):
+ def fail_task(self, action_name, full_name, element=None):
for cb in self._task_failed_cbs:
- cb(action_name, full_name, unique_id)
+ cb(action_name, full_name, element)
# _Task
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index cbd635af7..410b601b8 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -1235,7 +1235,7 @@ class Stream():
def _message(self, message_type, message, **kwargs):
args = dict(kwargs)
self._context.messenger.message(
- Message(None, message_type, message, **args))
+ Message(message_type, message, **args))
# _add_queue()
#
diff --git a/src/buildstream/plugin.py b/src/buildstream/plugin.py
index 3cce0fbf1..5783eb270 100644
--- a/src/buildstream/plugin.py
+++ b/src/buildstream/plugin.py
@@ -428,6 +428,9 @@ class Plugin():
Note: Informative messages tell the user something they might want
to know, like if refreshing an element caused it to change.
+ The instance full name of the plugin will be generated with the
+ message, this being the name of the given element, as opposed to
+ the class name of the underlying plugin __kind identifier.
"""
self.__message(MessageType.INFO, brief, detail=detail)
@@ -491,7 +494,7 @@ class Plugin():
self.call(... command which takes time ...)
"""
with self.__context.messenger.timed_activity(activity_name,
- unique_id=self._unique_id,
+ element_name=self._get_full_name(),
detail=detail,
silent_nested=silent_nested):
yield
@@ -733,7 +736,7 @@ class Plugin():
return (exit_code, output)
def __message(self, message_type, brief, **kwargs):
- message = Message(self._unique_id, message_type, brief, **kwargs)
+ message = Message(message_type, brief, element_name=self._get_full_name(), **kwargs)
self.__context.messenger.message(message)
def __note_command(self, output, *popenargs, **kwargs):
@@ -761,10 +764,12 @@ class Plugin():
def __get_full_name(self):
project = self.__project
+ # Set the name, depending on element or source plugin type
+ name = self._element_name if self.__type_tag == "source" else self.name # pylint: disable=no-member
if project.junction:
- return '{}:{}'.format(project.junction.name, self.name)
+ return '{}:{}'.format(project.junction.name, name)
else:
- return self.name
+ return name
# A local table for _prefix_warning()
diff --git a/src/buildstream/sandbox/_sandboxremote.py b/src/buildstream/sandbox/_sandboxremote.py
index 8c9515497..affe597dd 100644
--- a/src/buildstream/sandbox/_sandboxremote.py
+++ b/src/buildstream/sandbox/_sandboxremote.py
@@ -106,7 +106,7 @@ class SandboxRemote(Sandbox):
self.operation_name = None
def info(self, msg):
- self._get_context().messenger.message(Message(None, MessageType.INFO, msg))
+ self._get_context().messenger.message(Message(MessageType.INFO, msg))
@staticmethod
def specs_from_config_node(config_node, basedir=None):
diff --git a/src/buildstream/sandbox/sandbox.py b/src/buildstream/sandbox/sandbox.py
index 8ff0aafa2..6956a7d59 100644
--- a/src/buildstream/sandbox/sandbox.py
+++ b/src/buildstream/sandbox/sandbox.py
@@ -120,12 +120,12 @@ class Sandbox():
self.__allow_real_directory = kwargs['allow_real_directory']
self.__allow_run = True
- # Plugin ID for logging
+ # Plugin element full name for logging
plugin = kwargs.get('plugin', None)
if plugin:
- self.__plugin_id = plugin._unique_id
+ self.__element_name = plugin._get_full_name()
else:
- self.__plugin_id = None
+ self.__element_name = None
# Configuration from kwargs common to all subclasses
self.__config = kwargs['config']
@@ -574,12 +574,12 @@ class Sandbox():
return False
- # _get_plugin_id()
+ # _get_element_name()
#
- # Get the plugin's unique identifier
+ # Get the plugin's element full name
#
- def _get_plugin_id(self):
- return self.__plugin_id
+ def _get_element_name(self):
+ return self.__element_name
# _callback()
#
@@ -633,8 +633,7 @@ class Sandbox():
# details (str): optional, more detatils
def _issue_warning(self, message, detail=None):
self.__context.messenger.message(
- Message(None,
- MessageType.WARN,
+ Message(MessageType.WARN,
message,
detail=detail
)
@@ -660,7 +659,7 @@ class _SandboxBatch():
def execute_group(self, group):
if group.label:
context = self.sandbox._get_context()
- cm = context.messenger.timed_activity(group.label, unique_id=self.sandbox._get_plugin_id())
+ cm = context.messenger.timed_activity(group.label, element_name=self.sandbox._get_element_name())
else:
cm = contextlib.suppress()
@@ -670,8 +669,8 @@ class _SandboxBatch():
def execute_command(self, command):
if command.label:
context = self.sandbox._get_context()
- message = Message(self.sandbox._get_plugin_id(), MessageType.STATUS,
- 'Running command', detail=command.label)
+ message = Message(MessageType.STATUS, 'Running command', detail=command.label,
+ element_name=self.sandbox._get_element_name())
context.messenger.message(message)
exitcode = self.sandbox._run(command.command, self.flags, cwd=command.cwd, env=command.env)
diff --git a/src/buildstream/source.py b/src/buildstream/source.py
index b513fdb2a..c5fb61415 100644
--- a/src/buildstream/source.py
+++ b/src/buildstream/source.py
@@ -308,10 +308,11 @@ class Source(Plugin):
def __init__(self, context, project, meta, *, alias_override=None, unique_id=None):
provenance = meta.config.get_provenance()
+ # Set element_name member before parent init, as needed for debug messaging
+ self.__element_name = meta.element_name # The name of the element owning this source
super().__init__("{}-{}".format(meta.element_name, meta.element_index),
context, project, provenance, "source", unique_id=unique_id)
- self.__element_name = meta.element_name # The name of the element owning this source
self.__element_index = meta.element_index # The index of the source in the owning element's source list
self.__element_kind = meta.element_kind # The kind of the element owning this source
self.__directory = meta.directory # Staging relative directory
@@ -1076,6 +1077,10 @@ class Source(Plugin):
length = min(len(key), context.log_key_length)
return key[:length]
+ @property
+ def _element_name(self):
+ return self.__element_name
+
# _get_args_for_child_job_pickling(self)
#
# Return data necessary to reconstruct this object in a child job process.
diff --git a/tests/frontend/logging.py b/tests/frontend/logging.py
index 6a17bf771..462af821f 100644
--- a/tests/frontend/logging.py
+++ b/tests/frontend/logging.py
@@ -124,7 +124,7 @@ def test_failed_build_listing(cli, datafiles):
assert m.end() in failure_summary_range
assert len(matches) == 3 # each element should be matched once.
- # Note that if we mess up the 'unique_id' of Messages, they won't be printed
+ # Note that if we mess up the 'element_name' of Messages, they won't be printed
# with the name of the relevant element, e.g. 'testfail-1.bst'. Check that
# they have the name as expected.
pattern = r"\[..:..:..\] FAILURE testfail-.\.bst: Staged artifacts do not provide command 'sh'"