From 485f55319f5cfcb5cb76a05e67f01137cde2ea86 Mon Sep 17 00:00:00 2001 From: Angelos Evripiotis Date: Thu, 17 Oct 2019 14:56:13 +0100 Subject: element: remove double MetaSource import It turns out we don't even need it once. --- src/buildstream/element.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/buildstream/element.py b/src/buildstream/element.py index 60eec85d0..5c3cc804b 100644 --- a/src/buildstream/element.py +++ b/src/buildstream/element.py @@ -121,8 +121,6 @@ if TYPE_CHECKING: from .source import Source from ._context import Context from ._loader.metaelement import MetaElement - from ._loader.metasource import MetaSource - from ._loader.metasource import MetaSource from ._project import Project # pylint: enable=cyclic-import -- cgit v1.2.1 From 04e1c533e6a25f4136dfc95c05b29047c94efeab Mon Sep 17 00:00:00 2001 From: Angelos Evripiotis Date: Thu, 17 Oct 2019 10:13:41 +0100 Subject: job pickling: plugins don't return their factories Remove the need for plugins to find and return the factory they came from. Also take the opportunity to combine source and element pickling into a single 'plugin' pickling path. This will make it easier for us to later support pickling plugins from the 'first_pass_config' of projects. --- src/buildstream/_scheduler/jobs/jobpickler.py | 36 ++++++++++++++++----------- src/buildstream/element.py | 12 ++++----- src/buildstream/plugin.py | 9 +++---- src/buildstream/source.py | 13 ++++------ 4 files changed, 35 insertions(+), 35 deletions(-) diff --git a/src/buildstream/_scheduler/jobs/jobpickler.py b/src/buildstream/_scheduler/jobs/jobpickler.py index 6a4a8213b..82900eb5b 100644 --- a/src/buildstream/_scheduler/jobs/jobpickler.py +++ b/src/buildstream/_scheduler/jobs/jobpickler.py @@ -79,26 +79,30 @@ _PROTO_CLASS_TO_NAME = { # def pickle_child_job(child_job, projects): - element_classes = [ - cls + factory_list = [ + factory for p in projects - if p.config.element_factory is not None - for cls, _ in p.config.element_factory.all_loaded_plugins() - ] - source_classes = [ - cls - for p in projects - if p.config.source_factory is not None - for cls, _ in p.config.source_factory.all_loaded_plugins() + for factory in [ + p.config.element_factory, + p.config.source_factory, + ] ] + plugin_class_to_factory = { + cls: factory + for factory in factory_list + if factory is not None + for cls, _ in factory.all_loaded_plugins() + } + data = io.BytesIO() pickler = pickle.Pickler(data) pickler.dispatch_table = copyreg.dispatch_table.copy() - for cls in element_classes: - pickler.dispatch_table[cls] = _reduce_plugin - for cls in source_classes: + def _reduce_plugin(plugin): + return _reduce_plugin_with_factory_dict(plugin, plugin_class_to_factory) + + for cls in plugin_class_to_factory: pickler.dispatch_table[cls] = _reduce_plugin pickler.dispatch_table[ArtifactProto] = _reduce_proto pickler.dispatch_table[DigestProto] = _reduce_proto @@ -130,8 +134,10 @@ def _new_proto_from_reduction_args(name, data): return instance -def _reduce_plugin(plugin): - factory, meta_kind, state = plugin._get_args_for_child_job_pickling() +def _reduce_plugin_with_factory_dict(plugin, plugin_class_to_factory): + meta_kind, state = plugin._get_args_for_child_job_pickling() + assert meta_kind + factory = plugin_class_to_factory[type(plugin)] args = (factory, meta_kind) return (_new_plugin_from_reduction_args, args, state) diff --git a/src/buildstream/element.py b/src/buildstream/element.py index 5c3cc804b..dac6eb1f2 100644 --- a/src/buildstream/element.py +++ b/src/buildstream/element.py @@ 
-2306,11 +2306,10 @@ class Element(Plugin): # Return data necessary to reconstruct this object in a child job process. # # Returns: - # (PluginContext, str, dict): A tuple of (factory, meta_kind, state), - # where `factory` is an object that can use `meta_kind` to create an - # instance of the same type as `self`. `state` is what we want - # `self.__dict__` to be restored to after instantiation in the child - # process. + # (str, dict): A tuple of (meta_kind, state), where a factory can use + # `meta_kind` to create an instance of the same type as `self`. `state` + # is what we want `self.__dict__` to be restored to after instantiation + # in the child process. # def _get_args_for_child_job_pickling(self): state = self.__dict__.copy() @@ -2334,8 +2333,7 @@ class Element(Plugin): # let us know, and we will need to update accordingly. del state["_Element__required_callback"] - factory = self._get_project().config.element_factory - return factory, self.__meta_kind, state + return self.__meta_kind, state def _walk_artifact_files(self): yield from self.__artifact.get_files().walk() diff --git a/src/buildstream/plugin.py b/src/buildstream/plugin.py index e269a4b1a..c1ee333f7 100644 --- a/src/buildstream/plugin.py +++ b/src/buildstream/plugin.py @@ -712,11 +712,10 @@ class Plugin(): # Return data necessary to reconstruct this object in a child job process. # # Returns: - # (PluginContext, str, dict): A tuple of (factory, meta_kind, state), - # where `factory` is an object that can use `meta_kind` to create an - # instance of the same type as `self`. `state` is what we want - # `self.__dict__` to be restored to after instantiation in the child - # process. + # (str, dict): A tuple of (meta_kind, state), where a factory can use + # `meta_kind` to create an instance of the same type as `self`. `state` + # is what we want `self.__dict__` to be restored to after instantiation + # in the child process. # def _get_args_for_child_job_pickling(self): # Note that this is only to be implemented as a BuildStream internal, diff --git a/src/buildstream/source.py b/src/buildstream/source.py index 7fc2e9fc0..a2b566a05 100644 --- a/src/buildstream/source.py +++ b/src/buildstream/source.py @@ -1113,15 +1113,12 @@ class Source(Plugin): # Return data necessary to reconstruct this object in a child job process. # # Returns: - # (PluginContext, str, dict): A tuple of (factory, meta_kind, state), - # where `factory` is an object that can use `meta_kind` to create an - # instance of the same type as `self`. `state` is what we want - # `self.__dict__` to be restored to after instantiation in the child - # process. + # (str, dict): A tuple of (meta_kind, state), where a factory can use + # `meta_kind` to create an instance of the same type as `self`. `state` + # is what we want `self.__dict__` to be restored to after instantiation + # in the child process. 
# def _get_args_for_child_job_pickling(self): - factory = self._get_project().config.source_factory - # In case you're wondering, note that it doesn't seem to be necessary # to make a copy of `self.__dict__` here, because: # @@ -1131,7 +1128,7 @@ class Source(Plugin): # o The code sketch of how pickling works also returns `self.__dict__`: # https://docs.python.org/3/library/pickle.html#pickling-class-instances # - return factory, self.__meta_kind, self.__dict__ + return self.__meta_kind, self.__dict__ ############################################################# # Local Private Methods # -- cgit v1.2.1 From a125bc07d3fec9d0a9aad52fc167ff6744d8f396 Mon Sep 17 00:00:00 2001 From: Angelos Evripiotis Date: Thu, 17 Oct 2019 14:28:45 +0100 Subject: job pickling: pickle first_pass_config factories Note that for multiple-pass setups, i.e. where we have junctions, we also have to pickle things that belong to the 'first_pass_config'. --- src/buildstream/_scheduler/jobs/jobpickler.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/buildstream/_scheduler/jobs/jobpickler.py b/src/buildstream/_scheduler/jobs/jobpickler.py index 82900eb5b..d514e87ae 100644 --- a/src/buildstream/_scheduler/jobs/jobpickler.py +++ b/src/buildstream/_scheduler/jobs/jobpickler.py @@ -84,7 +84,9 @@ def pickle_child_job(child_job, projects): for p in projects for factory in [ p.config.element_factory, + p.first_pass_config.element_factory, p.config.source_factory, + p.first_pass_config.source_factory, ] ] @@ -99,11 +101,11 @@ def pickle_child_job(child_job, projects): pickler = pickle.Pickler(data) pickler.dispatch_table = copyreg.dispatch_table.copy() - def _reduce_plugin(plugin): + def reduce_plugin(plugin): return _reduce_plugin_with_factory_dict(plugin, plugin_class_to_factory) for cls in plugin_class_to_factory: - pickler.dispatch_table[cls] = _reduce_plugin + pickler.dispatch_table[cls] = reduce_plugin pickler.dispatch_table[ArtifactProto] = _reduce_proto pickler.dispatch_table[DigestProto] = _reduce_proto pickler.dispatch_table[Loader] = _reduce_object -- cgit v1.2.1 From 299ed94a3f602794165bd366c320f48cf139d1b6 Mon Sep 17 00:00:00 2001 From: Angelos Evripiotis Date: Thu, 17 Oct 2019 14:11:14 +0100 Subject: .gitlab-ci: test-spawn, enable tests/format/* --- .gitlab-ci.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 413592f41..927db6e36 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -199,7 +199,8 @@ tests-spawn-multiprocessing-start-method: - mkdir -p "${INTEGRATION_CACHE}" - useradd -Um buildstream - chown -R buildstream:buildstream . - - su buildstream -c "tox -- ${PYTEST_ARGS} tests/{cachekey,plugins,internals,sourcecache}" + - su buildstream -c "tox -- ${PYTEST_ARGS} tests/{cachekey,format,plugins,internals,sourcecache}" + # Run type checkers mypy: -- cgit v1.2.1
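
Aside for reviewers: the jobpickler.py changes above lean on Python's per-Pickler dispatch_table support. Below is a minimal, self-contained sketch of that technique; it is illustrative only and is not BuildStream code. The names Widget, _KIND_REGISTRY, _new_from_reduction_args and _reduce_widget are hypothetical stand-ins for the plugin classes, factory lookup and reduction helpers that the real jobpickler.py uses.

import copyreg
import io
import pickle


class Widget:
    # Hypothetical stand-in for a plugin class (Element / Source).
    def __init__(self, kind, value):
        self.kind = kind
        self.value = value


# Hypothetical stand-in for the factory lookup: the real code maps each
# loaded plugin class to the factory it was loaded from.
_KIND_REGISTRY = {"widget": Widget}


def _new_from_reduction_args(kind):
    # Recreate a bare instance of the right class; pickle then restores the
    # instance state from the third element of the reduce tuple below.
    cls = _KIND_REGISTRY[kind]
    return cls.__new__(cls)


def _reduce_widget(obj):
    # The reduce tuple is (callable, args, state): unpickling calls
    # callable(*args) and then copies `state` into the new object's __dict__.
    return (_new_from_reduction_args, (obj.kind,), obj.__dict__)


data = io.BytesIO()
pickler = pickle.Pickler(data)
# Copy the global table so that only this Pickler instance is customized.
pickler.dispatch_table = copyreg.dispatch_table.copy()
pickler.dispatch_table[Widget] = _reduce_widget
pickler.dump(Widget("widget", 42))

restored = pickle.loads(data.getvalue())
assert isinstance(restored, Widget) and restored.value == 42

The point of the per-instance dispatch_table, as in the patches, is that the classes themselves stay pickling-agnostic: only the one Pickler used for child jobs knows how to reduce them, via a closure over the class-to-factory mapping.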