author    bst-marge-bot <marge-bot@buildstream.build>  2019-10-25 15:02:07 +0000
committer bst-marge-bot <marge-bot@buildstream.build>  2019-10-25 15:02:07 +0000
commit    732f4742e775c3297a8fed7da4225afc070430e3 (patch)
tree      19863b4234076093c2dad132fac6c9501b485666
parent    44e8e25d88a473d69197998f80a91bedaec15ddd (diff)
parent    299ed94a3f602794165bd366c320f48cf139d1b6 (diff)
Merge branch 'aevri/enable_spawn_ci_3' into 'master'  (tag: 1.91.2)

    pickle `first_pass_config` factories

    See merge request BuildStream/buildstream!1654
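For context, the refactor below leans on pickle's per-pickler `dispatch_table`: every loaded plugin class is mapped to a custom reducer so instances can cross the child-job process boundary without pickling the class object itself. A minimal sketch of that mechanism, using illustrative names (`Widget`, `rebuild_widget`) rather than BuildStream API:

    import copyreg
    import io
    import pickle

    class Widget:
        """Stand-in for a plugin class the default pickler cannot handle."""
        def __init__(self, kind):
            self.kind = kind

    def rebuild_widget(kind):
        # Runs on the unpickling side: recreate an instance from its kind.
        return Widget(kind)

    def reduce_widget(widget):
        # A reducer returns (callable, args): pickle stores these instead of
        # the object, and the unpickler calls rebuild_widget(kind) later.
        return (rebuild_widget, (widget.kind,))

    buf = io.BytesIO()
    pickler = pickle.Pickler(buf)
    pickler.dispatch_table = copyreg.dispatch_table.copy()
    pickler.dispatch_table[Widget] = reduce_widget  # per-class, per-pickler

    pickler.dump(Widget("autotools"))
    assert pickle.loads(buf.getvalue()).kind == "autotools"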
-rw-r--r--  .gitlab-ci.yml                                |  3
-rw-r--r--  src/buildstream/_scheduler/jobs/jobpickler.py | 40
-rw-r--r--  src/buildstream/element.py                    | 14
-rw-r--r--  src/buildstream/plugin.py                     |  9
-rw-r--r--  src/buildstream/source.py                     | 13
5 files changed, 40 insertions(+), 39 deletions(-)
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 413592f41..927db6e36 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -199,7 +199,8 @@ tests-spawn-multiprocessing-start-method:
- mkdir -p "${INTEGRATION_CACHE}"
- useradd -Um buildstream
- chown -R buildstream:buildstream .
- - su buildstream -c "tox -- ${PYTEST_ARGS} tests/{cachekey,plugins,internals,sourcecache}"
+ - su buildstream -c "tox -- ${PYTEST_ARGS} tests/{cachekey,format,plugins,internals,sourcecache}"
+
# Run type checkers
mypy:
diff --git a/src/buildstream/_scheduler/jobs/jobpickler.py b/src/buildstream/_scheduler/jobs/jobpickler.py
index 6a4a8213b..d514e87ae 100644
--- a/src/buildstream/_scheduler/jobs/jobpickler.py
+++ b/src/buildstream/_scheduler/jobs/jobpickler.py
@@ -79,27 +79,33 @@ _PROTO_CLASS_TO_NAME = {
#
def pickle_child_job(child_job, projects):
- element_classes = [
- cls
+ factory_list = [
+ factory
for p in projects
- if p.config.element_factory is not None
- for cls, _ in p.config.element_factory.all_loaded_plugins()
- ]
- source_classes = [
- cls
- for p in projects
- if p.config.source_factory is not None
- for cls, _ in p.config.source_factory.all_loaded_plugins()
+ for factory in [
+ p.config.element_factory,
+ p.first_pass_config.element_factory,
+ p.config.source_factory,
+ p.first_pass_config.source_factory,
+ ]
]
+ plugin_class_to_factory = {
+ cls: factory
+ for factory in factory_list
+ if factory is not None
+ for cls, _ in factory.all_loaded_plugins()
+ }
+
data = io.BytesIO()
pickler = pickle.Pickler(data)
pickler.dispatch_table = copyreg.dispatch_table.copy()
- for cls in element_classes:
- pickler.dispatch_table[cls] = _reduce_plugin
- for cls in source_classes:
- pickler.dispatch_table[cls] = _reduce_plugin
+ def reduce_plugin(plugin):
+ return _reduce_plugin_with_factory_dict(plugin, plugin_class_to_factory)
+
+ for cls in plugin_class_to_factory:
+ pickler.dispatch_table[cls] = reduce_plugin
pickler.dispatch_table[ArtifactProto] = _reduce_proto
pickler.dispatch_table[DigestProto] = _reduce_proto
pickler.dispatch_table[Loader] = _reduce_object
@@ -130,8 +136,10 @@ def _new_proto_from_reduction_args(name, data):
return instance
-def _reduce_plugin(plugin):
- factory, meta_kind, state = plugin._get_args_for_child_job_pickling()
+def _reduce_plugin_with_factory_dict(plugin, plugin_class_to_factory):
+ meta_kind, state = plugin._get_args_for_child_job_pickling()
+ assert meta_kind
+ factory = plugin_class_to_factory[type(plugin)]
args = (factory, meta_kind)
return (_new_plugin_from_reduction_args, args, state)
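The reducer above returns a three-element tuple, `(_new_plugin_from_reduction_args, args, state)`, which is why `_get_args_for_child_job_pickling()` can now hand back only `(meta_kind, state)`: the factory is resolved on the pickling side via `plugin_class_to_factory[type(plugin)]`, and the third element is applied as instance state after reconstruction. A small sketch of how pickle treats that state element, assuming no custom `__setstate__` (names are illustrative):

    import copyreg
    import io
    import pickle

    class Thing:
        pass

    def make_thing():
        return Thing()

    def reduce_thing(thing):
        # (callable, args, state): the unpickler calls make_thing() and then,
        # absent a __setstate__ method, updates the new object's __dict__
        # with the state dict.
        return (make_thing, (), dict(thing.__dict__))

    buf = io.BytesIO()
    pickler = pickle.Pickler(buf)
    pickler.dispatch_table = copyreg.dispatch_table.copy()
    pickler.dispatch_table[Thing] = reduce_thing

    t = Thing()
    t.meta_kind = "import"
    pickler.dump(t)
    assert pickle.loads(buf.getvalue()).meta_kind == "import"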
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index 60eec85d0..dac6eb1f2 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -121,8 +121,6 @@ if TYPE_CHECKING:
from .source import Source
from ._context import Context
from ._loader.metaelement import MetaElement
- from ._loader.metasource import MetaSource
- from ._loader.metasource import MetaSource
from ._project import Project
# pylint: enable=cyclic-import
@@ -2308,11 +2306,10 @@ class Element(Plugin):
# Return data necessary to reconstruct this object in a child job process.
#
# Returns:
- # (PluginContext, str, dict): A tuple of (factory, meta_kind, state),
- # where `factory` is an object that can use `meta_kind` to create an
- # instance of the same type as `self`. `state` is what we want
- # `self.__dict__` to be restored to after instantiation in the child
- # process.
+ # (str, dict): A tuple of (meta_kind, state), where a factory can use
+ # `meta_kind` to create an instance of the same type as `self`. `state`
+ # is what we want `self.__dict__` to be restored to after instantiation
+ # in the child process.
#
def _get_args_for_child_job_pickling(self):
state = self.__dict__.copy()
@@ -2336,8 +2333,7 @@ class Element(Plugin):
# let us know, and we will need to update accordingly.
del state["_Element__required_callback"]
- factory = self._get_project().config.element_factory
- return factory, self.__meta_kind, state
+ return self.__meta_kind, state
def _walk_artifact_files(self):
yield from self.__artifact.get_files().walk()
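The `del state["_Element__required_callback"]` seen in the hunk above illustrates the general pattern: the state dict starts as a copy of `self.__dict__`, and anything bound to the parent process (such as event-loop callbacks) is dropped before pickling. A rough sketch of that pattern, with hypothetical attribute names, not BuildStream code:

    def get_picklable_state(obj, process_local=("_required_callback",)):
        # Copy __dict__ and drop attributes that only make sense in the
        # parent process; the child job re-registers these after it starts.
        state = obj.__dict__.copy()
        for name in process_local:
            state.pop(name, None)
        return state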
diff --git a/src/buildstream/plugin.py b/src/buildstream/plugin.py
index e269a4b1a..c1ee333f7 100644
--- a/src/buildstream/plugin.py
+++ b/src/buildstream/plugin.py
@@ -712,11 +712,10 @@ class Plugin():
# Return data necessary to reconstruct this object in a child job process.
#
# Returns:
- # (PluginContext, str, dict): A tuple of (factory, meta_kind, state),
- # where `factory` is an object that can use `meta_kind` to create an
- # instance of the same type as `self`. `state` is what we want
- # `self.__dict__` to be restored to after instantiation in the child
- # process.
+ # (str, dict): A tuple of (meta_kind, state), where a factory can use
+ # `meta_kind` to create an instance of the same type as `self`. `state`
+ # is what we want `self.__dict__` to be restored to after instantiation
+ # in the child process.
#
def _get_args_for_child_job_pickling(self):
# Note that this is only to be implemented as a BuildStream internal,
diff --git a/src/buildstream/source.py b/src/buildstream/source.py
index 7fc2e9fc0..a2b566a05 100644
--- a/src/buildstream/source.py
+++ b/src/buildstream/source.py
@@ -1113,15 +1113,12 @@ class Source(Plugin):
# Return data necessary to reconstruct this object in a child job process.
#
# Returns:
- # (PluginContext, str, dict): A tuple of (factory, meta_kind, state),
- # where `factory` is an object that can use `meta_kind` to create an
- # instance of the same type as `self`. `state` is what we want
- # `self.__dict__` to be restored to after instantiation in the child
- # process.
+ # (str, dict): A tuple of (meta_kind, state), where a factory can use
+ # `meta_kind` to create an instance of the same type as `self`. `state`
+ # is what we want `self.__dict__` to be restored to after instantiation
+ # in the child process.
#
def _get_args_for_child_job_pickling(self):
- factory = self._get_project().config.source_factory
-
# In case you're wondering, note that it doesn't seem to be necessary
# to make a copy of `self.__dict__` here, because:
#
@@ -1131,7 +1128,7 @@ class Source(Plugin):
# o The code sketch of how pickling works also returns `self.__dict__`:
# https://docs.python.org/3/library/pickle.html#pickling-class-instances
#
- return factory, self.__meta_kind, self.__dict__
+ return self.__meta_kind, self.__dict__
#############################################################
# Local Private Methods #