author     Tristan van Berkom <tristan.vanberkom@codethink.co.uk>  2020-06-14 21:23:19 +0900
committer  Tristan Van Berkom <tristan.van.berkom@gmail.com>       2020-06-16 16:18:21 +0000
commit     deb6562719c2ca90b5fbdc2974e8a77da8430a96 (patch)
tree       8930acee7295640e355c4a957b4919653663ea11
parent     3774ce42263b947b211c6bb54aea85a00caa561e (diff)
_loader: Adding LoadContext
Instead of passing around many details through calling signatures throughout the loader code, create a single LoadContext object which holds any overall loading state along with any values which are constant to a full load process.

Overall this patch does:

* _frontend/app.py: No need to pass Stream.fetch_subprojects() along anymore

* _loader/loadelement.pyx: collect_element_no_deps() no longer takes a task argument

* _loader/loader.py: The Loader now has a `load_context` member, and no longer has `_fetch_subprojects` or `_context` members.

  Further, `rewritable` and `ticker` are no longer passed along through all of the recursive calling signatures, and `ticker` itself is finally removed, since it was replaced long ago by the `Task` API from `State`.

* _pipeline.py: The load() function no longer has a `rewritable` parameter

* _project.py: The Project() is responsible for creating the toplevel LoadContext() if one doesn't exist yet, and this is passed through to the Loader() (and also passed to the Project() constructor by the Loader() when instantiating subprojects).

* _stream.py: `Stream._fetch_subprojects()` is now private and is set on the project when the Project is handed to the Stream in `Stream.set_project()`. The Stream() also sets the `rewritable` state on the `LoadContext` at the earliest opportunity, as the `Stream()` is the one who decides this detail.

  Further, some double underscore private functions are now regular single underscore functions; there was no reason for this inconsistency.

* tests/internals/loader.py: Updated for API change
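For orientation, a minimal sketch of the calling convention this patch moves to. This is not BuildStream code: the stripped-down Project/Loader/Stream classes below are hypothetical stand-ins, and only the LoadContext surface (set_rewritable(), set_task(), set_fetch_subprojects()) and its ownership rules follow the patch.

# A minimal sketch of the new calling convention (not BuildStream code).
# The trimmed-down Project/Loader/Stream classes are hypothetical stand-ins;
# only the LoadContext surface and its ownership rules mirror the patch.

class LoadContext:
    def __init__(self, context):
        self.context = context          # the invocation Context
        self.rewritable = False         # load files in rewritable mode?
        self.fetch_subprojects = None   # callback for fetching subprojects
        self.task = None                # Task used for progress reporting

    def set_rewritable(self, rewritable):
        self.rewritable = rewritable

    def set_task(self, task):
        self.task = task

    def set_fetch_subprojects(self, fetch_subprojects):
        self.fetch_subprojects = fetch_subprojects


class Loader:
    def __init__(self, project):
        # No _context or _fetch_subprojects members anymore: everything
        # global to a load lives on the shared LoadContext.
        self.load_context = project.load_context
        self.project = project

    def load(self, targets):
        # Per-call `rewritable`/`ticker`/`task` parameters are gone; the
        # loader consults the shared state instead.
        copy_tree = self.load_context.rewritable
        return ["loaded {} (copy_tree={})".format(t, copy_tree) for t in targets]


class Project:
    def __init__(self, context, parent_loader=None):
        # The toplevel Project creates the LoadContext; subprojects reuse
        # the one owned by their parent loader.
        if parent_loader:
            self.load_context = parent_loader.load_context
        else:
            self.load_context = LoadContext(context)
        self.loader = Loader(self)


class Stream:
    def __init__(self, context):
        self._context = context
        self._project = None

    def set_project(self, project):
        # The Stream decides how subprojects get fetched, so it configures
        # the LoadContext as soon as it receives the Project.
        self._project = project
        project.load_context.set_fetch_subprojects(self._fetch_subprojects)

    def _fetch_subprojects(self, junctions):
        pass  # the real implementation schedules a FetchQueue here

    def load(self, targets, rewritable=False):
        # The Stream also decides rewritability, at the earliest opportunity
        self._project.load_context.set_rewritable(rewritable)
        return self._project.loader.load(targets)


context = object()  # stand-in for the invocation Context
project = Project(context)
stream = Stream(context)
stream.set_project(project)
print(stream.load(["hello.bst"], rewritable=True))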
-rw-r--r--  src/buildstream/_frontend/app.py           1
-rw-r--r--  src/buildstream/_loader/__init__.py        1
-rw-r--r--  src/buildstream/_loader/loadcontext.py    66
-rw-r--r--  src/buildstream/_loader/loadelement.pyx    2
-rw-r--r--  src/buildstream/_loader/loader.py        141
-rw-r--r--  src/buildstream/_pipeline.py               6
-rw-r--r--  src/buildstream/_project.py               24
-rw-r--r--  src/buildstream/_stream.py                49
-rw-r--r--  tests/internals/loader.py                 14
9 files changed, 166 insertions(+), 138 deletions(-)
diff --git a/src/buildstream/_frontend/app.py b/src/buildstream/_frontend/app.py
index 8d4dd34b1..b77cfc130 100644
--- a/src/buildstream/_frontend/app.py
+++ b/src/buildstream/_frontend/app.py
@@ -286,7 +286,6 @@ class App:
self.context,
cli_options=self._main_options["option"],
default_mirror=self._main_options.get("default_mirror"),
- fetch_subprojects=self.stream.fetch_subprojects,
)
self.stream.set_project(self.project)
diff --git a/src/buildstream/_loader/__init__.py b/src/buildstream/_loader/__init__.py
index a2c31796e..fd5cac2ae 100644
--- a/src/buildstream/_loader/__init__.py
+++ b/src/buildstream/_loader/__init__.py
@@ -19,4 +19,5 @@
from .metasource import MetaSource
from .metaelement import MetaElement
+from .loadcontext import LoadContext
from .loader import Loader
diff --git a/src/buildstream/_loader/loadcontext.py b/src/buildstream/_loader/loadcontext.py
new file mode 100644
index 000000000..161be913b
--- /dev/null
+++ b/src/buildstream/_loader/loadcontext.py
@@ -0,0 +1,66 @@
+#
+# Copyright (C) 2020 Codethink Limited
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+# Authors:
+# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
+
+
+# LoadContext()
+#
+# An object to keep track of overall context during the load process.
+#
+# Args:
+# context (Context): The invocation context
+#
+class LoadContext:
+ def __init__(self, context):
+
+ # Keep track of global context required throughout the recursive load
+ self.context = context
+ self.rewritable = False
+ self.fetch_subprojects = None
+ self.task = None
+
+ # set_rewritable()
+ #
+ # Sets whether the projects are to be loaded in a rewritable fashion;
+ # this is used for tracking and is slightly more expensive in load time.
+ #
+ # Args:
+ # rewritable (bool): Whether to load projects in a rewritable fashion
+ #
+ def set_rewritable(self, rewritable):
+ self.rewritable = rewritable
+
+ # set_task()
+ #
+ # Sets the task for progress reporting.
+ #
+ # Args:
+ # task (Task): The task to report progress on
+ #
+ def set_task(self, task):
+ self.task = task
+
+ # set_fetch_subprojects()
+ #
+ # Sets the callback used to fetch subprojects.
+ #
+ # Args:
+ # fetch_subprojects (callable): The callable for fetching subprojects
+ #
+ def set_fetch_subprojects(self, fetch_subprojects):
+ self.fetch_subprojects = fetch_subprojects
diff --git a/src/buildstream/_loader/loadelement.pyx b/src/buildstream/_loader/loadelement.pyx
index 014f01746..784ab8f7b 100644
--- a/src/buildstream/_loader/loadelement.pyx
+++ b/src/buildstream/_loader/loadelement.pyx
@@ -127,7 +127,7 @@ cdef class LoadElement:
# store the link target and provenance
#
if self.node.get_str(Symbol.KIND, default=None) == 'link':
- meta_element = self._loader.collect_element_no_deps(self, None)
+ meta_element = self._loader.collect_element_no_deps(self)
element = Element._new_from_meta(meta_element)
element._initialize_state()
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index fd9e2ef2d..13d8f9f21 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -44,13 +44,11 @@ from .._message import Message, MessageType
# with their own MetaSources, ready for instantiation by the core.
#
# Args:
-# context (Context): The Context object
# project (Project): The toplevel Project object
-# fetch_subprojects (callable): A function to fetch subprojects
# parent (Loader): A parent Loader object, in the case this is a junctioned Loader
#
class Loader:
- def __init__(self, context, project, *, fetch_subprojects, parent=None):
+ def __init__(self, project, *, parent=None):
# Ensure we have an absolute path for the base directory
basedir = project.element_path
@@ -60,18 +58,17 @@ class Loader:
#
# Public members
#
+ self.load_context = project.load_context # The LoadContext
self.project = project # The associated Project
self.loaded = None # The number of loaded Elements
#
# Private members
#
- self._context = context
self._options = project.options # Project options (OptionPool)
self._basedir = basedir # Base project directory
self._first_pass_options = project.first_pass_config.options # Project options (OptionPool)
self._parent = parent # The parent loader
- self._fetch_subprojects = fetch_subprojects
self._meta_elements = {} # Dict of resolved meta elements by name
self._elements = {} # Dict of elements
@@ -84,16 +81,13 @@ class Loader:
# Loads the project based on the parameters given to the constructor
#
# Args:
- # rewritable (bool): Whether the loaded files should be rewritable
- # this is a bit more expensive due to deep copies
- # ticker (callable): An optional function for tracking load progress
# targets (list of str): Target, element-path relative bst filenames in the project
- # task (Task): A task object to report progress to
#
# Raises: LoadError
#
# Returns: The toplevel LoadElement
- def load(self, targets, task, rewritable=False, ticker=None):
+ #
+ def load(self, targets):
for filename in targets:
if os.path.isabs(filename):
@@ -113,8 +107,8 @@ class Loader:
for target in targets:
with PROFILER.profile(Topics.LOAD_PROJECT, target):
- _junction, name, loader = self._parse_name(target, rewritable, ticker)
- element = loader._load_file(name, rewritable, ticker)
+ _junction, name, loader = self._parse_name(target, None)
+ element = loader._load_file(name, None)
target_elements.append(element)
#
@@ -147,14 +141,14 @@ class Loader:
# Finally, wrap what we have into LoadElements and return the target
#
- ret.append(loader._collect_element(element, task))
+ ret.append(loader._collect_element(element))
self._clean_caches()
# Cache how many Elements have just been loaded
- if task:
+ if self.load_context.task:
# Workaround for task potentially being None (because no State object)
- self.loaded = task.current_progress
+ self.loaded = self.load_context.task.current_progress
return ret
@@ -165,21 +159,16 @@ class Loader:
# Args:
# name (str): Name of junction, may have multiple `:` in the name
# provenance (ProvenanceInformation): The provenance
- # rewritable (bool): Whether the loaded files should be rewritable
- # this is a bit more expensive due to deep copies
- # ticker (callable): An optional function for tracking load progress
#
# Returns:
# (Loader): loader for sub-project
#
- def get_loader(self, name, provenance, *, rewritable=False, ticker=None, level=0):
+ def get_loader(self, name, provenance, *, level=0):
junction_path = name.split(":")
loader = self
for junction_name in junction_path:
- loader = loader._get_loader(
- junction_name, rewritable=rewritable, ticker=ticker, level=level, provenance=provenance
- )
+ loader = loader._get_loader(junction_name, provenance, level=level)
return loader
@@ -192,12 +181,14 @@ class Loader:
#
# Args:
# element (LoadElement): The element for which to load a MetaElement
- # task (Task): A task to write progress information to
+ # report_progress (bool): Whether to report progress for this element; this is
+ # because we ignore junctions and links when counting
+ # how many elements we load.
#
# Returns:
# (MetaElement): A partially loaded MetaElement
#
- def collect_element_no_deps(self, element, task=None):
+ def collect_element_no_deps(self, element, *, report_progress=False):
# Return the already built one, if we already built it
meta_element = self._meta_elements.get(element.name)
if meta_element:
@@ -211,7 +202,7 @@ class Loader:
# if there's a workspace for this element then just append a dummy workspace
# metasource.
- workspace = self._context.get_workspaces().get_workspace(element.name)
+ workspace = self.load_context.context.get_workspaces().get_workspace(element.name)
skip_workspace = True
if workspace:
workspace_node = {"kind": "workspace"}
@@ -253,8 +244,8 @@ class Loader:
# Cache it now, make sure it's already there before recursing
self._meta_elements[element.name] = meta_element
- if task:
- task.add_current_progress()
+ if self.load_context.task and report_progress:
+ self.load_context.task.add_current_progress()
return meta_element
@@ -272,17 +263,18 @@ class Loader:
#
# Args:
# filename (str): The element-path relative bst file
- # rewritable (bool): Whether we should load in round trippable mode
# provenance (Provenance): The location from where the file was referred to, or None
#
# Returns:
# (LoadElement): A partially-loaded LoadElement
#
- def _load_file_no_deps(self, filename, rewritable, provenance=None):
+ def _load_file_no_deps(self, filename, provenance=None):
# Load the data and process any conditional statements therein
fullpath = os.path.join(self._basedir, filename)
try:
- node = _yaml.load(fullpath, shortname=filename, copy_tree=rewritable, project=self.project)
+ node = _yaml.load(
+ fullpath, shortname=filename, copy_tree=self.load_context.rewritable, project=self.project
+ )
except LoadError as e:
if e.reason == LoadErrorReason.MISSING_FILE:
@@ -346,32 +338,24 @@ class Loader:
#
# Args:
# filename (str): The element-path relative bst file
- # rewritable (bool): Whether we should load in round trippable mode
- # ticker (callable): A callback to report loaded filenames to the frontend
# provenance (Provenance): The location from where the file was referred to, or None
#
# Returns:
# (LoadElement): A loaded LoadElement
#
- def _load_file(self, filename, rewritable, ticker, provenance=None):
+ def _load_file(self, filename, provenance):
# Silently ignore already loaded files
if filename in self._elements:
return self._elements[filename]
- # Call the ticker
- if ticker:
- ticker(filename)
-
- top_element = self._load_file_no_deps(filename, rewritable, provenance)
+ top_element = self._load_file_no_deps(filename, provenance)
# If this element is a link then we need to resolve it
# and replace the dependency we've processed with this one
if top_element.link_target is not None:
- _, filename, loader = self._parse_name(
- top_element.link_target, rewritable, ticker, top_element.link_target_provenance
- )
- top_element = loader._load_file(filename, rewritable, ticker, top_element.link_target_provenance)
+ _, filename, loader = self._parse_name(top_element.link_target, top_element.link_target_provenance)
+ top_element = loader._load_file(filename, top_element.link_target_provenance)
dependencies = extract_depends_from_node(top_element.node)
# The loader queue is a stack of tuples
@@ -391,8 +375,8 @@ class Loader:
current_element[2].append(dep.name)
if dep.junction:
- loader = self.get_loader(dep.junction, dep.provenance, rewritable=rewritable, ticker=ticker)
- dep_element = loader._load_file(dep.name, rewritable, ticker, dep.provenance)
+ loader = self.get_loader(dep.junction, dep.provenance)
+ dep_element = loader._load_file(dep.name, dep.provenance)
else:
dep_element = self._elements.get(dep.name)
@@ -400,7 +384,7 @@ class Loader:
# The loader does not have this available so we need to
# either recursively cause it to be loaded, or else we
# need to push this onto the loader queue in this loader
- dep_element = self._load_file_no_deps(dep.name, rewritable, dep.provenance)
+ dep_element = self._load_file_no_deps(dep.name, dep.provenance)
dep_deps = extract_depends_from_node(dep_element.node)
loader_queue.append((dep_element, list(reversed(dep_deps)), []))
@@ -413,10 +397,8 @@ class Loader:
# If this dependency is a link then we need to resolve it
# and replace the dependency we've processed with this one
if dep_element.link_target:
- _, filename, loader = self._parse_name(
- dep_element.link_target, rewritable, ticker, dep_element.link_target_provenance
- )
- dep_element = loader._load_file(filename, rewritable, ticker, dep_element.link_target_provenance)
+ _, filename, loader = self._parse_name(dep_element.link_target, dep_element.link_target_provenance)
+ dep_element = loader._load_file(filename, dep_element.link_target_provenance)
# All is well, push the dependency onto the LoadElement
# Pylint is not very happy with Cython and can't understand 'dependencies' is a list
@@ -492,14 +474,13 @@ class Loader:
#
# Args:
# top_element (LoadElement): The element for which to load a MetaElement
- # task (Task): The task to update with progress changes
#
# Returns:
# (MetaElement): A fully loaded MetaElement
#
- def _collect_element(self, top_element, task=None):
+ def _collect_element(self, top_element):
element_queue = [top_element]
- meta_element_queue = [self.collect_element_no_deps(top_element, task)]
+ meta_element_queue = [self.collect_element_no_deps(top_element, report_progress=True)]
while element_queue:
element = element_queue.pop()
@@ -515,12 +496,12 @@ class Loader:
loader = dep.element._loader
name = dep.element.name
- if name not in loader._meta_elements:
- meta_dep = loader.collect_element_no_deps(dep.element, task)
+ try:
+ meta_dep = loader._meta_elements[name]
+ except KeyError:
+ meta_dep = loader.collect_element_no_deps(dep.element, report_progress=True)
element_queue.append(dep.element)
meta_element_queue.append(meta_dep)
- else:
- meta_dep = loader._meta_elements[name]
if dep.dep_type != "runtime":
meta_element.build_dependencies.append(meta_dep)
@@ -539,13 +520,14 @@ class Loader:
#
# Args:
# filename (str): Junction name
+ # provenance (Provenance): The location from where the file was referred to, or None
#
# Raises: LoadError
#
# Returns: A Loader or None if specified junction does not exist
#
- def _get_loader(self, filename, *, rewritable=False, ticker=None, level=0, provenance=None):
-
+ def _get_loader(self, filename, provenance, *, level=0):
+ loader = None
provenance_str = ""
if provenance is not None:
provenance_str = "{}: ".format(provenance)
@@ -569,15 +551,13 @@ class Loader:
if self._parent:
# junctions in the parent take precedence over junctions defined
# in subprojects
- loader = self._parent._get_loader(
- filename, rewritable=rewritable, ticker=ticker, level=level + 1, provenance=provenance
- )
+ loader = self._parent._get_loader(filename, level=level + 1, provenance=provenance)
if loader:
self._loaders[filename] = loader
return loader
try:
- self._load_file(filename, rewritable, ticker, provenance=provenance)
+ self._load_file(filename, provenance=provenance)
except LoadError as e:
if e.reason != LoadErrorReason.MISSING_FILE:
# other load error
@@ -599,21 +579,8 @@ class Loader:
# immediately and move on to the target.
#
if load_element.link_target:
-
- _, filename, loader = self._parse_name(
- load_element.link_target, rewritable, ticker, provenance=load_element.link_target_provenance
- )
-
- if loader != self:
- level = level + 1
-
- return loader._get_loader(
- filename,
- rewritable=rewritable,
- ticker=ticker,
- level=level,
- provenance=load_element.link_target_provenance,
- )
+ _, filename, loader = self._parse_name(load_element.link_target, load_element.link_target_provenance)
+ return loader.get_loader(filename, load_element.link_target_provenance)
# meta junction element
#
@@ -664,9 +631,7 @@ class Loader:
# Handle the case where a subproject needs to be fetched
#
if not element._has_all_sources_in_source_cache():
- if ticker:
- ticker(filename, "Fetching subproject")
- self._fetch_subprojects([element])
+ self.load_context.fetch_subprojects([element])
sources = list(element.sources())
if len(sources) == 1 and sources[0]._get_local_path():
@@ -699,12 +664,7 @@ class Loader:
from .._project import Project # pylint: disable=cyclic-import
project = Project(
- project_dir,
- self._context,
- junction=element,
- parent_loader=self,
- search_for_project=False,
- fetch_subprojects=self._fetch_subprojects,
+ project_dir, self.load_context.context, junction=element, parent_loader=self, search_for_project=False,
)
except LoadError as e:
if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
@@ -730,9 +690,6 @@ class Loader:
#
# Args:
# name (str): Name of target
- # rewritable (bool): Whether the loaded files should be rewritable
- # this is a bit more expensive due to deep copies
- # ticker (callable): An optional function for tracking load progress
# provenance (ProvenanceInformation): The provenance
#
# Returns:
@@ -740,7 +697,7 @@ class Loader:
# - (str): name of the element
# - (Loader): loader for sub-project
#
- def _parse_name(self, name, rewritable, ticker, provenance=None):
+ def _parse_name(self, name, provenance):
# We allow to split only once since deep junctions names are forbidden.
# Users who want to refer to elements in sub-sub-projects are required
# to create junctions on the top level project.
@@ -748,7 +705,7 @@ class Loader:
if len(junction_path) == 1:
return None, junction_path[-1], self
else:
- loader = self.get_loader(junction_path[-2], provenance, rewritable=rewritable, ticker=ticker)
+ loader = self.get_loader(junction_path[-2], provenance)
return junction_path[-2], junction_path[-1], loader
# Print a warning message, checks warning_token against project configuration
@@ -767,7 +724,7 @@ class Loader:
raise LoadError(brief, warning_token)
message = Message(MessageType.WARN, brief)
- self._context.messenger.message(message)
+ self.load_context.context.messenger.message(message)
# Print warning messages if any of the specified elements have invalid names.
#
diff --git a/src/buildstream/_pipeline.py b/src/buildstream/_pipeline.py
index 8de97bea6..9edc6f51b 100644
--- a/src/buildstream/_pipeline.py
+++ b/src/buildstream/_pipeline.py
@@ -62,19 +62,17 @@ class Pipeline:
#
# Args:
# target_groups (list of lists): Groups of toplevel targets to load
- # rewritable (bool): Whether the loaded files should be rewritable
- # this is a bit more expensive due to deep copies
#
# Returns:
# (tuple of lists): A tuple of grouped Element objects corresponding to target_groups
#
- def load(self, target_groups, *, rewritable=False):
+ def load(self, target_groups):
# First concatenate all the lists for the loader's sake
targets = list(itertools.chain(*target_groups))
with PROFILER.profile(Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, "-") for t in targets)):
- elements = self._project.load_elements(targets, rewritable=rewritable)
+ elements = self._project.load_elements(targets)
# Now create element groups to match the input target groups
elt_iter = iter(elements)
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index 3006e2976..e0ddf3d41 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -39,7 +39,7 @@ from .sandbox import SandboxRemote
from ._pluginfactory import ElementFactory, SourceFactory, load_plugin_origin
from .types import CoreWarnings
from ._projectrefs import ProjectRefs, ProjectRefStorage
-from ._loader import Loader
+from ._loader import Loader, LoadContext
from .element import Element
from ._message import Message, MessageType
from ._includes import Includes
@@ -100,7 +100,6 @@ class Project:
default_mirror=None,
parent_loader=None,
search_for_project=True,
- fetch_subprojects=None
):
# The project name
@@ -108,6 +107,12 @@ class Project:
self._context = context # The invocation Context, a private member
+ # Create the LoadContext here if we are the toplevel project.
+ if parent_loader:
+ self.load_context = parent_loader.load_context
+ else:
+ self.load_context = LoadContext(self._context)
+
if search_for_project:
self.directory, self._invoked_from_workspace_element = self._find_project_dir(directory)
else:
@@ -159,7 +164,7 @@ class Project:
self._project_includes = None
with PROFILER.profile(Topics.LOAD_PROJECT, self.directory.replace(os.sep, "-")):
- self._load(parent_loader=parent_loader, fetch_subprojects=fetch_subprojects)
+ self._load(parent_loader=parent_loader)
self._partially_loaded = True
@@ -410,15 +415,16 @@ class Project:
#
# Args:
# targets (list): Target names
- # rewritable (bool): Whether the loaded files should be rewritable
- # this is a bit more expensive due to deep copies
#
# Returns:
# (list): A list of loaded Element
#
- def load_elements(self, targets, *, rewritable=False):
+ def load_elements(self, targets):
+
with self._context.messenger.simple_task("Loading elements", silent_nested=True) as task:
- meta_elements = self.loader.load(targets, task, rewritable=rewritable, ticker=None)
+ self.load_context.set_task(task)
+ meta_elements = self.loader.load(targets)
+ self.load_context.set_task(None)
with self._context.messenger.simple_task("Resolving elements") as task:
if task:
@@ -647,7 +653,7 @@ class Project:
#
# Raises: LoadError if there was a problem with the project.conf
#
- def _load(self, *, parent_loader=None, fetch_subprojects):
+ def _load(self, *, parent_loader=None):
# Load builtin default
projectfile = os.path.join(self.directory, _PROJECT_CONF_FILE)
@@ -694,7 +700,7 @@ class Project:
# Fatal warnings
self._fatal_warnings = pre_config_node.get_str_list("fatal-warnings", default=[])
- self.loader = Loader(self._context, self, parent=parent_loader, fetch_subprojects=fetch_subprojects)
+ self.loader = Loader(self, parent=parent_loader)
self._project_includes = Includes(self.loader, copy_tree=False)
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index b38927e18..3d646a756 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -133,6 +133,7 @@ class Stream:
def set_project(self, project):
assert self._project is None
self._project = project
+ self._project.load_context.set_fetch_subprojects(self._fetch_subprojects)
self._pipeline = Pipeline(self._context, project, self._artifacts)
# load_selection()
@@ -1002,20 +1003,6 @@ class Stream:
return list(output_elements)
- # fetch_subprojects()
- #
- # Fetch subprojects as part of the project and element loading process.
- #
- # Args:
- # junctions (list of Element): The junctions to fetch
- #
- def fetch_subprojects(self, junctions):
- self._scheduler.clear_queues()
- queue = FetchQueue(self._scheduler)
- queue.enqueue(junctions)
- self.queues = [queue]
- self._run()
-
# get_state()
#
# Get the State object owned by Stream
@@ -1097,7 +1084,21 @@ class Stream:
# Private Methods #
#############################################################
- # __load_elements_from_targets
+ # _fetch_subprojects()
+ #
+ # Fetch subprojects as part of the project and element loading process.
+ #
+ # Args:
+ # junctions (list of Element): The junctions to fetch
+ #
+ def _fetch_subprojects(self, junctions):
+ self._scheduler.clear_queues()
+ queue = FetchQueue(self._scheduler)
+ queue.enqueue(junctions)
+ self.queues = [queue]
+ self._run()
+
+ # _load_elements_from_targets
#
# Given the usual set of target element names/artifact refs, load
# the `Element` objects required to describe the selection.
@@ -1114,15 +1115,17 @@ class Stream:
# Returns:
# ([elements], [except_elements], [artifact_elements])
#
- def __load_elements_from_targets(
+ def _load_elements_from_targets(
self, targets: List[str], except_targets: List[str], *, rewritable: bool = False
) -> Tuple[List[Element], List[Element], List[Element]]:
names, refs = self._classify_artifacts(targets)
loadable = [names, except_targets]
+ self._project.load_context.set_rewritable(rewritable)
+
# Load and filter elements
if loadable:
- elements, except_elements = self._pipeline.load(loadable, rewritable=rewritable)
+ elements, except_elements = self._pipeline.load(loadable)
else:
elements, except_elements = [], []
@@ -1134,7 +1137,7 @@ class Stream:
return elements, except_elements, artifacts
- # __connect_remotes()
+ # _connect_remotes()
#
# Connect to the source and artifact remotes.
#
@@ -1144,9 +1147,7 @@ class Stream:
# use_artifact_config - Whether to use the artifact config.
# use_source_config - Whether to use the source config.
#
- def __connect_remotes(
- self, artifact_url: str, source_url: str, use_artifact_config: bool, use_source_config: bool
- ):
+ def _connect_remotes(self, artifact_url: str, source_url: str, use_artifact_config: bool, use_source_config: bool):
# ArtifactCache.setup_remotes expects all projects to be fully loaded
for project in self._context.get_projects():
project.ensure_fully_loaded()
@@ -1176,7 +1177,7 @@ class Stream:
# We never want to use a PLAN selection when tracking elements
assert selection != _PipelineSelection.PLAN
- elements, except_elements, artifacts = self.__load_elements_from_targets(
+ elements, except_elements, artifacts = self._load_elements_from_targets(
targets, except_targets, rewritable=True
)
@@ -1241,7 +1242,7 @@ class Stream:
dynamic_plan=False,
load_refs=False
):
- elements, except_elements, artifacts = self.__load_elements_from_targets(
+ elements, except_elements, artifacts = self._load_elements_from_targets(
targets, except_targets, rewritable=False
)
@@ -1259,7 +1260,7 @@ class Stream:
self.targets = elements + artifacts
# Connect to remote caches, this needs to be done before resolving element state
- self.__connect_remotes(artifact_remote_url, source_remote_url, use_artifact_config, use_source_config)
+ self._connect_remotes(artifact_remote_url, source_remote_url, use_artifact_config, use_source_config)
# Now move on to loading primary selection.
#
diff --git a/tests/internals/loader.py b/tests/internals/loader.py
index 408813a64..bdce428f0 100644
--- a/tests/internals/loader.py
+++ b/tests/internals/loader.py
@@ -28,7 +28,7 @@ def test_one_file(datafiles):
basedir = str(datafiles)
with make_loader(basedir) as loader:
- element = loader.load(["elements/onefile.bst"], None)[0]
+ element = loader.load(["elements/onefile.bst"])[0]
assert isinstance(element, MetaElement)
assert element.kind == "pony"
@@ -39,7 +39,7 @@ def test_missing_file(datafiles):
basedir = str(datafiles)
with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
- loader.load(["elements/missing.bst"], None)
+ loader.load(["elements/missing.bst"])
assert exc.value.reason == LoadErrorReason.MISSING_FILE
@@ -49,7 +49,7 @@ def test_invalid_reference(datafiles):
basedir = str(datafiles)
with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
- loader.load(["elements/badreference.bst"], None)
+ loader.load(["elements/badreference.bst"])
assert exc.value.reason == LoadErrorReason.INVALID_YAML
@@ -59,7 +59,7 @@ def test_invalid_yaml(datafiles):
basedir = str(datafiles)
with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
- loader.load(["elements/badfile.bst"], None)
+ loader.load(["elements/badfile.bst"])
assert exc.value.reason == LoadErrorReason.INVALID_YAML
@@ -71,7 +71,7 @@ def test_fail_fullpath_target(datafiles):
fullpath = os.path.join(basedir, "elements", "onefile.bst")
with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
- loader.load([fullpath], None)
+ loader.load([fullpath])
assert exc.value.reason == LoadErrorReason.INVALID_DATA
@@ -81,7 +81,7 @@ def test_invalid_key(datafiles):
basedir = str(datafiles)
with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
- loader.load(["elements/invalidkey.bst"], None)
+ loader.load(["elements/invalidkey.bst"])
assert exc.value.reason == LoadErrorReason.INVALID_DATA
@@ -91,6 +91,6 @@ def test_invalid_directory_load(datafiles):
basedir = str(datafiles)
with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
- loader.load(["elements/"], None)
+ loader.load(["elements/"])
assert exc.value.reason == LoadErrorReason.LOADING_DIRECTORY