author     Tristan van Berkom <tristan@codethink.co.uk>    2020-08-30 15:57:13 +0900
committer  Tristan van Berkom <tristan@codethink.co.uk>    2020-09-04 18:22:38 +0900
commit     1a3e4e89fc86b29342c9ec62ca8915b6eae084d2 (patch)
tree       d0e88bcc0fe25618306e93a3354026776bb9587b
parent     97812cbb7d295cc3d270be9205cbc12313215028 (diff)
download   buildstream-1a3e4e89fc86b29342c9ec62ca8915b6eae084d2.tar.gz
element.py: Hide dependencies which are irrelevant to the Element
This is a large breaking change. A summary of the changes:

* The Scope type is now private, since Element plugins no longer have the
  option of viewing any other scopes.

* Element.dependencies() API change

  It now accepts a "selection" (sequence) of dependency elements, so that
  Element.dependencies() can iterate over a collection of dependencies,
  ensuring that we iterate over every element only once even when we need
  to iterate over multiple elements' dependencies.

  The old API is moved to Element._dependencies() and is still used internally.

* Element.stage_dependency_artifacts() API change

  This gets the same treatment as Element.dependencies(), and the old API
  is also preserved as Element._stage_dependency_artifacts(), so that the
  CLI can stage things for `bst artifact checkout` and such.

* Element.search() API change

  The Scope argument is removed, and the old API is preserved as
  Element._search() temporarily, until we can remove it completely.
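For illustration only (not part of this commit): a rough sketch of how an element
plugin's staging code migrates from the old Scope-based API to the new
selection-based API, mirroring the buildelement.py and scriptelement.py hunks
below. The plugin class and the "base.bst" element name are hypothetical.

    # Before this commit: plugins selected a Scope explicitly
    from buildstream import Element, Scope

    class ExamplePlugin(Element):          # hypothetical plugin
        def stage(self, sandbox):
            # Stage build dependency artifacts at the sandbox root
            self.stage_dependency_artifacts(sandbox, Scope.BUILD)
            # Run integration commands for the staged dependencies
            for dep in self.dependencies(Scope.BUILD):
                dep.integrate(sandbox)

    # After this commit: the build scope is implied, and an optional
    # "selection" of dependency elements replaces the Scope argument
    from buildstream import Element

    class ExamplePlugin(Element):          # hypothetical plugin
        def stage(self, sandbox):
            # Stage this element's build dependencies at the sandbox root
            self.stage_dependency_artifacts(sandbox)
            # Run integration commands for the staged dependencies
            for dep in self.dependencies():
                dep.integrate(sandbox)

            # search() no longer takes a Scope argument; staging a single
            # dependency's runtime closure is done by passing a selection
            base = self.search("base.bst")  # hypothetical element name
            if base is not None:
                self.stage_dependency_artifacts(sandbox, [base], path="/base")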
-rw-r--r--  src/buildstream/__init__.py                               2
-rw-r--r--  src/buildstream/_artifact.py                              6
-rw-r--r--  src/buildstream/_artifactelement.py                       2
-rw-r--r--  src/buildstream/_elementproxy.py                         56
-rw-r--r--  src/buildstream/_frontend/app.py                          7
-rw-r--r--  src/buildstream/_frontend/cli.py                          4
-rw-r--r--  src/buildstream/_frontend/widget.py                       8
-rw-r--r--  src/buildstream/_pipeline.py                             23
-rw-r--r--  src/buildstream/_stream.py                               21
-rw-r--r--  src/buildstream/buildelement.py                           8
-rw-r--r--  src/buildstream/element.py                              359
-rw-r--r--  src/buildstream/plugins/elements/compose.py               8
-rw-r--r--  src/buildstream/plugins/elements/filter.py               13
-rw-r--r--  src/buildstream/scriptelement.py                         19
-rw-r--r--  src/buildstream/types.py                                 54
-rw-r--r--  tests/artifactcache/push.py                               5
-rw-r--r--  tests/elements/filter/basic/element_plugins/dynamic.py    4
17 files changed, 368 insertions, 231 deletions
diff --git a/src/buildstream/__init__.py b/src/buildstream/__init__.py
index 1bc564e2e..4d151873d 100644
--- a/src/buildstream/__init__.py
+++ b/src/buildstream/__init__.py
@@ -30,7 +30,7 @@ if "_BST_COMPLETION" not in os.environ:
from .utils import UtilError, ProgramNotFoundError
from .sandbox import Sandbox, SandboxFlags, SandboxCommandError
- from .types import Scope, CoreWarnings
+ from .types import CoreWarnings
from .node import MappingNode, Node, ProvenanceInformation, ScalarNode, SequenceNode
from .plugin import Plugin
from .source import Source, SourceError, SourceFetcher
diff --git a/src/buildstream/_artifact.py b/src/buildstream/_artifact.py
index 1df665c14..048f09cc6 100644
--- a/src/buildstream/_artifact.py
+++ b/src/buildstream/_artifact.py
@@ -33,7 +33,7 @@ import os
from ._protos.buildstream.v2.artifact_pb2 import Artifact as ArtifactProto
from . import _yaml
from . import utils
-from .types import Scope
+from .types import _Scope
from .storage._casbaseddirectory import CasBasedDirectory
@@ -181,7 +181,7 @@ class Artifact:
size += public_data_digest.size_bytes
# store build dependencies
- for e in element._dependencies(Scope.BUILD):
+ for e in element._dependencies(_Scope.BUILD):
new_build = artifact.build_deps.add()
new_build.project_name = e.project_name
new_build.element_name = e.name
@@ -363,7 +363,7 @@ class Artifact:
# get_dependency_artifact_names()
#
- # Retrieve the artifact names of all of the dependencies in Scope.BUILD
+ # Retrieve the artifact names of all of the dependencies in _Scope.BUILD
#
# Returns:
# (list [str]): A list of refs of all build dependencies in staging order.
diff --git a/src/buildstream/_artifactelement.py b/src/buildstream/_artifactelement.py
index 53a1ff72f..b669d95f7 100644
--- a/src/buildstream/_artifactelement.py
+++ b/src/buildstream/_artifactelement.py
@@ -113,7 +113,7 @@ class ArtifactElement(Element):
# get_dependency_artifact_names()
#
- # Retrieve the artifact names of all of the dependencies in Scope.BUILD
+ # Retrieve the artifact names of all of the dependencies in _Scope.BUILD
#
# Returns:
# (list [str]): A list of artifact refs
diff --git a/src/buildstream/_elementproxy.py b/src/buildstream/_elementproxy.py
index 2022c2c81..acb08ce8b 100644
--- a/src/buildstream/_elementproxy.py
+++ b/src/buildstream/_elementproxy.py
@@ -16,9 +16,9 @@
#
# Authors:
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
-from typing import TYPE_CHECKING, cast, Optional, Iterator, Dict, List
+from typing import TYPE_CHECKING, cast, Optional, Iterator, Dict, List, Sequence
-from .types import Scope
+from .types import _Scope
from .utils import FileListResult
from ._pluginproxy import PluginProxy
@@ -55,15 +55,27 @@ class ElementProxy(PluginProxy):
def sources(self) -> Iterator["Source"]:
return cast("Element", self._plugin).sources()
- def dependencies(self, scope: Scope, *, recurse: bool = True, visited=None) -> Iterator["Element"]:
+ def dependencies(self, selection: Sequence["Element"] = None, *, recurse: bool = True) -> Iterator["Element"]:
#
- # FIXME: In the next phase, we will ensure that returned ElementProxy objects here are always
- # in the Scope.BUILD scope of the toplevel concrete Element class.
+ # When dependencies() is called on a dependency of the main plugin Element,
+ # we simply reroute the call to the original owning element, while specifying
+ # this element as the selection.
#
- return cast("Element", self._plugin).dependencies(scope, recurse=recurse, visited=visited)
+ # This ensures we only allow returning dependencies in the _Scope.RUN scope
+ # of this element.
+ #
+ if selection is None:
+ selection = [cast("Element", self._plugin)]
+
+ # Return the iterator from the called generator; this is more performant than yielding from it
+ return cast("Element", self._owner).dependencies(selection, recurse=recurse)
- def search(self, scope: Scope, name: str) -> Optional["Element"]:
- return cast("Element", self._plugin).search(scope, name)
+ def search(self, name: str) -> Optional["Element"]:
+ #
+ # Similarly to dependencies() above, we only search in the _Scope.RUN
+ # scope of the active element plugin's dependencies.
+ #
+ return cast("Element", self._plugin)._search(_Scope.RUN, name)
def node_subst_vars(self, node: "ScalarNode") -> str:
return cast("Element", self._plugin).node_subst_vars(node)
@@ -95,15 +107,20 @@ class ElementProxy(PluginProxy):
def stage_dependency_artifacts(
self,
sandbox: "Sandbox",
- scope: Scope,
+ selection: Sequence["Element"] = None,
*,
path: str = None,
include: Optional[List[str]] = None,
exclude: Optional[List[str]] = None,
orphans: bool = True
) -> None:
- return cast("Element", self._plugin).stage_dependency_artifacts(
- sandbox, scope, path=path, include=include, exclude=exclude, orphans=orphans
+ #
+ # Same approach used here as in Element.dependencies()
+ #
+ if selection is None:
+ selection = [cast("Element", self._plugin)]
+ cast("Element", self._owner).stage_dependency_artifacts(
+ sandbox, selection, path=path, include=include, exclude=exclude, orphans=orphans
)
def integrate(self, sandbox: "Sandbox") -> None:
@@ -120,3 +137,20 @@ class ElementProxy(PluginProxy):
def get_logs(self) -> List[str]:
return cast("Element", self._plugin).get_logs()
+
+ ##############################################################
+ # Element Internal APIs #
+ ##############################################################
+ #
+ # Some functions the Element expects to call directly on the
+ # proxy.
+ #
+ def _dependencies(self, scope, *, recurse=True, visited=None):
+ #
+ # Return the underlying generator directly instead of yielding from it,
+ # simply to avoid the generator overhead of re-yielding each element.
+ #
+ return cast("Element", self._plugin)._dependencies(scope, recurse=recurse, visited=visited)
+
+ def _file_is_whitelisted(self, path):
+ return cast("Element", self._plugin)._file_is_whitelisted(path)
diff --git a/src/buildstream/_frontend/app.py b/src/buildstream/_frontend/app.py
index 5d49e9612..320ae32ee 100644
--- a/src/buildstream/_frontend/app.py
+++ b/src/buildstream/_frontend/app.py
@@ -26,9 +26,6 @@ from textwrap import TextWrapper
import click
from click import UsageError
-# Import buildstream public symbols
-from .. import Scope
-
# Import various buildstream internals
from .._context import Context
from .._project import Project
@@ -36,7 +33,7 @@ from .._exceptions import BstError, StreamError, LoadError, AppError
from ..exceptions import LoadErrorReason
from .._message import Message, MessageType, unconditional_messages
from .._stream import Stream
-from ..types import _SchedulerErrorAction
+from ..types import _SchedulerErrorAction, _Scope
from .. import node
from .. import utils
from ..utils import UtilError
@@ -708,7 +705,7 @@ class App:
unique_id, element_key = element
prompt = self.shell_prompt(full_name, element_key)
self.stream.shell(
- None, Scope.BUILD, prompt, isolate=True, usebuildtree="always", unique_id=unique_id
+ None, _Scope.BUILD, prompt, isolate=True, usebuildtree="always", unique_id=unique_id
)
except BstError as e:
click.echo("Error while attempting to create interactive shell: {}".format(e), err=True)
diff --git a/src/buildstream/_frontend/cli.py b/src/buildstream/_frontend/cli.py
index b0eacbb02..946024bb7 100644
--- a/src/buildstream/_frontend/cli.py
+++ b/src/buildstream/_frontend/cli.py
@@ -614,10 +614,10 @@ def shell(app, element, mount, isolate, build_, cli_buildtree, pull_, command):
If no COMMAND is specified, the default is to attempt
to run an interactive shell.
"""
- from ..element import Scope
+ from ..element import _Scope
from .._project import HostMount
- scope = Scope.BUILD if build_ else Scope.RUN
+ scope = _Scope.BUILD if build_ else _Scope.RUN
# We may need to fetch dependency artifacts if we're pulling the artifact
selection = _PipelineSelection.ALL if pull_ else _PipelineSelection.NONE
diff --git a/src/buildstream/_frontend/widget.py b/src/buildstream/_frontend/widget.py
index dbe6b4337..65f74d475 100644
--- a/src/buildstream/_frontend/widget.py
+++ b/src/buildstream/_frontend/widget.py
@@ -27,7 +27,7 @@ from ruamel import yaml
import click
from .profile import Profile
-from .. import Scope
+from ..types import _Scope
from .. import __version__ as bst_version
from .._exceptions import BstError, ImplError
from .._message import MessageType
@@ -413,17 +413,17 @@ class LogLine(Widget):
# Dependencies
if "%{deps" in format_:
- deps = [e.name for e in element._dependencies(Scope.ALL, recurse=False)]
+ deps = [e.name for e in element._dependencies(_Scope.ALL, recurse=False)]
line = p.fmt_subst(line, "deps", yaml.safe_dump(deps, default_style=None).rstrip("\n"))
# Build Dependencies
if "%{build-deps" in format_:
- build_deps = [e.name for e in element._dependencies(Scope.BUILD, recurse=False)]
+ build_deps = [e.name for e in element._dependencies(_Scope.BUILD, recurse=False)]
line = p.fmt_subst(line, "build-deps", yaml.safe_dump(build_deps, default_style=False).rstrip("\n"))
# Runtime Dependencies
if "%{runtime-deps" in format_:
- runtime_deps = [e.name for e in element._dependencies(Scope.RUN, recurse=False)]
+ runtime_deps = [e.name for e in element._dependencies(_Scope.RUN, recurse=False)]
line = p.fmt_subst(
line, "runtime-deps", yaml.safe_dump(runtime_deps, default_style=False).rstrip("\n")
)
diff --git a/src/buildstream/_pipeline.py b/src/buildstream/_pipeline.py
index 0fb30e244..77b3c24e0 100644
--- a/src/buildstream/_pipeline.py
+++ b/src/buildstream/_pipeline.py
@@ -29,9 +29,8 @@ from pyroaring import BitMap # pylint: disable=no-name-in-module
from ._exceptions import PipelineError
from ._message import Message, MessageType
from ._profile import Topics, PROFILER
-from . import Scope
from ._project import ProjectRefStorage
-from .types import _PipelineSelection
+from .types import _PipelineSelection, _Scope
# Pipeline()
@@ -113,7 +112,7 @@ class Pipeline:
# to happen, even for large projects (tested with the Debian stack). Although,
# if it does become a problem we may have to set the recursion limit to a
# greater value.
- for element in self.dependencies(targets, Scope.ALL):
+ for element in self.dependencies(targets, _Scope.ALL):
# Determine initial element state.
element._initialize_state()
@@ -148,7 +147,7 @@ class Pipeline:
#
# Args:
# targets (list of Element): The target Elements to loop over
- # scope (Scope): The scope to iterate over
+ # scope (_Scope): The scope to iterate over
# recurse (bool): Whether to recurse into dependencies
#
def dependencies(self, targets, scope, *, recurse=True):
@@ -214,9 +213,9 @@ class Pipeline:
_PipelineSelection.NONE: lambda: targets,
_PipelineSelection.REDIRECT: redirect_and_log,
_PipelineSelection.PLAN: lambda: self.plan(targets),
- _PipelineSelection.ALL: lambda: list(self.dependencies(targets, Scope.ALL)),
- _PipelineSelection.BUILD: lambda: list(self.dependencies(targets, Scope.BUILD)),
- _PipelineSelection.RUN: lambda: list(self.dependencies(targets, Scope.RUN)),
+ _PipelineSelection.ALL: lambda: list(self.dependencies(targets, _Scope.ALL)),
+ _PipelineSelection.BUILD: lambda: list(self.dependencies(targets, _Scope.BUILD)),
+ _PipelineSelection.RUN: lambda: list(self.dependencies(targets, _Scope.RUN)),
}[mode]()
# except_elements():
@@ -238,7 +237,7 @@ class Pipeline:
if not except_targets:
return elements
- targeted = list(self.dependencies(targets, Scope.ALL))
+ targeted = list(self.dependencies(targets, _Scope.ALL))
visited = []
def find_intersection(element):
@@ -251,7 +250,7 @@ class Pipeline:
if element in targeted:
yield element
else:
- for dep in element._dependencies(Scope.ALL, recurse=False):
+ for dep in element._dependencies(_Scope.ALL, recurse=False):
yield from find_intersection(dep)
# Build a list of 'intersection' elements, i.e. the set of
@@ -272,7 +271,7 @@ class Pipeline:
continue
visited.append(element)
- queue.extend(element._dependencies(Scope.ALL, recurse=False))
+ queue.extend(element._dependencies(_Scope.ALL, recurse=False))
# That looks like a lot, but overall we only traverse (part
# of) the graph twice. This could be reduced to once if we
@@ -474,12 +473,12 @@ class _Planner:
return
self.visiting_elements.add(element)
- for dep in element._dependencies(Scope.RUN, recurse=False):
+ for dep in element._dependencies(_Scope.RUN, recurse=False):
self.plan_element(dep, depth)
# Dont try to plan builds of elements that are cached already
if not element._cached_success():
- for dep in element._dependencies(Scope.BUILD, recurse=False):
+ for dep in element._dependencies(_Scope.BUILD, recurse=False):
self.plan_element(dep, depth + 1)
self.depth_map[element] = depth
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 0df793fea..82bd4ee6f 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -48,10 +48,9 @@ from .element import Element
from ._pipeline import Pipeline
from ._profile import Topics, PROFILER
from ._state import State
-from .types import _KeyStrength, _PipelineSelection, _SchedulerErrorAction
+from .types import _KeyStrength, _PipelineSelection, _SchedulerErrorAction, _Scope
from .plugin import Plugin
from . import utils, _yaml, _site
-from . import Scope
# Stream()
@@ -169,7 +168,7 @@ class Stream:
#
# Args:
# element (Element): An Element object to run the shell for
- # scope (Scope): The scope for the shell (Scope.BUILD or Scope.RUN)
+ # scope (_Scope): The scope for the shell (_Scope.BUILD or _Scope.RUN)
# prompt (str): The prompt to display in the shell
# mounts (list of HostMount): Additional directories to mount into the sandbox
# isolate (bool): Whether to isolate the environment like we do in builds
@@ -239,7 +238,7 @@ class Stream:
buildtree = True
# Ensure we have our sources if we are launching a build shell
- if scope == Scope.BUILD and not buildtree:
+ if scope == _Scope.BUILD and not buildtree:
self._fetch([element])
self._pipeline.assert_sources_cached([element])
@@ -286,7 +285,7 @@ class Stream:
# fetch blobs of targets if options set
if self._context.pull_artifact_files:
- scope = Scope.ALL if selection == _PipelineSelection.ALL else Scope.RUN
+ scope = _Scope.ALL if selection == _PipelineSelection.ALL else _Scope.RUN
for element in self.targets:
element._set_artifact_files_required(scope=scope)
@@ -583,10 +582,10 @@ class Stream:
try:
scope = {
- _PipelineSelection.RUN: Scope.RUN,
- _PipelineSelection.BUILD: Scope.BUILD,
- _PipelineSelection.NONE: Scope.NONE,
- _PipelineSelection.ALL: Scope.ALL,
+ _PipelineSelection.RUN: _Scope.RUN,
+ _PipelineSelection.BUILD: _Scope.BUILD,
+ _PipelineSelection.NONE: _Scope.NONE,
+ _PipelineSelection.ALL: _Scope.ALL,
}
with target._prepare_sandbox(scope=scope[selection], integrate=integrate) as sandbox:
# Copy or move the sandbox to the target directory
@@ -820,7 +819,7 @@ class Stream:
for target in elements:
if not list(target.sources()):
- build_depends = [x.name for x in target._dependencies(Scope.BUILD, recurse=False)]
+ build_depends = [x.name for x in target._dependencies(_Scope.BUILD, recurse=False)]
if not build_depends:
raise StreamError("The element {} has no sources".format(target.name))
detail = "Try opening a workspace on one of its dependencies instead:\n"
@@ -1365,7 +1364,7 @@ class Stream:
# Inform the frontend of the full list of elements
# and the list of elements which will be processed in this run
#
- self.total_elements = list(self._pipeline.dependencies(self.targets, Scope.ALL))
+ self.total_elements = list(self._pipeline.dependencies(self.targets, _Scope.ALL))
if announce_session and self._session_start_callback is not None:
self._session_start_callback()
diff --git a/src/buildstream/buildelement.py b/src/buildstream/buildelement.py
index aa427064b..a7900a25d 100644
--- a/src/buildstream/buildelement.py
+++ b/src/buildstream/buildelement.py
@@ -99,8 +99,7 @@ Element.stage()
In :func:`Element.stage() <buildstream.element.Element.stage>`, the BuildElement
will do the following operations:
-* Stage all the dependencies in the :func:`Scope.BUILD <buildstream.element.Scope.BUILD>`
- scope into the sandbox root.
+* Stage all of the build dependencies into the sandbox root.
* Run the integration commands for all staged dependencies using
:func:`Element.integrate() <buildstream.element.Element.integrate>`
@@ -139,7 +138,6 @@ import os
from .element import Element
from .sandbox import SandboxFlags
-from .types import Scope
# This list is preserved because of an unfortunate situation, we
@@ -222,12 +220,12 @@ class BuildElement(Element):
# Stage deps in the sandbox root
with self.timed_activity("Staging dependencies", silent_nested=True):
- self.stage_dependency_artifacts(sandbox, Scope.BUILD)
+ self.stage_dependency_artifacts(sandbox)
# Run any integration commands provided by the dependencies
# once they are all staged and ready
with sandbox.batch(SandboxFlags.NONE, label="Integrating sandbox"):
- for dep in self.dependencies(Scope.BUILD):
+ for dep in self.dependencies():
dep.integrate(sandbox)
# Stage sources in the build root
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index ffbf8216e..8c8de614c 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -84,7 +84,7 @@ from contextlib import contextmanager, suppress
from functools import partial
from itertools import chain
import string
-from typing import cast, TYPE_CHECKING, Any, Dict, Iterator, List, Optional, Set
+from typing import cast, TYPE_CHECKING, Any, Dict, Iterator, List, Optional, Set, Sequence
from pyroaring import BitMap # pylint: disable=no-name-in-module
from ruamel import yaml
@@ -103,7 +103,7 @@ from .plugin import Plugin
from .sandbox import SandboxFlags, SandboxCommandError
from .sandbox._config import SandboxConfig
from .sandbox._sandboxremote import SandboxRemote
-from .types import CoreWarnings, Scope, _CacheBuildTrees, _KeyStrength
+from .types import CoreWarnings, _Scope, _CacheBuildTrees, _KeyStrength
from ._artifact import Artifact
from ._elementproxy import ElementProxy
from ._elementsources import ElementSources
@@ -418,28 +418,56 @@ class Element(Plugin):
"""
return self.__sources.sources()
- def dependencies(self, scope: Scope, *, recurse: bool = True, visited=None) -> Iterator["Element"]:
- """dependencies(scope, *, recurse=True)
+ def dependencies(self, selection: Sequence["Element"] = None, *, recurse: bool = True) -> Iterator["Element"]:
+ """A generator function which yields the build dependencies of the given element.
- A generator function which yields the dependencies of the given element.
+ This generator gives the Element access to all of the dependencies which it has
+ access to at build time. As explained in :ref:`the dependency type documentation <format_dependencies_types>`,
+ this includes the direct build dependencies of the element being built, along with any
+ transitive runtime dependencies of those build dependencies.
+
+ Subsets of the dependency graph can be selected using the `selection` argument, which
+ must consist of dependencies of this element. If the `selection` argument is specified as
+ `None`, then the `self` element on which this is called is used as the `selection`.
If `recurse` is specified (the default), the full dependencies will be listed
- in deterministic staging order, starting with the basemost elements in the
- given `scope`. Otherwise, if `recurse` is not specified then only the direct
- dependencies in the given `scope` will be traversed, and the element itself
- will be omitted.
+ in deterministic staging order, starting with the basemost elements. Otherwise,
+ if `recurse` is not specified then only the direct dependencies will be traversed.
Args:
- scope: The scope to iterate in
- recurse: Whether to recurse
+ selection (Sequence[Element]): A list of dependencies to select, or None
+ recurse (bool): Whether to recurse
Yields:
- The dependencies in `scope`, in deterministic staging order
+ The dependencies of the selection, in deterministic staging order
"""
- for dep in self._dependencies(scope, recurse=recurse):
- yield cast("Element", ElementProxy(self, dep))
+ #
+ # In this public API, we ensure the invariant that an element can only
+ # ever see elements in its own _Scope.BUILD scope.
+ #
+ # - Yield ElementProxy objects for every element except for the self element
+ # - When a selection is provided, ensure that we call the real _dependencies()
+ # method using _Scope.RUN
+ # - When iterating over the self element, use _Scope.BUILD
+ #
+ visited = (BitMap(), BitMap())
+ if selection is None:
+ selection = [self]
+
+ for element in selection:
+ if element is self:
+ scope = _Scope.BUILD
+ else:
+ scope = _Scope.RUN
+
+ # Elements in the `selection` will actually be `ElementProxy` objects, but
+ # those calls will be forwarded to their actual internal `_dependencies()`
+ # methods.
+ #
+ for dep in element._dependencies(scope, recurse=recurse, visited=visited):
+ yield cast("Element", ElementProxy(self, dep))
- def search(self, scope: Scope, name: str) -> Optional["Element"]:
+ def search(self, name: str) -> Optional["Element"]:
"""search(scope, *, name)
Search for a dependency by name
@@ -451,8 +479,10 @@ class Element(Plugin):
Returns:
The dependency element, or None if not found.
"""
- search = self._search(scope, name)
- if search:
+ search = self._search(_Scope.BUILD, name)
+ if search is self:
+ return self
+ elif search:
return cast("Element", ElementProxy(self, search))
return None
@@ -591,7 +621,7 @@ class Element(Plugin):
.. code:: python
# Stage the dependencies for a build of 'self'
- for dep in self.dependencies(Scope.BUILD):
+ for dep in self.dependencies():
dep.stage_artifact(sandbox)
"""
@@ -624,7 +654,7 @@ class Element(Plugin):
def stage_dependency_artifacts(
self,
sandbox: "Sandbox",
- scope: Scope,
+ selection: Sequence["Element"] = None,
*,
path: str = None,
include: Optional[List[str]] = None,
@@ -635,70 +665,32 @@ class Element(Plugin):
This is primarily a convenience wrapper around
:func:`Element.stage_artifact() <buildstream.element.Element.stage_artifact>`
- which takes care of staging all the dependencies in `scope` and issueing the
+ which takes care of staging all the dependencies in staging order and issuing the
appropriate warnings.
+ The `selection` argument will behave in the same way as specified by
+ :func:`Element.dependencies() <buildstream.element.Element.dependencies>`.
+ If the `selection` argument is specified as `None`, then the `self` element on which this
+ is called is used as the `selection`.
+
Args:
sandbox: The build sandbox
- scope: The scope to stage dependencies in
+ selection (Sequence[Element]): A list of dependencies to select, or None
path An optional sandbox relative path
include: An optional list of domains to include files from
exclude: An optional list of domains to exclude files from
orphans: Whether to include files not spoken for by split domains
Raises:
- (:class:`.ElementError`): If any of the dependencies in `scope` have not
- yet produced artifacts, or if forbidden overlaps
- occur.
+ (:class:`.ElementError`): if forbidden overlaps occur.
"""
- ignored = {}
- overlaps = OrderedDict() # type: OrderedDict[str, List[str]]
- files_written = {} # type: Dict[str, List[str]]
+ overlaps = _OverlapCollector(self)
- for dep in self.dependencies(scope):
+ for dep in self.dependencies(selection):
result = dep.stage_artifact(sandbox, path=path, include=include, exclude=exclude, orphans=orphans)
- if result.overwritten:
- for overwrite in result.overwritten:
- # Completely new overwrite
- if overwrite not in overlaps:
- # Find the overwritten element by checking where we've
- # written the element before
- for elm, contents in files_written.items():
- if overwrite in contents:
- overlaps[overwrite] = [elm, dep.name]
- else:
- overlaps[overwrite].append(dep.name)
- files_written[dep.name] = result.files_written
+ overlaps.collect_stage_result(dep.name, result)
- if result.ignored:
- ignored[dep.name] = result.ignored
-
- if overlaps:
- overlap_warning = False
- warning_detail = "Staged files overwrite existing files in staging area:\n"
- for f, elements in overlaps.items():
- overlap_warning_elements = []
- # The bottom item overlaps nothing
- overlapping_elements = elements[1:]
- for elm in overlapping_elements:
- element = cast(Element, self._search(scope, elm))
- if not element.__file_is_whitelisted(f):
- overlap_warning_elements.append(elm)
- overlap_warning = True
-
- warning_detail += _overlap_error_detail(f, overlap_warning_elements, elements)
-
- if overlap_warning:
- self.warn(
- "Non-whitelisted overlaps detected", detail=warning_detail, warning_token=CoreWarnings.OVERLAPS
- )
-
- if ignored:
- detail = "Not staging files which would replace non-empty directories:\n"
- for key, value in ignored.items():
- detail += "\nFrom {}:\n".format(key)
- detail += " " + " ".join(["/" + f + "\n" for f in value])
- self.warn("Ignored files", detail=detail)
+ overlaps.overlap_warnings()
def integrate(self, sandbox: "Sandbox") -> None:
"""Integrate currently staged filesystem against this artifact.
@@ -837,25 +829,25 @@ class Element(Plugin):
# will be omitted.
#
# Args:
- # scope (Scope): The scope to iterate in
+ # scope (_Scope): The scope to iterate in
# recurse (bool): Whether to recurse
#
# Yields:
# (Element): The dependencies in `scope`, in deterministic staging order
#
- def _dependencies(self, scope: Scope, *, recurse: bool = True, visited=None) -> Iterator["Element"]:
+ def _dependencies(self, scope, *, recurse=True, visited=None):
# The format of visited is (BitMap(), BitMap()), with the first BitMap
- # containing element that have been visited for the `Scope.BUILD` case
- # and the second one relating to the `Scope.RUN` case.
+ # containing elements that have been visited for the `_Scope.BUILD` case
+ # and the second one relating to the `_Scope.RUN` case.
if not recurse:
result: Set[Element] = set()
- if scope in (Scope.BUILD, Scope.ALL):
+ if scope in (_Scope.BUILD, _Scope.ALL):
for dep in self.__build_dependencies:
if dep not in result:
result.add(dep)
yield dep
- if scope in (Scope.RUN, Scope.ALL):
+ if scope in (_Scope.RUN, _Scope.ALL):
for dep in self.__runtime_dependencies:
if dep not in result:
result.add(dep)
@@ -863,41 +855,41 @@ class Element(Plugin):
else:
def visit(element, scope, visited):
- if scope == Scope.ALL:
+ if scope == _Scope.ALL:
visited[0].add(element._unique_id)
visited[1].add(element._unique_id)
for dep in chain(element.__build_dependencies, element.__runtime_dependencies):
if dep._unique_id not in visited[0] and dep._unique_id not in visited[1]:
- yield from visit(dep, Scope.ALL, visited)
+ yield from visit(dep, _Scope.ALL, visited)
yield element
- elif scope == Scope.BUILD:
+ elif scope == _Scope.BUILD:
visited[0].add(element._unique_id)
for dep in element.__build_dependencies:
if dep._unique_id not in visited[1]:
- yield from visit(dep, Scope.RUN, visited)
+ yield from visit(dep, _Scope.RUN, visited)
- elif scope == Scope.RUN:
+ elif scope == _Scope.RUN:
visited[1].add(element._unique_id)
for dep in element.__runtime_dependencies:
if dep._unique_id not in visited[1]:
- yield from visit(dep, Scope.RUN, visited)
+ yield from visit(dep, _Scope.RUN, visited)
yield element
else:
yield element
if visited is None:
- # Visited is of the form (Visited for Scope.BUILD, Visited for Scope.RUN)
+ # Visited is of the form (Visited for _Scope.BUILD, Visited for _Scope.RUN)
visited = (BitMap(), BitMap())
else:
# We have already a visited set passed. we might be able to short-circuit
- if scope in (Scope.BUILD, Scope.ALL) and self._unique_id in visited[0]:
+ if scope in (_Scope.BUILD, _Scope.ALL) and self._unique_id in visited[0]:
return
- if scope in (Scope.RUN, Scope.ALL) and self._unique_id in visited[1]:
+ if scope in (_Scope.RUN, _Scope.ALL) and self._unique_id in visited[1]:
return
yield from visit(self, scope, visited)
@@ -907,13 +899,13 @@ class Element(Plugin):
# Search for a dependency by name
#
# Args:
- # scope (Scope): The scope to search
+ # scope (_Scope): The scope to search
# name (str): The dependency to search for
#
# Returns:
# (Element): The dependency element, or None if not found.
#
- def _search(self, scope: Scope, name: str) -> Optional["Element"]:
+ def _search(self, scope, name):
for dep in self._dependencies(scope):
if dep.name == name:
@@ -921,6 +913,34 @@ class Element(Plugin):
return None
+ # _stage_dependency_artifacts()
+ #
+ # Stage element dependencies in scope, this is used for core
+ # functionality especially in the CLI which wants to stage specifically
+ # build or runtime dependencies.
+ #
+ # Args:
+ # sandbox: The build sandbox
+ # scope (_Scope): The scope of artifacts to stage
+ # path: An optional sandbox relative path
+ # include: An optional list of domains to include files from
+ # exclude: An optional list of domains to exclude files from
+ # orphans: Whether to include files not spoken for by split domains
+ #
+ # Raises:
+ # (:class:`.ElementError`): If any of the dependencies in `scope` have not
+ # yet produced artifacts, or if forbidden overlaps
+ # occur.
+ #
+ def _stage_dependency_artifacts(self, sandbox, scope, *, path=None, include=None, exclude=None, orphans=True):
+ overlaps = _OverlapCollector(self)
+
+ for dep in self._dependencies(scope):
+ result = dep.stage_artifact(sandbox, path=path, include=include, exclude=exclude, orphans=orphans)
+ overlaps.collect_stage_result(dep.name, result)
+
+ overlaps.overlap_warnings()
+
# _new_from_load_element():
#
# Recursively instantiate a new Element instance, its sources
@@ -1296,18 +1316,18 @@ class Element(Plugin):
self.__configure_sandbox(sandbox)
# Stage what we need
- if shell and scope == Scope.BUILD:
+ if shell and scope == _Scope.BUILD:
self.stage(sandbox)
else:
# Stage deps in the sandbox root
with self.timed_activity("Staging dependencies", silent_nested=True):
- self.stage_dependency_artifacts(sandbox, scope)
+ self._stage_dependency_artifacts(sandbox, scope)
# Run any integration commands provided by the dependencies
# once they are all staged and ready
if integrate:
with self.timed_activity("Integrating sandbox"):
- for dep in self.dependencies(scope):
+ for dep in self._dependencies(scope):
dep.integrate(sandbox)
yield sandbox
@@ -1393,7 +1413,7 @@ class Element(Plugin):
self.__required = True
# Request artifacts of runtime dependencies
- for dep in self._dependencies(Scope.RUN, recurse=False):
+ for dep in self._dependencies(_Scope.RUN, recurse=False):
dep._set_required()
# When an element becomes required, it must be assembled for
@@ -1420,7 +1440,7 @@ class Element(Plugin):
# Mark artifact files for this element and its runtime dependencies as
# required in the local cache.
#
- def _set_artifact_files_required(self, scope=Scope.RUN):
+ def _set_artifact_files_required(self, scope=_Scope.RUN):
if self.__artifact_files_required:
# Already done
return
@@ -1481,7 +1501,7 @@ class Element(Plugin):
self.__assemble_scheduled = True
# Requests artifacts of build dependencies
- for dep in self._dependencies(Scope.BUILD, recurse=False):
+ for dep in self._dependencies(_Scope.BUILD, recurse=False):
dep._set_required()
# Once we schedule an element for assembly, we know that our
@@ -1843,7 +1863,7 @@ class Element(Plugin):
# environment
#
# Args:
- # scope (Scope): Either BUILD or RUN scopes are valid, or None
+ # scope (_Scope): Either BUILD or RUN scopes are valid, or None
# mounts (list): A list of (str, str) tuples, representing host/target paths to mount
# isolate (bool): Whether to isolate the environment like we do in builds
# prompt (str): A suitable prompt string for PS1
@@ -2249,6 +2269,33 @@ class Element(Plugin):
def _add_build_dependency(self, dependency):
self.__build_dependencies.append(dependency)
+ # _file_is_whitelisted()
+ #
+ # Checks if a file is whitelisted in the overlap whitelist
+ #
+ # This is only internal (one underscore) and not locally private
+ # because it needs to be proxied through ElementProxy.
+ #
+ # Args:
+ # path (str): The path to check
+ #
+ # Returns:
+ # (bool): True if the specified `path` is whitelisted
+ #
+ def _file_is_whitelisted(self, path):
+ # Considered storing the whitelist regex for re-use, but public data
+ # can be altered mid-build.
+ # Public data is not guaranteed to stay the same for the duration of
+ # the build, but I can think of no reason to change it mid-build.
+ # If this ever changes, things will go wrong unexpectedly.
+ if not self.__whitelist_regex:
+ bstdata = self.get_public_data("bst")
+ whitelist = bstdata.get_sequence("overlap-whitelist", default=[])
+ whitelist_expressions = [utils._glob2re(self.__variables.subst(node)) for node in whitelist]
+ expression = "^(?:" + "|".join(whitelist_expressions) + ")$"
+ self.__whitelist_regex = re.compile(expression)
+ return self.__whitelist_regex.match(os.path.join(os.sep, path))
+
#############################################################
# Private Local Methods #
#############################################################
@@ -2312,7 +2359,7 @@ class Element(Plugin):
# __get_dependency_artifact_names()
#
- # Retrieve the artifact names of all of the dependencies in Scope.BUILD
+ # Retrieve the artifact names of all of the dependencies in _Scope.BUILD
#
# Returns:
# (list [str]): A list of refs of all dependencies in staging order.
@@ -2320,7 +2367,7 @@ class Element(Plugin):
def __get_dependency_artifact_names(self):
return [
os.path.join(dep.project_name, _get_normal_name(dep.name), dep._get_cache_key())
- for dep in self._dependencies(Scope.BUILD)
+ for dep in self._dependencies(_Scope.BUILD)
]
# __get_last_build_artifact()
@@ -2384,8 +2431,8 @@ class Element(Plugin):
def __preflight(self):
if self.BST_FORBID_RDEPENDS and self.BST_FORBID_BDEPENDS:
- if any(self._dependencies(Scope.RUN, recurse=False)) or any(
- self._dependencies(Scope.BUILD, recurse=False)
+ if any(self._dependencies(_Scope.RUN, recurse=False)) or any(
+ self._dependencies(_Scope.BUILD, recurse=False)
):
raise ElementError(
"{}: Dependencies are forbidden for '{}' elements".format(self, self.get_kind()),
@@ -2393,14 +2440,14 @@ class Element(Plugin):
)
if self.BST_FORBID_RDEPENDS:
- if any(self._dependencies(Scope.RUN, recurse=False)):
+ if any(self._dependencies(_Scope.RUN, recurse=False)):
raise ElementError(
"{}: Runtime dependencies are forbidden for '{}' elements".format(self, self.get_kind()),
reason="element-forbidden-rdepends",
)
if self.BST_FORBID_BDEPENDS:
- if any(self._dependencies(Scope.BUILD, recurse=False)):
+ if any(self._dependencies(_Scope.BUILD, recurse=False)):
raise ElementError(
"{}: Build dependencies are forbidden for '{}' elements".format(self, self.get_kind()),
reason="element-forbidden-bdepends",
@@ -2832,20 +2879,6 @@ class Element(Plugin):
if filter_func(filename):
yield filename
- def __file_is_whitelisted(self, path):
- # Considered storing the whitelist regex for re-use, but public data
- # can be altered mid-build.
- # Public data is not guaranteed to stay the same for the duration of
- # the build, but I can think of no reason to change it mid-build.
- # If this ever changes, things will go wrong unexpectedly.
- if not self.__whitelist_regex:
- bstdata = self.get_public_data("bst")
- whitelist = bstdata.get_sequence("overlap-whitelist", default=[])
- whitelist_expressions = [utils._glob2re(self.__variables.subst(node)) for node in whitelist]
- expression = "^(?:" + "|".join(whitelist_expressions) + ")$"
- self.__whitelist_regex = re.compile(expression)
- return self.__whitelist_regex.match(os.path.join(os.sep, path))
-
# __load_public_data():
#
# Loads the public data from the cached artifact
@@ -2957,7 +2990,7 @@ class Element(Plugin):
[e.project_name, e.name, e._get_cache_key(strength=_KeyStrength.WEAK)]
if self.BST_STRICT_REBUILD or e in self.__strict_dependencies
else [e.project_name, e.name]
- for e in self._dependencies(Scope.BUILD)
+ for e in self._dependencies(_Scope.BUILD)
]
self.__weak_cache_key = self._calculate_cache_key(dependencies)
@@ -2970,7 +3003,7 @@ class Element(Plugin):
if self.__strict_cache_key is None:
dependencies = [
[e.project_name, e.name, e.__strict_cache_key] if e.__strict_cache_key is not None else None
- for e in self._dependencies(Scope.BUILD)
+ for e in self._dependencies(_Scope.BUILD)
]
self.__strict_cache_key = self._calculate_cache_key(dependencies)
@@ -3060,7 +3093,7 @@ class Element(Plugin):
self.__cache_key = strong_key
elif self.__assemble_scheduled or self.__assemble_done:
# Artifact will or has been built, not downloaded
- dependencies = [[e.project_name, e.name, e._get_cache_key()] for e in self._dependencies(Scope.BUILD)]
+ dependencies = [[e.project_name, e.name, e._get_cache_key()] for e in self._dependencies(_Scope.BUILD)]
self.__cache_key = self._calculate_cache_key(dependencies)
if self.__cache_key is None:
@@ -3164,16 +3197,90 @@ class Element(Plugin):
self._update_ready_for_runtime_and_cached()
-def _overlap_error_detail(f, forbidden_overlap_elements, elements):
- if forbidden_overlap_elements:
- return "/{}: {} {} not permitted to overlap other elements, order {} \n".format(
- f,
- " and ".join(forbidden_overlap_elements),
- "is" if len(forbidden_overlap_elements) == 1 else "are",
- " above ".join(reversed(elements)),
- )
- else:
- return ""
+# _OverlapCollector()
+#
+# Collects results of Element.stage_artifact() and saves
+# them in order to raise a proper overlap error at the end
+# of staging.
+#
+# Args:
+# element (Element): The element for which we are staging artifacts
+#
+class _OverlapCollector:
+ def __init__(self, element):
+ self.element = element
+ self.ignored = {}
+ self.overlaps = {} # type: Dict[str, List[str]]
+ self.files_written = {} # type: Dict[str, List[str]]
+
+ # collect_stage_result()
+ #
+ # Collect and accumulate results of Element.stage_artifact()
+ #
+ # Args:
+ # element_name (str): The name of the element staged
+ # result (FileListResult): The result of Element.stage_artifact()
+ #
+ def collect_stage_result(self, element_name: str, result: FileListResult):
+ if result.overwritten:
+ for overwrite in result.overwritten:
+ # Completely new overwrite
+ if overwrite not in self.overlaps:
+ # Find the overwritten element by checking where we've
+ # written the element before
+ for elm, contents in self.files_written.items():
+ if overwrite in contents:
+ self.overlaps[overwrite] = [elm, element_name]
+ else:
+ self.overlaps[overwrite].append(element_name)
+
+ self.files_written[element_name] = result.files_written
+ if result.ignored:
+ self.ignored[element_name] = result.ignored
+
+ # overlap_warnings()
+ #
+ # Issue any warnings as a batch as a result of staging artifacts,
+ # based on the results collected with collect_stage_result().
+ #
+ def overlap_warnings(self):
+ if self.overlaps:
+ overlap_warning = False
+ warning_detail = "Staged files overwrite existing files in staging area:\n"
+ for f, elements in self.overlaps.items():
+ overlap_warning_elements = []
+ # The bottom item overlaps nothing
+ overlapping_elements = elements[1:]
+ for elm in overlapping_elements:
+ element = cast(Element, self.element.search(elm))
+ if not element._file_is_whitelisted(f):
+ overlap_warning_elements.append(elm)
+ overlap_warning = True
+
+ warning_detail += self._overlap_error_detail(f, overlap_warning_elements, elements)
+
+ if overlap_warning:
+ self.element.warn(
+ "Non-whitelisted overlaps detected", detail=warning_detail, warning_token=CoreWarnings.OVERLAPS
+ )
+
+ if self.ignored:
+ detail = "Not staging files which would replace non-empty directories:\n"
+ for key, value in self.ignored.items():
+ detail += "\nFrom {}:\n".format(key)
+ detail += " " + " ".join(["/" + f + "\n" for f in value])
+ self.element.warn("Ignored files", detail=detail)
+
+ def _overlap_error_detail(self, f, forbidden_overlap_elements, elements):
+ if forbidden_overlap_elements:
+ return "/{}: {} {} not permitted to overlap other elements, order {} \n".format(
+ f,
+ " and ".join(forbidden_overlap_elements),
+ "is" if len(forbidden_overlap_elements) == 1 else "are",
+ " above ".join(reversed(elements)),
+ )
+ else:
+ return ""
# _get_normal_name():
diff --git a/src/buildstream/plugins/elements/compose.py b/src/buildstream/plugins/elements/compose.py
index 9c136df9d..808419675 100644
--- a/src/buildstream/plugins/elements/compose.py
+++ b/src/buildstream/plugins/elements/compose.py
@@ -34,7 +34,7 @@ The default configuration and possible options are as such:
"""
import os
-from buildstream import Element, Scope
+from buildstream import Element
# Element implementation for the 'compose' kind.
@@ -90,12 +90,12 @@ class ComposeElement(Element):
# Stage deps in the sandbox root
with self.timed_activity("Staging dependencies", silent_nested=True):
- self.stage_dependency_artifacts(sandbox, Scope.BUILD)
+ self.stage_dependency_artifacts(sandbox)
manifest = set()
if require_split:
with self.timed_activity("Computing split", silent_nested=True):
- for dep in self.dependencies(Scope.BUILD):
+ for dep in self.dependencies():
files = dep.compute_manifest(
include=self.include, exclude=self.exclude, orphans=self.include_orphans
)
@@ -118,7 +118,7 @@ class ComposeElement(Element):
vbasedir.mark_unmodified()
with sandbox.batch(0):
- for dep in self.dependencies(Scope.BUILD):
+ for dep in self.dependencies():
dep.integrate(sandbox)
if require_split:
diff --git a/src/buildstream/plugins/elements/filter.py b/src/buildstream/plugins/elements/filter.py
index 9647c7191..c817ca46b 100644
--- a/src/buildstream/plugins/elements/filter.py
+++ b/src/buildstream/plugins/elements/filter.py
@@ -134,7 +134,8 @@ of the filter element:
:language: yaml
"""
-from buildstream import Element, ElementError, Scope
+from buildstream import Element, ElementError
+from buildstream.types import _Scope
class FilterElement(Element):
@@ -169,7 +170,7 @@ class FilterElement(Element):
def preflight(self):
# Exactly one build-depend is permitted
- build_deps = list(self._dependencies(Scope.BUILD, recurse=False))
+ build_deps = list(self._dependencies(_Scope.BUILD, recurse=False))
if len(build_deps) != 1:
detail = "Full list of build-depends:\n"
deps_list = " \n".join([x.name for x in build_deps])
@@ -183,7 +184,7 @@ class FilterElement(Element):
)
# That build-depend must not also be a runtime-depend
- runtime_deps = list(self._dependencies(Scope.RUN, recurse=False))
+ runtime_deps = list(self._dependencies(_Scope.RUN, recurse=False))
if build_deps[0] in runtime_deps:
detail = "Full list of runtime depends:\n"
deps_list = " \n".join([x.name for x in runtime_deps])
@@ -222,7 +223,7 @@ class FilterElement(Element):
def assemble(self, sandbox):
with self.timed_activity("Staging artifact", silent_nested=True):
- for dep in self._dependencies(Scope.BUILD, recurse=False):
+ for dep in self.dependencies(recurse=False):
# Check that all the included/excluded domains exist
pub_data = dep.get_public_data("bst")
split_rules = pub_data.get_mapping("split-rules", {})
@@ -254,14 +255,14 @@ class FilterElement(Element):
def _get_source_element(self):
# Filter elements act as proxies for their sole build-dependency
#
- build_deps = list(self._dependencies(Scope.BUILD, recurse=False))
+ build_deps = list(self._dependencies(_Scope.BUILD, recurse=False))
assert len(build_deps) == 1
output_elm = build_deps[0]._get_source_element()
return output_elm
def integrate(self, sandbox):
if self.pass_integration:
- for dep in self._dependencies(Scope.BUILD, recurse=False):
+ for dep in self.dependencies(recurse=False):
dep.integrate(sandbox)
super().integrate(sandbox)
diff --git a/src/buildstream/scriptelement.py b/src/buildstream/scriptelement.py
index d930d96b0..5ecae998c 100644
--- a/src/buildstream/scriptelement.py
+++ b/src/buildstream/scriptelement.py
@@ -38,7 +38,6 @@ from typing import List, Optional, TYPE_CHECKING
from .element import Element, ElementError
from .sandbox import SandboxFlags
-from .types import Scope
if TYPE_CHECKING:
from typing import Dict
@@ -128,8 +127,8 @@ class ScriptElement(Element):
commands will not be run.
If this function is never called, then the default behavior is to just
- stage the Scope.BUILD dependencies of the element in question at the
- sandbox root. Otherwise, the Scope.RUN dependencies of each specified
+ stage the build dependencies of the element in question at the
+ sandbox root. Otherwise, the runtime dependencies of each specified
element will be staged in their specified destination directories.
.. note::
@@ -222,12 +221,12 @@ class ScriptElement(Element):
# if no layout set, stage all dependencies into the sandbox root
with self.timed_activity("Staging dependencies", silent_nested=True):
- self.stage_dependency_artifacts(sandbox, Scope.BUILD)
+ self.stage_dependency_artifacts(sandbox)
# Run any integration commands provided by the dependencies
# once they are all staged and ready
with sandbox.batch(SandboxFlags.NONE, label="Integrating sandbox"):
- for dep in self.dependencies(Scope.BUILD):
+ for dep in self.dependencies():
dep.integrate(sandbox)
else:
# If layout, follow its rules.
@@ -237,11 +236,11 @@ class ScriptElement(Element):
if not item["element"]:
continue
- element = self.search(Scope.BUILD, item["element"])
+ element = self.search(item["element"])
with self.timed_activity(
"Staging {} at {}".format(element.name, item["destination"]), silent_nested=True
):
- element.stage_dependency_artifacts(sandbox, Scope.RUN, path=item["destination"])
+ element.stage_dependency_artifacts(sandbox, [element], path=item["destination"])
with sandbox.batch(SandboxFlags.NONE):
for item in self.__layout:
@@ -250,12 +249,12 @@ class ScriptElement(Element):
if not item["element"]:
continue
- element = self.search(Scope.BUILD, item["element"])
+ element = self.search(item["element"])
# Integration commands can only be run for elements staged to /
if item["destination"] == "/":
with self.timed_activity("Integrating {}".format(element.name), silent_nested=True):
- for dep in element.dependencies(Scope.RUN):
+ for dep in element.dependencies():
dep.integrate(sandbox)
install_root_path_components = self.__install_root.lstrip(os.sep).split(os.sep)
@@ -293,7 +292,7 @@ class ScriptElement(Element):
# of the dependencies.
for item in self.__layout:
if item["element"]:
- if not self.search(Scope.BUILD, item["element"]):
+ if not self.search(item["element"]):
raise ElementError(
"{}: '{}' in layout not found in dependencies".format(self, item["element"])
)
diff --git a/src/buildstream/types.py b/src/buildstream/types.py
index cf76defbd..3b1f7a4db 100644
--- a/src/buildstream/types.py
+++ b/src/buildstream/types.py
@@ -88,32 +88,6 @@ class FastEnum(metaclass=MetaFastEnum):
return self.__class__, (self.value,)
-class Scope(FastEnum):
- """Defines the scope of dependencies to include for a given element
- when iterating over the dependency graph in APIs like
- :func:`Element.dependencies() <buildstream.element.Element.dependencies>`
- """
-
- ALL = 1
- """All elements which the given element depends on, following
- all elements required for building. Including the element itself.
- """
-
- BUILD = 2
- """All elements required for building the element, including their
- respective run dependencies. Not including the given element itself.
- """
-
- RUN = 3
- """All elements required for running the element. Including the element
- itself.
- """
-
- NONE = 4
- """Just the element itself, no dependencies.
- """
-
-
class CoreWarnings:
"""CoreWarnings()
@@ -145,6 +119,34 @@ class CoreWarnings:
"""
+# _Scope():
+#
+# Defines the scope of dependencies to include for a given element
+# when iterating over the dependency graph in APIs like
+# Element._dependencies().
+#
+class _Scope(FastEnum):
+
+ # All elements which the given element depends on, following
+ # all elements required for building. Including the element itself.
+ #
+ ALL = 1
+
+ # All elements required for building the element, including their
+ # respective run dependencies. Not including the given element itself.
+ #
+ BUILD = 2
+
+ # All elements required for running the element. Including the element
+ # itself.
+ #
+ RUN = 3
+
+ # Just the element itself, no dependencies.
+ #
+ NONE = 4
+
+
# _KeyStrength():
#
# Strength of cache key
diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index 87bc3b325..74062ce47 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -5,7 +5,8 @@ import os
import pytest
-from buildstream import _yaml, Scope
+from buildstream import _yaml
+from buildstream.types import _Scope
from buildstream._project import Project
from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
from buildstream.testing import cli # pylint: disable=unused-import
@@ -35,7 +36,7 @@ def _push(cli, cache_dir, project_dir, config_file, target):
# Ensure the element's artifact memeber is initialised
# This is duplicated from Pipeline.resolve_elements()
# as this test does not use the cli frontend.
- for e in element._dependencies(Scope.ALL):
+ for e in element._dependencies(_Scope.ALL):
e._initialize_state()
# Manually setup the CAS remotes
diff --git a/tests/elements/filter/basic/element_plugins/dynamic.py b/tests/elements/filter/basic/element_plugins/dynamic.py
index db92a6647..16a600823 100644
--- a/tests/elements/filter/basic/element_plugins/dynamic.py
+++ b/tests/elements/filter/basic/element_plugins/dynamic.py
@@ -1,4 +1,4 @@
-from buildstream import Element, Scope
+from buildstream import Element
# Copies files from the dependent element but inserts split-rules using dynamic data
@@ -24,7 +24,7 @@ class DynamicElement(Element):
def assemble(self, sandbox):
with self.timed_activity("Staging artifact", silent_nested=True):
- for dep in self.dependencies(Scope.BUILD):
+ for dep in self.dependencies():
dep.stage_artifact(sandbox)
bstdata = self.get_public_data("bst")