summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorChandan Singh <chandan@chandansingh.net>2019-06-30 02:43:56 +0100
committerChandan Singh <chandan@chandansingh.net>2019-09-02 21:56:34 +0100
commit419a3afbe6e774c9182001977874c8ea08a9f49d (patch)
tree18d3c0f24179ae843caa76db46d4c38478e81dd7
parent3641578dcac7bd514f8e074835841f381a36c877 (diff)
downloadbuildstream-419a3afbe6e774c9182001977874c8ea08a9f49d.tar.gz
Add initial mypy configuration and types
As a first step, add type hints to variables whose type `mypy` cannot infer automatically. This is the minimal set of type hints that allow running `mypy` without any arguments, and having it not fail. We currently ignore C extensions that mypy can't process directly. Later, we can look into generating stubs for such modules (potentially automatically).
-rw-r--r--setup.cfg19
-rw-r--r--src/buildstream/_artifactelement.py9
-rw-r--r--src/buildstream/_basecache.py16
-rw-r--r--src/buildstream/_cas/casremote.py3
-rw-r--r--src/buildstream/_frontend/cli.py4
-rw-r--r--src/buildstream/_options/option.py7
-rw-r--r--src/buildstream/_scheduler/queues/queue.py11
-rw-r--r--src/buildstream/_signals.py13
-rw-r--r--src/buildstream/_yaml.pyi3
-rw-r--r--src/buildstream/element.py256
-rw-r--r--src/buildstream/plugin.py97
-rw-r--r--src/buildstream/sandbox/sandbox.py82
-rw-r--r--src/buildstream/scriptelement.py79
-rw-r--r--src/buildstream/source.py109
-rw-r--r--src/buildstream/storage/directory.py28
-rw-r--r--src/buildstream/testing/__init__.py2
-rw-r--r--src/buildstream/testing/_utils/site.py5
-rw-r--r--src/buildstream/types.py13
-rw-r--r--src/buildstream/utils.py118
-rw-r--r--tox.ini10
20 files changed, 555 insertions, 329 deletions
diff --git a/setup.cfg b/setup.cfg
index 4a0dfb54d..7a62f9282 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -29,3 +29,22 @@ markers =
max-line-length = 119
ignore = E129,E125,W504,W605
exclude = .git/**,.tox/**,.eggs/**,build/**,doc/source/conf.py,src/buildstream/_fuse/fuse.py,src/buildstream/_protos/**/*py,tmp/**
+
+[mypy]
+files = src
+warn_unused_configs = True
+warn_no_return = True
+
+# Ignore missing stubs for third-party packages.
+# In future, these should be re-enabled if/when stubs for them become available.
+[mypy-copyreg,arpy,grpc,pluginbase,psutil,py,pyroaring,pytest,_pytest.*,ruamel]
+ignore_missing_imports=True
+
+# Ignore missing stubs for Cythonized modules.
+# In future, these should be re-enabled by writing stubs for them.
+[mypy-buildstream.node,buildstream._loader.*,buildstream._types,buildstream._utils,buildstream._variables]
+ignore_missing_imports=True
+
+# Ignore issues with generated files and vendored code
+[mypy-buildstream._protos.*,buildstream._fuse.*,buildstream._version]
+ignore_errors = True
diff --git a/src/buildstream/_artifactelement.py b/src/buildstream/_artifactelement.py
index cfd3c29c8..e3bffaf72 100644
--- a/src/buildstream/_artifactelement.py
+++ b/src/buildstream/_artifactelement.py
@@ -16,12 +16,18 @@
#
# Authors:
# James Ennis <james.ennis@codethink.co.uk>
+
+from typing import TYPE_CHECKING
+
from . import Element
from . import _cachekey
from ._exceptions import ArtifactElementError
from ._loader.metaelement import MetaElement
from .types import Scope
+if TYPE_CHECKING:
+ from typing import Dict
+
# ArtifactElement()
#
@@ -33,7 +39,8 @@ from .types import Scope
#
class ArtifactElement(Element):
- __instantiated_artifacts = {} # A hash of ArtifactElement by ref
+ # A hash of ArtifactElement by ref
+ __instantiated_artifacts = {} # type: Dict[str, ArtifactElement]
def __init__(self, context, ref):
_, element, key = verify_artifact_ref(ref)
diff --git a/src/buildstream/_basecache.py b/src/buildstream/_basecache.py
index fc6087bf8..46de29f7b 100644
--- a/src/buildstream/_basecache.py
+++ b/src/buildstream/_basecache.py
@@ -19,6 +19,7 @@
import multiprocessing
import os
from fnmatch import fnmatch
+from typing import TYPE_CHECKING
from . import utils
from . import _yaml
@@ -26,6 +27,11 @@ from ._cas import CASRemote
from ._message import Message, MessageType
from ._exceptions import LoadError
+if TYPE_CHECKING:
+ from typing import Optional, Type
+ from ._exceptions import BstError
+ from ._cas import CASRemoteSpec
+
# Base Cache for Caches to derive from
#
@@ -33,11 +39,11 @@ class BaseCache():
# None of these should ever be called in the base class, but this appeases
# pylint to some degree
- spec_class = None
- spec_name = None
- spec_error = None
- config_node_name = None
- remote_class = CASRemote
+ spec_class = None # type: Type[CASRemoteSpec]
+ spec_name = None # type: str
+ spec_error = None # type: Type[BstError]
+ config_node_name = None # type: str
+ remote_class = CASRemote # type: Type[CASRemote]
def __init__(self, context):
self.context = context
diff --git a/src/buildstream/_cas/casremote.py b/src/buildstream/_cas/casremote.py
index 23240c6f7..9cc1a5488 100644
--- a/src/buildstream/_cas/casremote.py
+++ b/src/buildstream/_cas/casremote.py
@@ -62,7 +62,8 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
return CASRemoteSpec(url, push, server_cert, client_key, client_cert, instance_name)
-CASRemoteSpec.__new__.__defaults__ = (None, None, None, None)
+# Disable type-checking since "Callable[...] has no attributes __defaults__"
+CASRemoteSpec.__new__.__defaults__ = (None, None, None, None) # type: ignore
class BlobNotFound(CASRemoteError):
diff --git a/src/buildstream/_frontend/cli.py b/src/buildstream/_frontend/cli.py
index d9527b274..e5325934c 100644
--- a/src/buildstream/_frontend/cli.py
+++ b/src/buildstream/_frontend/cli.py
@@ -236,7 +236,9 @@ def override_main(self, args=None, prog_name=None, complete_var=None,
original_main = click.BaseCommand.main
-click.BaseCommand.main = override_main
+# Disable type checking since mypy doesn't support assigning to a method.
+# See https://github.com/python/mypy/issues/2427.
+click.BaseCommand.main = override_main # type: ignore
##################################################################
diff --git a/src/buildstream/_options/option.py b/src/buildstream/_options/option.py
index da1191310..51017be22 100644
--- a/src/buildstream/_options/option.py
+++ b/src/buildstream/_options/option.py
@@ -17,8 +17,13 @@
# Authors:
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
+from typing import TYPE_CHECKING
+
from ..node import _assert_symbol_name
+if TYPE_CHECKING:
+ from typing import Optional
+
# Shared symbols for validation purposes
#
@@ -41,7 +46,7 @@ class Option():
# Subclasses use this to specify the type name used
# for the yaml format and error messages
- OPTION_TYPE = None
+ OPTION_TYPE = None # type: Optional[str]
def __init__(self, name, definition, pool):
self.name = name
diff --git a/src/buildstream/_scheduler/queues/queue.py b/src/buildstream/_scheduler/queues/queue.py
index 8c81ce21d..745b59417 100644
--- a/src/buildstream/_scheduler/queues/queue.py
+++ b/src/buildstream/_scheduler/queues/queue.py
@@ -23,6 +23,7 @@ import os
from collections import deque
import heapq
import traceback
+from typing import TYPE_CHECKING
# Local imports
from ..jobs import ElementJob, JobStatus
@@ -33,6 +34,9 @@ from ..._exceptions import BstError, ImplError, set_last_task_error
from ..._message import Message, MessageType
from ...types import FastEnum
+if TYPE_CHECKING:
+ from typing import List, Optional
+
# Queue status for a given element
#
@@ -56,9 +60,10 @@ class QueueStatus(FastEnum):
class Queue():
# These should be overridden on class data of of concrete Queue implementations
- action_name = None
- complete_name = None
- resources = [] # Resources this queues' jobs want
+ action_name = None # type: Optional[str]
+ complete_name = None # type: Optional[str]
+ # Resources this queues' jobs want
+ resources = [] # type: List[int]
def __init__(self, scheduler):
diff --git a/src/buildstream/_signals.py b/src/buildstream/_signals.py
index 2df2c7915..31982c199 100644
--- a/src/buildstream/_signals.py
+++ b/src/buildstream/_signals.py
@@ -23,13 +23,22 @@ import threading
import traceback
from contextlib import contextmanager, ExitStack
from collections import deque
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from typing import Callable, MutableSequence
# Global per process state for handling of sigterm/sigtstp/sigcont,
# note that it is expected that this only ever be used by new processes
# the scheduler starts, not the main process.
-terminator_stack = deque()
-suspendable_stack = deque()
+#
+# FIXME: We should ideally be using typing.Deque as type hints below, not
+# typing.MutableSequence. However, that is only available in Python versions
+# 3.5.4 onward and 3.6.1 onward.
+# Debian 9 ships with 3.5.3.
+terminator_stack = deque() # type: MutableSequence[Callable]
+suspendable_stack = deque() # type: MutableSequence[Callable]
# Per process SIGTERM handler
diff --git a/src/buildstream/_yaml.pyi b/src/buildstream/_yaml.pyi
new file mode 100644
index 000000000..215866e3e
--- /dev/null
+++ b/src/buildstream/_yaml.pyi
@@ -0,0 +1,3 @@
+# FIXME: This file is currently a placeholder so that mypy doesn't complain
+# about `_yaml` module being missing. In future, we should consider adding
+# stubs for Cythonized code.
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index 950334695..2efdd3abb 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -83,6 +83,7 @@ from functools import partial
from itertools import chain
import tempfile
import string
+from typing import cast, TYPE_CHECKING, Any, Dict, Iterator, List, Optional
from pyroaring import BitMap # pylint: disable=no-name-in-module
@@ -91,7 +92,7 @@ from ._variables import Variables
from ._versions import BST_CORE_ARTIFACT_VERSION
from ._exceptions import BstError, LoadError, LoadErrorReason, ImplError, \
ErrorDomain, SourceCacheError
-from .utils import UtilError
+from .utils import FileListResult, UtilError
from . import utils
from . import _cachekey
from . import _signals
@@ -110,19 +111,40 @@ from .storage._filebaseddirectory import FileBasedDirectory
from .storage._casbaseddirectory import CasBasedDirectory
from .storage.directory import VirtualDirectoryError
+if TYPE_CHECKING:
+ from .node import MappingNode
+ from .types import SourceRef
+ from typing import Set, Tuple
+
+ # pylint: disable=cyclic-import
+ from .sandbox import Sandbox
+ from .source import Source
+ from ._context import Context
+ from ._loader.metaelement import MetaElement
+ from ._loader.metasource import MetaSource
+ from ._loader.metasource import MetaSource
+ from ._project import Project
+ # pylint: enable=cyclic-import
+
class ElementError(BstError):
"""This exception should be raised by :class:`.Element` implementations
to report errors to the user.
Args:
- message (str): The error message to report to the user
- detail (str): A possibly multiline, more detailed error message
- reason (str): An optional machine readable reason string, used for test cases
- collect (str): An optional directory containing partial install contents
- temporary (bool): An indicator to whether the error may occur if the operation was run again. (*Since: 1.2*)
+ message: The error message to report to the user
+ detail: A possibly multiline, more detailed error message
+ reason: An optional machine readable reason string, used for test cases
+ collect: An optional directory containing partial install contents
+ temporary: An indicator to whether the error may occur if the operation was run again. (*Since: 1.2*)
"""
- def __init__(self, message, *, detail=None, reason=None, collect=None, temporary=False):
+ def __init__(self,
+ message: str,
+ *,
+ detail: str = None,
+ reason: str = None,
+ collect: str = None,
+ temporary: bool = False):
super().__init__(message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason, temporary=temporary)
self.collect = collect
@@ -136,9 +158,12 @@ class Element(Plugin):
All elements derive from this class, this interface defines how
the core will be interacting with Elements.
"""
- __defaults = None # The defaults from the yaml file and project
- __instantiated_elements = {} # A hash of Element by MetaElement
- __redundant_source_refs = [] # A list of (source, ref) tuples which were redundantly specified
+ # The defaults from the yaml file and project
+ __defaults = None
+ # A hash of Element by MetaElement
+ __instantiated_elements = {} # type: Dict[MetaElement, Element]
+ # A list of (source, ref) tuples which were redundantly specified
+ __redundant_source_refs = [] # type: List[Tuple[Source, SourceRef]]
BST_ARTIFACT_VERSION = 0
"""The element plugin's artifact version
@@ -186,7 +211,7 @@ class Element(Plugin):
*Since: 1.4*
"""
- def __init__(self, context, project, meta, plugin_conf):
+ def __init__(self, context: 'Context', project: 'Project', meta: 'MetaElement', plugin_conf: Dict[str, Any]):
self.__cache_key_dict = None # Dict for cache key calculation
self.__cache_key = None # Our cached cache key
@@ -206,11 +231,16 @@ class Element(Plugin):
and creating directory names and such.
"""
- self.__runtime_dependencies = [] # Direct runtime dependency Elements
- self.__build_dependencies = [] # Direct build dependency Elements
- self.__strict_dependencies = [] # Direct build dependency subset which require strict rebuilds
- self.__reverse_build_deps = set() # Direct reverse build dependency Elements
- self.__reverse_runtime_deps = set() # Direct reverse runtime dependency Elements
+ # Direct runtime dependency Elements
+ self.__runtime_dependencies = [] # type: List[Element]
+ # Direct build dependency Elements
+ self.__build_dependencies = [] # type: List[Element]
+ # Direct build dependency subset which require strict rebuilds
+ self.__strict_dependencies = [] # type: List[Element]
+ # Direct reverse build dependency Elements
+ self.__reverse_build_deps = set() # type: Set[Element]
+ # Direct reverse runtime dependency Elements
+ self.__reverse_runtime_deps = set() # type: Set[Element]
self.__build_deps_without_strict_cache_key = None # Number of build dependencies without a strict key
self.__runtime_deps_without_strict_cache_key = None # Number of runtime dependencies without a strict key
self.__build_deps_without_cache_key = None # Number of build dependencies without a cache key
@@ -221,7 +251,8 @@ class Element(Plugin):
self.__ready_for_runtime = False # Whether the element and its runtime dependencies have cache keys
self.__ready_for_runtime_and_cached = False # Whether all runtime deps are cached, as well as the element
self.__cached_remotely = None # Whether the element is cached remotely
- self.__sources = [] # List of Sources
+ # List of Sources
+ self.__sources = [] # type: List[Source]
self.__weak_cache_key = None # Our cached weak cache key
self.__strict_cache_key = None # Our cached cache key for strict builds
self.__cache_keys_unstable = None # Whether the current cache keys can be considered as stable
@@ -235,13 +266,15 @@ class Element(Plugin):
self.__cached_successfully = None # If the Element is known to be successfully cached
self.__splits = None # Resolved regex objects for computing split domains
self.__whitelist_regex = None # Resolved regex object to check if file is allowed to overlap
- self.__staged_sources_directory = None # Location where Element.stage_sources() was called
+ # Location where Element.stage_sources() was called
+ self.__staged_sources_directory = None # type: Optional[str]
self.__tainted = None # Whether the artifact is tainted and should not be shared
self.__required = False # Whether the artifact is required in the current session
self.__artifact_files_required = False # Whether artifact files are required in the local cache
self.__build_result = None # The result of assembling this Element (success, description, detail)
self._build_log_path = None # The path of the build log for this Element
- self.__artifact = None # Artifact class for direct artifact composite interaction
+ # Artifact class for direct artifact composite interaction
+ self.__artifact = None # type: Optional[Artifact]
self.__strict_artifact = None # Artifact for strict cache key
self.__meta_kind = meta.kind # The kind of this source, required for unpickling
@@ -251,7 +284,8 @@ class Element(Plugin):
self.__batch_prepare_assemble = False # Whether batching across prepare()/assemble() is configured
self.__batch_prepare_assemble_flags = 0 # Sandbox flags for batching across prepare()/assemble()
- self.__batch_prepare_assemble_collect = None # Collect dir for batching across prepare()/assemble()
+ # Collect dir for batching across prepare()/assemble()
+ self.__batch_prepare_assemble_collect = None # type: Optional[str]
# Callbacks
self.__required_callback = None # Callback to Queues
@@ -310,11 +344,11 @@ class Element(Plugin):
#############################################################
# Abstract Methods #
#############################################################
- def configure_sandbox(self, sandbox):
+ def configure_sandbox(self, sandbox: 'Sandbox') -> None:
"""Configures the the sandbox for execution
Args:
- sandbox (:class:`.Sandbox`): The build sandbox
+ sandbox: The build sandbox
Raises:
(:class:`.ElementError`): When the element raises an error
@@ -325,11 +359,11 @@ class Element(Plugin):
raise ImplError("element plugin '{kind}' does not implement configure_sandbox()".format(
kind=self.get_kind()))
- def stage(self, sandbox):
+ def stage(self, sandbox: 'Sandbox') -> None:
"""Stage inputs into the sandbox directories
Args:
- sandbox (:class:`.Sandbox`): The build sandbox
+ sandbox: The build sandbox
Raises:
(:class:`.ElementError`): When the element raises an error
@@ -342,7 +376,7 @@ class Element(Plugin):
raise ImplError("element plugin '{kind}' does not implement stage()".format(
kind=self.get_kind()))
- def prepare(self, sandbox):
+ def prepare(self, sandbox: 'Sandbox') -> None:
"""Run one-off preparation commands.
This is run before assemble(), but is guaranteed to run only
@@ -351,7 +385,7 @@ class Element(Plugin):
entire element to rebuild.
Args:
- sandbox (:class:`.Sandbox`): The build sandbox
+ sandbox: The build sandbox
Raises:
(:class:`.ElementError`): When the element raises an error
@@ -362,14 +396,14 @@ class Element(Plugin):
*Since: 1.2*
"""
- def assemble(self, sandbox):
+ def assemble(self, sandbox: 'Sandbox') -> str:
"""Assemble the output artifact
Args:
- sandbox (:class:`.Sandbox`): The build sandbox
+ sandbox: The build sandbox
Returns:
- (str): An absolute path within the sandbox to collect the artifact from
+ An absolute path within the sandbox to collect the artifact from
Raises:
(:class:`.ElementError`): When the element raises an error
@@ -380,11 +414,11 @@ class Element(Plugin):
raise ImplError("element plugin '{kind}' does not implement assemble()".format(
kind=self.get_kind()))
- def generate_script(self):
+ def generate_script(self) -> str:
"""Generate a build (sh) script to build this element
Returns:
- (str): A string containing the shell commands required to build the element
+ A string containing the shell commands required to build the element
BuildStream guarantees the following environment when the
generated script is run:
@@ -405,16 +439,16 @@ class Element(Plugin):
#############################################################
# Public Methods #
#############################################################
- def sources(self):
+ def sources(self) -> Iterator['Source']:
"""A generator function to enumerate the element sources
Yields:
- (:class:`.Source`): The sources of this element
+ The sources of this element
"""
for source in self.__sources:
yield source
- def dependencies(self, scope, *, recurse=True, visited=None):
+ def dependencies(self, scope: Scope, *, recurse: bool = True, visited=None) -> Iterator['Element']:
"""dependencies(scope, *, recurse=True)
A generator function which yields the dependencies of the given element.
@@ -426,11 +460,11 @@ class Element(Plugin):
will be omitted.
Args:
- scope (:class:`.Scope`): The scope to iterate in
- recurse (bool): Whether to recurse
+ scope: The scope to iterate in
+ recurse: Whether to recurse
Yields:
- (:class:`.Element`): The dependencies in `scope`, in deterministic staging order
+ The dependencies in `scope`, in deterministic staging order
"""
# The format of visited is (BitMap(), BitMap()), with the first BitMap
# containing element that have been visited for the `Scope.BUILD` case
@@ -481,15 +515,15 @@ class Element(Plugin):
yield from visit(self, scope, visited)
- def search(self, scope, name):
+ def search(self, scope: Scope, name: str) -> Optional['Element']:
"""Search for a dependency by name
Args:
- scope (:class:`.Scope`): The scope to search
- name (str): The dependency to search for
+ scope: The scope to search
+ name: The dependency to search for
Returns:
- (:class:`.Element`): The dependency element, or None if not found.
+ The dependency element, or None if not found.
"""
for dep in self.dependencies(scope):
if dep.name == name:
@@ -500,14 +534,14 @@ class Element(Plugin):
def substitute_variables(self, value):
return self.__variables.subst(value)
- def node_subst_member(self, node, member_name, default=_node_sentinel):
+ def node_subst_member(self, node: 'MappingNode[str, Any]', member_name: str, default: str = _node_sentinel) -> Any:
"""Fetch the value of a string node member, substituting any variables
in the loaded value with the element contextual variables.
Args:
- node (:class:`MappingNode <buildstream.node.MappingNode>`): A MappingNode loaded from YAML
- member_name (str): The name of the member to fetch
- default (str): A value to return when *member_name* is not specified in *node*
+ node: A MappingNode loaded from YAML
+ member_name: The name of the member to fetch
+ default: A value to return when *member_name* is not specified in *node*
Returns:
The value of *member_name* in *node*, otherwise *default*
@@ -530,12 +564,12 @@ class Element(Plugin):
provenance = node.get_scalar(member_name).get_provenance()
raise LoadError('{}: {}'.format(provenance, e), e.reason, detail=e.detail) from e
- def node_subst_list(self, node, member_name):
+ def node_subst_list(self, node: 'MappingNode[str, Any]', member_name: str) -> List[Any]:
"""Fetch a list from a node member, substituting any variables in the list
Args:
- node (:class:`MappingNode <buildstream.node.MappingNode>`): A MappingNode loaded from YAML
- member_name (str): The name of the member to fetch (a list)
+ node: A MappingNode loaded from YAML
+ member_name: The name of the member to fetch (a list)
Returns:
The list in *member_name*
@@ -553,7 +587,11 @@ class Element(Plugin):
raise LoadError('{}: {}'.format(provenance, e), e.reason, detail=e.detail) from e
return ret
- def compute_manifest(self, *, include=None, exclude=None, orphans=True):
+ def compute_manifest(self,
+ *,
+ include: Optional[List[str]] = None,
+ exclude: Optional[List[str]] = None,
+ orphans: bool = True) -> str:
"""Compute and return this element's selective manifest
The manifest consists on the list of file paths in the
@@ -563,17 +601,17 @@ class Element(Plugin):
included unless explicitly excluded with an `exclude` domain.
Args:
- include (list): An optional list of domains to include files from
- exclude (list): An optional list of domains to exclude files from
- orphans (bool): Whether to include files not spoken for by split domains
+ include: An optional list of domains to include files from
+ exclude: An optional list of domains to exclude files from
+ orphans: Whether to include files not spoken for by split domains
Yields:
- (str): The paths of the files in manifest
+ The paths of the files in manifest
"""
self.__assert_cached()
return self.__compute_splits(include, exclude, orphans)
- def get_artifact_name(self, key=None):
+ def get_artifact_name(self, key: Optional[str] = None) -> str:
"""Compute and return this element's full artifact name
Generate a full name for an artifact, including the project
@@ -584,10 +622,10 @@ class Element(Plugin):
digits, letters and some select characters are allowed.
Args:
- key (str): The element's cache key. Defaults to None
+ key: The element's cache key. Defaults to None
Returns:
- (str): The relative path for the artifact
+ The relative path for the artifact
"""
if key is None:
key = self._get_cache_key()
@@ -596,7 +634,14 @@ class Element(Plugin):
return _compose_artifact_name(self.project_name, self.normal_name, key)
- def stage_artifact(self, sandbox, *, path=None, include=None, exclude=None, orphans=True, update_mtimes=None):
+ def stage_artifact(self,
+ sandbox: 'Sandbox',
+ *,
+ path: str = None,
+ include: Optional[List[str]] = None,
+ exclude: Optional[List[str]] = None,
+ orphans: bool = True,
+ update_mtimes: Optional[List[str]] = None) -> FileListResult:
"""Stage this element's output artifact in the sandbox
This will stage the files from the artifact to the sandbox at specified location.
@@ -605,18 +650,18 @@ class Element(Plugin):
are included unless explicitly excluded with an `exclude` domain.
Args:
- sandbox (:class:`.Sandbox`): The build sandbox
- path (str): An optional sandbox relative path
- include (list): An optional list of domains to include files from
- exclude (list): An optional list of domains to exclude files from
- orphans (bool): Whether to include files not spoken for by split domains
- update_mtimes (list): An optional list of files whose mtimes to set to the current time.
+ sandbox: The build sandbox
+ path: An optional sandbox relative path
+ include: An optional list of domains to include files from
+ exclude: An optional list of domains to exclude files from
+ orphans: Whether to include files not spoken for by split domains
+ update_mtimes: An optional list of files whose mtimes to set to the current time.
Raises:
(:class:`.ElementError`): If the element has not yet produced an artifact.
Returns:
- (:class:`~.utils.FileListResult`): The result describing what happened while staging
+ The result describing what happened while staging
.. note::
@@ -646,7 +691,9 @@ class Element(Plugin):
self.__assert_cached()
with self.timed_activity("Staging {}/{}".format(self.name, self._get_brief_display_key())):
- files_vdir = self.__artifact.get_files()
+ # Disable type checking since we can't easily tell mypy that
+ # `self.__artifact` can't be None at this stage.
+ files_vdir = self.__artifact.get_files() # type: ignore
# Hard link it into the staging area
#
@@ -679,8 +726,14 @@ class Element(Plugin):
return result
- def stage_dependency_artifacts(self, sandbox, scope, *, path=None,
- include=None, exclude=None, orphans=True):
+ def stage_dependency_artifacts(self,
+ sandbox: 'Sandbox',
+ scope: Scope,
+ *,
+ path: str = None,
+ include: Optional[List[str]] = None,
+ exclude: Optional[List[str]] = None,
+ orphans: bool = True) -> None:
"""Stage element dependencies in scope
This is primarily a convenience wrapper around
@@ -689,12 +742,12 @@ class Element(Plugin):
appropriate warnings.
Args:
- sandbox (:class:`.Sandbox`): The build sandbox
- scope (:class:`.Scope`): The scope to stage dependencies in
- path (str): An optional sandbox relative path
- include (list): An optional list of domains to include files from
- exclude (list): An optional list of domains to exclude files from
- orphans (bool): Whether to include files not spoken for by split domains
+ sandbox: The build sandbox
+ scope: The scope to stage dependencies in
+ path An optional sandbox relative path
+ include: An optional list of domains to include files from
+ exclude: An optional list of domains to exclude files from
+ orphans: Whether to include files not spoken for by split domains
Raises:
(:class:`.ElementError`): If any of the dependencies in `scope` have not
@@ -702,8 +755,8 @@ class Element(Plugin):
occur.
"""
ignored = {}
- overlaps = OrderedDict()
- files_written = {}
+ overlaps = OrderedDict() # type: OrderedDict[str, List[str]]
+ files_written = {} # type: Dict[str, List[str]]
old_dep_keys = None
workspace = self._get_workspace()
context = self._get_context()
@@ -784,7 +837,7 @@ class Element(Plugin):
# The bottom item overlaps nothing
overlapping_elements = elements[1:]
for elm in overlapping_elements:
- element = self.search(scope, elm)
+ element = cast(Element, self.search(scope, elm))
if not element.__file_is_whitelisted(f):
overlap_warning_elements.append(elm)
overlap_warning = True
@@ -802,11 +855,11 @@ class Element(Plugin):
detail += " " + " ".join(["/" + f + "\n" for f in value])
self.warn("Ignored files", detail=detail)
- def integrate(self, sandbox):
+ def integrate(self, sandbox: 'Sandbox') -> None:
"""Integrate currently staged filesystem against this artifact.
Args:
- sandbox (:class:`.Sandbox`): The build sandbox
+ sandbox: The build sandbox
This modifies the sysroot staged inside the sandbox so that
the sysroot is *integrated*. Only an *integrated* sandbox
@@ -826,12 +879,12 @@ class Element(Plugin):
sandbox.run(['sh', '-e', '-c', cmd], 0, env=environment, cwd='/',
label=cmd)
- def stage_sources(self, sandbox, directory):
+ def stage_sources(self, sandbox: 'Sandbox', directory: str) -> None:
"""Stage this element's sources to a directory in the sandbox
Args:
- sandbox (:class:`.Sandbox`): The build sandbox
- directory (str): An absolute path within the sandbox to stage the sources at
+ sandbox: The build sandbox
+ directory: An absolute path within the sandbox to stage the sources at
"""
# Hold on to the location where a plugin decided to stage sources,
@@ -843,14 +896,13 @@ class Element(Plugin):
self._stage_sources_in_sandbox(sandbox, directory)
- def get_public_data(self, domain):
+ def get_public_data(self, domain: str) -> 'MappingNode[Any, Any]':
"""Fetch public data on this element
Args:
- domain (str): A public domain name to fetch data for
+ domain: A public domain name to fetch data for
Returns:
- :class:`MappingNode <buildstream.node.MappingNode>`: The public data dictionary for the given domain
.. note::
@@ -861,18 +913,20 @@ class Element(Plugin):
if self.__dynamic_public is None:
self.__load_public_data()
- data = self.__dynamic_public.get_mapping(domain, default=None)
+ # Disable type-checking since we can't easily tell mypy that
+ # `self.__dynamic_public` can't be None here.
+ data = self.__dynamic_public.get_mapping(domain, default=None) # type: ignore
if data is not None:
data = data.clone()
return data
- def set_public_data(self, domain, data):
+ def set_public_data(self, domain: str, data: 'MappingNode[Any, Any]') -> None:
"""Set public data on this element
Args:
- domain (str): A public domain name to fetch data for
- data (:class:`MappingNode <buildstream.node.MappingNode>`): The public data dictionary for the given domain
+ domain: A public domain name to fetch data for
+ data: The public data dictionary for the given domain
This allows an element to dynamically mutate public data of
elements or add new domains as the result of success completion
@@ -885,37 +939,37 @@ class Element(Plugin):
if data is not None:
data = data.clone()
- self.__dynamic_public[domain] = data
+ self.__dynamic_public[domain] = data # type: ignore
- def get_environment(self):
+ def get_environment(self) -> Dict[str, str]:
"""Fetch the environment suitable for running in the sandbox
Returns:
- (dict): A dictionary of string key/values suitable for passing
+ A dictionary of string key/values suitable for passing
to :func:`Sandbox.run() <buildstream.sandbox.Sandbox.run>`
"""
return self.__environment
- def get_variable(self, varname):
+ def get_variable(self, varname: str) -> Optional[str]:
"""Fetch the value of a variable resolved for this element.
Args:
- varname (str): The name of the variable to fetch
+ varname: The name of the variable to fetch
Returns:
- (str): The resolved value for *varname*, or None if no
+ The resolved value for *varname*, or None if no
variable was declared with the given name.
"""
# Flat is not recognized correctly by Pylint as being a dictionary
return self.__variables.flat.get(varname) # pylint: disable=no-member
- def batch_prepare_assemble(self, flags, *, collect=None):
+ def batch_prepare_assemble(self, flags: int, *, collect: Optional[str] = None) -> None:
""" Configure command batching across prepare() and assemble()
Args:
- flags (:class:`.SandboxFlags`): The sandbox flags for the command batch
- collect (str): An optional directory containing partial install contents
- on command failure.
+ flags: The sandbox flags for the command batch
+ collect: An optional directory containing partial install contents
+ on command failure.
This may be called in :func:`Element.configure_sandbox() <buildstream.element.Element.configure_sandbox>`
to enable batching of all sandbox commands issued in prepare() and assemble().
@@ -927,13 +981,13 @@ class Element(Plugin):
self.__batch_prepare_assemble_flags = flags
self.__batch_prepare_assemble_collect = collect
- def get_logs(self):
+ def get_logs(self) -> List[str]:
"""Obtain a list of log file paths
Returns:
- (list): A list of log file paths
+ A list of log file paths
"""
- return self.__artifact.get_logs()
+ return cast(Artifact, self.__artifact).get_logs()
#############################################################
# Private Methods used in BuildStream #
diff --git a/src/buildstream/plugin.py b/src/buildstream/plugin.py
index 5783eb270..e269a4b1a 100644
--- a/src/buildstream/plugin.py
+++ b/src/buildstream/plugin.py
@@ -114,12 +114,20 @@ import os
import subprocess
import sys
from contextlib import contextmanager
+from typing import Generator, Optional, Tuple, TYPE_CHECKING
from weakref import WeakValueDictionary
from . import utils
from ._exceptions import PluginError, ImplError
from ._message import Message, MessageType
-from .types import CoreWarnings
+from .node import MappingNode, ProvenanceInformation
+from .types import CoreWarnings, SourceRef
+
+if TYPE_CHECKING:
+ # pylint: disable=cyclic-import
+ from ._context import Context
+ from ._project import Project
+ # pylint: enable=cyclic-import
class Plugin():
@@ -202,9 +210,15 @@ class Plugin():
#
# Note that Plugins can only be instantiated in the main process before
# scheduling tasks.
- __TABLE = WeakValueDictionary()
+ __TABLE = WeakValueDictionary() # type: WeakValueDictionary[int, Plugin]
- def __init__(self, name, context, project, provenance, type_tag, unique_id=None):
+ def __init__(self,
+ name: str,
+ context: 'Context',
+ project: 'Project',
+ provenance: ProvenanceInformation,
+ type_tag: str,
+ unique_id: Optional[int] = None):
self.name = name
"""The plugin name
@@ -275,11 +289,11 @@ class Plugin():
#############################################################
# Abstract Methods #
#############################################################
- def configure(self, node):
+ def configure(self, node: MappingNode) -> None:
"""Configure the Plugin from loaded configuration data
Args:
- node (:class:`MappingNode <buildstream.node.MappingNode>`): The loaded configuration dictionary
+ node: The loaded configuration dictionary
Raises:
:class:`.SourceError`: If it's a :class:`.Source` implementation
@@ -301,7 +315,7 @@ class Plugin():
raise ImplError("{tag} plugin '{kind}' does not implement configure()".format(
tag=self.__type_tag, kind=self.get_kind()))
- def preflight(self):
+ def preflight(self) -> None:
"""Preflight Check
Raises:
@@ -322,7 +336,7 @@ class Plugin():
raise ImplError("{tag} plugin '{kind}' does not implement preflight()".format(
tag=self.__type_tag, kind=self.get_kind()))
- def get_unique_key(self):
+ def get_unique_key(self) -> SourceRef:
"""Return something which uniquely identifies the plugin input
Returns:
@@ -347,11 +361,11 @@ class Plugin():
#############################################################
# Public Methods #
#############################################################
- def get_kind(self):
+ def get_kind(self) -> str:
"""Fetches the kind of this plugin
Returns:
- (str): The kind of this plugin
+ The kind of this plugin
"""
return self.__kind
@@ -398,33 +412,33 @@ class Plugin():
check_is_file=check_is_file,
check_is_dir=check_is_dir)
- def debug(self, brief, *, detail=None):
+ def debug(self, brief: str, *, detail: Optional[str] = None) -> None:
"""Print a debugging message
Args:
- brief (str): The brief message
- detail (str): An optional detailed message, can be multiline output
+ brief: The brief message
+ detail: An optional detailed message, can be multiline output
"""
if self.__context.log_debug:
self.__message(MessageType.DEBUG, brief, detail=detail)
- def status(self, brief, *, detail=None):
+ def status(self, brief: str, *, detail: Optional[str] = None) -> None:
"""Print a status message
Args:
- brief (str): The brief message
- detail (str): An optional detailed message, can be multiline output
+ brief: The brief message
+ detail: An optional detailed message, can be multiline output
Note: Status messages tell about what a plugin is currently doing
"""
self.__message(MessageType.STATUS, brief, detail=detail)
- def info(self, brief, *, detail=None):
+ def info(self, brief: str, *, detail: Optional[str] = None) -> None:
"""Print an informative message
Args:
- brief (str): The brief message
- detail (str): An optional detailed message, can be multiline output
+ brief: The brief message
+ detail: An optional detailed message, can be multiline output
Note: Informative messages tell the user something they might want
to know, like if refreshing an element caused it to change.
@@ -434,15 +448,15 @@ class Plugin():
"""
self.__message(MessageType.INFO, brief, detail=detail)
- def warn(self, brief, *, detail=None, warning_token=None):
+ def warn(self, brief: str, *, detail: Optional[str] = None, warning_token: Optional[str] = None) -> None:
"""Print a warning message, checks warning_token against project configuration
Args:
- brief (str): The brief message
- detail (str): An optional detailed message, can be multiline output
- warning_token (str): An optional configurable warning assosciated with this warning,
- this will cause PluginError to be raised if this warning is configured as fatal.
- (*Since 1.4*)
+ brief: The brief message
+ detail: An optional detailed message, can be multiline output
+ warning_token: An optional configurable warning associated with this warning,
+ this will cause PluginError to be raised if this warning is configured as fatal.
+ (*Since 1.4*)
Raises:
(:class:`.PluginError`): When warning_token is considered fatal by the project configuration
@@ -458,26 +472,30 @@ class Plugin():
self.__message(MessageType.WARN, brief=brief, detail=detail)
- def log(self, brief, *, detail=None):
+ def log(self, brief: str, *, detail: Optional[str] = None) -> None:
"""Log a message into the plugin's log file
The message will not be shown in the master log at all (so it will not
be displayed to the user on the console).
Args:
- brief (str): The brief message
- detail (str): An optional detailed message, can be multiline output
+ brief: The brief message
+ detail: An optional detailed message, can be multiline output
"""
self.__message(MessageType.LOG, brief, detail=detail)
@contextmanager
- def timed_activity(self, activity_name, *, detail=None, silent_nested=False):
+ def timed_activity(self,
+ activity_name: str,
+ *,
+ detail: Optional[str] = None,
+ silent_nested: bool = False) -> Generator[None, None, None]:
"""Context manager for performing timed activities in plugins
Args:
- activity_name (str): The name of the activity
- detail (str): An optional detailed message, can be multiline output
- silent_nested (bool): If specified, nested messages will be silenced
+ activity_name: The name of the activity
+ detail: An optional detailed message, can be multiline output
+ silent_nested: If specified, nested messages will be silenced
This function lets you perform timed tasks in your plugin,
the core will take care of timing the duration of your
@@ -499,19 +517,19 @@ class Plugin():
silent_nested=silent_nested):
yield
- def call(self, *popenargs, fail=None, fail_temporarily=False, **kwargs):
+ def call(self, *popenargs, fail: Optional[str] = None, fail_temporarily: bool = False, **kwargs) -> int:
"""A wrapper for subprocess.call()
Args:
popenargs (list): Popen() arguments
- fail (str): A message to display if the process returns
- a non zero exit code
- fail_temporarily (bool): Whether any exceptions should
- be raised as temporary. (*Since: 1.2*)
+ fail: A message to display if the process returns
+ a non zero exit code
+ fail_temporarily: Whether any exceptions should
+ be raised as temporary. (*Since: 1.2*)
rest_of_args (kwargs): Remaining arguments to subprocess.call()
Returns:
- (int): The process exit code.
+ The process exit code.
Raises:
(:class:`.PluginError`): If a non-zero return code is received and *fail* is specified
@@ -533,7 +551,7 @@ class Plugin():
exit_code, _ = self.__call(*popenargs, fail=fail, fail_temporarily=fail_temporarily, **kwargs)
return exit_code
- def check_output(self, *popenargs, fail=None, fail_temporarily=False, **kwargs):
+ def check_output(self, *popenargs, fail=None, fail_temporarily=False, **kwargs) -> Tuple[int, str]:
"""A wrapper for subprocess.check_output()
Args:
@@ -545,8 +563,7 @@ class Plugin():
rest_of_args (kwargs): Remaining arguments to subprocess.call()
Returns:
- (int): The process exit code
- (str): The process standard output
+ A 2-tuple of form (process exit code, process standard output)
Raises:
(:class:`.PluginError`): If a non-zero return code is received and *fail* is specified
diff --git a/src/buildstream/sandbox/sandbox.py b/src/buildstream/sandbox/sandbox.py
index 879587748..b4691fe3f 100644
--- a/src/buildstream/sandbox/sandbox.py
+++ b/src/buildstream/sandbox/sandbox.py
@@ -33,12 +33,22 @@ import os
import shlex
import contextlib
from contextlib import contextmanager
+from typing import Dict, Generator, List, Optional, TYPE_CHECKING
from .._exceptions import ImplError, BstError, SandboxError
from .._message import Message, MessageType
+from ..storage.directory import Directory
from ..storage._filebaseddirectory import FileBasedDirectory
from ..storage._casbaseddirectory import CasBasedDirectory
+if TYPE_CHECKING:
+ from typing import Union
+
+ # pylint: disable=cyclic-import
+ from .._context import Context
+ from .._project import Project
+ # pylint: enable=cyclic-import
+
class SandboxFlags():
"""Flags indicating how the sandbox should be run.
@@ -109,15 +119,15 @@ class Sandbox():
'/dev/zero',
'/dev/null'
]
- _dummy_reasons = []
+ _dummy_reasons = [] # type: List[str]
- def __init__(self, context, project, directory, **kwargs):
+ def __init__(self, context: 'Context', project: 'Project', directory: str, **kwargs):
self.__context = context
self.__project = project
- self.__directories = []
- self.__cwd = None
- self.__env = None
- self.__mount_sources = {}
+ self.__directories = [] # type: List[Dict[str, Union[int, str]]]
+ self.__cwd = None # type: Optional[str]
+ self.__env = None # type: Optional[Dict[str, str]]
+ self.__mount_sources = {} # type: Dict[str, str]
self.__allow_real_directory = kwargs['allow_real_directory']
self.__allow_run = True
@@ -148,10 +158,10 @@ class Sandbox():
for directory_ in [self._root, self.__scratch]:
os.makedirs(directory_, exist_ok=True)
- self._output_directory = None
+ self._output_directory = None # type: Optional[str]
self._build_directory = None
self._build_directory_always = None
- self._vdir = None
+ self._vdir = None # type: Optional[Directory]
self._usebuildtree = False
# This is set if anyone requests access to the underlying
@@ -161,7 +171,7 @@ class Sandbox():
# Pending command batch
self.__batch = None
- def get_directory(self):
+ def get_directory(self) -> str:
"""Fetches the sandbox root directory
The root directory is where artifacts for the base
@@ -169,7 +179,7 @@ class Sandbox():
BST_VIRTUAL_DIRECTORY is not set.
Returns:
- (str): The sandbox root directory
+ The sandbox root directory
"""
if self.__allow_real_directory:
@@ -178,7 +188,7 @@ class Sandbox():
else:
raise BstError("You can't use get_directory")
- def get_virtual_directory(self):
+ def get_virtual_directory(self) -> Directory:
"""Fetches the sandbox root directory as a virtual Directory.
The root directory is where artifacts for the base
@@ -191,7 +201,7 @@ class Sandbox():
must call get_virtual_directory again to get a new copy.
Returns:
- (Directory): The sandbox root directory
+ The sandbox root directory
"""
if self._vdir is None or self._never_cache_vdirs:
@@ -208,38 +218,38 @@ class Sandbox():
"""
self._vdir = virtual_directory
- def set_environment(self, environment):
+ def set_environment(self, environment: Dict[str, str]) -> None:
"""Sets the environment variables for the sandbox
Args:
- environment (dict): The environment variables to use in the sandbox
+ environment: The environment variables to use in the sandbox
"""
self.__env = environment
- def set_work_directory(self, directory):
+ def set_work_directory(self, directory: str) -> None:
"""Sets the work directory for commands run in the sandbox
Args:
- directory (str): An absolute path within the sandbox
+ directory: An absolute path within the sandbox
"""
self.__cwd = directory
- def set_output_directory(self, directory):
+ def set_output_directory(self, directory: str) -> None:
"""Sets the output directory - the directory which is preserved
as an artifact after assembly.
Args:
- directory (str): An absolute path within the sandbox
+ directory: An absolute path within the sandbox
"""
self._output_directory = directory
- def mark_directory(self, directory, *, artifact=False):
+ def mark_directory(self, directory: str, *, artifact: bool = False) -> None:
"""Marks a sandbox directory and ensures it will exist
Args:
- directory (str): An absolute path within the sandbox to mark
- artifact (bool): Whether the content staged at this location
- contains artifacts
+ directory: An absolute path within the sandbox to mark
+ artifact: Whether the content staged at this location
+ contains artifacts
.. note::
Any marked directories will be read-write in the sandboxed
@@ -250,7 +260,13 @@ class Sandbox():
'artifact': artifact
})
- def run(self, command, flags, *, cwd=None, env=None, label=None):
+ def run(self,
+ command: List[str],
+ flags: int,
+ *,
+ cwd: Optional[str] = None,
+ env: Optional[Dict[str, str]] = None,
+ label: str = None) -> Optional[int]:
"""Run a command in the sandbox.
If this is called outside a batch context, the command is immediately
@@ -261,16 +277,16 @@ class Sandbox():
executed. Command flags must match batch flags.
Args:
- command (list): The command to run in the sandboxed environment, as a list
- of strings starting with the binary to run.
+ command: The command to run in the sandboxed environment, as a list
+ of strings starting with the binary to run.
flags (:class:`.SandboxFlags`): The flags for running this command.
- cwd (str): The sandbox relative working directory in which to run the command.
- env (dict): A dictionary of string key, value pairs to set as environment
- variables inside the sandbox environment.
- label (str): An optional label for the command, used for logging. (*Since: 1.4*)
+ cwd: The sandbox relative working directory in which to run the command.
+ env: A dictionary of string key, value pairs to set as environment
+ variables inside the sandbox environment.
+ label: An optional label for the command, used for logging. (*Since: 1.4*)
Returns:
- (int|None): The program exit code, or None if running in batch context.
+ The program exit code, or None if running in batch context.
Raises:
(:class:`.ProgramNotFoundError`): If a host tool which the given sandbox
@@ -310,7 +326,7 @@ class Sandbox():
return self._run(command, flags, cwd=cwd, env=env)
@contextmanager
- def batch(self, flags, *, label=None, collect=None):
+ def batch(self, flags: int, *, label: str = None, collect: str = None) -> Generator[None, None, None]:
"""Context manager for command batching
This provides a batch context that defers execution of commands until
@@ -322,8 +338,8 @@ class Sandbox():
Args:
flags (:class:`.SandboxFlags`): The flags for this command batch.
- label (str): An optional label for the batch group, used for logging.
- collect (str): An optional directory containing partial install contents
+ label: An optional label for the batch group, used for logging.
+ collect: An optional directory containing partial install contents
on command failure.
Raises:
diff --git a/src/buildstream/scriptelement.py b/src/buildstream/scriptelement.py
index 8023e64d2..e78049b4a 100644
--- a/src/buildstream/scriptelement.py
+++ b/src/buildstream/scriptelement.py
@@ -34,18 +34,22 @@ implementations.
import os
from collections import OrderedDict
+from typing import List, Optional, TYPE_CHECKING
from .element import Element, ElementError
from .sandbox import SandboxFlags
from .types import Scope
+if TYPE_CHECKING:
+ from typing import Dict
+
class ScriptElement(Element):
__install_root = "/"
__cwd = "/"
__root_read_only = False
- __commands = None
- __layout = []
+ __commands = None # type: OrderedDict[str, List[str]]
+ __layout = [] # type: List[Dict[str, Optional[str]]]
# The compose element's output is its dependencies, so
# we must rebuild if the dependencies change even when
@@ -61,14 +65,18 @@ class ScriptElement(Element):
# added, to reduce the potential for confusion
BST_FORBID_SOURCES = True
- def set_work_dir(self, work_dir=None):
+ #############################################################
+ # Public Methods #
+ #############################################################
+
+ def set_work_dir(self, work_dir: Optional[str] = None) -> None:
"""Sets the working dir
The working dir (a.k.a. cwd) is the directory which commands will be
called from.
Args:
- work_dir (str): The working directory. If called without this argument
+ work_dir: The working directory. If called without this argument
set, it'll default to the value of the variable ``cwd``.
"""
if work_dir is None:
@@ -76,14 +84,14 @@ class ScriptElement(Element):
else:
self.__cwd = work_dir
- def set_install_root(self, install_root=None):
+ def set_install_root(self, install_root: Optional[str] = None) -> None:
"""Sets the install root
The install root is the directory which output will be collected from
once the commands have been run.
Args:
- install_root(str): The install root. If called without this argument
+ install_root: The install root. If called without this argument
set, it'll default to the value of the variable ``install-root``.
"""
if install_root is None:
@@ -91,7 +99,7 @@ class ScriptElement(Element):
else:
self.__install_root = install_root
- def set_root_read_only(self, root_read_only):
+ def set_root_read_only(self, root_read_only: bool) -> None:
"""Sets root read-only
When commands are run, if root_read_only is true, then the root of the
@@ -101,24 +109,23 @@ class ScriptElement(Element):
If this variable is not set, the default permission is read-write.
Args:
- root_read_only (bool): Whether to mark the root filesystem as
- read-only.
+ root_read_only: Whether to mark the root filesystem as read-only.
"""
self.__root_read_only = root_read_only
- def layout_add(self, element, destination):
+ def layout_add(self, element: Optional[str], destination: str) -> None:
"""Adds an element-destination pair to the layout.
Layout is a way of defining how dependencies should be added to the
staging area for running commands.
Args:
- element (str): The name of the element to stage, or None. This may be any
- element found in the dependencies, whether it is a direct
- or indirect dependency.
- destination (str): The path inside the staging area for where to
- stage this element. If it is not "/", then integration
- commands will not be run.
+ element: The name of the element to stage, or None. This may be any
+ element found in the dependencies, whether it is a direct
+ or indirect dependency.
+ destination: The path inside the staging area for where to
+ stage this element. If it is not "/", then integration
+ commands will not be run.
If this function is never called, then the default behavior is to just
stage the Scope.BUILD dependencies of the element in question at the
@@ -145,7 +152,7 @@ class ScriptElement(Element):
self.__layout.append({"element": element,
"destination": destination})
- def add_commands(self, group_name, command_list):
+ def add_commands(self, group_name: str, command_list: List[str]) -> None:
"""Adds a list of commands under the group-name.
.. note::
@@ -166,21 +173,9 @@ class ScriptElement(Element):
self.__commands = OrderedDict()
self.__commands[group_name] = command_list
- def __validate_layout(self):
- if self.__layout:
- # Cannot proceeed if layout is used, but none are for "/"
- root_defined = any([(entry['destination'] == '/') for entry in self.__layout])
- if not root_defined:
- raise ElementError("{}: Using layout, but none are staged as '/'"
- .format(self))
-
- # Cannot proceed if layout specifies an element that isn't part
- # of the dependencies.
- for item in self.__layout:
- if item['element']:
- if not self.search(Scope.BUILD, item['element']):
- raise ElementError("{}: '{}' in layout not found in dependencies"
- .format(self, item['element']))
+ #############################################################
+ # Abstract Method Implementations #
+ #############################################################
def preflight(self):
# The layout, if set, must make sense.
@@ -295,6 +290,26 @@ class ScriptElement(Element):
# Return where the result can be collected from
return self.__install_root
+ #############################################################
+ # Private Local Methods #
+ #############################################################
+
+ def __validate_layout(self):
+ if self.__layout:
+ # Cannot proceed if layout is used, but none are for "/"
+ root_defined = any([(entry['destination'] == '/') for entry in self.__layout])
+ if not root_defined:
+ raise ElementError("{}: Using layout, but none are staged as '/'"
+ .format(self))
+
+ # Cannot proceed if layout specifies an element that isn't part
+ # of the dependencies.
+ for item in self.__layout:
+ if item['element']:
+ if not self.search(Scope.BUILD, item['element']):
+ raise ElementError("{}: '{}' in layout not found in dependencies"
+ .format(self, item['element']))
+
def setup():
return ScriptElement
diff --git a/src/buildstream/source.py b/src/buildstream/source.py
index c5fb61415..cea68d57e 100644
--- a/src/buildstream/source.py
+++ b/src/buildstream/source.py
@@ -163,10 +163,12 @@ Class Reference
import os
from contextlib import contextmanager
+from typing import Iterable, Iterator, Optional, Tuple, TYPE_CHECKING
from . import _yaml, utils
+from .node import MappingNode
from .plugin import Plugin
-from .types import Consistency
+from .types import Consistency, SourceRef
from ._exceptions import BstError, ImplError, ErrorDomain
from ._loader.metasource import MetaSource
from ._projectrefs import ProjectRefStorage
@@ -174,18 +176,31 @@ from ._cachekey import generate_key
from .storage import FileBasedDirectory
from .storage.directory import Directory, VirtualDirectoryError
+if TYPE_CHECKING:
+ from typing import Any, Dict, Set
+
+ # pylint: disable=cyclic-import
+ from ._context import Context
+ from ._project import Project
+ # pylint: enable=cyclic-import
+
class SourceError(BstError):
"""This exception should be raised by :class:`.Source` implementations
to report errors to the user.
Args:
- message (str): The breif error description to report to the user
- detail (str): A possibly multiline, more detailed error message
- reason (str): An optional machine readable reason string, used for test cases
- temporary (bool): An indicator to whether the error may occur if the operation was run again. (*Since: 1.2*)
+ message: The brief error description to report to the user
+ detail: A possibly multiline, more detailed error message
+ reason: An optional machine readable reason string, used for test cases
+ temporary: An indicator as to whether the error may occur if the operation was run again. (*Since: 1.2*)
"""
- def __init__(self, message, *, detail=None, reason=None, temporary=False):
+ def __init__(self,
+ message: str,
+ *,
+ detail: Optional[str] = None,
+ reason: Optional[str] = None,
+ temporary: bool = False):
super().__init__(message, detail=detail, domain=ErrorDomain.SOURCE, reason=reason, temporary=temporary)
@@ -211,12 +226,12 @@ class SourceFetcher():
#############################################################
# Abstract Methods #
#############################################################
- def fetch(self, alias_override=None, **kwargs):
+ def fetch(self, alias_override: Optional[str] = None, **kwargs) -> None:
"""Fetch remote sources and mirror them locally, ensuring at least
that the specific reference is cached locally.
Args:
- alias_override (str): The alias to use instead of the default one
+ alias_override: The alias to use instead of the default one
defined by the :ref:`aliases <project_source_aliases>` field
in the project's config.
@@ -231,13 +246,13 @@ class SourceFetcher():
#############################################################
# Public Methods #
#############################################################
- def mark_download_url(self, url):
+ def mark_download_url(self, url: str) -> None:
"""Identifies the URL that this SourceFetcher uses to download
This must be called during the fetcher's initialization
Args:
- url (str): The url used to download.
+ url: The url used to download.
"""
self.__alias = _extract_alias(url)
@@ -258,7 +273,8 @@ class Source(Plugin):
All Sources derive from this class, this interface defines how
the core will be interacting with Sources.
"""
- __defaults = None # The defaults from the project
+ # The defaults from the project
+ __defaults = None # type: Optional[Dict[str, Any]]
BST_REQUIRES_PREVIOUS_SOURCES_TRACK = False
"""Whether access to previous sources is required during track
@@ -306,7 +322,13 @@ class Source(Plugin):
*Since: 1.4*
"""
- def __init__(self, context, project, meta, *, alias_override=None, unique_id=None):
+ def __init__(self,
+ context: 'Context',
+ project: 'Project',
+ meta: MetaSource,
+ *,
+ alias_override: Optional[Tuple[str, str]] = None,
+ unique_id: Optional[int] = None):
provenance = meta.config.get_provenance()
# Set element_name member before parent init, as needed for debug messaging
self.__element_name = meta.element_name # The name of the element owning this source
@@ -324,7 +346,8 @@ class Source(Plugin):
# The alias_override is only set on a re-instantiated Source
self.__alias_override = alias_override # Tuple of alias and its override to use instead
self.__expected_alias = None # The primary alias
- self.__marked_urls = set() # Set of marked download URLs
+ # Set of marked download URLs
+ self.__marked_urls = set() # type: Set[str]
# Collect the composited element configuration and
# ask the element to configure itself.
@@ -333,7 +356,7 @@ class Source(Plugin):
self.__first_pass = meta.first_pass
# cached values for commonly access values on the source
- self.__mirror_directory = None
+ self.__mirror_directory = None # type: Optional[str]
self._configure(self.__config)
@@ -347,7 +370,7 @@ class Source(Plugin):
#############################################################
# Abstract Methods #
#############################################################
- def get_consistency(self):
+ def get_consistency(self) -> int:
"""Report whether the source has a resolved reference
Returns:
@@ -355,11 +378,11 @@ class Source(Plugin):
"""
raise ImplError("Source plugin '{}' does not implement get_consistency()".format(self.get_kind()))
- def load_ref(self, node):
+ def load_ref(self, node: MappingNode) -> None:
"""Loads the *ref* for this Source from the specified *node*.
Args:
- node (:class:`MappingNode <buildstream.node.MappingNode>`): The YAML node to load the ref from
+ node: The YAML node to load the ref from
.. note::
@@ -373,7 +396,7 @@ class Source(Plugin):
"""
raise ImplError("Source plugin '{}' does not implement load_ref()".format(self.get_kind()))
- def get_ref(self):
+ def get_ref(self) -> SourceRef:
"""Fetch the internal ref, however it is represented
Returns:
@@ -391,12 +414,12 @@ class Source(Plugin):
"""
raise ImplError("Source plugin '{}' does not implement get_ref()".format(self.get_kind()))
- def set_ref(self, ref, node):
+ def set_ref(self, ref: SourceRef, node: MappingNode) -> None:
"""Applies the internal ref, however it is represented
Args:
ref (simple object): The internal source reference to set, or ``None``
- node (:class:`MappingNode <buildstream.node.MappingNode>`): The same dictionary which was previously passed
+ node: The same dictionary which was previously passed
to :func:`Plugin.configure() <buildstream.plugin.Plugin.configure>`
See :func:`Source.get_ref() <buildstream.source.Source.get_ref>`
@@ -409,7 +432,7 @@ class Source(Plugin):
"""
raise ImplError("Source plugin '{}' does not implement set_ref()".format(self.get_kind()))
- def track(self, **kwargs):
+ def track(self, **kwargs) -> SourceRef:
"""Resolve a new ref from the plugin's track option
Args:
@@ -437,7 +460,7 @@ class Source(Plugin):
# Allow a non implementation
return None
- def fetch(self, **kwargs):
+ def fetch(self, **kwargs) -> None:
"""Fetch remote sources and mirror them locally, ensuring at least
that the specific reference is cached locally.
@@ -455,11 +478,11 @@ class Source(Plugin):
"""
raise ImplError("Source plugin '{}' does not implement fetch()".format(self.get_kind()))
- def stage(self, directory):
+ def stage(self, directory: str) -> None:
"""Stage the sources to a directory
Args:
- directory (str): Path to stage the source
+ directory: Path to stage the source
Raises:
:class:`.SourceError`
@@ -472,11 +495,11 @@ class Source(Plugin):
"""
raise ImplError("Source plugin '{}' does not implement stage()".format(self.get_kind()))
- def init_workspace(self, directory):
+ def init_workspace(self, directory: str) -> None:
"""Initialises a new workspace
Args:
- directory (str): Path of the workspace to init
+ directory: Path of the workspace to init
Raises:
:class:`.SourceError`
@@ -492,7 +515,7 @@ class Source(Plugin):
"""
self.stage(directory)
- def get_source_fetchers(self):
+ def get_source_fetchers(self) -> Iterable[SourceFetcher]:
"""Get the objects that are used for fetching
If this source doesn't download from multiple URLs,
@@ -500,7 +523,7 @@ class Source(Plugin):
is recommended.
Returns:
- iterable: The Source's SourceFetchers, if any.
+ The Source's SourceFetchers, if any.
.. note::
@@ -514,7 +537,7 @@ class Source(Plugin):
"""
return []
- def validate_cache(self):
+ def validate_cache(self) -> None:
"""Implement any validations once we know the sources are cached
This is guaranteed to be called only once for a given session
@@ -530,11 +553,11 @@ class Source(Plugin):
#############################################################
# Public Methods #
#############################################################
- def get_mirror_directory(self):
+ def get_mirror_directory(self) -> str:
"""Fetches the directory where this source should store things
Returns:
- (str): The directory belonging to this source
+ The directory belonging to this source
"""
if self.__mirror_directory is None:
# Create the directory if it doesnt exist
@@ -545,17 +568,17 @@ class Source(Plugin):
return self.__mirror_directory
- def translate_url(self, url, *, alias_override=None, primary=True):
+ def translate_url(self, url: str, *, alias_override: Optional[str] = None, primary: bool = True) -> str:
"""Translates the given url which may be specified with an alias
into a fully qualified url.
Args:
- url (str): A URL, which may be using an alias
- alias_override (str): Optionally, an URI to override the alias with. (*Since: 1.2*)
- primary (bool): Whether this is the primary URL for the source. (*Since: 1.2*)
+ url: A URL, which may be using an alias
+ alias_override: Optionally, an URI to override the alias with. (*Since: 1.2*)
+ primary: Whether this is the primary URL for the source. (*Since: 1.2*)
Returns:
- str: The fully qualified URL, with aliases resolved
+ The fully qualified URL, with aliases resolved
.. note::
This must be called for every URL in the configuration during
@@ -578,8 +601,8 @@ class Source(Plugin):
# specific alias, so that sources that fetch from multiple
# URLs and use different aliases default to only overriding
# one alias, rather than getting confused.
- override_alias = self.__alias_override[0]
- override_url = self.__alias_override[1]
+ override_alias = self.__alias_override[0] # type: ignore
+ override_url = self.__alias_override[1] # type: ignore
if url_alias == override_alias:
url = override_url + url_body
return url
@@ -587,7 +610,7 @@ class Source(Plugin):
project = self._get_project()
return project.translate_url(url, first_pass=self.__first_pass)
- def mark_download_url(self, url, *, primary=True):
+ def mark_download_url(self, url: str, *, primary: bool = True) -> None:
"""Identifies the URL that this Source uses to download
Args:
@@ -634,24 +657,24 @@ class Source(Plugin):
assert (url in self.__marked_urls or not _extract_alias(url)), \
"URL was not seen at configure time: {}".format(url)
- def get_project_directory(self):
+ def get_project_directory(self) -> str:
"""Fetch the project base directory
This is useful for sources which need to load resources
stored somewhere inside the project.
Returns:
- str: The project base directory
+ The project base directory
"""
project = self._get_project()
return project.directory
@contextmanager
- def tempdir(self):
+ def tempdir(self) -> Iterator[str]:
"""Context manager for working in a temporary directory
Yields:
- (str): A path to a temporary directory
+ A path to a temporary directory
This should be used by source plugins directly instead of the tempfile
module. This one will automatically cleanup in case of termination by
diff --git a/src/buildstream/storage/directory.py b/src/buildstream/storage/directory.py
index 5039e0af9..29cbb53f2 100644
--- a/src/buildstream/storage/directory.py
+++ b/src/buildstream/storage/directory.py
@@ -32,9 +32,11 @@ See also: :ref:`sandboxing`.
"""
+from typing import Callable, Optional, Union
+
from .._exceptions import BstError, ErrorDomain
from ..types import FastEnum
-from ..utils import BST_ARBITRARY_TIMESTAMP
+from ..utils import BST_ARBITRARY_TIMESTAMP, FileListResult
class VirtualDirectoryError(BstError):
@@ -52,13 +54,13 @@ class Directory():
def __init__(self, external_directory=None):
raise NotImplementedError()
- def descend(self, *paths, create=False, follow_symlinks=False):
+ def descend(self, *paths: str, create: bool = False, follow_symlinks: bool = False):
"""Descend one or more levels of directory hierarchy and return a new
Directory object for that directory.
Args:
- *paths (str): A list of strings which are all directory names.
- create (boolean): If this is true, the directories will be created if
+ *paths: A list of strings which are all directory names.
+ create: If this is true, the directories will be created if
they don't already exist.
Yields:
@@ -72,32 +74,32 @@ class Directory():
raise NotImplementedError()
# Import and export of files and links
- def import_files(self, external_pathspec, *,
- filter_callback=None,
- report_written=True, update_mtime=False,
- can_link=False):
+ def import_files(self, external_pathspec: Union['Directory', str], *,
+ filter_callback: Optional[Callable[[str], bool]] = None,
+ report_written: bool = True, update_mtime: bool = False,
+ can_link: bool = False) -> FileListResult:
"""Imports some or all files from external_path into this directory.
Args:
external_pathspec: Either a string containing a pathname, or a
Directory object, to use as the source.
- filter_callback (callable): Optional filter callback. Called with the
+ filter_callback: Optional filter callback. Called with the
relative path as argument for every file in the source directory.
The file is imported only if the callable returns True.
If no filter callback is specified, all files will be imported.
- report_written (bool): Return the full list of files
+ report_written: Return the full list of files
written. Defaults to true. If false, only a list of
overwritten files is returned.
- update_mtime (bool): Update the access and modification time
+ update_mtime: Update the access and modification time
of each file copied to the current time.
- can_link (bool): Whether it's OK to create a hard link to the
+ can_link: Whether it's OK to create a hard link to the
original content, meaning the stored copy will change when the
original files change. Setting this doesn't guarantee hard
links will be made. can_link will never be used if
update_mtime is set.
Yields:
- (FileListResult) - A report of files imported and overwritten.
+ A report of files imported and overwritten.
"""
diff --git a/src/buildstream/testing/__init__.py b/src/buildstream/testing/__init__.py
index 5a034cf30..3926b4eab 100644
--- a/src/buildstream/testing/__init__.py
+++ b/src/buildstream/testing/__init__.py
@@ -38,7 +38,7 @@ except ImportError:
# Of the form plugin_name -> (repo_class, plugin_package)
-ALL_REPO_KINDS = OrderedDict()
+ALL_REPO_KINDS = OrderedDict() # type: OrderedDict[Repo, str]
def create_repo(kind, directory, subdir='repo'):
diff --git a/src/buildstream/testing/_utils/site.py b/src/buildstream/testing/_utils/site.py
index d51d37525..b098b7d41 100644
--- a/src/buildstream/testing/_utils/site.py
+++ b/src/buildstream/testing/_utils/site.py
@@ -5,13 +5,14 @@ import os
import subprocess
import sys
import platform
+from typing import Optional # pylint: disable=unused-import
from buildstream import _site, utils, ProgramNotFoundError
from buildstream._platform import Platform
try:
- GIT = utils.get_host_tool('git')
+ GIT = utils.get_host_tool('git') # type: Optional[str]
HAVE_GIT = True
out = str(subprocess.check_output(['git', '--version']), "utf-8")
@@ -33,7 +34,7 @@ except ProgramNotFoundError:
GIT_ENV = dict()
try:
- BZR = utils.get_host_tool('bzr')
+ BZR = utils.get_host_tool('bzr') # type: Optional[str]
HAVE_BZR = True
BZR_ENV = {
"BZR_EMAIL": "Testy McTesterson <testy.mctesterson@example.com>"
diff --git a/src/buildstream/types.py b/src/buildstream/types.py
index 0635f310e..5688bf393 100644
--- a/src/buildstream/types.py
+++ b/src/buildstream/types.py
@@ -25,6 +25,8 @@ Foundation types
"""
+from typing import Any, Dict, List, Union
+
from ._types import MetaFastEnum
@@ -44,7 +46,8 @@ class FastEnum(metaclass=MetaFastEnum):
"""The value of the current Enum entry, same as :func:`enum.Enum.value`
"""
- _value_to_entry = dict() # A dict of all values mapping to the entries in the enum
+ # A dict of all values mapping to the entries in the enum
+ _value_to_entry = dict() # type: Dict[str, Any]
@classmethod
def values(cls):
@@ -225,3 +228,11 @@ class _CacheBuildTrees(FastEnum):
# Never cache build trees
NEVER = "never"
+
+
+########################################
+# Type aliases #
+########################################
+
+# Internal reference for a given Source
+SourceRef = Union[None, int, List[Any], Dict[str, Any]]
diff --git a/src/buildstream/utils.py b/src/buildstream/utils.py
index 578d520ec..998b77a71 100644
--- a/src/buildstream/utils.py
+++ b/src/buildstream/utils.py
@@ -34,6 +34,8 @@ import subprocess
import tempfile
import itertools
from contextlib import contextmanager
+from pathlib import Path
+from typing import Callable, IO, Iterable, Iterator, Optional, Tuple, Union
import psutil
@@ -102,7 +104,7 @@ class FileListResult():
self.files_written = []
"""List of files that were written."""
- def combine(self, other):
+ def combine(self, other: 'FileListResult') -> 'FileListResult':
"""Create a new FileListResult that contains the results of both.
"""
ret = FileListResult()
@@ -115,7 +117,7 @@ class FileListResult():
return ret
-def list_relative_paths(directory):
+def list_relative_paths(directory: str) -> Iterator[str]:
"""A generator for walking directory relative paths
This generator is useful for checking the full manifest of
@@ -125,7 +127,7 @@ def list_relative_paths(directory):
in the manifest.
Args:
- directory (str): The directory to list files in
+ directory: The directory to list files in
Yields:
Relative filenames in `directory`
@@ -167,7 +169,7 @@ def list_relative_paths(directory):
# pylint: disable=anomalous-backslash-in-string
-def glob(paths, pattern):
+def glob(paths: Iterable[str], pattern: str) -> Iterator[str]:
"""A generator to yield paths which match the glob pattern
Args:
@@ -218,14 +220,14 @@ def glob(paths, pattern):
yield filename
-def sha256sum(filename):
+def sha256sum(filename: str) -> str:
"""Calculate the sha256sum of a file
Args:
- filename (str): A path to a file on disk
+ filename: A path to a file on disk
Returns:
- (str): An sha256 checksum string
+ An sha256 checksum string
Raises:
UtilError: In the case there was an issue opening
@@ -244,13 +246,13 @@ def sha256sum(filename):
return h.hexdigest()
-def safe_copy(src, dest, *, result=None):
+def safe_copy(src: str, dest: str, *, result: Optional[FileListResult] = None) -> None:
"""Copy a file while preserving attributes
Args:
- src (str): The source filename
- dest (str): The destination filename
- result (:class:`~.FileListResult`): An optional collective result
+ src: The source filename
+ dest: The destination filename
+ result: An optional collective result
Raises:
UtilError: In the case of unexpected system call failures
@@ -285,13 +287,13 @@ def safe_copy(src, dest, *, result=None):
.format(src, dest, e)) from e
-def safe_link(src, dest, *, result=None, _unlink=False):
+def safe_link(src: str, dest: str, *, result: Optional[FileListResult] = None, _unlink=False) -> None:
"""Try to create a hardlink, but resort to copying in the case of cross device links.
Args:
- src (str): The source filename
- dest (str): The destination filename
- result (:class:`~.FileListResult`): An optional collective result
+ src: The source filename
+ dest: The destination filename
+ result: An optional collective result
Raises:
UtilError: In the case of unexpected system call failures
@@ -320,14 +322,14 @@ def safe_link(src, dest, *, result=None, _unlink=False):
.format(src, dest, e)) from e
-def safe_remove(path):
+def safe_remove(path: str) -> bool:
"""Removes a file or directory
This will remove a file if it exists, and will
remove a directory if the directory is empty.
Args:
- path (str): The path to remove
+ path: The path to remove
Returns:
True if `path` was removed or did not exist, False
@@ -357,21 +359,26 @@ def safe_remove(path):
.format(path, e))
-def copy_files(src, dest, *, filter_callback=None, ignore_missing=False, report_written=False):
+def copy_files(src: str,
+ dest: str,
+ *,
+ filter_callback: Optional[Callable[[str], bool]] = None,
+ ignore_missing: bool = False,
+ report_written: bool = False) -> FileListResult:
"""Copy files from source to destination.
Args:
- src (str): The source directory
- dest (str): The destination directory
- filter_callback (callable): Optional filter callback. Called with the relative path as
- argument for every file in the source directory. The file is
- copied only if the callable returns True. If no filter callback
- is specified, all files will be copied.
- ignore_missing (bool): Dont raise any error if a source file is missing
- report_written (bool): Add to the result object the full list of files written
+ src: The source directory
+ dest: The destination directory
+ filter_callback: Optional filter callback. Called with the relative path as
+ argument for every file in the source directory. The file is
+ copied only if the callable returns True. If no filter callback
+ is specified, all files will be copied.
+ ignore_missing: Don't raise any error if a source file is missing
+ report_written: Add to the result object the full list of files written
Returns:
- (:class:`~.FileListResult`): The result describing what happened during this file operation
+ The result describing what happened during this file operation
Raises:
UtilError: In the case of unexpected system call failures
@@ -396,21 +403,26 @@ def copy_files(src, dest, *, filter_callback=None, ignore_missing=False, report_
return result
-def link_files(src, dest, *, filter_callback=None, ignore_missing=False, report_written=False):
+def link_files(src: str,
+ dest: str,
+ *,
+ filter_callback: Optional[Callable[[str], bool]] = None,
+ ignore_missing: bool = False,
+ report_written: bool = False) -> FileListResult:
"""Hardlink files from source to destination.
Args:
- src (str): The source directory
- dest (str): The destination directory
- filter_callback (callable): Optional filter callback. Called with the relative path as
- argument for every file in the source directory. The file is
- hardlinked only if the callable returns True. If no filter
- callback is specified, all files will be hardlinked.
- ignore_missing (bool): Dont raise any error if a source file is missing
- report_written (bool): Add to the result object the full list of files written
+ src: The source directory
+ dest: The destination directory
+ filter_callback: Optional filter callback. Called with the relative path as
+ argument for every file in the source directory. The file is
+ hardlinked only if the callable returns True. If no filter
+ callback is specified, all files will be hardlinked.
+ ignore_missing: Don't raise any error if a source file is missing
+ report_written: Add to the result object the full list of files written
Returns:
- (:class:`~.FileListResult`): The result describing what happened during this file operation
+ The result describing what happened during this file operation
Raises:
UtilError: In the case of unexpected system call failures
@@ -441,7 +453,7 @@ def link_files(src, dest, *, filter_callback=None, ignore_missing=False, report_
return result
-def get_host_tool(name):
+def get_host_tool(name: str) -> str:
"""Get the full path of a host tool
Args:
@@ -462,13 +474,12 @@ def get_host_tool(name):
return program_path
-def get_bst_version():
+def get_bst_version() -> Tuple[int, int]:
"""Gets the major, minor release portion of the
BuildStream version.
Returns:
- (int): The major version
- (int): The minor version
+ A 2-tuple of form (major version, minor version)
"""
# Import this only conditionally, it's not resolved at bash complete time
from . import __version__ # pylint: disable=cyclic-import
@@ -490,7 +501,7 @@ def get_bst_version():
.format(__version__))
-def move_atomic(source, destination, *, ensure_parents=True):
+def move_atomic(source: Union[Path, str], destination: Union[Path, str], *, ensure_parents: bool = True) -> None:
"""Move the source to the destination using atomic primitives.
This uses `os.rename` to move a file or directory to a new destination.
@@ -508,10 +519,10 @@ def move_atomic(source, destination, *, ensure_parents=True):
should be used instead of `os.rename`
Args:
- source (str or Path): source to rename
- destination (str or Path): destination to which to move the source
- ensure_parents (bool): Whether or not to create the parent's directories
- of the destination (default: True)
+ source: source to rename
+ destination: destination to which to move the source
+ ensure_parents: Whether or not to create the parent's directories
+ of the destination (default: True)
Raises:
DirectoryExistsError: if the destination directory already exists and is
not empty
@@ -529,8 +540,16 @@ def move_atomic(source, destination, *, ensure_parents=True):
@contextmanager
-def save_file_atomic(filename, mode='w', *, buffering=-1, encoding=None,
- errors=None, newline=None, closefd=True, opener=None, tempdir=None):
+def save_file_atomic(filename: str,
+ mode: str = 'w',
+ *,
+ buffering: int = -1,
+ encoding: Optional[str] = None,
+ errors: Optional[str] = None,
+ newline: Optional[str] = None,
+ closefd: bool = True,
+ opener: Optional[Callable[[str, int], int]] = None,
+ tempdir: Optional[str] = None) -> Iterator[IO]:
"""Save a file with a temporary name and rename it into place when ready.
This is a context manager which is meant for saving data to files.
@@ -576,7 +595,8 @@ def save_file_atomic(filename, mode='w', *, buffering=-1, encoding=None,
try:
with _signals.terminator(cleanup_tempfile):
- f.real_filename = filename
+ # Disable type-checking since "IO[Any]" has no attribute "real_filename"
+ f.real_filename = filename # type: ignore
yield f
f.close()
# This operation is atomic, at least on platforms we care about:
diff --git a/tox.ini b/tox.ini
index 39381dc02..c1a04dda5 100644
--- a/tox.ini
+++ b/tox.ini
@@ -99,6 +99,16 @@ deps =
-rrequirements/dev-requirements.txt
-rrequirements/plugin-requirements.txt
+#
+# Running static type checkers
+#
+[testenv:mypy]
+skip_install = True
+commands =
+ mypy {posargs}
+deps =
+ mypy
+
#
# Building documentation