author     Chandan Singh <csingh43@bloomberg.net>    2019-11-11 17:07:09 +0000
committer  Chandan Singh <chandan@chandansingh.net>  2019-11-14 21:21:06 +0000
commit     122177153b14664a0e4fed85aa4f22b87cfabf56 (patch)
tree       032c2e46825af91f6fe27f22b5b567eea2b7935d /src
parent     a3ee349558f36a220f79665873b36c1b0f990c8e (diff)
download   buildstream-122177153b14664a0e4fed85aa4f22b87cfabf56.tar.gz
Reformat code using Black
As discussed on the mailing list, reformat the code using Black. This is a one-off change to reformat our entire codebase. Moving forward, we shouldn't expect such blanket reformats; rather, we expect each change to already comply with the Black formatting style.
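For reference, a minimal sketch of the kind of transformation this commit applies, shown through Black's Python API purely for illustration (the actual reformat would normally be run with the black command line tool, and the line length below is an assumption, not something recorded in this commit):

import black

# One of the lines touched by this commit, in its pre-Black form.
src = "__version__ = get_versions()['version']\n"

# Black normalizes string quotes to double quotes and reflows long lines.
# line_length=119 is assumed for illustration; Black's default is 88.
formatted = black.format_str(src, mode=black.Mode(line_length=119))
print(formatted, end="")  # __version__ = get_versions()["version"]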
Diffstat (limited to 'src')
-rw-r--r--  src/buildstream/__init__.py | 4
-rw-r--r--  src/buildstream/__main__.py | 3
-rw-r--r--  src/buildstream/_artifact.py | 41
-rw-r--r--  src/buildstream/_artifactcache.py | 63
-rw-r--r--  src/buildstream/_artifactelement.py | 6
-rw-r--r--  src/buildstream/_basecache.py | 36
-rw-r--r--  src/buildstream/_cachekey.py | 2
-rw-r--r--  src/buildstream/_cas/cascache.py | 159
-rw-r--r--  src/buildstream/_cas/casremote.py | 30
-rw-r--r--  src/buildstream/_cas/casserver.py | 140
-rw-r--r--  src/buildstream/_context.py | 176
-rw-r--r--  src/buildstream/_elementfactory.py | 15
-rw-r--r--  src/buildstream/_exceptions.py | 9
-rw-r--r--  src/buildstream/_frontend/app.py | 456
-rw-r--r--  src/buildstream/_frontend/cli.py | 943
-rw-r--r--  src/buildstream/_frontend/complete.py | 71
-rw-r--r--  src/buildstream/_frontend/linuxapp.py | 7
-rw-r--r--  src/buildstream/_frontend/profile.py | 3
-rw-r--r--  src/buildstream/_frontend/status.py | 158
-rw-r--r--  src/buildstream/_frontend/widget.py | 344
-rw-r--r--  src/buildstream/_gitsourcebase.py | 425
-rw-r--r--  src/buildstream/_includes.py | 60
-rw-r--r--  src/buildstream/_loader/loader.py | 198
-rw-r--r--  src/buildstream/_loader/metaelement.py | 20
-rw-r--r--  src/buildstream/_loader/metasource.py | 2
-rw-r--r--  src/buildstream/_message.py | 76
-rw-r--r--  src/buildstream/_messenger.py | 47
-rw-r--r--  src/buildstream/_options/option.py | 14
-rw-r--r--  src/buildstream/_options/optionarch.py | 10
-rw-r--r--  src/buildstream/_options/optionbool.py | 15
-rw-r--r--  src/buildstream/_options/optioneltmask.py | 4
-rw-r--r--  src/buildstream/_options/optionenum.py | 28
-rw-r--r--  src/buildstream/_options/optionflags.py | 32
-rw-r--r--  src/buildstream/_options/optionos.py | 3
-rw-r--r--  src/buildstream/_options/optionpool.py | 50
-rw-r--r--  src/buildstream/_pipeline.py | 70
-rw-r--r--  src/buildstream/_platform/darwin.py | 6
-rw-r--r--  src/buildstream/_platform/fallback.py | 10
-rw-r--r--  src/buildstream/_platform/linux.py | 38
-rw-r--r--  src/buildstream/_platform/platform.py | 69
-rw-r--r--  src/buildstream/_platform/win32.py | 3
-rw-r--r--  src/buildstream/_plugincontext.py | 140
-rw-r--r--  src/buildstream/_profile.py | 48
-rw-r--r--  src/buildstream/_project.py | 368
-rw-r--r--  src/buildstream/_projectrefs.py | 17
-rw-r--r--  src/buildstream/_remote.py | 66
-rw-r--r--  src/buildstream/_scheduler/jobs/elementjob.py | 12
-rw-r--r--  src/buildstream/_scheduler/jobs/job.py | 145
-rw-r--r--  src/buildstream/_scheduler/jobs/jobpickler.py | 14
-rw-r--r--  src/buildstream/_scheduler/queues/buildqueue.py | 13
-rw-r--r--  src/buildstream/_scheduler/queues/queue.py | 54
-rw-r--r--  src/buildstream/_scheduler/resources.py | 13
-rw-r--r--  src/buildstream/_scheduler/scheduler.py | 83
-rw-r--r--  src/buildstream/_signals.py | 20
-rw-r--r--  src/buildstream/_site.py | 14
-rw-r--r--  src/buildstream/_sourcecache.py | 40
-rw-r--r--  src/buildstream/_sourcefactory.py | 11
-rw-r--r--  src/buildstream/_state.py | 13
-rw-r--r--  src/buildstream/_stream.py | 538
-rw-r--r--  src/buildstream/_version.py | 133
-rw-r--r--  src/buildstream/_workspaces.py | 99
-rw-r--r--  src/buildstream/buildelement.py | 59
-rw-r--r--  src/buildstream/element.py | 601
-rw-r--r--  src/buildstream/plugin.py | 118
-rw-r--r--  src/buildstream/plugins/elements/autotools.py | 3
-rw-r--r--  src/buildstream/plugins/elements/compose.py | 37
-rw-r--r--  src/buildstream/plugins/elements/filter.py | 59
-rw-r--r--  src/buildstream/plugins/elements/import.py | 28
-rw-r--r--  src/buildstream/plugins/elements/junction.py | 10
-rw-r--r--  src/buildstream/plugins/elements/manual.py | 3
-rw-r--r--  src/buildstream/plugins/elements/pip.py | 3
-rw-r--r--  src/buildstream/plugins/elements/script.py | 12
-rw-r--r--  src/buildstream/plugins/elements/stack.py | 4
-rw-r--r--  src/buildstream/plugins/sources/_downloadablefilesource.py | 61
-rw-r--r--  src/buildstream/plugins/sources/bzr.py | 109
-rw-r--r--  src/buildstream/plugins/sources/deb.py | 6
-rw-r--r--  src/buildstream/plugins/sources/local.py | 8
-rw-r--r--  src/buildstream/plugins/sources/patch.py | 12
-rw-r--r--  src/buildstream/plugins/sources/pip.py | 106
-rw-r--r--  src/buildstream/plugins/sources/remote.py | 11
-rw-r--r--  src/buildstream/plugins/sources/tar.py | 45
-rw-r--r--  src/buildstream/plugins/sources/workspace.py | 12
-rw-r--r--  src/buildstream/plugins/sources/zip.py | 14
-rw-r--r--  src/buildstream/sandbox/_config.py | 11
-rw-r--r--  src/buildstream/sandbox/_mount.py | 19
-rw-r--r--  src/buildstream/sandbox/_mounter.py | 48
-rw-r--r--  src/buildstream/sandbox/_sandboxbuildbox.py | 73
-rw-r--r--  src/buildstream/sandbox/_sandboxbwrap.py | 118
-rw-r--r--  src/buildstream/sandbox/_sandboxchroot.py | 60
-rw-r--r--  src/buildstream/sandbox/_sandboxdummy.py | 11
-rw-r--r--  src/buildstream/sandbox/_sandboxreapi.py | 47
-rw-r--r--  src/buildstream/sandbox/_sandboxremote.py | 215
-rw-r--r--  src/buildstream/sandbox/sandbox.py | 120
-rw-r--r--  src/buildstream/scriptelement.py | 72
-rw-r--r--  src/buildstream/source.py | 169
-rw-r--r--  src/buildstream/storage/_casbaseddirectory.py | 111
-rw-r--r--  src/buildstream/storage/_filebaseddirectory.py | 64
-rw-r--r--  src/buildstream/storage/directory.py | 18
-rw-r--r--  src/buildstream/testing/__init__.py | 9
-rw-r--r--  src/buildstream/testing/_fixtures.py | 1
-rw-r--r--  src/buildstream/testing/_sourcetests/build_checkout.py | 36
-rw-r--r--  src/buildstream/testing/_sourcetests/fetch.py | 57
-rw-r--r--  src/buildstream/testing/_sourcetests/mirror.py | 318
-rw-r--r--  src/buildstream/testing/_sourcetests/source_determinism.py | 75
-rw-r--r--  src/buildstream/testing/_sourcetests/track.py | 245
-rw-r--r--  src/buildstream/testing/_sourcetests/track_cross_junction.py | 134
-rw-r--r--  src/buildstream/testing/_sourcetests/utils.py | 15
-rw-r--r--  src/buildstream/testing/_sourcetests/workspace.py | 85
-rw-r--r--  src/buildstream/testing/_utils/junction.py | 41
-rw-r--r--  src/buildstream/testing/_utils/site.py | 43
-rw-r--r--  src/buildstream/testing/integration.py | 22
-rw-r--r--  src/buildstream/testing/repo.py | 7
-rw-r--r--  src/buildstream/testing/runcli.py | 282
-rw-r--r--  src/buildstream/types.py | 2
-rw-r--r--  src/buildstream/utils.py | 265
115 files changed, 5011 insertions, 4790 deletions
diff --git a/src/buildstream/__init__.py b/src/buildstream/__init__.py
index cd8d0f1cf..c78fcbbf6 100644
--- a/src/buildstream/__init__.py
+++ b/src/buildstream/__init__.py
@@ -19,11 +19,13 @@
# Plugin author facing APIs
import os
+
if "_BST_COMPLETION" not in os.environ:
# Special sauce to get the version from versioneer
from ._version import get_versions
- __version__ = get_versions()['version']
+
+ __version__ = get_versions()["version"]
del get_versions
from .utils import UtilError, ProgramNotFoundError
diff --git a/src/buildstream/__main__.py b/src/buildstream/__main__.py
index 4b0fdabfe..556a0f67e 100644
--- a/src/buildstream/__main__.py
+++ b/src/buildstream/__main__.py
@@ -11,7 +11,8 @@
# This is used when we need to run BuildStream before installing,
# like when we build documentation.
#
-if __name__ == '__main__':
+if __name__ == "__main__":
# pylint: disable=no-value-for-parameter
from ._frontend.cli import cli
+
cli()
diff --git a/src/buildstream/_artifact.py b/src/buildstream/_artifact.py
index e5174eaea..feba3898b 100644
--- a/src/buildstream/_artifact.py
+++ b/src/buildstream/_artifact.py
@@ -47,7 +47,7 @@ from .storage._casbaseddirectory import CasBasedDirectory
# strong_key (str): The elements strong cache key, dependent on context
# weak_key (str): The elements weak cache key
#
-class Artifact():
+class Artifact:
version = 0
@@ -61,11 +61,11 @@ class Artifact():
self._tmpdir = context.tmpdir
self._proto = None
- self._metadata_keys = None # Strong and weak key tuple extracted from the artifact
- self._metadata_dependencies = None # Dictionary of dependency strong keys from the artifact
- self._metadata_workspaced = None # Boolean of whether it's a workspaced artifact
+ self._metadata_keys = None # Strong and weak key tuple extracted from the artifact
+ self._metadata_dependencies = None # Dictionary of dependency strong keys from the artifact
+ self._metadata_workspaced = None # Boolean of whether it's a workspaced artifact
self._metadata_workspaced_dependencies = None # List of which dependencies are workspaced from the artifact
- self._cached = None # Boolean of whether the artifact is cached
+ self._cached = None # Boolean of whether the artifact is cached
# get_files():
#
@@ -193,12 +193,11 @@ class Artifact():
artifact.buildtree.CopyFrom(buildtreevdir._get_digest())
size += buildtreevdir.get_size()
- os.makedirs(os.path.dirname(os.path.join(
- self._artifactdir, element.get_artifact_name())), exist_ok=True)
+ os.makedirs(os.path.dirname(os.path.join(self._artifactdir, element.get_artifact_name())), exist_ok=True)
keys = utils._deduplicate([self._cache_key, self._weak_cache_key])
for key in keys:
path = os.path.join(self._artifactdir, element.get_artifact_name(key=key))
- with utils.save_file_atomic(path, mode='wb') as f:
+ with utils.save_file_atomic(path, mode="wb") as f:
f.write(artifact.SerializeToString())
return size
@@ -247,7 +246,7 @@ class Artifact():
# Load the public data from the artifact
artifact = self._get_proto()
meta_file = self._cas.objpath(artifact.public_data)
- data = _yaml.load(meta_file, shortname='public.yaml')
+ data = _yaml.load(meta_file, shortname="public.yaml")
return data
@@ -263,9 +262,7 @@ class Artifact():
def load_build_result(self):
artifact = self._get_proto()
- build_result = (artifact.build_success,
- artifact.build_error,
- artifact.build_error_details)
+ build_result = (artifact.build_success, artifact.build_error, artifact.build_error_details)
return build_result
@@ -345,8 +342,9 @@ class Artifact():
# Extract proto
artifact = self._get_proto()
- self._metadata_workspaced_dependencies = [dep.element_name for dep in artifact.build_deps
- if dep.was_workspaced]
+ self._metadata_workspaced_dependencies = [
+ dep.element_name for dep in artifact.build_deps if dep.was_workspaced
+ ]
return self._metadata_workspaced_dependencies
@@ -419,12 +417,14 @@ class Artifact():
# Determine whether directories are required
require_directories = context.require_artifact_directories
# Determine whether file contents are required as well
- require_files = (context.require_artifact_files or
- self._element._artifact_files_required())
+ require_files = context.require_artifact_files or self._element._artifact_files_required()
# Check whether 'files' subdirectory is available, with or without file contents
- if (require_directories and str(artifact.files) and
- not self._cas.contains_directory(artifact.files, with_files=require_files)):
+ if (
+ require_directories
+ and str(artifact.files)
+ and not self._cas.contains_directory(artifact.files, with_files=require_files)
+ ):
self._cached = False
return False
@@ -471,11 +471,10 @@ class Artifact():
key = self.get_extract_key()
- proto_path = os.path.join(self._artifactdir,
- self._element.get_artifact_name(key=key))
+ proto_path = os.path.join(self._artifactdir, self._element.get_artifact_name(key=key))
artifact = ArtifactProto()
try:
- with open(proto_path, mode='r+b') as f:
+ with open(proto_path, mode="r+b") as f:
artifact.ParseFromString(f.read())
except FileNotFoundError:
return None
diff --git a/src/buildstream/_artifactcache.py b/src/buildstream/_artifactcache.py
index 79d0dc50b..03c47b906 100644
--- a/src/buildstream/_artifactcache.py
+++ b/src/buildstream/_artifactcache.py
@@ -23,8 +23,7 @@ import grpc
from ._basecache import BaseCache
from ._exceptions import ArtifactError, CASError, CASCacheError, CASRemoteError, RemoteError
-from ._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc, \
- artifact_pb2, artifact_pb2_grpc
+from ._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc, artifact_pb2, artifact_pb2_grpc
from ._remote import BaseRemote
from .storage._casbaseddirectory import CasBasedDirectory
@@ -38,7 +37,6 @@ from . import utils
# artifact remotes.
#
class ArtifactRemote(BaseRemote):
-
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.artifact_service = None
@@ -78,8 +76,10 @@ class ArtifactRemote(BaseRemote):
except grpc.RpcError as e:
# Check if this remote has the artifact service
if e.code() == grpc.StatusCode.UNIMPLEMENTED:
- raise RemoteError("Configured remote does not have the BuildStream "
- "capabilities service. Please check remote configuration.")
+ raise RemoteError(
+ "Configured remote does not have the BuildStream "
+ "capabilities service. Please check remote configuration."
+ )
# Else raise exception with details
raise RemoteError("Remote initialisation failed: {}".format(e.details()))
@@ -263,9 +263,11 @@ class ArtifactCache(BaseCache):
if self._push_artifact_blobs(artifact, remote):
element.info("Pushed data from artifact {} -> {}".format(display_key, remote))
else:
- element.info("Remote ({}) already has all data of artifact {} cached".format(
- remote, element._get_brief_display_key()
- ))
+ element.info(
+ "Remote ({}) already has all data of artifact {} cached".format(
+ remote, element._get_brief_display_key()
+ )
+ )
for remote in index_remotes:
remote.init()
@@ -275,9 +277,9 @@ class ArtifactCache(BaseCache):
element.info("Pushed artifact {} -> {}".format(display_key, remote))
pushed = True
else:
- element.info("Remote ({}) already has artifact {} cached".format(
- remote, element._get_brief_display_key()
- ))
+ element.info(
+ "Remote ({}) already has artifact {} cached".format(remote, element._get_brief_display_key())
+ )
return pushed
@@ -295,7 +297,7 @@ class ArtifactCache(BaseCache):
#
def pull(self, element, key, *, pull_buildtrees=False):
artifact = None
- display_key = key[:self.context.log_key_length]
+ display_key = key[: self.context.log_key_length]
project = element._get_project()
errors = []
@@ -310,16 +312,15 @@ class ArtifactCache(BaseCache):
element.info("Pulled artifact {} <- {}".format(display_key, remote))
break
else:
- element.info("Remote ({}) does not have artifact {} cached".format(
- remote, display_key
- ))
+ element.info("Remote ({}) does not have artifact {} cached".format(remote, display_key))
except CASError as e:
element.warn("Could not pull from remote {}: {}".format(remote, e))
errors.append(e)
if errors and not artifact:
- raise ArtifactError("Failed to pull artifact {}".format(display_key),
- detail="\n".join(str(e) for e in errors))
+ raise ArtifactError(
+ "Failed to pull artifact {}".format(display_key), detail="\n".join(str(e) for e in errors)
+ )
# If we don't have an artifact, we can't exactly pull our
# artifact
@@ -337,16 +338,15 @@ class ArtifactCache(BaseCache):
element.info("Pulled data for artifact {} <- {}".format(display_key, remote))
return True
- element.info("Remote ({}) does not have artifact {} cached".format(
- remote, display_key
- ))
+ element.info("Remote ({}) does not have artifact {} cached".format(remote, display_key))
except CASError as e:
element.warn("Could not pull from remote {}: {}".format(remote, e))
errors.append(e)
if errors:
- raise ArtifactError("Failed to pull artifact {}".format(display_key),
- detail="\n".join(str(e) for e in errors))
+ raise ArtifactError(
+ "Failed to pull artifact {}".format(display_key), detail="\n".join(str(e) for e in errors)
+ )
return False
@@ -388,8 +388,9 @@ class ArtifactCache(BaseCache):
push_remotes = []
if not push_remotes:
- raise ArtifactError("push_message was called, but no remote artifact " +
- "servers are configured as push remotes.")
+ raise ArtifactError(
+ "push_message was called, but no remote artifact " + "servers are configured as push remotes."
+ )
for remote in push_remotes:
message_digest = remote.push_message(message)
@@ -410,8 +411,7 @@ class ArtifactCache(BaseCache):
newref = element.get_artifact_name(newkey)
if not os.path.exists(os.path.join(self.artifactdir, newref)):
- os.link(os.path.join(self.artifactdir, oldref),
- os.path.join(self.artifactdir, newref))
+ os.link(os.path.join(self.artifactdir, oldref), os.path.join(self.artifactdir, newref))
# get_artifact_logs():
#
@@ -425,7 +425,7 @@ class ArtifactCache(BaseCache):
#
def get_artifact_logs(self, ref):
cache_id = self.cas.resolve_ref(ref, update_mtime=True)
- vdir = CasBasedDirectory(self.cas, digest=cache_id).descend('logs')
+ vdir = CasBasedDirectory(self.cas, digest=cache_id).descend("logs")
return vdir
# fetch_missing_blobs():
@@ -517,7 +517,7 @@ class ArtifactCache(BaseCache):
for root, _, files in os.walk(self.artifactdir):
for artifact_file in files:
artifact = artifact_pb2.Artifact()
- with open(os.path.join(root, artifact_file), 'r+b') as f:
+ with open(os.path.join(root, artifact_file), "r+b") as f:
artifact.ParseFromString(f.read())
if str(artifact.files):
@@ -535,7 +535,7 @@ class ArtifactCache(BaseCache):
for root, _, files in os.walk(self.artifactdir):
for artifact_file in files:
artifact = artifact_pb2.Artifact()
- with open(os.path.join(root, artifact_file), 'r+b') as f:
+ with open(os.path.join(root, artifact_file), "r+b") as f:
artifact.ParseFromString(f.read())
if str(artifact.public_data):
@@ -620,8 +620,7 @@ class ArtifactCache(BaseCache):
remote.get_artifact(element.get_artifact_name(key=key))
except grpc.RpcError as e:
if e.code() != grpc.StatusCode.NOT_FOUND:
- raise ArtifactError("Error checking artifact cache: {}"
- .format(e.details()))
+ raise ArtifactError("Error checking artifact cache: {}".format(e.details()))
else:
return False
@@ -710,7 +709,7 @@ class ArtifactCache(BaseCache):
# Write the artifact proto to cache
artifact_path = os.path.join(self.artifactdir, artifact_name)
os.makedirs(os.path.dirname(artifact_path), exist_ok=True)
- with utils.save_file_atomic(artifact_path, mode='wb') as f:
+ with utils.save_file_atomic(artifact_path, mode="wb") as f:
f.write(artifact.SerializeToString())
return artifact
diff --git a/src/buildstream/_artifactelement.py b/src/buildstream/_artifactelement.py
index 48c3d1769..1c1c5db46 100644
--- a/src/buildstream/_artifactelement.py
+++ b/src/buildstream/_artifactelement.py
@@ -40,7 +40,7 @@ if TYPE_CHECKING:
class ArtifactElement(Element):
# A hash of ArtifactElement by ref
- __instantiated_artifacts = {} # type: Dict[str, ArtifactElement]
+ __instantiated_artifacts = {} # type: Dict[str, ArtifactElement]
# ArtifactElement's require this as the sandbox will use a normal
# directory when we checkout
@@ -138,7 +138,7 @@ class ArtifactElement(Element):
# sandbox (Sandbox)
#
def configure_sandbox(self, sandbox):
- install_root = self.get_variable('install-root')
+ install_root = self.get_variable("install-root")
# Tell the sandbox to mount the build root and install root
sandbox.mark_directory(install_root)
@@ -173,7 +173,7 @@ class ArtifactElement(Element):
#
def verify_artifact_ref(ref):
try:
- project, element, key = ref.split('/', 2) # This will raise a Value error if unable to split
+ project, element, key = ref.split("/", 2) # This will raise a Value error if unable to split
# Explicitly raise a ValueError if the key length is not as expected
if not _cachekey.is_key(key):
raise ValueError
diff --git a/src/buildstream/_basecache.py b/src/buildstream/_basecache.py
index fc2e92456..516119cd1 100644
--- a/src/buildstream/_basecache.py
+++ b/src/buildstream/_basecache.py
@@ -37,21 +37,21 @@ if TYPE_CHECKING:
# Base Cache for Caches to derive from
#
-class BaseCache():
+class BaseCache:
# None of these should ever be called in the base class, but this appeases
# pylint to some degree
- spec_name = None # type: str
- spec_error = None # type: Type[BstError]
- config_node_name = None # type: str
- index_remote_class = None # type: Type[BaseRemote]
+ spec_name = None # type: str
+ spec_error = None # type: Type[BstError]
+ config_node_name = None # type: str
+ index_remote_class = None # type: Type[BaseRemote]
storage_remote_class = CASRemote # type: Type[BaseRemote]
def __init__(self, context):
self.context = context
self.cas = context.get_cascache()
- self._remotes_setup = False # Check to prevent double-setup of remotes
+ self._remotes_setup = False # Check to prevent double-setup of remotes
# Per-project list of Remote instances.
self._storage_remotes = {}
self._index_remotes = {}
@@ -116,8 +116,12 @@ class BaseCache():
artifacts = config_node.get_sequence(cls.config_node_name, default=[])
except LoadError:
provenance = config_node.get_node(cls.config_node_name).get_provenance()
- raise _yaml.LoadError("{}: '{}' must be a single remote mapping, or a list of mappings"
- .format(provenance, cls.config_node_name), _yaml.LoadErrorReason.INVALID_DATA)
+ raise _yaml.LoadError(
+ "{}: '{}' must be a single remote mapping, or a list of mappings".format(
+ provenance, cls.config_node_name
+ ),
+ _yaml.LoadErrorReason.INVALID_DATA,
+ )
for spec_node in artifacts:
cache_specs.append(RemoteSpec.new_from_config_node(spec_node))
@@ -144,8 +148,7 @@ class BaseCache():
project_specs = getattr(project, cls.spec_name)
context_specs = getattr(context, cls.spec_name)
- return list(utils._deduplicate(
- project_extra_specs + project_specs + context_specs))
+ return list(utils._deduplicate(project_extra_specs + project_specs + context_specs))
# setup_remotes():
#
@@ -266,8 +269,9 @@ class BaseCache():
# Check whether the specified element's project has push remotes
index_remotes = self._index_remotes[plugin._get_project()]
storage_remotes = self._storage_remotes[plugin._get_project()]
- return (any(remote.spec.push for remote in index_remotes) and
- any(remote.spec.push for remote in storage_remotes))
+ return any(remote.spec.push for remote in index_remotes) and any(
+ remote.spec.push for remote in storage_remotes
+ )
################################################
# Local Private Methods #
@@ -323,8 +327,9 @@ class BaseCache():
storage_remotes[remote_spec] = storage
self._has_fetch_remotes = storage_remotes and index_remotes
- self._has_push_remotes = (any(spec.push for spec in storage_remotes) and
- any(spec.push for spec in index_remotes))
+ self._has_push_remotes = any(spec.push for spec in storage_remotes) and any(
+ spec.push for spec in index_remotes
+ )
return index_remotes, storage_remotes
@@ -366,8 +371,7 @@ class BaseCache():
#
def _message(self, message_type, message, **kwargs):
args = dict(kwargs)
- self.context.messenger.message(
- Message(message_type, message, **args))
+ self.context.messenger.message(Message(message_type, message, **args))
# _set_remotes():
#
diff --git a/src/buildstream/_cachekey.py b/src/buildstream/_cachekey.py
index 89d47671e..dd9207516 100644
--- a/src/buildstream/_cachekey.py
+++ b/src/buildstream/_cachekey.py
@@ -62,5 +62,5 @@ def is_key(key):
# (str): An sha256 hex digest of the given value
#
def generate_key(value):
- ustring = ujson.dumps(value, sort_keys=True, escape_forward_slashes=False).encode('utf-8')
+ ustring = ujson.dumps(value, sort_keys=True, escape_forward_slashes=False).encode("utf-8")
return hashlib.sha256(ustring).hexdigest()
diff --git a/src/buildstream/_cas/cascache.py b/src/buildstream/_cas/cascache.py
index 022730445..c1f2b30b0 100644
--- a/src/buildstream/_cas/cascache.py
+++ b/src/buildstream/_cas/cascache.py
@@ -68,15 +68,14 @@ class CASLogLevel(FastEnum):
# protect_session_blobs (bool): Disable expiry for blobs used in the current session
# log_level (LogLevel): Log level to give to buildbox-casd for logging
#
-class CASCache():
-
+class CASCache:
def __init__(
- self, path, *, casd=True, cache_quota=None, protect_session_blobs=True, log_level=CASLogLevel.WARNING
+ self, path, *, casd=True, cache_quota=None, protect_session_blobs=True, log_level=CASLogLevel.WARNING
):
- self.casdir = os.path.join(path, 'cas')
- self.tmpdir = os.path.join(path, 'tmp')
- os.makedirs(os.path.join(self.casdir, 'refs', 'heads'), exist_ok=True)
- os.makedirs(os.path.join(self.casdir, 'objects'), exist_ok=True)
+ self.casdir = os.path.join(path, "cas")
+ self.tmpdir = os.path.join(path, "tmp")
+ os.makedirs(os.path.join(self.casdir, "refs", "heads"), exist_ok=True)
+ os.makedirs(os.path.join(self.casdir, "objects"), exist_ok=True)
os.makedirs(self.tmpdir, exist_ok=True)
self._casd_channel = None
@@ -88,19 +87,19 @@ class CASCache():
if casd:
# Place socket in global/user temporary directory to avoid hitting
# the socket path length limit.
- self._casd_socket_tempdir = tempfile.mkdtemp(prefix='buildstream')
- self._casd_socket_path = os.path.join(self._casd_socket_tempdir, 'casd.sock')
+ self._casd_socket_tempdir = tempfile.mkdtemp(prefix="buildstream")
+ self._casd_socket_path = os.path.join(self._casd_socket_tempdir, "casd.sock")
- casd_args = [utils.get_host_tool('buildbox-casd')]
- casd_args.append('--bind=unix:' + self._casd_socket_path)
- casd_args.append('--log-level=' + log_level.value)
+ casd_args = [utils.get_host_tool("buildbox-casd")]
+ casd_args.append("--bind=unix:" + self._casd_socket_path)
+ casd_args.append("--log-level=" + log_level.value)
if cache_quota is not None:
- casd_args.append('--quota-high={}'.format(int(cache_quota)))
- casd_args.append('--quota-low={}'.format(int(cache_quota / 2)))
+ casd_args.append("--quota-high={}".format(int(cache_quota)))
+ casd_args.append("--quota-low={}".format(int(cache_quota / 2)))
if protect_session_blobs:
- casd_args.append('--protect-session-blobs')
+ casd_args.append("--protect-session-blobs")
casd_args.append(path)
@@ -112,7 +111,8 @@ class CASCache():
# The frontend will take care of it if needed
with _signals.blocked([signal.SIGINT], ignore=False):
self._casd_process = subprocess.Popen(
- casd_args, cwd=path, stdout=logfile_fp, stderr=subprocess.STDOUT)
+ casd_args, cwd=path, stdout=logfile_fp, stderr=subprocess.STDOUT
+ )
self._cache_usage_monitor = _CASCacheUsageMonitor(self)
else:
@@ -123,16 +123,16 @@ class CASCache():
# Popen objects are not pickle-able, however, child processes only
# need the information whether a casd subprocess was started or not.
- assert '_casd_process' in state
- state['_casd_process'] = bool(self._casd_process)
+ assert "_casd_process" in state
+ state["_casd_process"] = bool(self._casd_process)
# The usage monitor is not pickle-able, but we also don't need it in
# child processes currently. Make sure that if this changes, we get a
# bug report, by setting _cache_usage_monitor_forbidden.
- assert '_cache_usage_monitor' in state
- assert '_cache_usage_monitor_forbidden' in state
- state['_cache_usage_monitor'] = None
- state['_cache_usage_monitor_forbidden'] = True
+ assert "_cache_usage_monitor" in state
+ assert "_cache_usage_monitor_forbidden" in state
+ state["_cache_usage_monitor"] = None
+ state["_cache_usage_monitor_forbidden"] = True
return state
@@ -148,7 +148,7 @@ class CASCache():
time.sleep(0.01)
- self._casd_channel = grpc.insecure_channel('unix:' + self._casd_socket_path)
+ self._casd_channel = grpc.insecure_channel("unix:" + self._casd_socket_path)
self._casd_cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(self._casd_channel)
self._local_cas = local_cas_pb2_grpc.LocalContentAddressableStorageStub(self._casd_channel)
@@ -179,8 +179,8 @@ class CASCache():
# Preflight check.
#
def preflight(self):
- headdir = os.path.join(self.casdir, 'refs', 'heads')
- objdir = os.path.join(self.casdir, 'objects')
+ headdir = os.path.join(self.casdir, "refs", "heads")
+ objdir = os.path.join(self.casdir, "objects")
if not (os.path.isdir(headdir) and os.path.isdir(objdir)):
raise CASCacheError("CAS repository check failed for '{}'".format(self.casdir))
@@ -285,7 +285,7 @@ class CASCache():
directory = remote_execution_pb2.Directory()
- with open(self.objpath(tree), 'rb') as f:
+ with open(self.objpath(tree), "rb") as f:
directory.ParseFromString(f.read())
for filenode in directory.files:
@@ -297,8 +297,16 @@ class CASCache():
utils.safe_copy(self.objpath(filenode.digest), fullpath)
if filenode.is_executable:
- os.chmod(fullpath, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR |
- stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
+ os.chmod(
+ fullpath,
+ stat.S_IRUSR
+ | stat.S_IWUSR
+ | stat.S_IXUSR
+ | stat.S_IRGRP
+ | stat.S_IXGRP
+ | stat.S_IROTH
+ | stat.S_IXOTH,
+ )
for dirnode in directory.directories:
fullpath = os.path.join(dest, dirnode.name)
@@ -365,7 +373,7 @@ class CASCache():
# (str): The path of the object
#
def objpath(self, digest):
- return os.path.join(self.casdir, 'objects', digest.hash[:2], digest.hash[2:])
+ return os.path.join(self.casdir, "objects", digest.hash[:2], digest.hash[2:])
# add_object():
#
@@ -450,7 +458,7 @@ class CASCache():
treepath = self.objpath(tree_response.tree_digest)
tree = remote_execution_pb2.Tree()
- with open(treepath, 'rb') as f:
+ with open(treepath, "rb") as f:
tree.ParseFromString(f.read())
root_directory = tree.root.SerializeToString()
@@ -467,7 +475,7 @@ class CASCache():
def set_ref(self, ref, tree):
refpath = self._refpath(ref)
os.makedirs(os.path.dirname(refpath), exist_ok=True)
- with utils.save_file_atomic(refpath, 'wb', tempdir=self.tmpdir) as f:
+ with utils.save_file_atomic(refpath, "wb", tempdir=self.tmpdir) as f:
f.write(tree.SerializeToString())
# resolve_ref():
@@ -485,7 +493,7 @@ class CASCache():
refpath = self._refpath(ref)
try:
- with open(refpath, 'rb') as f:
+ with open(refpath, "rb") as f:
if update_mtime:
os.utime(refpath)
@@ -521,7 +529,7 @@ class CASCache():
def remove(self, ref, *, basedir=None):
if basedir is None:
- basedir = os.path.join(self.casdir, 'refs', 'heads')
+ basedir = os.path.join(self.casdir, "refs", "heads")
# Remove cache ref
self._remove_ref(ref, basedir)
@@ -611,7 +619,7 @@ class CASCache():
directory = remote_execution_pb2.Directory()
- with open(self.objpath(directory_digest), 'rb') as f:
+ with open(self.objpath(directory_digest), "rb") as f:
directory.ParseFromString(f.read())
for filenode in directory.files:
@@ -626,21 +634,19 @@ class CASCache():
dir_b = remote_execution_pb2.Directory()
if tree_a:
- with open(self.objpath(tree_a), 'rb') as f:
+ with open(self.objpath(tree_a), "rb") as f:
dir_a.ParseFromString(f.read())
if tree_b:
- with open(self.objpath(tree_b), 'rb') as f:
+ with open(self.objpath(tree_b), "rb") as f:
dir_b.ParseFromString(f.read())
a = 0
b = 0
while a < len(dir_a.files) or b < len(dir_b.files):
- if b < len(dir_b.files) and (a >= len(dir_a.files) or
- dir_a.files[a].name > dir_b.files[b].name):
+ if b < len(dir_b.files) and (a >= len(dir_a.files) or dir_a.files[a].name > dir_b.files[b].name):
added.append(os.path.join(path, dir_b.files[b].name))
b += 1
- elif a < len(dir_a.files) and (b >= len(dir_b.files) or
- dir_b.files[b].name > dir_a.files[a].name):
+ elif a < len(dir_a.files) and (b >= len(dir_b.files) or dir_b.files[b].name > dir_a.files[a].name):
removed.append(os.path.join(path, dir_a.files[a].name))
a += 1
else:
@@ -653,24 +659,41 @@ class CASCache():
a = 0
b = 0
while a < len(dir_a.directories) or b < len(dir_b.directories):
- if b < len(dir_b.directories) and (a >= len(dir_a.directories) or
- dir_a.directories[a].name > dir_b.directories[b].name):
- self.diff_trees(None, dir_b.directories[b].digest,
- added=added, removed=removed, modified=modified,
- path=os.path.join(path, dir_b.directories[b].name))
+ if b < len(dir_b.directories) and (
+ a >= len(dir_a.directories) or dir_a.directories[a].name > dir_b.directories[b].name
+ ):
+ self.diff_trees(
+ None,
+ dir_b.directories[b].digest,
+ added=added,
+ removed=removed,
+ modified=modified,
+ path=os.path.join(path, dir_b.directories[b].name),
+ )
b += 1
- elif a < len(dir_a.directories) and (b >= len(dir_b.directories) or
- dir_b.directories[b].name > dir_a.directories[a].name):
- self.diff_trees(dir_a.directories[a].digest, None,
- added=added, removed=removed, modified=modified,
- path=os.path.join(path, dir_a.directories[a].name))
+ elif a < len(dir_a.directories) and (
+ b >= len(dir_b.directories) or dir_b.directories[b].name > dir_a.directories[a].name
+ ):
+ self.diff_trees(
+ dir_a.directories[a].digest,
+ None,
+ added=added,
+ removed=removed,
+ modified=modified,
+ path=os.path.join(path, dir_a.directories[a].name),
+ )
a += 1
else:
# Subdirectory exists in both directories
if dir_a.directories[a].digest.hash != dir_b.directories[b].digest.hash:
- self.diff_trees(dir_a.directories[a].digest, dir_b.directories[b].digest,
- added=added, removed=removed, modified=modified,
- path=os.path.join(path, dir_a.directories[a].name))
+ self.diff_trees(
+ dir_a.directories[a].digest,
+ dir_b.directories[b].digest,
+ added=added,
+ removed=removed,
+ modified=modified,
+ path=os.path.join(path, dir_a.directories[a].name),
+ )
a += 1
b += 1
@@ -703,7 +726,7 @@ class CASCache():
return os.path.join(log_dir, str(self._casd_start_time) + ".log")
def _refpath(self, ref):
- return os.path.join(self.casdir, 'refs', 'heads', ref)
+ return os.path.join(self.casdir, "refs", "heads", ref)
# _remove_ref()
#
@@ -763,7 +786,7 @@ class CASCache():
directory = remote_execution_pb2.Directory()
- with open(self.objpath(tree), 'rb') as f:
+ with open(self.objpath(tree), "rb") as f:
directory.ParseFromString(f.read())
for dirnode in directory.directories:
@@ -783,7 +806,7 @@ class CASCache():
directory = remote_execution_pb2.Directory()
- with open(self.objpath(tree), 'rb') as f:
+ with open(self.objpath(tree), "rb") as f:
directory.ParseFromString(f.read())
except FileNotFoundError:
@@ -813,8 +836,7 @@ class CASCache():
@contextlib.contextmanager
def _temporary_object(self):
with utils._tempnamedfile(dir=self.tmpdir) as f:
- os.chmod(f.name,
- stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
+ os.chmod(f.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
yield f
# _ensure_blob():
@@ -898,12 +920,13 @@ class CASCache():
objpath = self._ensure_blob(remote, dir_digest)
directory = remote_execution_pb2.Directory()
- with open(objpath, 'rb') as f:
+ with open(objpath, "rb") as f:
directory.ParseFromString(f.read())
for dirnode in directory.directories:
- batch = self._fetch_directory_node(remote, dirnode.digest, batch,
- fetch_queue, fetch_next_queue, recursive=True)
+ batch = self._fetch_directory_node(
+ remote, dirnode.digest, batch, fetch_queue, fetch_next_queue, recursive=True
+ )
# Fetch final batch
self._fetch_directory_batch(remote, batch, fetch_queue, fetch_next_queue)
@@ -913,7 +936,7 @@ class CASCache():
tree = remote_execution_pb2.Tree()
- with open(objpath, 'rb') as f:
+ with open(objpath, "rb") as f:
tree.ParseFromString(f.read())
tree.children.extend([tree.root])
@@ -1062,8 +1085,7 @@ class CASCache():
# used_size (int): Total size used by the local cache, in bytes.
# quota_size (int): Disk quota for the local cache, in bytes.
#
-class _CASCacheUsage():
-
+class _CASCacheUsage:
def __init__(self, used_size, quota_size):
self.used_size = used_size
self.quota_size = quota_size
@@ -1080,10 +1102,11 @@ class _CASCacheUsage():
elif self.quota_size is None:
return utils._pretty_size(self.used_size, dec_places=1)
else:
- return "{} / {} ({}%)" \
- .format(utils._pretty_size(self.used_size, dec_places=1),
- utils._pretty_size(self.quota_size, dec_places=1),
- self.used_percent)
+ return "{} / {} ({}%)".format(
+ utils._pretty_size(self.used_size, dec_places=1),
+ utils._pretty_size(self.quota_size, dec_places=1),
+ self.used_percent,
+ )
# _CASCacheUsageMonitor
diff --git a/src/buildstream/_cas/casremote.py b/src/buildstream/_cas/casremote.py
index a054b288a..ee6f4679c 100644
--- a/src/buildstream/_cas/casremote.py
+++ b/src/buildstream/_cas/casremote.py
@@ -32,7 +32,6 @@ _MAX_DIGESTS = _MAX_PAYLOAD_BYTES / 80
class BlobNotFound(CASRemoteError):
-
def __init__(self, blob, msg):
self.blob = blob
super().__init__(msg)
@@ -41,7 +40,6 @@ class BlobNotFound(CASRemoteError):
# Represents a single remote CAS cache.
#
class CASRemote(BaseRemote):
-
def __init__(self, spec, cascache, **kwargs):
super().__init__(spec, **kwargs)
@@ -90,7 +88,7 @@ class CASRemote(BaseRemote):
# Represents a batch of blobs queued for fetching.
#
-class _CASBatchRead():
+class _CASBatchRead:
def __init__(self, remote):
self._remote = remote
self._requests = []
@@ -123,22 +121,28 @@ class _CASBatchRead():
for response in batch_response.responses:
if response.status.code == code_pb2.NOT_FOUND:
if missing_blobs is None:
- raise BlobNotFound(response.digest.hash, "Failed to download blob {}: {}".format(
- response.digest.hash, response.status.code))
+ raise BlobNotFound(
+ response.digest.hash,
+ "Failed to download blob {}: {}".format(response.digest.hash, response.status.code),
+ )
missing_blobs.append(response.digest)
if response.status.code != code_pb2.OK:
- raise CASRemoteError("Failed to download blob {}: {}".format(
- response.digest.hash, response.status.code))
+ raise CASRemoteError(
+ "Failed to download blob {}: {}".format(response.digest.hash, response.status.code)
+ )
if response.digest.size_bytes != len(response.data):
- raise CASRemoteError("Failed to download blob {}: expected {} bytes, received {} bytes".format(
- response.digest.hash, response.digest.size_bytes, len(response.data)))
+ raise CASRemoteError(
+ "Failed to download blob {}: expected {} bytes, received {} bytes".format(
+ response.digest.hash, response.digest.size_bytes, len(response.data)
+ )
+ )
# Represents a batch of blobs queued for upload.
#
-class _CASBatchUpdate():
+class _CASBatchUpdate:
def __init__(self, remote):
self._remote = remote
self._requests = []
@@ -175,5 +179,7 @@ class _CASBatchUpdate():
else:
reason = None
- raise CASRemoteError("Failed to upload blob {}: {}".format(
- response.digest.hash, response.status.code), reason=reason)
+ raise CASRemoteError(
+ "Failed to upload blob {}: {}".format(response.digest.hash, response.status.code),
+ reason=reason,
+ )
diff --git a/src/buildstream/_cas/casserver.py b/src/buildstream/_cas/casserver.py
index d4241435a..a2110d8a2 100644
--- a/src/buildstream/_cas/casserver.py
+++ b/src/buildstream/_cas/casserver.py
@@ -33,8 +33,14 @@ import click
from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
from .._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
from .._protos.google.rpc import code_pb2
-from .._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc, \
- artifact_pb2, artifact_pb2_grpc, source_pb2, source_pb2_grpc
+from .._protos.buildstream.v2 import (
+ buildstream_pb2,
+ buildstream_pb2_grpc,
+ artifact_pb2,
+ artifact_pb2_grpc,
+ source_pb2,
+ source_pb2_grpc,
+)
from .. import utils
from .._exceptions import CASError, CASCacheError
@@ -61,8 +67,8 @@ def create_server(repo, *, enable_push, quota, index_only):
cas = CASCache(os.path.abspath(repo), cache_quota=quota, protect_session_blobs=False)
try:
- artifactdir = os.path.join(os.path.abspath(repo), 'artifacts', 'refs')
- sourcedir = os.path.join(os.path.abspath(repo), 'source_protos')
+ artifactdir = os.path.join(os.path.abspath(repo), "artifacts", "refs")
+ sourcedir = os.path.join(os.path.abspath(repo), "source_protos")
# Use max_workers default from Python 3.5+
max_workers = (os.cpu_count() or 1) * 5
@@ -70,31 +76,31 @@ def create_server(repo, *, enable_push, quota, index_only):
if not index_only:
bytestream_pb2_grpc.add_ByteStreamServicer_to_server(
- _ByteStreamServicer(cas, enable_push=enable_push), server)
+ _ByteStreamServicer(cas, enable_push=enable_push), server
+ )
remote_execution_pb2_grpc.add_ContentAddressableStorageServicer_to_server(
- _ContentAddressableStorageServicer(cas, enable_push=enable_push), server)
+ _ContentAddressableStorageServicer(cas, enable_push=enable_push), server
+ )
- remote_execution_pb2_grpc.add_CapabilitiesServicer_to_server(
- _CapabilitiesServicer(), server)
+ remote_execution_pb2_grpc.add_CapabilitiesServicer_to_server(_CapabilitiesServicer(), server)
buildstream_pb2_grpc.add_ReferenceStorageServicer_to_server(
- _ReferenceStorageServicer(cas, enable_push=enable_push), server)
+ _ReferenceStorageServicer(cas, enable_push=enable_push), server
+ )
artifact_pb2_grpc.add_ArtifactServiceServicer_to_server(
- _ArtifactServicer(cas, artifactdir, update_cas=not index_only), server)
+ _ArtifactServicer(cas, artifactdir, update_cas=not index_only), server
+ )
- source_pb2_grpc.add_SourceServiceServicer_to_server(
- _SourceServicer(sourcedir), server)
+ source_pb2_grpc.add_SourceServiceServicer_to_server(_SourceServicer(sourcedir), server)
# Create up reference storage and artifact capabilities
- artifact_capabilities = buildstream_pb2.ArtifactCapabilities(
- allow_updates=enable_push)
- source_capabilities = buildstream_pb2.SourceCapabilities(
- allow_updates=enable_push)
+ artifact_capabilities = buildstream_pb2.ArtifactCapabilities(allow_updates=enable_push)
+ source_capabilities = buildstream_pb2.SourceCapabilities(allow_updates=enable_push)
buildstream_pb2_grpc.add_CapabilitiesServicer_to_server(
- _BuildStreamCapabilitiesServicer(artifact_capabilities, source_capabilities),
- server)
+ _BuildStreamCapabilitiesServicer(artifact_capabilities, source_capabilities), server
+ )
yield server
@@ -103,28 +109,25 @@ def create_server(repo, *, enable_push, quota, index_only):
@click.command(short_help="CAS Artifact Server")
-@click.option('--port', '-p', type=click.INT, required=True, help="Port number")
-@click.option('--server-key', help="Private server key for TLS (PEM-encoded)")
-@click.option('--server-cert', help="Public server certificate for TLS (PEM-encoded)")
-@click.option('--client-certs', help="Public client certificates for TLS (PEM-encoded)")
-@click.option('--enable-push', is_flag=True,
- help="Allow clients to upload blobs and update artifact cache")
-@click.option('--quota', type=click.INT, default=10e9, show_default=True,
- help="Maximum disk usage in bytes")
-@click.option('--index-only', is_flag=True,
- help="Only provide the BuildStream artifact and source services (\"index\"), not the CAS (\"storage\")")
-@click.argument('repo')
-def server_main(repo, port, server_key, server_cert, client_certs, enable_push,
- quota, index_only):
+@click.option("--port", "-p", type=click.INT, required=True, help="Port number")
+@click.option("--server-key", help="Private server key for TLS (PEM-encoded)")
+@click.option("--server-cert", help="Public server certificate for TLS (PEM-encoded)")
+@click.option("--client-certs", help="Public client certificates for TLS (PEM-encoded)")
+@click.option("--enable-push", is_flag=True, help="Allow clients to upload blobs and update artifact cache")
+@click.option("--quota", type=click.INT, default=10e9, show_default=True, help="Maximum disk usage in bytes")
+@click.option(
+ "--index-only",
+ is_flag=True,
+ help='Only provide the BuildStream artifact and source services ("index"), not the CAS ("storage")',
+)
+@click.argument("repo")
+def server_main(repo, port, server_key, server_cert, client_certs, enable_push, quota, index_only):
# Handle SIGTERM by calling sys.exit(0), which will raise a SystemExit exception,
# properly executing cleanup code in `finally` clauses and context managers.
# This is required to terminate buildbox-casd on SIGTERM.
signal.signal(signal.SIGTERM, lambda signalnum, frame: sys.exit(0))
- with create_server(repo,
- quota=quota,
- enable_push=enable_push,
- index_only=index_only) as server:
+ with create_server(repo, quota=quota, enable_push=enable_push, index_only=index_only) as server:
use_tls = bool(server_key)
@@ -138,23 +141,25 @@ def server_main(repo, port, server_key, server_cert, client_certs, enable_push,
if use_tls:
# Read public/private key pair
- with open(server_key, 'rb') as f:
+ with open(server_key, "rb") as f:
server_key_bytes = f.read()
- with open(server_cert, 'rb') as f:
+ with open(server_cert, "rb") as f:
server_cert_bytes = f.read()
if client_certs:
- with open(client_certs, 'rb') as f:
+ with open(client_certs, "rb") as f:
client_certs_bytes = f.read()
else:
client_certs_bytes = None
- credentials = grpc.ssl_server_credentials([(server_key_bytes, server_cert_bytes)],
- root_certificates=client_certs_bytes,
- require_client_auth=bool(client_certs))
- server.add_secure_port('[::]:{}'.format(port), credentials)
+ credentials = grpc.ssl_server_credentials(
+ [(server_key_bytes, server_cert_bytes)],
+ root_certificates=client_certs_bytes,
+ require_client_auth=bool(client_certs),
+ )
+ server.add_secure_port("[::]:{}".format(port), credentials)
else:
- server.add_insecure_port('[::]:{}'.format(port))
+ server.add_insecure_port("[::]:{}".format(port))
# Run artifact server
server.start()
@@ -183,7 +188,7 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
return
try:
- with open(self.cas.objpath(client_digest), 'rb') as f:
+ with open(self.cas.objpath(client_digest), "rb") as f:
if os.fstat(f.fileno()).st_size != client_digest.size_bytes:
context.set_code(grpc.StatusCode.NOT_FOUND)
return
@@ -317,7 +322,7 @@ class _ContentAddressableStorageServicer(remote_execution_pb2_grpc.ContentAddres
blob_response.digest.size_bytes = digest.size_bytes
try:
objpath = self.cas.objpath(digest)
- with open(objpath, 'rb') as f:
+ with open(objpath, "rb") as f:
if os.fstat(f.fileno()).st_size != digest.size_bytes:
blob_response.status.code = code_pb2.NOT_FOUND
continue
@@ -437,7 +442,6 @@ class _ReferenceStorageServicer(buildstream_pb2_grpc.ReferenceStorageServicer):
class _ArtifactServicer(artifact_pb2_grpc.ArtifactServiceServicer):
-
def __init__(self, cas, artifactdir, *, update_cas=True):
super().__init__()
self.cas = cas
@@ -451,7 +455,7 @@ class _ArtifactServicer(artifact_pb2_grpc.ArtifactServiceServicer):
context.abort(grpc.StatusCode.NOT_FOUND, "Artifact proto not found")
artifact = artifact_pb2.Artifact()
- with open(artifact_path, 'rb') as f:
+ with open(artifact_path, "rb") as f:
artifact.ParseFromString(f.read())
# Artifact-only servers will not have blobs on their system,
@@ -489,11 +493,9 @@ class _ArtifactServicer(artifact_pb2_grpc.ArtifactServiceServicer):
except FileNotFoundError:
os.unlink(artifact_path)
- context.abort(grpc.StatusCode.NOT_FOUND,
- "Artifact files incomplete")
+ context.abort(grpc.StatusCode.NOT_FOUND, "Artifact files incomplete")
except DecodeError:
- context.abort(grpc.StatusCode.NOT_FOUND,
- "Artifact files not valid")
+ context.abort(grpc.StatusCode.NOT_FOUND, "Artifact files not valid")
return artifact
@@ -516,7 +518,7 @@ class _ArtifactServicer(artifact_pb2_grpc.ArtifactServiceServicer):
# Add the artifact proto to the cas
artifact_path = os.path.join(self.artifactdir, request.cache_key)
os.makedirs(os.path.dirname(artifact_path), exist_ok=True)
- with utils.save_file_atomic(artifact_path, mode='wb') as f:
+ with utils.save_file_atomic(artifact_path, mode="wb") as f:
f.write(artifact.SerializeToString())
return artifact
@@ -527,19 +529,18 @@ class _ArtifactServicer(artifact_pb2_grpc.ArtifactServiceServicer):
def _check_directory(self, name, digest, context):
try:
directory = remote_execution_pb2.Directory()
- with open(self.cas.objpath(digest), 'rb') as f:
+ with open(self.cas.objpath(digest), "rb") as f:
directory.ParseFromString(f.read())
except FileNotFoundError:
- context.abort(grpc.StatusCode.FAILED_PRECONDITION,
- "Artifact {} specified but no files found".format(name))
+ context.abort(grpc.StatusCode.FAILED_PRECONDITION, "Artifact {} specified but no files found".format(name))
except DecodeError:
- context.abort(grpc.StatusCode.FAILED_PRECONDITION,
- "Artifact {} specified but directory not found".format(name))
+ context.abort(
+ grpc.StatusCode.FAILED_PRECONDITION, "Artifact {} specified but directory not found".format(name)
+ )
def _check_file(self, name, digest, context):
if not os.path.exists(self.cas.objpath(digest)):
- context.abort(grpc.StatusCode.FAILED_PRECONDITION,
- "Artifact {} specified but not found".format(name))
+ context.abort(grpc.StatusCode.FAILED_PRECONDITION, "Artifact {} specified but not found".format(name))
class _BuildStreamCapabilitiesServicer(buildstream_pb2_grpc.CapabilitiesServicer):
@@ -564,8 +565,7 @@ class _SourceServicer(source_pb2_grpc.SourceServiceServicer):
except FileNotFoundError:
context.abort(grpc.StatusCode.NOT_FOUND, "Source not found")
except DecodeError:
- context.abort(grpc.StatusCode.NOT_FOUND,
- "Sources gives invalid directory")
+ context.abort(grpc.StatusCode.NOT_FOUND, "Sources gives invalid directory")
return source_proto
@@ -576,7 +576,7 @@ class _SourceServicer(source_pb2_grpc.SourceServiceServicer):
def _get_source(self, cache_key):
path = os.path.join(self.sourcedir, cache_key)
source_proto = source_pb2.Source()
- with open(path, 'r+b') as f:
+ with open(path, "r+b") as f:
source_proto.ParseFromString(f.read())
os.utime(path)
return source_proto
@@ -584,18 +584,18 @@ class _SourceServicer(source_pb2_grpc.SourceServiceServicer):
def _set_source(self, cache_key, source_proto):
path = os.path.join(self.sourcedir, cache_key)
os.makedirs(os.path.dirname(path), exist_ok=True)
- with utils.save_file_atomic(path, 'w+b') as f:
+ with utils.save_file_atomic(path, "w+b") as f:
f.write(source_proto.SerializeToString())
def _digest_from_download_resource_name(resource_name):
- parts = resource_name.split('/')
+ parts = resource_name.split("/")
# Accept requests from non-conforming BuildStream 1.1.x clients
if len(parts) == 2:
- parts.insert(0, 'blobs')
+ parts.insert(0, "blobs")
- if len(parts) != 3 or parts[0] != 'blobs':
+ if len(parts) != 3 or parts[0] != "blobs":
return None
try:
@@ -608,15 +608,15 @@ def _digest_from_download_resource_name(resource_name):
def _digest_from_upload_resource_name(resource_name):
- parts = resource_name.split('/')
+ parts = resource_name.split("/")
# Accept requests from non-conforming BuildStream 1.1.x clients
if len(parts) == 2:
- parts.insert(0, 'uploads')
+ parts.insert(0, "uploads")
parts.insert(1, str(uuid.uuid4()))
- parts.insert(2, 'blobs')
+ parts.insert(2, "blobs")
- if len(parts) < 5 or parts[0] != 'uploads' or parts[2] != 'blobs':
+ if len(parts) < 5 or parts[0] != "uploads" or parts[2] != "blobs":
return None
try:
diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index 4e1007e28..f426f4bb0 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -46,13 +46,12 @@ from .sandbox import SandboxRemote
# verbosity levels and basically anything pertaining to the context
# in which BuildStream was invoked.
#
-class Context():
-
+class Context:
def __init__(self, *, use_casd=True):
# Whether we are running as part of a test suite. This is only relevant
# for developing BuildStream itself.
- self.is_running_in_test_suite = 'BST_TEST_SUITE' in os.environ
+ self.is_running_in_test_suite = "BST_TEST_SUITE" in os.environ
# Filename indicating which configuration file was used, or None for the defaults
self.config_origin = None
@@ -216,8 +215,7 @@ class Context():
# a $XDG_CONFIG_HOME/buildstream.conf file
#
if not config:
- default_config = os.path.join(os.environ['XDG_CONFIG_HOME'],
- 'buildstream.conf')
+ default_config = os.path.join(os.environ["XDG_CONFIG_HOME"], "buildstream.conf")
if os.path.exists(default_config):
config = default_config
@@ -231,19 +229,32 @@ class Context():
user_config._composite(defaults)
# Give obsoletion warnings
- if 'builddir' in defaults:
+ if "builddir" in defaults:
raise LoadError("builddir is obsolete, use cachedir", LoadErrorReason.INVALID_DATA)
- if 'artifactdir' in defaults:
+ if "artifactdir" in defaults:
raise LoadError("artifactdir is obsolete", LoadErrorReason.INVALID_DATA)
- defaults.validate_keys([
- 'cachedir', 'sourcedir', 'builddir', 'logdir', 'scheduler', 'build',
- 'artifacts', 'source-caches', 'logging', 'projects', 'cache', 'prompt',
- 'workspacedir', 'remote-execution',
- ])
-
- for directory in ['cachedir', 'sourcedir', 'logdir', 'workspacedir']:
+ defaults.validate_keys(
+ [
+ "cachedir",
+ "sourcedir",
+ "builddir",
+ "logdir",
+ "scheduler",
+ "build",
+ "artifacts",
+ "source-caches",
+ "logging",
+ "projects",
+ "cache",
+ "prompt",
+ "workspacedir",
+ "remote-execution",
+ ]
+ )
+
+ for directory in ["cachedir", "sourcedir", "logdir", "workspacedir"]:
# Allow the ~ tilde expansion and any environment variables in
# path specification in the config files.
#
@@ -256,25 +267,23 @@ class Context():
# Relative paths don't make sense in user configuration. The exception is
# workspacedir where `.` is useful as it will be combined with the name
# specified on the command line.
- if not os.path.isabs(path) and not (directory == 'workspacedir' and path == '.'):
+ if not os.path.isabs(path) and not (directory == "workspacedir" and path == "."):
raise LoadError("{} must be an absolute path".format(directory), LoadErrorReason.INVALID_DATA)
# add directories not set by users
- self.tmpdir = os.path.join(self.cachedir, 'tmp')
- self.casdir = os.path.join(self.cachedir, 'cas')
- self.builddir = os.path.join(self.cachedir, 'build')
- self.artifactdir = os.path.join(self.cachedir, 'artifacts', 'refs')
+ self.tmpdir = os.path.join(self.cachedir, "tmp")
+ self.casdir = os.path.join(self.cachedir, "cas")
+ self.builddir = os.path.join(self.cachedir, "build")
+ self.artifactdir = os.path.join(self.cachedir, "artifacts", "refs")
# Move old artifact cas to cas if it exists and create symlink
- old_casdir = os.path.join(self.cachedir, 'artifacts', 'cas')
- if (os.path.exists(old_casdir) and not os.path.islink(old_casdir) and
- not os.path.exists(self.casdir)):
+ old_casdir = os.path.join(self.cachedir, "artifacts", "cas")
+ if os.path.exists(old_casdir) and not os.path.islink(old_casdir) and not os.path.exists(self.casdir):
os.rename(old_casdir, self.casdir)
os.symlink(self.casdir, old_casdir)
# Cleanup old extract directories
- old_extractdirs = [os.path.join(self.cachedir, 'artifacts', 'extract'),
- os.path.join(self.cachedir, 'extract')]
+ old_extractdirs = [os.path.join(self.cachedir, "artifacts", "extract"), os.path.join(self.cachedir, "extract")]
for old_extractdir in old_extractdirs:
if os.path.isdir(old_extractdir):
shutil.rmtree(old_extractdir, ignore_errors=True)
@@ -282,21 +291,22 @@ class Context():
# Load quota configuration
# We need to find the first existing directory in the path of our
# casdir - the casdir may not have been created yet.
- cache = defaults.get_mapping('cache')
- cache.validate_keys(['quota', 'pull-buildtrees', 'cache-buildtrees'])
+ cache = defaults.get_mapping("cache")
+ cache.validate_keys(["quota", "pull-buildtrees", "cache-buildtrees"])
cas_volume = self.casdir
while not os.path.exists(cas_volume):
cas_volume = os.path.dirname(cas_volume)
- self.config_cache_quota_string = cache.get_str('quota')
+ self.config_cache_quota_string = cache.get_str("quota")
try:
- self.config_cache_quota = utils._parse_size(self.config_cache_quota_string,
- cas_volume)
+ self.config_cache_quota = utils._parse_size(self.config_cache_quota_string, cas_volume)
except utils.UtilError as e:
- raise LoadError("{}\nPlease specify the value in bytes or as a % of full disk space.\n"
- "\nValid values are, for example: 800M 10G 1T 50%\n"
- .format(str(e)), LoadErrorReason.INVALID_DATA) from e
+ raise LoadError(
+ "{}\nPlease specify the value in bytes or as a % of full disk space.\n"
+ "\nValid values are, for example: 800M 10G 1T 50%\n".format(str(e)),
+ LoadErrorReason.INVALID_DATA,
+ ) from e
# Load artifact share configuration
self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)
@@ -305,73 +315,70 @@ class Context():
self.source_cache_specs = SourceCache.specs_from_config_node(defaults)
# Load remote execution config getting pull-artifact-files from it
- remote_execution = defaults.get_mapping('remote-execution', default=None)
+ remote_execution = defaults.get_mapping("remote-execution", default=None)
if remote_execution:
- self.pull_artifact_files = remote_execution.get_bool('pull-artifact-files', default=True)
+ self.pull_artifact_files = remote_execution.get_bool("pull-artifact-files", default=True)
# This stops it being used in the remote service set up
- remote_execution.safe_del('pull-artifact-files')
+ remote_execution.safe_del("pull-artifact-files")
# Don't pass the remote execution settings if that was the only option
if remote_execution.keys() == []:
- del defaults['remote-execution']
+ del defaults["remote-execution"]
else:
self.pull_artifact_files = True
self.remote_execution_specs = SandboxRemote.specs_from_config_node(defaults)
# Load pull build trees configuration
- self.pull_buildtrees = cache.get_bool('pull-buildtrees')
+ self.pull_buildtrees = cache.get_bool("pull-buildtrees")
# Load cache build trees configuration
- self.cache_buildtrees = cache.get_enum('cache-buildtrees', _CacheBuildTrees)
+ self.cache_buildtrees = cache.get_enum("cache-buildtrees", _CacheBuildTrees)
# Load logging config
- logging = defaults.get_mapping('logging')
- logging.validate_keys([
- 'key-length', 'verbose',
- 'error-lines', 'message-lines',
- 'debug', 'element-format', 'message-format'
- ])
- self.log_key_length = logging.get_int('key-length')
- self.log_debug = logging.get_bool('debug')
- self.log_verbose = logging.get_bool('verbose')
- self.log_error_lines = logging.get_int('error-lines')
- self.log_message_lines = logging.get_int('message-lines')
- self.log_element_format = logging.get_str('element-format')
- self.log_message_format = logging.get_str('message-format')
+ logging = defaults.get_mapping("logging")
+ logging.validate_keys(
+ ["key-length", "verbose", "error-lines", "message-lines", "debug", "element-format", "message-format"]
+ )
+ self.log_key_length = logging.get_int("key-length")
+ self.log_debug = logging.get_bool("debug")
+ self.log_verbose = logging.get_bool("verbose")
+ self.log_error_lines = logging.get_int("error-lines")
+ self.log_message_lines = logging.get_int("message-lines")
+ self.log_element_format = logging.get_str("element-format")
+ self.log_message_format = logging.get_str("message-format")
# Load scheduler config
- scheduler = defaults.get_mapping('scheduler')
- scheduler.validate_keys([
- 'on-error', 'fetchers', 'builders',
- 'pushers', 'network-retries'
- ])
- self.sched_error_action = scheduler.get_enum('on-error', _SchedulerErrorAction)
- self.sched_fetchers = scheduler.get_int('fetchers')
- self.sched_builders = scheduler.get_int('builders')
- self.sched_pushers = scheduler.get_int('pushers')
- self.sched_network_retries = scheduler.get_int('network-retries')
+ scheduler = defaults.get_mapping("scheduler")
+ scheduler.validate_keys(["on-error", "fetchers", "builders", "pushers", "network-retries"])
+ self.sched_error_action = scheduler.get_enum("on-error", _SchedulerErrorAction)
+ self.sched_fetchers = scheduler.get_int("fetchers")
+ self.sched_builders = scheduler.get_int("builders")
+ self.sched_pushers = scheduler.get_int("pushers")
+ self.sched_network_retries = scheduler.get_int("network-retries")
# Load build config
- build = defaults.get_mapping('build')
- build.validate_keys(['max-jobs', 'dependencies'])
- self.build_max_jobs = build.get_int('max-jobs')
-
- self.build_dependencies = build.get_str('dependencies')
- if self.build_dependencies not in ['plan', 'all']:
- provenance = build.get_scalar('dependencies').get_provenance()
- raise LoadError("{}: Invalid value for 'dependencies'. Choose 'plan' or 'all'."
- .format(provenance), LoadErrorReason.INVALID_DATA)
+ build = defaults.get_mapping("build")
+ build.validate_keys(["max-jobs", "dependencies"])
+ self.build_max_jobs = build.get_int("max-jobs")
+
+ self.build_dependencies = build.get_str("dependencies")
+ if self.build_dependencies not in ["plan", "all"]:
+ provenance = build.get_scalar("dependencies").get_provenance()
+ raise LoadError(
+ "{}: Invalid value for 'dependencies'. Choose 'plan' or 'all'.".format(provenance),
+ LoadErrorReason.INVALID_DATA,
+ )
# Load per-projects overrides
- self._project_overrides = defaults.get_mapping('projects', default={})
+ self._project_overrides = defaults.get_mapping("projects", default={})
# Shallow validation of overrides, parts of buildstream which rely
# on the overrides are expected to validate elsewhere.
for overrides_project in self._project_overrides.keys():
overrides = self._project_overrides.get_mapping(overrides_project)
- overrides.validate_keys(['artifacts', 'source-caches', 'options',
- 'strict', 'default-mirror',
- 'remote-execution'])
+ overrides.validate_keys(
+ ["artifacts", "source-caches", "options", "strict", "default-mirror", "remote-execution"]
+ )
@property
def platform(self):
@@ -474,7 +481,7 @@ class Context():
# so work out if we should be strict, and then cache the result
toplevel = self.get_toplevel_project()
overrides = self.get_overrides(toplevel.name)
- self._strict_build_plan = overrides.get_bool('strict', default=True)
+ self._strict_build_plan = overrides.get_bool("strict", default=True)
# If it was set by the CLI, it overrides any config
# Ditto if we've already computed this, then we return the computed
@@ -505,12 +512,12 @@ class Context():
# preferred locations of things from user configuration
# files.
def _init_xdg(self):
- if not os.environ.get('XDG_CACHE_HOME'):
- os.environ['XDG_CACHE_HOME'] = os.path.expanduser('~/.cache')
- if not os.environ.get('XDG_CONFIG_HOME'):
- os.environ['XDG_CONFIG_HOME'] = os.path.expanduser('~/.config')
- if not os.environ.get('XDG_DATA_HOME'):
- os.environ['XDG_DATA_HOME'] = os.path.expanduser('~/.local/share')
+ if not os.environ.get("XDG_CACHE_HOME"):
+ os.environ["XDG_CACHE_HOME"] = os.path.expanduser("~/.cache")
+ if not os.environ.get("XDG_CONFIG_HOME"):
+ os.environ["XDG_CONFIG_HOME"] = os.path.expanduser("~/.config")
+ if not os.environ.get("XDG_DATA_HOME"):
+ os.environ["XDG_DATA_HOME"] = os.path.expanduser("~/.local/share")
def get_cascache(self):
if self._cascache is None:
@@ -521,10 +528,9 @@ class Context():
else:
log_level = CASLogLevel.WARNING
- self._cascache = CASCache(self.cachedir,
- casd=self.use_casd,
- cache_quota=self.config_cache_quota,
- log_level=log_level)
+ self._cascache = CASCache(
+ self.cachedir, casd=self.use_casd, cache_quota=self.config_cache_quota, log_level=log_level
+ )
return self._cascache
# prepare_fork():
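As a point of reference for the pattern in the hunks above (the CASCache(), LoadError() and validate_keys() call sites), Black rewraps any call that no longer fits within the project's configured line length by putting one argument per line and adding a trailing comma; calls that do fit are collapsed onto a single line. The snippet below is a minimal illustrative sketch only, not part of this diff; the function name and values are invented.

def connect(cachedir, *, casd=True, cache_quota=None, log_level=0):
    # Stand-in for a constructor that takes several keyword arguments.
    return (cachedir, casd, cache_quota, log_level)

# Short enough to fit, so Black keeps it on one line:
connect("/tmp/cache", casd=True)

# A call that exceeds the line length is exploded, one argument per line,
# with a trailing comma after the last argument -- the shape used
# throughout the hunks above:
connect(
    "/tmp/cache",
    casd=True,
    cache_quota="10G",
    log_level=2,
)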
diff --git a/src/buildstream/_elementfactory.py b/src/buildstream/_elementfactory.py
index d6591bf4c..5d219c627 100644
--- a/src/buildstream/_elementfactory.py
+++ b/src/buildstream/_elementfactory.py
@@ -30,14 +30,15 @@ from .element import Element
# plugin_origins (list): Data used to search for external Element plugins
#
class ElementFactory(PluginContext):
-    def __init__(self, plugin_base, *,
-                 format_versions={},
-                 plugin_origins=None):
-
-        super().__init__(plugin_base, Element, [_site.element_plugins],
-                         plugin_origins=plugin_origins,
-                         format_versions=format_versions)
+    def __init__(self, plugin_base, *, format_versions={}, plugin_origins=None):
+        super().__init__(
+            plugin_base,
+            Element,
+            [_site.element_plugins],
+            plugin_origins=plugin_origins,
+            format_versions=format_versions,
+        )
# create():
#
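The ElementFactory hunk shows the two most common changes in this reformat: string literals are normalized to double quotes, and a signature that was wrapped by hand is collapsed back onto one line once it fits. A minimal sketch, purely illustrative (the names are invented, not taken from BuildStream):

# Before Black: hand-wrapped signature, single-quoted strings.
# def make_factory(plugin_base, *,
#                  format_versions=None,
#                  plugin_origins=None):
#     return {'base': plugin_base}

# After Black: one line, double quotes.
def make_factory(plugin_base, *, format_versions=None, plugin_origins=None):
    return {"base": plugin_base, "versions": format_versions, "origins": plugin_origins}

factory = make_factory("base", format_versions={"core": 0})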
diff --git a/src/buildstream/_exceptions.py b/src/buildstream/_exceptions.py
index 46de90796..ca17577f7 100644
--- a/src/buildstream/_exceptions.py
+++ b/src/buildstream/_exceptions.py
@@ -51,7 +51,7 @@ def get_last_exception():
# Used by regression tests
#
def get_last_task_error():
- if 'BST_TEST_SUITE' not in os.environ:
+ if "BST_TEST_SUITE" not in os.environ:
raise BstError("Getting the last task error is only supported when running tests")
global _last_task_error_domain
@@ -71,7 +71,7 @@ def get_last_task_error():
# tests about how things failed in a machine readable way
#
def set_last_task_error(domain, reason):
- if 'BST_TEST_SUITE' in os.environ:
+ if "BST_TEST_SUITE" in os.environ:
global _last_task_error_domain
global _last_task_error_reason
@@ -108,7 +108,6 @@ class ErrorDomain(Enum):
# context can then be communicated back to the main process.
#
class BstError(Exception):
-
def __init__(self, message, *, detail=None, domain=None, reason=None, temporary=False):
global _last_exception
@@ -133,7 +132,7 @@ class BstError(Exception):
self.reason = reason
# Hold on to the last raised exception for testing purposes
- if 'BST_TEST_SUITE' in os.environ:
+ if "BST_TEST_SUITE" in os.environ:
_last_exception = self
@@ -330,7 +329,6 @@ class CASCacheError(CASError):
# Raised from pipeline operations
#
class PipelineError(BstError):
-
def __init__(self, message, *, detail=None, reason=None):
super().__init__(message, detail=detail, domain=ErrorDomain.PIPELINE, reason=reason)
@@ -340,7 +338,6 @@ class PipelineError(BstError):
# Raised when a stream operation fails
#
class StreamError(BstError):
-
def __init__(self, message=None, *, detail=None, reason=None, terminated=False):
# The empty string should never appear to a user,
diff --git a/src/buildstream/_frontend/app.py b/src/buildstream/_frontend/app.py
index 99e164358..09610851f 100644
--- a/src/buildstream/_frontend/app.py
+++ b/src/buildstream/_frontend/app.py
@@ -56,19 +56,18 @@ INDENT = 4
# main_options (dict): The main CLI options of the `bst`
# command, before any subcommand
#
-class App():
-
+class App:
def __init__(self, main_options):
#
# Public members
#
- self.context = None # The Context object
- self.stream = None # The Stream object
- self.project = None # The toplevel Project object
- self.logger = None # The LogLine object
- self.interactive = None # Whether we are running in interactive mode
- self.colors = None # Whether to use colors in logging
+ self.context = None # The Context object
+ self.stream = None # The Stream object
+ self.project = None # The toplevel Project object
+ self.logger = None # The LogLine object
+ self.interactive = None # Whether we are running in interactive mode
+ self.colors = None # Whether to use colors in logging
#
# Private members
@@ -76,18 +75,18 @@ class App():
self._session_start = datetime.datetime.now()
self._session_name = None
self._main_options = main_options # Main CLI options, before any command
- self._status = None # The Status object
- self._fail_messages = {} # Failure messages by unique plugin id
+ self._status = None # The Status object
+ self._fail_messages = {} # Failure messages by unique plugin id
self._interactive_failures = None # Whether to handle failures interactively
- self._started = False # Whether a session has started
- self._set_project_dir = False # Whether -C option was used
- self._state = None # Frontend reads this and registers callbacks
+ self._started = False # Whether a session has started
+ self._set_project_dir = False # Whether -C option was used
+ self._state = None # Frontend reads this and registers callbacks
# UI Colors Profiles
- self._content_profile = Profile(fg='yellow')
- self._format_profile = Profile(fg='cyan', dim=True)
- self._success_profile = Profile(fg='green')
- self._error_profile = Profile(fg='red', dim=True)
+ self._content_profile = Profile(fg="yellow")
+ self._format_profile = Profile(fg="cyan", dim=True)
+ self._success_profile = Profile(fg="green")
+ self._error_profile = Profile(fg="red", dim=True)
self._detail_profile = Profile(dim=True)
#
@@ -96,31 +95,31 @@ class App():
is_a_tty = sys.stdout.isatty() and sys.stderr.isatty()
# Enable interactive mode if we're attached to a tty
- if main_options['no_interactive']:
+ if main_options["no_interactive"]:
self.interactive = False
else:
self.interactive = is_a_tty
# Handle errors interactively if we're in interactive mode
# and --on-error was not specified on the command line
- if main_options.get('on_error') is not None:
+ if main_options.get("on_error") is not None:
self._interactive_failures = False
else:
self._interactive_failures = self.interactive
# Use color output if we're attached to a tty, unless
# otherwise specified on the command line
- if main_options['colors'] is None:
+ if main_options["colors"] is None:
self.colors = is_a_tty
- elif main_options['colors']:
+ elif main_options["colors"]:
self.colors = True
else:
self.colors = False
- if main_options['directory']:
+ if main_options["directory"]:
self._set_project_dir = True
else:
- main_options['directory'] = os.getcwd()
+ main_options["directory"] = os.getcwd()
# create()
#
@@ -133,9 +132,10 @@ class App():
#
@classmethod
def create(cls, *args, **kwargs):
- if sys.platform.startswith('linux'):
+ if sys.platform.startswith("linux"):
# Use an App with linux specific features
from .linuxapp import LinuxApp # pylint: disable=cyclic-import
+
return LinuxApp(*args, **kwargs)
else:
# The base App() class is default
@@ -163,8 +163,8 @@ class App():
#
@contextmanager
def initialized(self, *, session_name=None):
- directory = self._main_options['directory']
- config = self._main_options['config']
+ directory = self._main_options["directory"]
+ config = self._main_options["config"]
self._session_name = session_name
@@ -184,19 +184,19 @@ class App():
# the command line when used, trumps the config files.
#
override_map = {
- 'strict': '_strict_build_plan',
- 'debug': 'log_debug',
- 'verbose': 'log_verbose',
- 'error_lines': 'log_error_lines',
- 'message_lines': 'log_message_lines',
- 'on_error': 'sched_error_action',
- 'fetchers': 'sched_fetchers',
- 'builders': 'sched_builders',
- 'pushers': 'sched_pushers',
- 'max_jobs': 'build_max_jobs',
- 'network_retries': 'sched_network_retries',
- 'pull_buildtrees': 'pull_buildtrees',
- 'cache_buildtrees': 'cache_buildtrees'
+ "strict": "_strict_build_plan",
+ "debug": "log_debug",
+ "verbose": "log_verbose",
+ "error_lines": "log_error_lines",
+ "message_lines": "log_message_lines",
+ "on_error": "sched_error_action",
+ "fetchers": "sched_fetchers",
+ "builders": "sched_builders",
+ "pushers": "sched_pushers",
+ "max_jobs": "build_max_jobs",
+ "network_retries": "sched_network_retries",
+ "pull_buildtrees": "pull_buildtrees",
+ "cache_buildtrees": "cache_buildtrees",
}
for cli_option, context_attr in override_map.items():
option_value = self._main_options.get(cli_option)
@@ -208,10 +208,13 @@ class App():
self._error_exit(e, "Error instantiating platform")
# Create the stream right away, we'll need to pass it around.
- self.stream = Stream(self.context, self._session_start,
- session_start_callback=self.session_start_cb,
- interrupt_callback=self._interrupt_handler,
- ticker_callback=self._tick)
+ self.stream = Stream(
+ self.context,
+ self._session_start,
+ session_start_callback=self.session_start_cb,
+ interrupt_callback=self._interrupt_handler,
+ ticker_callback=self._tick,
+ )
self._state = self.stream.get_state()
@@ -219,13 +222,16 @@ class App():
self._state.register_task_failed_callback(self._job_failed)
# Create the logger right before setting the message handler
- self.logger = LogLine(self.context, self._state,
- self._content_profile,
- self._format_profile,
- self._success_profile,
- self._error_profile,
- self._detail_profile,
- indent=INDENT)
+ self.logger = LogLine(
+ self.context,
+ self._state,
+ self._content_profile,
+ self._format_profile,
+ self._success_profile,
+ self._error_profile,
+ self._detail_profile,
+ indent=INDENT,
+ )
# Propagate pipeline feedback to the user
self.context.messenger.set_message_handler(self._message_handler)
@@ -248,10 +254,15 @@ class App():
self.stream.init()
# Create our status printer, only available in interactive
- self._status = Status(self.context, self._state,
- self._content_profile, self._format_profile,
- self._success_profile, self._error_profile,
- self.stream)
+ self._status = Status(
+ self.context,
+ self._state,
+ self._content_profile,
+ self._format_profile,
+ self._success_profile,
+ self._error_profile,
+ self.stream,
+ )
# Mark the beginning of the session
if session_name:
@@ -261,9 +272,13 @@ class App():
# Load the Project
#
try:
- self.project = Project(directory, self.context, cli_options=self._main_options['option'],
- default_mirror=self._main_options.get('default_mirror'),
- fetch_subprojects=self.stream.fetch_subprojects)
+ self.project = Project(
+ directory,
+ self.context,
+ cli_options=self._main_options["option"],
+ default_mirror=self._main_options.get("default_mirror"),
+ fetch_subprojects=self.stream.fetch_subprojects,
+ )
self.stream.set_project(self.project)
except LoadError as e:
@@ -291,7 +306,7 @@ class App():
elapsed = self.stream.elapsed_time
if isinstance(e, StreamError) and e.terminated: # pylint: disable=no-member
- self._message(MessageType.WARN, session_name + ' Terminated', elapsed=elapsed)
+ self._message(MessageType.WARN, session_name + " Terminated", elapsed=elapsed)
else:
self._message(MessageType.FAIL, session_name, elapsed=elapsed)
@@ -304,8 +319,9 @@ class App():
# Exit with the error
self._error_exit(e)
except RecursionError:
- click.echo("RecursionError: Dependency depth is too large. Maximum recursion depth exceeded.",
- err=True)
+ click.echo(
+ "RecursionError: Dependency depth is too large. Maximum recursion depth exceeded.", err=True
+ )
sys.exit(-1)
else:
@@ -331,41 +347,51 @@ class App():
# force (bool): Allow overwriting an existing project.conf
# target_directory (str): The target directory the project should be initialized in
#
- def init_project(self, project_name, format_version=BST_FORMAT_VERSION, element_path='elements',
- force=False, target_directory=None):
+ def init_project(
+ self,
+ project_name,
+ format_version=BST_FORMAT_VERSION,
+ element_path="elements",
+ force=False,
+ target_directory=None,
+ ):
if target_directory:
directory = os.path.abspath(target_directory)
else:
- directory = self._main_options['directory']
+ directory = self._main_options["directory"]
directory = os.path.abspath(directory)
- project_path = os.path.join(directory, 'project.conf')
+ project_path = os.path.join(directory, "project.conf")
try:
if self._set_project_dir:
- raise AppError("Attempted to use -C or --directory with init.",
- reason='init-with-set-directory',
- detail="Please use 'bst init {}' instead.".format(directory))
+ raise AppError(
+ "Attempted to use -C or --directory with init.",
+ reason="init-with-set-directory",
+ detail="Please use 'bst init {}' instead.".format(directory),
+ )
# Abort if the project.conf already exists, unless `--force` was specified in `bst init`
if not force and os.path.exists(project_path):
- raise AppError("A project.conf already exists at: {}".format(project_path),
- reason='project-exists')
+ raise AppError("A project.conf already exists at: {}".format(project_path), reason="project-exists")
if project_name:
# If project name was specified, user interaction is not desired, just
# perform some validation and write the project.conf
- node._assert_symbol_name(project_name, 'project name')
+ node._assert_symbol_name(project_name, "project name")
self._assert_format_version(format_version)
self._assert_element_path(element_path)
elif not self.interactive:
- raise AppError("Cannot initialize a new project without specifying the project name",
- reason='unspecified-project-name')
+ raise AppError(
+ "Cannot initialize a new project without specifying the project name",
+ reason="unspecified-project-name",
+ )
else:
# Collect the parameters using an interactive session
- project_name, format_version, element_path = \
- self._init_project_interactive(project_name, format_version, element_path)
+ project_name, format_version, element_path = self._init_project_interactive(
+ project_name, format_version, element_path
+ )
# Create the directory if it doesnt exist
try:
@@ -378,20 +404,21 @@ class App():
try:
os.makedirs(elements_path, exist_ok=True)
except IOError as e:
- raise AppError("Error creating elements sub-directory {}: {}"
- .format(elements_path, e)) from e
+ raise AppError("Error creating elements sub-directory {}: {}".format(elements_path, e)) from e
# Dont use ruamel.yaml here, because it doesnt let
# us programatically insert comments or whitespace at
# the toplevel.
try:
- with open(project_path, 'w') as f:
- f.write("# Unique project name\n" +
- "name: {}\n\n".format(project_name) +
- "# Required BuildStream format version\n" +
- "format-version: {}\n\n".format(format_version) +
- "# Subdirectory where elements are stored\n" +
- "element-path: {}\n".format(element_path))
+ with open(project_path, "w") as f:
+ f.write(
+ "# Unique project name\n"
+ + "name: {}\n\n".format(project_name)
+ + "# Required BuildStream format version\n"
+ + "format-version: {}\n\n".format(format_version)
+ + "# Subdirectory where elements are stored\n"
+ + "element-path: {}\n".format(element_path)
+ )
except IOError as e:
raise AppError("Error writing {}: {}".format(project_path, e)) from e
@@ -419,15 +446,18 @@ class App():
_, key, dim = element_key
if self.colors:
- prompt = self._format_profile.fmt('[') + \
- self._content_profile.fmt(key, dim=dim) + \
- self._format_profile.fmt('@') + \
- self._content_profile.fmt(element_name) + \
- self._format_profile.fmt(':') + \
- self._content_profile.fmt('$PWD') + \
- self._format_profile.fmt(']$') + ' '
+ prompt = (
+ self._format_profile.fmt("[")
+ + self._content_profile.fmt(key, dim=dim)
+ + self._format_profile.fmt("@")
+ + self._content_profile.fmt(element_name)
+ + self._format_profile.fmt(":")
+ + self._content_profile.fmt("$PWD")
+ + self._format_profile.fmt("]$")
+ + " "
+ )
else:
- prompt = '[{}@{}:${{PWD}}]$ '.format(key, element_name)
+ prompt = "[{}@{}:${{PWD}}]$ ".format(key, element_name)
return prompt
@@ -473,8 +503,7 @@ class App():
#
def _message(self, message_type, message, **kwargs):
args = dict(kwargs)
- self.context.messenger.message(
- Message(message_type, message, **args))
+ self.context.messenger.message(Message(message_type, message, **args))
# Exception handler
#
@@ -482,8 +511,7 @@ class App():
# Print the regular BUG message
formatted = "".join(traceback.format_exception(etype, value, tb))
- self._message(MessageType.BUG, str(value),
- detail=formatted)
+ self._message(MessageType.BUG, str(value), detail=formatted)
# If the scheduler has started, try to terminate all jobs gracefully,
# otherwise exit immediately.
@@ -498,8 +526,7 @@ class App():
def _maybe_render_status(self):
# If we're suspended or terminating, then dont render the status area
- if self._status and self.stream and \
- not (self.stream.suspended or self.stream.terminated):
+ if self._status and self.stream and not (self.stream.suspended or self.stream.terminated):
self._status.render()
#
@@ -518,36 +545,40 @@ class App():
# the currently ongoing tasks. We can also print something more
# intelligent, like how many tasks remain to complete overall.
with self._interrupted():
- click.echo("\nUser interrupted with ^C\n" +
- "\n"
- "Choose one of the following options:\n" +
- " (c)ontinue - Continue queueing jobs as much as possible\n" +
- " (q)uit - Exit after all ongoing jobs complete\n" +
- " (t)erminate - Terminate any ongoing jobs and exit\n" +
- "\n" +
- "Pressing ^C again will terminate jobs and exit\n",
- err=True)
+ click.echo(
+ "\nUser interrupted with ^C\n" + "\n"
+ "Choose one of the following options:\n"
+ + " (c)ontinue - Continue queueing jobs as much as possible\n"
+ + " (q)uit - Exit after all ongoing jobs complete\n"
+ + " (t)erminate - Terminate any ongoing jobs and exit\n"
+ + "\n"
+ + "Pressing ^C again will terminate jobs and exit\n",
+ err=True,
+ )
try:
- choice = click.prompt("Choice:",
- value_proc=_prefix_choice_value_proc(['continue', 'quit', 'terminate']),
- default='continue', err=True)
+ choice = click.prompt(
+ "Choice:",
+ value_proc=_prefix_choice_value_proc(["continue", "quit", "terminate"]),
+ default="continue",
+ err=True,
+ )
except (click.Abort, SystemError):
# In some cases, the readline buffer underlying the prompt gets corrupted on the second CTRL+C
# This throws a SystemError, which doesn't seem to be problematic for the rest of the program
# Ensure a newline after automatically printed '^C'
click.echo("", err=True)
- choice = 'terminate'
+ choice = "terminate"
- if choice == 'terminate':
+ if choice == "terminate":
click.echo("\nTerminating all jobs at user request\n", err=True)
self.stream.terminate()
else:
- if choice == 'quit':
+ if choice == "quit":
click.echo("\nCompleting ongoing tasks before quitting\n", err=True)
self.stream.quit()
- elif choice == 'continue':
+ elif choice == "continue":
click.echo("\nContinuing\n", err=True)
def _tick(self):
@@ -577,9 +608,11 @@ class App():
# the failure message reaches us ??
if not failure:
self._status.clear()
- click.echo("\n\n\nBUG: Message handling out of sync, " +
- "unable to retrieve failure message for element {}\n\n\n\n\n"
- .format(full_name), err=True)
+ click.echo(
+ "\n\n\nBUG: Message handling out of sync, "
+ + "unable to retrieve failure message for element {}\n\n\n\n\n".format(full_name),
+ err=True,
+ )
else:
self._handle_failure(element, action_name, failure, full_name)
@@ -604,69 +637,72 @@ class App():
# Interactive mode for element failures
with self._interrupted():
- summary = ("\n{} failure on element: {}\n".format(failure.action_name, full_name) +
- "\n" +
- "Choose one of the following options:\n" +
- " (c)ontinue - Continue queueing jobs as much as possible\n" +
- " (q)uit - Exit after all ongoing jobs complete\n" +
- " (t)erminate - Terminate any ongoing jobs and exit\n" +
- " (r)etry - Retry this job\n")
+ summary = (
+ "\n{} failure on element: {}\n".format(failure.action_name, full_name)
+ + "\n"
+ + "Choose one of the following options:\n"
+ + " (c)ontinue - Continue queueing jobs as much as possible\n"
+ + " (q)uit - Exit after all ongoing jobs complete\n"
+ + " (t)erminate - Terminate any ongoing jobs and exit\n"
+ + " (r)etry - Retry this job\n"
+ )
if failure.logfile:
summary += " (l)og - View the full log file\n"
if failure.sandbox:
summary += " (s)hell - Drop into a shell in the failed build sandbox\n"
summary += "\nPressing ^C will terminate jobs and exit\n"
- choices = ['continue', 'quit', 'terminate', 'retry']
+ choices = ["continue", "quit", "terminate", "retry"]
if failure.logfile:
- choices += ['log']
+ choices += ["log"]
if failure.sandbox:
- choices += ['shell']
+ choices += ["shell"]
- choice = ''
- while choice not in ['continue', 'quit', 'terminate', 'retry']:
+ choice = ""
+ while choice not in ["continue", "quit", "terminate", "retry"]:
click.echo(summary, err=True)
- self._notify("BuildStream failure", "{} on element {}"
- .format(failure.action_name, full_name))
+ self._notify("BuildStream failure", "{} on element {}".format(failure.action_name, full_name))
try:
- choice = click.prompt("Choice:", default='continue', err=True,
- value_proc=_prefix_choice_value_proc(choices))
+ choice = click.prompt(
+ "Choice:", default="continue", err=True, value_proc=_prefix_choice_value_proc(choices)
+ )
except (click.Abort, SystemError):
# In some cases, the readline buffer underlying the prompt gets corrupted on the second CTRL+C
# This throws a SystemError, which doesn't seem to be problematic for the rest of the program
# Ensure a newline after automatically printed '^C'
click.echo("", err=True)
- choice = 'terminate'
+ choice = "terminate"
# Handle choices which you can come back from
#
- if choice == 'shell':
+ if choice == "shell":
click.echo("\nDropping into an interactive shell in the failed build sandbox\n", err=True)
try:
unique_id, element_key = element
prompt = self.shell_prompt(full_name, element_key)
- self.stream.shell(None, Scope.BUILD, prompt, isolate=True,
- usebuildtree='always', unique_id=unique_id)
+ self.stream.shell(
+ None, Scope.BUILD, prompt, isolate=True, usebuildtree="always", unique_id=unique_id
+ )
except BstError as e:
click.echo("Error while attempting to create interactive shell: {}".format(e), err=True)
- elif choice == 'log':
- with open(failure.logfile, 'r') as logfile:
+ elif choice == "log":
+ with open(failure.logfile, "r") as logfile:
content = logfile.read()
click.echo_via_pager(content)
- if choice == 'terminate':
+ if choice == "terminate":
click.echo("\nTerminating all jobs\n", err=True)
self.stream.terminate()
else:
- if choice == 'quit':
+ if choice == "quit":
click.echo("\nCompleting ongoing tasks before quitting\n", err=True)
self.stream.quit()
- elif choice == 'continue':
+ elif choice == "continue":
click.echo("\nContinuing with other non failing elements\n", err=True)
- elif choice == 'retry':
+ elif choice == "retry":
click.echo("\nRetrying failed job\n", err=True)
unique_id = element[0]
self.stream._failure_retry(action_name, unique_id)
@@ -678,17 +714,14 @@ class App():
def session_start_cb(self):
self._started = True
if self._session_name:
- self.logger.print_heading(self.project,
- self.stream,
- log_file=self._main_options['log_file'])
+ self.logger.print_heading(self.project, self.stream, log_file=self._main_options["log_file"])
#
# Print a summary of the queues
#
def _print_summary(self):
click.echo("", err=True)
- self.logger.print_summary(self.stream,
- self._main_options['log_file'])
+ self.logger.print_summary(self.stream, self._main_options["log_file"])
# _error_exit()
#
@@ -720,7 +753,7 @@ class App():
click.echo(main_error, err=True)
if error.detail:
indent = " " * INDENT
- detail = '\n' + indent + indent.join(error.detail.splitlines(True))
+ detail = "\n" + indent + indent.join(error.detail.splitlines(True))
click.echo(detail, err=True)
sys.exit(-1)
@@ -753,8 +786,8 @@ class App():
self._maybe_render_status()
# Additionally log to a file
- if self._main_options['log_file']:
- click.echo(text, file=self._main_options['log_file'], color=False, nl=False)
+ if self._main_options["log_file"]:
+ click.echo(text, file=self._main_options["log_file"], color=False, nl=False)
@contextmanager
def _interrupted(self):
@@ -768,25 +801,26 @@ class App():
# Some validation routines for project initialization
#
def _assert_format_version(self, format_version):
- message = "The version must be supported by this " + \
- "version of buildstream (0 - {})\n".format(BST_FORMAT_VERSION)
+ message = "The version must be supported by this " + "version of buildstream (0 - {})\n".format(
+ BST_FORMAT_VERSION
+ )
# Validate that it is an integer
try:
number = int(format_version)
except ValueError as e:
- raise AppError(message, reason='invalid-format-version') from e
+ raise AppError(message, reason="invalid-format-version") from e
# Validate that the specified version is supported
if number < 0 or number > BST_FORMAT_VERSION:
- raise AppError(message, reason='invalid-format-version')
+ raise AppError(message, reason="invalid-format-version")
def _assert_element_path(self, element_path):
message = "The element path cannot be an absolute path or contain any '..' components\n"
# Validate the path is not absolute
if os.path.isabs(element_path):
- raise AppError(message, reason='invalid-element-path')
+ raise AppError(message, reason="invalid-element-path")
# Validate that the path does not contain any '..' components
path = element_path
@@ -794,8 +828,8 @@ class App():
split = os.path.split(path)
path = split[0]
basename = split[1]
- if basename == '..':
- raise AppError(message, reason='invalid-element-path')
+ if basename == "..":
+ raise AppError(message, reason="invalid-element-path")
# _init_project_interactive()
#
@@ -811,11 +845,10 @@ class App():
# format_version (int): The user selected format version
# element_path (str): The user selected element path
#
- def _init_project_interactive(self, project_name, format_version=BST_FORMAT_VERSION, element_path='elements'):
-
+ def _init_project_interactive(self, project_name, format_version=BST_FORMAT_VERSION, element_path="elements"):
def project_name_proc(user_input):
try:
- node._assert_symbol_name(user_input, 'project name')
+ node._assert_symbol_name(user_input, "project name")
except LoadError as e:
message = "{}\n\n{}\n".format(e, e.detail)
raise UsageError(message) from e
@@ -835,63 +868,101 @@ class App():
raise UsageError(str(e)) from e
return user_input
- w = TextWrapper(initial_indent=' ', subsequent_indent=' ', width=79)
+ w = TextWrapper(initial_indent=" ", subsequent_indent=" ", width=79)
# Collect project name
click.echo("", err=True)
click.echo(self._content_profile.fmt("Choose a unique name for your project"), err=True)
click.echo(self._format_profile.fmt("-------------------------------------"), err=True)
click.echo("", err=True)
- click.echo(self._detail_profile.fmt(
- w.fill("The project name is a unique symbol for your project and will be used "
- "to distinguish your project from others in user preferences, namspaceing "
- "of your project's artifacts in shared artifact caches, and in any case where "
- "BuildStream needs to distinguish between multiple projects.")), err=True)
+ click.echo(
+ self._detail_profile.fmt(
+ w.fill(
+ "The project name is a unique symbol for your project and will be used "
+ "to distinguish your project from others in user preferences, namspaceing "
+ "of your project's artifacts in shared artifact caches, and in any case where "
+ "BuildStream needs to distinguish between multiple projects."
+ )
+ ),
+ err=True,
+ )
click.echo("", err=True)
- click.echo(self._detail_profile.fmt(
- w.fill("The project name must contain only alphanumeric characters, "
- "may not start with a digit, and may contain dashes or underscores.")), err=True)
+ click.echo(
+ self._detail_profile.fmt(
+ w.fill(
+ "The project name must contain only alphanumeric characters, "
+ "may not start with a digit, and may contain dashes or underscores."
+ )
+ ),
+ err=True,
+ )
click.echo("", err=True)
- project_name = click.prompt(self._content_profile.fmt("Project name"),
- value_proc=project_name_proc, err=True)
+ project_name = click.prompt(self._content_profile.fmt("Project name"), value_proc=project_name_proc, err=True)
click.echo("", err=True)
# Collect format version
click.echo(self._content_profile.fmt("Select the minimum required format version for your project"), err=True)
click.echo(self._format_profile.fmt("-----------------------------------------------------------"), err=True)
click.echo("", err=True)
- click.echo(self._detail_profile.fmt(
- w.fill("The format version is used to provide users who build your project "
- "with a helpful error message in the case that they do not have a recent "
- "enough version of BuildStream supporting all the features which your "
- "project might use.")), err=True)
+ click.echo(
+ self._detail_profile.fmt(
+ w.fill(
+ "The format version is used to provide users who build your project "
+ "with a helpful error message in the case that they do not have a recent "
+ "enough version of BuildStream supporting all the features which your "
+ "project might use."
+ )
+ ),
+ err=True,
+ )
click.echo("", err=True)
- click.echo(self._detail_profile.fmt(
- w.fill("The lowest version allowed is 0, the currently installed version of BuildStream "
- "supports up to format version {}.".format(BST_FORMAT_VERSION))), err=True)
+ click.echo(
+ self._detail_profile.fmt(
+ w.fill(
+ "The lowest version allowed is 0, the currently installed version of BuildStream "
+ "supports up to format version {}.".format(BST_FORMAT_VERSION)
+ )
+ ),
+ err=True,
+ )
click.echo("", err=True)
- format_version = click.prompt(self._content_profile.fmt("Format version"),
- value_proc=format_version_proc,
- default=format_version, err=True)
+ format_version = click.prompt(
+ self._content_profile.fmt("Format version"),
+ value_proc=format_version_proc,
+ default=format_version,
+ err=True,
+ )
click.echo("", err=True)
# Collect element path
click.echo(self._content_profile.fmt("Select the element path"), err=True)
click.echo(self._format_profile.fmt("-----------------------"), err=True)
click.echo("", err=True)
- click.echo(self._detail_profile.fmt(
- w.fill("The element path is a project subdirectory where element .bst files are stored "
- "within your project.")), err=True)
+ click.echo(
+ self._detail_profile.fmt(
+ w.fill(
+ "The element path is a project subdirectory where element .bst files are stored "
+ "within your project."
+ )
+ ),
+ err=True,
+ )
click.echo("", err=True)
- click.echo(self._detail_profile.fmt(
- w.fill("Elements will be displayed in logs as filenames relative to "
- "the element path, and similarly, dependencies must be expressed as filenames "
- "relative to the element path.")), err=True)
+ click.echo(
+ self._detail_profile.fmt(
+ w.fill(
+ "Elements will be displayed in logs as filenames relative to "
+ "the element path, and similarly, dependencies must be expressed as filenames "
+ "relative to the element path."
+ )
+ ),
+ err=True,
+ )
click.echo("", err=True)
- element_path = click.prompt(self._content_profile.fmt("Element path"),
- value_proc=element_path_proc,
- default=element_path, err=True)
+ element_path = click.prompt(
+ self._content_profile.fmt("Element path"), value_proc=element_path_proc, default=element_path, err=True
+ )
return (project_name, format_version, element_path)
@@ -909,7 +980,6 @@ class App():
# ask for a new input.
#
def _prefix_choice_value_proc(choices):
-
def value_proc(user_input):
remaining_candidate = [choice for choice in choices if choice.startswith(user_input)]
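Two smaller conventions recur throughout the app.py hunks above: when a long string concatenation has to be split, Black wraps it in parentheses and places the "+" operator at the start of each continued line, and inline comments are normalized to have two spaces before the "#". A short sketch of both rules; the option strings are copied from the interrupt prompt above, the rest is illustrative.

WRAP_WIDTH = 79  # two spaces before an inline comment

summary = (
    "Choose one of the following options:\n"
    + " (c)ontinue - Continue queueing jobs as much as possible\n"
    + " (q)uit - Exit after all ongoing jobs complete\n"
    + " (t)erminate - Terminate any ongoing jobs and exit\n"
)
print(summary)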
diff --git a/src/buildstream/_frontend/cli.py b/src/buildstream/_frontend/cli.py
index 5c0293589..935a492d9 100644
--- a/src/buildstream/_frontend/cli.py
+++ b/src/buildstream/_frontend/cli.py
@@ -17,8 +17,8 @@ from ..utils import _get_compression, UtilError
# Helper classes and methods for Click #
##################################################################
-class FastEnumType(click.Choice):
-
+class FastEnumType(click.Choice):
def __init__(self, enum):
self._enum = enum
super().__init__(enum.values())
@@ -45,7 +45,7 @@ class FastEnumType(click.Choice):
#
def search_command(args, *, context=None):
if context is None:
- context = cli.make_context('bst', args, resilient_parsing=True)
+ context = cli.make_context("bst", args, resilient_parsing=True)
# Loop into the deepest command
command = cli
@@ -54,9 +54,7 @@ def search_command(args, *, context=None):
command = command_ctx.command.get_command(command_ctx, cmd)
if command is None:
return None
- command_ctx = command.make_context(command.name, [command.name],
- parent=command_ctx,
- resilient_parsing=True)
+ command_ctx = command.make_context(command.name, [command.name], parent=command_ctx, resilient_parsing=True)
return command_ctx
@@ -65,8 +63,11 @@ def search_command(args, *, context=None):
def complete_commands(cmd, args, incomplete):
command_ctx = search_command(args[1:])
if command_ctx and command_ctx.command and isinstance(command_ctx.command, click.MultiCommand):
- return [subcommand + " " for subcommand in command_ctx.command.list_commands(command_ctx)
- if not command_ctx.command.get_command(command_ctx, subcommand).hidden]
+ return [
+ subcommand + " "
+ for subcommand in command_ctx.command.list_commands(command_ctx)
+ if not command_ctx.command.get_command(command_ctx, subcommand).hidden
+ ]
return []
@@ -80,18 +81,19 @@ def complete_target(args, incomplete):
"""
from .. import utils
- project_conf = 'project.conf'
+
+ project_conf = "project.conf"
# First resolve the directory, in case there is an
# active --directory/-C option
#
- base_directory = '.'
+ base_directory = "."
idx = -1
try:
- idx = args.index('-C')
+ idx = args.index("-C")
except ValueError:
try:
- idx = args.index('--directory')
+ idx = args.index("--directory")
except ValueError:
pass
@@ -116,7 +118,7 @@ def complete_target(args, incomplete):
return []
# The project is not required to have an element-path
- element_directory = project.get_str('element-path', default='')
+ element_directory = project.get_str("element-path", default="")
# If a project was loaded, use its element-path to
# adjust our completion's base directory
@@ -132,19 +134,20 @@ def complete_target(args, incomplete):
def complete_artifact(orig_args, args, incomplete):
from .._context import Context
+
with Context(use_casd=False) as ctx:
config = None
if orig_args:
for i, arg in enumerate(orig_args):
- if arg in ('-c', '--config'):
+ if arg in ("-c", "--config"):
try:
config = orig_args[i + 1]
except IndexError:
pass
if args:
for i, arg in enumerate(args):
- if arg in ('-c', '--config'):
+ if arg in ("-c", "--config"):
try:
config = args[i + 1]
except IndexError:
@@ -167,38 +170,40 @@ def override_completions(orig_args, cmd, cmd_param, args, incomplete):
:return: all the possible user-specified completions for the param
"""
- if cmd.name == 'help':
+ if cmd.name == "help":
return complete_commands(cmd, args, incomplete)
# We can't easily extend click's data structures without
# modifying click itself, so just do some weak special casing
# right here and select which parameters we want to handle specially.
if isinstance(cmd_param.type, click.Path):
- if (cmd_param.name == 'elements' or
- cmd_param.name == 'element' or
- cmd_param.name == 'except_' or
- cmd_param.opts == ['--track'] or
- cmd_param.opts == ['--track-except']):
+ if (
+ cmd_param.name == "elements"
+ or cmd_param.name == "element"
+ or cmd_param.name == "except_"
+ or cmd_param.opts == ["--track"]
+ or cmd_param.opts == ["--track-except"]
+ ):
return complete_target(args, incomplete)
- if cmd_param.name == 'artifacts' or cmd_param.name == 'target':
+ if cmd_param.name == "artifacts" or cmd_param.name == "target":
return complete_artifact(orig_args, args, incomplete)
raise CompleteUnhandled()
def validate_output_streams():
- if sys.platform == 'win32':
+ if sys.platform == "win32":
# Windows does not support 'fcntl', the module is unavailable there as
# of Python 3.7, therefore early-out here.
return
import fcntl
+
for stream in (sys.stdout, sys.stderr):
fileno = stream.fileno()
flags = fcntl.fcntl(fileno, fcntl.F_GETFL)
if flags & os.O_NONBLOCK:
- click.echo("{} is currently set to O_NONBLOCK, try opening a new shell"
- .format(stream.name), err=True)
+ click.echo("{} is currently set to O_NONBLOCK, try opening a new shell".format(stream.name), err=True)
sys.exit(-1)
@@ -237,8 +242,7 @@ def handle_bst_force_start_method_env():
sys.exit(-1)
-def override_main(self, args=None, prog_name=None, complete_var=None,
- standalone_mode=True, **extra):
+def override_main(self, args=None, prog_name=None, complete_var=None, standalone_mode=True, **extra):
# Hook for the Bash completion. This only activates if the Bash
# completion is actually enabled, otherwise this is quite a fast
@@ -250,7 +254,7 @@ def override_main(self, args=None, prog_name=None, complete_var=None,
#
# The below is a quicker exit path for the sake
# of making completions respond faster.
- if 'BST_TEST_SUITE' not in os.environ:
+ if "BST_TEST_SUITE" not in os.environ:
sys.stdout.flush()
sys.stderr.flush()
os._exit(0)
@@ -269,14 +273,13 @@ def override_main(self, args=None, prog_name=None, complete_var=None,
# case of testing, our tests preceed our entrypoint, so we do our best.
handle_bst_force_start_method_env()
- original_main(self, args=args, prog_name=prog_name, complete_var=None,
- standalone_mode=standalone_mode, **extra)
+ original_main(self, args=args, prog_name=prog_name, complete_var=None, standalone_mode=standalone_mode, **extra)
original_main = click.BaseCommand.main
# Disable type checking since mypy doesn't support assigning to a method.
# See https://github.com/python/mypy/issues/2427.
-click.BaseCommand.main = override_main # type: ignore
+click.BaseCommand.main = override_main # type: ignore
##################################################################
@@ -287,58 +290,78 @@ def print_version(ctx, param, value):
return
from .. import __version__
+
click.echo(__version__)
ctx.exit()
-@click.group(context_settings=dict(help_option_names=['-h', '--help']))
-@click.option('--version', is_flag=True, callback=print_version,
- expose_value=False, is_eager=True)
-@click.option('--config', '-c',
- type=click.Path(exists=True, dir_okay=False, readable=True),
- help="Configuration file to use")
-@click.option('--directory', '-C', default=None, # Set to os.getcwd() later.
- type=click.Path(file_okay=False, readable=True),
- help="Project directory (default: current directory)")
-@click.option('--on-error', default=None,
- type=FastEnumType(_SchedulerErrorAction),
- help="What to do when an error is encountered")
-@click.option('--fetchers', type=click.INT, default=None,
- help="Maximum simultaneous download tasks")
-@click.option('--builders', type=click.INT, default=None,
- help="Maximum simultaneous build tasks")
-@click.option('--pushers', type=click.INT, default=None,
- help="Maximum simultaneous upload tasks")
-@click.option('--max-jobs', type=click.INT, default=None,
- help="Number of parallel jobs allowed for a given build task")
-@click.option('--network-retries', type=click.INT, default=None,
- help="Maximum retries for network tasks")
-@click.option('--no-interactive', is_flag=True,
- help="Force non interactive mode, otherwise this is automatically decided")
-@click.option('--verbose/--no-verbose', default=None,
- help="Be extra verbose")
-@click.option('--debug/--no-debug', default=None,
- help="Print debugging output")
-@click.option('--error-lines', type=click.INT, default=None,
- help="Maximum number of lines to show from a task log")
-@click.option('--message-lines', type=click.INT, default=None,
- help="Maximum number of lines to show in a detailed message")
-@click.option('--log-file',
- type=click.File(mode='w', encoding='UTF-8'),
- help="A file to store the main log (allows storing the main log while in interactive mode)")
-@click.option('--colors/--no-colors', default=None,
- help="Force enable/disable ANSI color codes in output")
-@click.option('--strict/--no-strict', default=None, is_flag=True,
- help="Elements must be rebuilt when their dependencies have changed")
-@click.option('--option', '-o', type=click.Tuple([str, str]), multiple=True, metavar='OPTION VALUE',
- help="Specify a project option")
-@click.option('--default-mirror', default=None,
- help="The mirror to fetch from first, before attempting other mirrors")
-@click.option('--pull-buildtrees', is_flag=True, default=None,
- help="Include an element's build tree when pulling remote element artifacts")
-@click.option('--cache-buildtrees', default=None,
- type=FastEnumType(_CacheBuildTrees),
- help="Cache artifact build tree content on creation")
+@click.group(context_settings=dict(help_option_names=["-h", "--help"]))
+@click.option("--version", is_flag=True, callback=print_version, expose_value=False, is_eager=True)
+@click.option(
+ "--config", "-c", type=click.Path(exists=True, dir_okay=False, readable=True), help="Configuration file to use"
+)
+@click.option(
+ "--directory",
+ "-C",
+ default=None, # Set to os.getcwd() later.
+ type=click.Path(file_okay=False, readable=True),
+ help="Project directory (default: current directory)",
+)
+@click.option(
+ "--on-error",
+ default=None,
+ type=FastEnumType(_SchedulerErrorAction),
+ help="What to do when an error is encountered",
+)
+@click.option("--fetchers", type=click.INT, default=None, help="Maximum simultaneous download tasks")
+@click.option("--builders", type=click.INT, default=None, help="Maximum simultaneous build tasks")
+@click.option("--pushers", type=click.INT, default=None, help="Maximum simultaneous upload tasks")
+@click.option(
+ "--max-jobs", type=click.INT, default=None, help="Number of parallel jobs allowed for a given build task"
+)
+@click.option("--network-retries", type=click.INT, default=None, help="Maximum retries for network tasks")
+@click.option(
+ "--no-interactive", is_flag=True, help="Force non interactive mode, otherwise this is automatically decided"
+)
+@click.option("--verbose/--no-verbose", default=None, help="Be extra verbose")
+@click.option("--debug/--no-debug", default=None, help="Print debugging output")
+@click.option("--error-lines", type=click.INT, default=None, help="Maximum number of lines to show from a task log")
+@click.option(
+ "--message-lines", type=click.INT, default=None, help="Maximum number of lines to show in a detailed message"
+)
+@click.option(
+ "--log-file",
+ type=click.File(mode="w", encoding="UTF-8"),
+ help="A file to store the main log (allows storing the main log while in interactive mode)",
+)
+@click.option("--colors/--no-colors", default=None, help="Force enable/disable ANSI color codes in output")
+@click.option(
+ "--strict/--no-strict",
+ default=None,
+ is_flag=True,
+ help="Elements must be rebuilt when their dependencies have changed",
+)
+@click.option(
+ "--option",
+ "-o",
+ type=click.Tuple([str, str]),
+ multiple=True,
+ metavar="OPTION VALUE",
+ help="Specify a project option",
+)
+@click.option("--default-mirror", default=None, help="The mirror to fetch from first, before attempting other mirrors")
+@click.option(
+ "--pull-buildtrees",
+ is_flag=True,
+ default=None,
+ help="Include an element's build tree when pulling remote element artifacts",
+)
+@click.option(
+ "--cache-buildtrees",
+ default=None,
+ type=FastEnumType(_CacheBuildTrees),
+ help="Cache artifact build tree content on creation",
+)
@click.pass_context
def cli(context, **kwargs):
"""Build and manipulate BuildStream projects
@@ -360,17 +383,15 @@ def cli(context, **kwargs):
##################################################################
# Help Command #
##################################################################
-@cli.command(name="help", short_help="Print usage information",
- context_settings={"help_option_names": []})
-@click.argument("command", nargs=-1, metavar='COMMAND')
+@cli.command(name="help", short_help="Print usage information", context_settings={"help_option_names": []})
+@click.argument("command", nargs=-1, metavar="COMMAND")
@click.pass_context
def help_command(ctx, command):
"""Print usage information about a given command
"""
command_ctx = search_command(command, context=ctx.parent)
if not command_ctx:
- click.echo("Not a valid command: '{} {}'"
- .format(ctx.parent.info_name, " ".join(command)), err=True)
+ click.echo("Not a valid command: '{} {}'".format(ctx.parent.info_name, " ".join(command)), err=True)
sys.exit(-1)
click.echo(command_ctx.command.get_help(command_ctx), err=True)
@@ -380,24 +401,32 @@ def help_command(ctx, command):
detail = " "
if command:
detail = " {} ".format(" ".join(command))
- click.echo("\nFor usage on a specific command: {} help{}COMMAND"
- .format(ctx.parent.info_name, detail), err=True)
+ click.echo(
+ "\nFor usage on a specific command: {} help{}COMMAND".format(ctx.parent.info_name, detail), err=True
+ )
##################################################################
# Init Command #
##################################################################
@cli.command(short_help="Initialize a new BuildStream project")
-@click.option('--project-name', type=click.STRING,
- help="The project name to use")
-@click.option('--format-version', type=click.INT, default=BST_FORMAT_VERSION, show_default=True,
- help="The required format version")
-@click.option('--element-path', type=click.Path(), default="elements", show_default=True,
- help="The subdirectory to store elements in")
-@click.option('--force', '-f', is_flag=True,
- help="Allow overwriting an existing project.conf")
-@click.argument('target-directory', nargs=1, required=False,
- type=click.Path(file_okay=False, writable=True))
+@click.option("--project-name", type=click.STRING, help="The project name to use")
+@click.option(
+ "--format-version",
+ type=click.INT,
+ default=BST_FORMAT_VERSION,
+ show_default=True,
+ help="The required format version",
+)
+@click.option(
+ "--element-path",
+ type=click.Path(),
+ default="elements",
+ show_default=True,
+ help="The subdirectory to store elements in",
+)
+@click.option("--force", "-f", is_flag=True, help="Allow overwriting an existing project.conf")
+@click.argument("target-directory", nargs=1, required=False, type=click.Path(file_okay=False, writable=True))
@click.pass_obj
def init(app, project_name, format_version, element_path, force, target_directory):
"""Initialize a new BuildStream project
@@ -415,13 +444,11 @@ def init(app, project_name, format_version, element_path, force, target_director
# Build Command #
##################################################################
@cli.command(short_help="Build elements in a pipeline")
-@click.option('--deps', '-d', default=None,
- type=click.Choice(['plan', 'all']),
- help='The dependencies to build')
-@click.option('--remote', '-r', default=None,
- help="The URL of the remote cache (defaults to the first configured cache)")
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@click.option("--deps", "-d", default=None, type=click.Choice(["plan", "all"]), help="The dependencies to build")
+@click.option(
+ "--remote", "-r", default=None, help="The URL of the remote cache (defaults to the first configured cache)"
+)
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def build(app, elements, deps, remote):
"""Build elements in a pipeline
@@ -450,30 +477,41 @@ def build(app, elements, deps, remote):
# Junction elements cannot be built, exclude them from default targets
ignore_junction_targets = True
- app.stream.build(elements,
- selection=deps,
- ignore_junction_targets=ignore_junction_targets,
- remote=remote)
+ app.stream.build(elements, selection=deps, ignore_junction_targets=ignore_junction_targets, remote=remote)
##################################################################
# Show Command #
##################################################################
@cli.command(short_help="Show elements in the pipeline")
-@click.option('--except', 'except_', multiple=True,
- type=click.Path(readable=False),
- help="Except certain dependencies")
-@click.option('--deps', '-d', default='all', show_default=True,
- type=click.Choice(['none', 'plan', 'run', 'build', 'all']),
- help='The dependencies to show')
-@click.option('--order', default="stage", show_default=True,
- type=click.Choice(['stage', 'alpha']),
- help='Staging or alphabetic ordering of dependencies')
-@click.option('--format', '-f', 'format_', metavar='FORMAT', default=None,
- type=click.STRING,
- help='Format string for each element')
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@click.option(
+ "--except", "except_", multiple=True, type=click.Path(readable=False), help="Except certain dependencies"
+)
+@click.option(
+ "--deps",
+ "-d",
+ default="all",
+ show_default=True,
+ type=click.Choice(["none", "plan", "run", "build", "all"]),
+ help="The dependencies to show",
+)
+@click.option(
+ "--order",
+ default="stage",
+ show_default=True,
+ type=click.Choice(["stage", "alpha"]),
+ help="Staging or alphabetic ordering of dependencies",
+)
+@click.option(
+ "--format",
+ "-f",
+ "format_",
+ metavar="FORMAT",
+ default=None,
+ type=click.STRING,
+ help="Format string for each element",
+)
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def show(app, elements, deps, except_, order, format_):
"""Show elements in the pipeline
@@ -536,9 +574,7 @@ def show(app, elements, deps, except_, order, format_):
if not elements:
elements = app.project.get_default_targets()
- dependencies = app.stream.load_selection(elements,
- selection=deps,
- except_targets=except_)
+ dependencies = app.stream.load_selection(elements, selection=deps, except_targets=except_)
if order == "alpha":
dependencies = sorted(dependencies)
@@ -554,25 +590,34 @@ def show(app, elements, deps, except_, order, format_):
# Shell Command #
##################################################################
@cli.command(short_help="Shell into an element's sandbox environment")
-@click.option('--build', '-b', 'build_', is_flag=True,
- help='Stage dependencies and sources to build')
-@click.option('--sysroot', '-s', default=None,
- type=click.Path(exists=True, file_okay=False, readable=True),
- help="An existing sysroot")
-@click.option('--mount', type=click.Tuple([click.Path(exists=True), str]), multiple=True,
- metavar='HOSTPATH PATH',
- help="Mount a file or directory into the sandbox")
-@click.option('--isolate', is_flag=True,
- help='Create an isolated build sandbox')
-@click.option('--use-buildtree', '-t', 'cli_buildtree', type=click.Choice(['ask', 'try', 'always', 'never']),
- default='ask', show_default=True,
- help=('Use a buildtree. If `always` is set, will always fail to '
- 'build if a buildtree is not available.'))
-@click.option('--pull', 'pull_', is_flag=True,
- help='Attempt to pull missing or incomplete artifacts')
-@click.argument('element', required=False,
- type=click.Path(readable=False))
-@click.argument('command', type=click.STRING, nargs=-1)
+@click.option("--build", "-b", "build_", is_flag=True, help="Stage dependencies and sources to build")
+@click.option(
+ "--sysroot",
+ "-s",
+ default=None,
+ type=click.Path(exists=True, file_okay=False, readable=True),
+ help="An existing sysroot",
+)
+@click.option(
+ "--mount",
+ type=click.Tuple([click.Path(exists=True), str]),
+ multiple=True,
+ metavar="HOSTPATH PATH",
+ help="Mount a file or directory into the sandbox",
+)
+@click.option("--isolate", is_flag=True, help="Create an isolated build sandbox")
+@click.option(
+ "--use-buildtree",
+ "-t",
+ "cli_buildtree",
+ type=click.Choice(["ask", "try", "always", "never"]),
+ default="ask",
+ show_default=True,
+ help=("Use a buildtree. If `always` is set, will always fail to " "build if a buildtree is not available."),
+)
+@click.option("--pull", "pull_", is_flag=True, help="Attempt to pull missing or incomplete artifacts")
+@click.argument("element", required=False, type=click.Path(readable=False))
+@click.argument("command", type=click.STRING, nargs=-1)
@click.pass_obj
def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, command):
"""Run a command in the target element's sandbox environment
@@ -616,8 +661,7 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, c
if not element:
raise AppError('Missing argument "ELEMENT".')
- elements = app.stream.load_selection((element,), selection=selection,
- use_artifact_config=True)
+ elements = app.stream.load_selection((element,), selection=selection, use_artifact_config=True)
# last one will be the element we want to stage, previous ones are
# elements to try and pull
@@ -628,10 +672,7 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, c
element_key = element._get_display_key()
prompt = app.shell_prompt(element_name, element_key)
- mounts = [
- HostMount(path, host_path)
- for host_path, path in mount
- ]
+ mounts = [HostMount(path, host_path) for host_path, path in mount]
cached = element._cached_buildtree()
buildtree_exists = element._buildtree_exists()
@@ -640,27 +681,31 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, c
if buildtree_exists or pull_:
use_buildtree = cli_buildtree
if not cached and use_buildtree == "always":
- click.echo("WARNING: buildtree is not cached locally, will attempt to pull from available remotes",
- err=True)
+ click.echo(
+ "WARNING: buildtree is not cached locally, will attempt to pull from available remotes",
+ err=True,
+ )
else:
if cli_buildtree == "always":
# Exit early if it won't be possible to even fetch a buildtree with always option
raise AppError("Artifact was created without buildtree, unable to launch shell with it")
- click.echo("WARNING: Artifact created without buildtree, shell will be loaded without it",
- err=True)
+ click.echo("WARNING: Artifact created without buildtree, shell will be loaded without it", err=True)
else:
# If the value has defaulted to ask and in non interactive mode, don't consider the buildtree, this
# being the default behaviour of the command
if app.interactive and cli_buildtree == "ask":
- if cached and bool(click.confirm('Do you want to use the cached buildtree?')):
+ if cached and bool(click.confirm("Do you want to use the cached buildtree?")):
use_buildtree = "always"
elif buildtree_exists:
try:
- choice = click.prompt("Do you want to pull & use a cached buildtree?",
- type=click.Choice(['try', 'always', 'never']),
- err=True, show_choices=True)
+ choice = click.prompt(
+ "Do you want to pull & use a cached buildtree?",
+ type=click.Choice(["try", "always", "never"]),
+ err=True,
+ show_choices=True,
+ )
except click.Abort:
- click.echo('Aborting', err=True)
+ click.echo("Aborting", err=True)
sys.exit(-1)
if choice != "never":
@@ -671,13 +716,17 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, c
click.echo("WARNING: using a buildtree from a failed build.", err=True)
try:
- exitcode = app.stream.shell(element, scope, prompt,
- directory=sysroot,
- mounts=mounts,
- isolate=isolate,
- command=command,
- usebuildtree=use_buildtree,
- pull_dependencies=pull_dependencies)
+ exitcode = app.stream.shell(
+ element,
+ scope,
+ prompt,
+ directory=sysroot,
+ mounts=mounts,
+ isolate=isolate,
+ command=command,
+ usebuildtree=use_buildtree,
+ pull_dependencies=pull_dependencies,
+ )
except BstError as e:
raise AppError("Error launching shell: {}".format(e), detail=e.detail) from e
@@ -697,20 +746,27 @@ def source():
# Source Fetch Command #
##################################################################
@source.command(name="fetch", short_help="Fetch sources in a pipeline")
-@click.option('--except', 'except_', multiple=True,
- type=click.Path(readable=False),
- help="Except certain dependencies from fetching")
-@click.option('--deps', '-d', default='plan', show_default=True,
- type=click.Choice(['none', 'plan', 'all']),
- help='The dependencies to fetch')
-@click.option('--track', 'track_', is_flag=True,
- help="Track new source references before fetching")
-@click.option('--track-cross-junctions', '-J', is_flag=True,
- help="Allow tracking to cross junction boundaries")
-@click.option('--remote', '-r', default=None,
- help="The URL of the remote source cache (defaults to the first configured cache)")
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@click.option(
+ "--except",
+ "except_",
+ multiple=True,
+ type=click.Path(readable=False),
+ help="Except certain dependencies from fetching",
+)
+@click.option(
+ "--deps",
+ "-d",
+ default="plan",
+ show_default=True,
+ type=click.Choice(["none", "plan", "all"]),
+ help="The dependencies to fetch",
+)
+@click.option("--track", "track_", is_flag=True, help="Track new source references before fetching")
+@click.option("--track-cross-junctions", "-J", is_flag=True, help="Allow tracking to cross junction boundaries")
+@click.option(
+ "--remote", "-r", default=None, help="The URL of the remote source cache (defaults to the first configured cache)"
+)
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def source_fetch(app, elements, deps, track_, except_, track_cross_junctions, remote):
"""Fetch sources required to build the pipeline
@@ -741,36 +797,48 @@ def source_fetch(app, elements, deps, track_, except_, track_cross_junctions, re
sys.exit(-1)
if track_ and deps == PipelineSelection.PLAN:
- click.echo("WARNING: --track specified for tracking of a build plan\n\n"
- "Since tracking modifies the build plan, all elements will be tracked.", err=True)
+ click.echo(
+ "WARNING: --track specified for tracking of a build plan\n\n"
+ "Since tracking modifies the build plan, all elements will be tracked.",
+ err=True,
+ )
deps = PipelineSelection.ALL
with app.initialized(session_name="Fetch"):
if not elements:
elements = app.project.get_default_targets()
- app.stream.fetch(elements,
- selection=deps,
- except_targets=except_,
- track_targets=track_,
- track_cross_junctions=track_cross_junctions,
- remote=remote)
+ app.stream.fetch(
+ elements,
+ selection=deps,
+ except_targets=except_,
+ track_targets=track_,
+ track_cross_junctions=track_cross_junctions,
+ remote=remote,
+ )
##################################################################
# Source Track Command #
##################################################################
@source.command(name="track", short_help="Track new source references")
-@click.option('--except', 'except_', multiple=True,
- type=click.Path(readable=False),
- help="Except certain dependencies from tracking")
-@click.option('--deps', '-d', default='none', show_default=True,
- type=click.Choice(['none', 'all']),
- help='The dependencies to track')
-@click.option('--cross-junctions', '-J', is_flag=True,
- help="Allow crossing junction boundaries")
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@click.option(
+ "--except",
+ "except_",
+ multiple=True,
+ type=click.Path(readable=False),
+ help="Except certain dependencies from tracking",
+)
+@click.option(
+ "--deps",
+ "-d",
+ default="none",
+ show_default=True,
+ type=click.Choice(["none", "all"]),
+ help="The dependencies to track",
+)
+@click.option("--cross-junctions", "-J", is_flag=True, help="Allow crossing junction boundaries")
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def source_track(app, elements, deps, except_, cross_junctions):
"""Consults the specified tracking branches for new versions available
@@ -800,41 +868,50 @@ def source_track(app, elements, deps, except_, cross_junctions):
# Substitute 'none' for 'redirect' so that element redirections
# will be done
- if deps == 'none':
- deps = 'redirect'
- app.stream.track(elements,
- selection=deps,
- except_targets=except_,
- cross_junctions=cross_junctions)
+ if deps == "none":
+ deps = "redirect"
+ app.stream.track(elements, selection=deps, except_targets=except_, cross_junctions=cross_junctions)
##################################################################
# Source Checkout Command #
##################################################################
-@source.command(name='checkout', short_help='Checkout sources of an element')
-@click.option('--force', '-f', is_flag=True,
- help="Allow files to be overwritten")
-@click.option('--except', 'except_', multiple=True,
- type=click.Path(readable=False),
- help="Except certain dependencies")
-@click.option('--deps', '-d', default='none', show_default=True,
- type=click.Choice(['build', 'none', 'run', 'all']),
- help='The dependencies whose sources to checkout')
-@click.option('--tar', default=None, metavar='LOCATION',
- type=click.Path(),
- help="Create a tarball containing the sources instead "
- "of a file tree.")
-@click.option('--compression', default=None,
- type=click.Choice(['gz', 'xz', 'bz2']),
- help="The compression option of the tarball created.")
-@click.option('--include-build-scripts', 'build_scripts', is_flag=True)
-@click.option('--directory', default='source-checkout',
- type=click.Path(file_okay=False),
- help="The directory to checkout the sources to")
-@click.argument('element', required=False, type=click.Path(readable=False))
+@source.command(name="checkout", short_help="Checkout sources of an element")
+@click.option("--force", "-f", is_flag=True, help="Allow files to be overwritten")
+@click.option(
+ "--except", "except_", multiple=True, type=click.Path(readable=False), help="Except certain dependencies"
+)
+@click.option(
+ "--deps",
+ "-d",
+ default="none",
+ show_default=True,
+ type=click.Choice(["build", "none", "run", "all"]),
+ help="The dependencies whose sources to checkout",
+)
+@click.option(
+ "--tar",
+ default=None,
+ metavar="LOCATION",
+ type=click.Path(),
+ help="Create a tarball containing the sources instead " "of a file tree.",
+)
+@click.option(
+ "--compression",
+ default=None,
+ type=click.Choice(["gz", "xz", "bz2"]),
+ help="The compression option of the tarball created.",
+)
+@click.option("--include-build-scripts", "build_scripts", is_flag=True)
+@click.option(
+ "--directory",
+ default="source-checkout",
+ type=click.Path(file_okay=False),
+ help="The directory to checkout the sources to",
+)
+@click.argument("element", required=False, type=click.Path(readable=False))
@click.pass_obj
-def source_checkout(app, element, directory, force, deps, except_,
- tar, compression, build_scripts):
+def source_checkout(app, element, directory, force, deps, except_, tar, compression, build_scripts):
"""Checkout sources of an element to the specified location
When this command is executed from a workspace directory, the default
@@ -859,14 +936,16 @@ def source_checkout(app, element, directory, force, deps, except_,
if not element:
raise AppError('Missing argument "ELEMENT".')
- app.stream.source_checkout(element,
- location=location,
- force=force,
- deps=deps,
- except_targets=except_,
- tar=bool(tar),
- compression=compression,
- include_build_scripts=build_scripts)
+ app.stream.source_checkout(
+ element,
+ location=location,
+ force=force,
+ deps=deps,
+ except_targets=except_,
+ tar=bool(tar),
+ compression=compression,
+ include_build_scripts=build_scripts,
+ )
##################################################################
@@ -880,39 +959,42 @@ def workspace():
##################################################################
# Workspace Open Command #
##################################################################
-@workspace.command(name='open', short_help="Open a new workspace")
-@click.option('--no-checkout', is_flag=True,
- help="Do not checkout the source, only link to the given directory")
-@click.option('--force', '-f', is_flag=True,
- help="The workspace will be created even if the directory in which it will be created is not empty " +
- "or if a workspace for that element already exists")
-@click.option('--track', 'track_', is_flag=True,
- help="Track and fetch new source references before checking out the workspace")
-@click.option('--directory', type=click.Path(file_okay=False), default=None,
- help="Only for use when a single Element is given: Set the directory to use to create the workspace")
-@click.argument('elements', nargs=-1, type=click.Path(readable=False), required=True)
+@workspace.command(name="open", short_help="Open a new workspace")
+@click.option("--no-checkout", is_flag=True, help="Do not checkout the source, only link to the given directory")
+@click.option(
+ "--force",
+ "-f",
+ is_flag=True,
+ help="The workspace will be created even if the directory in which it will be created is not empty "
+ + "or if a workspace for that element already exists",
+)
+@click.option(
+ "--track", "track_", is_flag=True, help="Track and fetch new source references before checking out the workspace"
+)
+@click.option(
+ "--directory",
+ type=click.Path(file_okay=False),
+ default=None,
+ help="Only for use when a single Element is given: Set the directory to use to create the workspace",
+)
+@click.argument("elements", nargs=-1, type=click.Path(readable=False), required=True)
@click.pass_obj
def workspace_open(app, no_checkout, force, track_, directory, elements):
"""Open a workspace for manual source modification"""
with app.initialized():
- app.stream.workspace_open(elements,
- no_checkout=no_checkout,
- track_first=track_,
- force=force,
- custom_dir=directory)
+ app.stream.workspace_open(
+ elements, no_checkout=no_checkout, track_first=track_, force=force, custom_dir=directory
+ )
##################################################################
# Workspace Close Command #
##################################################################
-@workspace.command(name='close', short_help="Close workspaces")
-@click.option('--remove-dir', is_flag=True,
- help="Remove the path that contains the closed workspace")
-@click.option('--all', '-a', 'all_', is_flag=True,
- help="Close all open workspaces")
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@workspace.command(name="close", short_help="Close workspaces")
+@click.option("--remove-dir", is_flag=True, help="Remove the path that contains the closed workspace")
+@click.option("--all", "-a", "all_", is_flag=True, help="Close all open workspaces")
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def workspace_close(app, remove_dir, all_, elements):
"""Close a workspace"""
@@ -927,11 +1009,11 @@ def workspace_close(app, remove_dir, all_, elements):
if element:
elements = (element,)
else:
- raise AppError('No elements specified')
+ raise AppError("No elements specified")
# Early exit if we specified `all` and there are no workspaces
if all_ and not app.stream.workspace_exists():
- click.echo('No open workspaces to close', err=True)
+ click.echo("No open workspaces to close", err=True)
sys.exit(0)
if all_:
@@ -958,21 +1040,19 @@ def workspace_close(app, remove_dir, all_, elements):
if removed_required_element:
click.echo(
"Removed '{}', therefore you can no longer run BuildStream "
- "commands from the current directory.".format(element_name), err=True)
+ "commands from the current directory.".format(element_name),
+ err=True,
+ )
##################################################################
# Workspace Reset Command #
##################################################################
-@workspace.command(name='reset', short_help="Reset a workspace to its original state")
-@click.option('--soft', is_flag=True,
- help="Reset workspace state without affecting its contents")
-@click.option('--track', 'track_', is_flag=True,
- help="Track and fetch the latest source before resetting")
-@click.option('--all', '-a', 'all_', is_flag=True,
- help="Reset all open workspaces")
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@workspace.command(name="reset", short_help="Reset a workspace to its original state")
+@click.option("--soft", is_flag=True, help="Reset workspace state without affecting its contents")
+@click.option("--track", "track_", is_flag=True, help="Track and fetch the latest source before resetting")
+@click.option("--all", "-a", "all_", is_flag=True, help="Reset all open workspaces")
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def workspace_reset(app, soft, track_, all_, elements):
"""Reset a workspace to its original state"""
@@ -985,7 +1065,7 @@ def workspace_reset(app, soft, track_, all_, elements):
if element:
elements = (element,)
else:
- raise AppError('No elements specified to reset')
+ raise AppError("No elements specified to reset")
if all_ and not app.stream.workspace_exists():
raise AppError("No open workspaces to reset")
@@ -999,7 +1079,7 @@ def workspace_reset(app, soft, track_, all_, elements):
##################################################################
# Workspace List Command #
##################################################################
-@workspace.command(name='list', short_help="List open workspaces")
+@workspace.command(name="list", short_help="List open workspaces")
@click.pass_obj
def workspace_list(app):
"""List open workspaces"""
@@ -1044,11 +1124,16 @@ def artifact():
#############################################################
# Artifact show Command #
#############################################################
-@artifact.command(name='show', short_help="Show the cached state of artifacts")
-@click.option('--deps', '-d', default='none', show_default=True,
- type=click.Choice(['build', 'run', 'all', 'none']),
- help='The dependencies we also want to show')
-@click.argument('artifacts', type=click.Path(), nargs=-1)
+@artifact.command(name="show", short_help="Show the cached state of artifacts")
+@click.option(
+ "--deps",
+ "-d",
+ default="none",
+ show_default=True,
+ type=click.Choice(["build", "run", "all", "none"]),
+ help="The dependencies we also want to show",
+)
+@click.argument("artifacts", type=click.Path(), nargs=-1)
@click.pass_obj
def artifact_show(app, deps, artifacts):
"""show the cached state of artifacts"""
@@ -1061,31 +1146,38 @@ def artifact_show(app, deps, artifacts):
#####################################################################
# Artifact Checkout Command #
#####################################################################
-@artifact.command(name='checkout', short_help="Checkout contents of an artifact")
-@click.option('--force', '-f', is_flag=True,
- help="Allow files to be overwritten")
-@click.option('--deps', '-d', default='run', show_default=True,
- type=click.Choice(['run', 'build', 'none', 'all']),
- help='The dependencies to checkout')
-@click.option('--integrate/--no-integrate', default=None, is_flag=True,
- help="Whether to run integration commands")
-@click.option('--hardlinks', is_flag=True,
- help="Checkout hardlinks instead of copying if possible")
-@click.option('--tar', default=None, metavar='LOCATION',
- type=click.Path(),
- help="Create a tarball from the artifact contents instead "
- "of a file tree. If LOCATION is '-', the tarball "
- "will be dumped to the standard output.")
-@click.option('--compression', default=None,
- type=click.Choice(['gz', 'xz', 'bz2']),
- help="The compression option of the tarball created.")
-@click.option('--pull', 'pull_', is_flag=True,
- help="Pull the artifact if it's missing or incomplete.")
-@click.option('--directory', default=None,
- type=click.Path(file_okay=False),
- help="The directory to checkout the artifact to")
-@click.argument('target', required=False,
- type=click.Path(readable=False))
+@artifact.command(name="checkout", short_help="Checkout contents of an artifact")
+@click.option("--force", "-f", is_flag=True, help="Allow files to be overwritten")
+@click.option(
+ "--deps",
+ "-d",
+ default="run",
+ show_default=True,
+ type=click.Choice(["run", "build", "none", "all"]),
+ help="The dependencies to checkout",
+)
+@click.option("--integrate/--no-integrate", default=None, is_flag=True, help="Whether to run integration commands")
+@click.option("--hardlinks", is_flag=True, help="Checkout hardlinks instead of copying if possible")
+@click.option(
+ "--tar",
+ default=None,
+ metavar="LOCATION",
+ type=click.Path(),
+ help="Create a tarball from the artifact contents instead "
+ "of a file tree. If LOCATION is '-', the tarball "
+ "will be dumped to the standard output.",
+)
+@click.option(
+ "--compression",
+ default=None,
+ type=click.Choice(["gz", "xz", "bz2"]),
+ help="The compression option of the tarball created.",
+)
+@click.option("--pull", "pull_", is_flag=True, help="Pull the artifact if it's missing or incomplete.")
+@click.option(
+ "--directory", default=None, type=click.Path(file_okay=False), help="The directory to checkout the artifact to"
+)
+@click.argument("target", required=False, type=click.Path(readable=False))
@click.pass_obj
def artifact_checkout(app, force, deps, integrate, hardlinks, tar, compression, pull_, directory, target):
"""Checkout contents of an artifact
@@ -1110,7 +1202,7 @@ def artifact_checkout(app, force, deps, integrate, hardlinks, tar, compression,
location = os.path.abspath(os.path.join(os.getcwd(), target))
else:
location = directory
- if location[-4:] == '.bst':
+ if location[-4:] == ".bst":
location = location[:-4]
tar = False
else:
@@ -1120,9 +1212,12 @@ def artifact_checkout(app, force, deps, integrate, hardlinks, tar, compression,
except UtilError as e:
click.echo("ERROR: Invalid file extension given with '--tar': {}".format(e), err=True)
sys.exit(-1)
- if compression and inferred_compression != '' and inferred_compression != compression:
- click.echo("WARNING: File extension and compression differ."
- "File extension has been overridden by --compression", err=True)
+ if compression and inferred_compression != "" and inferred_compression != compression:
+ click.echo(
+ "WARNING: File extension and compression differ."
+ "File extension has been overridden by --compression",
+ err=True,
+ )
if not compression:
compression = inferred_compression
@@ -1132,28 +1227,35 @@ def artifact_checkout(app, force, deps, integrate, hardlinks, tar, compression,
if not target:
raise AppError('Missing argument "ELEMENT".')
- app.stream.checkout(target,
- location=location,
- force=force,
- selection=deps,
- integrate=True if integrate is None else integrate,
- hardlinks=hardlinks,
- pull=pull_,
- compression=compression,
- tar=bool(tar))
+ app.stream.checkout(
+ target,
+ location=location,
+ force=force,
+ selection=deps,
+ integrate=True if integrate is None else integrate,
+ hardlinks=hardlinks,
+ pull=pull_,
+ compression=compression,
+ tar=bool(tar),
+ )
################################################################
# Artifact Pull Command #
################################################################
@artifact.command(name="pull", short_help="Pull a built artifact")
-@click.option('--deps', '-d', default='none', show_default=True,
- type=click.Choice(['none', 'all']),
- help='The dependency artifacts to pull')
-@click.option('--remote', '-r', default=None,
- help="The URL of the remote cache (defaults to the first configured cache)")
-@click.argument('artifacts', nargs=-1,
- type=click.Path(readable=False))
+@click.option(
+ "--deps",
+ "-d",
+ default="none",
+ show_default=True,
+ type=click.Choice(["none", "all"]),
+ help="The dependency artifacts to pull",
+)
+@click.option(
+ "--remote", "-r", default=None, help="The URL of the remote cache (defaults to the first configured cache)"
+)
+@click.argument("artifacts", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def artifact_pull(app, artifacts, deps, remote):
"""Pull a built artifact from the configured remote artifact cache.
@@ -1184,21 +1286,25 @@ def artifact_pull(app, artifacts, deps, remote):
# Junction elements cannot be pulled, exclude them from default targets
ignore_junction_targets = True
- app.stream.pull(artifacts, selection=deps, remote=remote,
- ignore_junction_targets=ignore_junction_targets)
+ app.stream.pull(artifacts, selection=deps, remote=remote, ignore_junction_targets=ignore_junction_targets)
##################################################################
# Artifact Push Command #
##################################################################
@artifact.command(name="push", short_help="Push a built artifact")
-@click.option('--deps', '-d', default='none', show_default=True,
- type=click.Choice(['none', 'all']),
- help='The dependencies to push')
-@click.option('--remote', '-r', default=None,
- help="The URL of the remote cache (defaults to the first configured cache)")
-@click.argument('artifacts', nargs=-1,
- type=click.Path(readable=False))
+@click.option(
+ "--deps",
+ "-d",
+ default="none",
+ show_default=True,
+ type=click.Choice(["none", "all"]),
+ help="The dependencies to push",
+)
+@click.option(
+ "--remote", "-r", default=None, help="The URL of the remote cache (defaults to the first configured cache)"
+)
+@click.argument("artifacts", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def artifact_push(app, artifacts, deps, remote):
"""Push a built artifact to a remote artifact cache.
@@ -1231,18 +1337,19 @@ def artifact_push(app, artifacts, deps, remote):
# Junction elements cannot be pushed, exclude them from default targets
ignore_junction_targets = True
- app.stream.push(artifacts, selection=deps, remote=remote,
- ignore_junction_targets=ignore_junction_targets)
+ app.stream.push(artifacts, selection=deps, remote=remote, ignore_junction_targets=ignore_junction_targets)
################################################################
# Artifact Log Command #
################################################################
-@artifact.command(name='log', short_help="Show logs of artifacts")
-@click.option('--out',
- type=click.Path(file_okay=True, writable=True),
- help="Output logs to individual files in the specified path. If absent, logs are written to stdout.")
-@click.argument('artifacts', type=click.Path(), nargs=-1)
+@artifact.command(name="log", short_help="Show logs of artifacts")
+@click.option(
+ "--out",
+ type=click.Path(file_okay=True, writable=True),
+ help="Output logs to individual files in the specified path. If absent, logs are written to stdout.",
+)
+@click.argument("artifacts", type=click.Path(), nargs=-1)
@click.pass_obj
def artifact_log(app, artifacts, out):
"""Show build logs of artifacts"""
@@ -1252,7 +1359,7 @@ def artifact_log(app, artifacts, out):
if not out:
try:
for log in list(artifact_logs.values()):
- with open(log[0], 'r') as f:
+ with open(log[0], "r") as f:
data = f.read()
click.echo_via_pager(data)
except (OSError, FileNotFoundError):
@@ -1274,7 +1381,7 @@ def artifact_log(app, artifacts, out):
shutil.copy(log, dest)
# make a dir and write in log files
else:
- log_name = os.path.splitext(name)[0] + '.log'
+ log_name = os.path.splitext(name)[0] + ".log"
dest = os.path.join(out, log_name)
shutil.copy(log_files[0], dest)
# write a log file
@@ -1283,10 +1390,11 @@ def artifact_log(app, artifacts, out):
################################################################
# Artifact List-Contents Command #
################################################################
-@artifact.command(name='list-contents', short_help="List the contents of an artifact")
-@click.option('--long', '-l', 'long_', is_flag=True,
- help="Provide more information about the contents of the artifact.")
-@click.argument('artifacts', type=click.Path(), nargs=-1)
+@artifact.command(name="list-contents", short_help="List the contents of an artifact")
+@click.option(
+ "--long", "-l", "long_", is_flag=True, help="Provide more information about the contents of the artifact."
+)
+@click.argument("artifacts", type=click.Path(), nargs=-1)
@click.pass_obj
def artifact_list_contents(app, artifacts, long_):
"""List the contents of an artifact.
@@ -1308,11 +1416,16 @@ def artifact_list_contents(app, artifacts, long_):
###################################################################
# Artifact Delete Command #
###################################################################
-@artifact.command(name='delete', short_help="Remove artifacts from the local cache")
-@click.option('--deps', '-d', default='none', show_default=True,
- type=click.Choice(['none', 'run', 'build', 'all']),
- help="The dependencies to delete")
-@click.argument('artifacts', type=click.Path(), nargs=-1)
+@artifact.command(name="delete", short_help="Remove artifacts from the local cache")
+@click.option(
+ "--deps",
+ "-d",
+ default="none",
+ show_default=True,
+ type=click.Choice(["none", "run", "build", "all"]),
+ help="The dependencies to delete",
+)
+@click.argument("artifacts", type=click.Path(), nargs=-1)
@click.pass_obj
def artifact_delete(app, artifacts, deps):
"""Remove artifacts from the local cache"""
@@ -1333,18 +1446,24 @@ def artifact_delete(app, artifacts, deps):
# Fetch Command #
##################################################################
@cli.command(short_help="COMMAND OBSOLETE - Fetch sources in a pipeline", hidden=True)
-@click.option('--except', 'except_', multiple=True,
- type=click.Path(readable=False),
- help="Except certain dependencies from fetching")
-@click.option('--deps', '-d', default='plan', show_default=True,
- type=click.Choice(['none', 'plan', 'all']),
- help='The dependencies to fetch')
-@click.option('--track', 'track_', is_flag=True,
- help="Track new source references before fetching")
-@click.option('--track-cross-junctions', '-J', is_flag=True,
- help="Allow tracking to cross junction boundaries")
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@click.option(
+ "--except",
+ "except_",
+ multiple=True,
+ type=click.Path(readable=False),
+ help="Except certain dependencies from fetching",
+)
+@click.option(
+ "--deps",
+ "-d",
+ default="plan",
+ show_default=True,
+ type=click.Choice(["none", "plan", "all"]),
+ help="The dependencies to fetch",
+)
+@click.option("--track", "track_", is_flag=True, help="Track new source references before fetching")
+@click.option("--track-cross-junctions", "-J", is_flag=True, help="Allow tracking to cross junction boundaries")
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def fetch(app, elements, deps, track_, except_, track_cross_junctions):
click.echo("This command is now obsolete. Use `bst source fetch` instead.", err=True)
@@ -1355,16 +1474,23 @@ def fetch(app, elements, deps, track_, except_, track_cross_junctions):
# Track Command #
##################################################################
@cli.command(short_help="COMMAND OBSOLETE - Track new source references", hidden=True)
-@click.option('--except', 'except_', multiple=True,
- type=click.Path(readable=False),
- help="Except certain dependencies from tracking")
-@click.option('--deps', '-d', default='none', show_default=True,
- type=click.Choice(['none', 'all']),
- help='The dependencies to track')
-@click.option('--cross-junctions', '-J', is_flag=True,
- help="Allow crossing junction boundaries")
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@click.option(
+ "--except",
+ "except_",
+ multiple=True,
+ type=click.Path(readable=False),
+ help="Except certain dependencies from tracking",
+)
+@click.option(
+ "--deps",
+ "-d",
+ default="none",
+ show_default=True,
+ type=click.Choice(["none", "all"]),
+ help="The dependencies to track",
+)
+@click.option("--cross-junctions", "-J", is_flag=True, help="Allow crossing junction boundaries")
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def track(app, elements, deps, except_, cross_junctions):
click.echo("This command is now obsolete. Use `bst source track` instead.", err=True)
@@ -1375,26 +1501,33 @@ def track(app, elements, deps, except_, cross_junctions):
# Checkout Command #
##################################################################
@cli.command(short_help="COMMAND OBSOLETE - Checkout a built artifact", hidden=True)
-@click.option('--force', '-f', is_flag=True,
- help="Allow files to be overwritten")
-@click.option('--deps', '-d', default='run', show_default=True,
- type=click.Choice(['run', 'build', 'none']),
- help='The dependencies to checkout')
-@click.option('--integrate/--no-integrate', default=True,
- help="Run integration commands (default is to run commands)")
-@click.option('--hardlinks', is_flag=True,
- help="Checkout hardlinks instead of copies (handle with care)")
-@click.option('--tar', is_flag=True,
- help="Create a tarball from the artifact contents instead "
- "of a file tree. If LOCATION is '-', the tarball "
- "will be dumped to the standard output.")
-@click.argument('element', required=False,
- type=click.Path(readable=False))
-@click.argument('location', type=click.Path(), required=False)
+@click.option("--force", "-f", is_flag=True, help="Allow files to be overwritten")
+@click.option(
+ "--deps",
+ "-d",
+ default="run",
+ show_default=True,
+ type=click.Choice(["run", "build", "none"]),
+ help="The dependencies to checkout",
+)
+@click.option("--integrate/--no-integrate", default=True, help="Run integration commands (default is to run commands)")
+@click.option("--hardlinks", is_flag=True, help="Checkout hardlinks instead of copies (handle with care)")
+@click.option(
+ "--tar",
+ is_flag=True,
+ help="Create a tarball from the artifact contents instead "
+ "of a file tree. If LOCATION is '-', the tarball "
+ "will be dumped to the standard output.",
+)
+@click.argument("element", required=False, type=click.Path(readable=False))
+@click.argument("location", type=click.Path(), required=False)
@click.pass_obj
def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
- click.echo("This command is now obsolete. Use `bst artifact checkout` instead " +
- "and use the --directory option to specify LOCATION", err=True)
+ click.echo(
+ "This command is now obsolete. Use `bst artifact checkout` instead "
+ + "and use the --directory option to specify LOCATION",
+ err=True,
+ )
sys.exit(1)
@@ -1402,13 +1535,16 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
# Pull Command #
################################################################
@cli.command(short_help="COMMAND OBSOLETE - Pull a built artifact", hidden=True)
-@click.option('--deps', '-d', default='none', show_default=True,
- type=click.Choice(['none', 'all']),
- help='The dependency artifacts to pull')
-@click.option('--remote', '-r',
- help="The URL of the remote cache (defaults to the first configured cache)")
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@click.option(
+ "--deps",
+ "-d",
+ default="none",
+ show_default=True,
+ type=click.Choice(["none", "all"]),
+ help="The dependency artifacts to pull",
+)
+@click.option("--remote", "-r", help="The URL of the remote cache (defaults to the first configured cache)")
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def pull(app, elements, deps, remote):
click.echo("This command is now obsolete. Use `bst artifact pull` instead.", err=True)
@@ -1419,13 +1555,18 @@ def pull(app, elements, deps, remote):
# Push Command #
##################################################################
@cli.command(short_help="COMMAND OBSOLETE - Push a built artifact", hidden=True)
-@click.option('--deps', '-d', default='none', show_default=True,
- type=click.Choice(['none', 'all']),
- help='The dependencies to push')
-@click.option('--remote', '-r', default=None,
- help="The URL of the remote cache (defaults to the first configured cache)")
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@click.option(
+ "--deps",
+ "-d",
+ default="none",
+ show_default=True,
+ type=click.Choice(["none", "all"]),
+ help="The dependencies to push",
+)
+@click.option(
+ "--remote", "-r", default=None, help="The URL of the remote cache (defaults to the first configured cache)"
+)
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def push(app, elements, deps, remote):
click.echo("This command is now obsolete. Use `bst artifact push` instead.", err=True)
diff --git a/src/buildstream/_frontend/complete.py b/src/buildstream/_frontend/complete.py
index 06067f6cc..45e857e3e 100644
--- a/src/buildstream/_frontend/complete.py
+++ b/src/buildstream/_frontend/complete.py
@@ -39,9 +39,9 @@ import click
from click.core import MultiCommand, Option, Argument
from click.parser import split_arg_string
-WORDBREAK = '='
+WORDBREAK = "="
-COMPLETION_SCRIPT = '''
+COMPLETION_SCRIPT = """
%(complete_func)s() {
local IFS=$'\n'
COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \\
@@ -51,7 +51,7 @@ COMPLETION_SCRIPT = '''
}
complete -F %(complete_func)s -o nospace %(script_names)s
-'''
+"""
# An exception for our custom completion handler to
@@ -62,7 +62,7 @@ class CompleteUnhandled(Exception):
pass
-def complete_path(path_type, incomplete, base_directory='.'):
+def complete_path(path_type, incomplete, base_directory="."):
"""Helper method for implementing the completions() method
for File and Path parameter types.
"""
@@ -71,7 +71,7 @@ def complete_path(path_type, incomplete, base_directory='.'):
# specified in `incomplete` minus the last path component,
# otherwise list files starting from the current working directory.
entries = []
- base_path = ''
+ base_path = ""
# This is getting a bit messy
listed_base_directory = False
@@ -128,11 +128,11 @@ def complete_path(path_type, incomplete, base_directory='.'):
return [
# Return an appropriate path for each entry
- fix_path(e) for e in sorted(entries)
-
+ fix_path(e)
+ for e in sorted(entries)
# Filter out non directory elements when searching for a directory,
# the opposite is fine, however.
- if not (path_type == 'Directory' and not entry_is_dir(e))
+ if not (path_type == "Directory" and not entry_is_dir(e))
]
@@ -183,7 +183,7 @@ def start_of_option(param_str):
:param param_str: param_str to check
:return: whether or not this is the start of an option declaration (i.e. starts "-" or "--")
"""
- return param_str and param_str[:1] == '-'
+ return param_str and param_str[:1] == "-"
def is_incomplete_option(all_args, cmd_param):
@@ -218,8 +218,11 @@ def is_incomplete_argument(current_params, cmd_param):
return True
if cmd_param.nargs == -1:
return True
- if isinstance(current_param_values, collections.abc.Iterable) \
- and cmd_param.nargs > 1 and len(current_param_values) < cmd_param.nargs:
+ if (
+ isinstance(current_param_values, collections.abc.Iterable)
+ and cmd_param.nargs > 1
+ and len(current_param_values) < cmd_param.nargs
+ ):
return True
return False
@@ -237,10 +240,7 @@ def get_user_autocompletions(args, incomplete, cmd, cmd_param, override):
# Use the type specific default completions unless it was overridden
try:
- return override(cmd=cmd,
- cmd_param=cmd_param,
- args=args,
- incomplete=incomplete)
+ return override(cmd=cmd, cmd_param=cmd_param, args=args, incomplete=incomplete)
except CompleteUnhandled:
return get_param_type_completion(cmd_param.type, incomplete) or []
@@ -269,7 +269,7 @@ def get_choices(cli, prog_name, args, incomplete, override):
all_args.append(partition_incomplete[0])
incomplete = partition_incomplete[2]
elif incomplete == WORDBREAK:
- incomplete = ''
+ incomplete = ""
choices = []
found_param = False
@@ -277,8 +277,13 @@ def get_choices(cli, prog_name, args, incomplete, override):
# completions for options
for param in ctx.command.params:
if isinstance(param, Option):
- choices.extend([param_opt + " " for param_opt in param.opts + param.secondary_opts
- if param_opt not in all_args or param.multiple])
+ choices.extend(
+ [
+ param_opt + " "
+ for param_opt in param.opts + param.secondary_opts
+ if param_opt not in all_args or param.multiple
+ ]
+ )
found_param = True
if not found_param:
# completion for option values by choices
@@ -297,14 +302,22 @@ def get_choices(cli, prog_name, args, incomplete, override):
if not found_param and isinstance(ctx.command, MultiCommand):
# completion for any subcommands
- choices.extend([cmd + " " for cmd in ctx.command.list_commands(ctx)
- if not ctx.command.get_command(ctx, cmd).hidden])
-
- if not start_of_option(incomplete) and ctx.parent is not None \
- and isinstance(ctx.parent.command, MultiCommand) and ctx.parent.command.chain:
+ choices.extend(
+ [cmd + " " for cmd in ctx.command.list_commands(ctx) if not ctx.command.get_command(ctx, cmd).hidden]
+ )
+
+ if (
+ not start_of_option(incomplete)
+ and ctx.parent is not None
+ and isinstance(ctx.parent.command, MultiCommand)
+ and ctx.parent.command.chain
+ ):
# completion for chained commands
- visible_commands = [cmd for cmd in ctx.parent.command.list_commands(ctx.parent)
- if not ctx.parent.command.get_command(ctx.parent, cmd).hidden]
+ visible_commands = [
+ cmd
+ for cmd in ctx.parent.command.list_commands(ctx.parent)
+ if not ctx.parent.command.get_command(ctx.parent, cmd).hidden
+ ]
remaining_commands = set(visible_commands) - set(ctx.parent.protected_args)
choices.extend([cmd + " " for cmd in remaining_commands])
@@ -314,13 +327,13 @@ def get_choices(cli, prog_name, args, incomplete, override):
def do_complete(cli, prog_name, override):
- cwords = split_arg_string(os.environ['COMP_WORDS'])
- cword = int(os.environ['COMP_CWORD'])
+ cwords = split_arg_string(os.environ["COMP_WORDS"])
+ cword = int(os.environ["COMP_CWORD"])
args = cwords[1:cword]
try:
incomplete = cwords[cword]
except IndexError:
- incomplete = ''
+ incomplete = ""
for item in get_choices(cli, prog_name, args, incomplete, override):
click.echo(item)
@@ -331,7 +344,7 @@ def do_complete(cli, prog_name, override):
def main_bashcomplete(cmd, prog_name, override):
"""Internal handler for the bash completion support."""
- if '_BST_COMPLETION' in os.environ:
+ if "_BST_COMPLETION" in os.environ:
do_complete(cmd, prog_name, override)
return True
diff --git a/src/buildstream/_frontend/linuxapp.py b/src/buildstream/_frontend/linuxapp.py
index 0444dc7b4..987b023bd 100644
--- a/src/buildstream/_frontend/linuxapp.py
+++ b/src/buildstream/_frontend/linuxapp.py
@@ -28,9 +28,9 @@ from .app import App
#
def _osc_777_supported():
- term = os.environ.get('TERM')
+ term = os.environ.get("TERM")
- if term and (term.startswith('xterm') or term.startswith('vte')):
+ if term and (term.startswith("xterm") or term.startswith("vte")):
# Since vte version 4600, upstream silently ignores
# the OSC 777 without printing garbage to the terminal.
@@ -39,7 +39,7 @@ def _osc_777_supported():
# will trigger a desktop notification and bring attention
# to the terminal.
#
- vte_version = os.environ.get('VTE_VERSION')
+ vte_version = os.environ.get("VTE_VERSION")
try:
vte_version_int = int(vte_version)
except (ValueError, TypeError):
@@ -54,7 +54,6 @@ def _osc_777_supported():
# A linux specific App implementation
#
class LinuxApp(App):
-
def notify(self, title, text):
# Currently we only try this notification method
diff --git a/src/buildstream/_frontend/profile.py b/src/buildstream/_frontend/profile.py
index dda0f7ffe..f49be5b0a 100644
--- a/src/buildstream/_frontend/profile.py
+++ b/src/buildstream/_frontend/profile.py
@@ -28,7 +28,7 @@ import click
# Kwargs:
# The same keyword arguments which can be used with click.style()
#
-class Profile():
+class Profile:
def __init__(self, **kwargs):
self._kwargs = dict(kwargs)
@@ -64,7 +64,6 @@ class Profile():
# arguments
#
def fmt_subst(self, text, varname, value, **kwargs):
-
def subst_callback(match):
# Extract and format the "{(varname)...}" portion of the match
inner_token = match.group(1)
diff --git a/src/buildstream/_frontend/status.py b/src/buildstream/_frontend/status.py
index 85fdf7451..a3f0d8aa7 100644
--- a/src/buildstream/_frontend/status.py
+++ b/src/buildstream/_frontend/status.py
@@ -43,19 +43,12 @@ from .widget import TimeCode
# error_profile (Profile): Formatting profile for error text
# stream (Stream): The Stream
#
-class Status():
+class Status:
# Table of the terminal capabilities we require and use
- _TERM_CAPABILITIES = {
- 'move_up': 'cuu1',
- 'move_x': 'hpa',
- 'clear_eol': 'el'
- }
+ _TERM_CAPABILITIES = {"move_up": "cuu1", "move_x": "hpa", "clear_eol": "el"}
- def __init__(self, context, state,
- content_profile, format_profile,
- success_profile, error_profile,
- stream):
+ def __init__(self, context, state, content_profile, format_profile, success_profile, error_profile, stream):
self._context = context
self._state = state
@@ -67,10 +60,9 @@ class Status():
self._jobs = OrderedDict()
self._last_lines = 0 # Number of status lines we last printed to console
self._spacing = 1
- self._header = _StatusHeader(context, state,
- content_profile, format_profile,
- success_profile, error_profile,
- stream)
+ self._header = _StatusHeader(
+ context, state, content_profile, format_profile, success_profile, error_profile, stream
+ )
self._term_width, _ = click.get_terminal_size()
self._alloc_lines = 0
@@ -131,7 +123,7 @@ class Status():
# feeds for the amount of lines we intend to print first, and
# move cursor position back to the first line
for _ in range(self._alloc_lines + self._header.lines):
- click.echo('', err=True)
+ click.echo("", err=True)
for _ in range(self._alloc_lines + self._header.lines):
self._move_up()
@@ -143,14 +135,14 @@ class Status():
# alignment of each column
n_columns = len(self._alloc_columns)
for line in self._job_lines(n_columns):
- text = ''
+ text = ""
for job in line:
column = line.index(job)
text += job.render(self._alloc_columns[column] - job.size, elapsed)
# Add spacing between columns
if column < (n_columns - 1):
- text += ' ' * self._spacing
+ text += " " * self._spacing
# Print the line
click.echo(text, err=True)
@@ -196,7 +188,7 @@ class Status():
# Initialized terminal, curses might decide it doesnt
# support this terminal
try:
- curses.setupterm(os.environ.get('TERM', 'dumb'))
+ curses.setupterm(os.environ.get("TERM", "dumb"))
except curses.error:
return None
@@ -221,7 +213,7 @@ class Status():
# as well, and should provide better compatibility with most
# terminals.
#
- term_caps[capname] = code.decode('latin1')
+ term_caps[capname] = code.decode("latin1")
return term_caps
@@ -236,19 +228,19 @@ class Status():
# Explicitly move to beginning of line, fixes things up
# when there was a ^C or ^Z printed to the terminal.
- move_x = curses.tparm(self._term_caps['move_x'].encode('latin1'), 0)
- move_x = move_x.decode('latin1')
+ move_x = curses.tparm(self._term_caps["move_x"].encode("latin1"), 0)
+ move_x = move_x.decode("latin1")
- move_up = curses.tparm(self._term_caps['move_up'].encode('latin1'))
- move_up = move_up.decode('latin1')
+ move_up = curses.tparm(self._term_caps["move_up"].encode("latin1"))
+ move_up = move_up.decode("latin1")
click.echo(move_x + move_up, nl=False, err=True)
def _clear_line(self):
assert self._term_caps is not None
- clear_eol = curses.tparm(self._term_caps['clear_eol'].encode('latin1'))
- clear_eol = clear_eol.decode('latin1')
+ clear_eol = curses.tparm(self._term_caps["clear_eol"].encode("latin1"))
+ clear_eol = clear_eol.decode("latin1")
click.echo(clear_eol, nl=False, err=True)
def _allocate(self):
@@ -277,7 +269,7 @@ class Status():
def _job_lines(self, columns):
jobs_list = list(self._jobs.values())
for i in range(0, len(self._jobs), columns):
- yield jobs_list[i:i + columns]
+ yield jobs_list[i : i + columns]
# Returns an array of integers representing the maximum
# length in characters for each column, given the current
@@ -307,9 +299,7 @@ class Status():
def _add_job(self, action_name, full_name):
task = self._state.tasks[(action_name, full_name)]
elapsed = task.elapsed_offset
- job = _StatusJob(self._context, action_name, full_name,
- self._content_profile, self._format_profile,
- elapsed)
+ job = _StatusJob(self._context, action_name, full_name, self._content_profile, self._format_profile, elapsed)
self._jobs[(action_name, full_name)] = job
self._need_alloc = True
@@ -338,12 +328,8 @@ class Status():
# error_profile (Profile): Formatting profile for error text
# stream (Stream): The Stream
#
-class _StatusHeader():
-
- def __init__(self, context, state,
- content_profile, format_profile,
- success_profile, error_profile,
- stream):
+class _StatusHeader:
+ def __init__(self, context, state, content_profile, format_profile, success_profile, error_profile, stream):
#
# Public members
@@ -375,19 +361,22 @@ class _StatusHeader():
total = str(len(self._stream.total_elements))
size = 0
- text = ''
+ text = ""
size += len(total) + len(session) + 4 # Size for (N/N) with a leading space
size += 8 # Size of time code
size += len(project.name) + 1
text += self._time_code.render_time(elapsed)
- text += ' ' + self._content_profile.fmt(project.name)
- text += ' ' + self._format_profile.fmt('(') + \
- self._content_profile.fmt(session) + \
- self._format_profile.fmt('/') + \
- self._content_profile.fmt(total) + \
- self._format_profile.fmt(')')
-
- line1 = self._centered(text, size, line_length, '=')
+ text += " " + self._content_profile.fmt(project.name)
+ text += (
+ " "
+ + self._format_profile.fmt("(")
+ + self._content_profile.fmt(session)
+ + self._format_profile.fmt("/")
+ + self._content_profile.fmt(total)
+ + self._format_profile.fmt(")")
+ )
+
+ line1 = self._centered(text, size, line_length, "=")
#
# Line 2: Dynamic list of queue status reports
@@ -395,7 +384,7 @@ class _StatusHeader():
# (Sources Fetched:0 117 0)→ (Built:4 0 0)
#
size = 0
- text = ''
+ text = ""
# Format and calculate size for each queue progress
for index, task_group in enumerate(self._state.task_groups.values()):
@@ -403,13 +392,13 @@ class _StatusHeader():
# Add spacing
if index > 0:
size += 2
- text += self._format_profile.fmt('→ ')
+ text += self._format_profile.fmt("→ ")
group_text, group_size = self._render_task_group(task_group)
size += group_size
text += group_text
- line2 = self._centered(text, size, line_length, ' ')
+ line2 = self._centered(text, size, line_length, " ")
#
# Line 3: Cache usage percentage report
@@ -423,7 +412,7 @@ class _StatusHeader():
if usage.used_size is None:
# Cache usage is unknown
size = 0
- text = ''
+ text = ""
else:
size = 21
size += len(usage_string)
@@ -434,15 +423,17 @@ class _StatusHeader():
else:
formatted_usage = self._success_profile.fmt(usage_string)
- text = self._format_profile.fmt("~~~~~~ ") + \
- self._content_profile.fmt('cache') + \
- self._format_profile.fmt(': ') + \
- formatted_usage + \
- self._format_profile.fmt(' ~~~~~~')
+ text = (
+ self._format_profile.fmt("~~~~~~ ")
+ + self._content_profile.fmt("cache")
+ + self._format_profile.fmt(": ")
+ + formatted_usage
+ + self._format_profile.fmt(" ~~~~~~")
+ )
- line3 = self._centered(text, size, line_length, ' ')
+ line3 = self._centered(text, size, line_length, " ")
- return line1 + '\n' + line2 + '\n' + line3
+ return line1 + "\n" + line2 + "\n" + line3
###################################################
# Private Methods #
@@ -455,13 +446,17 @@ class _StatusHeader():
size = 5 # Space for the formatting '[', ':', ' ', ' ' and ']'
size += len(group.complete_name)
size += len(processed) + len(skipped) + len(failed)
- text = self._format_profile.fmt("(") + \
- self._content_profile.fmt(group.complete_name) + \
- self._format_profile.fmt(":") + \
- self._success_profile.fmt(processed) + ' ' + \
- self._content_profile.fmt(skipped) + ' ' + \
- self._error_profile.fmt(failed) + \
- self._format_profile.fmt(")")
+ text = (
+ self._format_profile.fmt("(")
+ + self._content_profile.fmt(group.complete_name)
+ + self._format_profile.fmt(":")
+ + self._success_profile.fmt(processed)
+ + " "
+ + self._content_profile.fmt(skipped)
+ + " "
+ + self._error_profile.fmt(failed)
+ + self._format_profile.fmt(")")
+ )
return (text, size)
@@ -469,9 +464,9 @@ class _StatusHeader():
remaining = line_length - size
remaining -= 2
- final_text = self._format_profile.fmt(fill * (remaining // 2)) + ' '
+ final_text = self._format_profile.fmt(fill * (remaining // 2)) + " "
final_text += text
- final_text += ' ' + self._format_profile.fmt(fill * (remaining // 2))
+ final_text += " " + self._format_profile.fmt(fill * (remaining // 2))
return final_text
@@ -488,14 +483,13 @@ class _StatusHeader():
# format_profile (Profile): Formatting profile for formatting text
# elapsed (datetime): The offset into the session when this job is created
#
-class _StatusJob():
-
+class _StatusJob:
def __init__(self, context, action_name, full_name, content_profile, format_profile, elapsed):
#
# Public members
#
- self.action_name = action_name # The action name
- self.size = None # The number of characters required to render
+ self.action_name = action_name # The action name
+ self.size = None # The number of characters required to render
self.full_name = full_name
#
@@ -568,24 +562,26 @@ class _StatusJob():
# elapsed (datetime): The session elapsed time offset
#
def render(self, padding, elapsed):
- text = self._format_profile.fmt('[') + \
- self._time_code.render_time(elapsed - self._offset) + \
- self._format_profile.fmt(']')
-
- text += self._format_profile.fmt('[') + \
- self._content_profile.fmt(self.action_name) + \
- self._format_profile.fmt(':') + \
- self._content_profile.fmt(self.full_name)
+ text = (
+ self._format_profile.fmt("[")
+ + self._time_code.render_time(elapsed - self._offset)
+ + self._format_profile.fmt("]")
+ )
+
+ text += (
+ self._format_profile.fmt("[")
+ + self._content_profile.fmt(self.action_name)
+ + self._format_profile.fmt(":")
+ + self._content_profile.fmt(self.full_name)
+ )
if self._current_progress is not None:
- text += self._format_profile.fmt(':') + \
- self._content_profile.fmt(str(self._current_progress))
+ text += self._format_profile.fmt(":") + self._content_profile.fmt(str(self._current_progress))
if self._maximum_progress is not None:
- text += self._format_profile.fmt('/') + \
- self._content_profile.fmt(str(self._maximum_progress))
+ text += self._format_profile.fmt("/") + self._content_profile.fmt(str(self._maximum_progress))
# Add padding before terminating ']'
- terminator = (' ' * padding) + ']'
+ terminator = (" " * padding) + "]"
text += self._format_profile.fmt(terminator)
return text
diff --git a/src/buildstream/_frontend/widget.py b/src/buildstream/_frontend/widget.py
index 0a268b717..63fbfbb7d 100644
--- a/src/buildstream/_frontend/widget.py
+++ b/src/buildstream/_frontend/widget.py
@@ -45,8 +45,7 @@ ERROR_MESSAGES = [MessageType.FAIL, MessageType.ERROR, MessageType.BUG]
#
# An abstract class for printing output columns in our text UI.
#
-class Widget():
-
+class Widget:
def __init__(self, context, content_profile, format_profile):
# The context
@@ -74,7 +73,6 @@ class Widget():
# Used to add fixed text between columns
class FixedText(Widget):
-
def __init__(self, context, text, content_profile, format_profile):
super().__init__(context, content_profile, format_profile)
self.text = text
@@ -91,15 +89,13 @@ class WallclockTime(Widget):
def render(self, message):
- fields = [self.content_profile.fmt("{:02d}".format(x)) for x in
- [message.creation_time.hour,
- message.creation_time.minute,
- message.creation_time.second,
- ]
- ]
+ fields = [
+ self.content_profile.fmt("{:02d}".format(x))
+ for x in [message.creation_time.hour, message.creation_time.minute, message.creation_time.second,]
+ ]
text = self.format_profile.fmt(":").join(fields)
- if self._output_format == 'us':
+ if self._output_format == "us":
text += self.content_profile.fmt(".{:06d}".format(message.creation_time.microsecond))
return text
@@ -107,11 +103,10 @@ class WallclockTime(Widget):
# A widget for rendering the debugging column
class Debug(Widget):
-
def render(self, message):
element_name = "n/a" if message.element_name is None else message.element_name
- text = self.format_profile.fmt('pid:')
+ text = self.format_profile.fmt("pid:")
text += self.content_profile.fmt("{: <5}".format(message.pid))
text += self.format_profile.fmt("element name:")
text += self.content_profile.fmt("{: <30}".format(element_name))
@@ -130,19 +125,13 @@ class TimeCode(Widget):
def render_time(self, elapsed):
if elapsed is None:
- fields = [
- self.content_profile.fmt('--')
- for i in range(3)
- ]
+ fields = [self.content_profile.fmt("--") for i in range(3)]
else:
hours, remainder = divmod(int(elapsed.total_seconds()), 60 * 60)
minutes, seconds = divmod(remainder, 60)
- fields = [
- self.content_profile.fmt("{0:02d}".format(field))
- for field in [hours, minutes, seconds]
- ]
+ fields = [self.content_profile.fmt("{0:02d}".format(field)) for field in [hours, minutes, seconds]]
- text = self.format_profile.fmt(':').join(fields)
+ text = self.format_profile.fmt(":").join(fields)
if self._microseconds:
if elapsed is not None:
@@ -169,41 +158,43 @@ class TypeName(Widget):
}
def render(self, message):
- return self.content_profile.fmt("{: <7}"
- .format(message.message_type.upper()),
- bold=True, dim=True,
- fg=self._action_colors[message.message_type])
+ return self.content_profile.fmt(
+ "{: <7}".format(message.message_type.upper()),
+ bold=True,
+ dim=True,
+ fg=self._action_colors[message.message_type],
+ )
# A widget for displaying the Element name
class ElementName(Widget):
-
def render(self, message):
action_name = message.action_name
element_name = message.element_name
if element_name is not None:
- name = '{: <30}'.format(element_name)
+ name = "{: <30}".format(element_name)
else:
- name = 'core activity'
- name = '{: <30}'.format(name)
+ name = "core activity"
+ name = "{: <30}".format(name)
if not action_name:
action_name = "Main"
- return self.content_profile.fmt("{: >8}".format(action_name.lower())) + \
- self.format_profile.fmt(':') + self.content_profile.fmt(name)
+ return (
+ self.content_profile.fmt("{: >8}".format(action_name.lower()))
+ + self.format_profile.fmt(":")
+ + self.content_profile.fmt(name)
+ )
# A widget for displaying the primary message text
class MessageText(Widget):
-
def render(self, message):
return message.message
# A widget for formatting the element cache key
class CacheKey(Widget):
-
def __init__(self, context, content_profile, format_profile, err_profile):
super().__init__(context, content_profile, format_profile)
@@ -216,10 +207,10 @@ class CacheKey(Widget):
return ""
if message.element_name is None:
- return ' ' * self._key_length
+ return " " * self._key_length
missing = False
- key = ' ' * self._key_length
+ key = " " * self._key_length
if message.element_key:
_, key, missing = message.element_key
@@ -233,7 +224,6 @@ class CacheKey(Widget):
# A widget for formatting the log file
class LogFile(Widget):
-
def __init__(self, context, content_profile, format_profile, err_profile):
super().__init__(context, content_profile, format_profile)
@@ -248,7 +238,7 @@ class LogFile(Widget):
logfile = message.logfile
if abbrev and self._logdir != "" and logfile.startswith(self._logdir):
- logfile = logfile[len(self._logdir):]
+ logfile = logfile[len(self._logdir) :]
logfile = logfile.lstrip(os.sep)
if message.message_type in ERROR_MESSAGES:
@@ -256,7 +246,7 @@ class LogFile(Widget):
else:
text = self.content_profile.fmt(logfile, dim=True)
else:
- text = ''
+ text = ""
return text
@@ -273,8 +263,7 @@ class MessageOrLogFile(Widget):
def render(self, message):
# Show the log file only in the main start/success messages
- if message.logfile and message.scheduler and \
- message.message_type in [MessageType.START, MessageType.SUCCESS]:
+ if message.logfile and message.scheduler and message.message_type in [MessageType.START, MessageType.SUCCESS]:
text = self._logfile_widget.render(message)
else:
text = self._message_widget.render(message)
@@ -296,14 +285,9 @@ class MessageOrLogFile(Widget):
# indent (int): Number of spaces to use for general indentation
#
class LogLine(Widget):
-
- def __init__(self, context, state,
- content_profile,
- format_profile,
- success_profile,
- err_profile,
- detail_profile,
- indent=4):
+ def __init__(
+ self, context, state, content_profile, format_profile, success_profile, err_profile, detail_profile, indent=4
+ ):
super().__init__(context, content_profile, format_profile)
self._columns = []
@@ -311,7 +295,7 @@ class LogLine(Widget):
self._success_profile = success_profile
self._err_profile = err_profile
self._detail_profile = detail_profile
- self._indent = ' ' * indent
+ self._indent = " " * indent
self._log_lines = context.log_error_lines
self._message_lines = context.log_message_lines
self._resolved_keys = None
@@ -320,19 +304,17 @@ class LogLine(Widget):
self._logfile_widget = LogFile(context, content_profile, format_profile, err_profile)
if context.log_debug:
- self._columns.extend([
- Debug(context, content_profile, format_profile)
- ])
+ self._columns.extend([Debug(context, content_profile, format_profile)])
self.logfile_variable_names = {
"elapsed": TimeCode(context, content_profile, format_profile, microseconds=False),
"elapsed-us": TimeCode(context, content_profile, format_profile, microseconds=True),
"wallclock": WallclockTime(context, content_profile, format_profile),
- "wallclock-us": WallclockTime(context, content_profile, format_profile, output_format='us'),
+ "wallclock-us": WallclockTime(context, content_profile, format_profile, output_format="us"),
"key": CacheKey(context, content_profile, format_profile, err_profile),
"element": ElementName(context, content_profile, format_profile),
"action": TypeName(context, content_profile, format_profile),
- "message": MessageOrLogFile(context, content_profile, format_profile, err_profile)
+ "message": MessageOrLogFile(context, content_profile, format_profile, err_profile),
}
logfile_tokens = self._parse_logfile_format(context.log_message_format, content_profile, format_profile)
self._columns.extend(logfile_tokens)
@@ -352,7 +334,7 @@ class LogLine(Widget):
# (str): The formatted list of elements
#
def show_pipeline(self, dependencies, format_):
- report = ''
+ report = ""
p = Profile()
for element in dependencies:
@@ -360,57 +342,57 @@ class LogLine(Widget):
full_key, cache_key, dim_keys = element._get_display_key()
- line = p.fmt_subst(line, 'name', element._get_full_name(), fg='blue', bold=True)
- line = p.fmt_subst(line, 'key', cache_key, fg='yellow', dim=dim_keys)
- line = p.fmt_subst(line, 'full-key', full_key, fg='yellow', dim=dim_keys)
+ line = p.fmt_subst(line, "name", element._get_full_name(), fg="blue", bold=True)
+ line = p.fmt_subst(line, "key", cache_key, fg="yellow", dim=dim_keys)
+ line = p.fmt_subst(line, "full-key", full_key, fg="yellow", dim=dim_keys)
consistency = element._get_consistency()
if consistency == Consistency.INCONSISTENT:
- line = p.fmt_subst(line, 'state', "no reference", fg='red')
+ line = p.fmt_subst(line, "state", "no reference", fg="red")
else:
if element._cached_failure():
- line = p.fmt_subst(line, 'state', "failed", fg='red')
+ line = p.fmt_subst(line, "state", "failed", fg="red")
elif element._cached_success():
- line = p.fmt_subst(line, 'state', "cached", fg='magenta')
+ line = p.fmt_subst(line, "state", "cached", fg="magenta")
elif consistency == Consistency.RESOLVED and not element._source_cached():
- line = p.fmt_subst(line, 'state', "fetch needed", fg='red')
+ line = p.fmt_subst(line, "state", "fetch needed", fg="red")
elif element._buildable():
- line = p.fmt_subst(line, 'state', "buildable", fg='green')
+ line = p.fmt_subst(line, "state", "buildable", fg="green")
else:
- line = p.fmt_subst(line, 'state', "waiting", fg='blue')
+ line = p.fmt_subst(line, "state", "waiting", fg="blue")
# Element configuration
if "%{config" in format_:
line = p.fmt_subst(
- line, 'config',
- yaml.round_trip_dump(element._Element__config, default_flow_style=False, allow_unicode=True))
+ line,
+ "config",
+ yaml.round_trip_dump(element._Element__config, default_flow_style=False, allow_unicode=True),
+ )
# Variables
if "%{vars" in format_:
variables = element._Element__variables.flat
line = p.fmt_subst(
- line, 'vars',
- yaml.round_trip_dump(variables, default_flow_style=False, allow_unicode=True))
+ line, "vars", yaml.round_trip_dump(variables, default_flow_style=False, allow_unicode=True)
+ )
# Environment
if "%{env" in format_:
environment = element._Element__environment
line = p.fmt_subst(
- line, 'env',
- yaml.round_trip_dump(environment, default_flow_style=False, allow_unicode=True))
+ line, "env", yaml.round_trip_dump(environment, default_flow_style=False, allow_unicode=True)
+ )
# Public
if "%{public" in format_:
environment = element._Element__public
line = p.fmt_subst(
- line, 'public',
- yaml.round_trip_dump(environment, default_flow_style=False, allow_unicode=True))
+ line, "public", yaml.round_trip_dump(environment, default_flow_style=False, allow_unicode=True)
+ )
# Workspaced
if "%{workspaced" in format_:
- line = p.fmt_subst(
- line, 'workspaced',
- '(workspaced)' if element._get_workspace() else '', fg='yellow')
+ line = p.fmt_subst(line, "workspaced", "(workspaced)" if element._get_workspace() else "", fg="yellow")
# Workspace-dirs
if "%{workspace-dirs" in format_:
@@ -418,36 +400,31 @@ class LogLine(Widget):
if workspace is not None:
path = workspace.get_absolute_path()
if path.startswith("~/"):
- path = os.path.join(os.getenv('HOME', '/root'), path[2:])
- line = p.fmt_subst(line, 'workspace-dirs', "Workspace: {}".format(path))
+ path = os.path.join(os.getenv("HOME", "/root"), path[2:])
+ line = p.fmt_subst(line, "workspace-dirs", "Workspace: {}".format(path))
else:
- line = p.fmt_subst(
- line, 'workspace-dirs', '')
+ line = p.fmt_subst(line, "workspace-dirs", "")
# Dependencies
if "%{deps" in format_:
deps = [e.name for e in element.dependencies(Scope.ALL, recurse=False)]
- line = p.fmt_subst(
- line, 'deps',
- yaml.safe_dump(deps, default_style=None).rstrip('\n'))
+ line = p.fmt_subst(line, "deps", yaml.safe_dump(deps, default_style=None).rstrip("\n"))
# Build Dependencies
if "%{build-deps" in format_:
build_deps = [e.name for e in element.dependencies(Scope.BUILD, recurse=False)]
- line = p.fmt_subst(
- line, 'build-deps',
- yaml.safe_dump(build_deps, default_style=False).rstrip('\n'))
+ line = p.fmt_subst(line, "build-deps", yaml.safe_dump(build_deps, default_style=False).rstrip("\n"))
# Runtime Dependencies
if "%{runtime-deps" in format_:
runtime_deps = [e.name for e in element.dependencies(Scope.RUN, recurse=False)]
line = p.fmt_subst(
- line, 'runtime-deps',
- yaml.safe_dump(runtime_deps, default_style=False).rstrip('\n'))
+ line, "runtime-deps", yaml.safe_dump(runtime_deps, default_style=False).rstrip("\n")
+ )
- report += line + '\n'
+ report += line + "\n"
- return report.rstrip('\n')
+ return report.rstrip("\n")
# print_heading()
#
@@ -463,25 +440,24 @@ class LogLine(Widget):
def print_heading(self, project, stream, *, log_file):
context = self.context
starttime = datetime.datetime.now()
- text = ''
+ text = ""
self._resolved_keys = {element: element._get_cache_key() for element in stream.session_elements}
# Main invocation context
- text += '\n'
+ text += "\n"
text += self.content_profile.fmt("BuildStream Version {}\n".format(bst_version), bold=True)
values = OrderedDict()
- values["Session Start"] = starttime.strftime('%A, %d-%m-%Y at %H:%M:%S')
+ values["Session Start"] = starttime.strftime("%A, %d-%m-%Y at %H:%M:%S")
values["Project"] = "{} ({})".format(project.name, project.directory)
values["Targets"] = ", ".join([t.name for t in stream.targets])
text += self._format_values(values)
# User configurations
- text += '\n'
+ text += "\n"
text += self.content_profile.fmt("User Configuration\n", bold=True)
values = OrderedDict()
- values["Configuration File"] = \
- "Default Configuration" if not context.config_origin else context.config_origin
+ values["Configuration File"] = "Default Configuration" if not context.config_origin else context.config_origin
values["Cache Directory"] = context.cachedir
values["Log Files"] = context.logdir
values["Source Mirrors"] = context.sourcedir
@@ -492,7 +468,7 @@ class LogLine(Widget):
values["Maximum Push Tasks"] = context.sched_pushers
values["Maximum Network Retries"] = context.sched_network_retries
text += self._format_values(values)
- text += '\n'
+ text += "\n"
# Project Options
values = OrderedDict()
@@ -500,22 +476,25 @@ class LogLine(Widget):
if values:
text += self.content_profile.fmt("Project Options\n", bold=True)
text += self._format_values(values)
- text += '\n'
+ text += "\n"
# Plugins
- text += self._format_plugins(project.first_pass_config.element_factory.loaded_dependencies,
- project.first_pass_config.source_factory.loaded_dependencies)
+ text += self._format_plugins(
+ project.first_pass_config.element_factory.loaded_dependencies,
+ project.first_pass_config.source_factory.loaded_dependencies,
+ )
if project.config.element_factory and project.config.source_factory:
- text += self._format_plugins(project.config.element_factory.loaded_dependencies,
- project.config.source_factory.loaded_dependencies)
+ text += self._format_plugins(
+ project.config.element_factory.loaded_dependencies, project.config.source_factory.loaded_dependencies
+ )
# Pipeline state
text += self.content_profile.fmt("Pipeline\n", bold=True)
text += self.show_pipeline(stream.total_elements, context.log_element_format)
- text += '\n'
+ text += "\n"
# Separator line before following output
- text += self.format_profile.fmt("=" * 79 + '\n')
+ text += self.format_profile.fmt("=" * 79 + "\n")
click.echo(text, nl=False, err=True)
if log_file:
@@ -537,7 +516,7 @@ class LogLine(Widget):
if not self._state.task_groups:
return
- text = ''
+ text = ""
assert self._resolved_keys is not None
elements = sorted(e for (e, k) in self._resolved_keys.items() if k != e._get_cache_key())
@@ -554,7 +533,7 @@ class LogLine(Widget):
# Exclude the failure messages if the job didn't ultimately fail
# (e.g. succeeded on retry)
if element_name in group.failed_tasks:
- values[element_name] = ''.join(self._render(v) for v in messages)
+ values[element_name] = "".join(self._render(v) for v in messages)
if values:
text += self.content_profile.fmt("Failure Summary\n", bold=True)
@@ -563,8 +542,8 @@ class LogLine(Widget):
text += self.content_profile.fmt("Pipeline Summary\n", bold=True)
values = OrderedDict()
- values['Total'] = self.content_profile.fmt(str(len(stream.total_elements)))
- values['Session'] = self.content_profile.fmt(str(len(stream.session_elements)))
+ values["Total"] = self.content_profile.fmt(str(len(stream.total_elements)))
+ values["Session"] = self.content_profile.fmt(str(len(stream.session_elements)))
processed_maxlen = 1
skipped_maxlen = 1
@@ -579,20 +558,25 @@ class LogLine(Widget):
skipped = str(group.skipped_tasks)
failed = str(len(group.failed_tasks))
- processed_align = ' ' * (processed_maxlen - len(processed))
- skipped_align = ' ' * (skipped_maxlen - len(skipped))
- failed_align = ' ' * (failed_maxlen - len(failed))
-
- status_text = self.content_profile.fmt("processed ") + \
- self._success_profile.fmt(processed) + \
- self.format_profile.fmt(', ') + processed_align
-
- status_text += self.content_profile.fmt("skipped ") + \
- self.content_profile.fmt(skipped) + \
- self.format_profile.fmt(', ') + skipped_align
-
- status_text += self.content_profile.fmt("failed ") + \
- self._err_profile.fmt(failed) + ' ' + failed_align
+ processed_align = " " * (processed_maxlen - len(processed))
+ skipped_align = " " * (skipped_maxlen - len(skipped))
+ failed_align = " " * (failed_maxlen - len(failed))
+
+ status_text = (
+ self.content_profile.fmt("processed ")
+ + self._success_profile.fmt(processed)
+ + self.format_profile.fmt(", ")
+ + processed_align
+ )
+
+ status_text += (
+ self.content_profile.fmt("skipped ")
+ + self.content_profile.fmt(skipped)
+ + self.format_profile.fmt(", ")
+ + skipped_align
+ )
+
+ status_text += self.content_profile.fmt("failed ") + self._err_profile.fmt(failed) + " " + failed_align
values["{} Queue".format(group.name)] = status_text
text += self._format_values(values, style_value=False)
@@ -627,7 +611,7 @@ class LogLine(Widget):
m = re.search(r"^%\{([^\}]+)\}", format_string)
if m is not None:
variable = m.group(1)
- format_string = format_string[m.end(0):]
+ format_string = format_string[m.end(0) :]
if variable not in self.logfile_variable_names:
raise Exception("'{0}' is not a valid log variable name.".format(variable))
logfile_tokens.append(self.logfile_variable_names[variable])
@@ -635,7 +619,7 @@ class LogLine(Widget):
m = re.search("^[^%]+", format_string)
if m is not None:
text = FixedText(self.context, m.group(0), content_profile, format_profile)
- format_string = format_string[m.end(0):]
+ format_string = format_string[m.end(0) :]
logfile_tokens.append(text)
else:
# No idea what to do now
@@ -645,11 +629,11 @@ class LogLine(Widget):
def _render(self, message):
# Render the column widgets first
- text = ''
+ text = ""
for widget in self._columns:
text += widget.render(message)
- text += '\n'
+ text += "\n"
extra_nl = False
@@ -664,51 +648,53 @@ class LogLine(Widget):
n_lines = len(lines)
abbrev = False
- if message.message_type not in ERROR_MESSAGES \
- and not frontend_message and n_lines > self._message_lines:
- lines = lines[0:self._message_lines]
+ if message.message_type not in ERROR_MESSAGES and not frontend_message and n_lines > self._message_lines:
+ lines = lines[0 : self._message_lines]
if self._message_lines > 0:
abbrev = True
else:
- lines[n_lines - 1] = lines[n_lines - 1].rstrip('\n')
+ lines[n_lines - 1] = lines[n_lines - 1].rstrip("\n")
detail = self._indent + self._indent.join(lines)
- text += '\n'
+ text += "\n"
if message.message_type in ERROR_MESSAGES:
text += self._err_profile.fmt(detail, bold=True)
else:
text += self._detail_profile.fmt(detail)
if abbrev:
- text += self._indent + \
- self.content_profile.fmt('Message contains {} additional lines'
- .format(n_lines - self._message_lines), dim=True)
- text += '\n'
+ text += self._indent + self.content_profile.fmt(
+ "Message contains {} additional lines".format(n_lines - self._message_lines), dim=True
+ )
+ text += "\n"
extra_nl = True
if message.scheduler and message.message_type == MessageType.FAIL:
- text += '\n'
+ text += "\n"
if self.context is not None and not self.context.log_verbose:
text += self._indent + self._err_profile.fmt("Log file: ")
- text += self._indent + self._logfile_widget.render(message) + '\n'
+ text += self._indent + self._logfile_widget.render(message) + "\n"
elif self._log_lines > 0:
- text += self._indent + self._err_profile.fmt("Printing the last {} lines from log file:"
- .format(self._log_lines)) + '\n'
- text += self._indent + self._logfile_widget.render_abbrev(message, abbrev=False) + '\n'
- text += self._indent + self._err_profile.fmt("=" * 70) + '\n'
+ text += (
+ self._indent
+ + self._err_profile.fmt("Printing the last {} lines from log file:".format(self._log_lines))
+ + "\n"
+ )
+ text += self._indent + self._logfile_widget.render_abbrev(message, abbrev=False) + "\n"
+ text += self._indent + self._err_profile.fmt("=" * 70) + "\n"
log_content = self._read_last_lines(message.logfile)
log_content = textwrap.indent(log_content, self._indent)
text += self._detail_profile.fmt(log_content)
- text += '\n'
- text += self._indent + self._err_profile.fmt("=" * 70) + '\n'
+ text += "\n"
+ text += self._indent + self._err_profile.fmt("=" * 70) + "\n"
extra_nl = True
if extra_nl:
- text += '\n'
+ text += "\n"
return text
@@ -716,14 +702,14 @@ class LogLine(Widget):
with ExitStack() as stack:
# mmap handles low-level memory details, allowing for
# faster searches
- f = stack.enter_context(open(logfile, 'r+'))
+ f = stack.enter_context(open(logfile, "r+"))
log = stack.enter_context(mmap(f.fileno(), os.path.getsize(f.name)))
count = 0
end = log.size() - 1
while count < self._log_lines and end >= 0:
- location = log.rfind(b'\n', 0, end)
+ location = log.rfind(b"\n", 0, end)
count += 1
# If location is -1 (none found), this will print the
@@ -735,8 +721,8 @@ class LogLine(Widget):
# then we get the first character. If end is a newline position,
# we discard it and only want to print the beginning of the next
# line.
- lines = log[(end + 1):].splitlines()
- return '\n'.join([line.decode('utf-8') for line in lines]).rstrip()
+ lines = log[(end + 1) :].splitlines()
+ return "\n".join([line.decode("utf-8") for line in lines]).rstrip()
def _format_plugins(self, element_plugins, source_plugins):
text = ""
@@ -756,7 +742,7 @@ class LogLine(Widget):
for plugin in source_plugins:
text += self.content_profile.fmt(" - {}\n".format(plugin))
- text += '\n'
+ text += "\n"
return text
@@ -773,23 +759,23 @@ class LogLine(Widget):
# (str): The formatted values
#
def _format_values(self, values, style_value=True):
- text = ''
+ text = ""
max_key_len = 0
for key, value in values.items():
max_key_len = max(len(key), max_key_len)
for key, value in values.items():
- if isinstance(value, str) and '\n' in value:
+ if isinstance(value, str) and "\n" in value:
text += self.format_profile.fmt(" {}:\n".format(key))
text += textwrap.indent(value, self._indent)
continue
- text += self.format_profile.fmt(" {}: {}".format(key, ' ' * (max_key_len - len(key))))
+ text += self.format_profile.fmt(" {}: {}".format(key, " " * (max_key_len - len(key))))
if style_value:
text += self.content_profile.fmt(str(value))
else:
text += str(value)
- text += '\n'
+ text += "\n"
return text
@@ -806,20 +792,20 @@ class LogLine(Widget):
# (str): The formatted values
#
def _pretty_print_dictionary(self, values, long_=False, style_value=True):
- text = ''
+ text = ""
max_key_len = 0
try:
max_key_len = max(len(key) for key in values.keys())
except ValueError:
- text = ''
+ text = ""
for key, value in values.items():
- if isinstance(value, str) and '\n' in value:
+ if isinstance(value, str) and "\n" in value:
text += self.format_profile.fmt(" {}:".format(key))
text += textwrap.indent(value, self._indent)
continue
- text += self.format_profile.fmt(" {}:{}".format(key, ' ' * (max_key_len - len(key))))
+ text += self.format_profile.fmt(" {}:{}".format(key, " " * (max_key_len - len(key))))
value_list = "\n\t" + "\n\t".join((self._get_filestats(v, list_long=long_) for v in value))
if value == []:
@@ -832,7 +818,7 @@ class LogLine(Widget):
text += self.content_profile.fmt(value_list)
else:
text += value_list
- text += '\n'
+ text += "\n"
return text
@@ -854,22 +840,22 @@ class LogLine(Widget):
# cached status of
#
def show_state_of_artifacts(self, targets):
- report = ''
+ report = ""
p = Profile()
for element in targets:
- line = '%{state: >12} %{name}'
- line = p.fmt_subst(line, 'name', element.name, fg='yellow')
+ line = "%{state: >12} %{name}"
+ line = p.fmt_subst(line, "name", element.name, fg="yellow")
if element._cached_success():
- line = p.fmt_subst(line, 'state', "cached", fg='magenta')
+ line = p.fmt_subst(line, "state", "cached", fg="magenta")
elif element._cached():
- line = p.fmt_subst(line, 'state', "failed", fg='red')
+ line = p.fmt_subst(line, "state", "failed", fg="red")
elif element._cached_remotely():
- line = p.fmt_subst(line, 'state', "available", fg='green')
+ line = p.fmt_subst(line, "state", "available", fg="green")
else:
- line = p.fmt_subst(line, 'state', "not cached", fg='bright_red')
+ line = p.fmt_subst(line, "state", "not cached", fg="bright_red")
- report += line + '\n'
+ report += line + "\n"
return report
@@ -890,15 +876,27 @@ class LogLine(Widget):
# Support files up to 99G, meaning maximum characters is 11
max_v_len = 11
if entry["type"] == _FileType.DIRECTORY:
- return "drwxr-xr-x dir {}".format(entry["size"]) +\
- "{} ".format(' ' * (max_v_len - len(size))) + "{}".format(entry["name"])
+ return (
+ "drwxr-xr-x dir {}".format(entry["size"])
+ + "{} ".format(" " * (max_v_len - len(size)))
+ + "{}".format(entry["name"])
+ )
elif entry["type"] == _FileType.SYMLINK:
- return "lrwxrwxrwx link {}".format(entry["size"]) +\
- "{} ".format(' ' * (max_v_len - len(size))) + "{} -> {}".format(entry["name"], entry["target"])
+ return (
+ "lrwxrwxrwx link {}".format(entry["size"])
+ + "{} ".format(" " * (max_v_len - len(size)))
+ + "{} -> {}".format(entry["name"], entry["target"])
+ )
elif entry["executable"]:
- return "-rwxr-xr-x exe {}".format(entry["size"]) +\
- "{} ".format(' ' * (max_v_len - len(size))) + "{}".format(entry["name"])
+ return (
+ "-rwxr-xr-x exe {}".format(entry["size"])
+ + "{} ".format(" " * (max_v_len - len(size)))
+ + "{}".format(entry["name"])
+ )
else:
- return "-rw-r--r-- reg {}".format(entry["size"]) +\
- "{} ".format(' ' * (max_v_len - len(size))) + "{}".format(entry["name"])
+ return (
+ "-rw-r--r-- reg {}".format(entry["size"])
+ + "{} ".format(" " * (max_v_len - len(size)))
+ + "{}".format(entry["name"])
+ )
return entry["name"]
diff --git a/src/buildstream/_gitsourcebase.py b/src/buildstream/_gitsourcebase.py
index 120d8c72a..4e9e59161 100644
--- a/src/buildstream/_gitsourcebase.py
+++ b/src/buildstream/_gitsourcebase.py
@@ -35,7 +35,7 @@ from . import utils
from .types import FastEnum
from .utils import move_atomic, DirectoryExistsError
-GIT_MODULES = '.gitmodules'
+GIT_MODULES = ".gitmodules"
# Warnings
WARN_INCONSISTENT_SUBMODULE = "inconsistent-submodule"
@@ -53,7 +53,6 @@ class _RefFormat(FastEnum):
# might have at a given time
#
class _GitMirror(SourceFetcher):
-
def __init__(self, source, path, url, ref, *, primary=False, tags=[]):
super().__init__()
@@ -80,59 +79,64 @@ class _GitMirror(SourceFetcher):
# system configured tmpdir is not on the same partition.
#
with self.source.tempdir() as tmpdir:
- url = self.source.translate_url(self.url, alias_override=alias_override,
- primary=self.primary)
- self.source.call([self.source.host_git, 'clone', '--mirror', '-n', url, tmpdir],
- fail="Failed to clone git repository {}".format(url),
- fail_temporarily=True)
+ url = self.source.translate_url(self.url, alias_override=alias_override, primary=self.primary)
+ self.source.call(
+ [self.source.host_git, "clone", "--mirror", "-n", url, tmpdir],
+ fail="Failed to clone git repository {}".format(url),
+ fail_temporarily=True,
+ )
try:
move_atomic(tmpdir, self.mirror)
except DirectoryExistsError:
# Another process was quicker to download this repository.
# Let's discard our own
- self.source.status("{}: Discarding duplicate clone of {}"
- .format(self.source, url))
+ self.source.status("{}: Discarding duplicate clone of {}".format(self.source, url))
except OSError as e:
- raise SourceError("{}: Failed to move cloned git repository {} from '{}' to '{}': {}"
- .format(self.source, url, tmpdir, self.mirror, e)) from e
+ raise SourceError(
+ "{}: Failed to move cloned git repository {} from '{}' to '{}': {}".format(
+ self.source, url, tmpdir, self.mirror, e
+ )
+ ) from e
def _fetch(self, alias_override=None):
- url = self.source.translate_url(self.url,
- alias_override=alias_override,
- primary=self.primary)
+ url = self.source.translate_url(self.url, alias_override=alias_override, primary=self.primary)
if alias_override:
remote_name = utils.url_directory_name(alias_override)
_, remotes = self.source.check_output(
- [self.source.host_git, 'remote'],
+ [self.source.host_git, "remote"],
fail="Failed to retrieve list of remotes in {}".format(self.mirror),
- cwd=self.mirror
+ cwd=self.mirror,
)
if remote_name not in remotes:
self.source.call(
- [self.source.host_git, 'remote', 'add', remote_name, url],
+ [self.source.host_git, "remote", "add", remote_name, url],
fail="Failed to add remote {} with url {}".format(remote_name, url),
- cwd=self.mirror
+ cwd=self.mirror,
)
else:
remote_name = "origin"
- self.source.call([self.source.host_git, 'fetch', remote_name, '--prune',
- '+refs/heads/*:refs/heads/*', '+refs/tags/*:refs/tags/*'],
- fail="Failed to fetch from remote git repository: {}".format(url),
- fail_temporarily=True,
- cwd=self.mirror)
+ self.source.call(
+ [
+ self.source.host_git,
+ "fetch",
+ remote_name,
+ "--prune",
+ "+refs/heads/*:refs/heads/*",
+ "+refs/tags/*:refs/tags/*",
+ ],
+ fail="Failed to fetch from remote git repository: {}".format(url),
+ fail_temporarily=True,
+ cwd=self.mirror,
+ )
def fetch(self, alias_override=None): # pylint: disable=arguments-differ
# Resolve the URL for the message
- resolved_url = self.source.translate_url(self.url,
- alias_override=alias_override,
- primary=self.primary)
+ resolved_url = self.source.translate_url(self.url, alias_override=alias_override, primary=self.primary)
- with self.source.timed_activity("Fetching from {}"
- .format(resolved_url),
- silent_nested=True):
+ with self.source.timed_activity("Fetching from {}".format(resolved_url), silent_nested=True):
self.ensure(alias_override)
if not self.has_ref():
self._fetch(alias_override)
@@ -147,48 +151,49 @@ class _GitMirror(SourceFetcher):
return False
# Check if the ref is really there
- rc = self.source.call([self.source.host_git, 'cat-file', '-t', self.ref], cwd=self.mirror)
+ rc = self.source.call([self.source.host_git, "cat-file", "-t", self.ref], cwd=self.mirror)
return rc == 0
def assert_ref(self):
if not self.has_ref():
- raise SourceError("{}: expected ref '{}' was not found in git repository: '{}'"
- .format(self.source, self.ref, self.url))
+ raise SourceError(
+ "{}: expected ref '{}' was not found in git repository: '{}'".format(self.source, self.ref, self.url)
+ )
def latest_commit_with_tags(self, tracking, track_tags=False):
_, output = self.source.check_output(
- [self.source.host_git, 'rev-parse', tracking],
+ [self.source.host_git, "rev-parse", tracking],
fail="Unable to find commit for specified branch name '{}'".format(tracking),
- cwd=self.mirror)
- ref = output.rstrip('\n')
+ cwd=self.mirror,
+ )
+ ref = output.rstrip("\n")
if self.source.ref_format == _RefFormat.GIT_DESCRIBE:
# Prefix the ref with the closest tag, if available,
# to make the ref human readable
exit_code, output = self.source.check_output(
- [self.source.host_git, 'describe', '--tags', '--abbrev=40', '--long', ref],
- cwd=self.mirror)
+ [self.source.host_git, "describe", "--tags", "--abbrev=40", "--long", ref], cwd=self.mirror
+ )
if exit_code == 0:
- ref = output.rstrip('\n')
+ ref = output.rstrip("\n")
if not track_tags:
return ref, []
tags = set()
- for options in [[], ['--first-parent'], ['--tags'], ['--tags', '--first-parent']]:
+ for options in [[], ["--first-parent"], ["--tags"], ["--tags", "--first-parent"]]:
exit_code, output = self.source.check_output(
- [self.source.host_git, 'describe', '--abbrev=0', ref, *options],
- cwd=self.mirror)
+ [self.source.host_git, "describe", "--abbrev=0", ref, *options], cwd=self.mirror
+ )
if exit_code == 0:
tag = output.strip()
_, commit_ref = self.source.check_output(
- [self.source.host_git, 'rev-parse', tag + '^{commit}'],
+ [self.source.host_git, "rev-parse", tag + "^{commit}"],
fail="Unable to resolve tag '{}'".format(tag),
- cwd=self.mirror)
- exit_code = self.source.call(
- [self.source.host_git, 'cat-file', 'tag', tag],
- cwd=self.mirror)
- annotated = (exit_code == 0)
+ cwd=self.mirror,
+ )
+ exit_code = self.source.call([self.source.host_git, "cat-file", "tag", tag], cwd=self.mirror)
+ annotated = exit_code == 0
tags.add((tag, commit_ref.strip(), annotated))
@@ -200,13 +205,17 @@ class _GitMirror(SourceFetcher):
# Using --shared here avoids copying the objects into the checkout, in any
# case we're just checking out a specific commit and then removing the .git/
# directory.
- self.source.call([self.source.host_git, 'clone', '--no-checkout', '--shared', self.mirror, fullpath],
- fail="Failed to create git mirror {} in directory: {}".format(self.mirror, fullpath),
- fail_temporarily=True)
-
- self.source.call([self.source.host_git, 'checkout', '--force', self.ref],
- fail="Failed to checkout git ref {}".format(self.ref),
- cwd=fullpath)
+ self.source.call(
+ [self.source.host_git, "clone", "--no-checkout", "--shared", self.mirror, fullpath],
+ fail="Failed to create git mirror {} in directory: {}".format(self.mirror, fullpath),
+ fail_temporarily=True,
+ )
+
+ self.source.call(
+ [self.source.host_git, "checkout", "--force", self.ref],
+ fail="Failed to checkout git ref {}".format(self.ref),
+ cwd=fullpath,
+ )
# Remove .git dir
shutil.rmtree(os.path.join(fullpath, ".git"))
@@ -217,34 +226,37 @@ class _GitMirror(SourceFetcher):
fullpath = os.path.join(directory, self.path)
url = self.source.translate_url(self.url)
- self.source.call([self.source.host_git, 'clone', '--no-checkout', self.mirror, fullpath],
- fail="Failed to clone git mirror {} in directory: {}".format(self.mirror, fullpath),
- fail_temporarily=True)
+ self.source.call(
+ [self.source.host_git, "clone", "--no-checkout", self.mirror, fullpath],
+ fail="Failed to clone git mirror {} in directory: {}".format(self.mirror, fullpath),
+ fail_temporarily=True,
+ )
- self.source.call([self.source.host_git, 'remote', 'set-url', 'origin', url],
- fail='Failed to add remote origin "{}"'.format(url),
- cwd=fullpath)
+ self.source.call(
+ [self.source.host_git, "remote", "set-url", "origin", url],
+ fail='Failed to add remote origin "{}"'.format(url),
+ cwd=fullpath,
+ )
- self.source.call([self.source.host_git, 'checkout', '--force', self.ref],
- fail="Failed to checkout git ref {}".format(self.ref),
- cwd=fullpath)
+ self.source.call(
+ [self.source.host_git, "checkout", "--force", self.ref],
+ fail="Failed to checkout git ref {}".format(self.ref),
+ cwd=fullpath,
+ )
# List the submodules (path/url tuples) present at the given ref of this repo
def submodule_list(self):
modules = "{}:{}".format(self.ref, GIT_MODULES)
- exit_code, output = self.source.check_output(
- [self.source.host_git, 'show', modules], cwd=self.mirror)
+ exit_code, output = self.source.check_output([self.source.host_git, "show", modules], cwd=self.mirror)
# If git show reports error code 128 here, we take it to mean there is
# no .gitmodules file to display for the given revision.
if exit_code == 128:
return
elif exit_code != 0:
- raise SourceError(
- "{plugin}: Failed to show gitmodules at ref {ref}".format(
- plugin=self, ref=self.ref))
+ raise SourceError("{plugin}: Failed to show gitmodules at ref {ref}".format(plugin=self, ref=self.ref))
- content = '\n'.join([l.strip() for l in output.splitlines()])
+ content = "\n".join([l.strip() for l in output.splitlines()])
io = StringIO(content)
parser = RawConfigParser()
@@ -253,8 +265,8 @@ class _GitMirror(SourceFetcher):
for section in parser.sections():
# validate section name against the 'submodule "foo"' pattern
if re.match(r'submodule "(.*)"', section):
- path = parser.get(section, 'path')
- url = parser.get(section, 'url')
+ path = parser.get(section, "path")
+ url = parser.get(section, "url")
yield (path, url)
@@ -266,31 +278,37 @@ class _GitMirror(SourceFetcher):
# list objects in the parent repo tree to find the commit
# object that corresponds to the submodule
- _, output = self.source.check_output([self.source.host_git, 'ls-tree', ref, submodule],
- fail="ls-tree failed for commit {} and submodule: {}".format(
- ref, submodule),
- cwd=self.mirror)
+ _, output = self.source.check_output(
+ [self.source.host_git, "ls-tree", ref, submodule],
+ fail="ls-tree failed for commit {} and submodule: {}".format(ref, submodule),
+ cwd=self.mirror,
+ )
# read the commit hash from the output
fields = output.split()
- if len(fields) >= 2 and fields[1] == 'commit':
+ if len(fields) >= 2 and fields[1] == "commit":
submodule_commit = output.split()[2]
# fail if the commit hash is invalid
if len(submodule_commit) != 40:
- raise SourceError("{}: Error reading commit information for submodule '{}'"
- .format(self.source, submodule))
+ raise SourceError(
+ "{}: Error reading commit information for submodule '{}'".format(self.source, submodule)
+ )
return submodule_commit
else:
- detail = "The submodule '{}' is defined either in the BuildStream source\n".format(submodule) + \
- "definition, or in a .gitmodules file. But the submodule was never added to the\n" + \
- "underlying git repository with `git submodule add`."
+ detail = (
+ "The submodule '{}' is defined either in the BuildStream source\n".format(submodule)
+ + "definition, or in a .gitmodules file. But the submodule was never added to the\n"
+ + "underlying git repository with `git submodule add`."
+ )
- self.source.warn("{}: Ignoring inconsistent submodule '{}'"
- .format(self.source, submodule), detail=detail,
- warning_token=WARN_INCONSISTENT_SUBMODULE)
+ self.source.warn(
+ "{}: Ignoring inconsistent submodule '{}'".format(self.source, submodule),
+ detail=detail,
+ warning_token=WARN_INCONSISTENT_SUBMODULE,
+ )
return None
@@ -307,17 +325,24 @@ class _GitMirror(SourceFetcher):
# rev-list does not work in case of same rev
shallow.add(self.ref)
else:
- _, out = self.source.check_output([self.source.host_git, 'rev-list',
- '--ancestry-path', '--boundary',
- '{}..{}'.format(commit_ref, self.ref)],
- fail="Failed to get git history {}..{} in directory: {}"
- .format(commit_ref, self.ref, fullpath),
- fail_temporarily=True,
- cwd=self.mirror)
+ _, out = self.source.check_output(
+ [
+ self.source.host_git,
+ "rev-list",
+ "--ancestry-path",
+ "--boundary",
+ "{}..{}".format(commit_ref, self.ref),
+ ],
+ fail="Failed to get git history {}..{} in directory: {}".format(
+ commit_ref, self.ref, fullpath
+ ),
+ fail_temporarily=True,
+ cwd=self.mirror,
+ )
self.source.warn("refs {}..{}: {}".format(commit_ref, self.ref, out.splitlines()))
for line in out.splitlines():
- rev = line.lstrip('-')
- if line[0] == '-':
+ rev = line.lstrip("-")
+ if line[0] == "-":
shallow.add(rev)
else:
included.add(rev)
@@ -325,52 +350,64 @@ class _GitMirror(SourceFetcher):
shallow -= included
included |= shallow
- self.source.call([self.source.host_git, 'init'],
- fail="Cannot initialize git repository: {}".format(fullpath),
- cwd=fullpath)
+ self.source.call(
+ [self.source.host_git, "init"],
+ fail="Cannot initialize git repository: {}".format(fullpath),
+ cwd=fullpath,
+ )
for rev in included:
with TemporaryFile(dir=tmpdir) as commit_file:
- self.source.call([self.source.host_git, 'cat-file', 'commit', rev],
- stdout=commit_file,
- fail="Failed to get commit {}".format(rev),
- cwd=self.mirror)
+ self.source.call(
+ [self.source.host_git, "cat-file", "commit", rev],
+ stdout=commit_file,
+ fail="Failed to get commit {}".format(rev),
+ cwd=self.mirror,
+ )
commit_file.seek(0, 0)
- self.source.call([self.source.host_git, 'hash-object', '-w', '-t', 'commit', '--stdin'],
- stdin=commit_file,
- fail="Failed to add commit object {}".format(rev),
- cwd=fullpath)
-
- with open(os.path.join(fullpath, '.git', 'shallow'), 'w') as shallow_file:
+ self.source.call(
+ [self.source.host_git, "hash-object", "-w", "-t", "commit", "--stdin"],
+ stdin=commit_file,
+ fail="Failed to add commit object {}".format(rev),
+ cwd=fullpath,
+ )
+
+ with open(os.path.join(fullpath, ".git", "shallow"), "w") as shallow_file:
for rev in shallow:
- shallow_file.write('{}\n'.format(rev))
+ shallow_file.write("{}\n".format(rev))
for tag, commit_ref, annotated in self.tags:
if annotated:
with TemporaryFile(dir=tmpdir) as tag_file:
- tag_data = 'object {}\ntype commit\ntag {}\n'.format(commit_ref, tag)
- tag_file.write(tag_data.encode('ascii'))
+ tag_data = "object {}\ntype commit\ntag {}\n".format(commit_ref, tag)
+ tag_file.write(tag_data.encode("ascii"))
tag_file.seek(0, 0)
_, tag_ref = self.source.check_output(
- [self.source.host_git, 'hash-object', '-w', '-t',
- 'tag', '--stdin'],
+ [self.source.host_git, "hash-object", "-w", "-t", "tag", "--stdin"],
stdin=tag_file,
fail="Failed to add tag object {}".format(tag),
- cwd=fullpath)
-
- self.source.call([self.source.host_git, 'tag', tag, tag_ref.strip()],
- fail="Failed to tag: {}".format(tag),
- cwd=fullpath)
+ cwd=fullpath,
+ )
+
+ self.source.call(
+ [self.source.host_git, "tag", tag, tag_ref.strip()],
+ fail="Failed to tag: {}".format(tag),
+ cwd=fullpath,
+ )
else:
- self.source.call([self.source.host_git, 'tag', tag, commit_ref],
- fail="Failed to tag: {}".format(tag),
- cwd=fullpath)
+ self.source.call(
+ [self.source.host_git, "tag", tag, commit_ref],
+ fail="Failed to tag: {}".format(tag),
+ cwd=fullpath,
+ )
- with open(os.path.join(fullpath, '.git', 'HEAD'), 'w') as head:
- self.source.call([self.source.host_git, 'rev-parse', self.ref],
- stdout=head,
- fail="Failed to parse commit {}".format(self.ref),
- cwd=self.mirror)
+ with open(os.path.join(fullpath, ".git", "HEAD"), "w") as head:
+ self.source.call(
+ [self.source.host_git, "rev-parse", self.ref],
+ stdout=head,
+ fail="Failed to parse commit {}".format(self.ref),
+ cwd=self.mirror,
+ )
class _GitSourceBase(Source):
@@ -382,58 +419,57 @@ class _GitSourceBase(Source):
BST_MIRROR_CLASS = _GitMirror
def configure(self, node):
- ref = node.get_str('ref', None)
+ ref = node.get_str("ref", None)
- config_keys = ['url', 'track', 'ref', 'submodules',
- 'checkout-submodules', 'ref-format',
- 'track-tags', 'tags']
+ config_keys = ["url", "track", "ref", "submodules", "checkout-submodules", "ref-format", "track-tags", "tags"]
node.validate_keys(config_keys + Source.COMMON_CONFIG_KEYS)
- tags_node = node.get_sequence('tags', [])
+ tags_node = node.get_sequence("tags", [])
for tag_node in tags_node:
- tag_node.validate_keys(['tag', 'commit', 'annotated'])
+ tag_node.validate_keys(["tag", "commit", "annotated"])
tags = self._load_tags(node)
- self.track_tags = node.get_bool('track-tags', default=False)
+ self.track_tags = node.get_bool("track-tags", default=False)
- self.original_url = node.get_str('url')
- self.mirror = self.BST_MIRROR_CLASS(self, '', self.original_url, ref, tags=tags, primary=True)
- self.tracking = node.get_str('track', None)
+ self.original_url = node.get_str("url")
+ self.mirror = self.BST_MIRROR_CLASS(self, "", self.original_url, ref, tags=tags, primary=True)
+ self.tracking = node.get_str("track", None)
- self.ref_format = node.get_enum('ref-format', _RefFormat, _RefFormat.SHA1)
+ self.ref_format = node.get_enum("ref-format", _RefFormat, _RefFormat.SHA1)
# At this point we now know if the source has a ref and/or a track.
# If it is missing both then we will be unable to track or build.
if self.mirror.ref is None and self.tracking is None:
- raise SourceError("{}: Git sources require a ref and/or track".format(self),
- reason="missing-track-and-ref")
+ raise SourceError(
+ "{}: Git sources require a ref and/or track".format(self), reason="missing-track-and-ref"
+ )
- self.checkout_submodules = node.get_bool('checkout-submodules', default=True)
+ self.checkout_submodules = node.get_bool("checkout-submodules", default=True)
self.submodules = []
# Parse a dict of submodule overrides, stored in the submodule_overrides
# and submodule_checkout_overrides dictionaries.
self.submodule_overrides = {}
self.submodule_checkout_overrides = {}
- modules = node.get_mapping('submodules', {})
+ modules = node.get_mapping("submodules", {})
for path in modules.keys():
submodule = modules.get_mapping(path)
- url = submodule.get_str('url', None)
+ url = submodule.get_str("url", None)
# Make sure to mark all URLs that are specified in the configuration
if url:
self.mark_download_url(url, primary=False)
self.submodule_overrides[path] = url
- if 'checkout' in submodule:
- checkout = submodule.get_bool('checkout')
+ if "checkout" in submodule:
+ checkout = submodule.get_bool("checkout")
self.submodule_checkout_overrides[path] = checkout
self.mark_download_url(self.original_url)
def preflight(self):
# Check if git is installed, get the binary at the same time
- self.host_git = utils.get_host_tool('git')
+ self.host_git = utils.get_host_tool("git")
def get_unique_key(self):
# Here we want to encode the local name of the repository and
@@ -442,7 +478,7 @@ class _GitSourceBase(Source):
key = [self.original_url, self.mirror.ref]
if self.mirror.tags:
tags = {tag: (commit, annotated) for tag, commit, annotated in self.mirror.tags}
- key.append({'tags': tags})
+ key.append({"tags": tags})
# Only modify the cache key with checkout_submodules if it's something
# other than the default behaviour.
@@ -467,7 +503,7 @@ class _GitSourceBase(Source):
return Consistency.INCONSISTENT
def load_ref(self, node):
- self.mirror.ref = node.get_str('ref', None)
+ self.mirror.ref = node.get_str("ref", None)
self.mirror.tags = self._load_tags(node)
def get_ref(self):
@@ -478,25 +514,23 @@ class _GitSourceBase(Source):
def set_ref(self, ref, node):
if not ref:
self.mirror.ref = None
- if 'ref' in node:
- del node['ref']
+ if "ref" in node:
+ del node["ref"]
self.mirror.tags = []
- if 'tags' in node:
- del node['tags']
+ if "tags" in node:
+ del node["tags"]
else:
actual_ref, tags = ref
- node['ref'] = self.mirror.ref = actual_ref
+ node["ref"] = self.mirror.ref = actual_ref
self.mirror.tags = tags
if tags:
- node['tags'] = []
+ node["tags"] = []
for tag, commit_ref, annotated in tags:
- data = {'tag': tag,
- 'commit': commit_ref,
- 'annotated': annotated}
- node['tags'].append(data)
+ data = {"tag": tag, "commit": commit_ref, "annotated": annotated}
+ node["tags"].append(data)
else:
- if 'tags' in node:
- del node['tags']
+ if "tags" in node:
+ del node["tags"]
def track(self): # pylint: disable=arguments-differ
@@ -504,17 +538,13 @@ class _GitSourceBase(Source):
if not self.tracking:
# Is there a better way to check if a ref is given.
if self.mirror.ref is None:
- detail = 'Without a tracking branch ref can not be updated. Please ' + \
- 'provide a ref or a track.'
- raise SourceError("{}: No track or ref".format(self),
- detail=detail, reason="track-attempt-no-track")
+ detail = "Without a tracking branch ref can not be updated. Please " + "provide a ref or a track."
+ raise SourceError("{}: No track or ref".format(self), detail=detail, reason="track-attempt-no-track")
return None
# Resolve the URL for the message
resolved_url = self.translate_url(self.mirror.url)
- with self.timed_activity("Tracking {} from {}"
- .format(self.tracking, resolved_url),
- silent_nested=True):
+ with self.timed_activity("Tracking {} from {}".format(self.tracking, resolved_url), silent_nested=True):
self.mirror.ensure()
self.mirror._fetch()
@@ -578,11 +608,12 @@ class _GitSourceBase(Source):
for path, url in invalid_submodules:
detail.append(" Submodule URL '{}' at path '{}'".format(url, path))
- self.warn("{}: Invalid submodules specified".format(self),
- warning_token=WARN_INVALID_SUBMODULE,
- detail="The following submodules are specified in the source "
- "description but do not exist according to the repository\n\n" +
- "\n".join(detail))
+ self.warn(
+ "{}: Invalid submodules specified".format(self),
+ warning_token=WARN_INVALID_SUBMODULE,
+ detail="The following submodules are specified in the source "
+ "description but do not exist according to the repository\n\n" + "\n".join(detail),
+ )
# Warn about submodules which exist but have not been explicitly configured
if unlisted_submodules:
@@ -590,37 +621,47 @@ class _GitSourceBase(Source):
for path, url in unlisted_submodules:
detail.append(" Submodule URL '{}' at path '{}'".format(url, path))
- self.warn("{}: Unlisted submodules exist".format(self),
- warning_token=WARN_UNLISTED_SUBMODULE,
- detail="The following submodules exist but are not specified " +
- "in the source description\n\n" +
- "\n".join(detail))
+ self.warn(
+ "{}: Unlisted submodules exist".format(self),
+ warning_token=WARN_UNLISTED_SUBMODULE,
+ detail="The following submodules exist but are not specified "
+ + "in the source description\n\n"
+ + "\n".join(detail),
+ )
# Assert that the ref exists in the track tag/branch, if track has been specified.
ref_in_track = False
if self.tracking:
- _, branch = self.check_output([self.host_git, 'branch', '--list', self.tracking,
- '--contains', self.mirror.ref],
- cwd=self.mirror.mirror)
+ _, branch = self.check_output(
+ [self.host_git, "branch", "--list", self.tracking, "--contains", self.mirror.ref],
+ cwd=self.mirror.mirror,
+ )
if branch:
ref_in_track = True
else:
- _, tag = self.check_output([self.host_git, 'tag', '--list', self.tracking,
- '--contains', self.mirror.ref],
- cwd=self.mirror.mirror)
+ _, tag = self.check_output(
+ [self.host_git, "tag", "--list", self.tracking, "--contains", self.mirror.ref],
+ cwd=self.mirror.mirror,
+ )
if tag:
ref_in_track = True
if not ref_in_track:
- detail = "The ref provided for the element does not exist locally " + \
- "in the provided track branch / tag '{}'.\n".format(self.tracking) + \
- "You may wish to track the element to update the ref from '{}' ".format(self.tracking) + \
- "with `bst source track`,\n" + \
- "or examine the upstream at '{}' for the specific ref.".format(self.mirror.url)
+ detail = (
+ "The ref provided for the element does not exist locally "
+ + "in the provided track branch / tag '{}'.\n".format(self.tracking)
+ + "You may wish to track the element to update the ref from '{}' ".format(self.tracking)
+ + "with `bst source track`,\n"
+ + "or examine the upstream at '{}' for the specific ref.".format(self.mirror.url)
+ )
- self.warn("{}: expected ref '{}' was not found in given track '{}' for staged repository: '{}'\n"
- .format(self, self.mirror.ref, self.tracking, self.mirror.url),
- detail=detail, warning_token=CoreWarnings.REF_NOT_IN_TRACK)
+ self.warn(
+ "{}: expected ref '{}' was not found in given track '{}' for staged repository: '{}'\n".format(
+ self, self.mirror.ref, self.tracking, self.mirror.url
+ ),
+ detail=detail,
+ warning_token=CoreWarnings.REF_NOT_IN_TRACK,
+ )
###########################################################
# Local Functions #
@@ -668,11 +709,11 @@ class _GitSourceBase(Source):
def _load_tags(self, node):
tags = []
- tags_node = node.get_sequence('tags', [])
+ tags_node = node.get_sequence("tags", [])
for tag_node in tags_node:
- tag = tag_node.get_str('tag')
- commit_ref = tag_node.get_str('commit')
- annotated = tag_node.get_bool('annotated')
+ tag = tag_node.get_str("tag")
+ commit_ref = tag_node.get_str("commit")
+ annotated = tag_node.get_bool("annotated")
tags.append((tag, commit_ref, annotated))
return tags
diff --git a/src/buildstream/_includes.py b/src/buildstream/_includes.py
index c04601b91..bc0d7718b 100644
--- a/src/buildstream/_includes.py
+++ b/src/buildstream/_includes.py
@@ -14,7 +14,6 @@ from ._exceptions import LoadError, LoadErrorReason
# provenance. Should be true if intended to be
# serialized.
class Includes:
-
def __init__(self, loader, *, copy_tree=False):
self._loader = loader
self._loaded = {}
@@ -29,14 +28,11 @@ class Includes:
# included (set): Fail for recursion if trying to load any files in this set
# current_loader (Loader): Use alternative loader (for junction files)
# only_local (bool): Whether to ignore junction files
- def process(self, node, *,
- included=set(),
- current_loader=None,
- only_local=False):
+ def process(self, node, *, included=set(), current_loader=None, only_local=False):
if current_loader is None:
current_loader = self._loader
- includes_node = node.get_node('(@)', allowed_types=[ScalarNode, SequenceNode], allow_none=True)
+ includes_node = node.get_node("(@)", allowed_types=[ScalarNode, SequenceNode], allow_none=True)
if includes_node:
if type(includes_node) is ScalarNode: # pylint: disable=unidiomatic-typecheck
@@ -44,23 +40,24 @@ class Includes:
else:
includes = includes_node.as_str_list()
- del node['(@)']
+ del node["(@)"]
for include in reversed(includes):
- if only_local and ':' in include:
+ if only_local and ":" in include:
continue
try:
- include_node, file_path, sub_loader = self._include_file(include,
- current_loader)
+ include_node, file_path, sub_loader = self._include_file(include, current_loader)
except LoadError as e:
include_provenance = includes_node.get_provenance()
if e.reason == LoadErrorReason.MISSING_FILE:
message = "{}: Include block references a file that could not be found: '{}'.".format(
- include_provenance, include)
+ include_provenance, include
+ )
raise LoadError(message, LoadErrorReason.MISSING_FILE) from e
if e.reason == LoadErrorReason.LOADING_DIRECTORY:
message = "{}: Include block references a directory instead of a file: '{}'.".format(
- include_provenance, include)
+ include_provenance, include
+ )
raise LoadError(message, LoadErrorReason.LOADING_DIRECTORY) from e
# Otherwise, we don't know the reason, so just raise
@@ -68,8 +65,10 @@ class Includes:
if file_path in included:
include_provenance = includes_node.get_provenance()
- raise LoadError("{}: trying to recursively include {}". format(include_provenance, file_path),
- LoadErrorReason.RECURSIVE_INCLUDE)
+ raise LoadError(
+ "{}: trying to recursively include {}".format(include_provenance, file_path),
+ LoadErrorReason.RECURSIVE_INCLUDE,
+ )
# Because the included node will be modified, we need
# to copy it so that we do not modify the toplevel
# node of the provenance.
@@ -77,19 +76,14 @@ class Includes:
try:
included.add(file_path)
- self.process(include_node, included=included,
- current_loader=sub_loader,
- only_local=only_local)
+ self.process(include_node, included=included, current_loader=sub_loader, only_local=only_local)
finally:
included.remove(file_path)
include_node._composite_under(node)
for value in node.values():
- self._process_value(value,
- included=included,
- current_loader=current_loader,
- only_local=only_local)
+ self._process_value(value, included=included, current_loader=current_loader, only_local=only_local)
# _include_file()
#
@@ -101,8 +95,8 @@ class Includes:
# loader (Loader): Loader for the current project.
def _include_file(self, include, loader):
shortname = include
- if ':' in include:
- junction, include = include.split(':', 1)
+ if ":" in include:
+ junction, include = include.split(":", 1)
junction_loader = loader._get_loader(junction)
current_loader = junction_loader
else:
@@ -112,10 +106,7 @@ class Includes:
file_path = os.path.join(directory, include)
key = (current_loader, file_path)
if key not in self._loaded:
- self._loaded[key] = _yaml.load(file_path,
- shortname=shortname,
- project=project,
- copy_tree=self._copy_tree)
+ self._loaded[key] = _yaml.load(file_path, shortname=shortname, project=project, copy_tree=self._copy_tree)
return self._loaded[key], file_path, current_loader
# _process_value()
@@ -127,20 +118,11 @@ class Includes:
# included (set): Fail for recursion if trying to load any files in this set
# current_loader (Loader): Use alternative loader (for junction files)
# only_local (bool): Whether to ignore junction files
- def _process_value(self, value, *,
- included=set(),
- current_loader=None,
- only_local=False):
+ def _process_value(self, value, *, included=set(), current_loader=None, only_local=False):
value_type = type(value)
if value_type is MappingNode:
- self.process(value,
- included=included,
- current_loader=current_loader,
- only_local=only_local)
+ self.process(value, included=included, current_loader=current_loader, only_local=only_local)
elif value_type is SequenceNode:
for v in value:
- self._process_value(v,
- included=included,
- current_loader=current_loader,
- only_local=only_local)
+ self._process_value(v, included=included, current_loader=current_loader, only_local=only_local)
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index e5859e9e8..da0c0fb29 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -54,8 +54,7 @@ _NO_PROGRESS = object()
# fetch_subprojects (callable): A function to fetch subprojects
# parent (Loader): A parent Loader object, in the case this is a junctioned Loader
#
-class Loader():
-
+class Loader:
def __init__(self, context, project, *, fetch_subprojects, parent=None):
# Ensure we have an absolute path for the base directory
@@ -66,22 +65,22 @@ class Loader():
#
# Public members
#
- self.project = project # The associated Project
- self.loaded = None # The number of loaded Elements
+ self.project = project # The associated Project
+ self.loaded = None # The number of loaded Elements
#
# Private members
#
self._context = context
- self._options = project.options # Project options (OptionPool)
- self._basedir = basedir # Base project directory
+ self._options = project.options # Project options (OptionPool)
+ self._basedir = basedir # Base project directory
self._first_pass_options = project.first_pass_config.options # Project options (OptionPool)
- self._parent = parent # The parent loader
+ self._parent = parent # The parent loader
self._fetch_subprojects = fetch_subprojects
self._meta_elements = {} # Dict of resolved meta elements by name
- self._elements = {} # Dict of elements
- self._loaders = {} # Dict of junction loaders
+ self._elements = {} # Dict of elements
+ self._loaders = {} # Dict of junction loaders
self._includes = Includes(self, copy_tree=True)
@@ -105,9 +104,11 @@ class Loader():
if os.path.isabs(filename):
# XXX Should this just be an assertion ?
# Expect that the caller gives us the right thing at least ?
- raise LoadError("Target '{}' was not specified as a relative "
- "path to the base project directory: {}"
- .format(filename, self._basedir), LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "Target '{}' was not specified as a relative "
+ "path to the base project directory: {}".format(filename, self._basedir),
+ LoadErrorReason.INVALID_DATA,
+ )
self._warn_invalid_elements(targets)
@@ -130,8 +131,7 @@ class Loader():
dummy_target = LoadElement(Node.from_dict({}), "", self)
# Pylint is not very happy with Cython and can't understand 'dependencies' is a list
dummy_target.dependencies.extend( # pylint: disable=no-member
- Dependency(element, Symbol.RUNTIME, False)
- for element in target_elements
+ Dependency(element, Symbol.RUNTIME, False) for element in target_elements
)
with PROFILER.profile(Topics.CIRCULAR_CHECK, "_".join(targets)):
@@ -180,12 +180,12 @@ class Loader():
# too late. The only time that seems just right is here, when preparing
# the child process' copy of the Loader.
#
- del state['_fetch_subprojects']
+ del state["_fetch_subprojects"]
# Also there's no gain in pickling over the caches, and they might
# contain things which are unpleasantly large or unable to pickle.
- del state['_elements']
- del state['_meta_elements']
+ del state["_elements"]
+ del state["_meta_elements"]
return state
@@ -230,14 +230,14 @@ class Loader():
# Load the data and process any conditional statements therein
fullpath = os.path.join(self._basedir, filename)
try:
- node = _yaml.load(fullpath, shortname=filename, copy_tree=rewritable,
- project=self.project)
+ node = _yaml.load(fullpath, shortname=filename, copy_tree=rewritable, project=self.project)
except LoadError as e:
if e.reason == LoadErrorReason.MISSING_FILE:
if self.project.junction:
- message = "Could not find element '{}' in project referred to by junction element '{}'" \
- .format(filename, self.project.junction.name)
+ message = "Could not find element '{}' in project referred to by junction element '{}'".format(
+ filename, self.project.junction.name
+ )
else:
message = "Could not find element '{}' in elements directory '{}'".format(filename, self._basedir)
@@ -262,8 +262,8 @@ class Loader():
if provenance:
message = "{}: {}".format(provenance, message)
detail = None
- if os.path.exists(os.path.join(self._basedir, filename + '.bst')):
- element_name = filename + '.bst'
+ if os.path.exists(os.path.join(self._basedir, filename + ".bst")):
+ element_name = filename + ".bst"
detail = "Did you mean '{}'?\n".format(element_name)
raise LoadError(message, LoadErrorReason.LOADING_DIRECTORY, detail=detail) from e
@@ -333,10 +333,9 @@ class Loader():
if dep.junction:
self._load_file(dep.junction, rewritable, ticker, dep.provenance)
- loader = self._get_loader(dep.junction,
- rewritable=rewritable,
- ticker=ticker,
- provenance=dep.provenance)
+ loader = self._get_loader(
+ dep.junction, rewritable=rewritable, ticker=ticker, provenance=dep.provenance
+ )
dep_element = loader._load_file(dep.name, rewritable, ticker, dep.provenance)
else:
dep_element = self._elements.get(dep.name)
@@ -350,14 +349,16 @@ class Loader():
loader_queue.append((dep_element, list(reversed(dep_deps)), []))
# Pylint is not very happy about Cython and can't understand 'node' is a 'MappingNode'
- if dep_element.node.get_str(Symbol.KIND) == 'junction': # pylint: disable=no-member
- raise LoadError("{}: Cannot depend on junction" .format(dep.provenance),
- LoadErrorReason.INVALID_DATA)
+ if dep_element.node.get_str(Symbol.KIND) == "junction": # pylint: disable=no-member
+ raise LoadError(
+ "{}: Cannot depend on junction".format(dep.provenance), LoadErrorReason.INVALID_DATA
+ )
# All is well, push the dependency onto the LoadElement
# Pylint is not very happy with Cython and can't understand 'dependencies' is a list
current_element[0].dependencies.append( # pylint: disable=no-member
- Dependency(dep_element, dep.dep_type, dep.strict))
+ Dependency(dep_element, dep.dep_type, dep.strict)
+ )
else:
# We do not have any more dependencies to load for this
# element on the queue, report any invalid dep names
@@ -397,12 +398,14 @@ class Loader():
# Create `chain`, the loop of element dependencies from this
# element back to itself, by trimming everything before this
# element from the sequence under consideration.
- chain = [element.full_name for element in sequence[sequence.index(element):]]
+ chain = [element.full_name for element in sequence[sequence.index(element) :]]
chain.append(element.full_name)
- raise LoadError(("Circular dependency detected at element: {}\n" +
- "Dependency chain: {}")
- .format(element.full_name, " -> ".join(chain)),
- LoadErrorReason.CIRCULAR_DEPENDENCY)
+ raise LoadError(
+ ("Circular dependency detected at element: {}\n" + "Dependency chain: {}").format(
+ element.full_name, " -> ".join(chain)
+ ),
+ LoadErrorReason.CIRCULAR_DEPENDENCY,
+ )
if element not in validated:
# We've not already validated this element, so let's
# descend into it to check it out
@@ -447,9 +450,9 @@ class Loader():
workspace = self._context.get_workspaces().get_workspace(element.name)
skip_workspace = True
if workspace:
- workspace_node = {'kind': 'workspace'}
- workspace_node['path'] = workspace.get_absolute_path()
- workspace_node['ref'] = str(workspace.to_dict().get('last_successful', 'ignored'))
+ workspace_node = {"kind": "workspace"}
+ workspace_node["path"] = workspace.get_absolute_path()
+ workspace_node["ref"] = str(workspace.to_dict().get("last_successful", "ignored"))
node[Symbol.SOURCES] = [workspace_node]
skip_workspace = False
@@ -457,7 +460,7 @@ class Loader():
for index, source in enumerate(sources):
kind = source.get_str(Symbol.KIND)
# the workspace source plugin cannot be used unless the element is workspaced
- if kind == 'workspace' and skip_workspace:
+ if kind == "workspace" and skip_workspace:
continue
del source[Symbol.KIND]
@@ -469,15 +472,20 @@ class Loader():
meta_source = MetaSource(element.name, index, element_kind, kind, source, directory)
meta_sources.append(meta_source)
- meta_element = MetaElement(self.project, element.name, element_kind,
- elt_provenance, meta_sources,
- node.get_mapping(Symbol.CONFIG, default={}),
- node.get_mapping(Symbol.VARIABLES, default={}),
- node.get_mapping(Symbol.ENVIRONMENT, default={}),
- node.get_str_list(Symbol.ENV_NOCACHE, default=[]),
- node.get_mapping(Symbol.PUBLIC, default={}),
- node.get_mapping(Symbol.SANDBOX, default={}),
- element_kind == 'junction')
+ meta_element = MetaElement(
+ self.project,
+ element.name,
+ element_kind,
+ elt_provenance,
+ meta_sources,
+ node.get_mapping(Symbol.CONFIG, default={}),
+ node.get_mapping(Symbol.VARIABLES, default={}),
+ node.get_mapping(Symbol.ENVIRONMENT, default={}),
+ node.get_str_list(Symbol.ENV_NOCACHE, default=[]),
+ node.get_mapping(Symbol.PUBLIC, default={}),
+ node.get_mapping(Symbol.SANDBOX, default={}),
+ element_kind == "junction",
+ )
# Cache it now, make sure it's already there before recursing
self._meta_elements[element.name] = meta_element
@@ -522,9 +530,9 @@ class Loader():
else:
meta_dep = loader._meta_elements[name]
- if dep.dep_type != 'runtime':
+ if dep.dep_type != "runtime":
meta_element.build_dependencies.append(meta_dep)
- if dep.dep_type != 'build':
+ if dep.dep_type != "build":
meta_element.dependencies.append(meta_dep)
if dep.strict:
meta_element.strict_dependencies.append(meta_dep)
@@ -543,8 +551,7 @@ class Loader():
# Raises: LoadError
#
# Returns: A Loader or None if specified junction does not exist
- def _get_loader(self, filename, *, rewritable=False, ticker=None, level=0,
- provenance=None):
+ def _get_loader(self, filename, *, rewritable=False, ticker=None, level=0, provenance=None):
provenance_str = ""
if provenance is not None:
@@ -557,17 +564,21 @@ class Loader():
if loader is None:
# do not allow junctions with the same name in different
# subprojects
- raise LoadError("{}Conflicting junction {} in subprojects, define junction in {}"
- .format(provenance_str, filename, self.project.name),
- LoadErrorReason.CONFLICTING_JUNCTION)
+ raise LoadError(
+ "{}Conflicting junction {} in subprojects, define junction in {}".format(
+ provenance_str, filename, self.project.name
+ ),
+ LoadErrorReason.CONFLICTING_JUNCTION,
+ )
return loader
if self._parent:
# junctions in the parent take precedence over junctions defined
# in subprojects
- loader = self._parent._get_loader(filename, rewritable=rewritable, ticker=ticker,
- level=level + 1, provenance=provenance)
+ loader = self._parent._get_loader(
+ filename, rewritable=rewritable, ticker=ticker, level=level + 1, provenance=provenance
+ )
if loader:
self._loaders[filename] = loader
return loader
@@ -599,10 +610,11 @@ class Loader():
# Any task counting *inside* the junction will be handled by
# its loader.
meta_element = self._collect_element_no_deps(self._elements[filename], _NO_PROGRESS)
- if meta_element.kind != 'junction':
- raise LoadError("{}{}: Expected junction but element kind is {}"
- .format(provenance_str, filename, meta_element.kind),
- LoadErrorReason.INVALID_DATA)
+ if meta_element.kind != "junction":
+ raise LoadError(
+ "{}{}: Expected junction but element kind is {}".format(provenance_str, filename, meta_element.kind),
+ LoadErrorReason.INVALID_DATA,
+ )
# We check that junctions have no dependencies a little
# early. This is cheating, since we don't technically know
@@ -618,9 +630,7 @@ class Loader():
# would be nice if this could be done for *all* element types,
# but since we haven't loaded those yet that's impossible.
if self._elements[filename].dependencies:
- raise LoadError(
- "Dependencies are forbidden for 'junction' elements",
- LoadErrorReason.INVALID_JUNCTION)
+ raise LoadError("Dependencies are forbidden for 'junction' elements", LoadErrorReason.INVALID_JUNCTION)
element = Element._new_from_meta(meta_element)
element._update_state()
@@ -628,10 +638,12 @@ class Loader():
# If this junction element points to a sub-sub-project, we need to
# find loader for that project.
if element.target:
- subproject_loader = self._get_loader(element.target_junction, rewritable=rewritable, ticker=ticker,
- level=level, provenance=provenance)
- loader = subproject_loader._get_loader(element.target_element, rewritable=rewritable, ticker=ticker,
- level=level, provenance=provenance)
+ subproject_loader = self._get_loader(
+ element.target_junction, rewritable=rewritable, ticker=ticker, level=level, provenance=provenance
+ )
+ loader = subproject_loader._get_loader(
+ element.target_element, rewritable=rewritable, ticker=ticker, level=level, provenance=provenance
+ )
self._loaders[filename] = loader
return loader
@@ -639,15 +651,18 @@ class Loader():
#
if element._get_consistency() >= Consistency.RESOLVED and not element._source_cached():
if ticker:
- ticker(filename, 'Fetching subproject')
+ ticker(filename, "Fetching subproject")
self._fetch_subprojects([element])
# Handle the case where a subproject has no ref
#
elif element._get_consistency() == Consistency.INCONSISTENT:
detail = "Try tracking the junction element with `bst source track {}`".format(filename)
- raise LoadError("{}Subproject has no ref for junction: {}".format(provenance_str, filename),
- LoadErrorReason.SUBPROJECT_INCONSISTENT, detail=detail)
+ raise LoadError(
+ "{}Subproject has no ref for junction: {}".format(provenance_str, filename),
+ LoadErrorReason.SUBPROJECT_INCONSISTENT,
+ detail=detail,
+ )
sources = list(element.sources())
if len(sources) == 1 and sources[0]._get_local_path():
@@ -656,8 +671,9 @@ class Loader():
else:
# Stage sources
element._set_required()
- basedir = os.path.join(self.project.directory, ".bst", "staged-junctions",
- filename, element._get_cache_key())
+ basedir = os.path.join(
+ self.project.directory, ".bst", "staged-junctions", filename, element._get_cache_key()
+ )
if not os.path.exists(basedir):
os.makedirs(basedir, exist_ok=True)
element._stage_sources_at(basedir)
@@ -666,9 +682,15 @@ class Loader():
project_dir = os.path.join(basedir, element.path)
try:
from .._project import Project # pylint: disable=cyclic-import
- project = Project(project_dir, self._context, junction=element,
- parent_loader=self, search_for_project=False,
- fetch_subprojects=self._fetch_subprojects)
+
+ project = Project(
+ project_dir,
+ self._context,
+ junction=element,
+ parent_loader=self,
+ search_for_project=False,
+ fetch_subprojects=self._fetch_subprojects,
+ )
except LoadError as e:
if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
message = (
@@ -706,7 +728,7 @@ class Loader():
# We allow to split only once since deep junctions names are forbidden.
# Users who want to refer to elements in sub-sub-projects are required
# to create junctions on the top level project.
- junction_path = name.rsplit(':', 1)
+ junction_path = name.rsplit(":", 1)
if len(junction_path) == 1:
return None, junction_path[-1], self
else:
@@ -760,11 +782,17 @@ class Loader():
invalid_elements[CoreWarnings.BAD_CHARACTERS_IN_NAME].append(filename)
if invalid_elements[CoreWarnings.BAD_ELEMENT_SUFFIX]:
- self._warn("Target elements '{}' do not have expected file extension `.bst` "
- "Improperly named elements will not be discoverable by commands"
- .format(invalid_elements[CoreWarnings.BAD_ELEMENT_SUFFIX]),
- warning_token=CoreWarnings.BAD_ELEMENT_SUFFIX)
+ self._warn(
+ "Target elements '{}' do not have expected file extension `.bst` "
+ "Improperly named elements will not be discoverable by commands".format(
+ invalid_elements[CoreWarnings.BAD_ELEMENT_SUFFIX]
+ ),
+ warning_token=CoreWarnings.BAD_ELEMENT_SUFFIX,
+ )
if invalid_elements[CoreWarnings.BAD_CHARACTERS_IN_NAME]:
- self._warn("Target elements '{}' have invalid characerts in their name."
- .format(invalid_elements[CoreWarnings.BAD_CHARACTERS_IN_NAME]),
- warning_token=CoreWarnings.BAD_CHARACTERS_IN_NAME)
+ self._warn(
+ "Target elements '{}' have invalid characters in their name.".format(
+ invalid_elements[CoreWarnings.BAD_CHARACTERS_IN_NAME]
+ ),
+ warning_token=CoreWarnings.BAD_CHARACTERS_IN_NAME,
+ )
diff --git a/src/buildstream/_loader/metaelement.py b/src/buildstream/_loader/metaelement.py
index 00d8560f8..97b0de242 100644
--- a/src/buildstream/_loader/metaelement.py
+++ b/src/buildstream/_loader/metaelement.py
@@ -20,7 +20,7 @@
from ..node import Node
-class MetaElement():
+class MetaElement:
# MetaElement()
#
@@ -40,9 +40,21 @@ class MetaElement():
# sandbox: Configuration specific to the sandbox environment
# first_pass: The element is to be loaded with first pass configuration (junction)
#
- def __init__(self, project, name, kind=None, provenance=None, sources=None, config=None,
- variables=None, environment=None, env_nocache=None, public=None,
- sandbox=None, first_pass=False):
+ def __init__(
+ self,
+ project,
+ name,
+ kind=None,
+ provenance=None,
+ sources=None,
+ config=None,
+ variables=None,
+ environment=None,
+ env_nocache=None,
+ public=None,
+ sandbox=None,
+ first_pass=False,
+ ):
self.project = project
self.name = name
self.kind = kind
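The exploded signature above and the matching MetaElement(...) call in loader.py follow Black's line-splitting rule: when a definition or call no longer fits within the configured line length, each argument is moved onto its own line and a trailing comma is appended, so that adding or removing an argument later touches only a single line. A minimal, self-contained sketch of that behaviour, using hypothetical names rather than anything from this codebase:

def make_widget(
    project,
    name,
    kind=None,
    sources=None,
    config=None,
    first_pass=False,
):
    # Short bodies and short calls are left on one line.
    return (project, name, kind, sources, config, first_pass)


widget = make_widget(
    "demo-project",
    "hello.bst",
    kind="autotools",
    sources=[],
    config={},
    first_pass=False,
)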
diff --git a/src/buildstream/_loader/metasource.py b/src/buildstream/_loader/metasource.py
index da2c0e292..5466d3aa5 100644
--- a/src/buildstream/_loader/metasource.py
+++ b/src/buildstream/_loader/metasource.py
@@ -18,7 +18,7 @@
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
-class MetaSource():
+class MetaSource:
# MetaSource()
#
diff --git a/src/buildstream/_message.py b/src/buildstream/_message.py
index f4f342a6a..a2844ddce 100644
--- a/src/buildstream/_message.py
+++ b/src/buildstream/_message.py
@@ -23,57 +23,55 @@ import os
# Types of status messages.
#
-class MessageType():
- DEBUG = "debug" # Debugging message
- STATUS = "status" # Status message, verbose details
- INFO = "info" # Informative messages
- WARN = "warning" # Warning messages
- ERROR = "error" # Error messages
- BUG = "bug" # An unhandled exception was raised in a plugin
- LOG = "log" # Messages for log files _only_, never in the frontend
+class MessageType:
+ DEBUG = "debug" # Debugging message
+ STATUS = "status" # Status message, verbose details
+ INFO = "info" # Informative messages
+ WARN = "warning" # Warning messages
+ ERROR = "error" # Error messages
+ BUG = "bug" # An unhandled exception was raised in a plugin
+ LOG = "log" # Messages for log files _only_, never in the frontend
# Timed Messages: SUCCESS and FAIL have duration timestamps
- START = "start" # Status start message
- SUCCESS = "success" # Successful status complete message
- FAIL = "failure" # Failing status complete message
+ START = "start" # Status start message
+ SUCCESS = "success" # Successful status complete message
+ FAIL = "failure" # Failing status complete message
SKIPPED = "skipped"
# Messages which should be reported regardless of whether
# they are currently silenced or not
-unconditional_messages = [
- MessageType.INFO,
- MessageType.WARN,
- MessageType.FAIL,
- MessageType.ERROR,
- MessageType.BUG
-]
+unconditional_messages = [MessageType.INFO, MessageType.WARN, MessageType.FAIL, MessageType.ERROR, MessageType.BUG]
# Message object
#
-class Message():
-
- def __init__(self, message_type, message, *,
- element_name=None,
- element_key=None,
- detail=None,
- action_name=None,
- elapsed=None,
- logfile=None,
- sandbox=False,
- scheduler=False):
+class Message:
+ def __init__(
+ self,
+ message_type,
+ message,
+ *,
+ element_name=None,
+ element_key=None,
+ detail=None,
+ action_name=None,
+ elapsed=None,
+ logfile=None,
+ sandbox=False,
+ scheduler=False
+ ):
self.message_type = message_type # Message type
- self.message = message # The message string
- self.element_name = element_name # The instance element name of the issuing plugin
- self.element_key = element_key # The display key of the issuing plugin element
- self.detail = detail # An additional detail string
- self.action_name = action_name # Name of the task queue (fetch, refresh, build, etc)
- self.elapsed = elapsed # The elapsed time, in timed messages
- self.logfile = logfile # The log file path where commands took place
- self.sandbox = sandbox # Whether the error that caused this message used a sandbox
- self.pid = os.getpid() # The process pid
- self.scheduler = scheduler # Whether this is a scheduler level message
+ self.message = message # The message string
+ self.element_name = element_name # The instance element name of the issuing plugin
+ self.element_key = element_key # The display key of the issuing plugin element
+ self.detail = detail # An additional detail string
+ self.action_name = action_name # Name of the task queue (fetch, refresh, build, etc)
+ self.elapsed = elapsed # The elapsed time, in timed messages
+ self.logfile = logfile # The log file path where commands took place
+ self.sandbox = sandbox # Whether the error that caused this message used a sandbox
+ self.pid = os.getpid() # The process pid
+ self.scheduler = scheduler # Whether this is a scheduler level message
self.creation_time = datetime.datetime.now()
if message_type in (MessageType.SUCCESS, MessageType.FAIL):
assert elapsed is not None
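Besides line splitting, two mechanical rules recur throughout these hunks: single-quoted strings are rewritten with double quotes (unless that would force extra escaping), and column-aligned inline comments are reduced to two spaces before the # marker, as in the MessageType and Message attributes above. A short illustrative sketch, with made-up names that are not part of BuildStream:

message_type = "status"  # re-quoted to double quotes, column alignment dropped
levels = {"debug": 0, "status": 1}  # dictionary keys are re-quoted the same way
quoted = 'contains a "quote" already'  # single quotes kept to avoid escaping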
diff --git a/src/buildstream/_messenger.py b/src/buildstream/_messenger.py
index 20c327728..03b2833ec 100644
--- a/src/buildstream/_messenger.py
+++ b/src/buildstream/_messenger.py
@@ -39,15 +39,14 @@ if "BST_TEST_SUITE" in os.environ:
# TimeData class to contain times in an object that can be passed around
# and updated from different places
-class _TimeData():
- __slots__ = ['start_time']
+class _TimeData:
+ __slots__ = ["start_time"]
def __init__(self, start_time):
self.start_time = start_time
-class Messenger():
-
+class Messenger:
def __init__(self):
self._message_handler = None
self._silence_scope_depth = 0
@@ -238,8 +237,9 @@ class Messenger():
detail = "{} of {} subtasks processed".format(task.current_progress, task.maximum_progress)
else:
detail = "{} subtasks processed".format(task.current_progress)
- message = Message(MessageType.SUCCESS, activity_name, elapsed=elapsed, detail=detail,
- element_name=element_name)
+ message = Message(
+ MessageType.SUCCESS, activity_name, elapsed=elapsed, detail=detail, element_name=element_name
+ )
self.message(message)
# recorded_messages()
@@ -274,14 +274,13 @@ class Messenger():
# Create the fully qualified logfile in the log directory,
# appending the pid and .log extension at the end.
- self._log_filename = os.path.join(logdir,
- '{}.{}.log'.format(filename, os.getpid()))
+ self._log_filename = os.path.join(logdir, "{}.{}.log".format(filename, os.getpid()))
# Ensure the directory exists first
directory = os.path.dirname(self._log_filename)
os.makedirs(directory, exist_ok=True)
- with open(self._log_filename, 'a') as logfile:
+ with open(self._log_filename, "a") as logfile:
# Write one last line to the log and flush it to disk
def flush_log():
@@ -291,7 +290,7 @@ class Messenger():
#
# So just try to flush as well as we can at SIGTERM time
try:
- logfile.write('\n\nForcefully terminated\n')
+ logfile.write("\n\nForcefully terminated\n")
logfile.flush()
except RuntimeError:
os.fsync(logfile.fileno())
@@ -352,26 +351,28 @@ class Messenger():
template += ": {message}"
- detail = ''
+ detail = ""
if message.detail is not None:
template += "\n\n{detail}"
- detail = message.detail.rstrip('\n')
+ detail = message.detail.rstrip("\n")
detail = INDENT + INDENT.join(detail.splitlines(True))
timecode = EMPTYTIME
if message.message_type in (MessageType.SUCCESS, MessageType.FAIL):
- hours, remainder = divmod(int(message.elapsed.total_seconds()), 60**2)
+ hours, remainder = divmod(int(message.elapsed.total_seconds()), 60 ** 2)
minutes, seconds = divmod(remainder, 60)
timecode = "{0:02d}:{1:02d}:{2:02d}".format(hours, minutes, seconds)
- text = template.format(timecode=timecode,
- element_name=element_name,
- type=message.message_type.upper(),
- message=message.message,
- detail=detail)
+ text = template.format(
+ timecode=timecode,
+ element_name=element_name,
+ type=message.message_type.upper(),
+ message=message.message,
+ detail=detail,
+ )
# Write to the open log file
- self._log_handle.write('{}\n'.format(text))
+ self._log_handle.write("{}\n".format(text))
self._log_handle.flush()
# get_state_for_child_job_pickling(self)
@@ -399,21 +400,21 @@ class Messenger():
# access to private details of Messenger, but it would open up a window
# where messages wouldn't be handled as expected.
#
- del state['_message_handler']
+ del state["_message_handler"]
# The render status callback is only used in the main process
#
- del state['_render_status_cb']
+ del state["_render_status_cb"]
# The "simple_task" context manager is not needed outside the main
# process. During testing we override it to something that cannot
# pickle, so just drop it when pickling to a child job. Note that it
# will only appear in 'state' if it has been overridden.
#
- state.pop('simple_task', None)
+ state.pop("simple_task", None)
# The State object is not needed outside the main process
- del state['_state']
+ del state["_state"]
return state
diff --git a/src/buildstream/_options/option.py b/src/buildstream/_options/option.py
index 51017be22..71d2f12f3 100644
--- a/src/buildstream/_options/option.py
+++ b/src/buildstream/_options/option.py
@@ -27,11 +27,7 @@ if TYPE_CHECKING:
# Shared symbols for validation purposes
#
-OPTION_SYMBOLS = [
- 'type',
- 'description',
- 'variable'
-]
+OPTION_SYMBOLS = ["type", "description", "variable"]
# Option()
@@ -42,7 +38,7 @@ OPTION_SYMBOLS = [
# the loaded project options is a collection of typed Option
# instances.
#
-class Option():
+class Option:
# Subclasses use this to specify the type name used
# for the yaml format and error messages
@@ -66,12 +62,12 @@ class Option():
def load(self, node):
# We don't use the description, but we do require that options have a
# description.
- node.get_str('description')
- self.variable = node.get_str('variable', default=None)
+ node.get_str("description")
+ self.variable = node.get_str("variable", default=None)
# Assert valid symbol name for variable name
if self.variable is not None:
- _assert_symbol_name(self.variable, 'variable name', ref_node=node.get_node('variable'))
+ _assert_symbol_name(self.variable, "variable name", ref_node=node.get_node("variable"))
# load_value()
#
diff --git a/src/buildstream/_options/optionarch.py b/src/buildstream/_options/optionarch.py
index cbe360f9e..2d663f0ef 100644
--- a/src/buildstream/_options/optionarch.py
+++ b/src/buildstream/_options/optionarch.py
@@ -36,7 +36,7 @@ from .optionenum import OptionEnum
#
class OptionArch(OptionEnum):
- OPTION_TYPE = 'arch'
+ OPTION_TYPE = "arch"
def load(self, node):
super().load_special(node, allow_default_definition=False)
@@ -54,12 +54,14 @@ class OptionArch(OptionEnum):
# Do not terminate the loop early to ensure we validate
# all values in the list.
except PlatformError as e:
- provenance = node.get_sequence('values').scalar_at(index).get_provenance()
+ provenance = node.get_sequence("values").scalar_at(index).get_provenance()
prefix = ""
if provenance:
prefix = "{}: ".format(provenance)
- raise LoadError("{}Invalid value for {} option '{}': {}"
- .format(prefix, self.OPTION_TYPE, self.name, e), LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "{}Invalid value for {} option '{}': {}".format(prefix, self.OPTION_TYPE, self.name, e),
+ LoadErrorReason.INVALID_DATA,
+ )
if default_value is None:
# Host architecture is not supported by the project.
diff --git a/src/buildstream/_options/optionbool.py b/src/buildstream/_options/optionbool.py
index f91cb257d..c7289b936 100644
--- a/src/buildstream/_options/optionbool.py
+++ b/src/buildstream/_options/optionbool.py
@@ -27,13 +27,13 @@ from .option import Option, OPTION_SYMBOLS
#
class OptionBool(Option):
- OPTION_TYPE = 'bool'
+ OPTION_TYPE = "bool"
def load(self, node):
super().load(node)
- node.validate_keys(OPTION_SYMBOLS + ['default'])
- self.value = node.get_bool('default')
+ node.validate_keys(OPTION_SYMBOLS + ["default"])
+ self.value = node.get_bool("default")
def load_value(self, node, *, transform=None):
if transform:
@@ -42,13 +42,14 @@ class OptionBool(Option):
self.value = node.get_bool(self.name)
def set_value(self, value):
- if value in ('True', 'true'):
+ if value in ("True", "true"):
self.value = True
- elif value in ('False', 'false'):
+ elif value in ("False", "false"):
self.value = False
else:
- raise LoadError("Invalid value for boolean option {}: {}".format(self.name, value),
- LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "Invalid value for boolean option {}: {}".format(self.name, value), LoadErrorReason.INVALID_DATA
+ )
def get_value(self):
if self.value:
diff --git a/src/buildstream/_options/optioneltmask.py b/src/buildstream/_options/optioneltmask.py
index 178999fa1..5a0d15f8e 100644
--- a/src/buildstream/_options/optioneltmask.py
+++ b/src/buildstream/_options/optioneltmask.py
@@ -28,7 +28,7 @@ from .optionflags import OptionFlags
#
class OptionEltMask(OptionFlags):
- OPTION_TYPE = 'element-mask'
+ OPTION_TYPE = "element-mask"
def load(self, node):
# Ask the parent constructor to disallow value definitions,
@@ -41,6 +41,6 @@ class OptionEltMask(OptionFlags):
def load_valid_values(self, node):
values = []
for filename in utils.list_relative_paths(self.pool.element_path):
- if filename.endswith('.bst'):
+ if filename.endswith(".bst"):
values.append(filename)
return values
diff --git a/src/buildstream/_options/optionenum.py b/src/buildstream/_options/optionenum.py
index 4a0941369..d30f45696 100644
--- a/src/buildstream/_options/optionenum.py
+++ b/src/buildstream/_options/optionenum.py
@@ -27,7 +27,7 @@ from .option import Option, OPTION_SYMBOLS
#
class OptionEnum(Option):
- OPTION_TYPE = 'enum'
+ OPTION_TYPE = "enum"
def __init__(self, name, definition, pool):
self.values = None
@@ -39,17 +39,20 @@ class OptionEnum(Option):
def load_special(self, node, allow_default_definition=True):
super().load(node)
- valid_symbols = OPTION_SYMBOLS + ['values']
+ valid_symbols = OPTION_SYMBOLS + ["values"]
if allow_default_definition:
- valid_symbols += ['default']
+ valid_symbols += ["default"]
node.validate_keys(valid_symbols)
- self.values = node.get_str_list('values', default=[])
+ self.values = node.get_str_list("values", default=[])
if not self.values:
- raise LoadError("{}: No values specified for {} option '{}'"
- .format(node.get_provenance(), self.OPTION_TYPE, self.name),
- LoadErrorReason.INVALID_DATA,)
+ raise LoadError(
+ "{}: No values specified for {} option '{}'".format(
+ node.get_provenance(), self.OPTION_TYPE, self.name
+ ),
+ LoadErrorReason.INVALID_DATA,
+ )
# Allow subclass to define the default value
self.value = self.load_default_value(node)
@@ -77,13 +80,14 @@ class OptionEnum(Option):
prefix = "{}: ".format(provenance)
else:
prefix = ""
- raise LoadError("{}Invalid value for {} option '{}': {}\n"
- .format(prefix, self.OPTION_TYPE, self.name, value) +
- "Valid values: {}".format(", ".join(self.values)),
- LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "{}Invalid value for {} option '{}': {}\n".format(prefix, self.OPTION_TYPE, self.name, value)
+ + "Valid values: {}".format(", ".join(self.values)),
+ LoadErrorReason.INVALID_DATA,
+ )
def load_default_value(self, node):
- value_node = node.get_scalar('default')
+ value_node = node.get_scalar("default")
value = value_node.as_str()
self.validate(value, value_node)
return value
diff --git a/src/buildstream/_options/optionflags.py b/src/buildstream/_options/optionflags.py
index e5217a718..82ede5649 100644
--- a/src/buildstream/_options/optionflags.py
+++ b/src/buildstream/_options/optionflags.py
@@ -27,7 +27,7 @@ from .option import Option, OPTION_SYMBOLS
#
class OptionFlags(Option):
- OPTION_TYPE = 'flags'
+ OPTION_TYPE = "flags"
def __init__(self, name, definition, pool):
self.values = None
@@ -39,20 +39,23 @@ class OptionFlags(Option):
def load_special(self, node, allow_value_definitions=True):
super().load(node)
- valid_symbols = OPTION_SYMBOLS + ['default']
+ valid_symbols = OPTION_SYMBOLS + ["default"]
if allow_value_definitions:
- valid_symbols += ['values']
+ valid_symbols += ["values"]
node.validate_keys(valid_symbols)
# Allow subclass to define the valid values
self.values = self.load_valid_values(node)
if not self.values:
- raise LoadError("{}: No values specified for {} option '{}'"
- .format(node.get_provenance(), self.OPTION_TYPE, self.name),
- LoadErrorReason.INVALID_DATA)
-
- value_node = node.get_sequence('default', default=[])
+ raise LoadError(
+ "{}: No values specified for {} option '{}'".format(
+ node.get_provenance(), self.OPTION_TYPE, self.name
+ ),
+ LoadErrorReason.INVALID_DATA,
+ )
+
+ value_node = node.get_sequence("default", default=[])
self.value = value_node.as_str_list()
self.validate(self.value, value_node)
@@ -70,7 +73,7 @@ class OptionFlags(Option):
stripped = "".join(value.split())
# Get the comma separated values
- list_value = stripped.split(',')
+ list_value = stripped.split(",")
self.validate(list_value)
self.value = sorted(list_value)
@@ -86,12 +89,13 @@ class OptionFlags(Option):
prefix = "{}: ".format(provenance)
else:
prefix = ""
- raise LoadError("{}Invalid value for flags option '{}': {}\n"
- .format(prefix, self.name, value) +
- "Valid values: {}".format(", ".join(self.values)),
- LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "{}Invalid value for flags option '{}': {}\n".format(prefix, self.name, value)
+ + "Valid values: {}".format(", ".join(self.values)),
+ LoadErrorReason.INVALID_DATA,
+ )
def load_valid_values(self, node):
# Allow the more descriptive error to raise when no values
# exist rather than bailing out here (by specifying default_value)
- return node.get_str_list('values', default=[])
+ return node.get_str_list("values", default=[])
diff --git a/src/buildstream/_options/optionos.py b/src/buildstream/_options/optionos.py
index fcf4552f5..3f4e902c9 100644
--- a/src/buildstream/_options/optionos.py
+++ b/src/buildstream/_options/optionos.py
@@ -1,4 +1,3 @@
-
#
# Copyright (C) 2017 Codethink Limited
#
@@ -26,7 +25,7 @@ from .optionenum import OptionEnum
#
class OptionOS(OptionEnum):
- OPTION_TYPE = 'os'
+ OPTION_TYPE = "os"
def load(self, node):
super().load_special(node, allow_default_definition=False)
diff --git a/src/buildstream/_options/optionpool.py b/src/buildstream/_options/optionpool.py
index a0730c617..f105bb12c 100644
--- a/src/buildstream/_options/optionpool.py
+++ b/src/buildstream/_options/optionpool.py
@@ -50,8 +50,7 @@ class OptionTypes(FastEnum):
OS = OptionOS.OPTION_TYPE
-class OptionPool():
-
+class OptionPool:
def __init__(self, element_path):
# We hold on to the element path for the sake of OptionEltMask
self.element_path = element_path
@@ -59,7 +58,7 @@ class OptionPool():
#
# Private members
#
- self._options = {} # The Options
+ self._options = {} # The Options
self._variables = None # The Options resolved into typed variables
self._environment = None
@@ -69,7 +68,7 @@ class OptionPool():
state = self.__dict__.copy()
# Jinja2 Environments don't appear to be serializable. It is easy
# enough for us to reconstruct this one anyway, so no need to pickle it.
- del state['_environment']
+ del state["_environment"]
return state
def __setstate__(self, state):
@@ -90,7 +89,7 @@ class OptionPool():
# Assert that the option name is a valid symbol
_assert_symbol_name(option_name, "option name", ref_node=option_definition, allow_dashes=False)
- opt_type_name = option_definition.get_enum('type', OptionTypes)
+ opt_type_name = option_definition.get_enum("type", OptionTypes)
opt_type = _OPTION_TYPES[opt_type_name.value]
option = opt_type(option_name, option_definition, self)
@@ -110,8 +109,9 @@ class OptionPool():
option = self._options[option_name]
except KeyError as e:
p = option_value.get_provenance()
- raise LoadError("{}: Unknown option '{}' specified"
- .format(p, option_name), LoadErrorReason.INVALID_DATA) from e
+ raise LoadError(
+ "{}: Unknown option '{}' specified".format(p, option_name), LoadErrorReason.INVALID_DATA
+ ) from e
option.load_value(node, transform=transform)
# load_cli_values()
@@ -129,8 +129,10 @@ class OptionPool():
option = self._options[option_name]
except KeyError as e:
if not ignore_unknown:
- raise LoadError("Unknown option '{}' specified on the command line"
- .format(option_name), LoadErrorReason.INVALID_DATA) from e
+ raise LoadError(
+ "Unknown option '{}' specified on the command line".format(option_name),
+ LoadErrorReason.INVALID_DATA,
+ ) from e
else:
option.set_value(option_value)
@@ -239,11 +241,13 @@ class OptionPool():
elif val == "False":
return False
else: # pragma: nocover
- raise LoadError("Failed to evaluate expression: {}".format(expression),
- LoadErrorReason.EXPRESSION_FAILED)
+ raise LoadError(
+ "Failed to evaluate expression: {}".format(expression), LoadErrorReason.EXPRESSION_FAILED
+ )
except jinja2.exceptions.TemplateError as e:
- raise LoadError("Failed to evaluate expression ({}): {}".format(expression, e),
- LoadErrorReason.EXPRESSION_FAILED)
+ raise LoadError(
+ "Failed to evaluate expression ({}): {}".format(expression, e), LoadErrorReason.EXPRESSION_FAILED
+ )
# Recursion assistant for lists, in case there
# are lists of lists.
@@ -262,25 +266,27 @@ class OptionPool():
# Return true if a conditional was processed.
#
def _process_one_node(self, node):
- conditions = node.get_sequence('(?)', default=None)
- assertion = node.get_str('(!)', default=None)
+ conditions = node.get_sequence("(?)", default=None)
+ assertion = node.get_str("(!)", default=None)
# Process assertions first; we want to abort on the first encountered
# assertion in a given dictionary, and not lose an assertion due to
# it being overwritten by a later assertion which might also trigger.
if assertion is not None:
- p = node.get_scalar('(!)').get_provenance()
+ p = node.get_scalar("(!)").get_provenance()
raise LoadError("{}: {}".format(p, assertion.strip()), LoadErrorReason.USER_ASSERTION)
if conditions is not None:
- del node['(?)']
+ del node["(?)"]
for condition in conditions:
tuples = list(condition.items())
if len(tuples) > 1:
provenance = condition.get_provenance()
- raise LoadError("{}: Conditional statement has more than one key".format(provenance),
- LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "{}: Conditional statement has more than one key".format(provenance),
+ LoadErrorReason.INVALID_DATA,
+ )
expression, value = tuples[0]
try:
@@ -292,8 +298,10 @@ class OptionPool():
if type(value) is not MappingNode: # pylint: disable=unidiomatic-typecheck
provenance = condition.get_provenance()
- raise LoadError("{}: Only values of type 'dict' can be composed.".format(provenance),
- LoadErrorReason.ILLEGAL_COMPOSITE)
+ raise LoadError(
+ "{}: Only values of type 'dict' can be composed.".format(provenance),
+ LoadErrorReason.ILLEGAL_COMPOSITE,
+ )
# Apply the yaml fragment if its condition evaluates to true
if apply_fragment:
diff --git a/src/buildstream/_pipeline.py b/src/buildstream/_pipeline.py
index b9efc7826..0b9ab5f24 100644
--- a/src/buildstream/_pipeline.py
+++ b/src/buildstream/_pipeline.py
@@ -40,27 +40,27 @@ from ._project import ProjectRefStorage
#
# These values correspond to the CLI `--deps` arguments for convenience.
#
-class PipelineSelection():
+class PipelineSelection:
# Select only the target elements in the associated targets
- NONE = 'none'
+ NONE = "none"
# As NONE, but redirect elements that are capable of it
- REDIRECT = 'redirect'
+ REDIRECT = "redirect"
# Select elements which must be built for the associated targets to be built
- PLAN = 'plan'
+ PLAN = "plan"
# All dependencies of all targets, including the targets
- ALL = 'all'
+ ALL = "all"
# All direct build dependencies and their recursive runtime dependencies,
# excluding the targets
- BUILD = 'build'
+ BUILD = "build"
# All direct runtime dependencies and their recursive runtime dependencies,
# including the targets
- RUN = 'run'
+ RUN = "run"
# Pipeline()
@@ -70,12 +70,11 @@ class PipelineSelection():
# context (Context): The Context object
# artifacts (Context): The ArtifactCache object
#
-class Pipeline():
-
+class Pipeline:
def __init__(self, context, project, artifacts):
- self._context = context # The Context
- self._project = project # The toplevel project
+ self._context = context # The Context
+ self._project = project # The toplevel project
#
# Private members
@@ -108,10 +107,7 @@ class Pipeline():
# Now create element groups to match the input target groups
elt_iter = iter(elements)
- element_groups = [
- [next(elt_iter) for i in range(len(group))]
- for group in target_groups
- ]
+ element_groups = [[next(elt_iter) for i in range(len(group))] for group in target_groups]
return tuple(element_groups)
@@ -240,8 +236,7 @@ class Pipeline():
for t in targets:
new_elm = t._get_source_element()
if new_elm != t and not silent:
- self._message(MessageType.INFO, "Element '{}' redirected to '{}'"
- .format(t.name, new_elm.name))
+ self._message(MessageType.INFO, "Element '{}' redirected to '{}'".format(t.name, new_elm.name))
if new_elm not in elements:
elements.append(new_elm)
elif mode == PipelineSelection.PLAN:
@@ -296,9 +291,7 @@ class Pipeline():
# Build a list of 'intersection' elements, i.e. the set of
# elements that lie on the border closest to excepted elements
# between excepted and target elements.
- intersection = list(itertools.chain.from_iterable(
- find_intersection(element) for element in except_targets
- ))
+ intersection = list(itertools.chain.from_iterable(find_intersection(element) for element in except_targets))
# Now use this set of elements to traverse the targeted
# elements, except 'intersection' elements and their unique
@@ -354,10 +347,7 @@ class Pipeline():
#
def subtract_elements(self, elements, subtract):
subtract_set = set(subtract)
- return [
- e for e in elements
- if e not in subtract_set
- ]
+ return [e for e in elements if e not in subtract_set]
# add_elements()
#
@@ -426,14 +416,13 @@ class Pipeline():
for source in element.sources():
if source._get_consistency() == Consistency.INCONSISTENT:
detail += " {} is missing ref\n".format(source)
- detail += '\n'
+ detail += "\n"
detail += "Try tracking these elements first with `bst source track`\n"
raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline")
if inconsistent_workspaced:
- detail = "Some workspaces exist but are not closed\n" + \
- "Try closing them with `bst workspace close`\n\n"
+ detail = "Some workspaces exist but are not closed\n" + "Try closing them with `bst workspace close`\n\n"
for element in inconsistent_workspaced:
detail += " " + element._get_full_name() + "\n"
raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline-workspaced")
@@ -449,8 +438,7 @@ class Pipeline():
uncached = []
with self._context.messenger.timed_activity("Checking sources"):
for element in elements:
- if element._get_consistency() < Consistency.CACHED and \
- not element._source_cached():
+ if element._get_consistency() < Consistency.CACHED and not element._source_cached():
uncached.append(element)
if uncached:
@@ -460,9 +448,11 @@ class Pipeline():
for source in element.sources():
if source._get_consistency() < Consistency.CACHED:
detail += " {}\n".format(source)
- detail += '\n'
- detail += "Try fetching these elements first with `bst source fetch`,\n" + \
- "or run this command with `--fetch` option\n"
+ detail += "\n"
+ detail += (
+ "Try fetching these elements first with `bst source fetch`,\n"
+ + "or run this command with `--fetch` option\n"
+ )
raise PipelineError("Uncached sources", detail=detail, reason="uncached-sources")
@@ -483,10 +473,7 @@ class Pipeline():
# not contain any cross junction elements.
#
def _filter_cross_junctions(self, project, elements):
- return [
- element for element in elements
- if element._get_project() is project
- ]
+ return [element for element in elements if element._get_project() is project]
# _assert_junction_tracking()
#
@@ -511,8 +498,10 @@ class Pipeline():
for element in elements:
element_project = element._get_project()
if element_project is not self._project:
- detail = "Requested to track sources across junction boundaries\n" + \
- "in a project which does not use project.refs ref-storage."
+ detail = (
+ "Requested to track sources across junction boundaries\n"
+ + "in a project which does not use project.refs ref-storage."
+ )
raise PipelineError("Untrackable sources", detail=detail, reason="untrackable-sources")
@@ -522,8 +511,7 @@ class Pipeline():
#
def _message(self, message_type, message, **kwargs):
args = dict(kwargs)
- self._context.messenger.message(
- Message(message_type, message, **args))
+ self._context.messenger.message(Message(message_type, message, **args))
# _Planner()
@@ -533,7 +521,7 @@ class Pipeline():
# parts need to be built depending on build only dependencies
# being cached, and depth sorting for more efficient processing.
#
-class _Planner():
+class _Planner:
def __init__(self):
self.depth_map = OrderedDict()
self.visiting_elements = set()
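The collapsed comprehensions in the _pipeline.py hunk above (element_groups, subtract_elements, _filter_cross_junctions) show the complementary rule: Black joins an expression back onto a single line whenever it fits within the line-length limit. A trivial sketch with placeholder data:

numbers = [1, 2, 3, 4]
# Previously spread over several lines, now joined because it fits:
evens = [n for n in numbers if n % 2 == 0]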
diff --git a/src/buildstream/_platform/darwin.py b/src/buildstream/_platform/darwin.py
index f23535373..06491e8b4 100644
--- a/src/buildstream/_platform/darwin.py
+++ b/src/buildstream/_platform/darwin.py
@@ -59,9 +59,9 @@ class Darwin(Platform):
@staticmethod
def _create_dummy_sandbox(*args, **kwargs):
- kwargs['dummy_reason'] = \
- "OSXFUSE is not supported and there are no supported sandbox " + \
- "technologies for MacOS at this time"
+ kwargs["dummy_reason"] = (
+ "OSXFUSE is not supported and there are no supported sandbox " + "technologies for MacOS at this time"
+ )
return SandboxDummy(*args, **kwargs)
def _setup_dummy_sandbox(self):
diff --git a/src/buildstream/_platform/fallback.py b/src/buildstream/_platform/fallback.py
index 4f7ff8086..b9e9f520d 100644
--- a/src/buildstream/_platform/fallback.py
+++ b/src/buildstream/_platform/fallback.py
@@ -20,15 +20,15 @@ from .platform import Platform
class Fallback(Platform):
-
def _check_dummy_sandbox_config(self, config):
return True
def _create_dummy_sandbox(self, *args, **kwargs):
- kwargs['dummy_reason'] = \
- ("FallBack platform only implements dummy sandbox, "
- "Buildstream may be having issues correctly detecting your platform, "
- "platform can be forced with BST_FORCE_BACKEND")
+ kwargs["dummy_reason"] = (
+ "FallBack platform only implements dummy sandbox, "
+ "Buildstream may be having issues correctly detecting your platform, "
+ "platform can be forced with BST_FORCE_BACKEND"
+ )
return SandboxDummy(*args, **kwargs)
def _setup_dummy_sandbox(self):
diff --git a/src/buildstream/_platform/linux.py b/src/buildstream/_platform/linux.py
index b400bfaac..bdc2e0df1 100644
--- a/src/buildstream/_platform/linux.py
+++ b/src/buildstream/_platform/linux.py
@@ -28,17 +28,16 @@ from .._exceptions import PlatformError
class Linux(Platform):
-
def _setup_sandbox(self, force_sandbox):
sandbox_setups = {
- 'bwrap': self._setup_bwrap_sandbox,
- 'buildbox': self._setup_buildbox_sandbox,
- 'chroot': self._setup_chroot_sandbox,
- 'dummy': self._setup_dummy_sandbox,
+ "bwrap": self._setup_bwrap_sandbox,
+ "buildbox": self._setup_buildbox_sandbox,
+ "chroot": self._setup_chroot_sandbox,
+ "dummy": self._setup_dummy_sandbox,
}
preferred_sandboxes = [
- 'bwrap',
+ "bwrap",
]
self._try_sandboxes(force_sandbox, sandbox_setups, preferred_sandboxes)
@@ -54,11 +53,12 @@ class Linux(Platform):
def can_crossbuild(self, config):
host_arch = self.get_host_arch()
- if ((config.build_arch == "x86-32" and host_arch == "x86-64") or
- (config.build_arch == "aarch32" and host_arch == "aarch64")):
+ if (config.build_arch == "x86-32" and host_arch == "x86-64") or (
+ config.build_arch == "aarch32" and host_arch == "aarch64"
+ ):
if self.linux32 is None:
try:
- utils.get_host_tool('linux32')
+ utils.get_host_tool("linux32")
self.linux32 = True
except utils.ProgramNotFoundError:
self.linux32 = False
@@ -76,7 +76,7 @@ class Linux(Platform):
def _create_dummy_sandbox(self, *args, **kwargs):
dummy_reasons = " and ".join(self.dummy_reasons)
- kwargs['dummy_reason'] = dummy_reasons
+ kwargs["dummy_reason"] = dummy_reasons
return SandboxDummy(*args, **kwargs)
def _setup_dummy_sandbox(self):
@@ -87,11 +87,13 @@ class Linux(Platform):
# Bubble-wrap sandbox methods
def _check_sandbox_config_bwrap(self, config):
from ..sandbox._sandboxbwrap import SandboxBwrap
+
return SandboxBwrap.check_sandbox_config(self, config)
def _create_bwrap_sandbox(self, *args, **kwargs):
from ..sandbox._sandboxbwrap import SandboxBwrap
- kwargs['linux32'] = self.linux32
+
+ kwargs["linux32"] = self.linux32
return SandboxBwrap(*args, **kwargs)
def _setup_bwrap_sandbox(self):
@@ -110,15 +112,18 @@ class Linux(Platform):
# Chroot sandbox methods
def _check_sandbox_config_chroot(self, config):
from ..sandbox._sandboxchroot import SandboxChroot
+
return SandboxChroot.check_sandbox_config(self, config)
@staticmethod
def _create_chroot_sandbox(*args, **kwargs):
from ..sandbox._sandboxchroot import SandboxChroot
+
return SandboxChroot(*args, **kwargs)
def _setup_chroot_sandbox(self):
from ..sandbox._sandboxchroot import SandboxChroot
+
self._check_sandbox(SandboxChroot)
self.check_sandbox_config = self._check_sandbox_config_chroot
self.create_sandbox = Linux._create_chroot_sandbox
@@ -127,18 +132,23 @@ class Linux(Platform):
# Buildbox sandbox methods
def _check_sandbox_config_buildbox(self, config):
from ..sandbox._sandboxbuildbox import SandboxBuildBox
+
return SandboxBuildBox.check_sandbox_config(self, config)
@staticmethod
def _create_buildbox_sandbox(*args, **kwargs):
from ..sandbox._sandboxbuildbox import SandboxBuildBox
- if kwargs.get('allow_real_directory'):
- raise PlatformError("The BuildBox Sandbox does not support real directories.",
- reason="You are using BuildBox sandbox because BST_FORCE_SANBOX=buildbox")
+
+ if kwargs.get("allow_real_directory"):
+ raise PlatformError(
+ "The BuildBox Sandbox does not support real directories.",
+ reason="You are using BuildBox sandbox because BST_FORCE_SANDBOX=buildbox",
+ )
return SandboxBuildBox(*args, **kwargs)
def _setup_buildbox_sandbox(self):
from ..sandbox._sandboxbuildbox import SandboxBuildBox
+
self._check_sandbox(SandboxBuildBox)
self.check_sandbox_config = self._check_sandbox_config_buildbox
self.create_sandbox = self._create_buildbox_sandbox
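The blank lines added after each deferred sandbox import in this hunk reflect another formatting rule: Black separates an import statement from the code that follows it with one empty line, even when the import lives inside a function body. A small sketch with a placeholder module:

def _dump_config(config):
    import json

    # The blank line above is inserted by the formatter, not written by hand.
    return json.dumps(config)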
diff --git a/src/buildstream/_platform/platform.py b/src/buildstream/_platform/platform.py
index af49b9e82..1fddbe82c 100644
--- a/src/buildstream/_platform/platform.py
+++ b/src/buildstream/_platform/platform.py
@@ -29,7 +29,7 @@ from .._exceptions import PlatformError, ImplError, SandboxError
from .. import utils
-class Platform():
+class Platform:
# Platform()
#
# A class to manage platform-specific details. Currently holds the
@@ -45,7 +45,7 @@ class Platform():
self._setup_sandbox(force_sandbox)
def _setup_sandbox(self, force_sandbox):
- sandbox_setups = {'dummy': self._setup_dummy_sandbox}
+ sandbox_setups = {"dummy": self._setup_dummy_sandbox}
preferred_sandboxes = []
self._try_sandboxes(force_sandbox, sandbox_setups, preferred_sandboxes)
@@ -58,12 +58,16 @@ class Platform():
try:
sandbox_setups[force_sandbox]()
except KeyError:
- raise PlatformError("Forced Sandbox is unavailable on this platform: BST_FORCE_SANDBOX"
- " is set to {} but it is not available".format(force_sandbox))
+ raise PlatformError(
+ "Forced Sandbox is unavailable on this platform: BST_FORCE_SANDBOX"
+ " is set to {} but it is not available".format(force_sandbox)
+ )
except SandboxError as Error:
- raise PlatformError("Forced Sandbox Error: BST_FORCE_SANDBOX"
- " is set to {} but cannot be setup".format(force_sandbox),
- detail=" and ".join(self.dummy_reasons)) from Error
+ raise PlatformError(
+ "Forced Sandbox Error: BST_FORCE_SANDBOX"
+ " is set to {} but cannot be setup".format(force_sandbox),
+ detail=" and ".join(self.dummy_reasons),
+ ) from Error
else:
for good_sandbox in preferred_sandboxes:
try:
@@ -73,7 +77,7 @@ class Platform():
continue
except utils.ProgramNotFoundError:
continue
- sandbox_setups['dummy']()
+ sandbox_setups["dummy"]()
def _check_sandbox(self, Sandbox):
try:
@@ -87,29 +91,29 @@ class Platform():
# Meant for testing purposes and therefore hidden in the
# deepest corners of the source code. Try not to abuse this,
# please?
- if os.getenv('BST_FORCE_SANDBOX'):
- force_sandbox = os.getenv('BST_FORCE_SANDBOX')
+ if os.getenv("BST_FORCE_SANDBOX"):
+ force_sandbox = os.getenv("BST_FORCE_SANDBOX")
else:
force_sandbox = None
- if os.getenv('BST_FORCE_BACKEND'):
- backend = os.getenv('BST_FORCE_BACKEND')
- elif sys.platform.startswith('darwin'):
- backend = 'darwin'
- elif sys.platform.startswith('linux'):
- backend = 'linux'
- elif sys.platform == 'win32':
- backend = 'win32'
+ if os.getenv("BST_FORCE_BACKEND"):
+ backend = os.getenv("BST_FORCE_BACKEND")
+ elif sys.platform.startswith("darwin"):
+ backend = "darwin"
+ elif sys.platform.startswith("linux"):
+ backend = "linux"
+ elif sys.platform == "win32":
+ backend = "win32"
else:
- backend = 'fallback'
+ backend = "fallback"
- if backend == 'linux':
+ if backend == "linux":
from .linux import Linux as PlatformImpl # pylint: disable=cyclic-import
- elif backend == 'darwin':
+ elif backend == "darwin":
from .darwin import Darwin as PlatformImpl # pylint: disable=cyclic-import
- elif backend == 'win32':
+ elif backend == "win32":
from .win32 import Win32 as PlatformImpl # pylint: disable=cyclic-import
- elif backend == 'fallback':
+ elif backend == "fallback":
from .fallback import Fallback as PlatformImpl # pylint: disable=cyclic-import
else:
raise PlatformError("No such platform: '{}'".format(backend))
@@ -156,11 +160,11 @@ class Platform():
"sparc64": "sparc-v9",
"sparc-v9": "sparc-v9",
"x86-32": "x86-32",
- "x86-64": "x86-64"
+ "x86-64": "x86-64",
}
try:
- return aliases[arch.replace('_', '-').lower()]
+ return aliases[arch.replace("_", "-").lower()]
except KeyError:
raise PlatformError("Unknown architecture: {}".format(arch))
@@ -188,7 +192,7 @@ class Platform():
def does_multiprocessing_start_require_pickling(self):
# Note that if the start method has not been set before now, it will be
# set to the platform default by `get_start_method`.
- return multiprocessing.get_start_method() != 'fork'
+ return multiprocessing.get_start_method() != "fork"
##################################################################
# Sandbox functions #
@@ -206,12 +210,12 @@ class Platform():
# (Sandbox) A sandbox
#
def create_sandbox(self, *args, **kwargs):
- raise ImplError("Platform {platform} does not implement create_sandbox()"
- .format(platform=type(self).__name__))
+ raise ImplError("Platform {platform} does not implement create_sandbox()".format(platform=type(self).__name__))
def check_sandbox_config(self, config):
- raise ImplError("Platform {platform} does not implement check_sandbox_config()"
- .format(platform=type(self).__name__))
+ raise ImplError(
+ "Platform {platform} does not implement check_sandbox_config()".format(platform=type(self).__name__)
+ )
def maximize_open_file_limit(self):
# Need to set resources for _frontend/app.py as this is dependent on the platform
@@ -230,5 +234,6 @@ class Platform():
resource.setrlimit(resource.RLIMIT_NOFILE, (hard_limit, hard_limit))
def _setup_dummy_sandbox(self):
- raise ImplError("Platform {platform} does not implement _setup_dummy_sandbox()"
- .format(platform=type(self).__name__))
+ raise ImplError(
+ "Platform {platform} does not implement _setup_dummy_sandbox()".format(platform=type(self).__name__)
+ )
diff --git a/src/buildstream/_platform/win32.py b/src/buildstream/_platform/win32.py
index 36680019d..a2529d8f6 100644
--- a/src/buildstream/_platform/win32.py
+++ b/src/buildstream/_platform/win32.py
@@ -20,7 +20,6 @@ from .platform import Platform
class Win32(Platform):
-
def maximize_open_file_limit(self):
# Note that on Windows, we don't have the 'resource' module to help us
# configure open file limits.
@@ -50,7 +49,7 @@ class Win32(Platform):
@staticmethod
def _create_dummy_sandbox(*args, **kwargs):
- kwargs['dummy_reason'] = "There are no supported sandbox technologies for Win32 at this time."
+ kwargs["dummy_reason"] = "There are no supported sandbox technologies for Win32 at this time."
return SandboxDummy(*args, **kwargs)
def _setup_dummy_sandbox(self):
diff --git a/src/buildstream/_plugincontext.py b/src/buildstream/_plugincontext.py
index b07c2b31a..54839e16b 100644
--- a/src/buildstream/_plugincontext.py
+++ b/src/buildstream/_plugincontext.py
@@ -41,10 +41,8 @@ from . import utils
# a given BuildStream project are isolated to their respective
# Pipelines.
#
-class PluginContext():
-
- def __init__(self, plugin_base, base_type, site_plugin_path, *,
- plugin_origins=None, format_versions={}):
+class PluginContext:
+ def __init__(self, plugin_base, base_type, site_plugin_path, *, plugin_origins=None, format_versions={}):
# For pickling across processes, make sure this context has a unique
# identifier, which we prepend to the identifier of each PluginSource.
@@ -59,7 +57,7 @@ class PluginContext():
# Private members
#
self._base_type = base_type # The base class plugins derive from
- self._types = {} # Plugin type lookup table by kind
+ self._types = {} # Plugin type lookup table by kind
self._plugin_origins = plugin_origins or []
# The PluginSource object
@@ -72,8 +70,7 @@ class PluginContext():
def _init_site_source(self):
self._site_source = self._plugin_base.make_plugin_source(
- searchpath=self._site_plugin_path,
- identifier=self._identifier + 'site',
+ searchpath=self._site_plugin_path, identifier=self._identifier + "site",
)
def __getstate__(self):
@@ -93,11 +90,11 @@ class PluginContext():
# this by making sure we are not creating new members, only clearing
# existing ones.
#
- del state['_site_source']
- assert '_types' in state
- state['_types'] = {}
- assert '_alternate_sources' in state
- state['_alternate_sources'] = {}
+ del state["_site_source"]
+ assert "_types" in state
+ state["_types"] = {}
+ assert "_alternate_sources" in state
+ state["_alternate_sources"] = {}
return state
@@ -133,60 +130,51 @@ class PluginContext():
return self._types.values()
def _get_local_plugin_source(self, path):
- if ('local', path) not in self._alternate_sources:
+ if ("local", path) not in self._alternate_sources:
# key by a tuple to avoid collision
- source = self._plugin_base.make_plugin_source(
- searchpath=[path],
- identifier=self._identifier + path,
- )
+ source = self._plugin_base.make_plugin_source(searchpath=[path], identifier=self._identifier + path,)
# Ensure that sources never get garbage collected,
# as they'll take the plugins with them.
- self._alternate_sources[('local', path)] = source
+ self._alternate_sources[("local", path)] = source
else:
- source = self._alternate_sources[('local', path)]
+ source = self._alternate_sources[("local", path)]
return source
def _get_pip_plugin_source(self, package_name, kind):
defaults = None
- if ('pip', package_name) not in self._alternate_sources:
+ if ("pip", package_name) not in self._alternate_sources:
import pkg_resources
+
# key by a tuple to avoid collision
try:
- package = pkg_resources.get_entry_info(package_name,
- 'buildstream.plugins',
- kind)
+ package = pkg_resources.get_entry_info(package_name, "buildstream.plugins", kind)
except pkg_resources.DistributionNotFound as e:
- raise PluginError("Failed to load {} plugin '{}': {}"
- .format(self._base_type.__name__, kind, e)) from e
+ raise PluginError("Failed to load {} plugin '{}': {}".format(self._base_type.__name__, kind, e)) from e
if package is None:
- raise PluginError("Pip package {} does not contain a plugin named '{}'"
- .format(package_name, kind))
+ raise PluginError("Pip package {} does not contain a plugin named '{}'".format(package_name, kind))
location = package.dist.get_resource_filename(
- pkg_resources._manager,
- package.module_name.replace('.', os.sep) + '.py'
+ pkg_resources._manager, package.module_name.replace(".", os.sep) + ".py"
)
# Also load the defaults - required since setuptools
# may need to extract the file.
try:
defaults = package.dist.get_resource_filename(
- pkg_resources._manager,
- package.module_name.replace('.', os.sep) + '.yaml'
+ pkg_resources._manager, package.module_name.replace(".", os.sep) + ".yaml"
)
except KeyError:
# The plugin didn't have an accompanying YAML file
defaults = None
source = self._plugin_base.make_plugin_source(
- searchpath=[os.path.dirname(location)],
- identifier=self._identifier + os.path.dirname(location),
+ searchpath=[os.path.dirname(location)], identifier=self._identifier + os.path.dirname(location),
)
- self._alternate_sources[('pip', package_name)] = source
+ self._alternate_sources[("pip", package_name)] = source
else:
- source = self._alternate_sources[('pip', package_name)]
+ source = self._alternate_sources[("pip", package_name)]
return source, defaults
@@ -199,27 +187,27 @@ class PluginContext():
loaded_dependency = False
for origin in self._plugin_origins:
- if kind not in origin.get_str_list('plugins'):
+ if kind not in origin.get_str_list("plugins"):
continue
- if origin.get_str('origin') == 'local':
- local_path = origin.get_str('path')
+ if origin.get_str("origin") == "local":
+ local_path = origin.get_str("path")
source = self._get_local_plugin_source(local_path)
- elif origin.get_str('origin') == 'pip':
- package_name = origin.get_str('package-name')
+ elif origin.get_str("origin") == "pip":
+ package_name = origin.get_str("package-name")
source, defaults = self._get_pip_plugin_source(package_name, kind)
else:
- raise PluginError("Failed to load plugin '{}': "
- "Unexpected plugin origin '{}'"
- .format(kind, origin.get_str('origin')))
+ raise PluginError(
+ "Failed to load plugin '{}': "
+ "Unexpected plugin origin '{}'".format(kind, origin.get_str("origin"))
+ )
loaded_dependency = True
break
# Fall back to getting the source from site
if not source:
if kind not in self._site_source.list_plugins():
- raise PluginError("No {} type registered for kind '{}'"
- .format(self._base_type.__name__, kind))
+ raise PluginError("No {} type registered for kind '{}'".format(self._base_type.__name__, kind))
source = self._site_source
@@ -241,17 +229,18 @@ class PluginContext():
defaults = os.path.join(plugin_dir, plugin_conf_name)
except ImportError as e:
- raise PluginError("Failed to load {} plugin '{}': {}"
- .format(self._base_type.__name__, kind, e)) from e
+ raise PluginError("Failed to load {} plugin '{}': {}".format(self._base_type.__name__, kind, e)) from e
try:
plugin_type = plugin.setup()
except AttributeError as e:
- raise PluginError("{} plugin '{}' did not provide a setup() function"
- .format(self._base_type.__name__, kind)) from e
+ raise PluginError(
+ "{} plugin '{}' did not provide a setup() function".format(self._base_type.__name__, kind)
+ ) from e
except TypeError as e:
- raise PluginError("setup symbol in {} plugin '{}' is not a function"
- .format(self._base_type.__name__, kind)) from e
+ raise PluginError(
+ "setup symbol in {} plugin '{}' is not a function".format(self._base_type.__name__, kind)
+ ) from e
self._assert_plugin(kind, plugin_type)
self._assert_version(kind, plugin_type)
@@ -259,19 +248,23 @@ class PluginContext():
def _assert_plugin(self, kind, plugin_type):
if kind in self._types:
- raise PluginError("Tried to register {} plugin for existing kind '{}' "
- "(already registered {})"
- .format(self._base_type.__name__, kind, self._types[kind].__name__))
+ raise PluginError(
+ "Tried to register {} plugin for existing kind '{}' "
+ "(already registered {})".format(self._base_type.__name__, kind, self._types[kind].__name__)
+ )
try:
if not issubclass(plugin_type, self._base_type):
- raise PluginError("{} plugin '{}' returned type '{}', which is not a subclass of {}"
- .format(self._base_type.__name__, kind,
- plugin_type.__name__,
- self._base_type.__name__))
+ raise PluginError(
+ "{} plugin '{}' returned type '{}', which is not a subclass of {}".format(
+ self._base_type.__name__, kind, plugin_type.__name__, self._base_type.__name__
+ )
+ )
except TypeError as e:
- raise PluginError("{} plugin '{}' returned something that is not a type (expected subclass of {})"
- .format(self._base_type.__name__, kind,
- self._base_type.__name__)) from e
+ raise PluginError(
+ "{} plugin '{}' returned something that is not a type (expected subclass of {})".format(
+ self._base_type.__name__, kind, self._base_type.__name__
+ )
+ ) from e
def _assert_version(self, kind, plugin_type):
@@ -282,12 +275,16 @@ class PluginContext():
req_minor = plugin_type.BST_REQUIRED_VERSION_MINOR
if (bst_major, bst_minor) < (req_major, req_minor):
- raise PluginError("BuildStream {}.{} is too old for {} plugin '{}' (requires {}.{})"
- .format(
- bst_major, bst_minor,
- self._base_type.__name__, kind,
- plugin_type.BST_REQUIRED_VERSION_MAJOR,
- plugin_type.BST_REQUIRED_VERSION_MINOR))
+ raise PluginError(
+ "BuildStream {}.{} is too old for {} plugin '{}' (requires {}.{})".format(
+ bst_major,
+ bst_minor,
+ self._base_type.__name__,
+ kind,
+ plugin_type.BST_REQUIRED_VERSION_MAJOR,
+ plugin_type.BST_REQUIRED_VERSION_MINOR,
+ )
+ )
# _assert_plugin_format()
#
@@ -296,6 +293,9 @@ class PluginContext():
#
def _assert_plugin_format(self, plugin, version):
if plugin.BST_FORMAT_VERSION < version:
- raise LoadError("{}: Format version {} is too old for requested version {}"
- .format(plugin, plugin.BST_FORMAT_VERSION, version),
- LoadErrorReason.UNSUPPORTED_PLUGIN)
+ raise LoadError(
+ "{}: Format version {} is too old for requested version {}".format(
+ plugin, plugin.BST_FORMAT_VERSION, version
+ ),
+ LoadErrorReason.UNSUPPORTED_PLUGIN,
+ )
diff --git a/src/buildstream/_profile.py b/src/buildstream/_profile.py
index c68d058ad..fdde04ab7 100644
--- a/src/buildstream/_profile.py
+++ b/src/buildstream/_profile.py
@@ -40,15 +40,15 @@ from ._exceptions import ProfileError
# BST_PROFILE=circ-dep-check:sort-deps bst <command> <args>
#
# The special 'all' value will enable all profiles.
-class Topics():
- CIRCULAR_CHECK = 'circ-dep-check'
- SORT_DEPENDENCIES = 'sort-deps'
- LOAD_CONTEXT = 'load-context'
- LOAD_PROJECT = 'load-project'
- LOAD_PIPELINE = 'load-pipeline'
- LOAD_SELECTION = 'load-selection'
- SCHEDULER = 'scheduler'
- ALL = 'all'
+class Topics:
+ CIRCULAR_CHECK = "circ-dep-check"
+ SORT_DEPENDENCIES = "sort-deps"
+ LOAD_CONTEXT = "load-context"
+ LOAD_PROJECT = "load-project"
+ LOAD_PIPELINE = "load-pipeline"
+ LOAD_SELECTION = "load-selection"
+ SCHEDULER = "scheduler"
+ ALL = "all"
class _Profile:
@@ -64,8 +64,8 @@ class _Profile:
os.getcwd(),
"profile-{}-{}".format(
datetime.datetime.fromtimestamp(self.start_time).strftime("%Y%m%dT%H%M%S"),
- self.key.replace("/", "-").replace(".", "-")
- )
+ self.key.replace("/", "-").replace(".", "-"),
+ ),
)
self.log_filename = "{}.log".format(filename_template)
self.cprofile_filename = "{}.cprofile".format(filename_template)
@@ -87,14 +87,16 @@ class _Profile:
self.profiler.disable()
def save(self):
- heading = "\n".join([
- "-" * 64,
- "Profile for key: {}".format(self.key),
- "Started at: {}".format(self.start_time),
- "\n\t{}".format(self.message) if self.message else "",
- "-" * 64,
- "" # for a final new line
- ])
+ heading = "\n".join(
+ [
+ "-" * 64,
+ "Profile for key: {}".format(self.key),
+ "Started at: {}".format(self.start_time),
+ "\n\t{}".format(self.message) if self.message else "",
+ "-" * 64,
+ "", # for a final new line
+ ]
+ )
with open(self.log_filename, "a") as fp:
stats = pstats.Stats(self.profiler, *self._additional_pstats_files, stream=fp)
@@ -116,10 +118,7 @@ class _Profiler:
self._valid_topics = False
if settings:
- self.enabled_topics = {
- topic
- for topic in settings.split(":")
- }
+ self.enabled_topics = {topic for topic in settings.split(":")}
@contextlib.contextmanager
def profile(self, topic, key, message=None):
@@ -170,8 +169,7 @@ class _Profiler:
non_valid_topics = [topic for topic in self.enabled_topics if topic not in vars(Topics).values()]
if non_valid_topics:
- raise ProfileError("Provided BST_PROFILE topics do not exist: {}"
- .format(", ".join(non_valid_topics)))
+ raise ProfileError("Provided BST_PROFILE topics do not exist: {}".format(", ".join(non_valid_topics)))
self._valid_topics = True
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index 54a011e0d..67d41a6b5 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -49,7 +49,7 @@ from ._workspaces import WORKSPACE_PROJECT_FILE
# Project Configuration file
-_PROJECT_CONF_FILE = 'project.conf'
+_PROJECT_CONF_FILE = "project.conf"
# List of all places plugins can come from
@@ -64,8 +64,7 @@ class PluginOrigins(FastEnum):
# A simple object describing the behavior of
# a host mount.
#
-class HostMount():
-
+class HostMount:
def __init__(self, path, host_path=None, optional=False):
# Support environment variable expansion in host mounts
@@ -73,9 +72,9 @@ class HostMount():
if host_path is not None:
host_path = os.path.expandvars(host_path)
- self.path = path # Path inside the sandbox
- self.host_path = host_path # Path on the host
- self.optional = optional # Optional mounts do not incur warnings or errors
+ self.path = path # Path inside the sandbox
+ self.host_path = host_path # Path on the host
+ self.optional = optional # Optional mounts do not incur warnings or errors
if self.host_path is None:
self.host_path = self.path
@@ -86,24 +85,32 @@ class ProjectConfig:
def __init__(self):
self.element_factory = None
self.source_factory = None
- self.options = None # OptionPool
- self.base_variables = {} # The base set of variables
- self.element_overrides = {} # Element specific configurations
- self.source_overrides = {} # Source specific configurations
- self.mirrors = OrderedDict() # contains dicts of alias-mappings to URIs.
- self.default_mirror = None # The name of the preferred mirror.
- self._aliases = None # Aliases dictionary
+ self.options = None # OptionPool
+ self.base_variables = {} # The base set of variables
+ self.element_overrides = {} # Element specific configurations
+ self.source_overrides = {} # Source specific configurations
+ self.mirrors = OrderedDict() # contains dicts of alias-mappings to URIs.
+ self.default_mirror = None # The name of the preferred mirror.
+ self._aliases = None # Aliases dictionary
# Project()
#
# The Project Configuration
#
-class Project():
-
- def __init__(self, directory, context, *, junction=None, cli_options=None,
- default_mirror=None, parent_loader=None,
- search_for_project=True, fetch_subprojects=None):
+class Project:
+ def __init__(
+ self,
+ directory,
+ context,
+ *,
+ junction=None,
+ cli_options=None,
+ default_mirror=None,
+ parent_loader=None,
+ search_for_project=True,
+ fetch_subprojects=None
+ ):
# The project name
self.name = None
@@ -125,31 +132,31 @@ class Project():
self._default_targets = None
# ProjectRefs for the main refs and also for junctions
- self.refs = ProjectRefs(self.directory, 'project.refs')
- self.junction_refs = ProjectRefs(self.directory, 'junction.refs')
+ self.refs = ProjectRefs(self.directory, "project.refs")
+ self.junction_refs = ProjectRefs(self.directory, "junction.refs")
self.config = ProjectConfig()
self.first_pass_config = ProjectConfig()
- self.junction = junction # The junction Element object, if this is a subproject
+ self.junction = junction # The junction Element object, if this is a subproject
- self.ref_storage = None # ProjectRefStorage setting
- self.base_environment = {} # The base set of environment variables
- self.base_env_nocache = None # The base nocache mask (list) for the environment
+ self.ref_storage = None # ProjectRefStorage setting
+ self.base_environment = {} # The base set of environment variables
+ self.base_env_nocache = None # The base nocache mask (list) for the environment
#
# Private Members
#
- self._default_mirror = default_mirror # The name of the preferred mirror.
+ self._default_mirror = default_mirror # The name of the preferred mirror.
self._cli_options = cli_options
- self._fatal_warnings = [] # A list of warnings which should trigger an error
+ self._fatal_warnings = [] # A list of warnings which should trigger an error
- self._shell_command = [] # The default interactive shell command
+ self._shell_command = [] # The default interactive shell command
self._shell_environment = {} # Statically set environment vars
- self._shell_host_files = [] # A list of HostMount objects
+ self._shell_host_files = [] # A list of HostMount objects
self.artifact_cache_specs = None
self.source_cache_specs = None
@@ -163,7 +170,7 @@ class Project():
self._fully_loaded = False
self._project_includes = None
- with PROFILER.profile(Topics.LOAD_PROJECT, self.directory.replace(os.sep, '-')):
+ with PROFILER.profile(Topics.LOAD_PROJECT, self.directory.replace(os.sep, "-")):
self._load(parent_loader=parent_loader, fetch_subprojects=fetch_subprojects)
self._partially_loaded = True
@@ -252,23 +259,24 @@ class Project():
# (LoadError): In case that the project path is not valid or does not
# exist
#
- def get_path_from_node(self, node, *,
- check_is_file=False, check_is_dir=False):
+ def get_path_from_node(self, node, *, check_is_file=False, check_is_dir=False):
path_str = node.as_str()
path = Path(path_str)
full_path = self._absolute_directory_path / path
if full_path.is_symlink():
provenance = node.get_provenance()
- raise LoadError("{}: Specified path '{}' must not point to "
- "symbolic links ".format(provenance, path_str),
- LoadErrorReason.PROJ_PATH_INVALID_KIND)
+ raise LoadError(
+ "{}: Specified path '{}' must not point to " "symbolic links ".format(provenance, path_str),
+ LoadErrorReason.PROJ_PATH_INVALID_KIND,
+ )
- if path.parts and path.parts[0] == '..':
+ if path.parts and path.parts[0] == "..":
provenance = node.get_provenance()
- raise LoadError("{}: Specified path '{}' first component must "
- "not be '..'".format(provenance, path_str),
- LoadErrorReason.PROJ_PATH_INVALID)
+ raise LoadError(
+ "{}: Specified path '{}' first component must " "not be '..'".format(provenance, path_str),
+ LoadErrorReason.PROJ_PATH_INVALID,
+ )
try:
if sys.version_info[0] == 3 and sys.version_info[1] < 6:
@@ -277,55 +285,81 @@ class Project():
full_resolved_path = full_path.resolve(strict=True) # pylint: disable=unexpected-keyword-arg
except FileNotFoundError:
provenance = node.get_provenance()
- raise LoadError("{}: Specified path '{}' does not exist".format(provenance, path_str),
- LoadErrorReason.MISSING_FILE)
+ raise LoadError(
+ "{}: Specified path '{}' does not exist".format(provenance, path_str), LoadErrorReason.MISSING_FILE
+ )
is_inside = self._absolute_directory_path in full_resolved_path.parents or (
- full_resolved_path == self._absolute_directory_path)
+ full_resolved_path == self._absolute_directory_path
+ )
if not is_inside:
provenance = node.get_provenance()
- raise LoadError("{}: Specified path '{}' must not lead outside of the "
- "project directory".format(provenance, path_str),
- LoadErrorReason.PROJ_PATH_INVALID)
+ raise LoadError(
+ "{}: Specified path '{}' must not lead outside of the "
+ "project directory".format(provenance, path_str),
+ LoadErrorReason.PROJ_PATH_INVALID,
+ )
if path.is_absolute():
provenance = node.get_provenance()
- raise LoadError("{}: Absolute path: '{}' invalid.\n"
- "Please specify a path relative to the project's root."
- .format(provenance, path), LoadErrorReason.PROJ_PATH_INVALID)
+ raise LoadError(
+ "{}: Absolute path: '{}' invalid.\n"
+ "Please specify a path relative to the project's root.".format(provenance, path),
+ LoadErrorReason.PROJ_PATH_INVALID,
+ )
- if full_resolved_path.is_socket() or (
- full_resolved_path.is_fifo() or
- full_resolved_path.is_block_device()):
+ if full_resolved_path.is_socket() or (full_resolved_path.is_fifo() or full_resolved_path.is_block_device()):
provenance = node.get_provenance()
- raise LoadError("{}: Specified path '{}' points to an unsupported "
- "file kind".format(provenance, path_str), LoadErrorReason.PROJ_PATH_INVALID_KIND)
+ raise LoadError(
+ "{}: Specified path '{}' points to an unsupported " "file kind".format(provenance, path_str),
+ LoadErrorReason.PROJ_PATH_INVALID_KIND,
+ )
if check_is_file and not full_resolved_path.is_file():
provenance = node.get_provenance()
- raise LoadError("{}: Specified path '{}' is not a regular file"
- .format(provenance, path_str), LoadErrorReason.PROJ_PATH_INVALID_KIND)
+ raise LoadError(
+ "{}: Specified path '{}' is not a regular file".format(provenance, path_str),
+ LoadErrorReason.PROJ_PATH_INVALID_KIND,
+ )
if check_is_dir and not full_resolved_path.is_dir():
provenance = node.get_provenance()
- raise LoadError("{}: Specified path '{}' is not a directory"
- .format(provenance, path_str), LoadErrorReason.PROJ_PATH_INVALID_KIND)
+ raise LoadError(
+ "{}: Specified path '{}' is not a directory".format(provenance, path_str),
+ LoadErrorReason.PROJ_PATH_INVALID_KIND,
+ )
return path_str
def _validate_node(self, node):
- node.validate_keys([
- 'format-version',
- 'element-path', 'variables',
- 'environment', 'environment-nocache',
- 'split-rules', 'elements', 'plugins',
- 'aliases', 'name', 'defaults',
- 'artifacts', 'options',
- 'fail-on-overlap', 'shell', 'fatal-warnings',
- 'ref-storage', 'sandbox', 'mirrors', 'remote-execution',
- 'sources', 'source-caches', '(@)'
- ])
+ node.validate_keys(
+ [
+ "format-version",
+ "element-path",
+ "variables",
+ "environment",
+ "environment-nocache",
+ "split-rules",
+ "elements",
+ "plugins",
+ "aliases",
+ "name",
+ "defaults",
+ "artifacts",
+ "options",
+ "fail-on-overlap",
+ "shell",
+ "fatal-warnings",
+ "ref-storage",
+ "sandbox",
+ "mirrors",
+ "remote-execution",
+ "sources",
+ "source-caches",
+ "(@)",
+ ]
+ )
# create_element()
#
@@ -438,10 +472,7 @@ class Project():
with self._context.messenger.simple_task("Resolving elements") as task:
if task:
task.set_maximum_progress(self.loader.loaded)
- elements = [
- Element._new_from_meta(meta, task)
- for meta in meta_elements
- ]
+ elements = [Element._new_from_meta(meta, task) for meta in meta_elements]
Element._clear_meta_elements_cache()
@@ -450,13 +481,11 @@ class Project():
redundant_refs = Element._get_redundant_source_refs()
if redundant_refs:
detail = "The following inline specified source references will be ignored:\n\n"
- lines = [
- "{}:{}".format(source._get_provenance(), ref)
- for source, ref in redundant_refs
- ]
+ lines = ["{}:{}".format(source._get_provenance(), ref) for source, ref in redundant_refs]
detail += "\n".join(lines)
self._context.messenger.message(
- Message(MessageType.WARN, "Ignoring redundant source references", detail=detail))
+ Message(MessageType.WARN, "Ignoring redundant source references", detail=detail)
+ )
return elements
@@ -590,49 +619,49 @@ class Project():
self._project_conf._composite(pre_config_node)
# Assert project's format version early, before validating toplevel keys
- format_version = pre_config_node.get_int('format-version')
+ format_version = pre_config_node.get_int("format-version")
if format_version < BST_FORMAT_VERSION_MIN:
major, minor = utils.get_bst_version()
raise LoadError(
"Project requested format version {}, but BuildStream {}.{} only supports format version {} or above."
- "Use latest 1.x release"
- .format(format_version, major, minor, BST_FORMAT_VERSION_MIN), LoadErrorReason.UNSUPPORTED_PROJECT)
+ "Use latest 1.x release".format(format_version, major, minor, BST_FORMAT_VERSION_MIN),
+ LoadErrorReason.UNSUPPORTED_PROJECT,
+ )
if BST_FORMAT_VERSION < format_version:
major, minor = utils.get_bst_version()
raise LoadError(
- "Project requested format version {}, but BuildStream {}.{} only supports up until format version {}"
- .format(format_version, major, minor, BST_FORMAT_VERSION), LoadErrorReason.UNSUPPORTED_PROJECT)
+ "Project requested format version {}, but BuildStream {}.{} only supports up until format version {}".format(
+ format_version, major, minor, BST_FORMAT_VERSION
+ ),
+ LoadErrorReason.UNSUPPORTED_PROJECT,
+ )
self._validate_node(pre_config_node)
# The project name, element path and option declarations
# are constant and cannot be overridden by option conditional statements
# FIXME: we should be keeping node information for further composition here
- self.name = self._project_conf.get_str('name')
+ self.name = self._project_conf.get_str("name")
# Validate that project name is a valid symbol name
- _assert_symbol_name(self.name, "project name",
- ref_node=pre_config_node.get_node('name'))
+ _assert_symbol_name(self.name, "project name", ref_node=pre_config_node.get_node("name"))
self.element_path = os.path.join(
- self.directory,
- self.get_path_from_node(pre_config_node.get_scalar('element-path'),
- check_is_dir=True)
+ self.directory, self.get_path_from_node(pre_config_node.get_scalar("element-path"), check_is_dir=True)
)
self.config.options = OptionPool(self.element_path)
self.first_pass_config.options = OptionPool(self.element_path)
- defaults = pre_config_node.get_mapping('defaults')
- defaults.validate_keys(['targets'])
+ defaults = pre_config_node.get_mapping("defaults")
+ defaults.validate_keys(["targets"])
self._default_targets = defaults.get_str_list("targets")
# Fatal warnings
- self._fatal_warnings = pre_config_node.get_str_list('fatal-warnings', default=[])
+ self._fatal_warnings = pre_config_node.get_str_list("fatal-warnings", default=[])
- self.loader = Loader(self._context, self,
- parent=parent_loader, fetch_subprojects=fetch_subprojects)
+ self.loader = Loader(self._context, self, parent=parent_loader, fetch_subprojects=fetch_subprojects)
self._project_includes = Includes(self.loader, copy_tree=False)
@@ -641,16 +670,17 @@ class Project():
config_no_include = self._default_config_node.clone()
project_conf_first_pass._composite(config_no_include)
- self._load_pass(config_no_include, self.first_pass_config,
- ignore_unknown=True)
+ self._load_pass(config_no_include, self.first_pass_config, ignore_unknown=True)
# Use separate file for storing source references
- ref_storage_node = pre_config_node.get_scalar('ref-storage')
+ ref_storage_node = pre_config_node.get_scalar("ref-storage")
self.ref_storage = ref_storage_node.as_str()
if self.ref_storage not in [ProjectRefStorage.INLINE, ProjectRefStorage.PROJECT_REFS]:
p = ref_storage_node.get_provenance()
- raise LoadError("{}: Invalid value '{}' specified for ref-storage"
- .format(p, self.ref_storage), LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "{}: Invalid value '{}' specified for ref-storage".format(p, self.ref_storage),
+ LoadErrorReason.INVALID_DATA,
+ )
if self.ref_storage == ProjectRefStorage.PROJECT_REFS:
self.junction_refs.load(self.first_pass_config.options)
@@ -692,8 +722,7 @@ class Project():
# Load remote-execution configuration for this project
project_specs = SandboxRemote.specs_from_config_node(config, self.directory)
- override_specs = SandboxRemote.specs_from_config_node(
- self._context.get_overrides(self.name), self.directory)
+ override_specs = SandboxRemote.specs_from_config_node(self._context.get_overrides(self.name), self.directory)
if override_specs is not None:
self.remote_execution_specs = override_specs
@@ -703,25 +732,25 @@ class Project():
self.remote_execution_specs = self._context.remote_execution_specs
# Load sandbox environment variables
- self.base_environment = config.get_mapping('environment')
- self.base_env_nocache = config.get_str_list('environment-nocache')
+ self.base_environment = config.get_mapping("environment")
+ self.base_env_nocache = config.get_str_list("environment-nocache")
# Load sandbox configuration
- self._sandbox = config.get_mapping('sandbox')
+ self._sandbox = config.get_mapping("sandbox")
# Load project split rules
- self._splits = config.get_mapping('split-rules')
+ self._splits = config.get_mapping("split-rules")
# Support backwards compatibility for fail-on-overlap
- fail_on_overlap = config.get_scalar('fail-on-overlap', None)
+ fail_on_overlap = config.get_scalar("fail-on-overlap", None)
# Deprecation check
if not fail_on_overlap.is_none():
self._context.messenger.message(
Message(
MessageType.WARN,
- "Use of fail-on-overlap within project.conf " +
- "is deprecated. Consider using fatal-warnings instead."
+ "Use of fail-on-overlap within project.conf "
+ + "is deprecated. Consider using fatal-warnings instead.",
)
)
@@ -733,29 +762,29 @@ class Project():
self.refs.load(self.options)
# Parse shell options
- shell_options = config.get_mapping('shell')
- shell_options.validate_keys(['command', 'environment', 'host-files'])
- self._shell_command = shell_options.get_str_list('command')
+ shell_options = config.get_mapping("shell")
+ shell_options.validate_keys(["command", "environment", "host-files"])
+ self._shell_command = shell_options.get_str_list("command")
# Perform environment expansion right away
- shell_environment = shell_options.get_mapping('environment', default={})
+ shell_environment = shell_options.get_mapping("environment", default={})
for key in shell_environment.keys():
value = shell_environment.get_str(key)
self._shell_environment[key] = os.path.expandvars(value)
        # Host files are parsed as a list for convenience
- host_files = shell_options.get_sequence('host-files', default=[])
+ host_files = shell_options.get_sequence("host-files", default=[])
for host_file in host_files:
if isinstance(host_file, ScalarNode):
mount = HostMount(host_file)
else:
# Some validation
- host_file.validate_keys(['path', 'host_path', 'optional'])
+ host_file.validate_keys(["path", "host_path", "optional"])
# Parse the host mount
- path = host_file.get_str('path')
- host_path = host_file.get_str('host_path', default=None)
- optional = host_file.get_bool('optional', default=False)
+ path = host_file.get_str("path")
+ host_path = host_file.get_str("host_path", default=None)
+ optional = host_file.get_bool("optional", default=False)
mount = HostMount(path, host_path, optional)
self._shell_host_files.append(mount)
@@ -770,22 +799,21 @@ class Project():
# output (ProjectConfig) - ProjectConfig to load configuration onto.
    # ignore_unknown (bool) - Whether option loader should ignore unknown options.
#
- def _load_pass(self, config, output, *,
- ignore_unknown=False):
+ def _load_pass(self, config, output, *, ignore_unknown=False):
# Element and Source type configurations will be composited later onto
# element/source types, so we delete it from here and run our final
# assertion after.
- output.element_overrides = config.get_mapping('elements', default={})
- output.source_overrides = config.get_mapping('sources', default={})
- config.safe_del('elements')
- config.safe_del('sources')
+ output.element_overrides = config.get_mapping("elements", default={})
+ output.source_overrides = config.get_mapping("sources", default={})
+ config.safe_del("elements")
+ config.safe_del("sources")
config._assert_fully_composited()
self._load_plugin_factories(config, output)
# Load project options
- options_node = config.get_mapping('options', default={})
+ options_node = config.get_mapping("options", default={})
output.options.load(options_node)
if self.junction:
# load before user configuration
@@ -793,7 +821,7 @@ class Project():
# Collect option values specified in the user configuration
overrides = self._context.get_overrides(self.name)
- override_options = overrides.get_mapping('options', default={})
+ override_options = overrides.get_mapping("options", default={})
output.options.load_yaml_values(override_options)
if self._cli_options:
output.options.load_cli_values(self._cli_options, ignore_unknown=ignore_unknown)
@@ -812,10 +840,10 @@ class Project():
output.options.process_node(output.source_overrides)
# Load base variables
- output.base_variables = config.get_mapping('variables')
+ output.base_variables = config.get_mapping("variables")
# Add the project name as a default variable
- output.base_variables['project-name'] = self.name
+ output.base_variables["project-name"] = self.name
# Extend variables with automatic variables and option exports
# Initialize it as a string as all variables are processed as strings.
@@ -825,27 +853,24 @@ class Project():
if self._context.build_max_jobs == 0:
# User requested automatic max-jobs
platform = self._context.platform
- output.base_variables['max-jobs'] = str(platform.get_cpu_count(8))
+ output.base_variables["max-jobs"] = str(platform.get_cpu_count(8))
else:
# User requested explicit max-jobs setting
- output.base_variables['max-jobs'] = str(self._context.build_max_jobs)
+ output.base_variables["max-jobs"] = str(self._context.build_max_jobs)
# Export options into variables, if that was requested
output.options.export_variables(output.base_variables)
# Override default_mirror if not set by command-line
- output.default_mirror = self._default_mirror or overrides.get_str(
- 'default-mirror', default=None)
+ output.default_mirror = self._default_mirror or overrides.get_str("default-mirror", default=None)
- mirrors = config.get_sequence('mirrors', default=[])
+ mirrors = config.get_sequence("mirrors", default=[])
for mirror in mirrors:
- allowed_mirror_fields = [
- 'name', 'aliases'
- ]
+ allowed_mirror_fields = ["name", "aliases"]
mirror.validate_keys(allowed_mirror_fields)
- mirror_name = mirror.get_str('name')
+ mirror_name = mirror.get_str("name")
alias_mappings = {}
- for alias_mapping, uris in mirror.get_mapping('aliases').items():
+ for alias_mapping, uris in mirror.get_mapping("aliases").items():
assert type(uris) is SequenceNode # pylint: disable=unidiomatic-typecheck
alias_mappings[alias_mapping] = uris.as_str_list()
output.mirrors[mirror_name] = alias_mappings
@@ -853,7 +878,7 @@ class Project():
output.default_mirror = mirror_name
# Source url aliases
- output._aliases = config.get_mapping('aliases', default={})
+ output._aliases = config.get_mapping("aliases", default={})
# _find_project_dir()
#
@@ -873,9 +898,7 @@ class Project():
def _find_project_dir(self, directory):
workspace_element = None
config_filenames = [_PROJECT_CONF_FILE, WORKSPACE_PROJECT_FILE]
- found_directory, filename = utils._search_upward_for_files(
- directory, config_filenames
- )
+ found_directory, filename = utils._search_upward_for_files(directory, config_filenames)
if filename == _PROJECT_CONF_FILE:
project_directory = found_directory
elif filename == WORKSPACE_PROJECT_FILE:
@@ -885,57 +908,62 @@ class Project():
project_directory = workspace_project.get_default_project_path()
workspace_element = workspace_project.get_default_element()
else:
- raise LoadError("None of {names} found in '{path}' or any of its parent directories"
- .format(names=config_filenames, path=directory), LoadErrorReason.MISSING_PROJECT_CONF)
+ raise LoadError(
+ "None of {names} found in '{path}' or any of its parent directories".format(
+ names=config_filenames, path=directory
+ ),
+ LoadErrorReason.MISSING_PROJECT_CONF,
+ )
return project_directory, workspace_element
def _load_plugin_factories(self, config, output):
- plugin_source_origins = [] # Origins of custom sources
+ plugin_source_origins = [] # Origins of custom sources
plugin_element_origins = [] # Origins of custom elements
# Plugin origins and versions
- origins = config.get_sequence('plugins', default=[])
+ origins = config.get_sequence("plugins", default=[])
source_format_versions = {}
element_format_versions = {}
for origin in origins:
allowed_origin_fields = [
- 'origin', 'sources', 'elements',
- 'package-name', 'path',
+ "origin",
+ "sources",
+ "elements",
+ "package-name",
+ "path",
]
origin.validate_keys(allowed_origin_fields)
# Store source versions for checking later
- source_versions = origin.get_mapping('sources', default={})
+ source_versions = origin.get_mapping("sources", default={})
for key in source_versions.keys():
if key in source_format_versions:
- raise LoadError("Duplicate listing of source '{}'".format(key),
- LoadErrorReason.INVALID_YAML)
+ raise LoadError("Duplicate listing of source '{}'".format(key), LoadErrorReason.INVALID_YAML)
source_format_versions[key] = source_versions.get_int(key)
# Store element versions for checking later
- element_versions = origin.get_mapping('elements', default={})
+ element_versions = origin.get_mapping("elements", default={})
for key in element_versions.keys():
if key in element_format_versions:
- raise LoadError("Duplicate listing of element '{}'".format(key),
- LoadErrorReason.INVALID_YAML)
+ raise LoadError("Duplicate listing of element '{}'".format(key), LoadErrorReason.INVALID_YAML)
element_format_versions[key] = element_versions.get_int(key)
# Store the origins if they're not 'core'.
# core elements are loaded by default, so storing is unnecessary.
- origin_value = origin.get_enum('origin', PluginOrigins)
+ origin_value = origin.get_enum("origin", PluginOrigins)
if origin_value != PluginOrigins.CORE:
- self._store_origin(origin, 'sources', plugin_source_origins)
- self._store_origin(origin, 'elements', plugin_element_origins)
+ self._store_origin(origin, "sources", plugin_source_origins)
+ self._store_origin(origin, "elements", plugin_element_origins)
- pluginbase = PluginBase(package='buildstream.plugins')
- output.element_factory = ElementFactory(pluginbase,
- plugin_origins=plugin_element_origins,
- format_versions=element_format_versions)
- output.source_factory = SourceFactory(pluginbase,
- plugin_origins=plugin_source_origins,
- format_versions=source_format_versions)
+ pluginbase = PluginBase(package="buildstream.plugins")
+ output.element_factory = ElementFactory(
+ pluginbase, plugin_origins=plugin_element_origins, format_versions=element_format_versions
+ )
+ output.source_factory = SourceFactory(
+ pluginbase, plugin_origins=plugin_source_origins, format_versions=source_format_versions
+ )
# _store_origin()
#
@@ -951,25 +979,25 @@ class Project():
# Raises:
# LoadError if 'origin' is an unexpected value
def _store_origin(self, origin, plugin_group, destination):
- expected_groups = ['sources', 'elements']
+ expected_groups = ["sources", "elements"]
if plugin_group not in expected_groups:
- raise LoadError("Unexpected plugin group: {}, expecting {}"
- .format(plugin_group, expected_groups),
- LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "Unexpected plugin group: {}, expecting {}".format(plugin_group, expected_groups),
+ LoadErrorReason.INVALID_DATA,
+ )
if plugin_group in origin.keys():
origin_node = origin.clone()
plugins = origin.get_mapping(plugin_group, default={})
- origin_node['plugins'] = plugins.keys()
+ origin_node["plugins"] = plugins.keys()
for group in expected_groups:
if group in origin_node:
del origin_node[group]
- if origin_node.get_enum('origin', PluginOrigins) == PluginOrigins.LOCAL:
- path = self.get_path_from_node(origin.get_scalar('path'),
- check_is_dir=True)
+ if origin_node.get_enum("origin", PluginOrigins) == PluginOrigins.LOCAL:
+ path = self.get_path_from_node(origin.get_scalar("path"), check_is_dir=True)
# paths are passed in relative to the project, but must be absolute
- origin_node['path'] = os.path.join(self.directory, path)
+ origin_node["path"] = os.path.join(self.directory, path)
destination.append(origin_node)
# _warning_is_fatal():
diff --git a/src/buildstream/_projectrefs.py b/src/buildstream/_projectrefs.py
index 0555488c8..aca7c6712 100644
--- a/src/buildstream/_projectrefs.py
+++ b/src/buildstream/_projectrefs.py
@@ -26,15 +26,15 @@ from ._exceptions import LoadError, LoadErrorReason
# ProjectRefStorage()
#
# Indicates the type of ref storage
-class ProjectRefStorage():
+class ProjectRefStorage:
# Source references are stored inline
#
- INLINE = 'inline'
+ INLINE = "inline"
# Source references are stored in a central project.refs file
#
- PROJECT_REFS = 'project.refs'
+ PROJECT_REFS = "project.refs"
# ProjectRefs()
@@ -45,8 +45,7 @@ class ProjectRefStorage():
# directory (str): The project directory
# base_name (str): The project.refs basename
#
-class ProjectRefs():
-
+class ProjectRefs:
def __init__(self, directory, base_name):
directory = os.path.abspath(directory)
self._fullpath = os.path.join(directory, base_name)
@@ -83,12 +82,12 @@ class ProjectRefs():
self._toplevel_node = _new_synthetic_file(self._fullpath)
self._toplevel_save = self._toplevel_node
- self._toplevel_node.validate_keys(['projects'])
+ self._toplevel_node.validate_keys(["projects"])
# Ensure we create our toplevel entry point on the fly here
for node in [self._toplevel_node, self._toplevel_save]:
- if 'projects' not in node:
- node['projects'] = {}
+ if "projects" not in node:
+ node["projects"] = {}
# lookup_ref()
#
@@ -122,7 +121,7 @@ class ProjectRefs():
# Looks up a ref node in the project.refs file, creates one if ensure is True.
#
def _lookup(self, toplevel, project, element, source_index, *, ensure=False):
- projects = toplevel.get_mapping('projects')
+ projects = toplevel.get_mapping("projects")
# Fetch the project
try:
diff --git a/src/buildstream/_remote.py b/src/buildstream/_remote.py
index ab1dc1924..78f67726a 100644
--- a/src/buildstream/_remote.py
+++ b/src/buildstream/_remote.py
@@ -35,14 +35,14 @@ class RemoteType(FastEnum):
ALL = "all"
def __str__(self):
- return self.name.lower().replace('_', '-')
+ return self.name.lower().replace("_", "-")
# RemoteSpec():
#
# Defines the basic structure of a remote specification.
#
-class RemoteSpec(namedtuple('RemoteSpec', 'url push server_cert client_key client_cert instance_name type')):
+class RemoteSpec(namedtuple("RemoteSpec", "url push server_cert client_key client_cert instance_name type")):
# new_from_config_node
#
@@ -60,15 +60,15 @@ class RemoteSpec(namedtuple('RemoteSpec', 'url push server_cert client_key clien
#
@classmethod
def new_from_config_node(cls, spec_node, basedir=None):
- spec_node.validate_keys(['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance-name', 'type'])
+ spec_node.validate_keys(["url", "push", "server-cert", "client-key", "client-cert", "instance-name", "type"])
- url = spec_node.get_str('url')
+ url = spec_node.get_str("url")
if not url:
- provenance = spec_node.get_node('url').get_provenance()
+ provenance = spec_node.get_node("url").get_provenance()
raise LoadError("{}: empty artifact cache URL".format(provenance), LoadErrorReason.INVALID_DATA)
- push = spec_node.get_bool('push', default=False)
- instance_name = spec_node.get_str('instance-name', default=None)
+ push = spec_node.get_bool("push", default=False)
+ instance_name = spec_node.get_str("instance-name", default=None)
def parse_cert(key):
cert = spec_node.get_str(key, default=None)
@@ -80,20 +80,22 @@ class RemoteSpec(namedtuple('RemoteSpec', 'url push server_cert client_key clien
return cert
- cert_keys = ('server-cert', 'client-key', 'client-cert')
+ cert_keys = ("server-cert", "client-key", "client-cert")
server_cert, client_key, client_cert = tuple(parse_cert(key) for key in cert_keys)
if client_key and not client_cert:
- provenance = spec_node.get_node('client-key').get_provenance()
- raise LoadError("{}: 'client-key' was specified without 'client-cert'".format(provenance),
- LoadErrorReason.INVALID_DATA)
+ provenance = spec_node.get_node("client-key").get_provenance()
+ raise LoadError(
+ "{}: 'client-key' was specified without 'client-cert'".format(provenance), LoadErrorReason.INVALID_DATA
+ )
if client_cert and not client_key:
- provenance = spec_node.get_node('client-cert').get_provenance()
- raise LoadError("{}: 'client-cert' was specified without 'client-key'".format(provenance),
- LoadErrorReason.INVALID_DATA)
+ provenance = spec_node.get_node("client-cert").get_provenance()
+ raise LoadError(
+ "{}: 'client-cert' was specified without 'client-key'".format(provenance), LoadErrorReason.INVALID_DATA
+ )
- type_ = spec_node.get_enum('type', RemoteType, default=RemoteType.ALL)
+ type_ = spec_node.get_enum("type", RemoteType, default=RemoteType.ALL)
return cls(url, push, server_cert, client_key, client_cert, instance_name, type_)
@@ -108,11 +110,11 @@ class RemoteSpec(namedtuple('RemoteSpec', 'url push server_cert client_key clien
RemoteSpec.__new__.__defaults__ = ( # type: ignore
# mandatory # url - The url of the remote
# mandatory # push - Whether the remote should be used for pushing
- None, # server_cert - The server certificate
- None, # client_key - The (private) client key
- None, # client_cert - The (public) client certificate
- None, # instance_name - The (grpc) instance name of the remote
- RemoteType.ALL # type - The type of the remote (index, storage, both)
+ None, # server_cert - The server certificate
+ None, # client_key - The (private) client key
+ None, # client_cert - The (public) client certificate
+ None, # instance_name - The (grpc) instance name of the remote
+ RemoteType.ALL, # type - The type of the remote (index, storage, both)
)
@@ -126,7 +128,7 @@ RemoteSpec.__new__.__defaults__ = ( # type: ignore
# Customization for the particular protocol is expected to be
# performed in children.
#
-class BaseRemote():
+class BaseRemote:
key_name = None
def __init__(self, spec):
@@ -154,25 +156,24 @@ class BaseRemote():
        # Set up the communication channel
url = urlparse(self.spec.url)
- if url.scheme == 'http':
+ if url.scheme == "http":
port = url.port or 80
- self.channel = grpc.insecure_channel('{}:{}'.format(url.hostname, port))
- elif url.scheme == 'https':
+ self.channel = grpc.insecure_channel("{}:{}".format(url.hostname, port))
+ elif url.scheme == "https":
port = url.port or 443
try:
server_cert, client_key, client_cert = _read_files(
- self.spec.server_cert,
- self.spec.client_key,
- self.spec.client_cert)
+ self.spec.server_cert, self.spec.client_key, self.spec.client_cert
+ )
except FileNotFoundError as e:
raise RemoteError("Could not read certificates: {}".format(e)) from e
self.server_cert = server_cert
self.client_key = client_key
self.client_cert = client_cert
- credentials = grpc.ssl_channel_credentials(root_certificates=self.server_cert,
- private_key=self.client_key,
- certificate_chain=self.client_cert)
- self.channel = grpc.secure_channel('{}:{}'.format(url.hostname, port), credentials)
+ credentials = grpc.ssl_channel_credentials(
+ root_certificates=self.server_cert, private_key=self.client_key, certificate_chain=self.client_cert
+ )
+ self.channel = grpc.secure_channel("{}:{}".format(url.hostname, port), credentials)
else:
raise RemoteError("Unsupported URL: {}".format(self.spec.url))
@@ -258,7 +259,8 @@ class BaseRemote():
def _read_files(*files):
def read_file(f):
if f:
- with open(f, 'rb') as data:
+ with open(f, "rb") as data:
return data.read()
return None
+
return (read_file(f) for f in files)
diff --git a/src/buildstream/_scheduler/jobs/elementjob.py b/src/buildstream/_scheduler/jobs/elementjob.py
index 246eb75c6..6e035be9c 100644
--- a/src/buildstream/_scheduler/jobs/elementjob.py
+++ b/src/buildstream/_scheduler/jobs/elementjob.py
@@ -69,9 +69,9 @@ class ElementJob(Job):
super().__init__(*args, **kwargs)
self.set_name(element._get_full_name())
self.queue = queue
- self._element = element # Set the Element pertaining to the job
- self._action_cb = action_cb # The action callable function
- self._complete_cb = complete_cb # The complete callable function
+ self._element = element # Set the Element pertaining to the job
+ self._action_cb = action_cb # The action callable function
+ self._complete_cb = complete_cb # The complete callable function
# Set the plugin element name & key for logging purposes
self.set_message_element_name(self.name)
@@ -97,9 +97,7 @@ class ChildElementJob(ChildJob):
# This should probably be omitted for non-build tasks but it's harmless here
elt_env = self._element.get_environment()
env_dump = yaml.round_trip_dump(elt_env, default_flow_style=False, allow_unicode=True)
- self.message(MessageType.LOG,
- "Build environment for element {}".format(self._element.name),
- detail=env_dump)
+ self.message(MessageType.LOG, "Build environment for element {}".format(self._element.name), detail=env_dump)
# Run the action
return self._action_cb(self._element)
@@ -109,6 +107,6 @@ class ChildElementJob(ChildJob):
workspace = self._element._get_workspace()
if workspace is not None:
- data['workspace'] = workspace.to_dict()
+ data["workspace"] = workspace.to_dict()
return data
diff --git a/src/buildstream/_scheduler/jobs/job.py b/src/buildstream/_scheduler/jobs/job.py
index 3363d7b60..e7866bcd4 100644
--- a/src/buildstream/_scheduler/jobs/job.py
+++ b/src/buildstream/_scheduler/jobs/job.py
@@ -66,7 +66,7 @@ class JobStatus(FastEnum):
# Used to distinguish between status messages and return values
-class _Envelope():
+class _Envelope:
def __init__(self, message_type, message):
self.message_type = message_type
self.message = message
@@ -115,35 +115,34 @@ class _MessageType(FastEnum):
# that should be used - should contain {pid}.
# max_retries (int): The maximum number of retries
#
-class Job():
-
+class Job:
def __init__(self, scheduler, action_name, logfile, *, max_retries=0):
#
# Public members
#
- self.name = None # The name of the job, set by the job's subclass
- self.action_name = action_name # The action name for the Queue
- self.child_data = None # Data to be sent to the main process
+ self.name = None # The name of the job, set by the job's subclass
+ self.action_name = action_name # The action name for the Queue
+ self.child_data = None # Data to be sent to the main process
#
# Private members
#
- self._scheduler = scheduler # The scheduler
- self._queue = None # A message passing queue
- self._process = None # The Process object
- self._watcher = None # Child process watcher
- self._listening = False # Whether the parent is currently listening
- self._suspended = False # Whether this job is currently suspended
- self._max_retries = max_retries # Maximum number of automatic retries
- self._result = None # Return value of child action in the parent
- self._tries = 0 # Try count, for retryable jobs
- self._terminated = False # Whether this job has been explicitly terminated
+ self._scheduler = scheduler # The scheduler
+ self._queue = None # A message passing queue
+ self._process = None # The Process object
+ self._watcher = None # Child process watcher
+ self._listening = False # Whether the parent is currently listening
+ self._suspended = False # Whether this job is currently suspended
+ self._max_retries = max_retries # Maximum number of automatic retries
+ self._result = None # Return value of child action in the parent
+ self._tries = 0 # Try count, for retryable jobs
+ self._terminated = False # Whether this job has been explicitly terminated
self._logfile = logfile
- self._message_element_name = None # The plugin instance element name for messaging
- self._message_element_key = None # The element key for messaging
- self._element = None # The Element() passed to the Job() constructor, if applicable
+ self._message_element_name = None # The plugin instance element name for messaging
+ self._message_element_key = None # The element key for messaging
+ self._element = None # The Element() passed to the Job() constructor, if applicable
# set_name()
#
@@ -170,23 +169,16 @@ class Job():
self._max_retries,
self._tries,
self._message_element_name,
- self._message_element_key
+ self._message_element_key,
)
if self._scheduler.context.platform.does_multiprocessing_start_require_pickling():
- pickled = pickle_child_job(
- child_job,
- self._scheduler.context.get_projects(),
- )
+ pickled = pickle_child_job(child_job, self._scheduler.context.get_projects(),)
self._process = _multiprocessing.AsyncioSafeProcess(
- target=do_pickled_child_job,
- args=[pickled, self._queue],
+ target=do_pickled_child_job, args=[pickled, self._queue],
)
else:
- self._process = _multiprocessing.AsyncioSafeProcess(
- target=child_job.child_action,
- args=[self._queue],
- )
+ self._process = _multiprocessing.AsyncioSafeProcess(target=child_job.child_action, args=[self._queue],)
# Block signals which are handled in the main process such that
# the child process does not inherit the parent's state, but the main
@@ -257,8 +249,7 @@ class Job():
#
def kill(self):
# Force kill
- self.message(MessageType.WARN,
- "{} did not terminate gracefully, killing".format(self.action_name))
+ self.message(MessageType.WARN, "{} did not terminate gracefully, killing".format(self.action_name))
utils._kill_process_tree(self._process.pid)
# suspend()
@@ -267,8 +258,7 @@ class Job():
#
def suspend(self):
if not self._suspended:
- self.message(MessageType.STATUS,
- "{} suspending".format(self.action_name))
+ self.message(MessageType.STATUS, "{} suspending".format(self.action_name))
try:
# Use SIGTSTP so that child processes may handle and propagate
@@ -292,8 +282,7 @@ class Job():
def resume(self, silent=False):
if self._suspended:
if not silent and not self._scheduler.terminated:
- self.message(MessageType.STATUS,
- "{} resuming".format(self.action_name))
+ self.message(MessageType.STATUS, "{} resuming".format(self.action_name))
os.kill(self._process.pid, signal.SIGCONT)
self._suspended = False
@@ -335,7 +324,7 @@ class Job():
# override 'element_name' and 'element_key' this way.
#
def message(self, message_type, message, element_name=None, element_key=None, **kwargs):
- kwargs['scheduler'] = True
+ kwargs["scheduler"] = True
# If default name & key values not provided, set as given job attributes
if element_name is None:
element_name = self._message_element_name
@@ -373,8 +362,7 @@ class Job():
# lists, dicts, numbers, but not Element instances).
#
def handle_message(self, message):
- raise ImplError("Job '{kind}' does not implement handle_message()"
- .format(kind=type(self).__name__))
+ raise ImplError("Job '{kind}' does not implement handle_message()".format(kind=type(self).__name__))
# parent_complete()
#
@@ -386,8 +374,7 @@ class Job():
# result (any): The result returned by child_process().
#
def parent_complete(self, status, result):
- raise ImplError("Job '{kind}' does not implement parent_complete()"
- .format(kind=type(self).__name__))
+ raise ImplError("Job '{kind}' does not implement parent_complete()".format(kind=type(self).__name__))
# create_child_job()
#
@@ -405,8 +392,7 @@ class Job():
# (ChildJob): An instance of a subclass of ChildJob.
#
def create_child_job(self, *args, **kwargs):
- raise ImplError("Job '{kind}' does not implement create_child_job()"
- .format(kind=type(self).__name__))
+ raise ImplError("Job '{kind}' does not implement create_child_job()".format(kind=type(self).__name__))
#######################################################
# Local Private Methods #
@@ -437,9 +423,11 @@ class Job():
returncode = _ReturnCode(returncode)
except ValueError:
# An unexpected return code was returned; fail permanently and report
- self.message(MessageType.ERROR,
- "Internal job process unexpectedly died with exit code {}".format(returncode),
- logfile=self._logfile)
+ self.message(
+ MessageType.ERROR,
+ "Internal job process unexpectedly died with exit code {}".format(returncode),
+ logfile=self._logfile,
+ )
returncode = _ReturnCode.PERM_FAIL
# We don't want to retry if we got OK or a permanent fail.
@@ -503,8 +491,7 @@ class Job():
# For regression tests only, save the last error domain / reason
# reported from a child task in the main process, this global state
# is currently managed in _exceptions.py
- set_last_task_error(envelope.message['domain'],
- envelope.message['reason'])
+ set_last_task_error(envelope.message["domain"], envelope.message["reason"])
elif envelope.message_type is _MessageType.RESULT:
assert self._result is None
self._result = envelope.message
@@ -514,8 +501,7 @@ class Job():
elif envelope.message_type is _MessageType.SUBCLASS_CUSTOM_MESSAGE:
self.handle_message(envelope.message)
else:
- assert False, "Unhandled message type '{}': {}".format(
- envelope.message_type, envelope.message)
+ assert False, "Unhandled message type '{}': {}".format(envelope.message_type, envelope.message)
# _parent_process_queue()
#
@@ -552,8 +538,7 @@ class Job():
# http://bugs.python.org/issue3831
#
if not self._listening:
- self._scheduler.loop.add_reader(
- self._queue._reader.fileno(), self._parent_recv)
+ self._scheduler.loop.add_reader(self._queue._reader.fileno(), self._parent_recv)
self._listening = True
# _parent_stop_listening()
@@ -589,11 +574,10 @@ class Job():
# message_element_key (tuple): None, or the element display key tuple
# to be supplied to the Message() constructor.
#
-class ChildJob():
-
+class ChildJob:
def __init__(
- self, action_name, messenger, logdir, logfile, max_retries, tries,
- message_element_name, message_element_key):
+ self, action_name, messenger, logdir, logfile, max_retries, tries, message_element_name, message_element_key
+ ):
self.action_name = action_name
@@ -624,14 +608,15 @@ class ChildJob():
    # overridden here.
#
def message(self, message_type, message, element_name=None, element_key=None, **kwargs):
- kwargs['scheduler'] = True
+ kwargs["scheduler"] = True
# If default name & key values not provided, set as given job attributes
if element_name is None:
element_name = self._message_element_name
if element_key is None:
element_key = self._message_element_key
- self._messenger.message(Message(message_type, message, element_name=element_name,
- element_key=element_key, **kwargs))
+ self._messenger.message(
+ Message(message_type, message, element_name=element_name, element_key=element_key, **kwargs)
+ )
# send_message()
#
@@ -668,8 +653,7 @@ class ChildJob():
# the result of the Job.
#
def child_process(self):
- raise ImplError("ChildJob '{kind}' does not implement child_process()"
- .format(kind=type(self).__name__))
+ raise ImplError("ChildJob '{kind}' does not implement child_process()".format(kind=type(self).__name__))
# child_process_data()
#
@@ -723,12 +707,13 @@ class ChildJob():
def resume_time():
nonlocal stopped_time
nonlocal starttime
- starttime += (datetime.datetime.now() - stopped_time)
+ starttime += datetime.datetime.now() - stopped_time
        # Time, log and run the action function
#
- with _signals.suspendable(stop_time, resume_time), \
- self._messenger.recorded_messages(self._logfile, self._logdir) as filename:
+ with _signals.suspendable(stop_time, resume_time), self._messenger.recorded_messages(
+ self._logfile, self._logdir
+ ) as filename:
# Graciously handle sigterms.
def handle_sigterm(_signum, _sigframe):
@@ -743,8 +728,7 @@ class ChildJob():
result = self.child_process() # pylint: disable=assignment-from-no-return
except SkipJob as e:
elapsed = datetime.datetime.now() - starttime
- self.message(MessageType.SKIPPED, str(e),
- elapsed=elapsed, logfile=filename)
+ self.message(MessageType.SKIPPED, str(e), elapsed=elapsed, logfile=filename)
# Alert parent of skip by return code
self._child_shutdown(_ReturnCode.SKIPPED)
@@ -753,13 +737,16 @@ class ChildJob():
retry_flag = e.temporary
if retry_flag and (self._tries <= self._max_retries):
- self.message(MessageType.FAIL,
- "Try #{} failed, retrying".format(self._tries),
- elapsed=elapsed, logfile=filename)
+ self.message(
+ MessageType.FAIL,
+ "Try #{} failed, retrying".format(self._tries),
+ elapsed=elapsed,
+ logfile=filename,
+ )
else:
- self.message(MessageType.FAIL, str(e),
- elapsed=elapsed, detail=e.detail,
- logfile=filename, sandbox=e.sandbox)
+ self.message(
+ MessageType.FAIL, str(e), elapsed=elapsed, detail=e.detail, logfile=filename, sandbox=e.sandbox
+ )
self._send_message(_MessageType.CHILD_DATA, self.child_process_data())
@@ -770,7 +757,7 @@ class ChildJob():
#
self._child_shutdown(_ReturnCode.FAIL if retry_flag else _ReturnCode.PERM_FAIL)
- except Exception: # pylint: disable=broad-except
+ except Exception: # pylint: disable=broad-except
# If an unhandled (not normalized to BstError) occurs, that's a bug,
# send the traceback and formatted exception back to the frontend
@@ -779,9 +766,7 @@ class ChildJob():
elapsed = datetime.datetime.now() - starttime
detail = "An unhandled exception occured:\n\n{}".format(traceback.format_exc())
- self.message(MessageType.BUG, self.action_name,
- elapsed=elapsed, detail=detail,
- logfile=filename)
+ self.message(MessageType.BUG, self.action_name, elapsed=elapsed, detail=detail, logfile=filename)
            # Unhandled exceptions should permanently fail
self._child_shutdown(_ReturnCode.PERM_FAIL)
@@ -791,8 +776,7 @@ class ChildJob():
self._child_send_result(result)
elapsed = datetime.datetime.now() - starttime
- self.message(MessageType.SUCCESS, self.action_name, elapsed=elapsed,
- logfile=filename)
+ self.message(MessageType.SUCCESS, self.action_name, elapsed=elapsed, logfile=filename)
# Shutdown needs to stay outside of the above context manager,
        # make sure we don't try to handle SIGTERM while the process
@@ -831,10 +815,7 @@ class ChildJob():
domain = e.domain
reason = e.reason
- self._send_message(_MessageType.ERROR, {
- 'domain': domain,
- 'reason': reason
- })
+ self._send_message(_MessageType.ERROR, {"domain": domain, "reason": reason})
# _child_send_result()
#
diff --git a/src/buildstream/_scheduler/jobs/jobpickler.py b/src/buildstream/_scheduler/jobs/jobpickler.py
index b0465ec9e..1d47f67db 100644
--- a/src/buildstream/_scheduler/jobs/jobpickler.py
+++ b/src/buildstream/_scheduler/jobs/jobpickler.py
@@ -37,9 +37,7 @@ _NAME_TO_PROTO_CLASS = {
"digest": DigestProto,
}
-_PROTO_CLASS_TO_NAME = {
- cls: name for name, cls in _NAME_TO_PROTO_CLASS.items()
-}
+_PROTO_CLASS_TO_NAME = {cls: name for name, cls in _NAME_TO_PROTO_CLASS.items()}
# pickle_child_job()
@@ -57,10 +55,7 @@ def pickle_child_job(child_job, projects):
# necessary for the job, this includes e.g. the global state of the node
# module.
node_module_state = node._get_state_for_pickling()
- return _pickle_child_job_data(
- (child_job, node_module_state),
- projects,
- )
+ return _pickle_child_job_data((child_job, node_module_state), projects,)
# do_pickled_child_job()
@@ -146,10 +141,7 @@ def _pickle_child_job_data(child_job_data, projects):
]
plugin_class_to_factory = {
- cls: factory
- for factory in factory_list
- if factory is not None
- for cls, _ in factory.all_loaded_plugins()
+ cls: factory for factory in factory_list if factory is not None for cls, _ in factory.all_loaded_plugins()
}
pickled_data = io.BytesIO()
diff --git a/src/buildstream/_scheduler/queues/buildqueue.py b/src/buildstream/_scheduler/queues/buildqueue.py
index dc33e6510..d98b49476 100644
--- a/src/buildstream/_scheduler/queues/buildqueue.py
+++ b/src/buildstream/_scheduler/queues/buildqueue.py
@@ -50,10 +50,15 @@ class BuildQueue(Queue):
self._tried.add(element)
_, description, detail = element._get_build_result()
logfile = element._get_build_log()
- self._message(element, MessageType.FAIL, description,
- detail=detail, action_name=self.action_name,
- elapsed=timedelta(seconds=0),
- logfile=logfile)
+ self._message(
+ element,
+ MessageType.FAIL,
+ description,
+ detail=detail,
+ action_name=self.action_name,
+ elapsed=timedelta(seconds=0),
+ logfile=logfile,
+ )
self._done_queue.append(element)
element_name = element._get_full_name()
self._task_group.add_failed_task(element_name)
diff --git a/src/buildstream/_scheduler/queues/queue.py b/src/buildstream/_scheduler/queues/queue.py
index 49fae5677..986ac6c0a 100644
--- a/src/buildstream/_scheduler/queues/queue.py
+++ b/src/buildstream/_scheduler/queues/queue.py
@@ -57,11 +57,11 @@ class QueueStatus(FastEnum):
# Args:
# scheduler (Scheduler): The Scheduler
#
-class Queue():
+class Queue:
    # These should be overridden on class data of concrete Queue implementations
- action_name = None # type: Optional[str]
- complete_name = None # type: Optional[str]
+ action_name = None # type: Optional[str]
+ complete_name = None # type: Optional[str]
# Resources this queues' jobs want
resources = [] # type: List[int]
@@ -72,11 +72,11 @@ class Queue():
#
self._scheduler = scheduler
self._resources = scheduler.resources # Shared resource pool
- self._ready_queue = [] # Ready elements
- self._done_queue = deque() # Processed / Skipped elements
+ self._ready_queue = [] # Ready elements
+ self._done_queue = deque() # Processed / Skipped elements
self._max_retries = 0
- self._required_element_check = False # Whether we should check that elements are required before enqueuing
+ self._required_element_check = False # Whether we should check that elements are required before enqueuing
# Assert the subclass has setup class data
assert self.action_name is not None
@@ -162,8 +162,7 @@ class Queue():
# element (Element): The element waiting to be pushed into the queue
#
def register_pending_element(self, element):
- raise ImplError("Queue type: {} does not implement register_pending_element()"
- .format(self.action_name))
+ raise ImplError("Queue type: {} does not implement register_pending_element()".format(self.action_name))
#####################################################
# Scheduler / Pipeline facing APIs #
@@ -229,12 +228,16 @@ class Queue():
ready.append(element)
return [
- ElementJob(self._scheduler, self.action_name,
- self._element_log_path(element),
- element=element, queue=self,
- action_cb=self.get_process_func(),
- complete_cb=self._job_done,
- max_retries=self._max_retries)
+ ElementJob(
+ self._scheduler,
+ self.action_name,
+ self._element_log_path(element),
+ element=element,
+ queue=self,
+ action_cb=self.get_process_func(),
+ complete_cb=self._job_done,
+ max_retries=self._max_retries,
+ )
for element in ready
]
@@ -267,7 +270,7 @@ class Queue():
def _update_workspaces(self, element, job):
workspace_dict = None
if job.child_data:
- workspace_dict = job.child_data.get('workspace', None)
+ workspace_dict = job.child_data.get("workspace", None)
# Handle any workspace modifications now
#
@@ -279,10 +282,13 @@ class Queue():
workspaces.save_config()
except BstError as e:
self._message(element, MessageType.ERROR, "Error saving workspaces", detail=str(e))
- except Exception: # pylint: disable=broad-except
- self._message(element, MessageType.BUG,
- "Unhandled exception while saving workspaces",
- detail=traceback.format_exc())
+ except Exception: # pylint: disable=broad-except
+ self._message(
+ element,
+ MessageType.BUG,
+ "Unhandled exception while saving workspaces",
+ detail=traceback.format_exc(),
+ )
# _job_done()
#
@@ -322,13 +328,13 @@ class Queue():
#
set_last_task_error(e.domain, e.reason)
- except Exception: # pylint: disable=broad-except
+ except Exception: # pylint: disable=broad-except
# Report unhandled exceptions and mark as failed
#
- self._message(element, MessageType.BUG,
- "Unhandled exception in post processing",
- detail=traceback.format_exc())
+ self._message(
+ element, MessageType.BUG, "Unhandled exception in post processing", detail=traceback.format_exc()
+ )
self._task_group.add_failed_task(element._get_full_name())
else:
# All elements get placed on the done queue for later processing.
@@ -372,7 +378,7 @@ class Queue():
if status == QueueStatus.SKIP:
# Place skipped elements into the done queue immediately
self._task_group.add_skipped_task()
- self._done_queue.append(element) # Elements to proceed to the next queue
+ self._done_queue.append(element) # Elements to proceed to the next queue
elif status == QueueStatus.READY:
# Push elements which are ready to be processed immediately into the queue
heapq.heappush(self._ready_queue, (element._depth, element))
diff --git a/src/buildstream/_scheduler/resources.py b/src/buildstream/_scheduler/resources.py
index 73bf66b4a..e76158779 100644
--- a/src/buildstream/_scheduler/resources.py
+++ b/src/buildstream/_scheduler/resources.py
@@ -1,17 +1,17 @@
-class ResourceType():
+class ResourceType:
CACHE = 0
DOWNLOAD = 1
PROCESS = 2
UPLOAD = 3
-class Resources():
+class Resources:
def __init__(self, num_builders, num_fetchers, num_pushers):
self._max_resources = {
ResourceType.CACHE: 0,
ResourceType.DOWNLOAD: num_fetchers,
ResourceType.PROCESS: num_builders,
- ResourceType.UPLOAD: num_pushers
+ ResourceType.UPLOAD: num_pushers,
}
# Resources jobs are currently using.
@@ -19,7 +19,7 @@ class Resources():
ResourceType.CACHE: 0,
ResourceType.DOWNLOAD: 0,
ResourceType.PROCESS: 0,
- ResourceType.UPLOAD: 0
+ ResourceType.UPLOAD: 0,
}
# Resources jobs currently want exclusive access to. The set
@@ -31,7 +31,7 @@ class Resources():
ResourceType.CACHE: set(),
ResourceType.DOWNLOAD: set(),
ResourceType.PROCESS: set(),
- ResourceType.UPLOAD: set()
+ ResourceType.UPLOAD: set(),
}
# reserve()
@@ -90,8 +90,7 @@ class Resources():
# available. If we don't have enough, the job cannot be
# scheduled.
for resource in resources:
- if (self._max_resources[resource] > 0 and
- self._used_resources[resource] >= self._max_resources[resource]):
+ if self._max_resources[resource] > 0 and self._used_resources[resource] >= self._max_resources[resource]:
return False
# Now we register the fact that our job is using the resources
diff --git a/src/buildstream/_scheduler/scheduler.py b/src/buildstream/_scheduler/scheduler.py
index 86e3af021..171281bd9 100644
--- a/src/buildstream/_scheduler/scheduler.py
+++ b/src/buildstream/_scheduler/scheduler.py
@@ -76,17 +76,18 @@ class NotificationType(FastEnum):
# required. NOTE: The notification object should be lightweight
# and all attributes must be picklable.
#
-class Notification():
-
- def __init__(self,
- notification_type,
- *,
- full_name=None,
- job_action=None,
- job_status=None,
- time=None,
- element=None,
- message=None):
+class Notification:
+ def __init__(
+ self,
+ notification_type,
+ *,
+ full_name=None,
+ job_action=None,
+ job_status=None,
+ time=None,
+ element=None,
+ message=None
+ ):
self.notification_type = notification_type
self.full_name = full_name
self.job_action = job_action
@@ -116,40 +117,36 @@ class Notification():
# interrupt_callback: A callback to handle ^C
#    ticker_callback: A callback called once per second
#
-class Scheduler():
-
- def __init__(self, context,
- start_time, state, notification_queue, notifier):
+class Scheduler:
+ def __init__(self, context, start_time, state, notification_queue, notifier):
#
# Public members
#
- self.queues = None # Exposed for the frontend to print summaries
- self.context = context # The Context object shared with Queues
- self.terminated = False # Whether the scheduler was asked to terminate or has terminated
- self.suspended = False # Whether the scheduler is currently suspended
+ self.queues = None # Exposed for the frontend to print summaries
+ self.context = context # The Context object shared with Queues
+ self.terminated = False # Whether the scheduler was asked to terminate or has terminated
+ self.suspended = False # Whether the scheduler is currently suspended
# These are shared with the Job, but should probably be removed or made private in some way.
- self.loop = None # Shared for Job access to observe the message queue
- self.internal_stops = 0 # Amount of SIGSTP signals we've introduced, this is shared with job.py
+ self.loop = None # Shared for Job access to observe the message queue
+ self.internal_stops = 0 # Amount of SIGSTP signals we've introduced, this is shared with job.py
#
# Private members
#
- self._active_jobs = [] # Jobs currently being run in the scheduler
- self._starttime = start_time # Initial application start time
- self._suspendtime = None # Session time compensation for suspended state
- self._queue_jobs = True # Whether we should continue to queue jobs
+ self._active_jobs = [] # Jobs currently being run in the scheduler
+ self._starttime = start_time # Initial application start time
+ self._suspendtime = None # Session time compensation for suspended state
+ self._queue_jobs = True # Whether we should continue to queue jobs
self._state = state
- self._casd_process = None # handle to the casd process for monitoring purpose
+ self._casd_process = None # handle to the casd process for monitoring purpose
# Bidirectional queue to send notifications back to the Scheduler's owner
self._notification_queue = notification_queue
self._notifier = notifier
- self.resources = Resources(context.sched_builders,
- context.sched_fetchers,
- context.sched_pushers)
+ self.resources = Resources(context.sched_builders, context.sched_fetchers, context.sched_pushers)
# run()
#
@@ -310,11 +307,13 @@ class Scheduler():
element_info = None
# Now check for more jobs
- notification = Notification(NotificationType.JOB_COMPLETE,
- full_name=job.name,
- job_action=job.action_name,
- job_status=status,
- element=element_info)
+ notification = Notification(
+ NotificationType.JOB_COMPLETE,
+ full_name=job.name,
+ job_action=job.action_name,
+ job_status=status,
+ element=element_info,
+ )
self._notify(notification)
self._sched()
@@ -360,10 +359,12 @@ class Scheduler():
#
def _start_job(self, job):
self._active_jobs.append(job)
- notification = Notification(NotificationType.JOB_START,
- full_name=job.name,
- job_action=job.action_name,
- time=self._state.elapsed_time(start_time=self._starttime))
+ notification = Notification(
+ NotificationType.JOB_START,
+ full_name=job.name,
+ job_action=job.action_name,
+ time=self._state.elapsed_time(start_time=self._starttime),
+ )
self._notify(notification)
job.start()
@@ -399,9 +400,7 @@ class Scheduler():
# to fetch tasks for elements which failed to pull, and
# thus need all the pulls to complete before ever starting
# a build
- ready.extend(chain.from_iterable(
- q.harvest_jobs() for q in reversed(self.queues)
- ))
+ ready.extend(chain.from_iterable(q.harvest_jobs() for q in reversed(self.queues)))
# harvest_jobs() may have decided to skip some jobs, making
# them eligible for promotion to the next queue as a side effect.
@@ -471,7 +470,7 @@ class Scheduler():
self.suspended = False
# Notify that we're unsuspended
self._notify(Notification(NotificationType.SUSPENDED))
- self._starttime += (datetime.datetime.now() - self._suspendtime)
+ self._starttime += datetime.datetime.now() - self._suspendtime
self._notify(Notification(NotificationType.SCHED_START_TIME, time=self._starttime))
self._suspendtime = None
diff --git a/src/buildstream/_signals.py b/src/buildstream/_signals.py
index 31982c199..425a57239 100644
--- a/src/buildstream/_signals.py
+++ b/src/buildstream/_signals.py
@@ -37,8 +37,8 @@ if TYPE_CHECKING:
# typing.MutableSequence. However, that is only available in Python versions
# 3.5.4 onward and 3.6.1 onward.
# Debian 9 ships with 3.5.3.
-terminator_stack = deque() # type: MutableSequence[Callable]
-suspendable_stack = deque() # type: MutableSequence[Callable]
+terminator_stack = deque() # type: MutableSequence[Callable]
+suspendable_stack = deque() # type: MutableSequence[Callable]
# Per process SIGTERM handler
@@ -47,16 +47,18 @@ def terminator_handler(signal_, frame):
terminator_ = terminator_stack.pop()
try:
terminator_()
- except: # noqa pylint: disable=bare-except
+ except: # noqa pylint: disable=bare-except
# Ensure we print something if there's an exception raised when
# processing the handlers. Note that the default exception
# handler won't be called because we os._exit next, so we must
# catch all possible exceptions with the unqualified 'except'
# clause.
traceback.print_exc(file=sys.stderr)
- print('Error encountered in BuildStream while processing custom SIGTERM handler:',
- terminator_,
- file=sys.stderr)
+ print(
+ "Error encountered in BuildStream while processing custom SIGTERM handler:",
+ terminator_,
+ file=sys.stderr,
+ )
# Use special exit here, terminate immediately, recommended
    # for precisely this situation where child processes are terminated.
@@ -79,7 +81,7 @@ def terminator_handler(signal_, frame):
#
@contextmanager
def terminator(terminate_func):
- global terminator_stack # pylint: disable=global-statement
+ global terminator_stack # pylint: disable=global-statement
# Signal handling only works in the main thread
if threading.current_thread() != threading.main_thread():
@@ -101,7 +103,7 @@ def terminator(terminate_func):
# Just a simple object for holding on to two callbacks
-class Suspender():
+class Suspender:
def __init__(self, suspend_callback, resume_callback):
self.suspend = suspend_callback
self.resume = resume_callback
@@ -144,7 +146,7 @@ def suspend_handler(sig, frame):
#
@contextmanager
def suspendable(suspend_callback, resume_callback):
- global suspendable_stack # pylint: disable=global-statement
+ global suspendable_stack # pylint: disable=global-statement
outermost = bool(not suspendable_stack)
suspender = Suspender(suspend_callback, resume_callback)
diff --git a/src/buildstream/_site.py b/src/buildstream/_site.py
index 8940fa34a..db0587120 100644
--- a/src/buildstream/_site.py
+++ b/src/buildstream/_site.py
@@ -30,22 +30,22 @@ import subprocess
root = os.path.dirname(os.path.abspath(__file__))
# The Element plugin directory
-element_plugins = os.path.join(root, 'plugins', 'elements')
+element_plugins = os.path.join(root, "plugins", "elements")
# The Source plugin directory
-source_plugins = os.path.join(root, 'plugins', 'sources')
+source_plugins = os.path.join(root, "plugins", "sources")
# Default user configuration
-default_user_config = os.path.join(root, 'data', 'userconfig.yaml')
+default_user_config = os.path.join(root, "data", "userconfig.yaml")
# Default project configuration
-default_project_config = os.path.join(root, 'data', 'projectconfig.yaml')
+default_project_config = os.path.join(root, "data", "projectconfig.yaml")
# Script template to call module building scripts
-build_all_template = os.path.join(root, 'data', 'build-all.sh.in')
+build_all_template = os.path.join(root, "data", "build-all.sh.in")
# Module building script template
-build_module_template = os.path.join(root, 'data', 'build-module.sh.in')
+build_module_template = os.path.join(root, "data", "build-module.sh.in")
def get_bwrap_version():
@@ -53,7 +53,7 @@ def get_bwrap_version():
#
# returns None if no bwrap was found
# otherwise returns a tuple of 3 int: major, minor, patch
- bwrap_path = shutil.which('bwrap')
+ bwrap_path = shutil.which("bwrap")
if not bwrap_path:
return None
diff --git a/src/buildstream/_sourcecache.py b/src/buildstream/_sourcecache.py
index 28ad82831..03e2d1830 100644
--- a/src/buildstream/_sourcecache.py
+++ b/src/buildstream/_sourcecache.py
@@ -26,12 +26,10 @@ from .storage._casbaseddirectory import CasBasedDirectory
from ._basecache import BaseCache
from ._exceptions import CASError, CASRemoteError, SourceCacheError, RemoteError
from . import utils
-from ._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc, \
- source_pb2, source_pb2_grpc
+from ._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc, source_pb2, source_pb2_grpc
class SourceRemote(BaseRemote):
-
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.source_service = None
@@ -65,8 +63,10 @@ class SourceRemote(BaseRemote):
except grpc.RpcError as e:
# Check if this remote has the artifact service
if e.code() == grpc.StatusCode.UNIMPLEMENTED:
- raise RemoteError("Configured remote does not have the BuildStream "
- "capabilities service. Please check remote configuration.")
+ raise RemoteError(
+ "Configured remote does not have the BuildStream "
+ "capabilities service. Please check remote configuration."
+ )
raise RemoteError("Remote initialisation failed: {}".format(e.details()))
if not response.source_capabilities:
@@ -129,7 +129,7 @@ class SourceCache(BaseCache):
def __init__(self, context):
super().__init__(context)
- self.sourcerefdir = os.path.join(context.cachedir, 'source_protos')
+ self.sourcerefdir = os.path.join(context.cachedir, "source_protos")
os.makedirs(self.sourcerefdir, exist_ok=True)
# list_sources()
@@ -182,7 +182,7 @@ class SourceCache(BaseCache):
vdir.import_files(self.export(previous_source))
if not source.BST_STAGE_VIRTUAL_DIRECTORY:
- with utils._tempdir(dir=self.context.tmpdir, prefix='staging-temp') as tmpdir:
+ with utils._tempdir(dir=self.context.tmpdir, prefix="staging-temp") as tmpdir:
if not vdir.is_empty():
vdir.export_files(tmpdir)
source._stage(tmpdir)
@@ -233,12 +233,12 @@ class SourceCache(BaseCache):
source_proto = self._pull_source(ref, remote)
if source_proto is None:
- source.info("Remote source service ({}) does not have source {} cached".format(
- remote, display_key))
+ source.info(
+ "Remote source service ({}) does not have source {} cached".format(remote, display_key)
+ )
continue
except CASError as e:
- raise SourceCacheError("Failed to pull source {}: {}".format(
- display_key, e)) from e
+ raise SourceCacheError("Failed to pull source {}: {}".format(display_key, e)) from e
if not source_proto:
return False
@@ -255,8 +255,7 @@ class SourceCache(BaseCache):
missing_blobs = self.cas.fetch_blobs(remote, missing_blobs)
if missing_blobs:
- source.info("Remote cas ({}) does not have source {} cached".format(
- remote, display_key))
+ source.info("Remote cas ({}) does not have source {} cached".format(remote, display_key))
continue
source.info("Pulled source {} <- {}".format(display_key, remote))
@@ -266,8 +265,7 @@ class SourceCache(BaseCache):
source.info("Remote cas ({}) does not have blob {} cached".format(remote, e.blob))
continue
except CASError as e:
- raise SourceCacheError("Failed to pull source {}: {}".format(
- display_key, e)) from e
+ raise SourceCacheError("Failed to pull source {}: {}".format(display_key, e)) from e
return False
@@ -315,8 +313,7 @@ class SourceCache(BaseCache):
# check whether cache has files already
if self._pull_source(ref, remote) is not None:
- source.info("Remote ({}) already has source {} cached"
- .format(remote, display_key))
+ source.info("Remote ({}) already has source {} cached".format(remote, display_key))
continue
if not self._push_source(ref, remote):
@@ -340,19 +337,18 @@ class SourceCache(BaseCache):
def _store_proto(self, proto, ref):
path = self._source_path(ref)
os.makedirs(os.path.dirname(path), exist_ok=True)
- with utils.save_file_atomic(path, 'w+b') as f:
+ with utils.save_file_atomic(path, "w+b") as f:
f.write(proto.SerializeToString())
def _get_source(self, ref):
path = self._source_path(ref)
source_proto = source_pb2.Source()
try:
- with open(path, 'r+b') as f:
+ with open(path, "r+b") as f:
source_proto.ParseFromString(f.read())
return source_proto
except FileNotFoundError as e:
- raise SourceCacheError("Attempted to access unavailable source: {}"
- .format(e)) from e
+ raise SourceCacheError("Attempted to access unavailable source: {}".format(e)) from e
def _source_path(self, ref):
return os.path.join(self.sourcerefdir, ref)
@@ -361,7 +357,7 @@ class SourceCache(BaseCache):
for root, _, files in os.walk(self.sourcerefdir):
for source_file in files:
source = source_pb2.Source()
- with open(os.path.join(root, source_file), 'r+b') as f:
+ with open(os.path.join(root, source_file), "r+b") as f:
source.ParseFromString(f.read())
yield source.files
diff --git a/src/buildstream/_sourcefactory.py b/src/buildstream/_sourcefactory.py
index 1d959a140..7c90042a8 100644
--- a/src/buildstream/_sourcefactory.py
+++ b/src/buildstream/_sourcefactory.py
@@ -30,14 +30,11 @@ from .source import Source
# plugin_origins (list): Data used to search for external Source plugins
#
class SourceFactory(PluginContext):
+ def __init__(self, plugin_base, *, format_versions={}, plugin_origins=None):
- def __init__(self, plugin_base, *,
- format_versions={},
- plugin_origins=None):
-
- super().__init__(plugin_base, Source, [_site.source_plugins],
- format_versions=format_versions,
- plugin_origins=plugin_origins)
+ super().__init__(
+ plugin_base, Source, [_site.source_plugins], format_versions=format_versions, plugin_origins=plugin_origins
+ )
# create():
#
diff --git a/src/buildstream/_state.py b/src/buildstream/_state.py
index 310e12a63..d85e348f2 100644
--- a/src/buildstream/_state.py
+++ b/src/buildstream/_state.py
@@ -28,7 +28,7 @@ from collections import OrderedDict
# state (State): The state object
# complete_name (str): Optional name for frontend status rendering, e.g. 'built'
#
-class TaskGroup():
+class TaskGroup:
def __init__(self, name, state, complete_name=None):
self.name = name
self.complete_name = complete_name
@@ -98,14 +98,14 @@ class TaskGroup():
# Args:
# session_start (datetime): The time the session started
#
-class State():
+class State:
def __init__(self, session_start):
self._session_start = session_start
self.task_groups = OrderedDict() # key is TaskGroup name
# Note: A Task's full_name is technically unique, but only accidentally.
- self.tasks = OrderedDict() # key is a tuple of action_name and full_name
+ self.tasks = OrderedDict() # key is a tuple of action_name and full_name
self._task_added_cbs = []
self._task_removed_cbs = []
@@ -281,8 +281,9 @@ class State():
#
def add_task(self, action_name, full_name, elapsed_offset=None):
task_key = (action_name, full_name)
- assert task_key not in self.tasks, \
- "Trying to add task '{}:{}' to '{}'".format(action_name, full_name, self.tasks)
+ assert task_key not in self.tasks, "Trying to add task '{}:{}' to '{}'".format(
+ action_name, full_name, self.tasks
+ )
if not elapsed_offset:
elapsed_offset = self.elapsed_time()
@@ -366,7 +367,7 @@ class State():
# e.g. an element's name.
# elapsed_offset (timedelta): The time the task started, relative to
# buildstream's start time.
-class _Task():
+class _Task:
def __init__(self, state, action_name, full_name, elapsed_offset):
self._state = state
self.action_name = action_name
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index f09a46185..2515fadce 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -33,8 +33,19 @@ from collections import deque
from ._artifactelement import verify_artifact_ref, ArtifactElement
from ._exceptions import StreamError, ImplError, BstError, ArtifactElementError, ArtifactError
from ._message import Message, MessageType
-from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, \
- SourcePushQueue, BuildQueue, PullQueue, ArtifactPushQueue, NotificationType, Notification, JobStatus
+from ._scheduler import (
+ Scheduler,
+ SchedStatus,
+ TrackQueue,
+ FetchQueue,
+ SourcePushQueue,
+ BuildQueue,
+ PullQueue,
+ ArtifactPushQueue,
+ NotificationType,
+ Notification,
+ JobStatus,
+)
from ._pipeline import Pipeline, PipelineSelection
from ._profile import Topics, PROFILER
from ._state import State
@@ -55,20 +66,18 @@ from . import Scope, Consistency
# interrupt_callback (callable): A callback to invoke when we get interrupted
# ticker_callback (callable): Invoked every second while running the scheduler
#
-class Stream():
-
- def __init__(self, context, session_start, *,
- session_start_callback=None,
- interrupt_callback=None,
- ticker_callback=None):
+class Stream:
+ def __init__(
+ self, context, session_start, *, session_start_callback=None, interrupt_callback=None, ticker_callback=None
+ ):
#
# Public members
#
- self.targets = [] # Resolved target elements
- self.session_elements = [] # List of elements being processed this session
- self.total_elements = [] # Total list of elements based on targets
- self.queues = [] # Queue objects
+ self.targets = [] # Resolved target elements
+ self.session_elements = [] # List of elements being processed this session
+ self.total_elements = [] # Total list of elements based on targets
+ self.queues = [] # Queue objects
#
# Private members
@@ -84,8 +93,9 @@ class Stream():
context.messenger.set_state(self._state)
- self._scheduler = Scheduler(context, session_start, self._state, self._notification_queue,
- self._scheduler_notification_handler)
+ self._scheduler = Scheduler(
+ context, session_start, self._state, self._notification_queue, self._scheduler_notification_handler
+ )
self._first_non_track_queue = None
self._session_start_callback = session_start_callback
self._ticker_callback = ticker_callback
@@ -138,17 +148,24 @@ class Stream():
#
# Returns:
# (list of Element): The selected elements
- def load_selection(self, targets, *,
- selection=PipelineSelection.NONE,
- except_targets=(),
- use_artifact_config=False,
- load_refs=False):
+ def load_selection(
+ self,
+ targets,
+ *,
+ selection=PipelineSelection.NONE,
+ except_targets=(),
+ use_artifact_config=False,
+ load_refs=False
+ ):
with PROFILER.profile(Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, "-") for t in targets)):
- target_objects, _ = self._load(targets, (),
- selection=selection,
- except_targets=except_targets,
- use_artifact_config=use_artifact_config,
- load_refs=load_refs)
+ target_objects, _ = self._load(
+ targets,
+ (),
+ selection=selection,
+ except_targets=except_targets,
+ use_artifact_config=use_artifact_config,
+ load_refs=load_refs,
+ )
return target_objects
@@ -171,14 +188,20 @@ class Stream():
# Returns:
# (int): The exit code of the launched shell
#
- def shell(self, element, scope, prompt, *,
- directory=None,
- mounts=None,
- isolate=False,
- command=None,
- usebuildtree=None,
- pull_dependencies=None,
- unique_id=None):
+ def shell(
+ self,
+ element,
+ scope,
+ prompt,
+ *,
+ directory=None,
+ mounts=None,
+ isolate=False,
+ command=None,
+ usebuildtree=None,
+ pull_dependencies=None,
+ unique_id=None
+ ):
# Load the Element via the unique_id if given
if unique_id and element is None:
@@ -192,18 +215,16 @@ class Stream():
if not element._source_cached():
raise StreamError(
"Sources for element {} are not cached."
- "Element must be fetched.".format(element._get_full_name()))
+ "Element must be fetched.".format(element._get_full_name())
+ )
- missing_deps = [
- dep for dep in self._pipeline.dependencies([element], scope)
- if not dep._cached()
- ]
+ missing_deps = [dep for dep in self._pipeline.dependencies([element], scope) if not dep._cached()]
if missing_deps:
if not pull_dependencies:
raise StreamError(
"Elements need to be built or downloaded before staging a shell environment",
- detail="\n"
- .join(list(map(lambda x: x._get_full_name(), missing_deps))))
+ detail="\n".join(list(map(lambda x: x._get_full_name(), missing_deps))),
+ )
self._message(MessageType.INFO, "Attempting to fetch missing or incomplete artifacts")
self._scheduler.clear_queues()
self._add_queue(PullQueue(self._scheduler))
@@ -236,8 +257,9 @@ class Stream():
else:
buildtree = True
- return element._shell(scope, directory, mounts=mounts, isolate=isolate, prompt=prompt, command=command,
- usebuildtree=buildtree)
+ return element._shell(
+ scope, directory, mounts=mounts, isolate=isolate, prompt=prompt, command=command, usebuildtree=buildtree
+ )
# build()
#
@@ -252,23 +274,22 @@ class Stream():
# If `remote` specified as None, then regular configuration will be used
# to determine where to push artifacts to.
#
- def build(self, targets, *,
- selection=PipelineSelection.PLAN,
- ignore_junction_targets=False,
- remote=None):
+ def build(self, targets, *, selection=PipelineSelection.PLAN, ignore_junction_targets=False, remote=None):
use_config = True
if remote:
use_config = False
- elements, _ = \
- self._load(targets, [],
- selection=selection,
- ignore_junction_targets=ignore_junction_targets,
- use_artifact_config=use_config,
- artifact_remote_url=remote,
- use_source_config=True,
- dynamic_plan=True)
+ elements, _ = self._load(
+ targets,
+ [],
+ selection=selection,
+ ignore_junction_targets=ignore_junction_targets,
+ use_artifact_config=use_config,
+ artifact_remote_url=remote,
+ use_source_config=True,
+ dynamic_plan=True,
+ )
# Assert that the elements are consistent
self._pipeline.assert_consistent(elements)
@@ -317,12 +338,16 @@ class Stream():
# track_cross_junctions (bool): Whether tracking should cross junction boundaries
# remote (str|None): The URL of a specific remote server to pull from.
#
- def fetch(self, targets, *,
- selection=PipelineSelection.PLAN,
- except_targets=None,
- track_targets=False,
- track_cross_junctions=False,
- remote=None):
+ def fetch(
+ self,
+ targets,
+ *,
+ selection=PipelineSelection.PLAN,
+ except_targets=None,
+ track_targets=False,
+ track_cross_junctions=False,
+ remote=None
+ ):
if track_targets:
track_targets = targets
@@ -337,14 +362,17 @@ class Stream():
if remote:
use_source_config = False
- elements, track_elements = \
- self._load(targets, track_targets,
- selection=selection, track_selection=track_selection,
- except_targets=except_targets,
- track_except_targets=track_except_targets,
- track_cross_junctions=track_cross_junctions,
- use_source_config=use_source_config,
- source_remote_url=remote)
+ elements, track_elements = self._load(
+ targets,
+ track_targets,
+ selection=selection,
+ track_selection=track_selection,
+ except_targets=except_targets,
+ track_except_targets=track_except_targets,
+ track_cross_junctions=track_cross_junctions,
+ use_source_config=use_source_config,
+ source_remote_url=remote,
+ )
# Delegated to a shared fetch method
self._fetch(elements, track_elements=track_elements)
@@ -362,20 +390,20 @@ class Stream():
# If no error is encountered while tracking, then the project files
# are rewritten inline.
#
- def track(self, targets, *,
- selection=PipelineSelection.REDIRECT,
- except_targets=None,
- cross_junctions=False):
+ def track(self, targets, *, selection=PipelineSelection.REDIRECT, except_targets=None, cross_junctions=False):
# We pass no target to build. Only to track. Passing build targets
# would fully load project configuration which might not be
# possible before tracking is done.
- _, elements = \
- self._load([], targets,
- selection=selection, track_selection=selection,
- except_targets=except_targets,
- track_except_targets=except_targets,
- track_cross_junctions=cross_junctions)
+ _, elements = self._load(
+ [],
+ targets,
+ selection=selection,
+ track_selection=selection,
+ except_targets=except_targets,
+ track_except_targets=except_targets,
+ track_cross_junctions=cross_junctions,
+ )
# FIXME: this can be refactored after element._update_state is simplified/removed
elements = [element for element in elements if element._schedule_tracking()]
@@ -400,21 +428,21 @@ class Stream():
# If `remote` specified as None, then regular configuration will be used
# to determine where to pull artifacts from.
#
- def pull(self, targets, *,
- selection=PipelineSelection.NONE,
- ignore_junction_targets=False,
- remote=None):
+ def pull(self, targets, *, selection=PipelineSelection.NONE, ignore_junction_targets=False, remote=None):
use_config = True
if remote:
use_config = False
- elements, _ = self._load(targets, (),
- selection=selection,
- ignore_junction_targets=ignore_junction_targets,
- use_artifact_config=use_config,
- artifact_remote_url=remote,
- load_refs=True)
+ elements, _ = self._load(
+ targets,
+ (),
+ selection=selection,
+ ignore_junction_targets=ignore_junction_targets,
+ use_artifact_config=use_config,
+ artifact_remote_url=remote,
+ load_refs=True,
+ )
if not self._artifacts.has_fetch_remotes():
raise StreamError("No artifact caches available for pulling artifacts")
@@ -442,21 +470,21 @@ class Stream():
# a pull queue will be created if user context and available remotes allow for
# attempting to fetch them.
#
- def push(self, targets, *,
- selection=PipelineSelection.NONE,
- ignore_junction_targets=False,
- remote=None):
+ def push(self, targets, *, selection=PipelineSelection.NONE, ignore_junction_targets=False, remote=None):
use_config = True
if remote:
use_config = False
- elements, _ = self._load(targets, (),
- selection=selection,
- ignore_junction_targets=ignore_junction_targets,
- use_artifact_config=use_config,
- artifact_remote_url=remote,
- load_refs=True)
+ elements, _ = self._load(
+ targets,
+ (),
+ selection=selection,
+ ignore_junction_targets=ignore_junction_targets,
+ use_artifact_config=use_config,
+ artifact_remote_url=remote,
+ load_refs=True,
+ )
if not self._artifacts.has_push_remotes():
raise StreamError("No artifact caches available for pushing artifacts")
@@ -500,8 +528,10 @@ class Stream():
# ready an uncached element in the PushQueue.
if self._context.sched_error_action == _SchedulerErrorAction.CONTINUE and uncached_elements:
names = [element.name for element in uncached_elements]
- fail_str = "Error while pushing. The following elements were not pushed as they are " \
+ fail_str = (
+ "Error while pushing. The following elements were not pushed as they are "
"not yet cached:\n\n\t{}\n".format("\n\t".join(names))
+ )
raise StreamError(fail_str)
@@ -525,15 +555,19 @@ class Stream():
# pull (bool): If true will attempt to pull any missing or incomplete
# artifacts.
#
- def checkout(self, target, *,
- location=None,
- force=False,
- selection=PipelineSelection.RUN,
- integrate=True,
- hardlinks=False,
- compression='',
- pull=False,
- tar=False):
+ def checkout(
+ self,
+ target,
+ *,
+ location=None,
+ force=False,
+ selection=PipelineSelection.RUN,
+ integrate=True,
+ hardlinks=False,
+ compression="",
+ pull=False,
+ tar=False
+ ):
elements, _ = self._load((target,), (), selection=selection, use_artifact_config=True, load_refs=True)
@@ -554,15 +588,15 @@ class Stream():
self._run()
try:
- scope = {'run': Scope.RUN, 'build': Scope.BUILD, 'none': Scope.NONE, 'all': Scope.ALL}
- with target._prepare_sandbox(scope=scope[selection], directory=None,
- integrate=integrate) as sandbox:
+ scope = {"run": Scope.RUN, "build": Scope.BUILD, "none": Scope.NONE, "all": Scope.ALL}
+ with target._prepare_sandbox(scope=scope[selection], directory=None, integrate=integrate) as sandbox:
# Copy or move the sandbox to the target directory
virdir = sandbox.get_virtual_directory()
self._export_artifact(tar, location, compression, target, hardlinks, virdir)
except BstError as e:
- raise StreamError("Error while staging dependencies into a sandbox"
- ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
+ raise StreamError(
+ "Error while staging dependencies into a sandbox" ": '{}'".format(e), detail=e.detail, reason=e.reason
+ ) from e
# _export_artifact()
#
@@ -578,34 +612,32 @@ class Stream():
#
def _export_artifact(self, tar, location, compression, target, hardlinks, virdir):
if not tar:
- with target.timed_activity("Checking out files in '{}'"
- .format(location)):
+ with target.timed_activity("Checking out files in '{}'".format(location)):
try:
if hardlinks:
self._checkout_hardlinks(virdir, location)
else:
virdir.export_files(location)
except OSError as e:
- raise StreamError("Failed to checkout files: '{}'"
- .format(e)) from e
+ raise StreamError("Failed to checkout files: '{}'".format(e)) from e
else:
- to_stdout = location == '-'
+ to_stdout = location == "-"
mode = _handle_compression(compression, to_stream=to_stdout)
with target.timed_activity("Creating tarball"):
if to_stdout:
# Save the stdout FD to restore later
saved_fd = os.dup(sys.stdout.fileno())
try:
- with os.fdopen(sys.stdout.fileno(), 'wb') as fo:
+ with os.fdopen(sys.stdout.fileno(), "wb") as fo:
with tarfile.open(fileobj=fo, mode=mode) as tf:
- virdir.export_to_tar(tf, '.')
+ virdir.export_to_tar(tf, ".")
finally:
# No matter what, restore stdout for further use
os.dup2(saved_fd, sys.stdout.fileno())
os.close(saved_fd)
else:
with tarfile.open(location, mode=mode) as tf:
- virdir.export_to_tar(tf, '.')
+ virdir.export_to_tar(tf, ".")
# artifact_show()
#
@@ -614,13 +646,9 @@ class Stream():
# Args:
# targets (str): Targets to show the cached state of
#
- def artifact_show(self, targets, *,
- selection=PipelineSelection.NONE):
+ def artifact_show(self, targets, *, selection=PipelineSelection.NONE):
# Obtain list of Element and/or ArtifactElement objects
- target_objects = self.load_selection(targets,
- selection=selection,
- use_artifact_config=True,
- load_refs=True)
+ target_objects = self.load_selection(targets, selection=selection, use_artifact_config=True, load_refs=True)
if self._artifacts.has_fetch_remotes():
self._pipeline.check_remotes(target_objects)
@@ -695,8 +723,7 @@ class Stream():
# Args:
# targets (str): Targets to remove
#
- def artifact_delete(self, targets, *,
- selection=PipelineSelection.NONE):
+ def artifact_delete(self, targets, *, selection=PipelineSelection.NONE):
# Return list of Element and/or ArtifactElement objects
target_objects = self.load_selection(targets, selection=selection, load_refs=True)
@@ -736,20 +763,22 @@ class Stream():
# compression (str): The type of compression for tarball
# include_build_scripts (bool): Whether to include build scripts in the checkout
#
- def source_checkout(self, target, *,
- location=None,
- force=False,
- deps='none',
- except_targets=(),
- tar=False,
- compression=None,
- include_build_scripts=False):
+ def source_checkout(
+ self,
+ target,
+ *,
+ location=None,
+ force=False,
+ deps="none",
+ except_targets=(),
+ tar=False,
+ compression=None,
+ include_build_scripts=False
+ ):
self._check_location_writable(location, force=force, tar=tar)
- elements, _ = self._load((target,), (),
- selection=deps,
- except_targets=except_targets)
+ elements, _ = self._load((target,), (), selection=deps, except_targets=except_targets)
# Assert all sources are cached in the source dir
self._fetch(elements)
@@ -757,11 +786,11 @@ class Stream():
# Stage all sources determined by scope
try:
- self._source_checkout(elements, location, force, deps,
- tar, compression, include_build_scripts)
+ self._source_checkout(elements, location, force, deps, tar, compression, include_build_scripts)
except BstError as e:
- raise StreamError("Error while writing sources"
- ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
+ raise StreamError(
+ "Error while writing sources" ": '{}'".format(e), detail=e.detail, reason=e.reason
+ ) from e
self._message(MessageType.INFO, "Checked out sources to '{}'".format(location))
@@ -776,11 +805,7 @@ class Stream():
# force (bool): Whether to ignore contents in an existing directory
# custom_dir (str): Custom location to create a workspace or false to use default location.
#
- def workspace_open(self, targets, *,
- no_checkout,
- track_first,
- force,
- custom_dir):
+ def workspace_open(self, targets, *, no_checkout, track_first, force, custom_dir):
# This function is a little funny but it is trying to be as atomic as possible.
if track_first:
@@ -788,9 +813,9 @@ class Stream():
else:
track_targets = ()
- elements, track_elements = self._load(targets, track_targets,
- selection=PipelineSelection.REDIRECT,
- track_selection=PipelineSelection.REDIRECT)
+ elements, track_elements = self._load(
+ targets, track_targets, selection=PipelineSelection.REDIRECT, track_selection=PipelineSelection.REDIRECT
+ )
workspaces = self._context.get_workspaces()
@@ -819,33 +844,44 @@ class Stream():
workspace = workspaces.get_workspace(target._get_full_name())
if workspace:
if not force:
- raise StreamError("Element '{}' already has an open workspace defined at: {}"
- .format(target.name, workspace.get_absolute_path()))
+ raise StreamError(
+ "Element '{}' already has an open workspace defined at: {}".format(
+ target.name, workspace.get_absolute_path()
+ )
+ )
if not no_checkout:
- target.warn("Replacing existing workspace for element '{}' defined at: {}"
- .format(target.name, workspace.get_absolute_path()))
+ target.warn(
+ "Replacing existing workspace for element '{}' defined at: {}".format(
+ target.name, workspace.get_absolute_path()
+ )
+ )
self.workspace_close(target._get_full_name(), remove_dir=not no_checkout)
target_consistency = target._get_consistency()
- if not no_checkout and target_consistency < Consistency.CACHED and \
- target_consistency._source_cached():
- raise StreamError("Could not stage uncached source. For {} ".format(target.name) +
- "Use `--track` to track and " +
- "fetch the latest version of the " +
- "source.")
+ if not no_checkout and target_consistency < Consistency.CACHED and target_consistency._source_cached():
+ raise StreamError(
+ "Could not stage uncached source. For {} ".format(target.name)
+ + "Use `--track` to track and "
+ + "fetch the latest version of the "
+ + "source."
+ )
if not custom_dir:
directory = os.path.abspath(os.path.join(self._context.workspacedir, target.name))
- if directory[-4:] == '.bst':
+ if directory[-4:] == ".bst":
directory = directory[:-4]
expanded_directories.append(directory)
if custom_dir:
if len(elements) != 1:
- raise StreamError("Exactly one element can be given if --directory is used",
- reason='directory-with-multiple-elements')
+ raise StreamError(
+ "Exactly one element can be given if --directory is used",
+ reason="directory-with-multiple-elements",
+ )
directory = os.path.abspath(custom_dir)
- expanded_directories = [directory, ]
+ expanded_directories = [
+ directory,
+ ]
else:
            # If this fails it is a bug in whatever calls this, usually cli.py and so cannot be tested for via the
# run bst test mechanism.
@@ -854,12 +890,16 @@ class Stream():
for target, directory in zip(elements, expanded_directories):
if os.path.exists(directory):
if not os.path.isdir(directory):
- raise StreamError("For element '{}', Directory path is not a directory: {}"
- .format(target.name, directory), reason='bad-directory')
+ raise StreamError(
+ "For element '{}', Directory path is not a directory: {}".format(target.name, directory),
+ reason="bad-directory",
+ )
if not (no_checkout or force) and os.listdir(directory):
- raise StreamError("For element '{}', Directory path is not empty: {}"
- .format(target.name, directory), reason='bad-directory')
+ raise StreamError(
+ "For element '{}', Directory path is not empty: {}".format(target.name, directory),
+ reason="bad-directory",
+ )
if os.listdir(directory):
if force and not no_checkout:
utils._force_rmtree(directory)
@@ -868,8 +908,7 @@ class Stream():
        # Now it does the bits that cannot be made atomic.
targetGenerator = zip(elements, expanded_directories)
for target, directory in targetGenerator:
- self._message(MessageType.INFO, "Creating workspace for element {}"
- .format(target.name))
+ self._message(MessageType.INFO, "Creating workspace for element {}".format(target.name))
workspace = workspaces.get_workspace(target._get_full_name())
if workspace and not no_checkout:
@@ -886,8 +925,7 @@ class Stream():
raise StreamError("Failed to create workspace directory: {}".format(e) + todo_elements) from e
workspaces.create_workspace(target, directory, checkout=not no_checkout)
- self._message(MessageType.INFO, "Created a workspace for element: {}"
- .format(target._get_full_name()))
+ self._message(MessageType.INFO, "Created a workspace for element: {}".format(target._get_full_name()))
# workspace_close
#
@@ -903,13 +941,13 @@ class Stream():
# Remove workspace directory if prompted
if remove_dir:
- with self._context.messenger.timed_activity("Removing workspace directory {}"
- .format(workspace.get_absolute_path())):
+ with self._context.messenger.timed_activity(
+ "Removing workspace directory {}".format(workspace.get_absolute_path())
+ ):
try:
shutil.rmtree(workspace.get_absolute_path())
except OSError as e:
- raise StreamError("Could not remove '{}': {}"
- .format(workspace.get_absolute_path(), e)) from e
+ raise StreamError("Could not remove '{}': {}".format(workspace.get_absolute_path(), e)) from e
# Delete the workspace and save the configuration
workspaces.delete_workspace(element_name)
@@ -928,9 +966,9 @@ class Stream():
#
def workspace_reset(self, targets, *, soft, track_first):
- elements, _ = self._load(targets, [],
- selection=PipelineSelection.REDIRECT,
- track_selection=PipelineSelection.REDIRECT)
+ elements, _ = self._load(
+ targets, [], selection=PipelineSelection.REDIRECT, track_selection=PipelineSelection.REDIRECT
+ )
nonexisting = []
for element in elements:
@@ -946,14 +984,20 @@ class Stream():
if soft:
workspace.prepared = False
- self._message(MessageType.INFO, "Reset workspace state for {} at: {}"
- .format(element.name, workspace_path))
+ self._message(
+ MessageType.INFO, "Reset workspace state for {} at: {}".format(element.name, workspace_path)
+ )
continue
self.workspace_close(element._get_full_name(), remove_dir=True)
workspaces.save_config()
- self.workspace_open([element._get_full_name()],
- no_checkout=False, track_first=track_first, force=True, custom_dir=workspace_path)
+ self.workspace_open(
+ [element._get_full_name()],
+ no_checkout=False,
+ track_first=track_first,
+ force=True,
+ custom_dir=workspace_path,
+ )
# workspace_exists
#
@@ -1001,14 +1045,12 @@ class Stream():
workspaces = []
for element_name, workspace_ in self._context.get_workspaces().list():
workspace_detail = {
- 'element': element_name,
- 'directory': workspace_.get_absolute_path(),
+ "element": element_name,
+ "directory": workspace_.get_absolute_path(),
}
workspaces.append(workspace_detail)
- _yaml.roundtrip_dump({
- 'workspaces': workspaces
- })
+ _yaml.roundtrip_dump({"workspaces": workspaces})
# redirect_element_names()
#
@@ -1034,9 +1076,9 @@ class Stream():
else:
output_elements.add(e)
if load_elements:
- loaded_elements, _ = self._load(load_elements, (),
- selection=PipelineSelection.REDIRECT,
- track_selection=PipelineSelection.REDIRECT)
+ loaded_elements, _ = self._load(
+ load_elements, (), selection=PipelineSelection.REDIRECT, track_selection=PipelineSelection.REDIRECT
+ )
for e in loaded_elements:
output_elements.add(e.name)
@@ -1166,26 +1208,31 @@ class Stream():
# (list of Element): The primary element selection
# (list of Element): The tracking element selection
#
- def _load(self, targets, track_targets, *,
- selection=PipelineSelection.NONE,
- track_selection=PipelineSelection.NONE,
- except_targets=(),
- track_except_targets=(),
- track_cross_junctions=False,
- ignore_junction_targets=False,
- use_artifact_config=False,
- use_source_config=False,
- artifact_remote_url=None,
- source_remote_url=None,
- dynamic_plan=False,
- load_refs=False):
+ def _load(
+ self,
+ targets,
+ track_targets,
+ *,
+ selection=PipelineSelection.NONE,
+ track_selection=PipelineSelection.NONE,
+ except_targets=(),
+ track_except_targets=(),
+ track_cross_junctions=False,
+ ignore_junction_targets=False,
+ use_artifact_config=False,
+ use_source_config=False,
+ artifact_remote_url=None,
+ source_remote_url=None,
+ dynamic_plan=False,
+ load_refs=False
+ ):
# Classify element and artifact strings
target_elements, target_artifacts = self._classify_artifacts(targets)
if target_artifacts:
if not load_refs:
- detail = '\n'.join(target_artifacts)
+ detail = "\n".join(target_artifacts)
raise ArtifactElementError("Cannot perform this operation with artifact refs:", detail=detail)
if selection in (PipelineSelection.ALL, PipelineSelection.RUN):
raise StreamError("Error: '--deps {}' is not supported for artifact refs".format(selection))
@@ -1198,8 +1245,9 @@ class Stream():
# Load all target elements
loadable = [target_elements, except_targets, track_targets, track_except_targets]
if any(loadable):
- elements, except_elements, track_elements, track_except_elements = \
- self._pipeline.load(loadable, rewritable=rewritable)
+ elements, except_elements, track_elements, track_except_elements = self._pipeline.load(
+ loadable, rewritable=rewritable
+ )
else:
elements, except_elements, track_elements, track_except_elements = [], [], [], []
@@ -1208,7 +1256,7 @@ class Stream():
# Optionally filter out junction elements
if ignore_junction_targets:
- elements = [e for e in elements if e.get_kind() != 'junction']
+ elements = [e for e in elements if e.get_kind() != "junction"]
# Hold on to the targets
self.targets = elements + artifacts
@@ -1233,14 +1281,10 @@ class Stream():
for project, project_elements in track_projects.items():
selected = self._pipeline.get_selection(project_elements, track_selection)
- selected = self._pipeline.track_cross_junction_filter(project,
- selected,
- track_cross_junctions)
+ selected = self._pipeline.track_cross_junction_filter(project, selected, track_cross_junctions)
track_selected.extend(selected)
- track_selected = self._pipeline.except_elements(track_elements,
- track_selected,
- track_except_elements)
+ track_selected = self._pipeline.except_elements(track_elements, track_selected, track_except_elements)
if not targets:
return [], track_selected
@@ -1257,9 +1301,7 @@ class Stream():
#
self._pipeline.resolve_elements(self.targets)
selected = self._pipeline.get_selection(self.targets, selection, silent=False)
- selected = self._pipeline.except_elements(self.targets,
- selected,
- except_elements)
+ selected = self._pipeline.except_elements(self.targets, selected, except_elements)
if selection == PipelineSelection.PLAN and dynamic_plan:
# We use a dynamic build plan, only request artifacts of top-level targets,
@@ -1279,8 +1321,7 @@ class Stream():
#
def _message(self, message_type, message, **kwargs):
args = dict(kwargs)
- self._context.messenger.message(
- Message(message_type, message, **args))
+ self._context.messenger.message(Message(message_type, message, **args))
# _add_queue()
#
@@ -1321,9 +1362,7 @@ class Stream():
# unique_id (str): A unique_id to load an Element instance
#
def _failure_retry(self, action_name, unique_id):
- notification = Notification(NotificationType.RETRY,
- job_action=action_name,
- element=unique_id)
+ notification = Notification(NotificationType.RETRY, job_action=action_name, element=unique_id)
self._notify(notification)
# _run()
@@ -1370,8 +1409,7 @@ class Stream():
# Filter out elements with cached sources, only from the fetch plan
# let the track plan resolve new refs.
- cached = [elt for elt in fetch_plan
- if not elt._should_fetch(fetch_original)]
+ cached = [elt for elt in fetch_plan if not elt._should_fetch(fetch_original)]
fetch_plan = self._pipeline.subtract_elements(fetch_plan, cached)
# Construct queues, enqueue and run
@@ -1406,21 +1444,16 @@ class Stream():
try:
os.makedirs(location, exist_ok=True)
except OSError as e:
- raise StreamError("Failed to create destination directory: '{}'"
- .format(e)) from e
+ raise StreamError("Failed to create destination directory: '{}'".format(e)) from e
if not os.access(location, os.W_OK):
- raise StreamError("Destination directory '{}' not writable"
- .format(location))
+ raise StreamError("Destination directory '{}' not writable".format(location))
if not force and os.listdir(location):
- raise StreamError("Destination directory '{}' not empty"
- .format(location))
- elif os.path.exists(location) and location != '-':
+ raise StreamError("Destination directory '{}' not empty".format(location))
+ elif os.path.exists(location) and location != "-":
if not os.access(location, os.W_OK):
- raise StreamError("Output file '{}' not writable"
- .format(location))
+ raise StreamError("Output file '{}' not writable".format(location))
if not force and os.path.exists(location):
- raise StreamError("Output file '{}' already exists"
- .format(location))
+ raise StreamError("Output file '{}' already exists".format(location))
# Helper function for checkout()
#
@@ -1433,13 +1466,16 @@ class Stream():
sandbox_vroot.export_files(directory, can_link=True, can_destroy=True)
# Helper function for source_checkout()
- def _source_checkout(self, elements,
- location=None,
- force=False,
- deps='none',
- tar=False,
- compression=None,
- include_build_scripts=False):
+ def _source_checkout(
+ self,
+ elements,
+ location=None,
+ force=False,
+ deps="none",
+ tar=False,
+ compression=None,
+ include_build_scripts=False,
+ ):
location = os.path.abspath(location)
        # Stage all our sources in a temporary directory. This
@@ -1455,8 +1491,7 @@ class Stream():
else:
self._move_directory(temp_source_dir.name, location, force)
except OSError as e:
- raise StreamError("Failed to checkout sources to {}: {}"
- .format(location, e)) from e
+ raise StreamError("Failed to checkout sources to {}: {}".format(location, e)) from e
finally:
with suppress(FileNotFoundError):
temp_source_dir.cleanup()
@@ -1498,10 +1533,10 @@ class Stream():
# Create a tarball from the content of directory
def _create_tarball(self, directory, tar_name, compression):
if compression is None:
- compression = ''
+ compression = ""
mode = _handle_compression(compression)
try:
- with utils.save_file_atomic(tar_name, mode='wb') as f:
+ with utils.save_file_atomic(tar_name, mode="wb") as f:
tarball = tarfile.open(fileobj=f, mode=mode)
for item in os.listdir(str(directory)):
file_to_add = os.path.join(directory, item)
@@ -1598,7 +1633,7 @@ class Stream():
artifact_globs = []
for target in targets:
- if target.endswith('.bst'):
+ if target.endswith(".bst"):
if any(c in "*?[" for c in target):
element_globs.append(target)
else:
@@ -1628,7 +1663,7 @@ class Stream():
for glob in artifact_globs:
artifact_refs.extend(self._artifacts.list_artifacts(glob=glob))
if not artifact_refs:
- self._message(MessageType.WARN, "No artifacts found for globs: {}".format(', '.join(artifact_globs)))
+ self._message(MessageType.WARN, "No artifacts found for globs: {}".format(", ".join(artifact_globs)))
return element_targets, artifact_refs
@@ -1648,8 +1683,7 @@ class Stream():
elif notification.notification_type == NotificationType.JOB_COMPLETE:
self._state.remove_task(notification.job_action, notification.full_name)
if notification.job_status == JobStatus.FAIL:
- self._state.fail_task(notification.job_action, notification.full_name,
- notification.element)
+ self._state.fail_task(notification.job_action, notification.full_name, notification.element)
elif notification.notification_type == NotificationType.SCHED_START_TIME:
self._starttime = notification.time
elif notification.notification_type == NotificationType.RUNNING:
@@ -1694,5 +1728,5 @@ class Stream():
# (str): The tarfile mode string
#
def _handle_compression(compression, *, to_stream=False):
- mode_prefix = 'w|' if to_stream else 'w:'
+ mode_prefix = "w|" if to_stream else "w:"
return mode_prefix + compression
diff --git a/src/buildstream/_version.py b/src/buildstream/_version.py
index 03f946cb8..10905c4ea 100644
--- a/src/buildstream/_version.py
+++ b/src/buildstream/_version.py
@@ -60,17 +60,18 @@ HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
+
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
+
return decorate
-def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
- env=None):
+def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
@@ -78,10 +79,9 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
- p = subprocess.Popen([c] + args, cwd=cwd, env=env,
- stdout=subprocess.PIPE,
- stderr=(subprocess.PIPE if hide_stderr
- else None))
+ p = subprocess.Popen(
+ [c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)
+ )
break
except EnvironmentError:
e = sys.exc_info()[1]
@@ -118,16 +118,19 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
- return {"version": dirname[len(parentdir_prefix):],
- "full-revisionid": None,
- "dirty": False, "error": None, "date": None}
+ return {
+ "version": dirname[len(parentdir_prefix) :],
+ "full-revisionid": None,
+ "dirty": False,
+ "error": None,
+ "date": None,
+ }
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
- print("Tried directories %s but none started with prefix %s" %
- (str(rootdirs), parentdir_prefix))
+ print("Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@@ -183,7 +186,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
- tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+ tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
@@ -192,7 +195,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
- tags = set([r for r in refs if re.search(r'\d', r)])
+ tags = set([r for r in refs if re.search(r"\d", r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
@@ -200,19 +203,26 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
- r = ref[len(tag_prefix):]
+ r = ref[len(tag_prefix) :]
if verbose:
print("picking %s" % r)
- return {"version": r,
- "full-revisionid": keywords["full"].strip(),
- "dirty": False, "error": None,
- "date": date}
+ return {
+ "version": r,
+ "full-revisionid": keywords["full"].strip(),
+ "dirty": False,
+ "error": None,
+ "date": date,
+ }
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
- return {"version": "0+unknown",
- "full-revisionid": keywords["full"].strip(),
- "dirty": False, "error": "no suitable tags", "date": None}
+ return {
+ "version": "0+unknown",
+ "full-revisionid": keywords["full"].strip(),
+ "dirty": False,
+ "error": "no suitable tags",
+ "date": None,
+ }
@register_vcs_handler("git", "pieces_from_vcs")
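
git_versions_from_keywords() above extracts candidate version tags from git's expanded "%d" ref list: entries carrying the "tag: " prefix are preferred, and for older git (< 1.8.3) it falls back to any ref containing a digit. A compact, standalone reproduction of just that filtering step, using a made-up set of ref names:

    import re

    # A made-up set of decorated ref names, as git's "%d" expansion might list them.
    refs = {"HEAD -> master", "tag: 1.4.0", "tag: 1.4.0rc1", "origin/master"}

    TAG = "tag: "
    tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)}
    if not tags:
        # Older git: fall back to refs containing a digit, as the code above does.
        tags = {r for r in refs if re.search(r"\d", r)}

    print(sorted(tags))  # ['1.4.0', '1.4.0rc1']
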
@@ -227,8 +237,7 @@ def git_pieces_from_vcs(tag_prefix, tag_regex, root, verbose, run_command=run_co
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
- out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
- hide_stderr=True)
+ out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
@@ -236,10 +245,11 @@ def git_pieces_from_vcs(tag_prefix, tag_regex, root, verbose, run_command=run_co
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
- describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
- "--always", "--long",
- "--match", "%s%s" % (tag_prefix, tag_regex)],
- cwd=root)
+ describe_out, rc = run_command(
+ GITS,
+ ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s%s" % (tag_prefix, tag_regex)],
+ cwd=root,
+ )
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
@@ -262,17 +272,16 @@ def git_pieces_from_vcs(tag_prefix, tag_regex, root, verbose, run_command=run_co
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
- git_describe = git_describe[:git_describe.rindex("-dirty")]
+ git_describe = git_describe[: git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
- mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
+ mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
- pieces["error"] = ("unable to parse git-describe output: '%s'"
- % describe_out)
+ pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out
return pieces
# tag
@@ -281,10 +290,9 @@ def git_pieces_from_vcs(tag_prefix, tag_regex, root, verbose, run_command=run_co
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
- pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
- % (full_tag, tag_prefix))
+ pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)
return pieces
- pieces["closest-tag"] = full_tag[len(tag_prefix):]
+ pieces["closest-tag"] = full_tag[len(tag_prefix) :]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
@@ -295,13 +303,11 @@ def git_pieces_from_vcs(tag_prefix, tag_regex, root, verbose, run_command=run_co
else:
# HEX: no tags
pieces["closest-tag"] = None
- count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
- cwd=root)
+ count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
- cwd=root)[0].strip()
+ date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
@@ -332,8 +338,7 @@ def render_pep440(pieces):
rendered += ".dirty"
else:
# exception #1
- rendered = "0+untagged.%d.g%s" % (pieces["distance"],
- pieces["short"])
+ rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
@@ -447,11 +452,13 @@ def render_git_describe_long(pieces):
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
- return {"version": "unknown",
- "full-revisionid": pieces.get("long"),
- "dirty": None,
- "error": pieces["error"],
- "date": None}
+ return {
+ "version": "unknown",
+ "full-revisionid": pieces.get("long"),
+ "dirty": None,
+ "error": pieces["error"],
+ "date": None,
+ }
if not style or style == "default":
style = "pep440" # the default
@@ -471,9 +478,13 @@ def render(pieces, style):
else:
raise ValueError("unknown style '%s'" % style)
- return {"version": rendered, "full-revisionid": pieces["long"],
- "dirty": pieces["dirty"], "error": None,
- "date": pieces.get("date")}
+ return {
+ "version": rendered,
+ "full-revisionid": pieces["long"],
+ "dirty": pieces["dirty"],
+ "error": None,
+ "date": pieces.get("date"),
+ }
def get_versions():
@@ -487,8 +498,7 @@ def get_versions():
verbose = cfg.verbose
try:
- return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
- verbose)
+ return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose)
except NotThisMethod:
pass
@@ -497,13 +507,16 @@ def get_versions():
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
- for i in cfg.versionfile_source.split('/'):
+ for i in cfg.versionfile_source.split("/"):
root = os.path.dirname(root)
except NameError:
- return {"version": "0+unknown", "full-revisionid": None,
- "dirty": None,
- "error": "unable to find root of source tree",
- "date": None}
+ return {
+ "version": "0+unknown",
+ "full-revisionid": None,
+ "dirty": None,
+ "error": "unable to find root of source tree",
+ "date": None,
+ }
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, cfg.tag_regex, root, verbose)
@@ -517,6 +530,10 @@ def get_versions():
except NotThisMethod:
pass
- return {"version": "0+unknown", "full-revisionid": None,
- "dirty": None,
- "error": "unable to compute version", "date": None}
+ return {
+ "version": "0+unknown",
+ "full-revisionid": None,
+ "dirty": None,
+ "error": "unable to compute version",
+ "date": None,
+ }
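
run_command() in this file tries each candidate executable name in turn (for example ["git.cmd", "git.exe"] on Windows) and moves on to the next one when the process cannot be spawned. A rough, standalone sketch of that fallback pattern, with the verbose/hide_stderr handling and error reporting omitted:

    import subprocess

    def run_first_available(commands, args, cwd=None):
        # Try each candidate executable name until one can actually be spawned.
        for cmd in commands:
            try:
                proc = subprocess.Popen([cmd] + args, cwd=cwd, stdout=subprocess.PIPE)
                break
            except OSError:
                continue
        else:
            return None, None  # none of the candidates could be started
        stdout = proc.communicate()[0].decode().strip()
        return stdout, proc.returncode

    # Example: ask git for the current HEAD, trying plain "git" only.
    out, rc = run_first_available(["git"], ["rev-parse", "HEAD"])
    print(rc, out)
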
diff --git a/src/buildstream/_workspaces.py b/src/buildstream/_workspaces.py
index f9023dc54..3d50fd9c0 100644
--- a/src/buildstream/_workspaces.py
+++ b/src/buildstream/_workspaces.py
@@ -38,7 +38,7 @@ WORKSPACE_PROJECT_FILE = ".bstproject.yaml"
# Args:
# directory (str): The directory that the workspace exists in.
#
-class WorkspaceProject():
+class WorkspaceProject:
def __init__(self, directory):
self._projects = []
self._directory = directory
@@ -51,7 +51,7 @@ class WorkspaceProject():
# (str): The path to a project
#
def get_default_project_path(self):
- return self._projects[0]['project-path']
+ return self._projects[0]["project-path"]
# get_default_element()
#
@@ -61,7 +61,7 @@ class WorkspaceProject():
# (str): The name of an element
#
def get_default_element(self):
- return self._projects[0]['element-name']
+ return self._projects[0]["element-name"]
# to_dict()
#
@@ -72,8 +72,8 @@ class WorkspaceProject():
#
def to_dict(self):
ret = {
- 'projects': self._projects,
- 'format-version': BST_WORKSPACE_PROJECT_FORMAT_VERSION,
+ "projects": self._projects,
+ "format-version": BST_WORKSPACE_PROJECT_FORMAT_VERSION,
}
return ret
@@ -91,13 +91,14 @@ class WorkspaceProject():
@classmethod
def from_dict(cls, directory, dictionary):
# Only know how to handle one format-version at the moment.
- format_version = int(dictionary['format-version'])
- assert format_version == BST_WORKSPACE_PROJECT_FORMAT_VERSION, \
- "Format version {} not found in {}".format(BST_WORKSPACE_PROJECT_FORMAT_VERSION, dictionary)
+ format_version = int(dictionary["format-version"])
+ assert format_version == BST_WORKSPACE_PROJECT_FORMAT_VERSION, "Format version {} not found in {}".format(
+ BST_WORKSPACE_PROJECT_FORMAT_VERSION, dictionary
+ )
workspace_project = cls(directory)
- for item in dictionary['projects']:
- workspace_project.add_project(item['project-path'], item['element-name'])
+ for item in dictionary["projects"]:
+ workspace_project.add_project(item["project-path"], item["element-name"])
return workspace_project
@@ -145,15 +146,15 @@ class WorkspaceProject():
# element_name (str): The name of the element that the workspace belongs to.
#
def add_project(self, project_path, element_name):
- assert (project_path and element_name)
- self._projects.append({'project-path': project_path, 'element-name': element_name})
+ assert project_path and element_name
+ self._projects.append({"project-path": project_path, "element-name": element_name})
# WorkspaceProjectCache()
#
# A class to manage workspace project data for multiple workspaces.
#
-class WorkspaceProjectCache():
+class WorkspaceProjectCache:
def __init__(self):
self._projects = {} # Mapping of a workspace directory to its WorkspaceProject
@@ -216,8 +217,9 @@ class WorkspaceProjectCache():
def remove(self, directory):
workspace_project = self.get(directory)
if not workspace_project:
- raise LoadError("Failed to find a {} file to remove".format(WORKSPACE_PROJECT_FILE),
- LoadErrorReason.MISSING_FILE)
+ raise LoadError(
+ "Failed to find a {} file to remove".format(WORKSPACE_PROJECT_FILE), LoadErrorReason.MISSING_FILE
+ )
path = workspace_project.get_filename()
try:
os.unlink(path)
@@ -242,7 +244,7 @@ class WorkspaceProjectCache():
# changed between failed builds. Should be
# made obsolete with failed build artifacts.
#
-class Workspace():
+class Workspace:
def __init__(self, toplevel_project, *, last_successful=None, path=None, prepared=False, running_files=None):
self.prepared = prepared
self.last_successful = last_successful
@@ -260,11 +262,7 @@ class Workspace():
# (dict) A dict representation of the workspace
#
def to_dict(self):
- ret = {
- 'prepared': self.prepared,
- 'path': self._path,
- 'running_files': self.running_files
- }
+ ret = {"prepared": self.prepared, "path": self._path, "running_files": self.running_files}
if self.last_successful is not None:
ret["last_successful"] = self.last_successful
return ret
@@ -363,8 +361,7 @@ class Workspace():
try:
stat = os.lstat(filename)
except OSError as e:
- raise LoadError("Failed to stat file in workspace: {}".format(e),
- LoadErrorReason.MISSING_FILE)
+ raise LoadError("Failed to stat file in workspace: {}".format(e), LoadErrorReason.MISSING_FILE)
# Use the mtime of any file with sub second precision
return stat.st_mtime_ns
@@ -378,8 +375,7 @@ class Workspace():
if os.path.isdir(fullpath):
filelist = utils.list_relative_paths(fullpath)
filelist = [
- (relpath, os.path.join(fullpath, relpath)) for relpath in filelist
- if relpath not in excluded_files
+ (relpath, os.path.join(fullpath, relpath)) for relpath in filelist if relpath not in excluded_files
]
else:
filelist = [(self.get_absolute_path(), fullpath)]
@@ -404,7 +400,7 @@ class Workspace():
# toplevel_project (Project): Top project used to resolve paths.
# workspace_project_cache (WorkspaceProjectCache): The cache of WorkspaceProjects
#
-class Workspaces():
+class Workspaces:
def __init__(self, toplevel_project, workspace_project_cache):
self._toplevel_project = toplevel_project
self._bst_directory = os.path.join(toplevel_project.directory, ".bst")
@@ -525,11 +521,8 @@ class Workspaces():
assert utils._is_main_process()
config = {
- 'format-version': BST_WORKSPACE_FORMAT_VERSION,
- 'workspaces': {
- element: workspace.to_dict()
- for element, workspace in self._workspaces.items()
- }
+ "format-version": BST_WORKSPACE_FORMAT_VERSION,
+ "workspaces": {element: workspace.to_dict() for element, workspace in self._workspaces.items()},
}
os.makedirs(self._bst_directory, exist_ok=True)
_yaml.roundtrip_dump(config, self._get_filename())
@@ -572,10 +565,11 @@ class Workspaces():
#
def _parse_workspace_config(self, workspaces):
try:
- version = workspaces.get_int('format-version', default=0)
+ version = workspaces.get_int("format-version", default=0)
except ValueError:
- raise LoadError("Format version is not an integer in workspace configuration",
- LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "Format version is not an integer in workspace configuration", LoadErrorReason.INVALID_DATA
+ )
if version == 0:
# Pre-versioning format can be of two forms
@@ -588,17 +582,17 @@ class Workspaces():
elif config_type is MappingNode:
sources = list(config.values())
if len(sources) > 1:
- detail = "There are multiple workspaces open for '{}'.\n" + \
- "This is not supported anymore.\n" + \
- "Please remove this element from '{}'."
- raise LoadError(detail.format(element, self._get_filename()),
- LoadErrorReason.INVALID_DATA)
+ detail = (
+ "There are multiple workspaces open for '{}'.\n"
+ + "This is not supported anymore.\n"
+ + "Please remove this element from '{}'."
+ )
+ raise LoadError(detail.format(element, self._get_filename()), LoadErrorReason.INVALID_DATA)
workspaces[element] = sources[0]
else:
- raise LoadError("Workspace config is in unexpected format.",
- LoadErrorReason.INVALID_DATA)
+ raise LoadError("Workspace config is in unexpected format.", LoadErrorReason.INVALID_DATA)
res = {
element: Workspace(self._toplevel_project, path=config.as_str())
@@ -607,13 +601,16 @@ class Workspaces():
elif 1 <= version <= BST_WORKSPACE_FORMAT_VERSION:
workspaces = workspaces.get_mapping("workspaces", default={})
- res = {element: self._load_workspace(node)
- for element, node in workspaces.items()}
+ res = {element: self._load_workspace(node) for element, node in workspaces.items()}
else:
- raise LoadError("Workspace configuration format version {} not supported."
- "Your version of buildstream may be too old. Max supported version: {}"
- .format(version, BST_WORKSPACE_FORMAT_VERSION), LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "Workspace configuration format version {} not supported."
+ "Your version of buildstream may be too old. Max supported version: {}".format(
+ version, BST_WORKSPACE_FORMAT_VERSION
+ ),
+ LoadErrorReason.INVALID_DATA,
+ )
return res
@@ -628,15 +625,15 @@ class Workspaces():
# (Workspace): A newly instantiated Workspace
#
def _load_workspace(self, node):
- running_files = node.get_mapping('running_files', default=None)
+ running_files = node.get_mapping("running_files", default=None)
if running_files:
running_files = running_files.strip_node_info()
dictionary = {
- 'prepared': node.get_bool('prepared', default=False),
- 'path': node.get_str('path'),
- 'last_successful': node.get_str('last_successful', default=None),
- 'running_files': running_files,
+ "prepared": node.get_bool("prepared", default=False),
+ "path": node.get_str("path"),
+ "last_successful": node.get_str("last_successful", default=None),
+ "running_files": running_files,
}
return Workspace.from_dict(self._toplevel_project, dictionary)
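
save_config() above serialises the open workspaces as a mapping keyed by element name under a top-level "format-version", and _load_workspace() reads back "prepared", "path", "last_successful" and "running_files". A small illustration of that dict shape being round-tripped; the element name, path and format-version value are invented placeholders:

    # Shape of the data written by save_config() and read back by _load_workspace(),
    # with placeholder values purely for illustration.
    WORKSPACE_FORMAT_VERSION = 4  # assumption: stand-in, not the real constant

    config = {
        "format-version": WORKSPACE_FORMAT_VERSION,
        "workspaces": {
            "hello.bst": {
                "prepared": False,
                "path": "../workspaces/hello",
                "last_successful": None,
                "running_files": {},
            },
        },
    }

    # A loader in the spirit of _load_workspace(): fetch each field with a default.
    for element, node in config["workspaces"].items():
        workspace = {
            "prepared": node.get("prepared", False),
            "path": node["path"],
            "last_successful": node.get("last_successful"),
            "running_files": node.get("running_files") or {},
        }
        print(element, workspace)
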
diff --git a/src/buildstream/buildelement.py b/src/buildstream/buildelement.py
index 7fe97c168..f04d3b0dc 100644
--- a/src/buildstream/buildelement.py
+++ b/src/buildstream/buildelement.py
@@ -144,17 +144,16 @@ from .types import Scope
# This list is preserved because of an unfortunate situation, we
# need to remove these older commands which were secret and never
# documented, but without breaking the cache keys.
-_legacy_command_steps = ['bootstrap-commands',
- 'configure-commands',
- 'build-commands',
- 'test-commands',
- 'install-commands',
- 'strip-commands']
+_legacy_command_steps = [
+ "bootstrap-commands",
+ "configure-commands",
+ "build-commands",
+ "test-commands",
+ "install-commands",
+ "strip-commands",
+]
-_command_steps = ['configure-commands',
- 'build-commands',
- 'install-commands',
- 'strip-commands']
+_command_steps = ["configure-commands", "build-commands", "install-commands", "strip-commands"]
class BuildElement(Element):
@@ -190,21 +189,21 @@ class BuildElement(Element):
# cache key, while having the side effect of setting max-jobs to 1,
# which is normally automatically resolved and does not affect
# the cache key.
- if self.get_variable('notparallel'):
- dictionary['notparallel'] = True
+ if self.get_variable("notparallel"):
+ dictionary["notparallel"] = True
return dictionary
def configure_sandbox(self, sandbox):
- build_root = self.get_variable('build-root')
- install_root = self.get_variable('install-root')
+ build_root = self.get_variable("build-root")
+ install_root = self.get_variable("install-root")
# Tell the sandbox to mount the build root and install root
sandbox.mark_directory(build_root)
sandbox.mark_directory(install_root)
# Allow running all commands in a specified subdirectory
- command_subdir = self.get_variable('command-subdir')
+ command_subdir = self.get_variable("command-subdir")
if command_subdir:
command_dir = os.path.join(build_root, command_subdir)
else:
@@ -230,13 +229,13 @@ class BuildElement(Element):
dep.integrate(sandbox)
# Stage sources in the build root
- self.stage_sources(sandbox, self.get_variable('build-root'))
+ self.stage_sources(sandbox, self.get_variable("build-root"))
def assemble(self, sandbox):
# Run commands
for command_name in _command_steps:
commands = self.__commands[command_name]
- if not commands or command_name == 'configure-commands':
+ if not commands or command_name == "configure-commands":
continue
with sandbox.batch(SandboxFlags.ROOT_READ_ONLY, label="Running {}".format(command_name)):
@@ -247,21 +246,22 @@ class BuildElement(Element):
# to - if an element later attempts to stage to a location
# that is not empty, we abort the build - in this case this
# will almost certainly happen.
- staged_build = os.path.join(self.get_variable('install-root'),
- self.get_variable('build-root'))
+ staged_build = os.path.join(self.get_variable("install-root"), self.get_variable("build-root"))
if os.path.isdir(staged_build) and os.listdir(staged_build):
- self.warn("Writing to %{install-root}/%{build-root}.",
- detail="Writing to this directory will almost " +
- "certainly cause an error, since later elements " +
- "will not be allowed to stage to %{build-root}.")
+ self.warn(
+ "Writing to %{install-root}/%{build-root}.",
+ detail="Writing to this directory will almost "
+ + "certainly cause an error, since later elements "
+ + "will not be allowed to stage to %{build-root}.",
+ )
# Return the payload, this is configurable but is generally
# always the /buildstream-install directory
- return self.get_variable('install-root')
+ return self.get_variable("install-root")
def prepare(self, sandbox):
- commands = self.__commands['configure-commands']
+ commands = self.__commands["configure-commands"]
if commands:
with sandbox.batch(SandboxFlags.ROOT_READ_ONLY, label="Running configure-commands"):
for cmd in commands:
@@ -282,15 +282,10 @@ class BuildElement(Element):
#############################################################
def __get_commands(self, node, name):
raw_commands = node.get_sequence(name, [])
- return [
- self.node_subst_vars(command)
- for command in raw_commands
- ]
+ return [self.node_subst_vars(command) for command in raw_commands]
def __run_command(self, sandbox, cmd):
# Note the -e switch to 'sh' means to exit with an error
# if any untested command fails.
#
- sandbox.run(['sh', '-c', '-e', cmd + '\n'],
- SandboxFlags.ROOT_READ_ONLY,
- label=cmd)
+ sandbox.run(["sh", "-c", "-e", cmd + "\n"], SandboxFlags.ROOT_READ_ONLY, label=cmd)
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index 5fa8f14df..5028cc5fa 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -90,8 +90,7 @@ from pyroaring import BitMap # pylint: disable=no-name-in-module
from . import _yaml
from ._variables import Variables
from ._versions import BST_CORE_ARTIFACT_VERSION
-from ._exceptions import BstError, LoadError, LoadErrorReason, ImplError, \
- ErrorDomain, SourceCacheError
+from ._exceptions import BstError, LoadError, LoadErrorReason, ImplError, ErrorDomain, SourceCacheError
from .utils import FileListResult
from . import utils
from . import _cachekey
@@ -122,6 +121,7 @@ if TYPE_CHECKING:
from ._context import Context
from ._loader.metaelement import MetaElement
from ._project import Project
+
# pylint: enable=cyclic-import
@@ -136,13 +136,10 @@ class ElementError(BstError):
collect: An optional directory containing partial install contents
temporary: An indicator of whether the error may occur if the operation was run again. (*Since: 1.2*)
"""
- def __init__(self,
- message: str,
- *,
- detail: str = None,
- reason: str = None,
- collect: str = None,
- temporary: bool = False):
+
+ def __init__(
+ self, message: str, *, detail: str = None, reason: str = None, collect: str = None, temporary: bool = False
+ ):
super().__init__(message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason, temporary=temporary)
self.collect = collect
@@ -156,12 +153,13 @@ class Element(Plugin):
All elements derive from this class, this interface defines how
the core will be interacting with Elements.
"""
+
# The defaults from the yaml file and project
__defaults = None
# A hash of Element by MetaElement
- __instantiated_elements = {} # type: Dict[MetaElement, Element]
+ __instantiated_elements = {} # type: Dict[MetaElement, Element]
# A list of (source, ref) tuples which were redundantly specified
- __redundant_source_refs = [] # type: List[Tuple[Source, SourceRef]]
+ __redundant_source_refs = [] # type: List[Tuple[Source, SourceRef]]
BST_ARTIFACT_VERSION = 0
"""The element plugin's artifact version
@@ -215,10 +213,10 @@ class Element(Plugin):
*Since: 1.90*
"""
- def __init__(self, context: 'Context', project: 'Project', meta: 'MetaElement', plugin_conf: Dict[str, Any]):
+ def __init__(self, context: "Context", project: "Project", meta: "MetaElement", plugin_conf: Dict[str, Any]):
- self.__cache_key_dict = None # Dict for cache key calculation
- self.__cache_key = None # Our cached cache key
+ self.__cache_key_dict = None # Dict for cache key calculation
+ self.__cache_key = None # Our cached cache key
super().__init__(meta.name, context, project, meta.provenance, "element")
@@ -236,75 +234,75 @@ class Element(Plugin):
"""
# Direct runtime dependency Elements
- self.__runtime_dependencies = [] # type: List[Element]
+ self.__runtime_dependencies = [] # type: List[Element]
# Direct build dependency Elements
- self.__build_dependencies = [] # type: List[Element]
+ self.__build_dependencies = [] # type: List[Element]
# Direct build dependency subset which require strict rebuilds
- self.__strict_dependencies = [] # type: List[Element]
+ self.__strict_dependencies = [] # type: List[Element]
# Direct reverse build dependency Elements
- self.__reverse_build_deps = set() # type: Set[Element]
+ self.__reverse_build_deps = set() # type: Set[Element]
# Direct reverse runtime dependency Elements
- self.__reverse_runtime_deps = set() # type: Set[Element]
- self.__build_deps_without_strict_cache_key = None # Number of build dependencies without a strict key
+ self.__reverse_runtime_deps = set() # type: Set[Element]
+ self.__build_deps_without_strict_cache_key = None # Number of build dependencies without a strict key
self.__runtime_deps_without_strict_cache_key = None # Number of runtime dependencies without a strict key
- self.__build_deps_without_cache_key = None # Number of build dependencies without a cache key
+ self.__build_deps_without_cache_key = None # Number of build dependencies without a cache key
self.__runtime_deps_without_cache_key = None # Number of runtime dependencies without a cache key
- self.__build_deps_uncached = None # Build dependencies which are not yet cached
+ self.__build_deps_uncached = None # Build dependencies which are not yet cached
self.__runtime_deps_uncached = None # Runtime dependencies which are not yet cached
self.__updated_strict_cache_keys_of_rdeps = False # Whether we've updated strict cache keys of rdeps
- self.__ready_for_runtime = False # Whether the element and its runtime dependencies have cache keys
+ self.__ready_for_runtime = False # Whether the element and its runtime dependencies have cache keys
self.__ready_for_runtime_and_cached = False # Whether all runtime deps are cached, as well as the element
- self.__cached_remotely = None # Whether the element is cached remotely
+ self.__cached_remotely = None # Whether the element is cached remotely
# List of Sources
- self.__sources = [] # type: List[Source]
- self.__weak_cache_key = None # Our cached weak cache key
- self.__strict_cache_key = None # Our cached cache key for strict builds
+ self.__sources = [] # type: List[Source]
+ self.__weak_cache_key = None # Our cached weak cache key
+ self.__strict_cache_key = None # Our cached cache key for strict builds
self.__artifacts = context.artifactcache # Artifact cache
self.__sourcecache = context.sourcecache # Source cache
self.__consistency = Consistency.INCONSISTENT # Cached overall consistency state
- self.__assemble_scheduled = False # Element is scheduled to be assembled
- self.__assemble_done = False # Element is assembled
- self.__tracking_scheduled = False # Sources are scheduled to be tracked
- self.__pull_done = False # Whether pull was attempted
- self.__cached_successfully = None # If the Element is known to be successfully cached
- self.__source_cached = None # If the sources are known to be successfully cached
- self.__splits = None # Resolved regex objects for computing split domains
- self.__whitelist_regex = None # Resolved regex object to check if file is allowed to overlap
+ self.__assemble_scheduled = False # Element is scheduled to be assembled
+ self.__assemble_done = False # Element is assembled
+ self.__tracking_scheduled = False # Sources are scheduled to be tracked
+ self.__pull_done = False # Whether pull was attempted
+ self.__cached_successfully = None # If the Element is known to be successfully cached
+ self.__source_cached = None # If the sources are known to be successfully cached
+ self.__splits = None # Resolved regex objects for computing split domains
+ self.__whitelist_regex = None # Resolved regex object to check if file is allowed to overlap
# Location where Element.stage_sources() was called
self.__staged_sources_directory = None # type: Optional[str]
- self.__tainted = None # Whether the artifact is tainted and should not be shared
- self.__required = False # Whether the artifact is required in the current session
+ self.__tainted = None # Whether the artifact is tainted and should not be shared
+ self.__required = False # Whether the artifact is required in the current session
self.__artifact_files_required = False # Whether artifact files are required in the local cache
- self.__build_result = None # The result of assembling this Element (success, description, detail)
- self._build_log_path = None # The path of the build log for this Element
+ self.__build_result = None # The result of assembling this Element (success, description, detail)
+ self._build_log_path = None # The path of the build log for this Element
# Artifact class for direct artifact composite interaction
- self.__artifact = None # type: Optional[Artifact]
- self.__strict_artifact = None # Artifact for strict cache key
- self.__meta_kind = meta.kind # The kind of this source, required for unpickling
+ self.__artifact = None # type: Optional[Artifact]
+ self.__strict_artifact = None # Artifact for strict cache key
+ self.__meta_kind = meta.kind # The kind of this source, required for unpickling
# the index of the last source in this element that requires previous
# sources for staging
self.__last_source_requires_previous_ix = None
- self.__batch_prepare_assemble = False # Whether batching across prepare()/assemble() is configured
- self.__batch_prepare_assemble_flags = 0 # Sandbox flags for batching across prepare()/assemble()
+ self.__batch_prepare_assemble = False # Whether batching across prepare()/assemble() is configured
+ self.__batch_prepare_assemble_flags = 0 # Sandbox flags for batching across prepare()/assemble()
# Collect dir for batching across prepare()/assemble()
self.__batch_prepare_assemble_collect = None # type: Optional[str]
# Callbacks
- self.__required_callback = None # Callback to Queues
- self.__can_query_cache_callback = None # Callback to PullQueue/FetchQueue
- self.__buildable_callback = None # Callback to BuildQueue
+ self.__required_callback = None # Callback to Queues
+ self.__can_query_cache_callback = None # Callback to PullQueue/FetchQueue
+ self.__buildable_callback = None # Callback to BuildQueue
- self._depth = None # Depth of Element in its current dependency graph
- self._resolved_initial_state = False # Whether the initial state of the Element has been resolved
+ self._depth = None # Depth of Element in its current dependency graph
+ self._resolved_initial_state = False # Whether the initial state of the Element has been resolved
# Ensure we have loaded this class's defaults
self.__init_defaults(project, plugin_conf, meta.kind, meta.is_junction)
# Collect the composited variables and resolve them
variables = self.__extract_variables(project, meta)
- variables['element-name'] = self.name
+ variables["element-name"] = self.name
self.__variables = Variables(variables)
# Collect the composited environment now that we have variables
@@ -348,7 +346,7 @@ class Element(Plugin):
#############################################################
# Abstract Methods #
#############################################################
- def configure_sandbox(self, sandbox: 'Sandbox') -> None:
+ def configure_sandbox(self, sandbox: "Sandbox") -> None:
"""Configures the the sandbox for execution
Args:
@@ -360,10 +358,9 @@ class Element(Plugin):
Elements must implement this method to configure the sandbox object
for execution.
"""
- raise ImplError("element plugin '{kind}' does not implement configure_sandbox()".format(
- kind=self.get_kind()))
+ raise ImplError("element plugin '{kind}' does not implement configure_sandbox()".format(kind=self.get_kind()))
- def stage(self, sandbox: 'Sandbox') -> None:
+ def stage(self, sandbox: "Sandbox") -> None:
"""Stage inputs into the sandbox directories
Args:
@@ -377,10 +374,9 @@ class Element(Plugin):
objects, by staging the artifacts of the elements this element depends
on, or both.
"""
- raise ImplError("element plugin '{kind}' does not implement stage()".format(
- kind=self.get_kind()))
+ raise ImplError("element plugin '{kind}' does not implement stage()".format(kind=self.get_kind()))
- def prepare(self, sandbox: 'Sandbox') -> None:
+ def prepare(self, sandbox: "Sandbox") -> None:
"""Run one-off preparation commands.
This is run before assemble(), but is guaranteed to run only
@@ -400,7 +396,7 @@ class Element(Plugin):
*Since: 1.2*
"""
- def assemble(self, sandbox: 'Sandbox') -> str:
+ def assemble(self, sandbox: "Sandbox") -> str:
"""Assemble the output artifact
Args:
@@ -415,8 +411,7 @@ class Element(Plugin):
Elements must implement this method to create an output
artifact from its sources and dependencies.
"""
- raise ImplError("element plugin '{kind}' does not implement assemble()".format(
- kind=self.get_kind()))
+ raise ImplError("element plugin '{kind}' does not implement assemble()".format(kind=self.get_kind()))
def generate_script(self) -> str:
"""Generate a build (sh) script to build this element
@@ -437,13 +432,12 @@ class Element(Plugin):
If the script fails, it is expected to return with an exit
code != 0.
"""
- raise ImplError("element plugin '{kind}' does not implement write_script()".format(
- kind=self.get_kind()))
+ raise ImplError("element plugin '{kind}' does not implement write_script()".format(kind=self.get_kind()))
#############################################################
# Public Methods #
#############################################################
- def sources(self) -> Iterator['Source']:
+ def sources(self) -> Iterator["Source"]:
"""A generator function to enumerate the element sources
Yields:
@@ -452,7 +446,7 @@ class Element(Plugin):
for source in self.__sources:
yield source
- def dependencies(self, scope: Scope, *, recurse: bool = True, visited=None) -> Iterator['Element']:
+ def dependencies(self, scope: Scope, *, recurse: bool = True, visited=None) -> Iterator["Element"]:
"""dependencies(scope, *, recurse=True)
A generator function which yields the dependencies of the given element.
@@ -479,6 +473,7 @@ class Element(Plugin):
if scope in (Scope.RUN, Scope.ALL):
yield from self.__runtime_dependencies
else:
+
def visit(element, scope, visited):
if scope == Scope.ALL:
visited[0].add(element._unique_id)
@@ -519,7 +514,7 @@ class Element(Plugin):
yield from visit(self, scope, visited)
- def search(self, scope: Scope, name: str) -> Optional['Element']:
+ def search(self, scope: Scope, name: str) -> Optional["Element"]:
"""Search for a dependency by name
Args:
@@ -535,7 +530,7 @@ class Element(Plugin):
return None
- def node_subst_vars(self, node: 'ScalarNode') -> str:
+ def node_subst_vars(self, node: "ScalarNode") -> str:
"""Replace any variables in the string contained in the node and returns it.
Args:
@@ -559,9 +554,9 @@ class Element(Plugin):
return self.__variables.subst(node.as_str())
except LoadError as e:
provenance = node.get_provenance()
- raise LoadError('{}: {}'.format(provenance, e), e.reason, detail=e.detail) from e
+ raise LoadError("{}: {}".format(provenance, e), e.reason, detail=e.detail) from e
- def node_subst_sequence_vars(self, node: 'SequenceNode[ScalarNode]') -> List[str]:
+ def node_subst_sequence_vars(self, node: "SequenceNode[ScalarNode]") -> List[str]:
"""Substitute any variables in the given sequence
Args:
@@ -580,14 +575,12 @@ class Element(Plugin):
ret.append(self.__variables.subst(value.as_str()))
except LoadError as e:
provenance = value.get_provenance()
- raise LoadError('{}: {}'.format(provenance, e), e.reason, detail=e.detail) from e
+ raise LoadError("{}: {}".format(provenance, e), e.reason, detail=e.detail) from e
return ret
- def compute_manifest(self,
- *,
- include: Optional[List[str]] = None,
- exclude: Optional[List[str]] = None,
- orphans: bool = True) -> str:
+ def compute_manifest(
+ self, *, include: Optional[List[str]] = None, exclude: Optional[List[str]] = None, orphans: bool = True
+ ) -> str:
"""Compute and return this element's selective manifest
The manifest consists of the list of file paths in the
@@ -630,14 +623,16 @@ class Element(Plugin):
return _compose_artifact_name(self.project_name, self.normal_name, key)
- def stage_artifact(self,
- sandbox: 'Sandbox',
- *,
- path: str = None,
- include: Optional[List[str]] = None,
- exclude: Optional[List[str]] = None,
- orphans: bool = True,
- update_mtimes: Optional[List[str]] = None) -> FileListResult:
+ def stage_artifact(
+ self,
+ sandbox: "Sandbox",
+ *,
+ path: str = None,
+ include: Optional[List[str]] = None,
+ exclude: Optional[List[str]] = None,
+ orphans: bool = True,
+ update_mtimes: Optional[List[str]] = None
+ ) -> FileListResult:
"""Stage this element's output artifact in the sandbox
This will stage the files from the artifact to the sandbox at specified location.
@@ -675,10 +670,11 @@ class Element(Plugin):
"""
if not self._cached():
- detail = "No artifacts have been cached yet for that element\n" + \
- "Try building the element first with `bst build`\n"
- raise ElementError("No artifacts to stage",
- detail=detail, reason="uncached-checkout-attempt")
+ detail = (
+ "No artifacts have been cached yet for that element\n"
+ + "Try building the element first with `bst build`\n"
+ )
+ raise ElementError("No artifacts to stage", detail=detail, reason="uncached-checkout-attempt")
if update_mtimes is None:
update_mtimes = []
@@ -689,47 +685,49 @@ class Element(Plugin):
with self.timed_activity("Staging {}/{}".format(self.name, self._get_brief_display_key())):
# Disable type checking since we can't easily tell mypy that
# `self.__artifact` can't be None at this stage.
- files_vdir = self.__artifact.get_files() # type: ignore
+ files_vdir = self.__artifact.get_files() # type: ignore
# Hard link it into the staging area
#
vbasedir = sandbox.get_virtual_directory()
- vstagedir = vbasedir \
- if path is None \
- else vbasedir.descend(*path.lstrip(os.sep).split(os.sep))
+ vstagedir = vbasedir if path is None else vbasedir.descend(*path.lstrip(os.sep).split(os.sep))
split_filter = self.__split_filter_func(include, exclude, orphans)
# We must not hardlink files whose mtimes we want to update
if update_mtimes:
+
def link_filter(path):
- return ((split_filter is None or split_filter(path)) and
- path not in update_mtimes)
+ return (split_filter is None or split_filter(path)) and path not in update_mtimes
def copy_filter(path):
- return ((split_filter is None or split_filter(path)) and
- path in update_mtimes)
+ return (split_filter is None or split_filter(path)) and path in update_mtimes
+
else:
link_filter = split_filter
- result = vstagedir.import_files(files_vdir, filter_callback=link_filter,
- report_written=True, can_link=True)
+ result = vstagedir.import_files(
+ files_vdir, filter_callback=link_filter, report_written=True, can_link=True
+ )
if update_mtimes:
- copy_result = vstagedir.import_files(files_vdir, filter_callback=copy_filter,
- report_written=True, update_mtime=True)
+ copy_result = vstagedir.import_files(
+ files_vdir, filter_callback=copy_filter, report_written=True, update_mtime=True
+ )
result = result.combine(copy_result)
return result
- def stage_dependency_artifacts(self,
- sandbox: 'Sandbox',
- scope: Scope,
- *,
- path: str = None,
- include: Optional[List[str]] = None,
- exclude: Optional[List[str]] = None,
- orphans: bool = True) -> None:
+ def stage_dependency_artifacts(
+ self,
+ sandbox: "Sandbox",
+ scope: Scope,
+ *,
+ path: str = None,
+ include: Optional[List[str]] = None,
+ exclude: Optional[List[str]] = None,
+ orphans: bool = True
+ ) -> None:
"""Stage element dependencies in scope
This is primarily a convenience wrapper around
@@ -751,8 +749,8 @@ class Element(Plugin):
occur.
"""
ignored = {}
- overlaps = OrderedDict() # type: OrderedDict[str, List[str]]
- files_written = {} # type: Dict[str, List[str]]
+ overlaps = OrderedDict() # type: OrderedDict[str, List[str]]
+ files_written = {} # type: Dict[str, List[str]]
old_dep_keys = None
workspace = self._get_workspace()
context = self._get_context()
@@ -803,12 +801,9 @@ class Element(Plugin):
if utils._is_main_process():
context.get_workspaces().save_config()
- result = dep.stage_artifact(sandbox,
- path=path,
- include=include,
- exclude=exclude,
- orphans=orphans,
- update_mtimes=to_update)
+ result = dep.stage_artifact(
+ sandbox, path=path, include=include, exclude=exclude, orphans=orphans, update_mtimes=to_update
+ )
if result.overwritten:
for overwrite in result.overwritten:
# Completely new overwrite
@@ -841,8 +836,9 @@ class Element(Plugin):
warning_detail += _overlap_error_detail(f, overlap_warning_elements, elements)
if overlap_warning:
- self.warn("Non-whitelisted overlaps detected", detail=warning_detail,
- warning_token=CoreWarnings.OVERLAPS)
+ self.warn(
+ "Non-whitelisted overlaps detected", detail=warning_detail, warning_token=CoreWarnings.OVERLAPS
+ )
if ignored:
detail = "Not staging files which would replace non-empty directories:\n"
@@ -851,7 +847,7 @@ class Element(Plugin):
detail += " " + " ".join(["/" + f + "\n" for f in value])
self.warn("Ignored files", detail=detail)
- def integrate(self, sandbox: 'Sandbox') -> None:
+ def integrate(self, sandbox: "Sandbox") -> None:
"""Integrate currently staged filesystem against this artifact.
Args:
@@ -863,19 +859,18 @@ class Element(Plugin):
commands will create and update important system cache files
required for running the installed software (such as the ld.so.cache).
"""
- bstdata = self.get_public_data('bst')
+ bstdata = self.get_public_data("bst")
environment = self.get_environment()
if bstdata is not None:
with sandbox.batch(SandboxFlags.NONE):
- commands = bstdata.get_sequence('integration-commands', [])
+ commands = bstdata.get_sequence("integration-commands", [])
for command in commands:
cmd = self.node_subst_vars(command)
- sandbox.run(['sh', '-e', '-c', cmd], 0, env=environment, cwd='/',
- label=cmd)
+ sandbox.run(["sh", "-e", "-c", cmd], 0, env=environment, cwd="/", label=cmd)
- def stage_sources(self, sandbox: 'Sandbox', directory: str) -> None:
+ def stage_sources(self, sandbox: "Sandbox", directory: str) -> None:
"""Stage this element's sources to a directory in the sandbox
Args:
@@ -892,7 +887,7 @@ class Element(Plugin):
self._stage_sources_in_sandbox(sandbox, directory)
- def get_public_data(self, domain: str) -> 'MappingNode[Any, Any]':
+ def get_public_data(self, domain: str) -> "MappingNode[Any, Any]":
"""Fetch public data on this element
Args:
@@ -911,13 +906,13 @@ class Element(Plugin):
# Disable type-checking since we can't easily tell mypy that
# `self.__dynamic_public` can't be None here.
- data = self.__dynamic_public.get_mapping(domain, default=None) # type: ignore
+ data = self.__dynamic_public.get_mapping(domain, default=None) # type: ignore
if data is not None:
data = data.clone()
return data
- def set_public_data(self, domain: str, data: 'MappingNode[Any, Any]') -> None:
+ def set_public_data(self, domain: str, data: "MappingNode[Any, Any]") -> None:
"""Set public data on this element
Args:
@@ -935,7 +930,7 @@ class Element(Plugin):
if data is not None:
data = data.clone()
- self.__dynamic_public[domain] = data # type: ignore
+ self.__dynamic_public[domain] = data # type: ignore
def get_environment(self) -> Dict[str, str]:
"""Fetch the environment suitable for running in the sandbox
@@ -1016,8 +1011,7 @@ class Element(Plugin):
# Instantiate sources and generate their keys
for meta_source in meta.sources:
meta_source.first_pass = meta.is_junction
- source = meta.project.create_source(meta_source,
- first_pass=meta.first_pass)
+ source = meta.project.create_source(meta_source, first_pass=meta.first_pass)
redundant_ref = source._load_ref()
@@ -1190,8 +1184,7 @@ class Element(Plugin):
# (bool): Whether this element can currently be built
#
def _buildable(self):
- if self._get_consistency() < Consistency.CACHED and \
- not self._source_cached():
+ if self._get_consistency() < Consistency.CACHED and not self._source_cached():
return False
if not self.__assemble_scheduled:
@@ -1261,11 +1254,14 @@ class Element(Plugin):
# If the element wasn't assembled and isn't scheduled to be assemble,
# or cached, or waiting to be pulled but has an artifact then schedule
# the assembly.
- if (not self.__assemble_scheduled and not self.__assemble_done and
- self.__artifact and
- self._is_required() and
- not self._cached() and
- not self._pull_pending()):
+ if (
+ not self.__assemble_scheduled
+ and not self.__assemble_done
+ and self.__artifact
+ and self._is_required()
+ and not self._cached()
+ and not self._pull_pending()
+ ):
self._schedule_assemble()
# If a build has been scheduled, we know that the element
@@ -1298,7 +1294,7 @@ class Element(Plugin):
cache_key = self._get_cache_key()
if not cache_key:
- cache_key = "{:?<64}".format('')
+ cache_key = "{:?<64}".format("")
elif cache_key == self.__strict_cache_key:
# Strong cache key used in this session matches cache key
# that would be used in strict build mode
@@ -1378,8 +1374,10 @@ class Element(Plugin):
# Complementary warning that the new ref will be unused.
if old_ref != new_ref and self._get_workspace():
- detail = "This source has an open workspace.\n" \
+ detail = (
+ "This source has an open workspace.\n"
+ "To start using the new reference, please close the existing workspace."
+ )
source.warn("Updated reference will be ignored as source has open workspace", detail=detail)
return refs
@@ -1393,8 +1391,9 @@ class Element(Plugin):
def _prepare_sandbox(self, scope, directory, shell=False, integrate=True, usebuildtree=False):
# bst shell and bst artifact checkout require a local sandbox.
bare_directory = bool(directory)
- with self.__sandbox(directory, config=self.__sandbox_config, allow_remote=False,
- bare_directory=bare_directory) as sandbox:
+ with self.__sandbox(
+ directory, config=self.__sandbox_config, allow_remote=False, bare_directory=bare_directory
+ ) as sandbox:
sandbox._usebuildtree = usebuildtree
# Configure always comes first, and we need it.
@@ -1452,8 +1451,9 @@ class Element(Plugin):
# It's advantageous to have this temporary directory on
# the same file system as the rest of our cache.
- with self.timed_activity("Staging sources", silent_nested=True), \
- utils._tempdir(dir=context.tmpdir, prefix='staging-temp') as temp_staging_directory:
+ with self.timed_activity("Staging sources", silent_nested=True), utils._tempdir(
+ dir=context.tmpdir, prefix="staging-temp"
+ ) as temp_staging_directory:
import_dir = temp_staging_directory
@@ -1488,12 +1488,12 @@ class Element(Plugin):
import_dir.import_files(source_dir)
except SourceCacheError as e:
- raise ElementError("Error trying to export source for {}: {}"
- .format(self.name, e))
+ raise ElementError("Error trying to export source for {}: {}".format(self.name, e))
except VirtualDirectoryError as e:
- raise ElementError("Error trying to import sources together for {}: {}"
- .format(self.name, e),
- reason="import-source-files-fail")
+ raise ElementError(
+ "Error trying to import sources together for {}: {}".format(self.name, e),
+ reason="import-source-files-fail",
+ )
with utils._deterministic_umask():
vdirectory.import_files(import_dir)
@@ -1601,8 +1601,7 @@ class Element(Plugin):
self._update_ready_for_runtime_and_cached()
if self._get_workspace() and self._cached_success():
- assert utils._is_main_process(), \
- "Attempted to save workspace configuration from child process"
+ assert utils._is_main_process(), "Attempted to save workspace configuration from child process"
#
# Note that this block can only happen in the
# main process, since `self._cached_success()` cannot
@@ -1638,9 +1637,12 @@ class Element(Plugin):
with self._output_file() as output_file:
if not self.__sandbox_config_supported:
- self.warn("Sandbox configuration is not supported by the platform.",
- detail="Falling back to UID {} GID {}. Artifact will not be pushed."
- .format(self.__sandbox_config.build_uid, self.__sandbox_config.build_gid))
+ self.warn(
+ "Sandbox configuration is not supported by the platform.",
+ detail="Falling back to UID {} GID {}. Artifact will not be pushed.".format(
+ self.__sandbox_config.build_uid, self.__sandbox_config.build_gid
+ ),
+ )
# Explicitly clean it up, keep the build dir around if exceptions are raised
os.makedirs(context.builddir, exist_ok=True)
@@ -1650,13 +1652,14 @@ class Element(Plugin):
def cleanup_rootdir():
utils._force_rmtree(rootdir)
- with _signals.terminator(cleanup_rootdir), \
- self.__sandbox(rootdir, output_file, output_file, self.__sandbox_config) as sandbox: # noqa
+ with _signals.terminator(cleanup_rootdir), self.__sandbox(
+ rootdir, output_file, output_file, self.__sandbox_config
+ ) as sandbox: # noqa
# Let the sandbox know whether the buildtree will be required.
# This allows the remote execution sandbox to skip buildtree
# download when it's not needed.
- buildroot = self.get_variable('build-root')
+ buildroot = self.get_variable("build-root")
cache_buildtrees = context.cache_buildtrees
if cache_buildtrees != _CacheBuildTrees.NEVER:
always_cache_buildtrees = cache_buildtrees == _CacheBuildTrees.ALWAYS
@@ -1681,8 +1684,9 @@ class Element(Plugin):
self.stage(sandbox)
try:
if self.__batch_prepare_assemble:
- cm = sandbox.batch(self.__batch_prepare_assemble_flags,
- collect=self.__batch_prepare_assemble_collect)
+ cm = sandbox.batch(
+ self.__batch_prepare_assemble_flags, collect=self.__batch_prepare_assemble_collect
+ )
else:
cm = contextlib.suppress()
@@ -1724,11 +1728,13 @@ class Element(Plugin):
# result. Element types without a build-root dir will be cached
# with an empty buildtreedir regardless of this configuration.
- if cache_buildtrees == _CacheBuildTrees.ALWAYS or \
- (cache_buildtrees == _CacheBuildTrees.AUTO and not build_success):
+ if cache_buildtrees == _CacheBuildTrees.ALWAYS or (
+ cache_buildtrees == _CacheBuildTrees.AUTO and not build_success
+ ):
try:
sandbox_build_dir = sandbox_vroot.descend(
- *self.get_variable('build-root').lstrip(os.sep).split(os.sep))
+ *self.get_variable("build-root").lstrip(os.sep).split(os.sep)
+ )
sandbox._fetch_missing_blobs(sandbox_build_dir)
except VirtualDirectoryError:
# Directory could not be found. Pre-virtual
@@ -1747,14 +1753,13 @@ class Element(Plugin):
self._assemble_done()
with self.timed_activity("Caching artifact"):
- artifact_size = self.__artifact.cache(rootdir, sandbox_build_dir, collectvdir,
- buildresult, publicdata)
+ artifact_size = self.__artifact.cache(rootdir, sandbox_build_dir, collectvdir, buildresult, publicdata)
if collect is not None and collectvdir is None:
raise ElementError(
"Directory '{}' was not found inside the sandbox, "
- "unable to collect artifact contents"
- .format(collect))
+ "unable to collect artifact contents".format(collect)
+ )
return artifact_size
@@ -1855,8 +1860,7 @@ class Element(Plugin):
def _skip_source_push(self):
if not self.__sources or self._get_workspace():
return True
- return not (self.__sourcecache.has_push_remotes(plugin=self) and
- self._source_cached())
+ return not (self.__sourcecache.has_push_remotes(plugin=self) and self._source_cached())
def _source_push(self):
# try and push sources if we've got them
@@ -1931,8 +1935,9 @@ class Element(Plugin):
# Returns: Exit code
#
# If directory is not specified, one will be staged using scope
- def _shell(self, scope=None, directory=None, *, mounts=None, isolate=False, prompt=None, command=None,
- usebuildtree=False):
+ def _shell(
+ self, scope=None, directory=None, *, mounts=None, isolate=False, prompt=None, command=None, usebuildtree=False
+ ):
with self._prepare_sandbox(scope, directory, shell=True, usebuildtree=usebuildtree) as sandbox:
environment = self.get_environment()
@@ -1946,7 +1951,7 @@ class Element(Plugin):
shell_command, shell_environment, shell_host_files = project.get_shell_config()
if prompt is not None:
- environment['PS1'] = prompt
+ environment["PS1"] = prompt
# Special configurations for non-isolated sandboxes
if not isolate:
@@ -2002,8 +2007,7 @@ class Element(Plugin):
# additional support from Source implementations.
#
os.makedirs(context.builddir, exist_ok=True)
- with utils._tempdir(dir=context.builddir, prefix='workspace-{}'
- .format(self.normal_name)) as temp:
+ with utils._tempdir(dir=context.builddir, prefix="workspace-{}".format(self.normal_name)) as temp:
for source in self.sources():
source._init_workspace(temp)
@@ -2032,10 +2036,10 @@ class Element(Plugin):
script = script_template.format(
name=self.normal_name,
- build_root=self.get_variable('build-root'),
- install_root=self.get_variable('install-root'),
+ build_root=self.get_variable("build-root"),
+ install_root=self.get_variable("install-root"),
variables=variable_string,
- commands=self.generate_script()
+ commands=self.generate_script(),
)
os.makedirs(directory, exist_ok=True)
@@ -2120,8 +2124,7 @@ class Element(Plugin):
continue
# try and fetch from source cache
- if source._get_consistency() < Consistency.CACHED and \
- self.__sourcecache.has_fetch_remotes():
+ if source._get_consistency() < Consistency.CACHED and self.__sourcecache.has_fetch_remotes():
if self.__sourcecache.pull(source):
continue
@@ -2154,35 +2157,31 @@ class Element(Plugin):
# Generate dict that is used as base for all cache keys
if self.__cache_key_dict is None:
# Filter out nocache variables from the element's environment
- cache_env = {
- key: value
- for key, value in self.__environment.items()
- if key not in self.__env_nocache
- }
+ cache_env = {key: value for key, value in self.__environment.items() if key not in self.__env_nocache}
project = self._get_project()
self.__cache_key_dict = {
- 'core-artifact-version': BST_CORE_ARTIFACT_VERSION,
- 'element-plugin-key': self.get_unique_key(),
- 'element-plugin-name': self.get_kind(),
- 'element-plugin-version': self.BST_ARTIFACT_VERSION,
- 'sandbox': self.__sandbox_config.get_unique_key(),
- 'environment': cache_env,
- 'public': self.__public.strip_node_info()
+ "core-artifact-version": BST_CORE_ARTIFACT_VERSION,
+ "element-plugin-key": self.get_unique_key(),
+ "element-plugin-name": self.get_kind(),
+ "element-plugin-version": self.BST_ARTIFACT_VERSION,
+ "sandbox": self.__sandbox_config.get_unique_key(),
+ "environment": cache_env,
+ "public": self.__public.strip_node_info(),
}
- self.__cache_key_dict['sources'] = []
+ self.__cache_key_dict["sources"] = []
for source in self.__sources:
- self.__cache_key_dict['sources'].append(
- {'key': source._get_unique_key(),
- 'name': source._get_source_name()})
+ self.__cache_key_dict["sources"].append(
+ {"key": source._get_unique_key(), "name": source._get_source_name()}
+ )
- self.__cache_key_dict['fatal-warnings'] = sorted(project._fatal_warnings)
+ self.__cache_key_dict["fatal-warnings"] = sorted(project._fatal_warnings)
cache_key_dict = self.__cache_key_dict.copy()
- cache_key_dict['dependencies'] = dependencies
+ cache_key_dict["dependencies"] = dependencies
return _cachekey.generate_key(cache_key_dict)
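
The cache-key dictionary assembled above (core artifact version, plugin key and version, sandbox and environment configuration, public data, sources and dependencies) is finally handed to _cachekey.generate_key(), whose implementation is not part of this diff. One plausible stand-in, assuming the key is a digest of a canonical serialisation of that dict; all field values are invented for the example:

    import hashlib
    import json

    def generate_key_sketch(value):
        # Hypothetical stand-in for _cachekey.generate_key(): hash a canonical
        # (sorted-keys) JSON serialisation so identical dicts give identical keys.
        serialized = json.dumps(value, sort_keys=True).encode("utf-8")
        return hashlib.sha256(serialized).hexdigest()

    cache_key_dict = {
        "core-artifact-version": 0,  # placeholder values, not taken from BuildStream
        "element-plugin-name": "manual",
        "element-plugin-version": 0,
        "environment": {"PATH": "/usr/bin:/bin"},
        "public": {},
        "sources": [{"key": "abc123", "name": "source-0"}],
        "fatal-warnings": [],
        "dependencies": [],
    }
    print(generate_key_sketch(cache_key_dict))
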
@@ -2216,8 +2215,9 @@ class Element(Plugin):
Args:
fetch_original (bool): whether we need the original unstaged source
"""
- if (self._get_consistency() == Consistency.CACHED and fetch_original) or \
- (self._source_cached() and not fetch_original):
+ if (self._get_consistency() == Consistency.CACHED and fetch_original) or (
+ self._source_cached() and not fetch_original
+ ):
return False
else:
return True
@@ -2299,8 +2299,7 @@ class Element(Plugin):
#
def _update_ready_for_runtime_and_cached(self):
if not self.__ready_for_runtime_and_cached:
- if self.__runtime_deps_uncached == 0 and self._cached_success() and \
- self.__cache_key:
+ if self.__runtime_deps_uncached == 0 and self._cached_success() and self.__cache_key:
self.__ready_for_runtime_and_cached = True
# Notify reverse dependencies
@@ -2450,6 +2449,7 @@ class Element(Plugin):
self.prepare(sandbox)
if workspace:
+
def mark_workspace_prepared():
workspace.prepared = True
@@ -2466,23 +2466,31 @@ class Element(Plugin):
if self.BST_FORBID_RDEPENDS and self.BST_FORBID_BDEPENDS:
if any(self.dependencies(Scope.RUN, recurse=False)) or any(self.dependencies(Scope.BUILD, recurse=False)):
- raise ElementError("{}: Dependencies are forbidden for '{}' elements"
- .format(self, self.get_kind()), reason="element-forbidden-depends")
+ raise ElementError(
+ "{}: Dependencies are forbidden for '{}' elements".format(self, self.get_kind()),
+ reason="element-forbidden-depends",
+ )
if self.BST_FORBID_RDEPENDS:
if any(self.dependencies(Scope.RUN, recurse=False)):
- raise ElementError("{}: Runtime dependencies are forbidden for '{}' elements"
- .format(self, self.get_kind()), reason="element-forbidden-rdepends")
+ raise ElementError(
+ "{}: Runtime dependencies are forbidden for '{}' elements".format(self, self.get_kind()),
+ reason="element-forbidden-rdepends",
+ )
if self.BST_FORBID_BDEPENDS:
if any(self.dependencies(Scope.BUILD, recurse=False)):
- raise ElementError("{}: Build dependencies are forbidden for '{}' elements"
- .format(self, self.get_kind()), reason="element-forbidden-bdepends")
+ raise ElementError(
+ "{}: Build dependencies are forbidden for '{}' elements".format(self, self.get_kind()),
+ reason="element-forbidden-bdepends",
+ )
if self.BST_FORBID_SOURCES:
if any(self.sources()):
- raise ElementError("{}: Sources are forbidden for '{}' elements"
- .format(self, self.get_kind()), reason="element-forbidden-sources")
+ raise ElementError(
+ "{}: Sources are forbidden for '{}' elements".format(self, self.get_kind()),
+ reason="element-forbidden-sources",
+ )
try:
self.preflight()
@@ -2492,9 +2500,10 @@ class Element(Plugin):
# Ensure that the first source does not need access to previous sources
if self.__sources and self.__sources[0]._requires_previous_sources():
- raise ElementError("{}: {} cannot be the first source of an element "
- "as it requires access to previous sources"
- .format(self, self.__sources[0]))
+ raise ElementError(
+ "{}: {} cannot be the first source of an element "
+ "as it requires access to previous sources".format(self, self.__sources[0])
+ )
# Preflight the sources
for source in self.sources():
@@ -2505,8 +2514,7 @@ class Element(Plugin):
# Raises an error if the artifact is not cached.
#
def __assert_cached(self):
- assert self._cached(), "{}: Missing artifact {}".format(
- self, self._get_brief_display_key())
+ assert self._cached(), "{}: Missing artifact {}".format(self, self._get_brief_display_key())
# __get_tainted():
#
@@ -2532,8 +2540,7 @@ class Element(Plugin):
workspaced_dependencies = self.__artifact.get_metadata_workspaced_dependencies()
# Other conditions should be or-ed
- self.__tainted = (workspaced or workspaced_dependencies or
- not self.__sandbox_config_supported)
+ self.__tainted = workspaced or workspaced_dependencies or not self.__sandbox_config_supported
return self.__tainted
@@ -2572,36 +2579,45 @@ class Element(Plugin):
if directory is not None and allow_remote and self.__use_remote_execution():
if not self.BST_VIRTUAL_DIRECTORY:
- raise ElementError("Element {} is configured to use remote execution but plugin does not support it."
- .format(self.name), detail="Plugin '{kind}' does not support virtual directories."
- .format(kind=self.get_kind()))
+ raise ElementError(
+ "Element {} is configured to use remote execution but plugin does not support it.".format(
+ self.name
+ ),
+ detail="Plugin '{kind}' does not support virtual directories.".format(kind=self.get_kind()),
+ )
self.info("Using a remote sandbox for artifact {} with directory '{}'".format(self.name, directory))
output_files_required = context.require_artifact_files or self._artifact_files_required()
- sandbox = SandboxRemote(context, project,
- directory,
- plugin=self,
- stdout=stdout,
- stderr=stderr,
- config=config,
- specs=self.__remote_execution_specs,
- bare_directory=bare_directory,
- allow_real_directory=False,
- output_files_required=output_files_required)
+ sandbox = SandboxRemote(
+ context,
+ project,
+ directory,
+ plugin=self,
+ stdout=stdout,
+ stderr=stderr,
+ config=config,
+ specs=self.__remote_execution_specs,
+ bare_directory=bare_directory,
+ allow_real_directory=False,
+ output_files_required=output_files_required,
+ )
yield sandbox
elif directory is not None and os.path.exists(directory):
- sandbox = platform.create_sandbox(context, project,
- directory,
- plugin=self,
- stdout=stdout,
- stderr=stderr,
- config=config,
- bare_directory=bare_directory,
- allow_real_directory=not self.BST_VIRTUAL_DIRECTORY)
+ sandbox = platform.create_sandbox(
+ context,
+ project,
+ directory,
+ plugin=self,
+ stdout=stdout,
+ stderr=stderr,
+ config=config,
+ bare_directory=bare_directory,
+ allow_real_directory=not self.BST_VIRTUAL_DIRECTORY,
+ )
yield sandbox
else:
@@ -2609,8 +2625,9 @@ class Element(Plugin):
rootdir = tempfile.mkdtemp(prefix="{}-".format(self.normal_name), dir=context.builddir)
# Recursive contextmanager...
- with self.__sandbox(rootdir, stdout=stdout, stderr=stderr, config=config,
- allow_remote=allow_remote, bare_directory=False) as sandbox:
+ with self.__sandbox(
+ rootdir, stdout=stdout, stderr=stderr, config=config, allow_remote=allow_remote, bare_directory=False
+ ) as sandbox:
yield sandbox
# Cleanup the build dir
@@ -2632,9 +2649,9 @@ class Element(Plugin):
# Extend project wide split rules with any split rules defined by the element
element_splits._composite(splits)
- element_bst['split-rules'] = splits
- element_public['bst'] = element_bst
- defaults['public'] = element_public
+ element_bst["split-rules"] = splits
+ element_public["bst"] = element_bst
+ defaults["public"] = element_public
@classmethod
def __init_defaults(cls, project, plugin_conf, kind, is_junction):
@@ -2704,7 +2721,7 @@ class Element(Plugin):
else:
project_nocache = project.base_env_nocache
- default_nocache = cls.__defaults.get_str_list('environment-nocache', default=[])
+ default_nocache = cls.__defaults.get_str_list("environment-nocache", default=[])
element_nocache = meta.env_nocache
# Accumulate values from the element default, the project and the element
@@ -2719,7 +2736,7 @@ class Element(Plugin):
#
@classmethod
def __extract_variables(cls, project, meta):
- default_vars = cls.__defaults.get_mapping('variables', default={})
+ default_vars = cls.__defaults.get_mapping("variables", default={})
if meta.is_junction:
variables = project.first_pass_config.base_variables.clone()
@@ -2730,7 +2747,7 @@ class Element(Plugin):
meta.variables._composite(variables)
variables._assert_fully_composited()
- for var in ('project-name', 'element-name', 'max-jobs'):
+ for var in ("project-name", "element-name", "max-jobs"):
node = variables.get_node(var, allow_none=True)
if node is None:
@@ -2738,8 +2755,10 @@ class Element(Plugin):
provenance = node.get_provenance()
if not provenance._is_synthetic:
- raise LoadError("{}: invalid redefinition of protected variable '{}'"
- .format(provenance, var), LoadErrorReason.PROTECTED_VARIABLE_REDEFINED)
+ raise LoadError(
+ "{}: invalid redefinition of protected variable '{}'".format(provenance, var),
+ LoadErrorReason.PROTECTED_VARIABLE_REDEFINED,
+ )
return variables
@@ -2750,7 +2769,7 @@ class Element(Plugin):
def __extract_config(cls, meta):
# The default config is already composited with the project overrides
- config = cls.__defaults.get_mapping('config', default={})
+ config = cls.__defaults.get_mapping("config", default={})
config = config.clone()
meta.config._composite(config)
@@ -2763,10 +2782,7 @@ class Element(Plugin):
@classmethod
def __extract_sandbox_config(cls, context, project, meta):
if meta.is_junction:
- sandbox_config = Node.from_dict({
- 'build-uid': 0,
- 'build-gid': 0
- })
+ sandbox_config = Node.from_dict({"build-uid": 0, "build-gid": 0})
else:
sandbox_config = project._sandbox.clone()
@@ -2776,7 +2792,7 @@ class Element(Plugin):
host_os = platform.get_host_os()
# The default config is already composited with the project overrides
- sandbox_defaults = cls.__defaults.get_mapping('sandbox', default={})
+ sandbox_defaults = cls.__defaults.get_mapping("sandbox", default={})
sandbox_defaults = sandbox_defaults.clone()
sandbox_defaults._composite(sandbox_config)
@@ -2784,41 +2800,42 @@ class Element(Plugin):
sandbox_config._assert_fully_composited()
# Sandbox config, unlike others, has fixed members so we should validate them
- sandbox_config.validate_keys(['build-uid', 'build-gid', 'build-os', 'build-arch'])
+ sandbox_config.validate_keys(["build-uid", "build-gid", "build-os", "build-arch"])
- build_arch = sandbox_config.get_str('build-arch', default=None)
+ build_arch = sandbox_config.get_str("build-arch", default=None)
if build_arch:
build_arch = Platform.canonicalize_arch(build_arch)
else:
build_arch = host_arch
return SandboxConfig(
- sandbox_config.get_int('build-uid'),
- sandbox_config.get_int('build-gid'),
- sandbox_config.get_str('build-os', default=host_os),
- build_arch)
+ sandbox_config.get_int("build-uid"),
+ sandbox_config.get_int("build-gid"),
+ sandbox_config.get_str("build-os", default=host_os),
+ build_arch,
+ )
# This makes a special exception for the split rules, which
# elements may extend but whose defaults are defined in the project.
#
@classmethod
def __extract_public(cls, meta):
- base_public = cls.__defaults.get_mapping('public', default={})
+ base_public = cls.__defaults.get_mapping("public", default={})
base_public = base_public.clone()
- base_bst = base_public.get_mapping('bst', default={})
- base_splits = base_bst.get_mapping('split-rules', default={})
+ base_bst = base_public.get_mapping("bst", default={})
+ base_splits = base_bst.get_mapping("split-rules", default={})
element_public = meta.public.clone()
- element_bst = element_public.get_mapping('bst', default={})
- element_splits = element_bst.get_mapping('split-rules', default={})
+ element_bst = element_public.get_mapping("bst", default={})
+ element_splits = element_bst.get_mapping("split-rules", default={})
# Allow elements to extend the default splits defined in their project or
# element specific defaults
element_splits._composite(base_splits)
- element_bst['split-rules'] = base_splits
- element_public['bst'] = element_bst
+ element_bst["split-rules"] = base_splits
+ element_public["bst"] = element_bst
element_public._assert_fully_composited()
@@ -2826,24 +2843,21 @@ class Element(Plugin):
# Expand the splits in the public data using the Variables in the element
def __expand_splits(self, element_public):
- element_bst = element_public.get_mapping('bst', default={})
- element_splits = element_bst.get_mapping('split-rules', default={})
+ element_bst = element_public.get_mapping("bst", default={})
+ element_splits = element_bst.get_mapping("split-rules", default={})
# Resolve any variables in the public split rules directly
for domain, splits in element_splits.items():
- splits = [
- self.__variables.subst(split.strip())
- for split in splits.as_str_list()
- ]
+ splits = [self.__variables.subst(split.strip()) for split in splits.as_str_list()]
element_splits[domain] = splits
return element_public
def __init_splits(self):
- bstdata = self.get_public_data('bst')
- splits = bstdata.get_mapping('split-rules')
+ bstdata = self.get_public_data("bst")
+ splits = bstdata.get_mapping("split-rules")
self.__splits = {
- domain: re.compile('^(?:' + '|'.join([utils._glob2re(r) for r in rules.as_str_list()]) + ')$')
+ domain: re.compile("^(?:" + "|".join([utils._glob2re(r) for r in rules.as_str_list()]) + ")$")
for domain, rules in splits.items()
}
@@ -2944,10 +2958,10 @@ class Element(Plugin):
# the build, but I can think of no reason to change it mid-build.
# If this ever changes, things will go wrong unexpectedly.
if not self.__whitelist_regex:
- bstdata = self.get_public_data('bst')
- whitelist = bstdata.get_str_list('overlap-whitelist', default=[])
+ bstdata = self.get_public_data("bst")
+ whitelist = bstdata.get_str_list("overlap-whitelist", default=[])
whitelist_expressions = [utils._glob2re(self.__variables.subst(exp.strip())) for exp in whitelist]
- expression = ('^(?:' + '|'.join(whitelist_expressions) + ')$')
+ expression = "^(?:" + "|".join(whitelist_expressions) + ")$"
self.__whitelist_regex = re.compile(expression)
return self.__whitelist_regex.match(os.path.join(os.sep, path))
@@ -3005,8 +3019,7 @@ class Element(Plugin):
#
def __pull_weak(self, *, pull_buildtrees):
weak_key = self._get_cache_key(strength=_KeyStrength.WEAK)
- if not self.__artifacts.pull(self, weak_key,
- pull_buildtrees=pull_buildtrees):
+ if not self.__artifacts.pull(self, weak_key, pull_buildtrees=pull_buildtrees):
return False
# extract strong cache key from this newly fetched artifact
@@ -3159,8 +3172,9 @@ class Element(Plugin):
return
if not self.__strict_artifact:
- self.__strict_artifact = Artifact(self, context, strong_key=self.__strict_cache_key,
- weak_key=self.__weak_cache_key)
+ self.__strict_artifact = Artifact(
+ self, context, strong_key=self.__strict_cache_key, weak_key=self.__weak_cache_key
+ )
if context.get_strict():
self.__artifact = self.__strict_artifact
@@ -3192,9 +3206,7 @@ class Element(Plugin):
self.__cache_key = strong_key
elif self.__assemble_scheduled or self.__assemble_done:
# Artifact will or has been built, not downloaded
- dependencies = [
- e._get_cache_key() for e in self.dependencies(Scope.BUILD)
- ]
+ dependencies = [e._get_cache_key() for e in self.dependencies(Scope.BUILD)]
self.__cache_key = self._calculate_cache_key(dependencies)
if self.__cache_key is None:
@@ -3216,8 +3228,7 @@ class Element(Plugin):
#
def __update_strict_cache_key_of_rdeps(self):
if not self.__updated_strict_cache_keys_of_rdeps:
- if self.__runtime_deps_without_strict_cache_key == 0 and \
- self.__strict_cache_key is not None:
+ if self.__runtime_deps_without_strict_cache_key == 0 and self.__strict_cache_key is not None:
self.__updated_strict_cache_keys_of_rdeps = True
# Notify reverse dependencies
@@ -3251,8 +3262,7 @@ class Element(Plugin):
#
def __update_ready_for_runtime(self):
if not self.__ready_for_runtime:
- if self.__runtime_deps_without_cache_key == 0 and \
- self.__cache_key is not None:
+ if self.__runtime_deps_without_cache_key == 0 and self.__cache_key is not None:
self.__ready_for_runtime = True
# Notify reverse dependencies
@@ -3279,10 +3289,12 @@ class Element(Plugin):
def _overlap_error_detail(f, forbidden_overlap_elements, elements):
if forbidden_overlap_elements:
- return ("/{}: {} {} not permitted to overlap other elements, order {} \n"
- .format(f, " and ".join(forbidden_overlap_elements),
- "is" if len(forbidden_overlap_elements) == 1 else "are",
- " above ".join(reversed(elements))))
+ return "/{}: {} {} not permitted to overlap other elements, order {} \n".format(
+ f,
+ " and ".join(forbidden_overlap_elements),
+ "is" if len(forbidden_overlap_elements) == 1 else "are",
+ " above ".join(reversed(elements)),
+ )
else:
return ""
@@ -3299,7 +3311,7 @@ def _overlap_error_detail(f, forbidden_overlap_elements, elements):
# (str): The normalised element name
#
def _get_normal_name(element_name):
- return os.path.splitext(element_name.replace(os.sep, '-'))[0]
+ return os.path.splitext(element_name.replace(os.sep, "-"))[0]
# _compose_artifact_name():
@@ -3315,12 +3327,9 @@ def _get_normal_name(element_name):
# (str): The constructed artifact name path
#
def _compose_artifact_name(project_name, normal_name, cache_key):
- valid_chars = string.digits + string.ascii_letters + '-._'
- normal_name = ''.join([
- x if x in valid_chars else '_'
- for x in normal_name
- ])
+ valid_chars = string.digits + string.ascii_letters + "-._"
+ normal_name = "".join([x if x in valid_chars else "_" for x in normal_name])
# Note that project names are not allowed to contain slashes. Element names containing
# a '/' will have this replaced with a '-' upon Element object instantiation.
- return '{0}/{1}/{2}'.format(project_name, normal_name, cache_key)
+ return "{0}/{1}/{2}".format(project_name, normal_name, cache_key)
diff --git a/src/buildstream/plugin.py b/src/buildstream/plugin.py
index c1ee333f7..0cbd72e27 100644
--- a/src/buildstream/plugin.py
+++ b/src/buildstream/plugin.py
@@ -127,10 +127,11 @@ if TYPE_CHECKING:
# pylint: disable=cyclic-import
from ._context import Context
from ._project import Project
+
# pylint: enable=cyclic-import
-class Plugin():
+class Plugin:
"""Plugin()
Base Plugin class.
@@ -210,15 +211,17 @@ class Plugin():
#
# Note that Plugins can only be instantiated in the main process before
# scheduling tasks.
- __TABLE = WeakValueDictionary() # type: WeakValueDictionary[int, Plugin]
-
- def __init__(self,
- name: str,
- context: 'Context',
- project: 'Project',
- provenance: ProvenanceInformation,
- type_tag: str,
- unique_id: Optional[int] = None):
+ __TABLE = WeakValueDictionary() # type: WeakValueDictionary[int, Plugin]
+
+ def __init__(
+ self,
+ name: str,
+ context: "Context",
+ project: "Project",
+ provenance: ProvenanceInformation,
+ type_tag: str,
+ unique_id: Optional[int] = None,
+ ):
self.name = name
"""The plugin name
@@ -248,30 +251,29 @@ class Plugin():
# plugin in a subprocess and should use the same ID.
self._unique_id = unique_id
- self.__context = context # The Context object
+ self.__context = context # The Context object
# Note that when pickling jobs over to a child process, we rely on this
# reference to the Project, it keeps the plugin factory alive. If the
# factory were to be GC'd then we would see undefined behaviour. Make
# sure to test plugin pickling if this reference is to be removed.
- self.__project = project # The Project object
+ self.__project = project # The Project object
self.__provenance = provenance # The Provenance information
- self.__type_tag = type_tag # The type of plugin (element or source)
- self.__configuring = False # Whether we are currently configuring
+ self.__type_tag = type_tag # The type of plugin (element or source)
+ self.__configuring = False # Whether we are currently configuring
# Get the full_name as project & type_tag are resolved
self.__full_name = self.__get_full_name()
# Infer the kind identifier
modulename = type(self).__module__
- self.__kind = modulename.split('.')[-1]
+ self.__kind = modulename.split(".")[-1]
self.debug("Created: {}".format(self))
# If this plugin has been deprecated, emit a warning.
if self.BST_PLUGIN_DEPRECATED and not self.__deprecation_warning_silenced():
- detail = "Using deprecated plugin {}: {}".format(self.__kind,
- self.BST_PLUGIN_DEPRECATION_MESSAGE)
+ detail = "Using deprecated plugin {}: {}".format(self.__kind, self.BST_PLUGIN_DEPRECATION_MESSAGE)
self.__message(MessageType.WARN, detail)
def __del__(self):
@@ -282,9 +284,8 @@ class Plugin():
def __str__(self):
return "{kind} {typetag} at {provenance}".format(
- kind=self.__kind,
- typetag=self.__type_tag,
- provenance=self.__provenance)
+ kind=self.__kind, typetag=self.__type_tag, provenance=self.__provenance
+ )
#############################################################
# Abstract Methods #
@@ -312,8 +313,9 @@ class Plugin():
:func:`Element.node_subst_member() <buildstream.element.Element.node_subst_member>`
method can be used.
"""
- raise ImplError("{tag} plugin '{kind}' does not implement configure()".format(
- tag=self.__type_tag, kind=self.get_kind()))
+ raise ImplError(
+ "{tag} plugin '{kind}' does not implement configure()".format(tag=self.__type_tag, kind=self.get_kind())
+ )
def preflight(self) -> None:
"""Preflight Check
@@ -333,8 +335,9 @@ class Plugin():
them with :func:`utils.get_host_tool() <buildstream.utils.get_host_tool>` which
will raise an error automatically informing the user that a host tool is needed.
"""
- raise ImplError("{tag} plugin '{kind}' does not implement preflight()".format(
- tag=self.__type_tag, kind=self.get_kind()))
+ raise ImplError(
+ "{tag} plugin '{kind}' does not implement preflight()".format(tag=self.__type_tag, kind=self.get_kind())
+ )
def get_unique_key(self) -> SourceRef:
"""Return something which uniquely identifies the plugin input
@@ -355,8 +358,11 @@ class Plugin():
which is to say that the Source is expected to have an exact *ref* indicating
exactly what source is going to be staged.
"""
- raise ImplError("{tag} plugin '{kind}' does not implement get_unique_key()".format(
- tag=self.__type_tag, kind=self.get_kind()))
+ raise ImplError(
+ "{tag} plugin '{kind}' does not implement get_unique_key()".format(
+ tag=self.__type_tag, kind=self.get_kind()
+ )
+ )
#############################################################
# Public Methods #
@@ -369,8 +375,7 @@ class Plugin():
"""
return self.__kind
- def node_get_project_path(self, node, *,
- check_is_file=False, check_is_dir=False):
+ def node_get_project_path(self, node, *, check_is_file=False, check_is_dir=False):
"""Fetches a project path from a dictionary node and validates it
Paths are asserted to never lead to a directory outside of the
@@ -408,9 +413,7 @@ class Plugin():
"""
- return self.__project.get_path_from_node(node,
- check_is_file=check_is_file,
- check_is_dir=check_is_dir)
+ return self.__project.get_path_from_node(node, check_is_file=check_is_file, check_is_dir=check_is_dir)
def debug(self, brief: str, *, detail: Optional[str] = None) -> None:
"""Print a debugging message
@@ -485,11 +488,9 @@ class Plugin():
self.__message(MessageType.LOG, brief, detail=detail)
@contextmanager
- def timed_activity(self,
- activity_name: str,
- *,
- detail: Optional[str] = None,
- silent_nested: bool = False) -> Generator[None, None, None]:
+ def timed_activity(
+ self, activity_name: str, *, detail: Optional[str] = None, silent_nested: bool = False
+ ) -> Generator[None, None, None]:
"""Context manager for performing timed activities in plugins
Args:
@@ -511,10 +512,9 @@ class Plugin():
# This will raise SourceError on its own
self.call(... command which takes time ...)
"""
- with self.__context.messenger.timed_activity(activity_name,
- element_name=self._get_full_name(),
- detail=detail,
- silent_nested=silent_nested):
+ with self.__context.messenger.timed_activity(
+ activity_name, element_name=self._get_full_name(), detail=detail, silent_nested=silent_nested
+ ):
yield
def call(self, *popenargs, fail: Optional[str] = None, fail_temporarily: bool = False, **kwargs) -> int:
@@ -722,8 +722,11 @@ class Plugin():
# so it's not an ImplError - those apply to custom plugins. Direct
# descendants of Plugin must implement this, e.g. Element and Source.
# Raise NotImplementedError as this would be an internal bug.
- raise NotImplementedError("{tag} plugin '{kind}' does not implement _get_args_for_child_job_pickling()".format(
- tag=self.__type_tag, kind=self.get_kind()))
+ raise NotImplementedError(
+ "{tag} plugin '{kind}' does not implement _get_args_for_child_job_pickling()".format(
+ tag=self.__type_tag, kind=self.get_kind()
+ )
+ )
#############################################################
# Local Private Methods #
@@ -734,20 +737,19 @@ class Plugin():
def __call(self, *popenargs, collect_stdout=False, fail=None, fail_temporarily=False, **kwargs):
with self._output_file() as output_file:
- if 'stdout' not in kwargs:
- kwargs['stdout'] = output_file
- if 'stderr' not in kwargs:
- kwargs['stderr'] = output_file
+ if "stdout" not in kwargs:
+ kwargs["stdout"] = output_file
+ if "stderr" not in kwargs:
+ kwargs["stderr"] = output_file
if collect_stdout:
- kwargs['stdout'] = subprocess.PIPE
+ kwargs["stdout"] = subprocess.PIPE
self.__note_command(output_file, *popenargs, **kwargs)
exit_code, output = utils._call(*popenargs, **kwargs)
if fail and exit_code:
- raise PluginError("{plugin}: {message}".format(plugin=self, message=fail),
- temporary=fail_temporarily)
+ raise PluginError("{plugin}: {message}".format(plugin=self, message=fail), temporary=fail_temporarily)
return (exit_code, output)
@@ -756,11 +758,11 @@ class Plugin():
self.__context.messenger.message(message)
def __note_command(self, output, *popenargs, **kwargs):
- workdir = kwargs.get('cwd', os.getcwd())
+ workdir = kwargs.get("cwd", os.getcwd())
command = " ".join(popenargs[0])
- output.write('Running host command {}: {}\n'.format(workdir, command))
+ output.write("Running host command {}: {}\n".format(workdir, command))
output.flush()
- self.status('Running host command', detail=command)
+ self.status("Running host command", detail=command)
def __deprecation_warning_silenced(self):
if not self.BST_PLUGIN_DEPRECATED:
@@ -770,10 +772,10 @@ class Plugin():
project = self.__project
for key, value in project.element_overrides.items():
- if value.get_bool('suppress-deprecation-warnings', default=False):
+ if value.get_bool("suppress-deprecation-warnings", default=False):
silenced_warnings.add(key)
for key, value in project.source_overrides.items():
- if value.get_bool('suppress-deprecation-warnings', default=False):
+ if value.get_bool("suppress-deprecation-warnings", default=False):
silenced_warnings.add(key)
return self.get_kind() in silenced_warnings
@@ -783,18 +785,14 @@ class Plugin():
# Set the name, depending on element or source plugin type
name = self._element_name if self.__type_tag == "source" else self.name # pylint: disable=no-member
if project.junction:
- return '{}:{}'.format(project.junction.name, name)
+ return "{}:{}".format(project.junction.name, name)
else:
return name
# A local table for _prefix_warning()
#
-__CORE_WARNINGS = [
- value
- for name, value in CoreWarnings.__dict__.items()
- if not name.startswith("__")
-]
+__CORE_WARNINGS = [value for name, value in CoreWarnings.__dict__.items() if not name.startswith("__")]
# _prefix_warning():
diff --git a/src/buildstream/plugins/elements/autotools.py b/src/buildstream/plugins/elements/autotools.py
index 7a05336b7..089c9bca0 100644
--- a/src/buildstream/plugins/elements/autotools.py
+++ b/src/buildstream/plugins/elements/autotools.py
@@ -66,8 +66,7 @@ class AutotoolsElement(BuildElement):
# Enable command batching across prepare() and assemble()
def configure_sandbox(self, sandbox):
super().configure_sandbox(sandbox)
- self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
- collect=self.get_variable('install-root'))
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root"))
# Plugin entry point
diff --git a/src/buildstream/plugins/elements/compose.py b/src/buildstream/plugins/elements/compose.py
index 511925731..063c5d44f 100644
--- a/src/buildstream/plugins/elements/compose.py
+++ b/src/buildstream/plugins/elements/compose.py
@@ -62,27 +62,23 @@ class ComposeElement(Element):
BST_RUN_COMMANDS = False
def configure(self, node):
- node.validate_keys([
- 'integrate', 'include', 'exclude', 'include-orphans'
- ])
+ node.validate_keys(["integrate", "include", "exclude", "include-orphans"])
# We name this variable 'integration' only to avoid
# collision with the Element.integrate() method.
- self.integration = node.get_bool('integrate')
- self.include = node.get_str_list('include')
- self.exclude = node.get_str_list('exclude')
- self.include_orphans = node.get_bool('include-orphans')
+ self.integration = node.get_bool("integrate")
+ self.include = node.get_str_list("include")
+ self.exclude = node.get_str_list("exclude")
+ self.include_orphans = node.get_bool("include-orphans")
def preflight(self):
pass
def get_unique_key(self):
- key = {'integrate': self.integration,
- 'include': sorted(self.include),
- 'orphans': self.include_orphans}
+ key = {"integrate": self.integration, "include": sorted(self.include), "orphans": self.include_orphans}
if self.exclude:
- key['exclude'] = sorted(self.exclude)
+ key["exclude"] = sorted(self.exclude)
return key
@@ -104,9 +100,9 @@ class ComposeElement(Element):
if require_split:
with self.timed_activity("Computing split", silent_nested=True):
for dep in self.dependencies(Scope.BUILD):
- files = dep.compute_manifest(include=self.include,
- exclude=self.exclude,
- orphans=self.include_orphans)
+ files = dep.compute_manifest(
+ include=self.include, exclude=self.exclude, orphans=self.include_orphans
+ )
manifest.update(files)
# Make a snapshot of all the files.
@@ -141,13 +137,16 @@ class ComposeElement(Element):
for path in basedir_contents:
if path not in snapshot:
added_files.add(path)
- self.info("Integration modified {}, added {} and removed {} files"
- .format(len(modified_files), len(added_files), len(removed_files)))
+ self.info(
+ "Integration modified {}, added {} and removed {} files".format(
+ len(modified_files), len(added_files), len(removed_files)
+ )
+ )
# The remainder of this is expensive, make an early exit if
# we're not being selective about what is to be included.
if not require_split:
- return '/'
+ return "/"
# Do we want to force include files which were modified by
# the integration commands, even if they were not added ?
@@ -159,7 +158,7 @@ class ComposeElement(Element):
# instead of into a subdir. The element assemble() method should
# support this in some way.
#
- installdir = vbasedir.descend('buildstream', 'install', create=True)
+ installdir = vbasedir.descend("buildstream", "install", create=True)
# We already saved the manifest for created files in the integration phase,
# now collect the rest of the manifest.
@@ -189,7 +188,7 @@ class ComposeElement(Element):
installdir.import_files(vbasedir, filter_callback=import_filter, can_link=True)
# And we're done
- return os.path.join(os.sep, 'buildstream', 'install')
+ return os.path.join(os.sep, "buildstream", "install")
# Plugin entry point
diff --git a/src/buildstream/plugins/elements/filter.py b/src/buildstream/plugins/elements/filter.py
index d808c9e5a..71ed1f6cb 100644
--- a/src/buildstream/plugins/elements/filter.py
+++ b/src/buildstream/plugins/elements/filter.py
@@ -167,17 +167,15 @@ class FilterElement(Element):
BST_RUN_COMMANDS = False
def configure(self, node):
- node.validate_keys([
- 'include', 'exclude', 'include-orphans', 'pass-integration'
- ])
+ node.validate_keys(["include", "exclude", "include-orphans", "pass-integration"])
- self.include_node = node.get_sequence('include')
- self.exclude_node = node.get_sequence('exclude')
+ self.include_node = node.get_sequence("include")
+ self.exclude_node = node.get_sequence("exclude")
self.include = self.include_node.as_str_list()
self.exclude = self.exclude_node.as_str_list()
- self.include_orphans = node.get_bool('include-orphans')
- self.pass_integration = node.get_bool('pass-integration', False)
+ self.include_orphans = node.get_bool("include-orphans")
+ self.pass_integration = node.get_bool("pass-integration", False)
def preflight(self):
# Exactly one build-depend is permitted
@@ -186,9 +184,13 @@ class FilterElement(Element):
detail = "Full list of build-depends:\n"
deps_list = " \n".join([x.name for x in build_deps])
detail += deps_list
- raise ElementError("{}: {} element must have exactly 1 build-dependency, actually have {}"
- .format(self, type(self).__name__, len(build_deps)),
- detail=detail, reason="filter-bdepend-wrong-count")
+ raise ElementError(
+ "{}: {} element must have exactly 1 build-dependency, actually have {}".format(
+ self, type(self).__name__, len(build_deps)
+ ),
+ detail=detail,
+ reason="filter-bdepend-wrong-count",
+ )
# That build-depend must not also be a runtime-depend
runtime_deps = list(self.dependencies(Scope.RUN, recurse=False))
@@ -196,23 +198,29 @@ class FilterElement(Element):
detail = "Full list of runtime depends:\n"
deps_list = " \n".join([x.name for x in runtime_deps])
detail += deps_list
- raise ElementError("{}: {} element's build dependency must not also be a runtime dependency"
- .format(self, type(self).__name__),
- detail=detail, reason="filter-bdepend-also-rdepend")
+ raise ElementError(
+ "{}: {} element's build dependency must not also be a runtime dependency".format(
+ self, type(self).__name__
+ ),
+ detail=detail,
+ reason="filter-bdepend-also-rdepend",
+ )
# If a parent does not produce an artifact, fail and inform user that the dependency
# must produce artifacts
if not build_deps[0].BST_ELEMENT_HAS_ARTIFACT:
detail = "{} does not produce an artifact, so there is nothing to filter".format(build_deps[0].name)
- raise ElementError("{}: {} element's build dependency must produce an artifact"
- .format(self, type(self).__name__),
- detail=detail, reason="filter-bdepend-no-artifact")
+ raise ElementError(
+ "{}: {} element's build dependency must produce an artifact".format(self, type(self).__name__),
+ detail=detail,
+ reason="filter-bdepend-no-artifact",
+ )
def get_unique_key(self):
key = {
- 'include': sorted(self.include),
- 'exclude': sorted(self.exclude),
- 'orphans': self.include_orphans,
+ "include": sorted(self.include),
+ "exclude": sorted(self.exclude),
+ "orphans": self.include_orphans,
}
return key
@@ -226,8 +234,8 @@ class FilterElement(Element):
with self.timed_activity("Staging artifact", silent_nested=True):
for dep in self.dependencies(Scope.BUILD, recurse=False):
# Check that all the included/excluded domains exist
- pub_data = dep.get_public_data('bst')
- split_rules = pub_data.get_mapping('split-rules', {})
+ pub_data = dep.get_public_data("bst")
+ split_rules = pub_data.get_mapping("split-rules", {})
unfound_includes = []
for domain in self.include:
if domain not in split_rules:
@@ -240,18 +248,17 @@ class FilterElement(Element):
detail = []
if unfound_includes:
detail.append("Unknown domains were used in {}".format(self.include_node.get_provenance()))
- detail.extend([' - {}'.format(domain) for domain in unfound_includes])
+ detail.extend([" - {}".format(domain) for domain in unfound_includes])
if unfound_excludes:
detail.append("Unknown domains were used in {}".format(self.exclude_node.get_provenance()))
- detail.extend([' - {}'.format(domain) for domain in unfound_excludes])
+ detail.extend([" - {}".format(domain) for domain in unfound_excludes])
if detail:
- detail = '\n'.join(detail)
+ detail = "\n".join(detail)
raise ElementError("Unknown domains declared.", detail=detail)
- dep.stage_artifact(sandbox, include=self.include,
- exclude=self.exclude, orphans=self.include_orphans)
+ dep.stage_artifact(sandbox, include=self.include, exclude=self.exclude, orphans=self.include_orphans)
return ""
def _get_source_element(self):
diff --git a/src/buildstream/plugins/elements/import.py b/src/buildstream/plugins/elements/import.py
index 404a0f4ee..2b68197a7 100644
--- a/src/buildstream/plugins/elements/import.py
+++ b/src/buildstream/plugins/elements/import.py
@@ -45,12 +45,10 @@ class ImportElement(Element):
BST_RUN_COMMANDS = False
def configure(self, node):
- node.validate_keys([
- 'source', 'target'
- ])
+ node.validate_keys(["source", "target"])
- self.source = self.node_subst_vars(node.get_scalar('source'))
- self.target = self.node_subst_vars(node.get_scalar('target'))
+ self.source = self.node_subst_vars(node.get_scalar("source"))
+ self.target = self.node_subst_vars(node.get_scalar("target"))
def preflight(self):
# Assert that we have at least one source to fetch.
@@ -60,10 +58,7 @@ class ImportElement(Element):
raise ElementError("{}: An import element must have at least one source.".format(self))
def get_unique_key(self):
- return {
- 'source': self.source,
- 'target': self.target
- }
+ return {"source": self.source, "target": self.target}
def configure_sandbox(self, sandbox):
pass
@@ -74,11 +69,11 @@ class ImportElement(Element):
def assemble(self, sandbox):
# Stage sources into the input directory
- self.stage_sources(sandbox, 'input')
+ self.stage_sources(sandbox, "input")
rootdir = sandbox.get_virtual_directory()
- inputdir = rootdir.descend('input')
- outputdir = rootdir.descend('output', create=True)
+ inputdir = rootdir.descend("input")
+ outputdir = rootdir.descend("output", create=True)
# The directory to grab
inputdir = inputdir.descend(*self.source.strip(os.sep).split(os.sep))
@@ -87,18 +82,17 @@ class ImportElement(Element):
outputdir = outputdir.descend(*self.target.strip(os.sep).split(os.sep), create=True)
if inputdir.is_empty():
- raise ElementError("{}: No files were found inside directory '{}'"
- .format(self, self.source))
+ raise ElementError("{}: No files were found inside directory '{}'".format(self, self.source))
# Move it over
outputdir.import_files(inputdir)
# And we're done
- return '/output'
+ return "/output"
def generate_script(self):
- build_root = self.get_variable('build-root')
- install_root = self.get_variable('install-root')
+ build_root = self.get_variable("build-root")
+ install_root = self.get_variable("install-root")
commands = []
# The directory to grab
diff --git a/src/buildstream/plugins/elements/junction.py b/src/buildstream/plugins/elements/junction.py
index aec32516b..42b9ef08e 100644
--- a/src/buildstream/plugins/elements/junction.py
+++ b/src/buildstream/plugins/elements/junction.py
@@ -187,13 +187,13 @@ class JunctionElement(Element):
BST_FORBID_RDEPENDS = True
def configure(self, node):
- self.path = node.get_str('path', default='')
- self.options = node.get_mapping('options', default={})
- self.target = node.get_str('target', default=None)
+ self.path = node.get_str("path", default="")
+ self.options = node.get_mapping("options", default={})
+ self.target = node.get_str("target", default=None)
self.target_element = None
self.target_junction = None
- self.cache_junction_elements = node.get_bool('cache-junction-elements', default=False)
- self.ignore_junction_remotes = node.get_bool('ignore-junction-remotes', default=False)
+ self.cache_junction_elements = node.get_bool("cache-junction-elements", default=False)
+ self.ignore_junction_remotes = node.get_bool("ignore-junction-remotes", default=False)
def preflight(self):
# "target" cannot be used in conjunction with:
diff --git a/src/buildstream/plugins/elements/manual.py b/src/buildstream/plugins/elements/manual.py
index bbda65312..97da41615 100644
--- a/src/buildstream/plugins/elements/manual.py
+++ b/src/buildstream/plugins/elements/manual.py
@@ -42,8 +42,7 @@ class ManualElement(BuildElement):
# Enable command batching across prepare() and assemble()
def configure_sandbox(self, sandbox):
super().configure_sandbox(sandbox)
- self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
- collect=self.get_variable('install-root'))
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root"))
# Plugin entry point
diff --git a/src/buildstream/plugins/elements/pip.py b/src/buildstream/plugins/elements/pip.py
index 4a9eefde1..93303748d 100644
--- a/src/buildstream/plugins/elements/pip.py
+++ b/src/buildstream/plugins/elements/pip.py
@@ -42,8 +42,7 @@ class PipElement(BuildElement):
# Enable command batching across prepare() and assemble()
def configure_sandbox(self, sandbox):
super().configure_sandbox(sandbox)
- self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
- collect=self.get_variable('install-root'))
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root"))
# Plugin entry point
diff --git a/src/buildstream/plugins/elements/script.py b/src/buildstream/plugins/elements/script.py
index f3f0a2f7a..abfb7b3b0 100644
--- a/src/buildstream/plugins/elements/script.py
+++ b/src/buildstream/plugins/elements/script.py
@@ -46,21 +46,19 @@ class ScriptElement(buildstream.ScriptElement):
BST_VIRTUAL_DIRECTORY = True
def configure(self, node):
- for n in node.get_sequence('layout', []):
- dst = self.node_subst_vars(n.get_scalar('destination'))
- elm = self.node_subst_vars(n.get_scalar('element', None))
+ for n in node.get_sequence("layout", []):
+ dst = self.node_subst_vars(n.get_scalar("destination"))
+ elm = self.node_subst_vars(n.get_scalar("element", None))
self.layout_add(elm, dst)
- node.validate_keys([
- 'commands', 'root-read-only', 'layout'
- ])
+ node.validate_keys(["commands", "root-read-only", "layout"])
cmds = self.node_subst_sequence_vars(node.get_sequence("commands"))
self.add_commands("commands", cmds)
self.set_work_dir()
self.set_install_root()
- self.set_root_read_only(node.get_bool('root-read-only', default=False))
+ self.set_root_read_only(node.get_bool("root-read-only", default=False))
# Plugin entry point
diff --git a/src/buildstream/plugins/elements/stack.py b/src/buildstream/plugins/elements/stack.py
index ae15af63a..f569199b6 100644
--- a/src/buildstream/plugins/elements/stack.py
+++ b/src/buildstream/plugins/elements/stack.py
@@ -64,10 +64,10 @@ class StackElement(Element):
# Just create a dummy empty artifact, its existence is a statement
# that all this stack's dependencies are built.
vrootdir = sandbox.get_virtual_directory()
- vrootdir.descend('output', create=True)
+ vrootdir.descend("output", create=True)
# And we're done
- return '/output'
+ return "/output"
# Plugin entry point
diff --git a/src/buildstream/plugins/sources/_downloadablefilesource.py b/src/buildstream/plugins/sources/_downloadablefilesource.py
index 1e759b94f..4e43ee3e3 100644
--- a/src/buildstream/plugins/sources/_downloadablefilesource.py
+++ b/src/buildstream/plugins/sources/_downloadablefilesource.py
@@ -12,7 +12,6 @@ from buildstream import utils
class _NetrcFTPOpener(urllib.request.FTPHandler):
-
def __init__(self, netrc_config):
self.netrc = netrc_config
@@ -28,11 +27,11 @@ class _NetrcFTPOpener(urllib.request.FTPHandler):
def _unsplit(self, host, port, user, passwd):
if port:
- host = '{}:{}'.format(host, port)
+ host = "{}:{}".format(host, port)
if user:
if passwd:
- user = '{}:{}'.format(user, passwd)
- host = '{}@{}'.format(user, host)
+ user = "{}:{}".format(user, passwd)
+ host = "{}@{}".format(user, host)
return host
@@ -50,7 +49,6 @@ class _NetrcFTPOpener(urllib.request.FTPHandler):
class _NetrcPasswordManager:
-
def __init__(self, netrc_config):
self.netrc = netrc_config
@@ -72,17 +70,16 @@ class _NetrcPasswordManager:
class DownloadableFileSource(Source):
# pylint: disable=attribute-defined-outside-init
- COMMON_CONFIG_KEYS = Source.COMMON_CONFIG_KEYS + ['url', 'ref', 'etag']
+ COMMON_CONFIG_KEYS = Source.COMMON_CONFIG_KEYS + ["url", "ref", "etag"]
__urlopener = None
__default_mirror_file = None
def configure(self, node):
- self.original_url = node.get_str('url')
- self.ref = node.get_str('ref', None)
+ self.original_url = node.get_str("url")
+ self.ref = node.get_str("ref", None)
self.url = self.translate_url(self.original_url)
- self._mirror_dir = os.path.join(self.get_mirror_directory(),
- utils.url_directory_name(self.original_url))
+ self._mirror_dir = os.path.join(self.get_mirror_directory(), utils.url_directory_name(self.original_url))
self._warn_deprecated_etag(node)
def preflight(self):
@@ -102,28 +99,29 @@ class DownloadableFileSource(Source):
return Consistency.RESOLVED
def load_ref(self, node):
- self.ref = node.get_str('ref', None)
+ self.ref = node.get_str("ref", None)
self._warn_deprecated_etag(node)
def get_ref(self):
return self.ref
def set_ref(self, ref, node):
- node['ref'] = self.ref = ref
+ node["ref"] = self.ref = ref
def track(self): # pylint: disable=arguments-differ
# there is no 'track' field in the source to determine what/whether
# or not to update refs, because tracking a ref is always a conscious
# decision by the user.
- with self.timed_activity("Tracking {}".format(self.url),
- silent_nested=True):
+ with self.timed_activity("Tracking {}".format(self.url), silent_nested=True):
new_ref = self._ensure_mirror()
if self.ref and self.ref != new_ref:
- detail = "When tracking, new ref differs from current ref:\n" \
- + " Tracked URL: {}\n".format(self.url) \
- + " Current ref: {}\n".format(self.ref) \
+ detail = (
+ "When tracking, new ref differs from current ref:\n"
+ + " Tracked URL: {}\n".format(self.url)
+ + " Current ref: {}\n".format(self.ref)
+ " New ref: {}\n".format(new_ref)
+ )
self.warn("Potential man-in-the-middle attack!", detail=detail)
return new_ref
@@ -142,25 +140,26 @@ class DownloadableFileSource(Source):
with self.timed_activity("Fetching {}".format(self.url), silent_nested=True):
sha256 = self._ensure_mirror()
if sha256 != self.ref:
- raise SourceError("File downloaded from {} has sha256sum '{}', not '{}'!"
- .format(self.url, sha256, self.ref))
+ raise SourceError(
+ "File downloaded from {} has sha256sum '{}', not '{}'!".format(self.url, sha256, self.ref)
+ )
def _warn_deprecated_etag(self, node):
- etag = node.get_str('etag', None)
+ etag = node.get_str("etag", None)
if etag:
provenance = node.get_scalar(etag).get_provenance()
self.warn('{} "etag" is deprecated and ignored.'.format(provenance))
def _get_etag(self, ref):
- etagfilename = os.path.join(self._mirror_dir, '{}.etag'.format(ref))
+ etagfilename = os.path.join(self._mirror_dir, "{}.etag".format(ref))
if os.path.exists(etagfilename):
- with open(etagfilename, 'r') as etagfile:
+ with open(etagfilename, "r") as etagfile:
return etagfile.read()
return None
def _store_etag(self, ref, etag):
- etagfilename = os.path.join(self._mirror_dir, '{}.etag'.format(ref))
+ etagfilename = os.path.join(self._mirror_dir, "{}.etag".format(ref))
with utils.save_file_atomic(etagfilename) as etagfile:
etagfile.write(etag)
@@ -170,7 +169,7 @@ class DownloadableFileSource(Source):
with self.tempdir() as td:
default_name = os.path.basename(self.url)
request = urllib.request.Request(self.url)
- request.add_header('Accept', '*/*')
+ request.add_header("Accept", "*/*")
# We do not use etag in case what we have in cache is
# not matching ref in order to be able to recover from
@@ -180,18 +179,18 @@ class DownloadableFileSource(Source):
# Do not re-download the file if the ETag matches.
if etag and self.get_consistency() == Consistency.CACHED:
- request.add_header('If-None-Match', etag)
+ request.add_header("If-None-Match", etag)
opener = self.__get_urlopener()
with contextlib.closing(opener.open(request)) as response:
info = response.info()
- etag = info['ETag'] if 'ETag' in info else None
+ etag = info["ETag"] if "ETag" in info else None
filename = info.get_filename(default_name)
filename = os.path.basename(filename)
local_file = os.path.join(td, filename)
- with open(local_file, 'wb') as dest:
+ with open(local_file, "wb") as dest:
shutil.copyfileobj(response, dest)
# Make sure url-specific mirror dir exists.
@@ -214,14 +213,12 @@ class DownloadableFileSource(Source):
# Because we use etag only for matching ref, currently specified ref is what
# we would have downloaded.
return self.ref
- raise SourceError("{}: Error mirroring {}: {}"
- .format(self, self.url, e), temporary=True) from e
+ raise SourceError("{}: Error mirroring {}: {}".format(self, self.url, e), temporary=True) from e
except (urllib.error.URLError, urllib.error.ContentTooShortError, OSError, ValueError) as e:
# Note that urllib.request.Request in the try block may throw a
# ValueError for unknown url types, so we handle it here.
- raise SourceError("{}: Error mirroring {}: {}"
- .format(self, self.url, e), temporary=True) from e
+ raise SourceError("{}: Error mirroring {}: {}".format(self, self.url, e), temporary=True) from e
def _get_mirror_file(self, sha=None):
if sha is not None:
@@ -245,7 +242,7 @@ class DownloadableFileSource(Source):
#
DownloadableFileSource.__urlopener = urllib.request.build_opener()
except netrc.NetrcParseError as e:
- self.warn('{}: While reading .netrc: {}'.format(self, e))
+ self.warn("{}: While reading .netrc: {}".format(self, e))
return urllib.request.build_opener()
else:
netrc_pw_mgr = _NetrcPasswordManager(netrc_config)
diff --git a/src/buildstream/plugins/sources/bzr.py b/src/buildstream/plugins/sources/bzr.py
index 88dba7dc2..30ce55585 100644
--- a/src/buildstream/plugins/sources/bzr.py
+++ b/src/buildstream/plugins/sources/bzr.py
@@ -67,16 +67,16 @@ class BzrSource(Source):
# pylint: disable=attribute-defined-outside-init
def configure(self, node):
- node.validate_keys(['url', 'track', 'ref', *Source.COMMON_CONFIG_KEYS])
+ node.validate_keys(["url", "track", "ref", *Source.COMMON_CONFIG_KEYS])
- self.original_url = node.get_str('url')
- self.tracking = node.get_str('track')
- self.ref = node.get_str('ref', None)
+ self.original_url = node.get_str("url")
+ self.tracking = node.get_str("track")
+ self.ref = node.get_str("ref", None)
self.url = self.translate_url(self.original_url)
def preflight(self):
# Check if bzr is installed, get the binary at the same time.
- self.host_bzr = utils.get_host_tool('bzr')
+ self.host_bzr = utils.get_host_tool("bzr")
def get_unique_key(self):
return [self.original_url, self.tracking, self.ref]
@@ -93,39 +93,44 @@ class BzrSource(Source):
return Consistency.RESOLVED
def load_ref(self, node):
- self.ref = node.get_str('ref', None)
+ self.ref = node.get_str("ref", None)
def get_ref(self):
return self.ref
def set_ref(self, ref, node):
- node['ref'] = self.ref = ref
+ node["ref"] = self.ref = ref
def track(self): # pylint: disable=arguments-differ
- with self.timed_activity("Tracking {}".format(self.url),
- silent_nested=True), self._locked():
+ with self.timed_activity("Tracking {}".format(self.url), silent_nested=True), self._locked():
self._ensure_mirror(skip_ref_check=True)
- ret, out = self.check_output([self.host_bzr, "version-info",
- "--custom", "--template={revno}",
- self._get_branch_dir()],
- fail="Failed to read the revision number at '{}'"
- .format(self._get_branch_dir()))
+ ret, out = self.check_output(
+ [self.host_bzr, "version-info", "--custom", "--template={revno}", self._get_branch_dir()],
+ fail="Failed to read the revision number at '{}'".format(self._get_branch_dir()),
+ )
if ret != 0:
raise SourceError("{}: Failed to get ref for tracking {}".format(self, self.tracking))
return out
def fetch(self): # pylint: disable=arguments-differ
- with self.timed_activity("Fetching {}".format(self.url),
- silent_nested=True), self._locked():
+ with self.timed_activity("Fetching {}".format(self.url), silent_nested=True), self._locked():
self._ensure_mirror()
def stage(self, directory):
- self.call([self.host_bzr, "checkout", "--lightweight",
- "--revision=revno:{}".format(self.ref),
- self._get_branch_dir(), directory],
- fail="Failed to checkout revision {} from branch {} to {}"
- .format(self.ref, self._get_branch_dir(), directory))
+ self.call(
+ [
+ self.host_bzr,
+ "checkout",
+ "--lightweight",
+ "--revision=revno:{}".format(self.ref),
+ self._get_branch_dir(),
+ directory,
+ ],
+ fail="Failed to checkout revision {} from branch {} to {}".format(
+ self.ref, self._get_branch_dir(), directory
+ ),
+ )
# Remove .bzr dir
shutil.rmtree(os.path.join(directory, ".bzr"))
@@ -133,16 +138,24 @@ class BzrSource(Source):
url = os.path.join(self.url, self.tracking)
with self.timed_activity('Setting up workspace "{}"'.format(directory), silent_nested=True):
# Checkout from the cache
- self.call([self.host_bzr, "branch",
- "--use-existing-dir",
- "--revision=revno:{}".format(self.ref),
- self._get_branch_dir(), directory],
- fail="Failed to branch revision {} from branch {} to {}"
- .format(self.ref, self._get_branch_dir(), directory))
+ self.call(
+ [
+ self.host_bzr,
+ "branch",
+ "--use-existing-dir",
+ "--revision=revno:{}".format(self.ref),
+ self._get_branch_dir(),
+ directory,
+ ],
+ fail="Failed to branch revision {} from branch {} to {}".format(
+ self.ref, self._get_branch_dir(), directory
+ ),
+ )
# Switch the parent branch to the source's origin
- self.call([self.host_bzr, "switch",
- "--directory={}".format(directory), url],
- fail="Failed to switch workspace's parent branch to {}".format(url))
+ self.call(
+ [self.host_bzr, "switch", "--directory={}".format(directory), url],
+ fail="Failed to switch workspace's parent branch to {}".format(url),
+ )
# _locked()
#
@@ -151,13 +164,10 @@ class BzrSource(Source):
#
@contextmanager
def _locked(self):
- lockdir = os.path.join(self.get_mirror_directory(), 'locks')
- lockfile = os.path.join(
- lockdir,
- utils.url_directory_name(self.original_url) + '.lock'
- )
+ lockdir = os.path.join(self.get_mirror_directory(), "locks")
+ lockfile = os.path.join(lockdir, utils.url_directory_name(self.original_url) + ".lock")
os.makedirs(lockdir, exist_ok=True)
- with open(lockfile, 'w') as lock:
+ with open(lockfile, "w") as lock:
fcntl.flock(lock, fcntl.LOCK_EX)
try:
yield
@@ -169,41 +179,42 @@ class BzrSource(Source):
if not os.path.exists(self._get_branch_dir()):
return False
- return self.call([self.host_bzr, "revno",
- "--revision=revno:{}".format(self.ref),
- self._get_branch_dir()]) == 0
+ return self.call([self.host_bzr, "revno", "--revision=revno:{}".format(self.ref), self._get_branch_dir()]) == 0
def _get_branch_dir(self):
return os.path.join(self._get_mirror_dir(), self.tracking)
def _get_mirror_dir(self):
- return os.path.join(self.get_mirror_directory(),
- utils.url_directory_name(self.original_url))
+ return os.path.join(self.get_mirror_directory(), utils.url_directory_name(self.original_url))
def _ensure_mirror(self, skip_ref_check=False):
mirror_dir = self._get_mirror_dir()
bzr_metadata_dir = os.path.join(mirror_dir, ".bzr")
if not os.path.exists(bzr_metadata_dir):
- self.call([self.host_bzr, "init-repo", "--no-trees", mirror_dir],
- fail="Failed to initialize bzr repository")
+ self.call(
+ [self.host_bzr, "init-repo", "--no-trees", mirror_dir], fail="Failed to initialize bzr repository"
+ )
branch_dir = os.path.join(mirror_dir, self.tracking)
branch_url = self.url + "/" + self.tracking
if not os.path.exists(branch_dir):
# `bzr branch` the branch if it doesn't exist
# to get the upstream code
- self.call([self.host_bzr, "branch", branch_url, branch_dir],
- fail="Failed to branch from {} to {}".format(branch_url, branch_dir))
+ self.call(
+ [self.host_bzr, "branch", branch_url, branch_dir],
+ fail="Failed to branch from {} to {}".format(branch_url, branch_dir),
+ )
else:
# `bzr pull` the branch if it does exist
# to get any changes to the upstream code
- self.call([self.host_bzr, "pull", "--directory={}".format(branch_dir), branch_url],
- fail="Failed to pull new changes for {}".format(branch_dir))
+ self.call(
+ [self.host_bzr, "pull", "--directory={}".format(branch_dir), branch_url],
+ fail="Failed to pull new changes for {}".format(branch_dir),
+ )
if not skip_ref_check and not self._check_ref():
- raise SourceError("Failed to ensure ref '{}' was mirrored".format(self.ref),
- reason="ref-not-mirrored")
+ raise SourceError("Failed to ensure ref '{}' was mirrored".format(self.ref), reason="ref-not-mirrored")
def setup():
diff --git a/src/buildstream/plugins/sources/deb.py b/src/buildstream/plugins/sources/deb.py
index cc88cf53c..a7437b150 100644
--- a/src/buildstream/plugins/sources/deb.py
+++ b/src/buildstream/plugins/sources/deb.py
@@ -50,7 +50,7 @@ details on common configuration options for sources.
import tarfile
from contextlib import contextmanager
-import arpy # pylint: disable=import-error
+import arpy # pylint: disable=import-error
from .tar import TarSource
@@ -61,14 +61,14 @@ class DebSource(TarSource):
def configure(self, node):
super().configure(node)
- self.base_dir = node.get_str('base-dir', None)
+ self.base_dir = node.get_str("base-dir", None)
def preflight(self):
return
@contextmanager
def _get_tar(self):
- with open(self._get_mirror_file(), 'rb') as deb_file:
+ with open(self._get_mirror_file(), "rb") as deb_file:
arpy_archive = arpy.Archive(fileobj=deb_file)
arpy_archive.read_all_headers()
data_tar_arpy = [v for k, v in arpy_archive.archived_files.items() if b"data.tar" in k][0]
diff --git a/src/buildstream/plugins/sources/local.py b/src/buildstream/plugins/sources/local.py
index f40fd79c0..90d8a8f6f 100644
--- a/src/buildstream/plugins/sources/local.py
+++ b/src/buildstream/plugins/sources/local.py
@@ -54,8 +54,8 @@ class LocalSource(Source):
self.__unique_key = None
def configure(self, node):
- node.validate_keys(['path', *Source.COMMON_CONFIG_KEYS])
- self.path = self.node_get_project_path(node.get_scalar('path'))
+ node.validate_keys(["path", *Source.COMMON_CONFIG_KEYS])
+ self.path = self.node_get_project_path(node.get_scalar("path"))
self.fullpath = os.path.join(self.get_project_directory(), self.path)
def preflight(self):
@@ -89,8 +89,8 @@ class LocalSource(Source):
if result.overwritten or result.ignored:
raise SourceError(
- "Failed to stage source: files clash with existing directory",
- reason='ensure-stage-dir-fail')
+ "Failed to stage source: files clash with existing directory", reason="ensure-stage-dir-fail"
+ )
def _get_local_path(self):
return self.fullpath
diff --git a/src/buildstream/plugins/sources/patch.py b/src/buildstream/plugins/sources/patch.py
index 86811cb4d..082983023 100644
--- a/src/buildstream/plugins/sources/patch.py
+++ b/src/buildstream/plugins/sources/patch.py
@@ -56,8 +56,7 @@ class PatchSource(Source):
def configure(self, node):
node.validate_keys(["path", "strip-level", *Source.COMMON_CONFIG_KEYS])
- self.path = self.node_get_project_path(node.get_scalar('path'),
- check_is_file=True)
+ self.path = self.node_get_project_path(node.get_scalar("path"), check_is_file=True)
self.strip_level = node.get_int("strip-level", default=1)
self.fullpath = os.path.join(self.get_project_directory(), self.path)
@@ -89,12 +88,13 @@ class PatchSource(Source):
# Bail out with a comprehensive message if the target directory is empty
if not os.listdir(directory):
- raise SourceError("Nothing to patch in directory '{}'".format(directory),
- reason="patch-no-files")
+ raise SourceError("Nothing to patch in directory '{}'".format(directory), reason="patch-no-files")
strip_level_option = "-p{}".format(self.strip_level)
- self.call([self.host_patch, strip_level_option, "-i", self.fullpath, "-d", directory],
- fail="Failed to apply patch {}".format(self.path))
+ self.call(
+ [self.host_patch, strip_level_option, "-i", self.fullpath, "-d", directory],
+ fail="Failed to apply patch {}".format(self.path),
+ )
# Plugin entry point
diff --git a/src/buildstream/plugins/sources/pip.py b/src/buildstream/plugins/sources/pip.py
index 758ef665f..2c9773787 100644
--- a/src/buildstream/plugins/sources/pip.py
+++ b/src/buildstream/plugins/sources/pip.py
@@ -74,30 +74,28 @@ import re
from buildstream import Consistency, Source, SourceError, utils
-_OUTPUT_DIRNAME = '.bst_pip_downloads'
-_PYPI_INDEX_URL = 'https://pypi.org/simple/'
+_OUTPUT_DIRNAME = ".bst_pip_downloads"
+_PYPI_INDEX_URL = "https://pypi.org/simple/"
# Used only for finding pip command
_PYTHON_VERSIONS = [
- 'python', # when running in a venv, we might not have the exact version
- 'python2.7',
- 'python3.0',
- 'python3.1',
- 'python3.2',
- 'python3.3',
- 'python3.4',
- 'python3.5',
- 'python3.6',
- 'python3.7',
+ "python", # when running in a venv, we might not have the exact version
+ "python2.7",
+ "python3.0",
+ "python3.1",
+ "python3.2",
+ "python3.3",
+ "python3.4",
+ "python3.5",
+ "python3.6",
+ "python3.7",
]
# List of allowed extensions taken from
# https://docs.python.org/3/distutils/sourcedist.html.
# Names of source distribution archives must be of the form
# '%{package-name}-%{version}.%{extension}'.
-_SDIST_RE = re.compile(
- r'^([\w.-]+?)-((?:[\d.]+){2,})\.(?:tar|tar.bz2|tar.gz|tar.xz|tar.Z|zip)$',
- re.IGNORECASE)
+_SDIST_RE = re.compile(r"^([\w.-]+?)-((?:[\d.]+){2,})\.(?:tar|tar.bz2|tar.gz|tar.xz|tar.Z|zip)$", re.IGNORECASE)
class PipSource(Source):
@@ -109,16 +107,15 @@ class PipSource(Source):
BST_REQUIRES_PREVIOUS_SOURCES_TRACK = True
def configure(self, node):
- node.validate_keys(['url', 'packages', 'ref', 'requirements-files'] +
- Source.COMMON_CONFIG_KEYS)
- self.ref = node.get_str('ref', None)
- self.original_url = node.get_str('url', _PYPI_INDEX_URL)
+ node.validate_keys(["url", "packages", "ref", "requirements-files"] + Source.COMMON_CONFIG_KEYS)
+ self.ref = node.get_str("ref", None)
+ self.original_url = node.get_str("url", _PYPI_INDEX_URL)
self.index_url = self.translate_url(self.original_url)
- self.packages = node.get_str_list('packages', [])
- self.requirements_files = node.get_str_list('requirements-files', [])
+ self.packages = node.get_str_list("packages", [])
+ self.requirements_files = node.get_str_list("requirements-files", [])
if not (self.packages or self.requirements_files):
- raise SourceError("{}: Either 'packages' or 'requirements-files' must be specified". format(self))
+ raise SourceError("{}: Either 'packages' or 'requirements-files' must be specified".format(self))
def preflight(self):
        # Try to find a pip version that supports download command
@@ -126,9 +123,9 @@ class PipSource(Source):
for python in reversed(_PYTHON_VERSIONS):
try:
host_python = utils.get_host_tool(python)
- rc = self.call([host_python, '-m', 'pip', 'download', '--help'])
+ rc = self.call([host_python, "-m", "pip", "download", "--help"])
if rc == 0:
- self.host_pip = [host_python, '-m', 'pip']
+ self.host_pip = [host_python, "-m", "pip"]
break
except utils.ProgramNotFoundError:
pass
@@ -150,10 +147,10 @@ class PipSource(Source):
return self.ref
def load_ref(self, node):
- self.ref = node.get_str('ref', None)
+ self.ref = node.get_str("ref", None)
def set_ref(self, ref, node):
- node['ref'] = self.ref = ref
+ node["ref"] = self.ref = ref
def track(self, previous_sources_dir): # pylint: disable=arguments-differ
# XXX pip does not offer any public API other than the CLI tool so it
@@ -163,32 +160,44 @@ class PipSource(Source):
# for details.
# As a result, we have to wastefully install the packages during track.
with self.tempdir() as tmpdir:
- install_args = self.host_pip + ['download',
- '--no-binary', ':all:',
- '--index-url', self.index_url,
- '--dest', tmpdir]
+ install_args = self.host_pip + [
+ "download",
+ "--no-binary",
+ ":all:",
+ "--index-url",
+ self.index_url,
+ "--dest",
+ tmpdir,
+ ]
for requirement_file in self.requirements_files:
fpath = os.path.join(previous_sources_dir, requirement_file)
- install_args += ['-r', fpath]
+ install_args += ["-r", fpath]
install_args += self.packages
self.call(install_args, fail="Failed to install python packages")
reqs = self._parse_sdist_names(tmpdir)
- return '\n'.join(["{}=={}".format(pkg, ver) for pkg, ver in reqs])
+ return "\n".join(["{}=={}".format(pkg, ver) for pkg, ver in reqs])
def fetch(self): # pylint: disable=arguments-differ
with self.tempdir() as tmpdir:
- packages = self.ref.strip().split('\n')
- package_dir = os.path.join(tmpdir, 'packages')
+ packages = self.ref.strip().split("\n")
+ package_dir = os.path.join(tmpdir, "packages")
os.makedirs(package_dir)
- self.call([*self.host_pip,
- 'download',
- '--no-binary', ':all:',
- '--index-url', self.index_url,
- '--dest', package_dir,
- *packages],
- fail="Failed to install python packages: {}".format(packages))
+ self.call(
+ [
+ *self.host_pip,
+ "download",
+ "--no-binary",
+ ":all:",
+ "--index-url",
+ self.index_url,
+ "--dest",
+ package_dir,
+ *packages,
+ ],
+ fail="Failed to install python packages: {}".format(packages),
+ )
# If the mirror directory already exists, assume that some other
# process has fetched the sources before us and ensure that we do
@@ -200,8 +209,11 @@ class PipSource(Source):
# before us.
pass
except OSError as e:
- raise SourceError("{}: Failed to move downloaded pip packages from '{}' to '{}': {}"
- .format(self, package_dir, self._mirror, e)) from e
+ raise SourceError(
+ "{}: Failed to move downloaded pip packages from '{}' to '{}': {}".format(
+ self, package_dir, self._mirror, e
+ )
+ ) from e
def stage(self, directory):
with self.timed_activity("Staging Python packages", silent_nested=True):
@@ -213,9 +225,11 @@ class PipSource(Source):
def _mirror(self):
if not self.ref:
return None
- return os.path.join(self.get_mirror_directory(),
- utils.url_directory_name(self.original_url),
- hashlib.sha256(self.ref.encode()).hexdigest())
+ return os.path.join(
+ self.get_mirror_directory(),
+ utils.url_directory_name(self.original_url),
+ hashlib.sha256(self.ref.encode()).hexdigest(),
+ )
# Parse names of downloaded source distributions
#
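The track() and fetch() hunks above boil down to one pattern: download source distributions with `pip download --no-binary :all:` and derive "name==version" pins from the sdist file names. Below is a minimal standalone sketch of that pattern, assuming a host interpreter with pip on PATH; collect_pins and the hard-coded python3 name are illustrative only, not part of the plugin API.

import os
import re
import subprocess
import tempfile

# Same allowed-extension pattern as the pip source above.
_SDIST_RE = re.compile(r"^([\w.-]+?)-((?:[\d.]+){2,})\.(?:tar|tar.bz2|tar.gz|tar.xz|tar.Z|zip)$", re.IGNORECASE)


def collect_pins(packages, index_url="https://pypi.org/simple/"):
    # Download sdists only, then read the pins back out of the file names,
    # as PipSource.track() does.
    with tempfile.TemporaryDirectory() as tmpdir:
        subprocess.run(
            ["python3", "-m", "pip", "download", "--no-binary", ":all:",
             "--index-url", index_url, "--dest", tmpdir, *packages],
            check=True,
        )
        pins = []
        for name in sorted(os.listdir(tmpdir)):
            match = _SDIST_RE.match(name)
            if match:
                pins.append("{}=={}".format(match.group(1), match.group(2)))
        return "\n".join(pins)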
diff --git a/src/buildstream/plugins/sources/remote.py b/src/buildstream/plugins/sources/remote.py
index 68aa577fc..da1a1f964 100644
--- a/src/buildstream/plugins/sources/remote.py
+++ b/src/buildstream/plugins/sources/remote.py
@@ -62,13 +62,14 @@ class RemoteSource(DownloadableFileSource):
def configure(self, node):
super().configure(node)
- self.filename = node.get_str('filename', os.path.basename(self.url))
- self.executable = node.get_bool('executable', default=False)
+ self.filename = node.get_str("filename", os.path.basename(self.url))
+ self.executable = node.get_bool("executable", default=False)
if os.sep in self.filename:
- raise SourceError('{}: filename parameter cannot contain directories'.format(self),
- reason="filename-contains-directory")
- node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ['filename', 'executable'])
+ raise SourceError(
+ "{}: filename parameter cannot contain directories".format(self), reason="filename-contains-directory"
+ )
+ node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ["filename", "executable"])
def get_unique_key(self):
return super().get_unique_key() + [self.filename, self.executable]
diff --git a/src/buildstream/plugins/sources/tar.py b/src/buildstream/plugins/sources/tar.py
index 60d464457..658cc2735 100644
--- a/src/buildstream/plugins/sources/tar.py
+++ b/src/buildstream/plugins/sources/tar.py
@@ -73,6 +73,7 @@ class ReadableTarInfo(tarfile.TarInfo):
`mode` attribute in `TarInfo`, the class that encapsulates the internal meta-data of the tarball,
so that the owner-read bit is always set.
"""
+
@property
def mode(self):
# ensure file is readable by owner
@@ -89,13 +90,13 @@ class TarSource(DownloadableFileSource):
def configure(self, node):
super().configure(node)
- self.base_dir = node.get_str('base-dir', '*')
- node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ['base-dir'])
+ self.base_dir = node.get_str("base-dir", "*")
+ node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ["base-dir"])
def preflight(self):
self.host_lzip = None
- if self.url.endswith('.lz'):
- self.host_lzip = utils.get_host_tool('lzip')
+ if self.url.endswith(".lz"):
+ self.host_lzip = utils.get_host_tool("lzip")
def get_unique_key(self):
return super().get_unique_key() + [self.base_dir]
@@ -104,19 +105,17 @@ class TarSource(DownloadableFileSource):
def _run_lzip(self):
assert self.host_lzip
with TemporaryFile() as lzip_stdout:
- with open(self._get_mirror_file(), 'r') as lzip_file:
- self.call([self.host_lzip, '-d'],
- stdin=lzip_file,
- stdout=lzip_stdout)
+ with open(self._get_mirror_file(), "r") as lzip_file:
+ self.call([self.host_lzip, "-d"], stdin=lzip_file, stdout=lzip_stdout)
lzip_stdout.seek(0, 0)
yield lzip_stdout
@contextmanager
def _get_tar(self):
- if self.url.endswith('.lz'):
+ if self.url.endswith(".lz"):
with self._run_lzip() as lzip_dec:
- with tarfile.open(fileobj=lzip_dec, mode='r:', tarinfo=ReadableTarInfo) as tar:
+ with tarfile.open(fileobj=lzip_dec, mode="r:", tarinfo=ReadableTarInfo) as tar:
yield tar
else:
with tarfile.open(self._get_mirror_file(), tarinfo=ReadableTarInfo) as tar:
@@ -147,14 +146,18 @@ class TarSource(DownloadableFileSource):
def assert_safe(member):
final_path = os.path.abspath(os.path.join(target_dir, member.path))
if not final_path.startswith(target_dir):
- raise SourceError("{}: Tarfile attempts to extract outside the staging area: "
- "{} -> {}".format(self, member.path, final_path))
+ raise SourceError(
+ "{}: Tarfile attempts to extract outside the staging area: "
+ "{} -> {}".format(self, member.path, final_path)
+ )
if member.islnk():
linked_path = os.path.abspath(os.path.join(target_dir, member.linkname))
if not linked_path.startswith(target_dir):
- raise SourceError("{}: Tarfile attempts to hardlink outside the staging area: "
- "{} -> {}".format(self, member.path, final_path))
+ raise SourceError(
+ "{}: Tarfile attempts to hardlink outside the staging area: "
+ "{} -> {}".format(self, member.path, final_path)
+ )
# Don't need to worry about symlinks because they're just
# files here and won't be able to do much harm once we are
@@ -167,9 +170,9 @@ class TarSource(DownloadableFileSource):
for member in tar.getmembers():
# First, ensure that a member never starts with `./`
- if member.path.startswith('./'):
+ if member.path.startswith("./"):
member.path = member.path[2:]
- if member.islnk() and member.linkname.startswith('./'):
+ if member.islnk() and member.linkname.startswith("./"):
member.linkname = member.linkname[2:]
# Now extract only the paths which match the normalized path
@@ -202,16 +205,16 @@ class TarSource(DownloadableFileSource):
# Remove any possible leading './', offer more consistent behavior
# across tarballs encoded with or without a leading '.'
- member_name = member.name.lstrip('./')
+ member_name = member.name.lstrip("./")
if not member.isdir():
# Loop over the components of a path, for a path of a/b/c/d
# we will first visit 'a', then 'a/b' and then 'a/b/c', excluding
# the final component
- components = member_name.split('/')
+ components = member_name.split("/")
for i in range(len(components) - 1):
- dir_component = '/'.join([components[j] for j in range(i + 1)])
+ dir_component = "/".join([components[j] for j in range(i + 1)])
if dir_component not in visited:
visited.add(dir_component)
try:
@@ -219,7 +222,7 @@ class TarSource(DownloadableFileSource):
# exist in the archive
_ = tar.getmember(dir_component)
except KeyError:
- if dir_component != '.':
+ if dir_component != ".":
yield dir_component
continue
@@ -227,7 +230,7 @@ class TarSource(DownloadableFileSource):
# Avoid considering the '.' directory, if any is included in the archive
# this is to avoid the default 'base-dir: *' value behaving differently
# depending on whether the tarball was encoded with a leading '.' or not
- elif member_name == '.':
+ elif member_name == ".":
continue
yield member_name
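The assert_safe() hunks above guard extraction against path traversal by resolving every member path (and hardlink target) against the staging directory. A minimal sketch of the same check outside the plugin follows, assuming a plain tarfile and a target directory; safe_members and safe_extract are illustrative names only.

import os
import tarfile


def safe_members(tar, target_dir):
    # Resolve each member against the staging directory and refuse anything
    # that would land, or hardlink, outside of it.
    target_dir = os.path.abspath(target_dir)
    for member in tar.getmembers():
        final_path = os.path.abspath(os.path.join(target_dir, member.path))
        if not final_path.startswith(target_dir):
            raise RuntimeError("{} escapes the staging area".format(member.path))
        if member.islnk():
            linked_path = os.path.abspath(os.path.join(target_dir, member.linkname))
            if not linked_path.startswith(target_dir):
                raise RuntimeError("{} hardlinks outside the staging area".format(member.path))
        yield member


def safe_extract(archive_path, target_dir):
    # Illustrative wrapper: extract only members that pass the check.
    with tarfile.open(archive_path) as tar:
        tar.extractall(target_dir, members=safe_members(tar, target_dir))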
diff --git a/src/buildstream/plugins/sources/workspace.py b/src/buildstream/plugins/sources/workspace.py
index f40f5fae8..a845fd440 100644
--- a/src/buildstream/plugins/sources/workspace.py
+++ b/src/buildstream/plugins/sources/workspace.py
@@ -59,9 +59,9 @@ class WorkspaceSource(Source):
return None
def configure(self, node: MappingNode) -> None:
- node.validate_keys(['path', 'ref', 'kind'])
- self.path = node.get_str('path')
- self.__digest = node.get_str('ref')
+ node.validate_keys(["path", "ref", "kind"])
+ self.path = node.get_str("path")
+ self.__digest = node.get_str("ref")
def preflight(self) -> None:
pass # pragma: nocover
@@ -79,7 +79,7 @@ class WorkspaceSource(Source):
#
# Raises AssertionError: existing workspaces should not be reinitialized
def init_workspace(self, directory: Directory) -> None:
- raise AssertionError('Attempting to re-open an existing workspace')
+ raise AssertionError("Attempting to re-open an existing workspace")
def get_consistency(self):
# always return cached state
@@ -95,8 +95,8 @@ class WorkspaceSource(Source):
if result.overwritten or result.ignored:
raise SourceError(
- "Failed to stage source: files clash with existing directory",
- reason='ensure-stage-dir-fail')
+ "Failed to stage source: files clash with existing directory", reason="ensure-stage-dir-fail"
+ )
def _get_local_path(self) -> str:
return self.path
diff --git a/src/buildstream/plugins/sources/zip.py b/src/buildstream/plugins/sources/zip.py
index 322be58d7..47933c8eb 100644
--- a/src/buildstream/plugins/sources/zip.py
+++ b/src/buildstream/plugins/sources/zip.py
@@ -72,8 +72,8 @@ class ZipSource(DownloadableFileSource):
def configure(self, node):
super().configure(node)
- self.base_dir = node.get_str('base-dir', '*')
- node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ['base-dir'])
+ self.base_dir = node.get_str("base-dir", "*")
+ node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ["base-dir"])
def get_unique_key(self):
return super().get_unique_key() + [self.base_dir]
@@ -139,14 +139,14 @@ class ZipSource(DownloadableFileSource):
# ZipInfo.is_dir() is only available in python >= 3.6, but all
# it does is check for a trailing '/' in the name
#
- if not member.filename.endswith('/'):
+ if not member.filename.endswith("/"):
# Loop over the components of a path, for a path of a/b/c/d
# we will first visit 'a', then 'a/b' and then 'a/b/c', excluding
# the final component
- components = member.filename.split('/')
+ components = member.filename.split("/")
for i in range(len(components) - 1):
- dir_component = '/'.join([components[j] for j in range(i + 1)])
+ dir_component = "/".join([components[j] for j in range(i + 1)])
if dir_component not in visited:
visited[dir_component] = True
try:
@@ -154,7 +154,7 @@ class ZipSource(DownloadableFileSource):
# exist in the archive
_ = archive.getinfo(dir_component)
except KeyError:
- if dir_component != '.':
+ if dir_component != ".":
yield dir_component
continue
@@ -162,7 +162,7 @@ class ZipSource(DownloadableFileSource):
# Avoid considering the '.' directory, if any is included in the archive
# this is to avoid the default 'base-dir: *' value behaving differently
# depending on whether the archive was encoded with a leading '.' or not
- elif member.filename == '.' or member.filename == './':
+ elif member.filename == "." or member.filename == "./":
continue
yield member.filename
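Both the tar and zip hunks above expand implicit parent directories so that the default 'base-dir: *' glob behaves the same whether or not the archive lists its directories explicitly. A simplified sketch of just that expansion, assuming the member names are available as a plain list (the real plugins also yield the member names themselves and query the archive for each candidate); implicit_dirs is an illustrative name.

def implicit_dirs(member_names):
    # Strip any leading "./" the way the sources do, then yield each parent
    # directory that the archive does not list explicitly.
    names = [name.lstrip("./") for name in member_names]
    listed = set(names)
    visited = set()
    for name in names:
        components = name.split("/")
        for i in range(len(components) - 1):
            dir_component = "/".join(components[: i + 1])
            if dir_component not in visited:
                visited.add(dir_component)
                if dir_component not in listed and dir_component != ".":
                    yield dir_component


# e.g. list(implicit_dirs(["a/b/c.txt"])) == ["a", "a/b"]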
diff --git a/src/buildstream/sandbox/_config.py b/src/buildstream/sandbox/_config.py
index 457f92b3c..614f22063 100644
--- a/src/buildstream/sandbox/_config.py
+++ b/src/buildstream/sandbox/_config.py
@@ -22,7 +22,7 @@
#
# A container for sandbox configuration data. We want the internals
# of this to be opaque, hence putting it in its own private file.
-class SandboxConfig():
+class SandboxConfig:
def __init__(self, build_uid, build_gid, build_os=None, build_arch=None):
self.build_uid = build_uid
self.build_gid = build_gid
@@ -46,17 +46,14 @@ class SandboxConfig():
# However this should be the right place to support
# such configurations in the future.
#
- unique_key = {
- 'os': self.build_os,
- 'arch': self.build_arch
- }
+ unique_key = {"os": self.build_os, "arch": self.build_arch}
# Avoid breaking cache key calculation with
        # the addition of configurable build uid/gid
if self.build_uid != 0:
- unique_key['build-uid'] = self.build_uid
+ unique_key["build-uid"] = self.build_uid
if self.build_gid != 0:
- unique_key['build-gid'] = self.build_gid
+ unique_key["build-gid"] = self.build_gid
return unique_key
diff --git a/src/buildstream/sandbox/_mount.py b/src/buildstream/sandbox/_mount.py
index c0f26c8d7..18751dde5 100644
--- a/src/buildstream/sandbox/_mount.py
+++ b/src/buildstream/sandbox/_mount.py
@@ -29,7 +29,7 @@ from .._fuse import SafeHardlinks
#
# Helper data object representing a single mount point in the mount map
#
-class Mount():
+class Mount:
def __init__(self, sandbox, mount_point, safe_hardlinks, fuse_mount_options=None):
# Getting _get_underlying_directory() here is acceptable as
# we're part of the sandbox code. This will fail if our
@@ -54,8 +54,8 @@ class Mount():
# Redirected mount
self.mount_origin = os.path.join(root_directory, mount_point.lstrip(os.sep))
self.mount_base = os.path.join(scratch_directory, utils.url_directory_name(mount_point))
- self.mount_source = os.path.join(self.mount_base, 'mount')
- self.mount_tempdir = os.path.join(self.mount_base, 'temp')
+ self.mount_source = os.path.join(self.mount_base, "mount")
+ self.mount_tempdir = os.path.join(self.mount_base, "temp")
os.makedirs(self.mount_origin, exist_ok=True)
os.makedirs(self.mount_tempdir, exist_ok=True)
else:
@@ -74,10 +74,10 @@ class Mount():
# When mounting a regular file, ensure the parent
# directory exists in the sandbox; and that an empty
# file is created at the mount location.
- parent_dir = os.path.dirname(self.mount_source.rstrip('/'))
+ parent_dir = os.path.dirname(self.mount_source.rstrip("/"))
os.makedirs(parent_dir, exist_ok=True)
if not os.path.exists(self.mount_source):
- with open(self.mount_source, 'w'):
+ with open(self.mount_source, "w"):
pass
@contextmanager
@@ -99,8 +99,7 @@ class Mount():
# sandbox (Sandbox): The sandbox object
# root_readonly (bool): Whether the sandbox root is readonly
#
-class MountMap():
-
+class MountMap:
def __init__(self, sandbox, root_readonly, fuse_mount_options=None):
# We will be doing the mounts in the order in which they were declared.
self.mounts = OrderedDict()
@@ -109,11 +108,11 @@ class MountMap():
fuse_mount_options = {}
# We want safe hardlinks on rootfs whenever root is not readonly
- self.mounts['/'] = Mount(sandbox, '/', not root_readonly, fuse_mount_options)
+ self.mounts["/"] = Mount(sandbox, "/", not root_readonly, fuse_mount_options)
for mark in sandbox._get_marked_directories():
- directory = mark['directory']
- artifact = mark['artifact']
+ directory = mark["directory"]
+ artifact = mark["artifact"]
# We want safe hardlinks for any non-root directory where
# artifacts will be staged to
diff --git a/src/buildstream/sandbox/_mounter.py b/src/buildstream/sandbox/_mounter.py
index 4e31ef67a..3adf8ff5b 100644
--- a/src/buildstream/sandbox/_mounter.py
+++ b/src/buildstream/sandbox/_mounter.py
@@ -25,22 +25,20 @@ from .. import utils, _signals
# A class to wrap the `mount` and `umount` system commands
-class Mounter():
+class Mounter:
@classmethod
- def _mount(cls, dest, src=None, mount_type=None,
- stdout=None, stderr=None, options=None,
- flags=None):
+ def _mount(cls, dest, src=None, mount_type=None, stdout=None, stderr=None, options=None, flags=None):
if stdout is None:
stdout = sys.stdout
if stderr is None:
stderr = sys.stderr
- argv = [utils.get_host_tool('mount')]
+ argv = [utils.get_host_tool("mount")]
if mount_type:
- argv.extend(['-t', mount_type])
+ argv.extend(["-t", mount_type])
if options:
- argv.extend(['-o', options])
+ argv.extend(["-o", options])
if flags:
argv.extend(flags)
@@ -48,16 +46,10 @@ class Mounter():
argv += [src]
argv += [dest]
- status, _ = utils._call(
- argv,
- terminate=True,
- stdout=stdout,
- stderr=stderr
- )
+ status, _ = utils._call(argv, terminate=True, stdout=stdout, stderr=stderr)
if status != 0:
- raise SandboxError('`{}` failed with exit code {}'
- .format(' '.join(argv), status))
+ raise SandboxError("`{}` failed with exit code {}".format(" ".join(argv), status))
return dest
@@ -68,17 +60,11 @@ class Mounter():
if stderr is None:
stderr = sys.stderr
- cmd = [utils.get_host_tool('umount'), '-R', path]
- status, _ = utils._call(
- cmd,
- terminate=True,
- stdout=stdout,
- stderr=stderr
- )
+ cmd = [utils.get_host_tool("umount"), "-R", path]
+ status, _ = utils._call(cmd, terminate=True, stdout=stdout, stderr=stderr)
if status != 0:
- raise SandboxError('`{}` failed with exit code {}'
- .format(' '.join(cmd), status))
+ raise SandboxError("`{}` failed with exit code {}".format(" ".join(cmd), status))
# mount()
#
@@ -98,8 +84,7 @@ class Mounter():
#
@classmethod
@contextmanager
- def mount(cls, dest, src=None, stdout=None,
- stderr=None, mount_type=None, **kwargs):
+ def mount(cls, dest, src=None, stdout=None, stderr=None, mount_type=None, **kwargs):
if stdout is None:
stdout = sys.stdout
if stderr is None:
@@ -108,7 +93,7 @@ class Mounter():
def kill_proc():
cls._umount(dest, stdout, stderr)
- options = ','.join([key for key, val in kwargs.items() if val])
+ options = ",".join([key for key, val in kwargs.items() if val])
path = cls._mount(dest, src, mount_type, stdout=stdout, stderr=stderr, options=options)
try:
@@ -139,8 +124,7 @@ class Mounter():
#
@classmethod
@contextmanager
- def bind_mount(cls, dest, src=None, stdout=None,
- stderr=None, **kwargs):
+ def bind_mount(cls, dest, src=None, stdout=None, stderr=None, **kwargs):
if stdout is None:
stdout = sys.stdout
if stderr is None:
@@ -149,8 +133,8 @@ class Mounter():
def kill_proc():
cls._umount(dest, stdout, stderr)
- kwargs['rbind'] = True
- options = ','.join([key for key, val in kwargs.items() if val])
+ kwargs["rbind"] = True
+ options = ",".join([key for key, val in kwargs.items() if val])
path = cls._mount(dest, src, None, stdout, stderr, options)
@@ -158,7 +142,7 @@ class Mounter():
with _signals.terminator(kill_proc):
# Make the rbind a slave to avoid unmounting vital devices in
# /proc
- cls._mount(dest, flags=['--make-rslave'])
+ cls._mount(dest, flags=["--make-rslave"])
yield path
finally:
cls._umount(dest, stdout, stderr)
diff --git a/src/buildstream/sandbox/_sandboxbuildbox.py b/src/buildstream/sandbox/_sandboxbuildbox.py
index 4258ee26d..c34d95223 100644
--- a/src/buildstream/sandbox/_sandboxbuildbox.py
+++ b/src/buildstream/sandbox/_sandboxbuildbox.py
@@ -34,22 +34,20 @@ from .._exceptions import SandboxError
# BuildBox-based sandbox implementation.
#
class SandboxBuildBox(Sandbox):
-
def __init__(self, context, project, directory, **kwargs):
- if kwargs.get('allow_real_directory'):
+ if kwargs.get("allow_real_directory"):
raise SandboxError("BuildBox does not support real directories")
- kwargs['allow_real_directory'] = False
+ kwargs["allow_real_directory"] = False
super().__init__(context, project, directory, **kwargs)
@classmethod
def check_available(cls):
try:
- utils.get_host_tool('buildbox')
+ utils.get_host_tool("buildbox")
except utils.ProgramNotFoundError as Error:
cls._dummy_reasons += ["buildbox not found"]
- raise SandboxError(" and ".join(cls._dummy_reasons),
- reason="unavailable-local-sandbox") from Error
+ raise SandboxError(" and ".join(cls._dummy_reasons), reason="unavailable-local-sandbox") from Error
@classmethod
def check_sandbox_config(cls, platform, config):
@@ -73,42 +71,42 @@ class SandboxBuildBox(Sandbox):
scratch_directory = self._get_scratch_directory()
if not self._has_command(command[0], env):
- raise SandboxCommandError("Staged artifacts do not provide command "
- "'{}'".format(command[0]),
- reason='missing-command')
+ raise SandboxCommandError(
+ "Staged artifacts do not provide command " "'{}'".format(command[0]), reason="missing-command"
+ )
# Grab the full path of the buildbox binary
try:
- buildbox_command = [utils.get_host_tool('buildbox')]
+ buildbox_command = [utils.get_host_tool("buildbox")]
except ProgramNotFoundError as Err:
- raise SandboxError(("BuildBox not on path, you are using the BuildBox sandbox because "
- "BST_FORCE_SANDBOX=buildbox")) from Err
+ raise SandboxError(
+ ("BuildBox not on path, you are using the BuildBox sandbox because " "BST_FORCE_SANDBOX=buildbox")
+ ) from Err
for mark in self._get_marked_directories():
- path = mark['directory']
- assert path.startswith('/') and len(path) > 1
+ path = mark["directory"]
+ assert path.startswith("/") and len(path) > 1
root_directory.descend(*path[1:].split(os.path.sep), create=True)
digest = root_directory._get_digest()
- with open(os.path.join(scratch_directory, 'in'), 'wb') as input_digest_file:
+ with open(os.path.join(scratch_directory, "in"), "wb") as input_digest_file:
input_digest_file.write(digest.SerializeToString())
buildbox_command += ["--local=" + root_directory.cas_cache.casdir]
buildbox_command += ["--input-digest=in"]
buildbox_command += ["--output-digest=out"]
- common_details = ("BuildBox is a experimental sandbox and does not support the requested feature.\n"
- "You are using this feature because BST_FORCE_SANDBOX=buildbox.")
+ common_details = (
+ "BuildBox is a experimental sandbox and does not support the requested feature.\n"
+ "You are using this feature because BST_FORCE_SANDBOX=buildbox."
+ )
if not flags & SandboxFlags.NETWORK_ENABLED:
# TODO
- self._issue_warning(
- "BuildBox sandbox does not have Networking yet",
- detail=common_details
- )
+ self._issue_warning("BuildBox sandbox does not have Networking yet", detail=common_details)
if cwd is not None:
- buildbox_command += ['--chdir=' + cwd]
+ buildbox_command += ["--chdir=" + cwd]
# In interactive mode, we want a complete devpts inside
# the container, so there is a /dev/console and such. In
@@ -118,27 +116,24 @@ class SandboxBuildBox(Sandbox):
if flags & SandboxFlags.INTERACTIVE:
# TODO
self._issue_warning(
- "BuildBox sandbox does not fully support BuildStream shells yet",
- detail=common_details
+ "BuildBox sandbox does not fully support BuildStream shells yet", detail=common_details
)
if flags & SandboxFlags.ROOT_READ_ONLY:
# TODO
self._issue_warning(
- "BuildBox sandbox does not fully support BuildStream `Read only Root`",
- detail=common_details
+ "BuildBox sandbox does not fully support BuildStream `Read only Root`", detail=common_details
)
# Set UID and GID
if not flags & SandboxFlags.INHERIT_UID:
# TODO
self._issue_warning(
- "BuildBox sandbox does not fully support BuildStream Inherit UID",
- detail=common_details
+ "BuildBox sandbox does not fully support BuildStream Inherit UID", detail=common_details
)
- os.makedirs(os.path.join(scratch_directory, 'mnt'), exist_ok=True)
- buildbox_command += ['mnt']
+ os.makedirs(os.path.join(scratch_directory, "mnt"), exist_ok=True)
+ buildbox_command += ["mnt"]
# Add the command
buildbox_command += command
@@ -150,7 +145,7 @@ class SandboxBuildBox(Sandbox):
with ExitStack() as stack:
# Ensure the cwd exists
if cwd is not None and len(cwd) > 1:
- assert cwd.startswith('/')
+ assert cwd.startswith("/")
root_directory.descend(*cwd[1:].split(os.path.sep), create=True)
# If we're interactive, we want to inherit our stdin,
@@ -162,12 +157,18 @@ class SandboxBuildBox(Sandbox):
stdin = stack.enter_context(open(os.devnull, "r"))
# Run buildbox !
- exit_code = self.run_buildbox(buildbox_command, stdin, stdout, stderr, env,
- interactive=(flags & SandboxFlags.INTERACTIVE),
- cwd=scratch_directory)
+ exit_code = self.run_buildbox(
+ buildbox_command,
+ stdin,
+ stdout,
+ stderr,
+ env,
+ interactive=(flags & SandboxFlags.INTERACTIVE),
+ cwd=scratch_directory,
+ )
if exit_code == 0:
- with open(os.path.join(scratch_directory, 'out'), 'rb') as output_digest_file:
+ with open(os.path.join(scratch_directory, "out"), "rb") as output_digest_file:
output_digest = remote_execution_pb2.Digest()
output_digest.ParseFromString(output_digest_file.read())
self._vdir = CasBasedDirectory(root_directory.cas_cache, digest=output_digest)
@@ -203,7 +204,7 @@ class SandboxBuildBox(Sandbox):
stdout=stdout,
stderr=stderr,
cwd=cwd,
- start_new_session=interactive
+ start_new_session=interactive,
)
# Wait for the child process to finish, ensuring that
diff --git a/src/buildstream/sandbox/_sandboxbwrap.py b/src/buildstream/sandbox/_sandboxbwrap.py
index 5c4b9a295..1405611bc 100644
--- a/src/buildstream/sandbox/_sandboxbwrap.py
+++ b/src/buildstream/sandbox/_sandboxbwrap.py
@@ -48,34 +48,27 @@ class SandboxBwrap(Sandbox):
_have_good_bwrap = None
# Minimal set of devices for the sandbox
- DEVICES = [
- '/dev/full',
- '/dev/null',
- '/dev/urandom',
- '/dev/random',
- '/dev/zero'
- ]
+ DEVICES = ["/dev/full", "/dev/null", "/dev/urandom", "/dev/random", "/dev/zero"]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self.linux32 = kwargs['linux32']
+ self.linux32 = kwargs["linux32"]
@classmethod
def check_available(cls):
cls._have_fuse = os.path.exists("/dev/fuse")
if not cls._have_fuse:
- cls._dummy_reasons += ['Fuse is unavailable']
+ cls._dummy_reasons += ["Fuse is unavailable"]
try:
- utils.get_host_tool('bwrap')
+ utils.get_host_tool("bwrap")
except utils.ProgramNotFoundError as Error:
cls._bwrap_exists = False
cls._have_good_bwrap = False
cls._die_with_parent_available = False
cls._json_status_available = False
- cls._dummy_reasons += ['Bubblewrap not found']
- raise SandboxError(" and ".join(cls._dummy_reasons),
- reason="unavailable-local-sandbox") from Error
+ cls._dummy_reasons += ["Bubblewrap not found"]
+ raise SandboxError(" and ".join(cls._dummy_reasons), reason="unavailable-local-sandbox") from Error
bwrap_version = _site.get_bwrap_version()
@@ -84,7 +77,7 @@ class SandboxBwrap(Sandbox):
cls._die_with_parent_available = (0, 1, 8) <= bwrap_version
cls._json_status_available = (0, 3, 2) <= bwrap_version
if not cls._have_good_bwrap:
- cls._dummy_reasons += ['Bubblewrap is too old']
+ cls._dummy_reasons += ["Bubblewrap is too old"]
raise SandboxError(" and ".join(cls._dummy_reasons))
cls._uid = os.geteuid()
@@ -98,29 +91,26 @@ class SandboxBwrap(Sandbox):
# issue a warning if it's not available, and save the state
# locally so that we can inform the sandbox to not try it
# later on.
- bwrap = utils.get_host_tool('bwrap')
+ bwrap = utils.get_host_tool("bwrap")
try:
- whoami = utils.get_host_tool('whoami')
- output = subprocess.check_output([
- bwrap,
- '--ro-bind', '/', '/',
- '--unshare-user',
- '--uid', '0', '--gid', '0',
- whoami,
- ], universal_newlines=True).strip()
+ whoami = utils.get_host_tool("whoami")
+ output = subprocess.check_output(
+ [bwrap, "--ro-bind", "/", "/", "--unshare-user", "--uid", "0", "--gid", "0", whoami,],
+ universal_newlines=True,
+ ).strip()
except subprocess.CalledProcessError:
- output = ''
+ output = ""
except utils.ProgramNotFoundError:
- output = ''
+ output = ""
- return output == 'root'
+ return output == "root"
@classmethod
def check_sandbox_config(cls, local_platform, config):
if cls.user_ns_available:
# User namespace support allows arbitrary build UID/GID settings.
pass
- elif (config.build_uid != local_platform._uid or config.build_gid != local_platform._gid):
+ elif config.build_uid != local_platform._uid or config.build_gid != local_platform._gid:
# Without user namespace support, the UID/GID in the sandbox
# will match the host UID/GID.
return False
@@ -141,9 +131,9 @@ class SandboxBwrap(Sandbox):
root_directory = self.get_virtual_directory()._get_underlying_directory()
if not self._has_command(command[0], env):
- raise SandboxCommandError("Staged artifacts do not provide command "
- "'{}'".format(command[0]),
- reason='missing-command')
+ raise SandboxCommandError(
+ "Staged artifacts do not provide command " "'{}'".format(command[0]), reason="missing-command"
+ )
# NOTE: MountMap transitively imports `_fuse/fuse.py` which raises an
# EnvironmentError when fuse is not found. Since this module is
@@ -154,29 +144,29 @@ class SandboxBwrap(Sandbox):
# Create the mount map, this will tell us where
# each mount point needs to be mounted from and to
mount_map = MountMap(self, flags & SandboxFlags.ROOT_READ_ONLY)
- root_mount_source = mount_map.get_mount_source('/')
+ root_mount_source = mount_map.get_mount_source("/")
# start command with linux32 if needed
if self.linux32:
- bwrap_command = [utils.get_host_tool('linux32')]
+ bwrap_command = [utils.get_host_tool("linux32")]
else:
bwrap_command = []
# Grab the full path of the bwrap binary
- bwrap_command += [utils.get_host_tool('bwrap')]
+ bwrap_command += [utils.get_host_tool("bwrap")]
for k, v in env.items():
- bwrap_command += ['--setenv', k, v]
+ bwrap_command += ["--setenv", k, v]
for k in os.environ.keys() - env.keys():
- bwrap_command += ['--unsetenv', k]
+ bwrap_command += ["--unsetenv", k]
# Create a new pid namespace, this also ensures that any subprocesses
# are cleaned up when the bwrap process exits.
- bwrap_command += ['--unshare-pid']
+ bwrap_command += ["--unshare-pid"]
# Ensure subprocesses are cleaned up when the bwrap parent dies.
if self._die_with_parent_available:
- bwrap_command += ['--die-with-parent']
+ bwrap_command += ["--die-with-parent"]
# Add in the root filesystem stuff first.
#
@@ -186,15 +176,12 @@ class SandboxBwrap(Sandbox):
bwrap_command += ["--bind", root_mount_source, "/"]
if not flags & SandboxFlags.NETWORK_ENABLED:
- bwrap_command += ['--unshare-net']
- bwrap_command += ['--unshare-uts', '--hostname', 'buildstream']
- bwrap_command += ['--unshare-ipc']
+ bwrap_command += ["--unshare-net"]
+ bwrap_command += ["--unshare-uts", "--hostname", "buildstream"]
+ bwrap_command += ["--unshare-ipc"]
# Give it a proc and tmpfs
- bwrap_command += [
- '--proc', '/proc',
- '--tmpfs', '/tmp'
- ]
+ bwrap_command += ["--proc", "/proc", "--tmpfs", "/tmp"]
# In interactive mode, we want a complete devpts inside
# the container, so there is a /dev/console and such. In
@@ -202,21 +189,21 @@ class SandboxBwrap(Sandbox):
# a minimal set of devices to expose to the sandbox.
#
if flags & SandboxFlags.INTERACTIVE:
- bwrap_command += ['--dev', '/dev']
+ bwrap_command += ["--dev", "/dev"]
else:
for device in self.DEVICES:
- bwrap_command += ['--dev-bind', device, device]
+ bwrap_command += ["--dev-bind", device, device]
# Create a tmpfs for /dev/shm, if we're in interactive this
# is handled by `--dev /dev`
#
- bwrap_command += ['--tmpfs', '/dev/shm']
+ bwrap_command += ["--tmpfs", "/dev/shm"]
# Add bind mounts to any marked directories
marked_directories = self._get_marked_directories()
mount_source_overrides = self._get_mount_sources()
for mark in marked_directories:
- mount_point = mark['directory']
+ mount_point = mark["directory"]
if mount_point in mount_source_overrides: # pylint: disable=consider-using-get
mount_source = mount_source_overrides[mount_point]
else:
@@ -230,22 +217,22 @@ class SandboxBwrap(Sandbox):
# harmless to do in a build environment where the directories
# we mount just never contain device files.
#
- bwrap_command += ['--dev-bind', mount_source, mount_point]
+ bwrap_command += ["--dev-bind", mount_source, mount_point]
if flags & SandboxFlags.ROOT_READ_ONLY:
bwrap_command += ["--remount-ro", "/"]
if cwd is not None:
- bwrap_command += ['--dir', cwd]
- bwrap_command += ['--chdir', cwd]
+ bwrap_command += ["--dir", cwd]
+ bwrap_command += ["--chdir", cwd]
        # Set UID and GID
if self.user_ns_available:
- bwrap_command += ['--unshare-user']
+ bwrap_command += ["--unshare-user"]
if not flags & SandboxFlags.INHERIT_UID:
uid = self._get_config().build_uid
gid = self._get_config().build_gid
- bwrap_command += ['--uid', str(uid), '--gid', str(gid)]
+ bwrap_command += ["--uid", str(uid), "--gid", str(gid)]
with ExitStack() as stack:
pass_fds = ()
@@ -253,7 +240,7 @@ class SandboxBwrap(Sandbox):
if self._json_status_available:
json_status_file = stack.enter_context(TemporaryFile())
pass_fds = (json_status_file.fileno(),)
- bwrap_command += ['--json-status-fd', str(json_status_file.fileno())]
+ bwrap_command += ["--json-status-fd", str(json_status_file.fileno())]
# Add the command
bwrap_command += command
@@ -265,7 +252,7 @@ class SandboxBwrap(Sandbox):
#
existing_basedirs = {
directory: os.path.exists(os.path.join(root_directory, directory))
- for directory in ['dev/shm', 'tmp', 'dev', 'proc']
+ for directory in ["dev/shm", "tmp", "dev", "proc"]
}
# Use the MountMap context manager to ensure that any redirected
@@ -283,15 +270,16 @@ class SandboxBwrap(Sandbox):
stdin = stack.enter_context(open(os.devnull, "r"))
# Run bubblewrap !
- exit_code = self.run_bwrap(bwrap_command, stdin, stdout, stderr,
- (flags & SandboxFlags.INTERACTIVE), pass_fds)
+ exit_code = self.run_bwrap(
+ bwrap_command, stdin, stdout, stderr, (flags & SandboxFlags.INTERACTIVE), pass_fds
+ )
# Cleanup things which bwrap might have left behind, while
# everything is still mounted because bwrap can be creating
# the devices on the fuse mount, so we should remove it there.
if not flags & SandboxFlags.INTERACTIVE:
for device in self.DEVICES:
- device_path = os.path.join(root_mount_source, device.lstrip('/'))
+ device_path = os.path.join(root_mount_source, device.lstrip("/"))
# This will remove the device in a loop, allowing some
# retries in case the device file leaked by bubblewrap is still busy
@@ -299,7 +287,7 @@ class SandboxBwrap(Sandbox):
# Remove /tmp, this is a bwrap owned thing we want to be sure
# never ends up in an artifact
- for basedir in ['dev/shm', 'tmp', 'dev', 'proc']:
+ for basedir in ["dev/shm", "tmp", "dev", "proc"]:
# Skip removal of directories which already existed before
# launching bwrap
@@ -341,12 +329,14 @@ class SandboxBwrap(Sandbox):
for line in json_status_file:
with suppress(json.decoder.JSONDecodeError):
o = json.loads(line.decode())
- if isinstance(o, collections.abc.Mapping) and 'exit-code' in o:
- child_exit_code = o['exit-code']
+ if isinstance(o, collections.abc.Mapping) and "exit-code" in o:
+ child_exit_code = o["exit-code"]
break
if child_exit_code is None:
- raise SandboxError("`bwrap' terminated during sandbox setup with exitcode {}".format(exit_code),
- reason="bwrap-sandbox-fail")
+ raise SandboxError(
+ "`bwrap' terminated during sandbox setup with exitcode {}".format(exit_code),
+ reason="bwrap-sandbox-fail",
+ )
exit_code = child_exit_code
self._vdir._mark_changed()
@@ -432,7 +422,7 @@ class SandboxBwrap(Sandbox):
stdin=stdin,
stdout=stdout,
stderr=stderr,
- start_new_session=new_session
+ start_new_session=new_session,
)
# Wait for the child process to finish, ensuring that
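The check shown above in _sandboxbwrap.py decides whether user namespaces are usable by running whoami under bwrap with the UID/GID mapped to 0 and seeing whether the sandboxed process really reports root. A minimal standalone sketch of that probe, assuming bwrap and whoami are on PATH; user_namespaces_work is an illustrative name.

import shutil
import subprocess


def user_namespaces_work():
    # Probe: map uid/gid to 0 in a fresh user namespace and check the result.
    bwrap = shutil.which("bwrap")
    whoami = shutil.which("whoami")
    if bwrap is None or whoami is None:
        return False
    try:
        output = subprocess.check_output(
            [bwrap, "--ro-bind", "/", "/", "--unshare-user", "--uid", "0", "--gid", "0", whoami],
            universal_newlines=True,
        ).strip()
    except subprocess.CalledProcessError:
        return False
    return output == "root"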
diff --git a/src/buildstream/sandbox/_sandboxchroot.py b/src/buildstream/sandbox/_sandboxchroot.py
index 8d4c54058..1805131b1 100644
--- a/src/buildstream/sandbox/_sandboxchroot.py
+++ b/src/buildstream/sandbox/_sandboxchroot.py
@@ -35,7 +35,7 @@ from . import Sandbox, SandboxFlags, SandboxCommandError
class SandboxChroot(Sandbox):
- _FUSE_MOUNT_OPTIONS = {'dev': True}
+ _FUSE_MOUNT_OPTIONS = {"dev": True}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -43,8 +43,10 @@ class SandboxChroot(Sandbox):
uid = self._get_config().build_uid
gid = self._get_config().build_gid
if uid != 0 or gid != 0:
- raise SandboxError("Chroot sandboxes cannot specify a non-root uid/gid "
- "({},{} were supplied via config)".format(uid, gid))
+ raise SandboxError(
+ "Chroot sandboxes cannot specify a non-root uid/gid "
+ "({},{} were supplied via config)".format(uid, gid)
+ )
self.mount_map = None
@@ -78,20 +80,19 @@ class SandboxChroot(Sandbox):
def _run(self, command, flags, *, cwd, env):
if not self._has_command(command[0], env):
- raise SandboxCommandError("Staged artifacts do not provide command "
- "'{}'".format(command[0]),
- reason='missing-command')
+ raise SandboxCommandError(
+ "Staged artifacts do not provide command " "'{}'".format(command[0]), reason="missing-command"
+ )
stdout, stderr = self._get_output()
# Create the mount map, this will tell us where
# each mount point needs to be mounted from and to
- self.mount_map = MountMap(self, flags & SandboxFlags.ROOT_READ_ONLY,
- self._FUSE_MOUNT_OPTIONS)
+ self.mount_map = MountMap(self, flags & SandboxFlags.ROOT_READ_ONLY, self._FUSE_MOUNT_OPTIONS)
# Create a sysroot and run the command inside it
with ExitStack() as stack:
- os.makedirs('/var/run/buildstream', exist_ok=True)
+ os.makedirs("/var/run/buildstream", exist_ok=True)
# FIXME: While we do not currently do anything to prevent
# network access, we also don't copy /etc/resolv.conf to
@@ -104,21 +105,20 @@ class SandboxChroot(Sandbox):
#
# Nonetheless a better solution could perhaps be found.
- rootfs = stack.enter_context(utils._tempdir(dir='/var/run/buildstream'))
+ rootfs = stack.enter_context(utils._tempdir(dir="/var/run/buildstream"))
stack.enter_context(self.create_devices(self._root, flags))
stack.enter_context(self.mount_dirs(rootfs, flags, stdout, stderr))
if flags & SandboxFlags.INTERACTIVE:
stdin = sys.stdin
else:
- stdin = stack.enter_context(open(os.devnull, 'r'))
+ stdin = stack.enter_context(open(os.devnull, "r"))
# Ensure the cwd exists
if cwd is not None:
workdir = os.path.join(rootfs, cwd.lstrip(os.sep))
os.makedirs(workdir, exist_ok=True)
- status = self.chroot(rootfs, command, stdin, stdout,
- stderr, cwd, env, flags)
+ status = self.chroot(rootfs, command, stdin, stdout, stderr, cwd, env, flags)
self._vdir._mark_changed()
return status
@@ -173,7 +173,7 @@ class SandboxChroot(Sandbox):
# If you try to put gtk dialogs here Tristan (either)
# will personally scald you
preexec_fn=lambda: (os.chroot(rootfs), os.chdir(cwd)),
- start_new_session=flags & SandboxFlags.INTERACTIVE
+ start_new_session=flags & SandboxFlags.INTERACTIVE,
)
# Wait for the child process to finish, ensuring that
@@ -214,13 +214,14 @@ class SandboxChroot(Sandbox):
# Exceptions in preexec_fn are simply reported as
# 'Exception occurred in preexec_fn', turn these into
# a more readable message.
- if str(e) == 'Exception occurred in preexec_fn.':
- raise SandboxError('Could not chroot into {} or chdir into {}. '
- 'Ensure you are root and that the relevant directory exists.'
- .format(rootfs, cwd)) from e
+ if str(e) == "Exception occurred in preexec_fn.":
+ raise SandboxError(
+ "Could not chroot into {} or chdir into {}. "
+ "Ensure you are root and that the relevant directory exists.".format(rootfs, cwd)
+ ) from e
# Otherwise, raise a more general error
- raise SandboxError('Could not run command {}: {}'.format(command, e)) from e
+ raise SandboxError("Could not run command {}: {}".format(command, e)) from e
return code
@@ -251,8 +252,10 @@ class SandboxChroot(Sandbox):
devices.append(self.mknod(device, location))
except OSError as err:
if err.errno == 1:
- raise SandboxError("Permission denied while creating device node: {}.".format(err) +
- "BuildStream reqiures root permissions for these setttings.")
+ raise SandboxError(
+ "Permission denied while creating device node: {}.".format(err)
+ + "BuildStream reqiures root permissions for these setttings."
+ )
raise
@@ -300,16 +303,16 @@ class SandboxChroot(Sandbox):
with ExitStack() as stack:
stack.enter_context(self.mount_map.mounted(self))
- stack.enter_context(mount_point('/'))
+ stack.enter_context(mount_point("/"))
if flags & SandboxFlags.INTERACTIVE:
- stack.enter_context(mount_src('/dev'))
+ stack.enter_context(mount_src("/dev"))
- stack.enter_context(mount_src('/tmp'))
- stack.enter_context(mount_src('/proc'))
+ stack.enter_context(mount_src("/tmp"))
+ stack.enter_context(mount_src("/proc"))
for mark in self._get_marked_directories():
- stack.enter_context(mount_point(mark['directory']))
+ stack.enter_context(mount_point(mark["directory"]))
# Remount root RO if necessary
if flags & flags & SandboxFlags.ROOT_READ_ONLY:
@@ -343,10 +346,9 @@ class SandboxChroot(Sandbox):
os.mknod(target, mode=stat.S_IFCHR | dev.st_mode, device=target_dev)
except PermissionError as e:
- raise SandboxError('Could not create device {}, ensure that you have root permissions: {}')
+ raise SandboxError("Could not create device {}, ensure that you have root permissions: {}")
except OSError as e:
- raise SandboxError('Could not create device {}: {}'
- .format(target, e)) from e
+ raise SandboxError("Could not create device {}: {}".format(target, e)) from e
return target
diff --git a/src/buildstream/sandbox/_sandboxdummy.py b/src/buildstream/sandbox/_sandboxdummy.py
index ae3d5e512..0cae2b6a9 100644
--- a/src/buildstream/sandbox/_sandboxdummy.py
+++ b/src/buildstream/sandbox/_sandboxdummy.py
@@ -28,9 +28,10 @@ class SandboxDummy(Sandbox):
def _run(self, command, flags, *, cwd, env):
if not self._has_command(command[0], env):
- raise SandboxCommandError("Staged artifacts do not provide command "
- "'{}'".format(command[0]),
- reason='missing-command')
+ raise SandboxCommandError(
+ "Staged artifacts do not provide command " "'{}'".format(command[0]), reason="missing-command"
+ )
- raise SandboxError("This platform does not support local builds: {}".format(self._reason),
- reason="unavailable-local-sandbox")
+ raise SandboxError(
+ "This platform does not support local builds: {}".format(self._reason), reason="unavailable-local-sandbox"
+ )
diff --git a/src/buildstream/sandbox/_sandboxreapi.py b/src/buildstream/sandbox/_sandboxreapi.py
index 31c1c9674..2d661c893 100644
--- a/src/buildstream/sandbox/_sandboxreapi.py
+++ b/src/buildstream/sandbox/_sandboxreapi.py
@@ -30,7 +30,6 @@ from ..storage._casbaseddirectory import CasBasedDirectory
# the Remote Execution API.
#
class SandboxREAPI(Sandbox):
-
def _use_cas_based_directory(self):
# Always use CasBasedDirectory for REAPI
return True
@@ -46,14 +45,14 @@ class SandboxREAPI(Sandbox):
# Ensure working directory exists
if len(cwd) > 1:
- assert cwd.startswith('/')
+ assert cwd.startswith("/")
vdir.descend(*cwd[1:].split(os.path.sep), create=True)
# Create directories for all marked directories. This emulates
# some of the behaviour of other sandboxes, which create these
# to use as mount points.
for mark in self._get_marked_directories():
- directory = mark['directory']
+ directory = mark["directory"]
# Create each marked directory
vdir.descend(*directory.split(os.path.sep), create=True)
@@ -61,21 +60,21 @@ class SandboxREAPI(Sandbox):
input_root_digest = vdir._get_digest()
command_proto = self._create_command(command, cwd, env)
command_digest = cascache.add_object(buffer=command_proto.SerializeToString())
- action = remote_execution_pb2.Action(command_digest=command_digest,
- input_root_digest=input_root_digest)
+ action = remote_execution_pb2.Action(command_digest=command_digest, input_root_digest=input_root_digest)
action_result = self._execute_action(action) # pylint: disable=assignment-from-no-return
# Get output of build
- self._process_job_output(action_result.output_directories, action_result.output_files,
- failure=action_result.exit_code != 0)
+ self._process_job_output(
+ action_result.output_directories, action_result.output_files, failure=action_result.exit_code != 0
+ )
if stdout:
if action_result.stdout_raw:
- stdout.write(str(action_result.stdout_raw, 'utf-8', errors='ignore'))
+ stdout.write(str(action_result.stdout_raw, "utf-8", errors="ignore"))
if stderr:
if action_result.stderr_raw:
- stderr.write(str(action_result.stderr_raw, 'utf-8', errors='ignore'))
+ stderr.write(str(action_result.stderr_raw, "utf-8", errors="ignore"))
# Non-zero exit code means a normal error during the build:
# the remote execution system has worked correctly but the command failed.
@@ -83,19 +82,21 @@ class SandboxREAPI(Sandbox):
def _create_command(self, command, working_directory, environment):
# Creates a command proto
- environment_variables = [remote_execution_pb2.Command.
- EnvironmentVariable(name=k, value=v)
- for (k, v) in environment.items()]
+ environment_variables = [
+ remote_execution_pb2.Command.EnvironmentVariable(name=k, value=v) for (k, v) in environment.items()
+ ]
# Request the whole directory tree as output
output_directory = os.path.relpath(os.path.sep, start=working_directory)
- return remote_execution_pb2.Command(arguments=command,
- working_directory=working_directory,
- environment_variables=environment_variables,
- output_files=[],
- output_directories=[output_directory],
- platform=None)
+ return remote_execution_pb2.Command(
+ arguments=command,
+ working_directory=working_directory,
+ environment_variables=environment_variables,
+ output_files=[],
+ output_directories=[output_directory],
+ platform=None,
+ )
def _process_job_output(self, output_directories, output_files, *, failure):
# Reads the remote execution server response to an execution request.
@@ -124,7 +125,7 @@ class SandboxREAPI(Sandbox):
# Get digest of root directory from tree digest
tree = remote_execution_pb2.Tree()
- with open(cascache.objpath(tree_digest), 'rb') as f:
+ with open(cascache.objpath(tree_digest), "rb") as f:
tree.ParseFromString(f.read())
root_directory = tree.root.SerializeToString()
dir_digest = utils._message_digest(root_directory)
@@ -140,8 +141,7 @@ class SandboxREAPI(Sandbox):
return _SandboxREAPIBatch(self, main_group, flags, collect=collect)
def _execute_action(self, action):
- raise ImplError("Sandbox of type '{}' does not implement _execute_action()"
- .format(type(self).__name__))
+ raise ImplError("Sandbox of type '{}' does not implement _execute_action()".format(type(self).__name__))
# _SandboxREAPIBatch()
@@ -149,7 +149,6 @@ class SandboxREAPI(Sandbox):
# Command batching by shell script generation.
#
class _SandboxREAPIBatch(_SandboxBatch):
-
def __init__(self, sandbox, main_group, flags, *, collect=None):
super().__init__(sandbox, main_group, flags, collect=collect)
@@ -164,7 +163,7 @@ class _SandboxREAPIBatch(_SandboxBatch):
self.main_group.execute(self)
first = self.first_command
- if first and self.sandbox.run(['sh', '-c', '-e', self.script], self.flags, cwd=first.cwd, env=first.env) != 0:
+ if first and self.sandbox.run(["sh", "-c", "-e", self.script], self.flags, cwd=first.cwd, env=first.env) != 0:
raise SandboxCommandError("Command execution failed", collect=self.collect)
def execute_group(self, group):
@@ -195,7 +194,7 @@ class _SandboxREAPIBatch(_SandboxBatch):
self.env = command.env
# Actual command execution
- cmdline = ' '.join(shlex.quote(cmd) for cmd in command.command)
+ cmdline = " ".join(shlex.quote(cmd) for cmd in command.command)
self.script += "(set -ex; {})".format(cmdline)
# Error handling
diff --git a/src/buildstream/sandbox/_sandboxremote.py b/src/buildstream/sandbox/_sandboxremote.py
index fa7cc9f90..d4ffd64a1 100644
--- a/src/buildstream/sandbox/_sandboxremote.py
+++ b/src/buildstream/sandbox/_sandboxremote.py
@@ -39,7 +39,7 @@ from .._cas import CASRemote
from .._remote import RemoteSpec
-class RemoteExecutionSpec(namedtuple('RemoteExecutionSpec', 'exec_service storage_service action_service')):
+class RemoteExecutionSpec(namedtuple("RemoteExecutionSpec", "exec_service storage_service action_service")):
pass
@@ -49,59 +49,63 @@ class RemoteExecutionSpec(namedtuple('RemoteExecutionSpec', 'exec_service storag
# commands to a remote server and retrieves the results from it.
#
class SandboxRemote(SandboxREAPI):
-
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self._output_files_required = kwargs.get('output_files_required', True)
+ self._output_files_required = kwargs.get("output_files_required", True)
- config = kwargs['specs'] # This should be a RemoteExecutionSpec
+ config = kwargs["specs"] # This should be a RemoteExecutionSpec
if config is None:
return
# gRPC doesn't support fork without exec, which is used in the main process.
assert not utils._is_main_process()
- self.storage_url = config.storage_service['url']
- self.exec_url = config.exec_service['url']
+ self.storage_url = config.storage_service["url"]
+ self.exec_url = config.exec_service["url"]
exec_certs = {}
- for key in ['client-cert', 'client-key', 'server-cert']:
+ for key in ["client-cert", "client-key", "server-cert"]:
if key in config.exec_service:
- with open(config.exec_service[key], 'rb') as f:
+ with open(config.exec_service[key], "rb") as f:
exec_certs[key] = f.read()
self.exec_credentials = grpc.ssl_channel_credentials(
- root_certificates=exec_certs.get('server-cert'),
- private_key=exec_certs.get('client-key'),
- certificate_chain=exec_certs.get('client-cert'))
+ root_certificates=exec_certs.get("server-cert"),
+ private_key=exec_certs.get("client-key"),
+ certificate_chain=exec_certs.get("client-cert"),
+ )
action_certs = {}
- for key in ['client-cert', 'client-key', 'server-cert']:
+ for key in ["client-cert", "client-key", "server-cert"]:
if key in config.action_service:
- with open(config.action_service[key], 'rb') as f:
+ with open(config.action_service[key], "rb") as f:
action_certs[key] = f.read()
if config.action_service:
- self.action_url = config.action_service['url']
- self.action_instance = config.action_service.get('instance-name', None)
+ self.action_url = config.action_service["url"]
+ self.action_instance = config.action_service.get("instance-name", None)
self.action_credentials = grpc.ssl_channel_credentials(
- root_certificates=action_certs.get('server-cert'),
- private_key=action_certs.get('client-key'),
- certificate_chain=action_certs.get('client-cert'))
+ root_certificates=action_certs.get("server-cert"),
+ private_key=action_certs.get("client-key"),
+ certificate_chain=action_certs.get("client-cert"),
+ )
else:
self.action_url = None
self.action_instance = None
self.action_credentials = None
- self.exec_instance = config.exec_service.get('instance-name', None)
- self.storage_instance = config.storage_service.get('instance-name', None)
-
- self.storage_remote_spec = RemoteSpec(self.storage_url, push=True,
- server_cert=config.storage_service.get('server-cert'),
- client_key=config.storage_service.get('client-key'),
- client_cert=config.storage_service.get('client-cert'),
- instance_name=self.storage_instance)
+ self.exec_instance = config.exec_service.get("instance-name", None)
+ self.storage_instance = config.storage_service.get("instance-name", None)
+
+ self.storage_remote_spec = RemoteSpec(
+ self.storage_url,
+ push=True,
+ server_cert=config.storage_service.get("server-cert"),
+ client_key=config.storage_service.get("client-key"),
+ client_cert=config.storage_service.get("client-cert"),
+ instance_name=self.storage_instance,
+ )
self.operation_name = None
def info(self, msg):
@@ -109,47 +113,49 @@ class SandboxRemote(SandboxREAPI):
@staticmethod
def specs_from_config_node(config_node, basedir=None):
-
def require_node(config, keyname):
val = config.get_mapping(keyname, default=None)
if val is None:
provenance = remote_config.get_provenance()
- raise _yaml.LoadError("{}: '{}' was not present in the remote "
- "execution configuration (remote-execution). "
- .format(str(provenance), keyname),
- _yaml.LoadErrorReason.INVALID_DATA)
+ raise _yaml.LoadError(
+ "{}: '{}' was not present in the remote "
+ "execution configuration (remote-execution). ".format(str(provenance), keyname),
+ _yaml.LoadErrorReason.INVALID_DATA,
+ )
return val
- remote_config = config_node.get_mapping('remote-execution', default=None)
+ remote_config = config_node.get_mapping("remote-execution", default=None)
if remote_config is None:
return None
- service_keys = ['execution-service', 'storage-service', 'action-cache-service']
+ service_keys = ["execution-service", "storage-service", "action-cache-service"]
- remote_config.validate_keys(['url', *service_keys])
+ remote_config.validate_keys(["url", *service_keys])
- exec_config = require_node(remote_config, 'execution-service')
- storage_config = require_node(remote_config, 'storage-service')
- action_config = remote_config.get_mapping('action-cache-service', default={})
+ exec_config = require_node(remote_config, "execution-service")
+ storage_config = require_node(remote_config, "storage-service")
+ action_config = remote_config.get_mapping("action-cache-service", default={})
- tls_keys = ['client-key', 'client-cert', 'server-cert']
+ tls_keys = ["client-key", "client-cert", "server-cert"]
- exec_config.validate_keys(['url', 'instance-name', *tls_keys])
- storage_config.validate_keys(['url', 'instance-name', *tls_keys])
+ exec_config.validate_keys(["url", "instance-name", *tls_keys])
+ storage_config.validate_keys(["url", "instance-name", *tls_keys])
if action_config:
- action_config.validate_keys(['url', 'instance-name', *tls_keys])
+ action_config.validate_keys(["url", "instance-name", *tls_keys])
# Maintain some backwards compatibility with older configs, in which
# 'url' was the only valid key for remote-execution:
- if 'url' in remote_config:
- if 'execution-service' not in remote_config:
- exec_config = Node.from_dict({'url': remote_config['url']})
+ if "url" in remote_config:
+ if "execution-service" not in remote_config:
+ exec_config = Node.from_dict({"url": remote_config["url"]})
else:
- provenance = remote_config.get_node('url').get_provenance()
- raise _yaml.LoadError("{}: 'url' and 'execution-service' keys were found in the remote "
- "execution configuration (remote-execution). "
- "You can only specify one of these."
- .format(str(provenance)), _yaml.LoadErrorReason.INVALID_DATA)
+ provenance = remote_config.get_node("url").get_provenance()
+ raise _yaml.LoadError(
+ "{}: 'url' and 'execution-service' keys were found in the remote "
+ "execution configuration (remote-execution). "
+ "You can only specify one of these.".format(str(provenance)),
+ _yaml.LoadErrorReason.INVALID_DATA,
+ )
service_configs = [exec_config, storage_config, action_config]
@@ -161,12 +167,14 @@ class SandboxRemote(SandboxREAPI):
for config_key, config in zip(service_keys, service_configs):
# Either both or none of the TLS client key/cert pair must be specified:
- if ('client-key' in config) != ('client-cert' in config):
+ if ("client-key" in config) != ("client-cert" in config):
provenance = remote_config.get_node(config_key).get_provenance()
- raise _yaml.LoadError("{}: TLS client key/cert pair is incomplete. "
- "You must specify both 'client-key' and 'client-cert' "
- "for authenticated HTTPS connections."
- .format(str(provenance)), _yaml.LoadErrorReason.INVALID_DATA)
+ raise _yaml.LoadError(
+ "{}: TLS client key/cert pair is incomplete. "
+ "You must specify both 'client-key' and 'client-cert' "
+ "for authenticated HTTPS connections.".format(str(provenance)),
+ _yaml.LoadErrorReason.INVALID_DATA,
+ )
for tls_key in tls_keys:
if tls_key in config:
@@ -182,9 +190,9 @@ class SandboxRemote(SandboxREAPI):
# Try to create a communication channel to the BuildGrid server.
stub = remote_execution_pb2_grpc.ExecutionStub(channel)
- request = remote_execution_pb2.ExecuteRequest(instance_name=self.exec_instance,
- action_digest=action_digest,
- skip_cache_lookup=False)
+ request = remote_execution_pb2.ExecuteRequest(
+ instance_name=self.exec_instance, action_digest=action_digest, skip_cache_lookup=False
+ )
def __run_remote_command(stub, execute_request=None, running_operation=None):
try:
@@ -206,26 +214,30 @@ class SandboxRemote(SandboxREAPI):
except grpc.RpcError as e:
status_code = e.code()
if status_code == grpc.StatusCode.UNAVAILABLE:
- raise SandboxError("Failed contacting remote execution server at {}."
- .format(self.exec_url))
-
- if status_code in (grpc.StatusCode.INVALID_ARGUMENT,
- grpc.StatusCode.FAILED_PRECONDITION,
- grpc.StatusCode.RESOURCE_EXHAUSTED,
- grpc.StatusCode.INTERNAL,
- grpc.StatusCode.DEADLINE_EXCEEDED):
+ raise SandboxError("Failed contacting remote execution server at {}.".format(self.exec_url))
+
+ if status_code in (
+ grpc.StatusCode.INVALID_ARGUMENT,
+ grpc.StatusCode.FAILED_PRECONDITION,
+ grpc.StatusCode.RESOURCE_EXHAUSTED,
+ grpc.StatusCode.INTERNAL,
+ grpc.StatusCode.DEADLINE_EXCEEDED,
+ ):
raise SandboxError("{} ({}).".format(e.details(), status_code.name))
if running_operation and status_code == grpc.StatusCode.UNIMPLEMENTED:
- raise SandboxError("Failed trying to recover from connection loss: "
- "server does not support operation status polling recovery.")
+ raise SandboxError(
+ "Failed trying to recover from connection loss: "
+ "server does not support operation status polling recovery."
+ )
return last_operation
# Set up signal handler to trigger cancel_operation on SIGTERM
operation = None
- with self._get_context().messenger.timed_activity("Waiting for the remote build to complete"), \
- _signals.terminator(partial(self.cancel_operation, channel)):
+ with self._get_context().messenger.timed_activity(
+ "Waiting for the remote build to complete"
+ ), _signals.terminator(partial(self.cancel_operation, channel)):
operation = __run_remote_command(stub, execute_request=request)
if operation is None:
return None
@@ -242,18 +254,17 @@ class SandboxRemote(SandboxREAPI):
return
stub = operations_pb2_grpc.OperationsStub(channel)
- request = operations_pb2.CancelOperationRequest(
- name=str(self.operation_name))
+ request = operations_pb2.CancelOperationRequest(name=str(self.operation_name))
try:
stub.CancelOperation(request)
except grpc.RpcError as e:
- if (e.code() == grpc.StatusCode.UNIMPLEMENTED or
- e.code() == grpc.StatusCode.INVALID_ARGUMENT):
+ if e.code() == grpc.StatusCode.UNIMPLEMENTED or e.code() == grpc.StatusCode.INVALID_ARGUMENT:
pass
else:
- raise SandboxError("Failed trying to send CancelOperation request: "
- "{} ({})".format(e.details(), e.code().name))
+ raise SandboxError(
+ "Failed trying to send CancelOperation request: " "{} ({})".format(e.details(), e.code().name)
+ )
def _fetch_missing_blobs(self, vdir):
context = self._get_context()
@@ -282,8 +293,9 @@ class SandboxRemote(SandboxREAPI):
remote_missing_blobs = cascache.fetch_blobs(casremote, blobs_to_fetch)
if remote_missing_blobs:
- raise SandboxError("{} output files are missing on the CAS server"
- .format(len(remote_missing_blobs)))
+ raise SandboxError(
+ "{} output files are missing on the CAS server".format(len(remote_missing_blobs))
+ )
def _execute_action(self, action):
context = self._get_context()
@@ -301,8 +313,9 @@ class SandboxRemote(SandboxREAPI):
try:
casremote.init()
except grpc.RpcError as e:
- raise SandboxError("Failed to contact remote execution CAS endpoint at {}: {}"
- .format(self.storage_url, e)) from e
+ raise SandboxError(
+ "Failed to contact remote execution CAS endpoint at {}: {}".format(self.storage_url, e)
+ ) from e
# Determine blobs missing on remote
try:
@@ -333,15 +346,19 @@ class SandboxRemote(SandboxREAPI):
# Next, try to create a communication channel to the BuildGrid server.
url = urlparse(self.exec_url)
if not url.port:
- raise SandboxError("You must supply a protocol and port number in the execution-service url, "
- "for example: http://buildservice:50051.")
- if url.scheme == 'http':
- channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
- elif url.scheme == 'https':
- channel = grpc.secure_channel('{}:{}'.format(url.hostname, url.port), self.exec_credentials)
+ raise SandboxError(
+ "You must supply a protocol and port number in the execution-service url, "
+ "for example: http://buildservice:50051."
+ )
+ if url.scheme == "http":
+ channel = grpc.insecure_channel("{}:{}".format(url.hostname, url.port))
+ elif url.scheme == "https":
+ channel = grpc.secure_channel("{}:{}".format(url.hostname, url.port), self.exec_credentials)
else:
- raise SandboxError("Remote execution currently only supports the 'http' protocol "
- "and '{}' was supplied.".format(url.scheme))
+ raise SandboxError(
+ "Remote execution currently only supports the 'http' protocol "
+ "and '{}' was supplied.".format(url.scheme)
+ )
# Now request to execute the action
with channel:
@@ -369,23 +386,25 @@ class SandboxRemote(SandboxREAPI):
return None
url = urlparse(self.action_url)
if not url.port:
- raise SandboxError("You must supply a protocol and port number in the action-cache-service url, "
- "for example: http://buildservice:50051.")
- if url.scheme == 'http':
- channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
- elif url.scheme == 'https':
- channel = grpc.secure_channel('{}:{}'.format(url.hostname, url.port), self.action_credentials)
+ raise SandboxError(
+ "You must supply a protocol and port number in the action-cache-service url, "
+ "for example: http://buildservice:50051."
+ )
+ if url.scheme == "http":
+ channel = grpc.insecure_channel("{}:{}".format(url.hostname, url.port))
+ elif url.scheme == "https":
+ channel = grpc.secure_channel("{}:{}".format(url.hostname, url.port), self.action_credentials)
with channel:
- request = remote_execution_pb2.GetActionResultRequest(instance_name=self.action_instance,
- action_digest=action_digest)
+ request = remote_execution_pb2.GetActionResultRequest(
+ instance_name=self.action_instance, action_digest=action_digest
+ )
stub = remote_execution_pb2_grpc.ActionCacheStub(channel)
try:
result = stub.GetActionResult(request)
except grpc.RpcError as e:
if e.code() != grpc.StatusCode.NOT_FOUND:
- raise SandboxError("Failed to query action cache: {} ({})"
- .format(e.code(), e.details()))
+ raise SandboxError("Failed to query action cache: {} ({})".format(e.code(), e.details()))
return None
else:
self.info("Action result found in action cache")
@@ -397,7 +416,7 @@ class SandboxRemote(SandboxREAPI):
# Failure of remote execution, usually due to an error in BuildStream
raise SandboxError("No response returned from server")
- assert not operation.HasField('error') and operation.HasField('response')
+ assert not operation.HasField("error") and operation.HasField("response")
execution_response = remote_execution_pb2.ExecuteResponse()
# The response is expected to be an ExecutionResponse message
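For context on the channel-setup logic touched above: the service URL must carry an explicit scheme and port, and the scheme selects between an insecure and a TLS gRPC channel. A minimal standalone sketch of that pattern, assuming a plain grpc client and an illustrative URL (nothing here is BuildStream API):

    from urllib.parse import urlparse

    import grpc

    def open_channel(url_string, credentials=None):
        # Require an explicit port, as the code above does.
        url = urlparse(url_string)
        if not url.port:
            raise ValueError("A scheme and port are required, e.g. http://buildservice:50051")
        target = "{}:{}".format(url.hostname, url.port)
        if url.scheme == "http":
            return grpc.insecure_channel(target)
        if url.scheme == "https":
            # Falls back to default root certificates when none are supplied.
            return grpc.secure_channel(target, credentials or grpc.ssl_channel_credentials())
        raise ValueError("Unsupported scheme: {}".format(url.scheme))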
diff --git a/src/buildstream/sandbox/sandbox.py b/src/buildstream/sandbox/sandbox.py
index b4691fe3f..c88cbf977 100644
--- a/src/buildstream/sandbox/sandbox.py
+++ b/src/buildstream/sandbox/sandbox.py
@@ -47,10 +47,11 @@ if TYPE_CHECKING:
# pylint: disable=cyclic-import
from .._context import Context
from .._project import Project
+
# pylint: enable=cyclic-import
-class SandboxFlags():
+class SandboxFlags:
"""Flags indicating how the sandbox should be run.
"""
@@ -100,49 +101,45 @@ class SandboxCommandError(SandboxError):
collect (str): An optional directory containing partial install contents
reason (str): An optional reason string (defaults to 'command-failed')
"""
- def __init__(self, message, *, detail=None, collect=None, reason='command-failed'):
+
+ def __init__(self, message, *, detail=None, collect=None, reason="command-failed"):
super().__init__(message, detail=detail, reason=reason)
self.collect = collect
-class Sandbox():
+class Sandbox:
"""Sandbox()
Sandbox programming interface for :class:`.Element` plugins.
"""
# Minimal set of devices for the sandbox
- DEVICES = [
- '/dev/urandom',
- '/dev/random',
- '/dev/zero',
- '/dev/null'
- ]
- _dummy_reasons = [] # type: List[str]
-
- def __init__(self, context: 'Context', project: 'Project', directory: str, **kwargs):
+ DEVICES = ["/dev/urandom", "/dev/random", "/dev/zero", "/dev/null"]
+ _dummy_reasons = [] # type: List[str]
+
+ def __init__(self, context: "Context", project: "Project", directory: str, **kwargs):
self.__context = context
self.__project = project
- self.__directories = [] # type: List[Dict[str, Union[int, str]]]
- self.__cwd = None # type: Optional[str]
- self.__env = None # type: Optional[Dict[str, str]]
- self.__mount_sources = {} # type: Dict[str, str]
- self.__allow_real_directory = kwargs['allow_real_directory']
+ self.__directories = [] # type: List[Dict[str, Union[int, str]]]
+ self.__cwd = None # type: Optional[str]
+ self.__env = None # type: Optional[Dict[str, str]]
+ self.__mount_sources = {} # type: Dict[str, str]
+ self.__allow_real_directory = kwargs["allow_real_directory"]
self.__allow_run = True
# Plugin element full name for logging
- plugin = kwargs.get('plugin', None)
+ plugin = kwargs.get("plugin", None)
if plugin:
self.__element_name = plugin._get_full_name()
else:
self.__element_name = None
# Configuration from kwargs common to all subclasses
- self.__config = kwargs['config']
- self.__stdout = kwargs['stdout']
- self.__stderr = kwargs['stderr']
- self.__bare_directory = kwargs['bare_directory']
+ self.__config = kwargs["config"]
+ self.__stdout = kwargs["stdout"]
+ self.__stderr = kwargs["stderr"]
+ self.__bare_directory = kwargs["bare_directory"]
# Setup the directories. Root and output_directory should be
# available to subclasses, hence being single-underscore. The
@@ -153,15 +150,15 @@ class Sandbox():
self.__scratch = None
os.makedirs(self._root, exist_ok=True)
else:
- self._root = os.path.join(directory, 'root')
- self.__scratch = os.path.join(directory, 'scratch')
+ self._root = os.path.join(directory, "root")
+ self.__scratch = os.path.join(directory, "scratch")
for directory_ in [self._root, self.__scratch]:
os.makedirs(directory_, exist_ok=True)
- self._output_directory = None # type: Optional[str]
+ self._output_directory = None # type: Optional[str]
self._build_directory = None
self._build_directory_always = None
- self._vdir = None # type: Optional[Directory]
+ self._vdir = None # type: Optional[Directory]
self._usebuildtree = False
# This is set if anyone requests access to the underlying
@@ -255,18 +252,17 @@ class Sandbox():
Any marked directories will be read-write in the sandboxed
environment, only the root directory is allowed to be readonly.
"""
- self.__directories.append({
- 'directory': directory,
- 'artifact': artifact
- })
-
- def run(self,
- command: List[str],
- flags: int,
- *,
- cwd: Optional[str] = None,
- env: Optional[Dict[str, str]] = None,
- label: str = None) -> Optional[int]:
+ self.__directories.append({"directory": directory, "artifact": artifact})
+
+ def run(
+ self,
+ command: List[str],
+ flags: int,
+ *,
+ cwd: Optional[str] = None,
+ env: Optional[Dict[str, str]] = None,
+ label: str = None
+ ) -> Optional[int]:
"""Run a command in the sandbox.
If this is called outside a batch context, the command is immediately
@@ -314,8 +310,7 @@ class Sandbox():
command = [command]
if self.__batch:
- assert flags == self.__batch.flags, \
- "Inconsistent sandbox flags in single command batch"
+ assert flags == self.__batch.flags, "Inconsistent sandbox flags in single command batch"
batch_command = _SandboxBatchCommand(command, cwd=cwd, env=env, label=label)
@@ -352,8 +347,7 @@ class Sandbox():
if self.__batch:
# Nested batch
- assert flags == self.__batch.flags, \
- "Inconsistent sandbox flags in single command batch"
+ assert flags == self.__batch.flags, "Inconsistent sandbox flags in single command batch"
parent_group = self.__batch.current_group
parent_group.append(group)
@@ -394,8 +388,7 @@ class Sandbox():
# (int): The program exit code.
#
def _run(self, command, flags, *, cwd, env):
- raise ImplError("Sandbox of type '{}' does not implement _run()"
- .format(type(self).__name__))
+ raise ImplError("Sandbox of type '{}' does not implement _run()".format(type(self).__name__))
# _create_batch()
#
@@ -425,7 +418,7 @@ class Sandbox():
if not self.__allow_real_directory and not self.__allow_run:
return True
- return 'BST_CAS_DIRECTORIES' in os.environ
+ return "BST_CAS_DIRECTORIES" in os.environ
# _fetch_missing_blobs()
#
@@ -513,7 +506,7 @@ class Sandbox():
# what directory it is in makes it unnecessary to call the faulty
# getcwd.
env = dict(env)
- env['PWD'] = cwd
+ env["PWD"] = cwd
return env
@@ -528,7 +521,7 @@ class Sandbox():
# Returns:
# (str): The sandbox work directory
def _get_work_directory(self, *, cwd=None):
- return cwd or self.__cwd or '/'
+ return cwd or self.__cwd or "/"
# _get_scratch_directory()
#
@@ -584,7 +577,7 @@ class Sandbox():
if len(command_as_parts) > 1:
return False
- for path in env.get('PATH').split(':'):
+ for path in env.get("PATH").split(":"):
path_as_parts = path.lstrip(os.sep).split(os.sep)
if vroot._exists(*path_as_parts, command, follow_symlinks=True):
return True
@@ -649,20 +642,14 @@ class Sandbox():
# message (str): A message to issue
# details (str): optional, more details
def _issue_warning(self, message, detail=None):
- self.__context.messenger.message(
- Message(MessageType.WARN,
- message,
- detail=detail
- )
- )
+ self.__context.messenger.message(Message(MessageType.WARN, message, detail=detail))
# _SandboxBatch()
#
# A batch of sandbox commands.
#
-class _SandboxBatch():
-
+class _SandboxBatch:
def __init__(self, sandbox, main_group, flags, *, collect=None):
self.sandbox = sandbox
self.main_group = main_group
@@ -686,16 +673,21 @@ class _SandboxBatch():
def execute_command(self, command):
if command.label:
context = self.sandbox._get_context()
- message = Message(MessageType.STATUS, 'Running command', detail=command.label,
- element_name=self.sandbox._get_element_name())
+ message = Message(
+ MessageType.STATUS,
+ "Running command",
+ detail=command.label,
+ element_name=self.sandbox._get_element_name(),
+ )
context.messenger.message(message)
exitcode = self.sandbox._run(command.command, self.flags, cwd=command.cwd, env=command.env)
if exitcode != 0:
- cmdline = ' '.join(shlex.quote(cmd) for cmd in command.command)
+ cmdline = " ".join(shlex.quote(cmd) for cmd in command.command)
label = command.label or cmdline
- raise SandboxCommandError("Command failed with exitcode {}".format(exitcode),
- detail=label, collect=self.collect)
+ raise SandboxCommandError(
+ "Command failed with exitcode {}".format(exitcode), detail=label, collect=self.collect
+ )
def execute_call(self, call):
call.callback()
@@ -705,8 +697,7 @@ class _SandboxBatch():
#
# An item in a command batch.
#
-class _SandboxBatchItem():
-
+class _SandboxBatchItem:
def __init__(self, *, label=None):
self.label = label
@@ -716,7 +707,6 @@ class _SandboxBatchItem():
# A command item in a command batch.
#
class _SandboxBatchCommand(_SandboxBatchItem):
-
def __init__(self, command, *, cwd, env, label=None):
super().__init__(label=label)
@@ -733,7 +723,6 @@ class _SandboxBatchCommand(_SandboxBatchItem):
# A group in a command batch.
#
class _SandboxBatchGroup(_SandboxBatchItem):
-
def __init__(self, *, label=None):
super().__init__(label=label)
@@ -755,7 +744,6 @@ class _SandboxBatchGroup(_SandboxBatchItem):
# A call item in a command batch.
#
class _SandboxBatchCall(_SandboxBatchItem):
-
def __init__(self, callback):
super().__init__()
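For orientation, the run() signature reformatted above is what element plugins call to execute commands in the sandbox. A rough usage sketch follows; the command and label are invented for illustration, 'sandbox' is the Sandbox instance handed to the element, and the usual BuildStream imports (SandboxFlags, SandboxCommandError) are assumed to be in scope:

    # Inside a hypothetical element's assemble() implementation.
    exitcode = sandbox.run(
        ["sh", "-c", "-e", "make install\n"],
        SandboxFlags.NONE,
        label="make install",
    )
    # Outside a batch the exit code is returned directly; inside sandbox.batch()
    # the call returns None and failures surface when the batch executes.
    if exitcode is not None and exitcode != 0:
        raise SandboxCommandError("Command failed with exitcode {}".format(exitcode))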
diff --git a/src/buildstream/scriptelement.py b/src/buildstream/scriptelement.py
index e78049b4a..f8deff28e 100644
--- a/src/buildstream/scriptelement.py
+++ b/src/buildstream/scriptelement.py
@@ -48,8 +48,8 @@ class ScriptElement(Element):
__install_root = "/"
__cwd = "/"
__root_read_only = False
- __commands = None # type: OrderedDict[str, List[str]]
- __layout = [] # type: List[Dict[str, Optional[str]]]
+ __commands = None # type: OrderedDict[str, List[str]]
+ __layout = [] # type: List[Dict[str, Optional[str]]]
# The compose element's output is its dependencies, so
# we must rebuild if the dependencies change even when
@@ -149,8 +149,7 @@ class ScriptElement(Element):
#
if not self.__layout:
self.__layout = []
- self.__layout.append({"element": element,
- "destination": destination})
+ self.__layout.append({"element": element, "destination": destination})
def add_commands(self, group_name: str, command_list: List[str]) -> None:
"""Adds a list of commands under the group-name.
@@ -183,11 +182,11 @@ class ScriptElement(Element):
def get_unique_key(self):
return {
- 'commands': self.__commands,
- 'cwd': self.__cwd,
- 'install-root': self.__install_root,
- 'layout': self.__layout,
- 'root-read-only': self.__root_read_only
+ "commands": self.__commands,
+ "cwd": self.__cwd,
+ "install-root": self.__install_root,
+ "layout": self.__layout,
+ "root-read-only": self.__root_read_only,
}
def configure_sandbox(self, sandbox):
@@ -206,14 +205,14 @@ class ScriptElement(Element):
# Mark the artifact directories in the layout
for item in self.__layout:
- destination = item['destination']
+ destination = item["destination"]
was_artifact = directories.get(destination, False)
- directories[destination] = item['element'] or was_artifact
+ directories[destination] = item["element"] or was_artifact
for directory, artifact in directories.items():
# Root does not need to be marked as it is always mounted
# with artifact (unless explicitly marked non-artifact)
- if directory != '/':
+ if directory != "/":
sandbox.mark_directory(directory, artifact=artifact)
def stage(self, sandbox):
@@ -222,8 +221,7 @@ class ScriptElement(Element):
if not self.__layout:
# if no layout set, stage all dependencies into /
for build_dep in self.dependencies(Scope.BUILD, recurse=False):
- with self.timed_activity("Staging {} at /"
- .format(build_dep.name), silent_nested=True):
+ with self.timed_activity("Staging {} at /".format(build_dep.name), silent_nested=True):
build_dep.stage_dependency_artifacts(sandbox, Scope.RUN, path="/")
with sandbox.batch(SandboxFlags.NONE):
@@ -236,35 +234,33 @@ class ScriptElement(Element):
for item in self.__layout:
# Skip layout members which dont stage an element
- if not item['element']:
+ if not item["element"]:
continue
- element = self.search(Scope.BUILD, item['element'])
- if item['destination'] == '/':
- with self.timed_activity("Staging {} at /".format(element.name),
- silent_nested=True):
+ element = self.search(Scope.BUILD, item["element"])
+ if item["destination"] == "/":
+ with self.timed_activity("Staging {} at /".format(element.name), silent_nested=True):
element.stage_dependency_artifacts(sandbox, Scope.RUN)
else:
- with self.timed_activity("Staging {} at {}"
- .format(element.name, item['destination']),
- silent_nested=True):
+ with self.timed_activity(
+ "Staging {} at {}".format(element.name, item["destination"]), silent_nested=True
+ ):
virtual_dstdir = sandbox.get_virtual_directory()
- virtual_dstdir.descend(*item['destination'].lstrip(os.sep).split(os.sep), create=True)
- element.stage_dependency_artifacts(sandbox, Scope.RUN, path=item['destination'])
+ virtual_dstdir.descend(*item["destination"].lstrip(os.sep).split(os.sep), create=True)
+ element.stage_dependency_artifacts(sandbox, Scope.RUN, path=item["destination"])
with sandbox.batch(SandboxFlags.NONE):
for item in self.__layout:
# Skip layout members which dont stage an element
- if not item['element']:
+ if not item["element"]:
continue
- element = self.search(Scope.BUILD, item['element'])
+ element = self.search(Scope.BUILD, item["element"])
# Integration commands can only be run for elements staged to /
- if item['destination'] == '/':
- with self.timed_activity("Integrating {}".format(element.name),
- silent_nested=True):
+ if item["destination"] == "/":
+ with self.timed_activity("Integrating {}".format(element.name), silent_nested=True):
for dep in element.dependencies(Scope.RUN):
dep.integrate(sandbox)
@@ -283,9 +279,7 @@ class ScriptElement(Element):
for cmd in commands:
# Note the -e switch to 'sh' means to exit with an error
# if any untested command fails.
- sandbox.run(['sh', '-c', '-e', cmd + '\n'],
- flags,
- label=cmd)
+ sandbox.run(["sh", "-c", "-e", cmd + "\n"], flags, label=cmd)
# Return where the result can be collected from
return self.__install_root
@@ -297,18 +291,18 @@ class ScriptElement(Element):
def __validate_layout(self):
if self.__layout:
# Cannot proceed if layout is used, but none are for "/"
- root_defined = any([(entry['destination'] == '/') for entry in self.__layout])
+ root_defined = any([(entry["destination"] == "/") for entry in self.__layout])
if not root_defined:
- raise ElementError("{}: Using layout, but none are staged as '/'"
- .format(self))
+ raise ElementError("{}: Using layout, but none are staged as '/'".format(self))
# Cannot proceed if layout specifies an element that isn't part
# of the dependencies.
for item in self.__layout:
- if item['element']:
- if not self.search(Scope.BUILD, item['element']):
- raise ElementError("{}: '{}' in layout not found in dependencies"
- .format(self, item['element']))
+ if item["element"]:
+ if not self.search(Scope.BUILD, item["element"]):
+ raise ElementError(
+ "{}: '{}' in layout not found in dependencies".format(self, item["element"])
+ )
def setup():
diff --git a/src/buildstream/source.py b/src/buildstream/source.py
index 05a1ae464..2e7460439 100644
--- a/src/buildstream/source.py
+++ b/src/buildstream/source.py
@@ -184,6 +184,7 @@ if TYPE_CHECKING:
# pylint: disable=cyclic-import
from ._context import Context
from ._project import Project
+
# pylint: enable=cyclic-import
@@ -197,16 +198,14 @@ class SourceError(BstError):
reason: An optional machine readable reason string, used for test cases
temporary: An indicator of whether the error may occur if the operation is run again. (*Since: 1.2*)
"""
- def __init__(self,
- message: str,
- *,
- detail: Optional[str] = None,
- reason: Optional[str] = None,
- temporary: bool = False):
+
+ def __init__(
+ self, message: str, *, detail: Optional[str] = None, reason: Optional[str] = None, temporary: bool = False
+ ):
super().__init__(message, detail=detail, domain=ErrorDomain.SOURCE, reason=reason, temporary=temporary)
-class SourceFetcher():
+class SourceFetcher:
"""SourceFetcher()
This interface exists so that a source that downloads from multiple
@@ -222,6 +221,7 @@ class SourceFetcher():
for every URL found in the configuration data at
:func:`Plugin.configure() <buildstream.plugin.Plugin.configure>` time.
"""
+
def __init__(self):
self.__alias = None
@@ -275,8 +275,9 @@ class Source(Plugin):
All Sources derive from this class; this interface defines how
the core will be interacting with Sources.
"""
+
# The defaults from the project
- __defaults = None # type: Optional[Dict[str, Any]]
+ __defaults = None # type: Optional[Dict[str, Any]]
BST_REQUIRES_PREVIOUS_SOURCES_TRACK = False
"""Whether access to previous sources is required during track
@@ -331,32 +332,40 @@ class Source(Plugin):
*Since: 1.91.2*
"""
- def __init__(self,
- context: 'Context',
- project: 'Project',
- meta: MetaSource,
- *,
- alias_override: Optional[Tuple[str, str]] = None,
- unique_id: Optional[int] = None):
+ def __init__(
+ self,
+ context: "Context",
+ project: "Project",
+ meta: MetaSource,
+ *,
+ alias_override: Optional[Tuple[str, str]] = None,
+ unique_id: Optional[int] = None
+ ):
provenance = meta.config.get_provenance()
# Set element_name member before parent init, as needed for debug messaging
- self.__element_name = meta.element_name # The name of the element owning this source
- super().__init__("{}-{}".format(meta.element_name, meta.element_index),
- context, project, provenance, "source", unique_id=unique_id)
+ self.__element_name = meta.element_name # The name of the element owning this source
+ super().__init__(
+ "{}-{}".format(meta.element_name, meta.element_index),
+ context,
+ project,
+ provenance,
+ "source",
+ unique_id=unique_id,
+ )
- self.__element_index = meta.element_index # The index of the source in the owning element's source list
- self.__element_kind = meta.element_kind # The kind of the element owning this source
- self.__directory = meta.directory # Staging relative directory
- self.__consistency = Consistency.INCONSISTENT # Cached consistency state
- self.__meta_kind = meta.kind # The kind of this source, required for unpickling
+ self.__element_index = meta.element_index # The index of the source in the owning element's source list
+ self.__element_kind = meta.element_kind # The kind of the element owning this source
+ self.__directory = meta.directory # Staging relative directory
+ self.__consistency = Consistency.INCONSISTENT # Cached consistency state
+ self.__meta_kind = meta.kind # The kind of this source, required for unpickling
- self.__key = None # Cache key for source
+ self.__key = None # Cache key for source
# The alias_override is only set on a re-instantiated Source
- self.__alias_override = alias_override # Tuple of alias and its override to use instead
- self.__expected_alias = None # The primary alias
+ self.__alias_override = alias_override # Tuple of alias and its override to use instead
+ self.__expected_alias = None # The primary alias
# Set of marked download URLs
- self.__marked_urls = set() # type: Set[str]
+ self.__marked_urls = set() # type: Set[str]
# Collect the composited element configuration and
# ask the element to configure itself.
@@ -365,12 +374,12 @@ class Source(Plugin):
self.__first_pass = meta.first_pass
# cached values for commonly accessed values on the source
- self.__mirror_directory = None # type: Optional[str]
+ self.__mirror_directory = None # type: Optional[str]
self._configure(self.__config)
self.__digest = None
- COMMON_CONFIG_KEYS = ['kind', 'directory']
+ COMMON_CONFIG_KEYS = ["kind", "directory"]
"""Common source config keys
Source config keys that must not be accessed in configure(), and
@@ -611,8 +620,8 @@ class Source(Plugin):
# specific alias, so that sources that fetch from multiple
# URLs and use different aliases default to only overriding
# one alias, rather than getting confused.
- override_alias = self.__alias_override[0] # type: ignore
- override_url = self.__alias_override[1] # type: ignore
+ override_alias = self.__alias_override[0] # type: ignore
+ override_url = self.__alias_override[1] # type: ignore
if url_alias == override_alias:
url = override_url + url_body
return url
@@ -642,9 +651,9 @@ class Source(Plugin):
if primary:
expected_alias = _extract_alias(url)
- assert (self.__expected_alias is None or
- self.__expected_alias == expected_alias), \
- "Primary URL marked twice with different URLs"
+ assert (
+ self.__expected_alias is None or self.__expected_alias == expected_alias
+ ), "Primary URL marked twice with different URLs"
self.__expected_alias = expected_alias
@@ -664,8 +673,9 @@ class Source(Plugin):
# the case for git submodules which might be automatically
# discovered.
#
- assert (url in self.__marked_urls or not _extract_alias(url)), \
- "URL was not seen at configure time: {}".format(url)
+ assert url in self.__marked_urls or not _extract_alias(
+ url
+ ), "URL was not seen at configure time: {}".format(url)
def get_project_directory(self) -> str:
"""Fetch the project base directory
@@ -790,8 +800,7 @@ class Source(Plugin):
if self.BST_KEY_REQUIRES_STAGE:
# _get_unique_key should be called before _stage
assert self.__digest is not None
- cas_dir = CasBasedDirectory(self._get_context().get_cascache(),
- digest=self.__digest)
+ cas_dir = CasBasedDirectory(self._get_context().get_cascache(), digest=self.__digest)
directory.import_files(cas_dir)
else:
self.stage(directory)
@@ -811,11 +820,11 @@ class Source(Plugin):
#
def _get_unique_key(self):
key = {}
- key['directory'] = self.__directory
+ key["directory"] = self.__directory
if self.BST_KEY_REQUIRES_STAGE:
- key['unique'] = self._stage_into_cas()
+ key["unique"] = self._stage_into_cas()
else:
- key['unique'] = self.get_unique_key() # pylint: disable=assignment-from-no-return
+ key["unique"] = self.get_unique_key() # pylint: disable=assignment-from-no-return
return key
# _project_refs():
@@ -828,7 +837,7 @@ class Source(Plugin):
#
def _project_refs(self, project):
element_kind = self.__element_kind
- if element_kind == 'junction':
+ if element_kind == "junction":
return project.junction_refs
return project.refs
@@ -863,9 +872,10 @@ class Source(Plugin):
try:
self.load_ref(ref_node)
except ImplError as e:
- raise SourceError("{}: Storing refs in project.refs is not supported by '{}' sources"
- .format(self, self.get_kind()),
- reason="unsupported-load-ref") from e
+ raise SourceError(
+ "{}: Storing refs in project.refs is not supported by '{}' sources".format(self, self.get_kind()),
+ reason="unsupported-load-ref",
+ ) from e
# If the main project overrides the ref, use the override
if project is not toplevel and toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS:
@@ -938,12 +948,12 @@ class Source(Plugin):
elif provenance._project is None:
assert provenance._filename == ""
assert provenance._shortname == ""
- raise SourceError("{}: Error saving source reference to synthetic node."
- .format(self))
+ raise SourceError("{}: Error saving source reference to synthetic node.".format(self))
else:
- raise SourceError("{}: Cannot track source in a fragment from a junction"
- .format(provenance._shortname),
- reason="tracking-junction-fragment")
+ raise SourceError(
+ "{}: Cannot track source in a fragment from a junction".format(provenance._shortname),
+ reason="tracking-junction-fragment",
+ )
#
# Step 2 - Set the ref in memory, and determine changed state
@@ -968,13 +978,13 @@ class Source(Plugin):
actions = {}
for k, v in clean.items():
if k not in to_modify:
- actions[k] = 'del'
+ actions[k] = "del"
else:
if v != to_modify[k]:
- actions[k] = 'mod'
+ actions[k] = "mod"
for k in to_modify.keys():
if k not in clean:
- actions[k] = 'add'
+ actions[k] = "add"
def walk_container(container, path):
# For each step along path, synthesise if we need to.
@@ -1002,20 +1012,19 @@ class Source(Plugin):
def process_value(action, container, path, key, new_value):
container = walk_container(container, path)
- if action == 'del':
+ if action == "del":
del container[key]
- elif action == 'mod':
+ elif action == "mod":
container[key] = new_value
- elif action == 'add':
+ elif action == "add":
container[key] = new_value
else:
- assert False, \
- "BUG: Unknown action: {}".format(action)
+ assert False, "BUG: Unknown action: {}".format(action)
roundtrip_cache = {}
for key, action in actions.items():
# Obtain the top level node and its file
- if action == 'add':
+ if action == "add":
provenance = node.get_provenance()
else:
provenance = node.get_node(key).get_provenance()
@@ -1023,7 +1032,7 @@ class Source(Plugin):
toplevel_node = provenance._toplevel
# Get the path to whatever changed
- if action == 'add':
+ if action == "add":
path = toplevel_node._find(node)
else:
full_path = toplevel_node._find(node.get_node(key))
@@ -1033,8 +1042,7 @@ class Source(Plugin):
roundtrip_file = roundtrip_cache.get(provenance._filename)
if not roundtrip_file:
roundtrip_file = roundtrip_cache[provenance._filename] = _yaml.roundtrip_load(
- provenance._filename,
- allow_missing=True
+ provenance._filename, allow_missing=True
)
# Get the value of the round trip file that we need to change
@@ -1048,9 +1056,9 @@ class Source(Plugin):
try:
_yaml.roundtrip_dump(data, filename)
except OSError as e:
- raise SourceError("{}: Error saving source reference to '{}': {}"
- .format(self, filename, e),
- reason="save-ref-error") from e
+ raise SourceError(
+ "{}: Error saving source reference to '{}': {}".format(self, filename, e), reason="save-ref-error"
+ ) from e
return True
@@ -1059,7 +1067,7 @@ class Source(Plugin):
# Args:
# previous_sources (list): List of Sources listed prior to this source
#
- def _track(self, previous_sources: List['Source']) -> SourceRef:
+ def _track(self, previous_sources: List["Source"]) -> SourceRef:
if self.BST_KEY_REQUIRES_STAGE:
# ensure that these sources have a key after tracking
self._get_unique_key()
@@ -1067,8 +1075,7 @@ class Source(Plugin):
if self.BST_REQUIRES_PREVIOUS_SOURCES_TRACK:
self.__ensure_previous_sources(previous_sources)
- with self.__stage_previous_sources(previous_sources) \
- as staging_directory:
+ with self.__stage_previous_sources(previous_sources) as staging_directory:
new_ref = self.__do_track(previous_sources_dir=self.__ensure_directory(staging_directory))
else:
new_ref = self.__do_track()
@@ -1135,9 +1142,7 @@ class Source(Plugin):
# Gives a ref path that points to where sources are kept in the CAS
def _get_source_name(self):
# @ is used to prevent conflicts with project names
- return "{}/{}".format(
- self.get_kind(),
- self._key)
+ return "{}/{}".format(self.get_kind(), self._key)
def _get_brief_display_key(self):
context = self._get_context()
@@ -1210,9 +1215,7 @@ class Source(Plugin):
meta.first_pass = self.__first_pass
- clone = source_kind(context, project, meta,
- alias_override=(alias, uri),
- unique_id=self._unique_id)
+ clone = source_kind(context, project, meta, alias_override=(alias, uri), unique_id=self._unique_id)
# Do the necessary post instantiation routines here
#
@@ -1352,20 +1355,18 @@ class Source(Plugin):
try:
os.makedirs(directory, exist_ok=True)
except OSError as e:
- raise SourceError("Failed to create staging directory: {}"
- .format(e),
- reason="ensure-stage-dir-fail") from e
+ raise SourceError(
+ "Failed to create staging directory: {}".format(e), reason="ensure-stage-dir-fail"
+ ) from e
else:
if self.__directory is not None:
try:
- directory = directory.descend(
- *self.__directory.lstrip(os.sep).split(os.sep),
- create=True)
+ directory = directory.descend(*self.__directory.lstrip(os.sep).split(os.sep), create=True)
except VirtualDirectoryError as e:
- raise SourceError("Failed to descend into staging directory: {}"
- .format(e),
- reason="ensure-stage-dir-fail") from e
+ raise SourceError(
+ "Failed to descend into staging directory: {}".format(e), reason="ensure-stage-dir-fail"
+ ) from e
return directory
@@ -1383,7 +1384,7 @@ class Source(Plugin):
#
@classmethod
def __extract_config(cls, meta):
- config = cls.__defaults.get_mapping('config', default={})
+ config = cls.__defaults.get_mapping("config", default={})
config = config.clone()
meta.config._composite(config)
diff --git a/src/buildstream/storage/_casbaseddirectory.py b/src/buildstream/storage/_casbaseddirectory.py
index 3786f25b6..df28dc591 100644
--- a/src/buildstream/storage/_casbaseddirectory.py
+++ b/src/buildstream/storage/_casbaseddirectory.py
@@ -38,10 +38,20 @@ from ._filebaseddirectory import FileBasedDirectory
from ..utils import FileListResult, BST_ARBITRARY_TIMESTAMP
-class IndexEntry():
+class IndexEntry:
""" Directory entry used in CasBasedDirectory.index """
- def __init__(self, name, entrytype, *, digest=None, target=None, is_executable=False,
- buildstream_object=None, modified=False):
+
+ def __init__(
+ self,
+ name,
+ entrytype,
+ *,
+ digest=None,
+ target=None,
+ is_executable=False,
+ buildstream_object=None,
+ modified=False
+ ):
self.name = name
self.type = entrytype
self.digest = digest
@@ -52,8 +62,9 @@ class IndexEntry():
def get_directory(self, parent):
if not self.buildstream_object:
- self.buildstream_object = CasBasedDirectory(parent.cas_cache, digest=self.digest,
- parent=parent, filename=self.name)
+ self.buildstream_object = CasBasedDirectory(
+ parent.cas_cache, digest=self.digest, parent=parent, filename=self.name
+ )
self.digest = None
return self.buildstream_object
@@ -69,6 +80,7 @@ class IndexEntry():
# which is meant to be unimplemented.
# pylint: disable=super-init-not-called
+
class CasBasedDirectory(Directory):
"""
CAS-based directories can have two names; one is a 'common name' which has no effect
@@ -100,21 +112,19 @@ class CasBasedDirectory(Directory):
def _populate_index(self, digest):
try:
pb2_directory = remote_execution_pb2.Directory()
- with open(self.cas_cache.objpath(digest), 'rb') as f:
+ with open(self.cas_cache.objpath(digest), "rb") as f:
pb2_directory.ParseFromString(f.read())
except FileNotFoundError as e:
raise VirtualDirectoryError("Directory not found in local cache: {}".format(e)) from e
for entry in pb2_directory.directories:
- self.index[entry.name] = IndexEntry(entry.name, _FileType.DIRECTORY,
- digest=entry.digest)
+ self.index[entry.name] = IndexEntry(entry.name, _FileType.DIRECTORY, digest=entry.digest)
for entry in pb2_directory.files:
- self.index[entry.name] = IndexEntry(entry.name, _FileType.REGULAR_FILE,
- digest=entry.digest,
- is_executable=entry.is_executable)
+ self.index[entry.name] = IndexEntry(
+ entry.name, _FileType.REGULAR_FILE, digest=entry.digest, is_executable=entry.is_executable
+ )
for entry in pb2_directory.symlinks:
- self.index[entry.name] = IndexEntry(entry.name, _FileType.SYMLINK,
- target=entry.target)
+ self.index[entry.name] = IndexEntry(entry.name, _FileType.SYMLINK, target=entry.target)
def _find_self_in_parent(self):
assert self.parent is not None
@@ -136,8 +146,7 @@ class CasBasedDirectory(Directory):
return newdir
def _add_file(self, basename, filename, modified=False, can_link=False):
- entry = IndexEntry(filename, _FileType.REGULAR_FILE,
- modified=modified or filename in self.index)
+ entry = IndexEntry(filename, _FileType.REGULAR_FILE, modified=modified or filename in self.index)
path = os.path.join(basename, filename)
entry.digest = self.cas_cache.add_object(path=path, link_directly=can_link)
entry.is_executable = os.access(path, os.X_OK)
@@ -206,14 +215,13 @@ class CasBasedDirectory(Directory):
current_dir = current_dir.descend(*newpaths, follow_symlinks=True)
else:
error = "Cannot descend into {}, which is a '{}' in the directory {}"
- raise VirtualDirectoryError(error.format(path,
- current_dir.index[path].type,
- current_dir),
- reason="not-a-directory")
+ raise VirtualDirectoryError(
+ error.format(path, current_dir.index[path].type, current_dir), reason="not-a-directory"
+ )
else:
- if path == '.':
+ if path == ".":
continue
- elif path == '..':
+ elif path == "..":
if current_dir.parent is not None:
current_dir = current_dir.parent
# In POSIX /.. == / so just stay at the root dir
@@ -222,8 +230,7 @@ class CasBasedDirectory(Directory):
current_dir = current_dir._add_directory(path)
else:
error = "'{}' not found in {}"
- raise VirtualDirectoryError(error.format(path, str(current_dir)),
- reason="directory-not-found")
+ raise VirtualDirectoryError(error.format(path, str(current_dir)), reason="directory-not-found")
return current_dir
@@ -299,12 +306,13 @@ class CasBasedDirectory(Directory):
dest_subdir = self.descend(name, create=create_subdir)
except VirtualDirectoryError:
filetype = self.index[name].type
- raise VirtualDirectoryError('Destination is a {}, not a directory: /{}'
- .format(filetype, relative_pathname))
+ raise VirtualDirectoryError(
+ "Destination is a {}, not a directory: /{}".format(filetype, relative_pathname)
+ )
- dest_subdir._partial_import_cas_into_cas(src_subdir, filter_callback,
- path_prefix=relative_pathname,
- origin=origin, result=result)
+ dest_subdir._partial_import_cas_into_cas(
+ src_subdir, filter_callback, path_prefix=relative_pathname, origin=origin, result=result
+ )
if filter_callback and not filter_callback(relative_pathname):
if is_dir and create_subdir and dest_subdir.is_empty():
@@ -317,20 +325,22 @@ class CasBasedDirectory(Directory):
if not is_dir:
if self._check_replacement(name, relative_pathname, result):
if entry.type == _FileType.REGULAR_FILE:
- self.index[name] = IndexEntry(name, _FileType.REGULAR_FILE,
- digest=entry.digest,
- is_executable=entry.is_executable,
- modified=True)
+ self.index[name] = IndexEntry(
+ name,
+ _FileType.REGULAR_FILE,
+ digest=entry.digest,
+ is_executable=entry.is_executable,
+ modified=True,
+ )
self.__invalidate_digest()
else:
assert entry.type == _FileType.SYMLINK
self._add_new_link_direct(name=name, target=entry.target)
result.files_written.append(relative_pathname)
- def import_files(self, external_pathspec, *,
- filter_callback=None,
- report_written=True, update_mtime=False,
- can_link=False):
+ def import_files(
+ self, external_pathspec, *, filter_callback=None, report_written=True, update_mtime=False, can_link=False
+ ):
""" See superclass Directory for arguments """
result = FileListResult()
@@ -358,13 +368,12 @@ class CasBasedDirectory(Directory):
def import_single_file(self, external_pathspec):
result = FileListResult()
- if self._check_replacement(os.path.basename(external_pathspec),
- os.path.dirname(external_pathspec),
- result):
- self._add_file(os.path.dirname(external_pathspec),
- os.path.basename(external_pathspec),
- modified=os.path.basename(external_pathspec)
- in result.overwritten)
+ if self._check_replacement(os.path.basename(external_pathspec), os.path.dirname(external_pathspec), result):
+ self._add_file(
+ os.path.dirname(external_pathspec),
+ os.path.basename(external_pathspec),
+ modified=os.path.basename(external_pathspec) in result.overwritten,
+ )
result.files_written.append(external_pathspec)
return result
@@ -516,10 +525,8 @@ class CasBasedDirectory(Directory):
"""
- file_list = list(filter(lambda i: i[1].type != _FileType.DIRECTORY,
- self.index.items()))
- directory_list = filter(lambda i: i[1].type == _FileType.DIRECTORY,
- self.index.items())
+ file_list = list(filter(lambda i: i[1].type != _FileType.DIRECTORY, self.index.items()))
+ directory_list = filter(lambda i: i[1].type == _FileType.DIRECTORY, self.index.items())
if prefix != "":
yield prefix
@@ -553,10 +560,7 @@ class CasBasedDirectory(Directory):
"""
for leaf in sorted(self.index.keys()):
entry = self.index[leaf]
- info = {
- "name": os.path.join(prefix, leaf),
- "type": entry.type
- }
+ info = {"name": os.path.join(prefix, leaf), "type": entry.type}
if entry.type == _FileType.REGULAR_FILE:
info["executable"] = entry.is_executable
info["size"] = self.get_size()
@@ -599,8 +603,9 @@ class CasBasedDirectory(Directory):
def _get_underlying_directory(self):
""" There is no underlying directory for a CAS-backed directory, so
throw an exception. """
- raise VirtualDirectoryError("_get_underlying_directory was called on a CAS-backed directory," +
- " which has no underlying directory.")
+ raise VirtualDirectoryError(
+ "_get_underlying_directory was called on a CAS-backed directory," + " which has no underlying directory."
+ )
# _get_digest():
#
diff --git a/src/buildstream/storage/_filebaseddirectory.py b/src/buildstream/storage/_filebaseddirectory.py
index 07c23c192..222b47979 100644
--- a/src/buildstream/storage/_filebaseddirectory.py
+++ b/src/buildstream/storage/_filebaseddirectory.py
@@ -65,23 +65,22 @@ class FileBasedDirectory(Directory):
try:
st = os.lstat(new_path)
if not stat.S_ISDIR(st.st_mode):
- raise VirtualDirectoryError("Cannot descend into '{}': '{}' is not a directory"
- .format(path, new_path))
+ raise VirtualDirectoryError(
+ "Cannot descend into '{}': '{}' is not a directory".format(path, new_path)
+ )
except FileNotFoundError:
if create:
os.mkdir(new_path)
else:
- raise VirtualDirectoryError("Cannot descend into '{}': '{}' does not exist"
- .format(path, new_path))
+ raise VirtualDirectoryError("Cannot descend into '{}': '{}' does not exist".format(path, new_path))
current_dir = FileBasedDirectory(new_path)
return current_dir
- def import_files(self, external_pathspec, *,
- filter_callback=None,
- report_written=True, update_mtime=False,
- can_link=False):
+ def import_files(
+ self, external_pathspec, *, filter_callback=None, report_written=True, update_mtime=False, can_link=False
+ ):
""" See superclass Directory for arguments """
from ._casbaseddirectory import CasBasedDirectory # pylint: disable=cyclic-import
@@ -101,13 +100,21 @@ class FileBasedDirectory(Directory):
source_directory = external_pathspec
if can_link and not update_mtime:
- import_result = link_files(source_directory, self.external_directory,
- filter_callback=filter_callback,
- ignore_missing=False, report_written=report_written)
+ import_result = link_files(
+ source_directory,
+ self.external_directory,
+ filter_callback=filter_callback,
+ ignore_missing=False,
+ report_written=report_written,
+ )
else:
- import_result = copy_files(source_directory, self.external_directory,
- filter_callback=filter_callback,
- ignore_missing=False, report_written=report_written)
+ import_result = copy_files(
+ source_directory,
+ self.external_directory,
+ filter_callback=filter_callback,
+ ignore_missing=False,
+ report_written=report_written,
+ )
if update_mtime:
cur_time = time.time()
@@ -190,8 +197,11 @@ class FileBasedDirectory(Directory):
Return value: List(str) - list of modified paths
"""
- return [f for f in list_relative_paths(self.external_directory)
- if _get_link_mtime(os.path.join(self.external_directory, f)) != BST_ARBITRARY_TIMESTAMP]
+ return [
+ f
+ for f in list_relative_paths(self.external_directory)
+ if _get_link_mtime(os.path.join(self.external_directory, f)) != BST_ARBITRARY_TIMESTAMP
+ ]
def list_relative_paths(self):
"""Provide a list of all relative paths.
@@ -251,11 +261,13 @@ class FileBasedDirectory(Directory):
dest_subdir = self.descend(name, create=create_subdir)
except VirtualDirectoryError:
filetype = self._get_filetype(name)
- raise VirtualDirectoryError('Destination is a {}, not a directory: /{}'
- .format(filetype, relative_pathname))
+ raise VirtualDirectoryError(
+ "Destination is a {}, not a directory: /{}".format(filetype, relative_pathname)
+ )
- dest_subdir._import_files_from_cas(src_subdir, actionfunc, filter_callback,
- path_prefix=relative_pathname, result=result)
+ dest_subdir._import_files_from_cas(
+ src_subdir, actionfunc, filter_callback, path_prefix=relative_pathname, result=result
+ )
if filter_callback and not filter_callback(relative_pathname):
if is_dir and create_subdir and dest_subdir.is_empty():
@@ -279,8 +291,16 @@ class FileBasedDirectory(Directory):
src_path = source_directory.cas_cache.objpath(entry.digest)
actionfunc(src_path, dest_path, result=result)
if entry.is_executable:
- os.chmod(dest_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR |
- stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
+ os.chmod(
+ dest_path,
+ stat.S_IRUSR
+ | stat.S_IWUSR
+ | stat.S_IXUSR
+ | stat.S_IRGRP
+ | stat.S_IXGRP
+ | stat.S_IROTH
+ | stat.S_IXOTH,
+ )
else:
assert entry.type == _FileType.SYMLINK
os.symlink(entry.target, dest_path)
diff --git a/src/buildstream/storage/directory.py b/src/buildstream/storage/directory.py
index 29cbb53f2..89d20c433 100644
--- a/src/buildstream/storage/directory.py
+++ b/src/buildstream/storage/directory.py
@@ -46,11 +46,12 @@ class VirtualDirectoryError(BstError):
or either of the :class:`.ElementError` or :class:`.SourceError`
exceptions should be raised from this error.
"""
+
def __init__(self, message, reason=None):
super().__init__(message, domain=ErrorDomain.VIRTUAL_FS, reason=reason)
-class Directory():
+class Directory:
def __init__(self, external_directory=None):
raise NotImplementedError()
@@ -74,10 +75,15 @@ class Directory():
raise NotImplementedError()
# Import and export of files and links
- def import_files(self, external_pathspec: Union['Directory', str], *,
- filter_callback: Optional[Callable[[str], bool]] = None,
- report_written: bool = True, update_mtime: bool = False,
- can_link: bool = False) -> FileListResult:
+ def import_files(
+ self,
+ external_pathspec: Union["Directory", str],
+ *,
+ filter_callback: Optional[Callable[[str], bool]] = None,
+ report_written: bool = True,
+ update_mtime: bool = False,
+ can_link: bool = False
+ ) -> FileListResult:
"""Imports some or all files from external_path into this directory.
Args:
@@ -214,4 +220,4 @@ class _FileType(FastEnum):
def __str__(self):
# https://github.com/PyCQA/pylint/issues/2062
- return self.name.lower().replace('_', ' ') # pylint: disable=no-member
+ return self.name.lower().replace("_", " ") # pylint: disable=no-member
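For reference, a hedged usage sketch of the import_files() signature shown above; the paths and filter rule are illustrative, and 'staging' stands in for a Directory instance obtained elsewhere (for example a sandbox's virtual directory):

    # Import everything except a tests/ subtree from a host path into 'staging'.
    def exclude_tests(relative_path):
        return not relative_path.startswith("tests/")

    result = staging.import_files(
        "/path/to/inputs",              # a host path or another Directory
        filter_callback=exclude_tests,
        report_written=True,
        can_link=True,                  # permit hardlinks where the backend supports them
    )
    print(result.files_written)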
diff --git a/src/buildstream/testing/__init__.py b/src/buildstream/testing/__init__.py
index 3926b4eab..67e96885a 100644
--- a/src/buildstream/testing/__init__.py
+++ b/src/buildstream/testing/__init__.py
@@ -31,9 +31,8 @@ from .integration import integration_cache
try:
import pytest
except ImportError:
- module_name = globals()['__name__']
- msg = "Could not import pytest:\n" \
- "To use the {} module, you must have pytest installed.".format(module_name)
+ module_name = globals()["__name__"]
+ msg = "Could not import pytest:\n" "To use the {} module, you must have pytest installed.".format(module_name)
raise ImportError(msg)
@@ -41,7 +40,7 @@ except ImportError:
ALL_REPO_KINDS = OrderedDict() # type: OrderedDict[Repo, str]
-def create_repo(kind, directory, subdir='repo'):
+def create_repo(kind, directory, subdir="repo"):
"""Convenience method for creating a Repo
Args:
@@ -92,6 +91,7 @@ def sourcetests_collection_hook(session):
Args:
session (pytest.Session): The current pytest session
"""
+
def should_collect_tests(config):
args = config.args
rootdir = config.rootdir
@@ -112,6 +112,7 @@ def sourcetests_collection_hook(session):
return True
from . import _sourcetests
+
source_test_path = os.path.dirname(_sourcetests.__file__)
# Add the location of the source tests to the session's
# python_files config. Without this, pytest may filter out these
diff --git a/src/buildstream/testing/_fixtures.py b/src/buildstream/testing/_fixtures.py
index 2684782a1..5da51bb45 100644
--- a/src/buildstream/testing/_fixtures.py
+++ b/src/buildstream/testing/_fixtures.py
@@ -30,6 +30,7 @@ def thread_check():
yield
assert utils._is_single_threaded()
+
# Reset global state in node.pyx to improve test isolation
@pytest.fixture(autouse=True)
def reset_global_node_state():
diff --git a/src/buildstream/testing/_sourcetests/build_checkout.py b/src/buildstream/testing/_sourcetests/build_checkout.py
index 4d4bcf0e2..782d99814 100644
--- a/src/buildstream/testing/_sourcetests/build_checkout.py
+++ b/src/buildstream/testing/_sourcetests/build_checkout.py
@@ -29,23 +29,23 @@ from .utils import kind # pylint: disable=unused-import
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
-DATA_DIR = os.path.join(TOP_DIR, 'project')
+DATA_DIR = os.path.join(TOP_DIR, "project")
def strict_args(args, strict):
if strict != "strict":
- return ['--no-strict', *args]
+ return ["--no-strict", *args]
return args
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("strict", ["strict", "non-strict"])
def test_fetch_build_checkout(cli, tmpdir, datafiles, strict, kind):
- checkout = os.path.join(cli.directory, 'checkout')
+ checkout = os.path.join(cli.directory, "checkout")
project = str(datafiles)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
- element_name = 'build-test-{}.bst'.format(kind)
+ dev_files_path = os.path.join(project, "files", "dev-files")
+ element_path = os.path.join(project, "elements")
+ element_name = "build-test-{}.bst".format(kind)
# Create our repo object of the given source type with
# the dev files, and then collect the initial ref.
@@ -54,26 +54,20 @@ def test_fetch_build_checkout(cli, tmpdir, datafiles, strict, kind):
ref = repo.create(dev_files_path)
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
- _yaml.roundtrip_dump(element,
- os.path.join(element_path, element_name))
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
- assert cli.get_element_state(project, element_name) == 'fetch needed'
- result = cli.run(project=project, args=strict_args(['build', element_name], strict))
+ assert cli.get_element_state(project, element_name) == "fetch needed"
+ result = cli.run(project=project, args=strict_args(["build", element_name], strict))
result.assert_success()
- assert cli.get_element_state(project, element_name) == 'cached'
+ assert cli.get_element_state(project, element_name) == "cached"
# Now check it out
- result = cli.run(project=project, args=strict_args([
- 'artifact', 'checkout', element_name, '--directory', checkout
- ], strict))
+ result = cli.run(
+ project=project, args=strict_args(["artifact", "checkout", element_name, "--directory", checkout], strict)
+ )
result.assert_success()
# Check that the pony.h include from files/dev-files exists
- filename = os.path.join(checkout, 'usr', 'include', 'pony.h')
+ filename = os.path.join(checkout, "usr", "include", "pony.h")
assert os.path.exists(filename)
diff --git a/src/buildstream/testing/_sourcetests/fetch.py b/src/buildstream/testing/_sourcetests/fetch.py
index 897752297..05b43d793 100644
--- a/src/buildstream/testing/_sourcetests/fetch.py
+++ b/src/buildstream/testing/_sourcetests/fetch.py
@@ -32,15 +32,15 @@ from .utils import kind # pylint: disable=unused-import
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
-DATA_DIR = os.path.join(TOP_DIR, 'project')
+DATA_DIR = os.path.join(TOP_DIR, "project")
@pytest.mark.datafiles(DATA_DIR)
def test_fetch(cli, tmpdir, datafiles, kind):
project = str(datafiles)
- bin_files_path = os.path.join(project, 'files', 'bin-files')
- element_path = os.path.join(project, 'elements')
- element_name = 'fetch-test-{}.bst'.format(kind)
+ bin_files_path = os.path.join(project, "files", "bin-files")
+ element_path = os.path.join(project, "elements")
+ element_name = "fetch-test-{}.bst".format(kind)
# Create our repo object of the given source type with
# the bin files, and then collect the initial ref.
@@ -49,59 +49,46 @@ def test_fetch(cli, tmpdir, datafiles, kind):
ref = repo.create(bin_files_path)
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
- _yaml.roundtrip_dump(element,
- os.path.join(element_path, element_name))
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# Assert that a fetch is needed
- assert cli.get_element_state(project, element_name) == 'fetch needed'
+ assert cli.get_element_state(project, element_name) == "fetch needed"
# Now try to fetch it
- result = cli.run(project=project, args=['source', 'fetch', element_name])
+ result = cli.run(project=project, args=["source", "fetch", element_name])
result.assert_success()
# Assert that we are now buildable because the source is
# now cached.
- assert cli.get_element_state(project, element_name) == 'buildable'
+ assert cli.get_element_state(project, element_name) == "buildable"
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", ['inline', 'project.refs'])
+@pytest.mark.parametrize("ref_storage", ["inline", "project.refs"])
def test_fetch_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
- import_etc_path = os.path.join(subproject_path, 'elements', 'import-etc-repo.bst')
- etc_files_path = os.path.join(subproject_path, 'files', 'etc-files')
+ import_etc_path = os.path.join(subproject_path, "elements", "import-etc-repo.bst")
+ etc_files_path = os.path.join(subproject_path, "files", "etc-files")
- repo = create_repo(kind, str(tmpdir.join('import-etc')))
+ repo = create_repo(kind, str(tmpdir.join("import-etc")))
ref = repo.create(etc_files_path)
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=(ref if ref_storage == 'inline' else None))
- ]
- }
+ element = {"kind": "import", "sources": [repo.source_config(ref=(ref if ref_storage == "inline" else None))]}
_yaml.roundtrip_dump(element, import_etc_path)
- update_project_configuration(project, {
- 'ref-storage': ref_storage
- })
+ update_project_configuration(project, {"ref-storage": ref_storage})
- generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == 'inline'))
+ generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline"))
- if ref_storage == 'project.refs':
- result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
+ if ref_storage == "project.refs":
+ result = cli.run(project=project, args=["source", "track", "junction.bst"])
result.assert_success()
- result = cli.run(project=project, args=['source', 'track', 'junction.bst:import-etc.bst'])
+ result = cli.run(project=project, args=["source", "track", "junction.bst:import-etc.bst"])
result.assert_success()
- result = cli.run(project=project, args=['source', 'fetch', 'junction.bst:import-etc.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "junction.bst:import-etc.bst"])
result.assert_success()
diff --git a/src/buildstream/testing/_sourcetests/mirror.py b/src/buildstream/testing/_sourcetests/mirror.py
index b6316045d..3ff3fb981 100644
--- a/src/buildstream/testing/_sourcetests/mirror.py
+++ b/src/buildstream/testing/_sourcetests/mirror.py
@@ -31,25 +31,25 @@ from .utils import kind # pylint: disable=unused-import
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
-DATA_DIR = os.path.join(TOP_DIR, 'project')
+DATA_DIR = os.path.join(TOP_DIR, "project")
def _set_project_mirrors_and_aliases(project_path, mirrors, aliases):
- project_conf_path = os.path.join(project_path, 'project.conf')
+ project_conf_path = os.path.join(project_path, "project.conf")
project_conf = _yaml.roundtrip_load(project_conf_path)
- project_conf['mirrors'] = mirrors
- project_conf['aliases'].update(aliases)
+ project_conf["mirrors"] = mirrors
+ project_conf["aliases"].update(aliases)
_yaml.roundtrip_dump(project_conf, project_conf_path)
def _set_project_includes_and_aliases(project_path, includes, aliases):
- project_conf_path = os.path.join(project_path, 'project.conf')
+ project_conf_path = os.path.join(project_path, "project.conf")
project_conf = _yaml.roundtrip_load(project_conf_path)
- project_conf['aliases'].update(aliases)
- project_conf['(@)'] = includes
+ project_conf["aliases"].update(aliases)
+ project_conf["(@)"] = includes
_yaml.roundtrip_dump(project_conf, project_conf_path)
@@ -57,11 +57,11 @@ def _set_project_includes_and_aliases(project_path, includes, aliases):
@pytest.mark.datafiles(DATA_DIR)
def test_mirror_fetch(cli, tmpdir, datafiles, kind):
project_dir = str(datafiles)
- bin_files_path = os.path.join(project_dir, 'files', 'bin-files', 'usr')
- dev_files_path = os.path.join(project_dir, 'files', 'dev-files', 'usr')
- upstream_repodir = os.path.join(str(tmpdir), 'upstream')
- mirror_repodir = os.path.join(str(tmpdir), 'mirror')
- element_dir = os.path.join(project_dir, 'elements')
+ bin_files_path = os.path.join(project_dir, "files", "bin-files", "usr")
+ dev_files_path = os.path.join(project_dir, "files", "dev-files", "usr")
+ upstream_repodir = os.path.join(str(tmpdir), "upstream")
+ mirror_repodir = os.path.join(str(tmpdir), "mirror")
+ element_dir = os.path.join(project_dir, "elements")
# Create repo objects of the upstream and mirror
upstream_repo = create_repo(kind, upstream_repodir)
@@ -69,228 +69,162 @@ def test_mirror_fetch(cli, tmpdir, datafiles, kind):
mirror_repo = upstream_repo.copy(mirror_repodir)
upstream_ref = upstream_repo.create(dev_files_path)
- element = {
- 'kind': 'import',
- 'sources': [
- upstream_repo.source_config(ref=upstream_ref)
- ]
- }
- element_name = 'test.bst'
+ element = {"kind": "import", "sources": [upstream_repo.source_config(ref=upstream_ref)]}
+ element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
- full_repo = element['sources'][0]['url']
+ full_repo = element["sources"][0]["url"]
upstream_map, repo_name = os.path.split(full_repo)
- alias = 'foo-' + kind
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
- full_mirror = mirror_repo.source_config()['url']
+ alias = "foo-" + kind
+ aliased_repo = alias + ":" + repo_name
+ element["sources"][0]["url"] = aliased_repo
+ full_mirror = mirror_repo.source_config()["url"]
mirror_map, _ = os.path.split(full_mirror)
_yaml.roundtrip_dump(element, element_path)
_set_project_mirrors_and_aliases(
project_dir,
- [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + '/'],
- },
- },
- ],
- {alias: upstream_map + '/'},
+ [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],},},],
+ {alias: upstream_map + "/"},
)
# No obvious ways of checking that the mirror has been fetched
# But at least we can be sure it succeeds
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ result = cli.run(project=project_dir, args=["source", "fetch", element_name])
result.assert_success()
@pytest.mark.datafiles(DATA_DIR)
def test_mirror_fetch_upstream_absent(cli, tmpdir, datafiles, kind):
project_dir = str(datafiles)
- dev_files_path = os.path.join(project_dir, 'files', 'dev-files', 'usr')
- upstream_repodir = os.path.join(project_dir, 'upstream')
- mirror_repodir = os.path.join(str(tmpdir), 'mirror')
- element_dir = os.path.join(project_dir, 'elements')
+ dev_files_path = os.path.join(project_dir, "files", "dev-files", "usr")
+ upstream_repodir = os.path.join(project_dir, "upstream")
+ mirror_repodir = os.path.join(str(tmpdir), "mirror")
+ element_dir = os.path.join(project_dir, "elements")
# Create repo objects of the upstream and mirror
upstream_repo = create_repo(kind, upstream_repodir)
ref = upstream_repo.create(dev_files_path)
mirror_repo = upstream_repo.copy(mirror_repodir)
- element = {
- 'kind': 'import',
- 'sources': [
- upstream_repo.source_config(ref=ref)
- ]
- }
+ element = {"kind": "import", "sources": [upstream_repo.source_config(ref=ref)]}
- element_name = 'test.bst'
+ element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
- full_repo = element['sources'][0]['url']
+ full_repo = element["sources"][0]["url"]
_, repo_name = os.path.split(full_repo)
- alias = 'foo-' + kind
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
- full_mirror = mirror_repo.source_config()['url']
+ alias = "foo-" + kind
+ aliased_repo = alias + ":" + repo_name
+ element["sources"][0]["url"] = aliased_repo
+ full_mirror = mirror_repo.source_config()["url"]
mirror_map, _ = os.path.split(full_mirror)
_yaml.roundtrip_dump(element, element_path)
_set_project_mirrors_and_aliases(
project_dir,
- [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + "/"]
- },
- },
- ],
- {alias: 'http://www.example.com'},
+ [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"]},},],
+ {alias: "http://www.example.com"},
)
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ result = cli.run(project=project_dir, args=["source", "fetch", element_name])
result.assert_success()
@pytest.mark.datafiles(DATA_DIR)
def test_mirror_from_includes(cli, tmpdir, datafiles, kind):
project_dir = str(datafiles)
- bin_files_path = os.path.join(project_dir, 'files', 'bin-files', 'usr')
- upstream_repodir = os.path.join(str(tmpdir), 'upstream')
- mirror_repodir = os.path.join(str(tmpdir), 'mirror')
- element_dir = os.path.join(project_dir, 'elements')
+ bin_files_path = os.path.join(project_dir, "files", "bin-files", "usr")
+ upstream_repodir = os.path.join(str(tmpdir), "upstream")
+ mirror_repodir = os.path.join(str(tmpdir), "mirror")
+ element_dir = os.path.join(project_dir, "elements")
# Create repo objects of the upstream and mirror
upstream_repo = create_repo(kind, upstream_repodir)
upstream_ref = upstream_repo.create(bin_files_path)
mirror_repo = upstream_repo.copy(mirror_repodir)
- element = {
- 'kind': 'import',
- 'sources': [
- upstream_repo.source_config(ref=upstream_ref)
- ]
- }
- element_name = 'test.bst'
+ element = {"kind": "import", "sources": [upstream_repo.source_config(ref=upstream_ref)]}
+ element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
- full_repo = element['sources'][0]['url']
+ full_repo = element["sources"][0]["url"]
upstream_map, repo_name = os.path.split(full_repo)
- alias = 'foo-' + kind
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
- full_mirror = mirror_repo.source_config()['url']
+ alias = "foo-" + kind
+ aliased_repo = alias + ":" + repo_name
+ element["sources"][0]["url"] = aliased_repo
+ full_mirror = mirror_repo.source_config()["url"]
mirror_map, _ = os.path.split(full_mirror)
_yaml.roundtrip_dump(element, element_path)
- config_project_dir = str(tmpdir.join('config'))
+ config_project_dir = str(tmpdir.join("config"))
os.makedirs(config_project_dir, exist_ok=True)
- config_project = {
- 'name': 'config'
- }
- _yaml.roundtrip_dump(config_project, os.path.join(config_project_dir, 'project.conf'))
- extra_mirrors = {
- 'mirrors': [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + "/"],
- }
- }
- ]
- }
- _yaml.roundtrip_dump(extra_mirrors, os.path.join(config_project_dir, 'mirrors.yml'))
- generate_junction(str(tmpdir.join('config_repo')),
- config_project_dir,
- os.path.join(element_dir, 'config.bst'))
+ config_project = {"name": "config"}
+ _yaml.roundtrip_dump(config_project, os.path.join(config_project_dir, "project.conf"))
+ extra_mirrors = {"mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],}}]}
+ _yaml.roundtrip_dump(extra_mirrors, os.path.join(config_project_dir, "mirrors.yml"))
+ generate_junction(str(tmpdir.join("config_repo")), config_project_dir, os.path.join(element_dir, "config.bst"))
_set_project_includes_and_aliases(
- project_dir,
- ['config.bst:mirrors.yml'],
- {alias: upstream_map + '/'},
+ project_dir, ["config.bst:mirrors.yml"], {alias: upstream_map + "/"},
)
# Now make the upstream unavailable.
- os.rename(upstream_repo.repo, '{}.bak'.format(upstream_repo.repo))
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ os.rename(upstream_repo.repo, "{}.bak".format(upstream_repo.repo))
+ result = cli.run(project=project_dir, args=["source", "fetch", element_name])
result.assert_success()
@pytest.mark.datafiles(DATA_DIR)
def test_mirror_junction_from_includes(cli, tmpdir, datafiles, kind):
project_dir = str(datafiles)
- bin_files_path = os.path.join(project_dir, 'files', 'bin-files', 'usr')
- upstream_repodir = os.path.join(str(tmpdir), 'upstream')
- mirror_repodir = os.path.join(str(tmpdir), 'mirror')
- element_dir = os.path.join(project_dir, 'elements')
+ bin_files_path = os.path.join(project_dir, "files", "bin-files", "usr")
+ upstream_repodir = os.path.join(str(tmpdir), "upstream")
+ mirror_repodir = os.path.join(str(tmpdir), "mirror")
+ element_dir = os.path.join(project_dir, "elements")
# Create repo objects of the upstream and mirror
upstream_repo = create_repo(kind, upstream_repodir)
upstream_ref = upstream_repo.create(bin_files_path)
mirror_repo = upstream_repo.copy(mirror_repodir)
- element = {
- 'kind': 'junction',
- 'sources': [
- upstream_repo.source_config(ref=upstream_ref)
- ]
- }
- element_name = 'test.bst'
+ element = {"kind": "junction", "sources": [upstream_repo.source_config(ref=upstream_ref)]}
+ element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
- full_repo = element['sources'][0]['url']
+ full_repo = element["sources"][0]["url"]
upstream_map, repo_name = os.path.split(full_repo)
- alias = 'foo-' + kind
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
- full_mirror = mirror_repo.source_config()['url']
+ alias = "foo-" + kind
+ aliased_repo = alias + ":" + repo_name
+ element["sources"][0]["url"] = aliased_repo
+ full_mirror = mirror_repo.source_config()["url"]
mirror_map, _ = os.path.split(full_mirror)
_yaml.roundtrip_dump(element, element_path)
- config_project_dir = str(tmpdir.join('config'))
+ config_project_dir = str(tmpdir.join("config"))
os.makedirs(config_project_dir, exist_ok=True)
- config_project = {
- 'name': 'config'
- }
- _yaml.roundtrip_dump(config_project, os.path.join(config_project_dir, 'project.conf'))
- extra_mirrors = {
- 'mirrors': [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + "/"],
- }
- }
- ]
- }
- _yaml.roundtrip_dump(extra_mirrors, os.path.join(config_project_dir, 'mirrors.yml'))
- generate_junction(str(tmpdir.join('config_repo')),
- config_project_dir,
- os.path.join(element_dir, 'config.bst'))
+ config_project = {"name": "config"}
+ _yaml.roundtrip_dump(config_project, os.path.join(config_project_dir, "project.conf"))
+ extra_mirrors = {"mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],}}]}
+ _yaml.roundtrip_dump(extra_mirrors, os.path.join(config_project_dir, "mirrors.yml"))
+ generate_junction(str(tmpdir.join("config_repo")), config_project_dir, os.path.join(element_dir, "config.bst"))
- _set_project_includes_and_aliases(
- project_dir,
- ['config.bst:mirrors.yml'],
- {alias: upstream_map + '/'}
- )
+ _set_project_includes_and_aliases(project_dir, ["config.bst:mirrors.yml"], {alias: upstream_map + "/"})
# Now make the upstream unavailable.
- os.rename(upstream_repo.repo, '{}.bak'.format(upstream_repo.repo))
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ os.rename(upstream_repo.repo, "{}.bak".format(upstream_repo.repo))
+ result = cli.run(project=project_dir, args=["source", "fetch", element_name])
result.assert_main_error(ErrorDomain.STREAM, None)
# Now make the upstream available again.
- os.rename('{}.bak'.format(upstream_repo.repo), upstream_repo.repo)
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ os.rename("{}.bak".format(upstream_repo.repo), upstream_repo.repo)
+ result = cli.run(project=project_dir, args=["source", "fetch", element_name])
result.assert_success()
@pytest.mark.datafiles(DATA_DIR)
def test_mirror_track_upstream_present(cli, tmpdir, datafiles, kind):
project_dir = str(datafiles)
- bin_files_path = os.path.join(project_dir, 'files', 'bin-files', 'usr')
- dev_files_path = os.path.join(project_dir, 'files', 'dev-files', 'usr')
- upstream_repodir = os.path.join(str(tmpdir), 'upstream')
- mirror_repodir = os.path.join(str(tmpdir), 'mirror')
- element_dir = os.path.join(project_dir, 'elements')
+ bin_files_path = os.path.join(project_dir, "files", "bin-files", "usr")
+ dev_files_path = os.path.join(project_dir, "files", "dev-files", "usr")
+ upstream_repodir = os.path.join(str(tmpdir), "upstream")
+ mirror_repodir = os.path.join(str(tmpdir), "mirror")
+ element_dir = os.path.join(project_dir, "elements")
# Create repo objects of the upstream and mirror
upstream_repo = create_repo(kind, upstream_repodir)
@@ -298,55 +232,43 @@ def test_mirror_track_upstream_present(cli, tmpdir, datafiles, kind):
mirror_repo = upstream_repo.copy(mirror_repodir)
upstream_ref = upstream_repo.create(dev_files_path)
- element = {
- 'kind': 'import',
- 'sources': [
- upstream_repo.source_config(ref=upstream_ref)
- ]
- }
+ element = {"kind": "import", "sources": [upstream_repo.source_config(ref=upstream_ref)]}
- element_name = 'test.bst'
+ element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
- full_repo = element['sources'][0]['url']
+ full_repo = element["sources"][0]["url"]
upstream_map, repo_name = os.path.split(full_repo)
- alias = 'foo-' + kind
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
- full_mirror = mirror_repo.source_config()['url']
+ alias = "foo-" + kind
+ aliased_repo = alias + ":" + repo_name
+ element["sources"][0]["url"] = aliased_repo
+ full_mirror = mirror_repo.source_config()["url"]
mirror_map, _ = os.path.split(full_mirror)
_yaml.roundtrip_dump(element, element_path)
_set_project_mirrors_and_aliases(
project_dir,
- [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + '/'],
- },
- },
- ],
- {alias: upstream_map + '/'},
+ [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],},},],
+ {alias: upstream_map + "/"},
)
- result = cli.run(project=project_dir, args=['source', 'track', element_name])
+ result = cli.run(project=project_dir, args=["source", "track", element_name])
result.assert_success()
# Tracking tries upstream first. Check the ref is from upstream.
new_element = _yaml.load(element_path)
- source = new_element.get_sequence('sources').mapping_at(0)
- if 'ref' in source:
- assert source.get_str('ref') == upstream_ref
+ source = new_element.get_sequence("sources").mapping_at(0)
+ if "ref" in source:
+ assert source.get_str("ref") == upstream_ref
@pytest.mark.datafiles(DATA_DIR)
def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind):
project_dir = str(datafiles)
- bin_files_path = os.path.join(project_dir, 'files', 'bin-files', 'usr')
- dev_files_path = os.path.join(project_dir, 'files', 'dev-files', 'usr')
- upstream_repodir = os.path.join(str(tmpdir), 'upstream')
- mirror_repodir = os.path.join(str(tmpdir), 'mirror')
- element_dir = os.path.join(project_dir, 'elements')
+ bin_files_path = os.path.join(project_dir, "files", "bin-files", "usr")
+ dev_files_path = os.path.join(project_dir, "files", "dev-files", "usr")
+ upstream_repodir = os.path.join(str(tmpdir), "upstream")
+ mirror_repodir = os.path.join(str(tmpdir), "mirror")
+ element_dir = os.path.join(project_dir, "elements")
# Create repo objects of the upstream and mirror
upstream_repo = create_repo(kind, upstream_repodir)
@@ -355,42 +277,30 @@ def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind):
mirror_ref = upstream_ref
upstream_ref = upstream_repo.create(dev_files_path)
- element = {
- 'kind': 'import',
- 'sources': [
- upstream_repo.source_config(ref=upstream_ref)
- ]
- }
+ element = {"kind": "import", "sources": [upstream_repo.source_config(ref=upstream_ref)]}
- element_name = 'test.bst'
+ element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
- full_repo = element['sources'][0]['url']
+ full_repo = element["sources"][0]["url"]
_, repo_name = os.path.split(full_repo)
- alias = 'foo-' + kind
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
- full_mirror = mirror_repo.source_config()['url']
+ alias = "foo-" + kind
+ aliased_repo = alias + ":" + repo_name
+ element["sources"][0]["url"] = aliased_repo
+ full_mirror = mirror_repo.source_config()["url"]
mirror_map, _ = os.path.split(full_mirror)
_yaml.roundtrip_dump(element, element_path)
_set_project_mirrors_and_aliases(
project_dir,
- [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + '/'],
- },
- },
- ],
- {alias: 'http://www.example.com'},
+ [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],},},],
+ {alias: "http://www.example.com"},
)
- result = cli.run(project=project_dir, args=['source', 'track', element_name])
+ result = cli.run(project=project_dir, args=["source", "track", element_name])
result.assert_success()
# Check that tracking fell back to the mirror
new_element = _yaml.load(element_path)
- source = new_element.get_sequence('sources').mapping_at(0)
- if 'ref' in source:
- assert source.get_str('ref') == mirror_ref
+ source = new_element.get_sequence("sources").mapping_at(0)
+ if "ref" in source:
+ assert source.get_str("ref") == mirror_ref
diff --git a/src/buildstream/testing/_sourcetests/source_determinism.py b/src/buildstream/testing/_sourcetests/source_determinism.py
index fc0e4618c..27664e0c2 100644
--- a/src/buildstream/testing/_sourcetests/source_determinism.py
+++ b/src/buildstream/testing/_sourcetests/source_determinism.py
@@ -30,83 +30,72 @@ from .utils import kind # pylint: disable=unused-import
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
-DATA_DIR = os.path.join(TOP_DIR, 'project')
+DATA_DIR = os.path.join(TOP_DIR, "project")
-def create_test_file(*path, mode=0o644, content='content\n'):
+def create_test_file(*path, mode=0o644, content="content\n"):
path = os.path.join(*path)
os.makedirs(os.path.dirname(path), exist_ok=True)
- with open(path, 'w') as f:
+ with open(path, "w") as f:
f.write(content)
os.fchmod(f.fileno(), mode)
def create_test_directory(*path, mode=0o644):
- create_test_file(*path, '.keep', content='')
+ create_test_file(*path, ".keep", content="")
path = os.path.join(*path)
os.chmod(path, mode)
@pytest.mark.integration
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
-@pytest.mark.skipif(HAVE_SANDBOX == 'buildbox', reason='Not working with BuildBox, Must Fix')
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
+@pytest.mark.skipif(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox, Must Fix")
def test_deterministic_source_umask(cli, tmpdir, datafiles, kind):
project = str(datafiles)
- element_name = 'list.bst'
- element_path = os.path.join(project, 'elements', element_name)
- repodir = os.path.join(str(tmpdir), 'repo')
- sourcedir = os.path.join(project, 'source')
-
- create_test_file(sourcedir, 'a.txt', mode=0o700)
- create_test_file(sourcedir, 'b.txt', mode=0o755)
- create_test_file(sourcedir, 'c.txt', mode=0o600)
- create_test_file(sourcedir, 'd.txt', mode=0o400)
- create_test_file(sourcedir, 'e.txt', mode=0o644)
- create_test_file(sourcedir, 'f.txt', mode=0o4755)
- create_test_file(sourcedir, 'g.txt', mode=0o2755)
- create_test_file(sourcedir, 'h.txt', mode=0o1755)
- create_test_directory(sourcedir, 'dir-a', mode=0o0700)
- create_test_directory(sourcedir, 'dir-c', mode=0o0755)
- create_test_directory(sourcedir, 'dir-d', mode=0o4755)
- create_test_directory(sourcedir, 'dir-e', mode=0o2755)
- create_test_directory(sourcedir, 'dir-f', mode=0o1755)
+ element_name = "list.bst"
+ element_path = os.path.join(project, "elements", element_name)
+ repodir = os.path.join(str(tmpdir), "repo")
+ sourcedir = os.path.join(project, "source")
+
+ create_test_file(sourcedir, "a.txt", mode=0o700)
+ create_test_file(sourcedir, "b.txt", mode=0o755)
+ create_test_file(sourcedir, "c.txt", mode=0o600)
+ create_test_file(sourcedir, "d.txt", mode=0o400)
+ create_test_file(sourcedir, "e.txt", mode=0o644)
+ create_test_file(sourcedir, "f.txt", mode=0o4755)
+ create_test_file(sourcedir, "g.txt", mode=0o2755)
+ create_test_file(sourcedir, "h.txt", mode=0o1755)
+ create_test_directory(sourcedir, "dir-a", mode=0o0700)
+ create_test_directory(sourcedir, "dir-c", mode=0o0755)
+ create_test_directory(sourcedir, "dir-d", mode=0o4755)
+ create_test_directory(sourcedir, "dir-e", mode=0o2755)
+ create_test_directory(sourcedir, "dir-f", mode=0o1755)
repo = create_repo(kind, repodir)
ref = repo.create(sourcedir)
source = repo.source_config(ref=ref)
element = {
- 'kind': 'manual',
- 'depends': [
- {
- 'filename': 'base.bst',
- 'type': 'build'
- }
- ],
- 'sources': [
- source
- ],
- 'config': {
- 'install-commands': [
- 'ls -l >"%{install-root}/ls-l"'
- ]
- }
+ "kind": "manual",
+ "depends": [{"filename": "base.bst", "type": "build"}],
+ "sources": [source],
+ "config": {"install-commands": ['ls -l >"%{install-root}/ls-l"']},
}
_yaml.roundtrip_dump(element, element_path)
def get_value_for_umask(umask):
- checkoutdir = os.path.join(str(tmpdir), 'checkout-{}'.format(umask))
+ checkoutdir = os.path.join(str(tmpdir), "checkout-{}".format(umask))
old_umask = os.umask(umask)
try:
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkoutdir])
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkoutdir])
result.assert_success()
- with open(os.path.join(checkoutdir, 'ls-l'), 'r') as f:
+ with open(os.path.join(checkoutdir, "ls-l"), "r") as f:
return f.read()
finally:
os.umask(old_umask)
diff --git a/src/buildstream/testing/_sourcetests/track.py b/src/buildstream/testing/_sourcetests/track.py
index 48856b351..623045cd9 100644
--- a/src/buildstream/testing/_sourcetests/track.py
+++ b/src/buildstream/testing/_sourcetests/track.py
@@ -33,33 +33,26 @@ from .utils import kind # pylint: disable=unused-import
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
-DATA_DIR = os.path.join(TOP_DIR, 'project')
+DATA_DIR = os.path.join(TOP_DIR, "project")
def generate_element(repo, element_path, dep_name=None):
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config()
- ]
- }
+ element = {"kind": "import", "sources": [repo.source_config()]}
if dep_name:
- element['depends'] = [dep_name]
+ element["depends"] = [dep_name]
_yaml.roundtrip_dump(element, element_path)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", ['inline', 'project.refs'])
+@pytest.mark.parametrize("ref_storage", ["inline", "project.refs"])
def test_track(cli, tmpdir, datafiles, ref_storage, kind):
project = str(datafiles)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
- element_name = 'track-test-{}.bst'.format(kind)
+ dev_files_path = os.path.join(project, "files", "dev-files")
+ element_path = os.path.join(project, "elements")
+ element_name = "track-test-{}.bst".format(kind)
- update_project_configuration(project, {
- 'ref-storage': ref_storage
- })
+ update_project_configuration(project, {"ref-storage": ref_storage})
# Create our repo object of the given source type with
# the dev files, and then collect the initial ref.
@@ -71,28 +64,28 @@ def test_track(cli, tmpdir, datafiles, ref_storage, kind):
generate_element(repo, os.path.join(element_path, element_name))
# Assert that a fetch is needed
- assert cli.get_element_state(project, element_name) == 'no reference'
+ assert cli.get_element_state(project, element_name) == "no reference"
# Now first try to track it
- result = cli.run(project=project, args=['source', 'track', element_name])
+ result = cli.run(project=project, args=["source", "track", element_name])
result.assert_success()
# And now fetch it: The Source has probably already cached the
# latest ref locally, but it is not required to have cached
# the associated content of the latest ref at track time, that
# is the job of fetch.
- result = cli.run(project=project, args=['source', 'fetch', element_name])
+ result = cli.run(project=project, args=["source", "fetch", element_name])
result.assert_success()
# Assert that we are now buildable because the source is
# now cached.
- assert cli.get_element_state(project, element_name) == 'buildable'
+ assert cli.get_element_state(project, element_name) == "buildable"
# Assert there was a project.refs created, depending on the configuration
- if ref_storage == 'project.refs':
- assert os.path.exists(os.path.join(project, 'project.refs'))
+ if ref_storage == "project.refs":
+ assert os.path.exists(os.path.join(project, "project.refs"))
else:
- assert not os.path.exists(os.path.join(project, 'project.refs'))
+ assert not os.path.exists(os.path.join(project, "project.refs"))
# NOTE:
@@ -112,18 +105,14 @@ def test_track(cli, tmpdir, datafiles, ref_storage, kind):
@pytest.mark.parametrize("amount", [1, 10])
def test_track_recurse(cli, tmpdir, datafiles, kind, amount):
project = str(datafiles)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
+ dev_files_path = os.path.join(project, "files", "dev-files")
+ element_path = os.path.join(project, "elements")
# Try to actually launch as many fetch jobs as possible at the same time
#
# This stresses the Source plugins and helps to ensure that
# they handle concurrent access to the store correctly.
- cli.configure({
- 'scheduler': {
- 'fetchers': amount,
- }
- })
+ cli.configure({"scheduler": {"fetchers": amount,}})
# Create our repo object of the given source type with
# the dev files, and then collect the initial ref.
@@ -135,7 +124,7 @@ def test_track_recurse(cli, tmpdir, datafiles, kind, amount):
element_names = []
last_element_name = None
for i in range(amount + 1):
- element_name = 'track-test-{}-{}.bst'.format(kind, i + 1)
+ element_name = "track-test-{}-{}.bst".format(kind, i + 1)
filename = os.path.join(element_path, element_name)
element_names.append(element_name)
@@ -146,39 +135,35 @@ def test_track_recurse(cli, tmpdir, datafiles, kind, amount):
# Assert that a fetch is needed
states = cli.get_element_states(project, [last_element_name])
for element_name in element_names:
- assert states[element_name] == 'no reference'
+ assert states[element_name] == "no reference"
# Now first try to track it
- result = cli.run(project=project, args=[
- 'source', 'track', '--deps', 'all',
- last_element_name])
+ result = cli.run(project=project, args=["source", "track", "--deps", "all", last_element_name])
result.assert_success()
# And now fetch it: The Source has probably already cached the
# latest ref locally, but it is not required to have cached
# the associated content of the latest ref at track time, that
# is the job of fetch.
- result = cli.run(project=project, args=[
- 'source', 'fetch', '--deps', 'all',
- last_element_name])
+ result = cli.run(project=project, args=["source", "fetch", "--deps", "all", last_element_name])
result.assert_success()
# Assert that the base is buildable and the rest are waiting
states = cli.get_element_states(project, [last_element_name])
for element_name in element_names:
if element_name == element_names[0]:
- assert states[element_name] == 'buildable'
+ assert states[element_name] == "buildable"
else:
- assert states[element_name] == 'waiting'
+ assert states[element_name] == "waiting"
@pytest.mark.datafiles(DATA_DIR)
def test_track_recurse_except(cli, tmpdir, datafiles, kind):
project = str(datafiles)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
- element_dep_name = 'track-test-dep-{}.bst'.format(kind)
- element_target_name = 'track-test-target-{}.bst'.format(kind)
+ dev_files_path = os.path.join(project, "files", "dev-files")
+ element_path = os.path.join(project, "elements")
+ element_dep_name = "track-test-dep-{}.bst".format(kind)
+ element_target_name = "track-test-target-{}.bst".format(kind)
# Create our repo object of the given source type with
# the dev files, and then collect the initial ref.
@@ -188,88 +173,79 @@ def test_track_recurse_except(cli, tmpdir, datafiles, kind):
# Write out our test targets
generate_element(repo, os.path.join(element_path, element_dep_name))
- generate_element(repo, os.path.join(element_path, element_target_name),
- dep_name=element_dep_name)
+ generate_element(repo, os.path.join(element_path, element_target_name), dep_name=element_dep_name)
# Assert that a fetch is needed
states = cli.get_element_states(project, [element_target_name])
- assert states[element_dep_name] == 'no reference'
- assert states[element_target_name] == 'no reference'
+ assert states[element_dep_name] == "no reference"
+ assert states[element_target_name] == "no reference"
# Now first try to track it
- result = cli.run(project=project, args=[
- 'source', 'track', '--deps', 'all', '--except', element_dep_name,
- element_target_name])
+ result = cli.run(
+ project=project, args=["source", "track", "--deps", "all", "--except", element_dep_name, element_target_name]
+ )
result.assert_success()
# And now fetch it: The Source has probably already cached the
# latest ref locally, but it is not required to have cached
# the associated content of the latest ref at track time, that
# is the job of fetch.
- result = cli.run(project=project, args=[
- 'source', 'fetch', '--deps', 'none',
- element_target_name])
+ result = cli.run(project=project, args=["source", "fetch", "--deps", "none", element_target_name])
result.assert_success()
# Assert that the dependency is buildable and the target is waiting
states = cli.get_element_states(project, [element_target_name])
- assert states[element_dep_name] == 'no reference'
- assert states[element_target_name] == 'waiting'
+ assert states[element_dep_name] == "no reference"
+ assert states[element_target_name] == "waiting"
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", ['inline', 'project.refs'])
+@pytest.mark.parametrize("ref_storage", ["inline", "project.refs"])
def test_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- etc_files = os.path.join(subproject_path, 'files', 'etc-files')
- repo_element_path = os.path.join(subproject_path, 'elements',
- 'import-etc-repo.bst')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ etc_files = os.path.join(subproject_path, "files", "etc-files")
+ repo_element_path = os.path.join(subproject_path, "elements", "import-etc-repo.bst")
- update_project_configuration(project, {
- 'ref-storage': ref_storage
- })
+ update_project_configuration(project, {"ref-storage": ref_storage})
- repo = create_repo(kind, str(tmpdir.join('element_repo')))
+ repo = create_repo(kind, str(tmpdir.join("element_repo")))
repo.create(etc_files)
generate_element(repo, repo_element_path)
- generate_junction(str(tmpdir.join('junction_repo')),
- subproject_path, junction_path, store_ref=False)
+ generate_junction(str(tmpdir.join("junction_repo")), subproject_path, junction_path, store_ref=False)
# Track the junction itself first.
- result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
+ result = cli.run(project=project, args=["source", "track", "junction.bst"])
result.assert_success()
- assert cli.get_element_state(project, 'junction.bst:import-etc-repo.bst') == 'no reference'
+ assert cli.get_element_state(project, "junction.bst:import-etc-repo.bst") == "no reference"
# Track the cross junction element. -J is not given, it is implied.
- result = cli.run(project=project, args=['source', 'track', 'junction.bst:import-etc-repo.bst'])
+ result = cli.run(project=project, args=["source", "track", "junction.bst:import-etc-repo.bst"])
- if ref_storage == 'inline':
+ if ref_storage == "inline":
# This is not allowed to track cross junction without project.refs.
- result.assert_main_error(ErrorDomain.PIPELINE, 'untrackable-sources')
+ result.assert_main_error(ErrorDomain.PIPELINE, "untrackable-sources")
else:
result.assert_success()
- assert cli.get_element_state(project, 'junction.bst:import-etc-repo.bst') == 'buildable'
+ assert cli.get_element_state(project, "junction.bst:import-etc-repo.bst") == "buildable"
- assert os.path.exists(os.path.join(project, 'project.refs'))
+ assert os.path.exists(os.path.join(project, "project.refs"))
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", ['inline', 'project.refs'])
+@pytest.mark.parametrize("ref_storage", ["inline", "project.refs"])
def test_track_include(cli, tmpdir, datafiles, ref_storage, kind):
project = str(datafiles)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
- element_name = 'track-test-{}.bst'.format(kind)
+ dev_files_path = os.path.join(project, "files", "dev-files")
+ element_path = os.path.join(project, "elements")
+ element_name = "track-test-{}.bst".format(kind)
- update_project_configuration(project, {
- 'ref-storage': ref_storage
- })
+ update_project_configuration(project, {"ref-storage": ref_storage})
# Create our repo object of the given source type with
# the dev files, and then collect the initial ref.
@@ -278,139 +254,118 @@ def test_track_include(cli, tmpdir, datafiles, ref_storage, kind):
ref = repo.create(dev_files_path)
# Generate the element
- element = {
- 'kind': 'import',
- '(@)': ['elements/sources.yml']
- }
- sources = {
- 'sources': [
- repo.source_config()
- ]
- }
+ element = {"kind": "import", "(@)": ["elements/sources.yml"]}
+ sources = {"sources": [repo.source_config()]}
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
- _yaml.roundtrip_dump(sources, os.path.join(element_path, 'sources.yml'))
+ _yaml.roundtrip_dump(sources, os.path.join(element_path, "sources.yml"))
# Assert that a fetch is needed
- assert cli.get_element_state(project, element_name) == 'no reference'
+ assert cli.get_element_state(project, element_name) == "no reference"
# Now first try to track it
- result = cli.run(project=project, args=['source', 'track', element_name])
+ result = cli.run(project=project, args=["source", "track", element_name])
result.assert_success()
# And now fetch it: The Source has probably already cached the
# latest ref locally, but it is not required to have cached
# the associated content of the latest ref at track time, that
# is the job of fetch.
- result = cli.run(project=project, args=['source', 'fetch', element_name])
+ result = cli.run(project=project, args=["source", "fetch", element_name])
result.assert_success()
# Assert that we are now buildable because the source is
# now cached.
- assert cli.get_element_state(project, element_name) == 'buildable'
+ assert cli.get_element_state(project, element_name) == "buildable"
# Assert there was a project.refs created, depending on the configuration
- if ref_storage == 'project.refs':
- assert os.path.exists(os.path.join(project, 'project.refs'))
+ if ref_storage == "project.refs":
+ assert os.path.exists(os.path.join(project, "project.refs"))
else:
- assert not os.path.exists(os.path.join(project, 'project.refs'))
+ assert not os.path.exists(os.path.join(project, "project.refs"))
- new_sources = _yaml.load(os.path.join(element_path, 'sources.yml'))
+ new_sources = _yaml.load(os.path.join(element_path, "sources.yml"))
# Get all of the sources
- assert 'sources' in new_sources
- sources_list = new_sources.get_sequence('sources')
+ assert "sources" in new_sources
+ sources_list = new_sources.get_sequence("sources")
assert len(sources_list) == 1
# Get the first source from the sources list
new_source = sources_list.mapping_at(0)
- assert 'ref' in new_source
- assert ref == new_source.get_str('ref')
+ assert "ref" in new_source
+ assert ref == new_source.get_str("ref")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", ['inline', 'project.refs'])
+@pytest.mark.parametrize("ref_storage", ["inline", "project.refs"])
def test_track_include_junction(cli, tmpdir, datafiles, ref_storage, kind):
project = str(datafiles)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
- element_name = 'track-test-{}.bst'.format(kind)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- sub_element_path = os.path.join(subproject_path, 'elements')
- junction_path = os.path.join(element_path, 'junction.bst')
+ dev_files_path = os.path.join(project, "files", "dev-files")
+ element_path = os.path.join(project, "elements")
+ element_name = "track-test-{}.bst".format(kind)
+ subproject_path = os.path.join(project, "files", "sub-project")
+ sub_element_path = os.path.join(subproject_path, "elements")
+ junction_path = os.path.join(element_path, "junction.bst")
- update_project_configuration(project, {
- 'ref-storage': ref_storage
- })
+ update_project_configuration(project, {"ref-storage": ref_storage})
# Create our repo object of the given source type with
# the dev files, and then collect the initial ref.
#
- repo = create_repo(kind, str(tmpdir.join('element_repo')))
+ repo = create_repo(kind, str(tmpdir.join("element_repo")))
repo.create(dev_files_path)
# Generate the element
- element = {
- 'kind': 'import',
- '(@)': ['junction.bst:elements/sources.yml']
- }
- sources = {
- 'sources': [
- repo.source_config()
- ]
- }
+ element = {"kind": "import", "(@)": ["junction.bst:elements/sources.yml"]}
+ sources = {"sources": [repo.source_config()]}
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
- _yaml.roundtrip_dump(sources, os.path.join(sub_element_path, 'sources.yml'))
+ _yaml.roundtrip_dump(sources, os.path.join(sub_element_path, "sources.yml"))
- generate_junction(str(tmpdir.join('junction_repo')),
- subproject_path, junction_path, store_ref=True)
+ generate_junction(str(tmpdir.join("junction_repo")), subproject_path, junction_path, store_ref=True)
- result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
+ result = cli.run(project=project, args=["source", "track", "junction.bst"])
result.assert_success()
# Assert that a fetch is needed
- assert cli.get_element_state(project, element_name) == 'no reference'
+ assert cli.get_element_state(project, element_name) == "no reference"
# Now first try to track it
- result = cli.run(project=project, args=['source', 'track', element_name])
+ result = cli.run(project=project, args=["source", "track", element_name])
# Assert there was a project.refs created, depending on the configuration
- if ref_storage == 'inline':
+ if ref_storage == "inline":
# FIXME: We should expect an error. But only a warning is emitted
# result.assert_main_error(ErrorDomain.SOURCE, 'tracking-junction-fragment')
- assert 'junction.bst:elements/sources.yml: Cannot track source in a fragment from a junction' in result.stderr
+ assert "junction.bst:elements/sources.yml: Cannot track source in a fragment from a junction" in result.stderr
else:
- assert os.path.exists(os.path.join(project, 'project.refs'))
+ assert os.path.exists(os.path.join(project, "project.refs"))
# And now fetch it: The Source has probably already cached the
# latest ref locally, but it is not required to have cached
# the associated content of the latest ref at track time, that
# is the job of fetch.
- result = cli.run(project=project, args=['source', 'fetch', element_name])
+ result = cli.run(project=project, args=["source", "fetch", element_name])
result.assert_success()
# Assert that we are now buildable because the source is
# now cached.
- assert cli.get_element_state(project, element_name) == 'buildable'
+ assert cli.get_element_state(project, element_name) == "buildable"
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", ['inline', 'project.refs'])
+@pytest.mark.parametrize("ref_storage", ["inline", "project.refs"])
def test_track_junction_included(cli, tmpdir, datafiles, ref_storage, kind):
project = str(datafiles)
- element_path = os.path.join(project, 'elements')
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(element_path, 'junction.bst')
+ element_path = os.path.join(project, "elements")
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(element_path, "junction.bst")
- update_project_configuration(project, {
- 'ref-storage': ref_storage,
- '(@)': ['junction.bst:test.yml']
- })
+ update_project_configuration(project, {"ref-storage": ref_storage, "(@)": ["junction.bst:test.yml"]})
- generate_junction(str(tmpdir.join('junction_repo')),
- subproject_path, junction_path, store_ref=False)
+ generate_junction(str(tmpdir.join("junction_repo")), subproject_path, junction_path, store_ref=False)
- result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
+ result = cli.run(project=project, args=["source", "track", "junction.bst"])
result.assert_success()
diff --git a/src/buildstream/testing/_sourcetests/track_cross_junction.py b/src/buildstream/testing/_sourcetests/track_cross_junction.py
index 550f57faf..2477b37ee 100644
--- a/src/buildstream/testing/_sourcetests/track_cross_junction.py
+++ b/src/buildstream/testing/_sourcetests/track_cross_junction.py
@@ -32,32 +32,27 @@ from .utils import add_plugins_conf
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
-DATA_DIR = os.path.join(TOP_DIR, 'project')
+DATA_DIR = os.path.join(TOP_DIR, "project")
def generate_element(repo, element_path, dep_name=None):
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config()
- ]
- }
+ element = {"kind": "import", "sources": [repo.source_config()]}
if dep_name:
- element['depends'] = [dep_name]
+ element["depends"] = [dep_name]
_yaml.roundtrip_dump(element, element_path)
def generate_import_element(tmpdir, kind, project, name):
- element_name = 'import-{}.bst'.format(name)
- repo_element_path = os.path.join(project, 'elements', element_name)
+ element_name = "import-{}.bst".format(name)
+ repo_element_path = os.path.join(project, "elements", element_name)
files = str(tmpdir.join("imported_files_{}".format(name)))
os.makedirs(files)
- with open(os.path.join(files, '{}.txt'.format(name)), 'w') as f:
+ with open(os.path.join(files, "{}.txt".format(name)), "w") as f:
f.write(name)
- repo = create_repo(kind, str(tmpdir.join('element_{}_repo'.format(name))))
+ repo = create_repo(kind, str(tmpdir.join("element_{}_repo".format(name))))
repo.create(files)
generate_element(repo, repo_element_path)
@@ -69,28 +64,22 @@ def generate_project(tmpdir, name, kind, config=None):
if config is None:
config = {}
- project_name = 'project-{}'.format(name)
+ project_name = "project-{}".format(name)
subproject_path = os.path.join(str(tmpdir.join(project_name)))
- os.makedirs(os.path.join(subproject_path, 'elements'))
+ os.makedirs(os.path.join(subproject_path, "elements"))
- project_conf = {
- 'name': name,
- 'element-path': 'elements'
- }
+ project_conf = {"name": name, "element-path": "elements"}
project_conf.update(config)
- _yaml.roundtrip_dump(project_conf, os.path.join(subproject_path, 'project.conf'))
+ _yaml.roundtrip_dump(project_conf, os.path.join(subproject_path, "project.conf"))
add_plugins_conf(subproject_path, kind)
return project_name, subproject_path
def generate_simple_stack(project, name, dependencies):
- element_name = '{}.bst'.format(name)
- element_path = os.path.join(project, 'elements', element_name)
- element = {
- 'kind': 'stack',
- 'depends': dependencies
- }
+ element_name = "{}.bst".format(name)
+ element_path = os.path.join(project, "elements", element_name)
+ element = {"kind": "stack", "depends": dependencies}
_yaml.roundtrip_dump(element, element_path)
return element_name
@@ -98,11 +87,11 @@ def generate_simple_stack(project, name, dependencies):
def generate_cross_element(project, subproject_name, import_name):
basename, _ = os.path.splitext(import_name)
- return generate_simple_stack(project, 'import-{}-{}'.format(subproject_name, basename),
- [{
- 'junction': '{}.bst'.format(subproject_name),
- 'filename': import_name
- }])
+ return generate_simple_stack(
+ project,
+ "import-{}-{}".format(subproject_name, basename),
+ [{"junction": "{}.bst".format(subproject_name), "filename": import_name}],
+ )
@pytest.mark.parametrize("kind", ALL_REPO_KINDS.keys())
@@ -110,30 +99,30 @@ def test_cross_junction_multiple_projects(cli, tmpdir, kind):
tmpdir = tmpdir.join(kind)
# Generate 3 projects: main, a, b
- _, project = generate_project(tmpdir, 'main', kind, {'ref-storage': 'project.refs'})
- project_a, project_a_path = generate_project(tmpdir, 'a', kind)
- project_b, project_b_path = generate_project(tmpdir, 'b', kind)
+ _, project = generate_project(tmpdir, "main", kind, {"ref-storage": "project.refs"})
+ project_a, project_a_path = generate_project(tmpdir, "a", kind)
+ project_b, project_b_path = generate_project(tmpdir, "b", kind)
# Generate an element with a trackable source for each project
- element_a = generate_import_element(tmpdir, kind, project_a_path, 'a')
- element_b = generate_import_element(tmpdir, kind, project_b_path, 'b')
- element_c = generate_import_element(tmpdir, kind, project, 'c')
+ element_a = generate_import_element(tmpdir, kind, project_a_path, "a")
+ element_b = generate_import_element(tmpdir, kind, project_b_path, "b")
+ element_c = generate_import_element(tmpdir, kind, project, "c")
# Create some indirections to the elements with dependencies to test --deps
- stack_a = generate_simple_stack(project_a_path, 'stack-a', [element_a])
- stack_b = generate_simple_stack(project_b_path, 'stack-b', [element_b])
+ stack_a = generate_simple_stack(project_a_path, "stack-a", [element_a])
+ stack_b = generate_simple_stack(project_b_path, "stack-b", [element_b])
# Create junctions for projects a and b in main.
- junction_a = '{}.bst'.format(project_a)
- junction_a_path = os.path.join(project, 'elements', junction_a)
- generate_junction(tmpdir.join('repo_a'), project_a_path, junction_a_path, store_ref=False)
+ junction_a = "{}.bst".format(project_a)
+ junction_a_path = os.path.join(project, "elements", junction_a)
+ generate_junction(tmpdir.join("repo_a"), project_a_path, junction_a_path, store_ref=False)
- junction_b = '{}.bst'.format(project_b)
- junction_b_path = os.path.join(project, 'elements', junction_b)
- generate_junction(tmpdir.join('repo_b'), project_b_path, junction_b_path, store_ref=False)
+ junction_b = "{}.bst".format(project_b)
+ junction_b_path = os.path.join(project, "elements", junction_b)
+ generate_junction(tmpdir.join("repo_b"), project_b_path, junction_b_path, store_ref=False)
# Track the junctions.
- result = cli.run(project=project, args=['source', 'track', junction_a, junction_b])
+ result = cli.run(project=project, args=["source", "track", junction_a, junction_b])
result.assert_success()
# Import elements from a and b in to main.
@@ -141,18 +130,16 @@ def test_cross_junction_multiple_projects(cli, tmpdir, kind):
imported_b = generate_cross_element(project, project_b, stack_b)
# Generate a top level stack depending on everything
- all_bst = generate_simple_stack(project, 'all', [imported_a, imported_b, element_c])
+ all_bst = generate_simple_stack(project, "all", [imported_a, imported_b, element_c])
# Track without following junctions. But explicitly also track the elements in project a.
- result = cli.run(project=project, args=['source', 'track',
- '--deps', 'all',
- all_bst,
- '{}:{}'.format(junction_a, stack_a)])
+ result = cli.run(
+ project=project, args=["source", "track", "--deps", "all", all_bst, "{}:{}".format(junction_a, stack_a)]
+ )
result.assert_success()
# Elements in project b should not be tracked. But elements in project a and main should.
- expected = [element_c,
- '{}:{}'.format(junction_a, element_a)]
+ expected = [element_c, "{}:{}".format(junction_a, element_a)]
assert set(result.get_tracked_elements()) == set(expected)
@@ -160,31 +147,38 @@ def test_cross_junction_multiple_projects(cli, tmpdir, kind):
def test_track_exceptions(cli, tmpdir, kind):
tmpdir = tmpdir.join(kind)
- _, project = generate_project(tmpdir, 'main', kind, {'ref-storage': 'project.refs'})
- project_a, project_a_path = generate_project(tmpdir, 'a', kind)
+ _, project = generate_project(tmpdir, "main", kind, {"ref-storage": "project.refs"})
+ project_a, project_a_path = generate_project(tmpdir, "a", kind)
- element_a = generate_import_element(tmpdir, kind, project_a_path, 'a')
- element_b = generate_import_element(tmpdir, kind, project_a_path, 'b')
+ element_a = generate_import_element(tmpdir, kind, project_a_path, "a")
+ element_b = generate_import_element(tmpdir, kind, project_a_path, "b")
- all_bst = generate_simple_stack(project_a_path, 'all', [element_a,
- element_b])
+ all_bst = generate_simple_stack(project_a_path, "all", [element_a, element_b])
- junction_a = '{}.bst'.format(project_a)
- junction_a_path = os.path.join(project, 'elements', junction_a)
- generate_junction(tmpdir.join('repo_a'), project_a_path, junction_a_path, store_ref=False)
+ junction_a = "{}.bst".format(project_a)
+ junction_a_path = os.path.join(project, "elements", junction_a)
+ generate_junction(tmpdir.join("repo_a"), project_a_path, junction_a_path, store_ref=False)
- result = cli.run(project=project, args=['source', 'track', junction_a])
+ result = cli.run(project=project, args=["source", "track", junction_a])
result.assert_success()
imported_b = generate_cross_element(project, project_a, element_b)
- indirection = generate_simple_stack(project, 'indirection', [imported_b])
-
- result = cli.run(project=project,
- args=['source', 'track', '--deps', 'all',
- '--except', indirection,
- '{}:{}'.format(junction_a, all_bst), imported_b])
+ indirection = generate_simple_stack(project, "indirection", [imported_b])
+
+ result = cli.run(
+ project=project,
+ args=[
+ "source",
+ "track",
+ "--deps",
+ "all",
+ "--except",
+ indirection,
+ "{}:{}".format(junction_a, all_bst),
+ imported_b,
+ ],
+ )
result.assert_success()
- expected = ['{}:{}'.format(junction_a, element_a),
- '{}:{}'.format(junction_a, element_b)]
+ expected = ["{}:{}".format(junction_a, element_a), "{}:{}".format(junction_a, element_b)]
assert set(result.get_tracked_elements()) == set(expected)
diff --git a/src/buildstream/testing/_sourcetests/utils.py b/src/buildstream/testing/_sourcetests/utils.py
index a0e65b4f4..116506807 100644
--- a/src/buildstream/testing/_sourcetests/utils.py
+++ b/src/buildstream/testing/_sourcetests/utils.py
@@ -27,9 +27,8 @@ import os
try:
import pytest
except ImportError:
- module_name = globals()['__name__']
- msg = "Could not import pytest:\n" \
- "To use the {} module, you must have pytest installed.".format(module_name)
+ module_name = globals()["__name__"]
+ msg = "Could not import pytest:\n" "To use the {} module, you must have pytest installed.".format(module_name)
raise ImportError(msg)
from buildstream import _yaml
@@ -72,13 +71,7 @@ def add_plugins_conf(project, plugin_kind):
if plugin_package is not None:
project_conf["plugins"] = [
- {
- "origin": "pip",
- "package-name": plugin_package,
- "sources": {
- plugin_kind: 0,
- },
- },
+ {"origin": "pip", "package-name": plugin_package, "sources": {plugin_kind: 0,},},
]
_yaml.roundtrip_dump(project_conf, project_conf_file)
@@ -96,7 +89,7 @@ def add_plugins_conf(project, plugin_kind):
# updated_configuration (dict): configuration to merge into the existing one
#
def update_project_configuration(project_path, updated_configuration):
- project_conf_path = os.path.join(project_path, 'project.conf')
+ project_conf_path = os.path.join(project_path, "project.conf")
project_conf = _yaml.roundtrip_load(project_conf_path)
project_conf.update(updated_configuration)
diff --git a/src/buildstream/testing/_sourcetests/workspace.py b/src/buildstream/testing/_sourcetests/workspace.py
index dd7977e76..34e2247ea 100644
--- a/src/buildstream/testing/_sourcetests/workspace.py
+++ b/src/buildstream/testing/_sourcetests/workspace.py
@@ -30,10 +30,10 @@ from .utils import kind # pylint: disable=unused-import
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
-DATA_DIR = os.path.join(TOP_DIR, 'project')
+DATA_DIR = os.path.join(TOP_DIR, "project")
-class WorkspaceCreator():
+class WorkspaceCreator:
def __init__(self, cli, tmpdir, datafiles, project_path=None):
self.cli = cli
self.tmpdir = tmpdir
@@ -45,17 +45,16 @@ class WorkspaceCreator():
shutil.copytree(str(datafiles), project_path)
self.project_path = project_path
- self.bin_files_path = os.path.join(project_path, 'files', 'bin-files')
+ self.bin_files_path = os.path.join(project_path, "files", "bin-files")
- self.workspace_cmd = os.path.join(self.project_path, 'workspace_cmd')
+ self.workspace_cmd = os.path.join(self.project_path, "workspace_cmd")
- def create_workspace_element(self, kind, track, suffix='', workspace_dir=None,
- element_attrs=None):
- element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
- element_path = os.path.join(self.project_path, 'elements')
+ def create_workspace_element(self, kind, track, suffix="", workspace_dir=None, element_attrs=None):
+ element_name = "workspace-test-{}{}.bst".format(kind, suffix)
+ element_path = os.path.join(self.project_path, "elements")
if not workspace_dir:
workspace_dir = os.path.join(self.workspace_cmd, element_name)
- if workspace_dir[-4:] == '.bst':
+ if workspace_dir[-4:] == ".bst":
workspace_dir = workspace_dir[:-4]
# Create our repo object of the given source type with
@@ -66,64 +65,53 @@ class WorkspaceCreator():
ref = None
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
if element_attrs:
element = {**element, **element_attrs}
- _yaml.roundtrip_dump(element,
- os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
return element_name, element_path, workspace_dir
- def create_workspace_elements(self, kinds, track, suffixs=None, workspace_dir_usr=None,
- element_attrs=None):
+ def create_workspace_elements(self, kinds, track, suffixs=None, workspace_dir_usr=None, element_attrs=None):
element_tuples = []
if suffixs is None:
- suffixs = ['', ] * len(kinds)
+ suffixs = ["",] * len(kinds)
else:
if len(suffixs) != len(kinds):
raise "terable error"
for suffix, kind in zip(suffixs, kinds):
- element_name, _, workspace_dir = \
- self.create_workspace_element(kind, track, suffix, workspace_dir_usr,
- element_attrs)
+ element_name, _, workspace_dir = self.create_workspace_element(
+ kind, track, suffix, workspace_dir_usr, element_attrs
+ )
element_tuples.append((element_name, workspace_dir))
# Assert that there is no reference, a track & fetch is needed
- states = self.cli.get_element_states(self.project_path, [
- e for e, _ in element_tuples
- ])
+ states = self.cli.get_element_states(self.project_path, [e for e, _ in element_tuples])
if track:
- assert not any(states[e] != 'no reference' for e, _ in element_tuples)
+ assert not any(states[e] != "no reference" for e, _ in element_tuples)
else:
- assert not any(states[e] != 'fetch needed' for e, _ in element_tuples)
+ assert not any(states[e] != "fetch needed" for e, _ in element_tuples)
return element_tuples
- def open_workspaces(self, kinds, track, suffixs=None, workspace_dir=None,
- element_attrs=None, no_checkout=False):
+ def open_workspaces(self, kinds, track, suffixs=None, workspace_dir=None, element_attrs=None, no_checkout=False):
- element_tuples = self.create_workspace_elements(kinds, track, suffixs, workspace_dir,
- element_attrs)
+ element_tuples = self.create_workspace_elements(kinds, track, suffixs, workspace_dir, element_attrs)
os.makedirs(self.workspace_cmd, exist_ok=True)
# Now open the workspace, this should have the effect of automatically
# tracking & fetching the source from the repo.
- args = ['workspace', 'open']
+ args = ["workspace", "open"]
if track:
- args.append('--track')
+ args.append("--track")
if no_checkout:
- args.append('--no-checkout')
+ args.append("--no-checkout")
if workspace_dir is not None:
assert len(element_tuples) == 1, "test logic error"
_, workspace_dir = element_tuples[0]
- args.extend(['--directory', workspace_dir])
+ args.extend(["--directory", workspace_dir])
args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
result = self.cli.run(cwd=self.workspace_cmd, project=self.project_path, args=args)
@@ -132,24 +120,31 @@ class WorkspaceCreator():
if not no_checkout:
# Assert that we are now buildable because the source is now cached.
- states = self.cli.get_element_states(self.project_path, [
- e for e, _ in element_tuples
- ])
- assert not any(states[e] != 'buildable' for e, _ in element_tuples)
+ states = self.cli.get_element_states(self.project_path, [e for e, _ in element_tuples])
+ assert not any(states[e] != "buildable" for e, _ in element_tuples)
# Check that the executable hello file is found in each workspace
for _, workspace in element_tuples:
- filename = os.path.join(workspace, 'usr', 'bin', 'hello')
+ filename = os.path.join(workspace, "usr", "bin", "hello")
assert os.path.exists(filename)
return element_tuples
-def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
- project_path=None, element_attrs=None, no_checkout=False):
+def open_workspace(
+ cli,
+ tmpdir,
+ datafiles,
+ kind,
+ track,
+ suffix="",
+ workspace_dir=None,
+ project_path=None,
+ element_attrs=None,
+ no_checkout=False,
+):
workspace_object = WorkspaceCreator(cli, tmpdir, datafiles, project_path)
- workspaces = workspace_object.open_workspaces((kind, ), track, (suffix, ), workspace_dir,
- element_attrs, no_checkout)
+ workspaces = workspace_object.open_workspaces((kind,), track, (suffix,), workspace_dir, element_attrs, no_checkout)
assert len(workspaces) == 1
element_name, workspace = workspaces[0]
return element_name, workspace_object.project_path, workspace
diff --git a/src/buildstream/testing/_utils/junction.py b/src/buildstream/testing/_utils/junction.py
index 98d23b0a2..cfc5898a9 100644
--- a/src/buildstream/testing/_utils/junction.py
+++ b/src/buildstream/testing/_utils/junction.py
@@ -28,12 +28,7 @@ def generate_junction(tmpdir, subproject_path, junction_path, *, store_ref=True)
if not store_ref:
source_ref = None
- element = {
- 'kind': 'junction',
- 'sources': [
- repo.source_config(ref=source_ref)
- ]
- }
+ element = {"kind": "junction", "sources": [repo.source_config(ref=source_ref)]}
_yaml.roundtrip_dump(element, junction_path)
return ref
@@ -41,46 +36,38 @@ def generate_junction(tmpdir, subproject_path, junction_path, *, store_ref=True)
# A barebones Git Repo class to use for generating junctions
class _SimpleGit(Repo):
- def __init__(self, directory, subdir='repo'):
+ def __init__(self, directory, subdir="repo"):
if not HAVE_GIT:
- pytest.skip('git is not available')
+ pytest.skip("git is not available")
super().__init__(directory, subdir)
def create(self, directory):
self.copy_directory(directory, self.repo)
- self._run_git('init', '.')
- self._run_git('add', '.')
- self._run_git('commit', '-m', 'Initial commit')
+ self._run_git("init", ".")
+ self._run_git("add", ".")
+ self._run_git("commit", "-m", "Initial commit")
return self.latest_commit()
def latest_commit(self):
- return self._run_git(
- 'rev-parse', 'HEAD',
- stdout=subprocess.PIPE,
- universal_newlines=True,
- ).stdout.strip()
+ return self._run_git("rev-parse", "HEAD", stdout=subprocess.PIPE, universal_newlines=True,).stdout.strip()
def source_config(self, ref=None):
return self.source_config_extra(ref)
def source_config_extra(self, ref=None, checkout_submodules=None):
- config = {
- 'kind': 'git',
- 'url': 'file://' + self.repo,
- 'track': 'master'
- }
+ config = {"kind": "git", "url": "file://" + self.repo, "track": "master"}
if ref is not None:
- config['ref'] = ref
+ config["ref"] = ref
if checkout_submodules is not None:
- config['checkout-submodules'] = checkout_submodules
+ config["checkout-submodules"] = checkout_submodules
return config
def _run_git(self, *args, **kwargs):
argv = [GIT]
argv.extend(args)
- if 'env' not in kwargs:
- kwargs['env'] = dict(GIT_ENV, PWD=self.repo)
- kwargs.setdefault('cwd', self.repo)
- kwargs.setdefault('check', True)
+ if "env" not in kwargs:
+ kwargs["env"] = dict(GIT_ENV, PWD=self.repo)
+ kwargs.setdefault("cwd", self.repo)
+ kwargs.setdefault("check", True)
return subprocess.run(argv, **kwargs)
diff --git a/src/buildstream/testing/_utils/site.py b/src/buildstream/testing/_utils/site.py
index ca74d9505..953d21607 100644
--- a/src/buildstream/testing/_utils/site.py
+++ b/src/buildstream/testing/_utils/site.py
@@ -5,29 +5,29 @@ import os
import subprocess
import sys
import platform
-from typing import Optional # pylint: disable=unused-import
+from typing import Optional # pylint: disable=unused-import
from buildstream import _site, utils, ProgramNotFoundError
from buildstream._platform import Platform
try:
- GIT = utils.get_host_tool('git') # type: Optional[str]
+ GIT = utils.get_host_tool("git") # type: Optional[str]
HAVE_GIT = True
- out = str(subprocess.check_output(['git', '--version']), "utf-8")
+ out = str(subprocess.check_output(["git", "--version"]), "utf-8")
# e.g. on Git for Windows we get "git version 2.21.0.windows.1".
# e.g. on Mac via Homebrew we get "git version 2.19.0".
- version = tuple(int(x) for x in out.split(' ')[2].split('.')[:3])
+ version = tuple(int(x) for x in out.split(" ")[2].split(".")[:3])
HAVE_OLD_GIT = version < (1, 8, 5)
GIT_ENV = {
- 'GIT_AUTHOR_DATE': '1320966000 +0200',
- 'GIT_AUTHOR_NAME': 'tomjon',
- 'GIT_AUTHOR_EMAIL': 'tom@jon.com',
- 'GIT_COMMITTER_DATE': '1320966000 +0200',
- 'GIT_COMMITTER_NAME': 'tomjon',
- 'GIT_COMMITTER_EMAIL': 'tom@jon.com'
+ "GIT_AUTHOR_DATE": "1320966000 +0200",
+ "GIT_AUTHOR_NAME": "tomjon",
+ "GIT_AUTHOR_EMAIL": "tom@jon.com",
+ "GIT_COMMITTER_DATE": "1320966000 +0200",
+ "GIT_COMMITTER_NAME": "tomjon",
+ "GIT_COMMITTER_EMAIL": "tom@jon.com",
}
except ProgramNotFoundError:
GIT = None
@@ -36,18 +36,16 @@ except ProgramNotFoundError:
GIT_ENV = dict()
try:
- BZR = utils.get_host_tool('bzr') # type: Optional[str]
+ BZR = utils.get_host_tool("bzr") # type: Optional[str]
HAVE_BZR = True
- BZR_ENV = {
- "BZR_EMAIL": "Testy McTesterson <testy.mctesterson@example.com>"
- }
+ BZR_ENV = {"BZR_EMAIL": "Testy McTesterson <testy.mctesterson@example.com>"}
except ProgramNotFoundError:
BZR = None
HAVE_BZR = False
BZR_ENV = {}
try:
- utils.get_host_tool('bwrap')
+ utils.get_host_tool("bwrap")
HAVE_BWRAP = True
HAVE_BWRAP_JSON_STATUS = _site.get_bwrap_version() >= (0, 3, 2)
except ProgramNotFoundError:
@@ -55,32 +53,33 @@ except ProgramNotFoundError:
HAVE_BWRAP_JSON_STATUS = False
try:
- utils.get_host_tool('lzip')
+ utils.get_host_tool("lzip")
HAVE_LZIP = True
except ProgramNotFoundError:
HAVE_LZIP = False
try:
import arpy # pylint: disable=unused-import
+
HAVE_ARPY = True
except ImportError:
HAVE_ARPY = False
try:
- utils.get_host_tool('buildbox')
+ utils.get_host_tool("buildbox")
HAVE_BUILDBOX = True
except ProgramNotFoundError:
HAVE_BUILDBOX = False
-IS_LINUX = os.getenv('BST_FORCE_BACKEND', sys.platform).startswith('linux')
-IS_WSL = (IS_LINUX and 'Microsoft' in platform.uname().release)
-IS_WINDOWS = (os.name == 'nt')
+IS_LINUX = os.getenv("BST_FORCE_BACKEND", sys.platform).startswith("linux")
+IS_WSL = IS_LINUX and "Microsoft" in platform.uname().release
+IS_WINDOWS = os.name == "nt"
MACHINE_ARCH = Platform.get_host_arch()
-HAVE_SANDBOX = os.getenv('BST_FORCE_SANDBOX')
+HAVE_SANDBOX = os.getenv("BST_FORCE_SANDBOX")
if HAVE_SANDBOX is not None:
pass
elif IS_LINUX and HAVE_BWRAP and (not IS_WSL):
- HAVE_SANDBOX = 'bwrap'
+ HAVE_SANDBOX = "bwrap"
diff --git a/src/buildstream/testing/integration.py b/src/buildstream/testing/integration.py
index 01635de74..584d7da1b 100644
--- a/src/buildstream/testing/integration.py
+++ b/src/buildstream/testing/integration.py
@@ -39,11 +39,11 @@ def walk_dir(root):
# print path to all subdirectories first.
for subdirname in dirnames:
- yield os.path.join(dirname, subdirname)[len(root):]
+ yield os.path.join(dirname, subdirname)[len(root) :]
# print path to all filenames.
for filename in filenames:
- yield os.path.join(dirname, filename)[len(root):]
+ yield os.path.join(dirname, filename)[len(root) :]
# Ensure that a directory contains the given filenames.
@@ -51,35 +51,33 @@ def assert_contains(directory, expected):
missing = set(expected)
missing.difference_update(walk_dir(directory))
if missing:
- raise AssertionError("Missing {} expected elements from list: {}"
- .format(len(missing), missing))
+ raise AssertionError("Missing {} expected elements from list: {}".format(len(missing), missing))
class IntegrationCache:
-
def __init__(self, cache):
self.root = os.path.abspath(cache)
os.makedirs(cache, exist_ok=True)
# Use the same sources every time
- self.sources = os.path.join(self.root, 'sources')
+ self.sources = os.path.join(self.root, "sources")
# Create a temp directory for the duration of the test for
# the artifacts directory
try:
- self.cachedir = tempfile.mkdtemp(dir=self.root, prefix='cache-')
+ self.cachedir = tempfile.mkdtemp(dir=self.root, prefix="cache-")
except OSError as e:
raise AssertionError("Unable to create test directory !") from e
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def integration_cache(request):
# Set the cache dir to the INTEGRATION_CACHE variable, or the
# default if that is not set.
- if 'INTEGRATION_CACHE' in os.environ:
- cache_dir = os.environ['INTEGRATION_CACHE']
+ if "INTEGRATION_CACHE" in os.environ:
+ cache_dir = os.environ["INTEGRATION_CACHE"]
else:
- cache_dir = os.path.abspath('./integration-cache')
+ cache_dir = os.path.abspath("./integration-cache")
cache = IntegrationCache(cache_dir)
@@ -92,6 +90,6 @@ def integration_cache(request):
except FileNotFoundError:
pass
try:
- shutil.rmtree(os.path.join(cache.root, 'cas'))
+ shutil.rmtree(os.path.join(cache.root, "cas"))
except FileNotFoundError:
pass
diff --git a/src/buildstream/testing/repo.py b/src/buildstream/testing/repo.py
index c1538685d..1b46ec806 100644
--- a/src/buildstream/testing/repo.py
+++ b/src/buildstream/testing/repo.py
@@ -25,7 +25,7 @@ import os
import shutil
-class Repo():
+class Repo:
"""Repo()
Abstract class providing scaffolding for generating data to be
@@ -38,7 +38,8 @@ class Repo():
subdir (str): The subdir for the repo, in case there is more than one
"""
- def __init__(self, directory, subdir='repo'):
+
+ def __init__(self, directory, subdir="repo"):
# The working directory for the repo object
#
@@ -100,7 +101,7 @@ class Repo():
Returns:
(Repo): A Repo object for the new repository.
"""
- subdir = self.repo[len(self.directory):].lstrip(os.sep)
+ subdir = self.repo[len(self.directory) :].lstrip(os.sep)
new_dir = os.path.join(dest, subdir)
os.makedirs(new_dir, exist_ok=True)
self.copy_directory(self.repo, new_dir)
diff --git a/src/buildstream/testing/runcli.py b/src/buildstream/testing/runcli.py
index 36426c8af..d4f7e2ffd 100644
--- a/src/buildstream/testing/runcli.py
+++ b/src/buildstream/testing/runcli.py
@@ -61,14 +61,8 @@ from buildstream._protos.buildstream.v2 import artifact_pb2
# Wrapper for the click.testing result
-class Result():
-
- def __init__(self,
- exit_code=None,
- exception=None,
- exc_info=None,
- output=None,
- stderr=None):
+class Result:
+ def __init__(self, exit_code=None, exception=None, exc_info=None, output=None, stderr=None):
self.exit_code = exit_code
self.exc = exception
self.exc_info = exc_info
@@ -94,8 +88,7 @@ class Result():
self.unhandled_exception = True
self.exception = get_last_exception()
- self.task_error_domain, \
- self.task_error_reason = get_last_task_error()
+ self.task_error_domain, self.task_error_reason = get_last_task_error()
else:
self.exception = None
self.task_error_domain = None
@@ -111,7 +104,7 @@ class Result():
# Raises:
# (AssertionError): If the session did not complete successfully
#
- def assert_success(self, fail_message=''):
+ def assert_success(self, fail_message=""):
assert self.exit_code == 0, fail_message
assert self.exc is None, fail_message
assert self.exception is None, fail_message
@@ -131,11 +124,7 @@ class Result():
# Raises:
# (AssertionError): If any of the assertions fail
#
- def assert_main_error(self,
- error_domain,
- error_reason,
- fail_message='',
- *, debug=False):
+ def assert_main_error(self, error_domain, error_reason, fail_message="", *, debug=False):
if debug:
print(
"""
@@ -144,11 +133,9 @@ class Result():
Domain: {}
Reason: {}
""".format(
- self.exit_code,
- self.exception,
- self.exception.domain,
- self.exception.reason
- ))
+ self.exit_code, self.exception, self.exception.domain, self.exception.reason
+ )
+ )
assert self.exit_code == -1, fail_message
assert self.exc is not None, fail_message
assert self.exception is not None, fail_message
@@ -172,10 +159,7 @@ class Result():
# Raises:
# (AssertionError): If any of the assertions fail
#
- def assert_task_error(self,
- error_domain,
- error_reason,
- fail_message=''):
+ def assert_task_error(self, error_domain, error_reason, fail_message=""):
assert self.exit_code == -1, fail_message
assert self.exc is not None, fail_message
@@ -197,7 +181,7 @@ class Result():
# Raises:
# (AssertionError): If any of the assertions fail
#
- def assert_shell_error(self, fail_message=''):
+ def assert_shell_error(self, fail_message=""):
assert self.exit_code == 1, fail_message
# get_start_order()
@@ -212,7 +196,7 @@ class Result():
# (list): A list of element names in the order which they first appeared in the result
#
def get_start_order(self, activity):
- results = re.findall(r'\[\s*{}:(\S+)\s*\]\s*START\s*.*\.log'.format(activity), self.stderr)
+ results = re.findall(r"\[\s*{}:(\S+)\s*\]\s*START\s*.*\.log".format(activity), self.stderr)
if results is None:
return []
return list(results)
@@ -228,29 +212,28 @@ class Result():
# (list): A list of element names
#
def get_tracked_elements(self):
- tracked = re.findall(r'\[\s*track:(\S+)\s*]', self.stderr)
+ tracked = re.findall(r"\[\s*track:(\S+)\s*]", self.stderr)
if tracked is None:
return []
return list(tracked)
def get_pushed_elements(self):
- pushed = re.findall(r'\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed artifact', self.stderr)
+ pushed = re.findall(r"\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed artifact", self.stderr)
if pushed is None:
return []
return list(pushed)
def get_pulled_elements(self):
- pulled = re.findall(r'\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled artifact', self.stderr)
+ pulled = re.findall(r"\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled artifact", self.stderr)
if pulled is None:
return []
return list(pulled)
-class Cli():
-
+class Cli:
def __init__(self, directory, verbose=True, default_options=None):
self.directory = directory
self.config = None
@@ -286,14 +269,13 @@ class Cli():
# element_name (str): The name of the element artifact
# cache_dir (str): Specific cache dir to remove artifact from
#
- def remove_artifact_from_cache(self, project, element_name,
- *, cache_dir=None):
+ def remove_artifact_from_cache(self, project, element_name, *, cache_dir=None):
# Read configuration to figure out where artifacts are stored
if not cache_dir:
- default = os.path.join(project, 'cache')
+ default = os.path.join(project, "cache")
if self.config is not None:
- cache_dir = self.config.get('cachedir', default)
+ cache_dir = self.config.get("cachedir", default)
else:
cache_dir = default
@@ -313,8 +295,17 @@ class Cli():
# args (list): A list of arguments to pass buildstream
# binary_capture (bool): Whether to capture the stdout/stderr as binary
#
- def run(self, configure=True, project=None, silent=False, env=None,
- cwd=None, options=None, args=None, binary_capture=False):
+ def run(
+ self,
+ configure=True,
+ project=None,
+ silent=False,
+ env=None,
+ cwd=None,
+ options=None,
+ args=None,
+ binary_capture=False,
+ ):
# We don't want to carry the state of one bst invocation into another
# bst invocation. Since node _FileInfo objects hold onto BuildStream
@@ -335,22 +326,20 @@ class Cli():
options = self.default_options + options
with ExitStack() as stack:
- bst_args = ['--no-colors']
+ bst_args = ["--no-colors"]
if silent:
- bst_args += ['--no-verbose']
+ bst_args += ["--no-verbose"]
if configure:
- config_file = stack.enter_context(
- configured(self.directory, self.config)
- )
- bst_args += ['--config', config_file]
+ config_file = stack.enter_context(configured(self.directory, self.config))
+ bst_args += ["--config", config_file]
if project:
- bst_args += ['--directory', str(project)]
+ bst_args += ["--directory", str(project)]
for option, value in options:
- bst_args += ['--option', option, value]
+ bst_args += ["--option", option, value]
bst_args += args
@@ -366,15 +355,14 @@ class Cli():
try:
sys.__stdout__.fileno()
except ValueError:
- sys.__stdout__ = open('/dev/stdout', 'w')
+ sys.__stdout__ = open("/dev/stdout", "w")
result = self._invoke(bst_cli, bst_args, binary_capture=binary_capture)
# Some informative stdout we can observe when anything fails
if self.verbose:
command = "bst " + " ".join(bst_args)
- print("BuildStream exited with code {} for invocation:\n\t{}"
- .format(result.exit_code, command))
+ print("BuildStream exited with code {} for invocation:\n\t{}".format(result.exit_code, command))
if result.output:
print("Program output was:\n{}".format(result.output))
if result.stderr:
@@ -409,9 +397,9 @@ class Cli():
exit_code = e.code
if not isinstance(exit_code, int):
- sys.stdout.write('Program exit code was not an integer: ')
+ sys.stdout.write("Program exit code was not an integer: ")
sys.stdout.write(str(exit_code))
- sys.stdout.write('\n')
+ sys.stdout.write("\n")
exit_code = 1
except Exception as e: # pylint: disable=broad-except
exception = e
@@ -424,11 +412,7 @@ class Cli():
out, err = capture.readouterr()
capture.stop_capturing()
- return Result(exit_code=exit_code,
- exception=exception,
- exc_info=exc_info,
- output=out,
- stderr=err)
+ return Result(exit_code=exit_code, exception=exception, exc_info=exc_info, output=out, stderr=err)
# Fetch an element state by name by
# invoking bst show on the project with the CLI
@@ -437,12 +421,9 @@ class Cli():
# then use get_element_states(s) instead.
#
def get_element_state(self, project, element_name):
- result = self.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{state}',
- element_name
- ])
+ result = self.run(
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{state}", element_name]
+ )
result.assert_success()
return result.output.strip()
@@ -450,18 +431,15 @@ class Cli():
#
# Returns a dictionary with the element names as keys
#
- def get_element_states(self, project, targets, deps='all'):
- result = self.run(project=project, silent=True, args=[
- 'show',
- '--deps', deps,
- '--format', '%{name}||%{state}',
- *targets
- ])
+ def get_element_states(self, project, targets, deps="all"):
+ result = self.run(
+ project=project, silent=True, args=["show", "--deps", deps, "--format", "%{name}||%{state}", *targets]
+ )
result.assert_success()
lines = result.output.splitlines()
states = {}
for line in lines:
- split = line.split(sep='||')
+ split = line.split(sep="||")
states[split[0]] = split[1]
return states
@@ -469,24 +447,18 @@ class Cli():
# on the project with the CLI
#
def get_element_key(self, project, element_name):
- result = self.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{full-key}',
- element_name
- ])
+ result = self.run(
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{full-key}", element_name]
+ )
result.assert_success()
return result.output.strip()
# Get the decoded config of an element.
#
def get_element_config(self, project, element_name):
- result = self.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{config}',
- element_name
- ])
+ result = self.run(
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{config}", element_name]
+ )
result.assert_success()
return yaml.safe_load(result.output)
@@ -494,12 +466,12 @@ class Cli():
# Fetch the elements that would be in the pipeline with the given
# arguments.
#
- def get_pipeline(self, project, elements, except_=None, scope='plan'):
+ def get_pipeline(self, project, elements, except_=None, scope="plan"):
if except_ is None:
except_ = []
- args = ['show', '--deps', scope, '--format', '%{name}']
- args += list(itertools.chain.from_iterable(zip(itertools.repeat('--except'), except_)))
+ args = ["show", "--deps", scope, "--format", "%{name}"]
+ args += list(itertools.chain.from_iterable(zip(itertools.repeat("--except"), except_)))
result = self.run(project=project, silent=True, args=args + elements)
result.assert_success()
@@ -523,11 +495,27 @@ class CliIntegration(Cli):
#
# This supports the same arguments as Cli.run(), see run_project_config().
#
- def run(self, configure=True, project=None, silent=False, env=None,
- cwd=None, options=None, args=None, binary_capture=False):
+ def run(
+ self,
+ configure=True,
+ project=None,
+ silent=False,
+ env=None,
+ cwd=None,
+ options=None,
+ args=None,
+ binary_capture=False,
+ ):
return self.run_project_config(
- configure=configure, project=project, silent=silent, env=env,
- cwd=cwd, options=options, args=args, binary_capture=binary_capture)
+ configure=configure,
+ project=project,
+ silent=silent,
+ env=env,
+ cwd=cwd,
+ options=options,
+ args=args,
+ binary_capture=binary_capture,
+ )
# run_project_config()
#
@@ -549,9 +537,9 @@ class CliIntegration(Cli):
# Save the original project.conf, because we will run more than
# once in the same temp directory
#
- project_directory = kwargs['project']
- project_filename = os.path.join(project_directory, 'project.conf')
- project_backup = os.path.join(project_directory, 'project.conf.backup')
+ project_directory = kwargs["project"]
+ project_filename = os.path.join(project_directory, "project.conf")
+ project_backup = os.path.join(project_directory, "project.conf.backup")
project_load_filename = project_filename
if not os.path.exists(project_backup):
@@ -576,8 +564,8 @@ class CliIntegration(Cli):
#
with tempfile.TemporaryDirectory(dir=project_directory) as scratchdir:
- temp_project = os.path.join(scratchdir, 'project.conf')
- with open(temp_project, 'w') as f:
+ temp_project = os.path.join(scratchdir, "project.conf")
+ with open(temp_project, "w") as f:
yaml.safe_dump(project_config, f)
project_config = _yaml.load(temp_project)
@@ -589,7 +577,7 @@ class CliIntegration(Cli):
else:
# Otherwise, just dump it as is
- with open(project_filename, 'w') as f:
+ with open(project_filename, "w") as f:
f.write(config)
return super().run(**kwargs)
@@ -611,50 +599,49 @@ class CliRemote(CliIntegration):
#
# Returns a list of configured services (by names).
#
- def ensure_services(self, actions=True, execution=True, storage=True,
- artifacts=False, sources=False):
+ def ensure_services(self, actions=True, execution=True, storage=True, artifacts=False, sources=False):
# Build a list of configured services by name:
configured_services = []
if not self.config:
return configured_services
- if 'remote-execution' in self.config:
- rexec_config = self.config['remote-execution']
+ if "remote-execution" in self.config:
+ rexec_config = self.config["remote-execution"]
- if 'action-cache-service' in rexec_config:
+ if "action-cache-service" in rexec_config:
if actions:
- configured_services.append('action-cache')
+ configured_services.append("action-cache")
else:
- rexec_config.pop('action-cache-service')
+ rexec_config.pop("action-cache-service")
- if 'execution-service' in rexec_config:
+ if "execution-service" in rexec_config:
if execution:
- configured_services.append('execution')
+ configured_services.append("execution")
else:
- rexec_config.pop('execution-service')
+ rexec_config.pop("execution-service")
- if 'storage-service' in rexec_config:
+ if "storage-service" in rexec_config:
if storage:
- configured_services.append('storage')
+ configured_services.append("storage")
else:
- rexec_config.pop('storage-service')
+ rexec_config.pop("storage-service")
- if 'artifacts' in self.config:
+ if "artifacts" in self.config:
if artifacts:
- configured_services.append('artifact-cache')
+ configured_services.append("artifact-cache")
else:
- self.config.pop('artifacts')
+ self.config.pop("artifacts")
- if 'source-caches' in self.config:
+ if "source-caches" in self.config:
if sources:
- configured_services.append('source-cache')
+ configured_services.append("source-cache")
else:
- self.config.pop('source-caches')
+ self.config.pop("source-caches")
return configured_services
-class TestArtifact():
+class TestArtifact:
# remove_artifact_from_cache():
#
@@ -666,10 +653,10 @@ class TestArtifact():
#
def remove_artifact_from_cache(self, cache_dir, element_name):
- cache_dir = os.path.join(cache_dir, 'artifacts', 'refs')
+ cache_dir = os.path.join(cache_dir, "artifacts", "refs")
- normal_name = element_name.replace(os.sep, '-')
- cache_dir = os.path.splitext(os.path.join(cache_dir, 'test', normal_name))[0]
+ normal_name = element_name.replace(os.sep, "-")
+ cache_dir = os.path.splitext(os.path.join(cache_dir, "test", normal_name))[0]
shutil.rmtree(cache_dir)
# is_cached():
@@ -688,7 +675,7 @@ class TestArtifact():
# cas = CASCache(str(cache_dir))
artifact_ref = element.get_artifact_name(element_key)
- return os.path.exists(os.path.join(cache_dir, 'artifacts', 'refs', artifact_ref))
+ return os.path.exists(os.path.join(cache_dir, "artifacts", "refs", artifact_ref))
# get_digest():
#
@@ -705,9 +692,9 @@ class TestArtifact():
def get_digest(self, cache_dir, element, element_key):
artifact_ref = element.get_artifact_name(element_key)
- artifact_dir = os.path.join(cache_dir, 'artifacts', 'refs')
+ artifact_dir = os.path.join(cache_dir, "artifacts", "refs")
artifact_proto = artifact_pb2.Artifact()
- with open(os.path.join(artifact_dir, artifact_ref), 'rb') as f:
+ with open(os.path.join(artifact_dir, artifact_ref), "rb") as f:
artifact_proto.ParseFromString(f.read())
return artifact_proto.files
@@ -727,7 +714,7 @@ class TestArtifact():
def extract_buildtree(self, cache_dir, tmpdir, ref):
artifact = artifact_pb2.Artifact()
try:
- with open(os.path.join(cache_dir, 'artifacts', 'refs', ref), 'rb') as f:
+ with open(os.path.join(cache_dir, "artifacts", "refs", ref), "rb") as f:
artifact.ParseFromString(f.read())
except FileNotFoundError:
yield None
@@ -768,7 +755,7 @@ class TestArtifact():
#
@pytest.fixture()
def cli(tmpdir):
- directory = os.path.join(str(tmpdir), 'cache')
+ directory = os.path.join(str(tmpdir), "cache")
os.makedirs(directory)
return Cli(directory)
@@ -781,27 +768,26 @@ def cli(tmpdir):
# stacktraces.
@pytest.fixture()
def cli_integration(tmpdir, integration_cache):
- directory = os.path.join(str(tmpdir), 'cache')
+ directory = os.path.join(str(tmpdir), "cache")
os.makedirs(directory)
fixture = CliIntegration(directory)
# We want to cache sources for integration tests more permanently,
# to avoid downloading the huge base-sdk repeatedly
- fixture.configure({
- 'cachedir': integration_cache.cachedir,
- 'sourcedir': integration_cache.sources,
- })
+ fixture.configure(
+ {"cachedir": integration_cache.cachedir, "sourcedir": integration_cache.sources,}
+ )
yield fixture
# remove following folders if necessary
try:
- shutil.rmtree(os.path.join(integration_cache.cachedir, 'build'))
+ shutil.rmtree(os.path.join(integration_cache.cachedir, "build"))
except FileNotFoundError:
pass
try:
- shutil.rmtree(os.path.join(integration_cache.cachedir, 'tmp'))
+ shutil.rmtree(os.path.join(integration_cache.cachedir, "tmp"))
except FileNotFoundError:
pass
@@ -813,36 +799,32 @@ def cli_integration(tmpdir, integration_cache):
# stacktraces.
@pytest.fixture()
def cli_remote_execution(tmpdir, remote_services):
- directory = os.path.join(str(tmpdir), 'cache')
+ directory = os.path.join(str(tmpdir), "cache")
os.makedirs(directory)
fixture = CliRemote(directory)
if remote_services.artifact_service:
- fixture.configure({'artifacts': [{
- 'url': remote_services.artifact_service,
- }]})
+ fixture.configure({"artifacts": [{"url": remote_services.artifact_service,}]})
remote_execution = {}
if remote_services.action_service:
- remote_execution['action-cache-service'] = {
- 'url': remote_services.action_service,
+ remote_execution["action-cache-service"] = {
+ "url": remote_services.action_service,
}
if remote_services.exec_service:
- remote_execution['execution-service'] = {
- 'url': remote_services.exec_service,
+ remote_execution["execution-service"] = {
+ "url": remote_services.exec_service,
}
if remote_services.storage_service:
- remote_execution['storage-service'] = {
- 'url': remote_services.storage_service,
+ remote_execution["storage-service"] = {
+ "url": remote_services.storage_service,
}
if remote_execution:
- fixture.configure({'remote-execution': remote_execution})
+ fixture.configure({"remote-execution": remote_execution})
if remote_services.source_service:
- fixture.configure({'source-caches': [{
- 'url': remote_services.source_service,
- }]})
+ fixture.configure({"source-caches": [{"url": remote_services.source_service,}]})
return fixture
@@ -882,12 +864,12 @@ def configured(directory, config=None):
if not config:
config = {}
- if not config.get('sourcedir', False):
- config['sourcedir'] = os.path.join(directory, 'sources')
- if not config.get('cachedir', False):
- config['cachedir'] = directory
- if not config.get('logdir', False):
- config['logdir'] = os.path.join(directory, 'logs')
+ if not config.get("sourcedir", False):
+ config["sourcedir"] = os.path.join(directory, "sources")
+ if not config.get("cachedir", False):
+ config["cachedir"] = directory
+ if not config.get("logdir", False):
+ config["logdir"] = os.path.join(directory, "logs")
# Dump it and yield the filename for test scripts to feed it
     # to buildstream as an argument
diff --git a/src/buildstream/types.py b/src/buildstream/types.py
index 5688bf393..180044dbd 100644
--- a/src/buildstream/types.py
+++ b/src/buildstream/types.py
@@ -151,7 +151,7 @@ class Consistency(FastEnum):
return self.value < other.value
-class CoreWarnings():
+class CoreWarnings:
"""CoreWarnings()
Some common warnings which are raised by core functionalities within BuildStream are found in this class.
diff --git a/src/buildstream/utils.py b/src/buildstream/utils.py
index de7c14b70..181ea1df9 100644
--- a/src/buildstream/utils.py
+++ b/src/buildstream/utils.py
@@ -52,7 +52,7 @@ from ._utils import url_directory_name # pylint: disable=unused-import
BST_ARBITRARY_TIMESTAMP = calendar.timegm((2011, 11, 11, 11, 11, 11))
# The separator we use for user specified aliases
-_ALIAS_SEPARATOR = ':'
+_ALIAS_SEPARATOR = ":"
_URI_SCHEMES = ["http", "https", "ftp", "file", "git", "sftp", "ssh"]
# Main process pid
@@ -74,6 +74,7 @@ class UtilError(BstError):
or either of the :class:`.ElementError` or :class:`.SourceError`
exceptions should be raised from this error.
"""
+
def __init__(self, message, reason=None):
super().__init__(message, domain=ErrorDomain.UTIL, reason=reason)
@@ -83,6 +84,7 @@ class ProgramNotFoundError(BstError):
It is normally unneeded to handle this exception from plugin code.
"""
+
def __init__(self, message, reason=None):
super().__init__(message, domain=ErrorDomain.PROG_NOT_FOUND, reason=reason)
@@ -92,7 +94,7 @@ class DirectoryExistsError(OSError):
"""
-class FileListResult():
+class FileListResult:
"""An object which stores the result of one of the operations
which run on a list of files.
"""
@@ -112,7 +114,7 @@ class FileListResult():
self.files_written = []
"""List of files that were written."""
- def combine(self, other: 'FileListResult') -> 'FileListResult':
+ def combine(self, other: "FileListResult") -> "FileListResult":
"""Create a new FileListResult that contains the results of both.
"""
ret = FileListResult()
@@ -165,10 +167,10 @@ def list_relative_paths(directory: str) -> Iterator[str]:
# We don't want "./" pre-pended to all the entries in the root of
# `directory`, prefer to have no prefix in that case.
- basepath = relpath if relpath != '.' and dirpath != directory else ''
+ basepath = relpath if relpath != "." and dirpath != directory else ""
# First yield the walked directory itself, except for the root
- if basepath != '':
+ if basepath != "":
yield basepath
# List the filenames in the walked directory
@@ -248,8 +250,7 @@ def sha256sum(filename: str) -> str:
h.update(chunk)
except OSError as e:
- raise UtilError("Failed to get a checksum of file '{}': {}"
- .format(filename, e)) from e
+ raise UtilError("Failed to get a checksum of file '{}': {}".format(filename, e)) from e
return h.hexdigest()
@@ -274,8 +275,7 @@ def safe_copy(src: str, dest: str, *, result: Optional[FileListResult] = None) -
os.unlink(dest)
except OSError as e:
if e.errno != errno.ENOENT:
- raise UtilError("Failed to remove destination file '{}': {}"
- .format(dest, e)) from e
+ raise UtilError("Failed to remove destination file '{}': {}".format(dest, e)) from e
shutil.copyfile(src, dest)
try:
@@ -291,8 +291,7 @@ def safe_copy(src: str, dest: str, *, result: Optional[FileListResult] = None) -
result.failed_attributes.append(dest)
except shutil.Error as e:
- raise UtilError("Failed to copy '{} -> {}': {}"
- .format(src, dest, e)) from e
+ raise UtilError("Failed to copy '{} -> {}': {}".format(src, dest, e)) from e
def safe_link(src: str, dest: str, *, result: Optional[FileListResult] = None, _unlink=False) -> None:
@@ -313,8 +312,7 @@ def safe_link(src: str, dest: str, *, result: Optional[FileListResult] = None, _
os.unlink(dest)
except OSError as e:
if e.errno != errno.ENOENT:
- raise UtilError("Failed to remove destination file '{}': {}"
- .format(dest, e)) from e
+ raise UtilError("Failed to remove destination file '{}': {}".format(dest, e)) from e
# If we can't link it due to cross-device hardlink, copy
try:
@@ -326,8 +324,7 @@ def safe_link(src: str, dest: str, *, result: Optional[FileListResult] = None, _
elif e.errno == errno.EXDEV:
safe_copy(src, dest)
else:
- raise UtilError("Failed to link '{} -> {}': {}"
- .format(src, dest, e)) from e
+ raise UtilError("Failed to link '{} -> {}': {}".format(src, dest, e)) from e
def safe_remove(path: str) -> bool:
@@ -363,16 +360,17 @@ def safe_remove(path: str) -> bool:
# Path does not exist
return True
- raise UtilError("Failed to remove '{}': {}"
- .format(path, e))
+ raise UtilError("Failed to remove '{}': {}".format(path, e))
-def copy_files(src: str,
- dest: str,
- *,
- filter_callback: Optional[Callable[[str], bool]] = None,
- ignore_missing: bool = False,
- report_written: bool = False) -> FileListResult:
+def copy_files(
+ src: str,
+ dest: str,
+ *,
+ filter_callback: Optional[Callable[[str], bool]] = None,
+ ignore_missing: bool = False,
+ report_written: bool = False
+) -> FileListResult:
"""Copy files from source to destination.
Args:
@@ -401,22 +399,28 @@ def copy_files(src: str,
"""
result = FileListResult()
try:
- _process_list(src, dest, safe_copy, result,
- filter_callback=filter_callback,
- ignore_missing=ignore_missing,
- report_written=report_written)
+ _process_list(
+ src,
+ dest,
+ safe_copy,
+ result,
+ filter_callback=filter_callback,
+ ignore_missing=ignore_missing,
+ report_written=report_written,
+ )
except OSError as e:
- raise UtilError("Failed to copy '{} -> {}': {}"
- .format(src, dest, e))
+ raise UtilError("Failed to copy '{} -> {}': {}".format(src, dest, e))
return result
-def link_files(src: str,
- dest: str,
- *,
- filter_callback: Optional[Callable[[str], bool]] = None,
- ignore_missing: bool = False,
- report_written: bool = False) -> FileListResult:
+def link_files(
+ src: str,
+ dest: str,
+ *,
+ filter_callback: Optional[Callable[[str], bool]] = None,
+ ignore_missing: bool = False,
+ report_written: bool = False
+) -> FileListResult:
"""Hardlink files from source to destination.
Args:
@@ -450,13 +454,17 @@ def link_files(src: str,
"""
result = FileListResult()
try:
- _process_list(src, dest, safe_link, result,
- filter_callback=filter_callback,
- ignore_missing=ignore_missing,
- report_written=report_written)
+ _process_list(
+ src,
+ dest,
+ safe_link,
+ result,
+ filter_callback=filter_callback,
+ ignore_missing=ignore_missing,
+ report_written=report_written,
+ )
except OSError as e:
- raise UtilError("Failed to link '{} -> {}': {}"
- .format(src, dest, e))
+ raise UtilError("Failed to link '{} -> {}': {}".format(src, dest, e))
return result
@@ -473,7 +481,7 @@ def get_host_tool(name: str) -> str:
Raises:
:class:`.ProgramNotFoundError`
"""
- search_path = os.environ.get('PATH')
+ search_path = os.environ.get("PATH")
program_path = shutil.which(name, path=search_path)
if not program_path:
@@ -491,22 +499,27 @@ def get_bst_version() -> Tuple[int, int]:
"""
# Import this only conditionally, it's not resolved at bash complete time
from . import __version__ # pylint: disable=cyclic-import
- versions = __version__.split('.')[:2]
- if versions[0] == '0+untagged':
- raise UtilError("Your git repository has no tags - BuildStream can't "
- "determine its version. Please run `git fetch --tags`.")
+ versions = __version__.split(".")[:2]
+
+ if versions[0] == "0+untagged":
+ raise UtilError(
+ "Your git repository has no tags - BuildStream can't "
+ "determine its version. Please run `git fetch --tags`."
+ )
try:
return (int(versions[0]), int(versions[1]))
except IndexError:
- raise UtilError("Cannot detect Major and Minor parts of the version\n"
- "Version: {} not in XX.YY.whatever format"
- .format(__version__))
+ raise UtilError(
+ "Cannot detect Major and Minor parts of the version\n"
+ "Version: {} not in XX.YY.whatever format".format(__version__)
+ )
except ValueError:
- raise UtilError("Cannot convert version to integer numbers\n"
- "Version: {} not in Integer.Integer.whatever format"
- .format(__version__))
+ raise UtilError(
+ "Cannot convert version to integer numbers\n"
+ "Version: {} not in Integer.Integer.whatever format".format(__version__)
+ )
def move_atomic(source: Union[Path, str], destination: Union[Path, str], *, ensure_parents: bool = True) -> None:
@@ -548,16 +561,18 @@ def move_atomic(source: Union[Path, str], destination: Union[Path, str], *, ensu
@contextmanager
-def save_file_atomic(filename: str,
- mode: str = 'w',
- *,
- buffering: int = -1,
- encoding: Optional[str] = None,
- errors: Optional[str] = None,
- newline: Optional[str] = None,
- closefd: bool = True,
- opener: Optional[Callable[[str, int], int]] = None,
- tempdir: Optional[str] = None) -> Iterator[IO]:
+def save_file_atomic(
+ filename: str,
+ mode: str = "w",
+ *,
+ buffering: int = -1,
+ encoding: Optional[str] = None,
+ errors: Optional[str] = None,
+ newline: Optional[str] = None,
+ closefd: bool = True,
+ opener: Optional[Callable[[str, int], int]] = None,
+ tempdir: Optional[str] = None
+) -> Iterator[IO]:
"""Save a file with a temporary name and rename it into place when ready.
This is a context manager which is meant for saving data to files.
@@ -589,8 +604,16 @@ def save_file_atomic(filename: str,
fd, tempname = tempfile.mkstemp(dir=tempdir)
os.close(fd)
- f = open(tempname, mode=mode, buffering=buffering, encoding=encoding,
- errors=errors, newline=newline, closefd=closefd, opener=opener)
+ f = open(
+ tempname,
+ mode=mode,
+ buffering=buffering,
+ encoding=encoding,
+ errors=errors,
+ newline=newline,
+ closefd=closefd,
+ opener=opener,
+ )
def cleanup_tempfile():
f.close()
@@ -604,7 +627,7 @@ def save_file_atomic(filename: str,
try:
with _signals.terminator(cleanup_tempfile):
# Disable type-checking since "IO[Any]" has no attribute "real_filename"
- f.real_filename = filename # type: ignore
+ f.real_filename = filename # type: ignore
yield f
f.close()
# This operation is atomic, at least on platforms we care about:
@@ -660,8 +683,7 @@ def _get_volume_size(path):
try:
usage = shutil.disk_usage(path)
except OSError as e:
- raise UtilError("Failed to retrieve stats on volume for path '{}': {}"
- .format(path, e)) from e
+ raise UtilError("Failed to retrieve stats on volume for path '{}': {}".format(path, e)) from e
return usage.total, usage.free
@@ -685,16 +707,16 @@ def _get_volume_size(path):
# UtilError if the string is not a valid data size.
#
def _parse_size(size, volume):
- if size == 'infinity':
+ if size == "infinity":
return None
- matches = re.fullmatch(r'([0-9]+\.?[0-9]*)([KMGT%]?)', size)
+ matches = re.fullmatch(r"([0-9]+\.?[0-9]*)([KMGT%]?)", size)
if matches is None:
raise UtilError("{} is not a valid data size.".format(size))
num, unit = matches.groups()
- if unit == '%':
+ if unit == "%":
num = float(num)
if num > 100:
raise UtilError("{}% is not a valid percentage value.".format(num))
@@ -703,8 +725,8 @@ def _parse_size(size, volume):
return disk_size * (num / 100)
- units = ('', 'K', 'M', 'G', 'T')
- return int(num) * 1024**units.index(unit)
+ units = ("", "K", "M", "G", "T")
+ return int(num) * 1024 ** units.index(unit)
# _pretty_size()
@@ -720,8 +742,8 @@ def _parse_size(size, volume):
# (str): The string representation of the number of bytes in the largest
def _pretty_size(size, dec_places=0):
psize = size
- unit = 'B'
- units = ('B', 'K', 'M', 'G', 'T')
+ unit = "B"
+ units = ("B", "K", "M", "G", "T")
for unit in units:
if psize < 1024:
break
@@ -746,19 +768,17 @@ def _force_rmtree(rootpath, **kwargs):
os.chmod(rootpath, 0o755)
for root, dirs, _ in os.walk(rootpath):
for d in dirs:
- path = os.path.join(root, d.lstrip('/'))
+ path = os.path.join(root, d.lstrip("/"))
if os.path.exists(path) and not os.path.islink(path):
try:
os.chmod(path, 0o755)
except OSError as e:
- raise UtilError("Failed to ensure write permission on file '{}': {}"
- .format(path, e))
+ raise UtilError("Failed to ensure write permission on file '{}': {}".format(path, e))
try:
shutil.rmtree(rootpath, **kwargs)
except OSError as e:
- raise UtilError("Failed to remove cache directory '{}': {}"
- .format(rootpath, e))
+ raise UtilError("Failed to remove cache directory '{}': {}".format(rootpath, e))
# Recursively make directories in target area
@@ -779,8 +799,7 @@ def _copy_directories(srcdir, destdir, target):
os.makedirs(new_dir)
yield (new_dir, mode)
else:
- raise UtilError('Source directory tree has file where '
- 'directory expected: {}'.format(old_dir))
+ raise UtilError("Source directory tree has file where " "directory expected: {}".format(old_dir))
else:
if not os.access(new_dir, os.W_OK):
# If the destination directory is not writable, change permissions to make it
@@ -806,16 +825,16 @@ def _ensure_real_directory(root, path):
try:
deststat = os.lstat(destpath)
if not stat.S_ISDIR(deststat.st_mode):
- relpath = destpath[len(root):]
+ relpath = destpath[len(root) :]
if stat.S_ISLNK(deststat.st_mode):
- filetype = 'symlink'
+ filetype = "symlink"
elif stat.S_ISREG(deststat.st_mode):
- filetype = 'regular file'
+ filetype = "regular file"
else:
- filetype = 'special file'
+ filetype = "special file"
- raise UtilError('Destination is a {}, not a directory: {}'.format(filetype, relpath))
+ raise UtilError("Destination is a {}, not a directory: {}".format(filetype, relpath))
except FileNotFoundError:
os.makedirs(destpath)
@@ -836,9 +855,9 @@ def _ensure_real_directory(root, path):
 # ignore_missing: Don't raise any error if a source file is missing
#
#
-def _process_list(srcdir, destdir, actionfunc, result,
- filter_callback=None,
- ignore_missing=False, report_written=False):
+def _process_list(
+ srcdir, destdir, actionfunc, result, filter_callback=None, ignore_missing=False, report_written=False
+):
# Keep track of directory permissions, since these need to be set
# *after* files have been written.
@@ -921,7 +940,7 @@ def _process_list(srcdir, destdir, actionfunc, result,
else:
# Unsupported type.
- raise UtilError('Cannot extract {} into staging-area. Unsupported type.'.format(srcpath))
+ raise UtilError("Cannot extract {} into staging-area. Unsupported type.".format(srcpath))
# Write directory permissions now that all files have been written
for d, perms in permissions:
@@ -1035,8 +1054,9 @@ def _tempnamedfile(suffix="", prefix="tmp", dir=None): # pylint: disable=redefi
if temp is not None:
temp.close()
- with _signals.terminator(close_tempfile), \
- tempfile.NamedTemporaryFile(suffix=suffix, prefix=prefix, dir=dir) as temp:
+ with _signals.terminator(close_tempfile), tempfile.NamedTemporaryFile(
+ suffix=suffix, prefix=prefix, dir=dir
+ ) as temp:
yield temp
@@ -1145,13 +1165,13 @@ def _kill_process_tree(pid):
#
def _call(*popenargs, terminate=False, **kwargs):
- kwargs['start_new_session'] = True
+ kwargs["start_new_session"] = True
process = None
- old_preexec_fn = kwargs.get('preexec_fn')
- if 'preexec_fn' in kwargs:
- del kwargs['preexec_fn']
+ old_preexec_fn = kwargs.get("preexec_fn")
+ if "preexec_fn" in kwargs:
+ del kwargs["preexec_fn"]
def preexec_fn():
os.umask(stat.S_IWGRP | stat.S_IWOTH)
@@ -1203,7 +1223,8 @@ def _call(*popenargs, terminate=False, **kwargs):
with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(kill_proc):
process = subprocess.Popen( # pylint: disable=subprocess-popen-preexec-fn
- *popenargs, preexec_fn=preexec_fn, universal_newlines=True, **kwargs)
+ *popenargs, preexec_fn=preexec_fn, universal_newlines=True, **kwargs
+ )
output, _ = process.communicate()
exit_code = process.poll()
@@ -1237,44 +1258,44 @@ def _call(*popenargs, terminate=False, **kwargs):
#
def _glob2re(pat):
i, n = 0, len(pat)
- res = '(?ms)'
+ res = "(?ms)"
while i < n:
c = pat[i]
i = i + 1
- if c == '*':
+ if c == "*":
# fnmatch.translate() simply uses the '.*' separator here,
# we only want that for double asterisk (bash 'globstar' behavior)
#
- if i < n and pat[i] == '*':
- res = res + '.*'
+ if i < n and pat[i] == "*":
+ res = res + ".*"
i = i + 1
else:
- res = res + '[^/]*'
- elif c == '?':
+ res = res + "[^/]*"
+ elif c == "?":
# fnmatch.translate() simply uses the '.' wildcard here, but
             # we don't want to match path separators here
- res = res + '[^/]'
- elif c == '[':
+ res = res + "[^/]"
+ elif c == "[":
j = i
- if j < n and pat[j] == '!':
+ if j < n and pat[j] == "!":
j = j + 1
- if j < n and pat[j] == ']':
+ if j < n and pat[j] == "]":
j = j + 1
- while j < n and pat[j] != ']':
+ while j < n and pat[j] != "]":
j = j + 1
if j >= n:
- res = res + '\\['
+ res = res + "\\["
else:
- stuff = pat[i:j].replace('\\', '\\\\')
+ stuff = pat[i:j].replace("\\", "\\\\")
i = j + 1
- if stuff[0] == '!':
- stuff = '^' + stuff[1:]
- elif stuff[0] == '^':
- stuff = '\\' + stuff
- res = '{}[{}]'.format(res, stuff)
+ if stuff[0] == "!":
+ stuff = "^" + stuff[1:]
+ elif stuff[0] == "^":
+ stuff = "\\" + stuff
+ res = "{}[{}]".format(res, stuff)
else:
res = res + re.escape(c)
- return res + r'\Z'
+ return res + r"\Z"
# _deduplicate()
@@ -1392,7 +1413,7 @@ def _deterministic_umask():
#
#
def _get_compression(tar):
- mapped_extensions = {'.tar': '', '.gz': 'gz', '.xz': 'xz', '.bz2': 'bz2'}
+ mapped_extensions = {".tar": "", ".gz": "gz", ".xz": "xz", ".bz2": "bz2"}
name, ext = os.path.splitext(tar)
@@ -1403,12 +1424,14 @@ def _get_compression(tar):
# If so, we assume we have been given an unsupported extension,
# which expects compression. Raise an error
_, suffix = os.path.splitext(name)
- if suffix == '.tar':
- raise UtilError("Expected compression with unknown file extension ('{}'), "
- "supported extensions are ('.tar'), ('.gz'), ('.xz'), ('.bz2')".format(ext))
+ if suffix == ".tar":
+ raise UtilError(
+ "Expected compression with unknown file extension ('{}'), "
+ "supported extensions are ('.tar'), ('.gz'), ('.xz'), ('.bz2')".format(ext)
+ )
# Assume just an unconventional name was provided, default to uncompressed
- return ''
+ return ""
# _is_single_threaded()