author     Chandan Singh <csingh43@bloomberg.net>  2019-11-05 13:40:03 +0000
committer  Chandan Singh <csingh43@bloomberg.net>  2019-11-05 13:40:03 +0000
commit     ab707e87f53249d7f2aac17683254b54196f90ce (patch)
tree       d1d2898c6561a8ca362419dce92a6f808d45b4e6
parent     e06c2295b063245dbdb2397e5bd8c4d0a79ba10d (diff)
download   buildstream-chandan/black.tar.gz

Use 119 line length (chandan/black)
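The commit message is terse, so a hedged illustration may help. The branch name (chandan/black) and the shape of the diff point to a reformat with Black at a 119-character line length; the sketch below is not part of the commit, assumes the black package is installed, and uses a made-up snippet to show how a call that Black wraps at its default 88 columns collapses onto one line at 119 columns.

# Illustrative sketch only, not taken from this repository or commit.
import black

SOURCE = (
    "raise ArtifactError(\n"
    '    "Failed to pull artifact {}".format(display_key),\n'
    '    detail="\\n".join(str(e) for e in errors)\n'
    ")\n"
)

# At Black's default 88-column limit this call stays split across lines;
# at 119 columns it fits on a single line, which is the bulk of what the
# diff below shows (multi-line calls collapsing into one line each).
print(black.format_str(SOURCE, mode=black.FileMode(line_length=119)))
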
-rw-r--r--  src/buildstream/_artifact.py | 47
-rw-r--r--  src/buildstream/_artifactcache.py | 79
-rw-r--r--  src/buildstream/_artifactelement.py | 8
-rw-r--r--  src/buildstream/_basecache.py | 35
-rw-r--r--  src/buildstream/_cachekey.py | 4
-rw-r--r--  src/buildstream/_cas/cascache.py | 139
-rw-r--r--  src/buildstream/_cas/casremote.py | 20
-rw-r--r--  src/buildstream/_cas/casserver.py | 71
-rw-r--r--  src/buildstream/_context.py | 61
-rw-r--r--  src/buildstream/_exceptions.py | 52
-rw-r--r--  src/buildstream/_frontend/app.py | 148
-rw-r--r--  src/buildstream/_frontend/cli.py | 370
-rw-r--r--  src/buildstream/_frontend/complete.py | 37
-rw-r--r--  src/buildstream/_frontend/status.py | 48
-rw-r--r--  src/buildstream/_frontend/widget.py | 211
-rw-r--r--  src/buildstream/_fuse/fuse.py | 104
-rw-r--r--  src/buildstream/_fuse/mount.py | 22
-rw-r--r--  src/buildstream/_gitsourcebase.py | 186
-rw-r--r--  src/buildstream/_includes.py | 51
-rw-r--r--  src/buildstream/_loader/loader.py | 155
-rw-r--r--  src/buildstream/_loader/metasource.py | 4
-rw-r--r--  src/buildstream/_message.py | 12
-rw-r--r--  src/buildstream/_messenger.py | 58
-rw-r--r--  src/buildstream/_options/option.py | 4
-rw-r--r--  src/buildstream/_options/optionarch.py | 8
-rw-r--r--  src/buildstream/_options/optionbool.py | 3
-rw-r--r--  src/buildstream/_options/optionenum.py | 4
-rw-r--r--  src/buildstream/_options/optionflags.py | 4
-rw-r--r--  src/buildstream/_options/optionpool.py | 38
-rw-r--r--  src/buildstream/_pipeline.py | 73
-rw-r--r--  src/buildstream/_platform/darwin.py | 3
-rw-r--r--  src/buildstream/_platform/platform.py | 18
-rw-r--r--  src/buildstream/_platform/win32.py | 4
-rw-r--r--  src/buildstream/_plugincontext.py | 69
-rw-r--r--  src/buildstream/_profile.py | 8
-rw-r--r--  src/buildstream/_project.py | 168
-rw-r--r--  src/buildstream/_projectrefs.py | 8
-rw-r--r--  src/buildstream/_protos/buildstream/v2/artifact_pb2.py | 20
-rw-r--r--  src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py | 8
-rw-r--r--  src/buildstream/_protos/buildstream/v2/buildstream_pb2.py | 32
-rw-r--r--  src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py | 12
-rw-r--r--  src/buildstream/_protos/buildstream/v2/source_pb2.py | 8
-rw-r--r--  src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py | 8
-rw-r--r--  src/buildstream/_protos/google/api/annotations_pb2.py | 5
-rw-r--r--  src/buildstream/_protos/google/api/http_pb2.py | 14
-rw-r--r--  src/buildstream/_protos/google/bytestream/bytestream_pb2.py | 9
-rw-r--r--  src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py | 8
-rw-r--r--  src/buildstream/_protos/google/longrunning/operations_pb2.py | 42
-rw-r--r--  src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py | 8
-rw-r--r--  src/buildstream/_protos/google/rpc/code_pb2.py | 82
-rw-r--r--  src/buildstream/_remote.py | 43
-rw-r--r--  src/buildstream/_scheduler/jobs/elementjob.py | 12
-rw-r--r--  src/buildstream/_scheduler/jobs/job.py | 136
-rw-r--r--  src/buildstream/_scheduler/jobs/jobpickler.py | 9
-rw-r--r--  src/buildstream/_scheduler/queues/queue.py | 37
-rw-r--r--  src/buildstream/_scheduler/resources.py | 5
-rw-r--r--  src/buildstream/_scheduler/scheduler.py | 34
-rw-r--r--  src/buildstream/_sourcecache.py | 60
-rw-r--r--  src/buildstream/_state.py | 4
-rw-r--r--  src/buildstream/_stream.py | 321
-rw-r--r--  src/buildstream/_version.py | 24
-rw-r--r--  src/buildstream/_workspaces.py | 69
-rw-r--r--  src/buildstream/buildelement.py | 16
-rw-r--r--  src/buildstream/element.py | 474
-rw-r--r--  src/buildstream/plugin.py | 87
-rw-r--r--  src/buildstream/plugins/elements/autotools.py | 4
-rw-r--r--  src/buildstream/plugins/elements/compose.py | 12
-rw-r--r--  src/buildstream/plugins/elements/filter.py | 37
-rw-r--r--  src/buildstream/plugins/elements/import.py | 22
-rw-r--r--  src/buildstream/plugins/elements/junction.py | 28
-rw-r--r--  src/buildstream/plugins/elements/manual.py | 4
-rw-r--r--  src/buildstream/plugins/elements/pip.py | 4
-rw-r--r--  src/buildstream/plugins/sources/_downloadablefilesource.py | 29
-rw-r--r--  src/buildstream/plugins/sources/bzr.py | 59
-rw-r--r--  src/buildstream/plugins/sources/deb.py | 4
-rw-r--r--  src/buildstream/plugins/sources/local.py | 3
-rw-r--r--  src/buildstream/plugins/sources/patch.py | 16
-rw-r--r--  src/buildstream/plugins/sources/pip.py | 15
-rw-r--r--  src/buildstream/plugins/sources/remote.py | 7
-rw-r--r--  src/buildstream/plugins/sources/tar.py | 13
-rw-r--r--  src/buildstream/plugins/sources/workspace.py | 3
-rw-r--r--  src/buildstream/plugins/sources/zip.py | 10
-rw-r--r--  src/buildstream/sandbox/_mount.py | 16
-rw-r--r--  src/buildstream/sandbox/_mounter.py | 21
-rw-r--r--  src/buildstream/sandbox/_sandboxbuildbox.py | 37
-rw-r--r--  src/buildstream/sandbox/_sandboxbwrap.py | 44
-rw-r--r--  src/buildstream/sandbox/_sandboxchroot.py | 48
-rw-r--r--  src/buildstream/sandbox/_sandboxdummy.py | 6
-rw-r--r--  src/buildstream/sandbox/_sandboxremote.py | 114
-rw-r--r--  src/buildstream/sandbox/sandbox.py | 40
-rw-r--r--  src/buildstream/scriptelement.py | 44
-rw-r--r--  src/buildstream/source.py | 160
-rw-r--r--  src/buildstream/storage/_casbaseddirectory.py | 134
-rw-r--r--  src/buildstream/storage/_filebaseddirectory.py | 59
-rw-r--r--  src/buildstream/testing/__init__.py | 9
-rw-r--r--  src/buildstream/testing/_sourcetests/build_checkout.py | 5
-rw-r--r--  src/buildstream/testing/_sourcetests/fetch.py | 12
-rw-r--r--  src/buildstream/testing/_sourcetests/mirror.py | 28
-rw-r--r--  src/buildstream/testing/_sourcetests/source_determinism.py | 13
-rw-r--r--  src/buildstream/testing/_sourcetests/track.py | 65
-rw-r--r--  src/buildstream/testing/_sourcetests/track_cross_junction.py | 22
-rw-r--r--  src/buildstream/testing/_sourcetests/utils.py | 11
-rw-r--r--  src/buildstream/testing/_sourcetests/workspace.py | 40
-rw-r--r--  src/buildstream/testing/_utils/junction.py | 4
-rw-r--r--  src/buildstream/testing/integration.py | 4
-rw-r--r--  src/buildstream/testing/runcli.py | 76
-rw-r--r--  src/buildstream/types.py | 16
-rw-r--r--  src/buildstream/utils.py | 78
-rw-r--r--  tests/artifactcache/artifactservice.py | 13
-rw-r--r--  tests/artifactcache/config.py | 64
-rw-r--r--  tests/artifactcache/expiry.py | 17
-rw-r--r--  tests/artifactcache/junctions.py | 48
-rw-r--r--  tests/artifactcache/pull.py | 24
-rw-r--r--  tests/artifactcache/push.py | 22
-rw-r--r--  tests/cachekey/cachekey.py | 33
-rwxr-xr-x  tests/cachekey/update.py | 8
-rwxr-xr-x  tests/conftest.py | 14
-rw-r--r--  tests/elements/filter.py | 132
-rw-r--r--  tests/elements/filter/basic/element_plugins/dynamic.py | 5
-rw-r--r--  tests/examples/autotools.py | 28
-rw-r--r--  tests/examples/developing.py | 60
-rw-r--r--  tests/examples/first-project.py | 14
-rw-r--r--  tests/examples/flatpak-autotools.py | 36
-rw-r--r--  tests/examples/integration-commands.py | 25
-rw-r--r--  tests/examples/junctions.py | 42
-rw-r--r--  tests/examples/running-commands.py | 21
-rw-r--r--  tests/external_plugins.py | 16
-rw-r--r--  tests/format/include.py | 106
-rw-r--r--  tests/format/include_composition.py | 24
-rw-r--r--  tests/format/invalid_keys.py | 7
-rw-r--r--  tests/format/junctions.py | 71
-rw-r--r--  tests/format/listdirectiveerrors.py | 30
-rw-r--r--  tests/format/optionarch.py | 20
-rw-r--r--  tests/format/optionbool.py | 41
-rw-r--r--  tests/format/optioneltmask.py | 28
-rw-r--r--  tests/format/optionenum.py | 42
-rw-r--r--  tests/format/optionexports.py | 12
-rw-r--r--  tests/format/optionflags.py | 47
-rw-r--r--  tests/format/optionos.py | 4
-rw-r--r--  tests/format/options.py | 120
-rw-r--r--  tests/format/project.py | 44
-rw-r--r--  tests/format/project/plugin-preflight-error/errorplugin/preflighterror.py | 3
-rw-r--r--  tests/format/projectoverrides.py | 12
-rw-r--r--  tests/format/variables.py | 89
-rw-r--r--  tests/frontend/artifact_delete.py | 22
-rw-r--r--  tests/frontend/artifact_list_contents.py | 21
-rw-r--r--  tests/frontend/artifact_log.py | 15
-rw-r--r--  tests/frontend/artifact_show.py | 4
-rw-r--r--  tests/frontend/buildcheckout.py | 144
-rw-r--r--  tests/frontend/completions.py | 160
-rw-r--r--  tests/frontend/compose_splits.py | 8
-rw-r--r--  tests/frontend/configurable_warnings.py | 10
-rw-r--r--  tests/frontend/configuredwarning/plugins/corewarn.py | 3
-rw-r--r--  tests/frontend/configuredwarning/plugins/warninga.py | 4
-rw-r--r--  tests/frontend/configuredwarning/plugins/warningb.py | 4
-rw-r--r--  tests/frontend/consistencyerror/plugins/consistencyerror.py | 4
-rw-r--r--  tests/frontend/cross_junction_workspace.py | 6
-rw-r--r--  tests/frontend/fetch.py | 4
-rw-r--r--  tests/frontend/help.py | 14
-rw-r--r--  tests/frontend/init.py | 45
-rw-r--r--  tests/frontend/large_directory.py | 8
-rw-r--r--  tests/frontend/logging.py | 26
-rw-r--r--  tests/frontend/mirror.py | 58
-rw-r--r--  tests/frontend/order.py | 34
-rw-r--r--  tests/frontend/overlaps.py | 26
-rw-r--r--  tests/frontend/progress.py | 16
-rw-r--r--  tests/frontend/project/sources/fetch_source.py | 17
-rw-r--r--  tests/frontend/pull.py | 92
-rw-r--r--  tests/frontend/push.py | 121
-rw-r--r--  tests/frontend/rebuild.py | 8
-rw-r--r--  tests/frontend/show.py | 156
-rw-r--r--  tests/frontend/source_checkout.py | 110
-rw-r--r--  tests/frontend/track.py | 60
-rw-r--r--  tests/frontend/workspace.py | 379
-rw-r--r--  tests/integration/artifact.py | 35
-rw-r--r--  tests/integration/autotools.py | 22
-rw-r--r--  tests/integration/build-uid.py | 18
-rw-r--r--  tests/integration/cachedfail.py | 40
-rw-r--r--  tests/integration/cmake.py | 22
-rw-r--r--  tests/integration/compose.py | 9
-rw-r--r--  tests/integration/filter.py | 14
-rw-r--r--  tests/integration/import.py | 10
-rw-r--r--  tests/integration/make.py | 13
-rw-r--r--  tests/integration/manual.py | 44
-rw-r--r--  tests/integration/messages.py | 33
-rw-r--r--  tests/integration/pip_element.py | 43
-rw-r--r--  tests/integration/pip_source.py | 32
-rw-r--r--  tests/integration/pullbuildtrees.py | 75
-rw-r--r--  tests/integration/sandbox-bwrap.py | 16
-rw-r--r--  tests/integration/script.py | 101
-rw-r--r--  tests/integration/shell.py | 142
-rw-r--r--  tests/integration/shellbuildtrees.py | 272
-rw-r--r--  tests/integration/sockets.py | 8
-rw-r--r--  tests/integration/source-determinism.py | 9
-rw-r--r--  tests/integration/stack.py | 7
-rw-r--r--  tests/integration/symlinks.py | 35
-rw-r--r--  tests/integration/workspace.py | 94
-rw-r--r--  tests/internals/cascache.py | 4
-rw-r--r--  tests/internals/context.py | 4
-rw-r--r--  tests/internals/pluginfactory.py | 84
-rw-r--r--  tests/internals/pluginloading.py | 8
-rw-r--r--  tests/internals/storage.py | 4
-rw-r--r--  tests/internals/storage_vdir_import.py | 16
-rw-r--r--  tests/internals/yaml.py | 438
-rw-r--r--  tests/plugins/deprecationwarnings/deprecationwarnings.py | 10
-rw-r--r--  tests/remoteexecution/buildfail.py | 9
-rw-r--r--  tests/remoteexecution/buildtree.py | 46
-rw-r--r--  tests/remoteexecution/junction.py | 19
-rw-r--r--  tests/remoteexecution/partial.py | 22
-rw-r--r--  tests/remoteexecution/simple.py | 9
-rw-r--r--  tests/sandboxes/missing_dependencies.py | 8
-rw-r--r--  tests/sandboxes/remote-exec-config.py | 24
-rw-r--r--  tests/sandboxes/selection.py | 6
-rw-r--r--  tests/sourcecache/cache.py | 4
-rw-r--r--  tests/sourcecache/config.py | 9
-rw-r--r--  tests/sourcecache/fetch.py | 28
-rw-r--r--  tests/sourcecache/source-checkout.py | 15
-rw-r--r--  tests/sourcecache/workspace.py | 16
-rw-r--r--  tests/sources/bzr.py | 5
-rw-r--r--  tests/sources/deb.py | 19
-rw-r--r--  tests/sources/git.py | 106
-rw-r--r--  tests/sources/local.py | 32
-rw-r--r--  tests/sources/patch.py | 24
-rw-r--r--  tests/sources/previous_source_access.py | 9
-rw-r--r--  tests/sources/previous_source_access/plugins/sources/foo_transform.py | 3
-rw-r--r--  tests/sources/remote.py | 44
-rw-r--r--  tests/sources/tar.py | 77
-rw-r--r--  tests/sources/zip.py | 28
-rw-r--r--  tests/testutils/artifactshare.py | 23
-rw-r--r--  tests/testutils/context.py | 4
-rw-r--r--  tests/testutils/http_server.py | 4
-rw-r--r--  tests/testutils/patch.py | 8
-rw-r--r--  tests/testutils/python_repo.py | 10
-rw-r--r--  tests/testutils/repo/bzr.py | 12
-rw-r--r--  tests/testutils/repo/git.py | 12
-rw-r--r--  tests/testutils/setuptools.py | 4
-rw-r--r--  tox.ini | 2
237 files changed, 2159 insertions, 8342 deletions
diff --git a/src/buildstream/_artifact.py b/src/buildstream/_artifact.py
index a06b189ed..1feda2246 100644
--- a/src/buildstream/_artifact.py
+++ b/src/buildstream/_artifact.py
@@ -61,18 +61,10 @@ class Artifact:
self._tmpdir = context.tmpdir
self._proto = None
- self._metadata_keys = (
- None # Strong and weak key tuple extracted from the artifact
- )
- self._metadata_dependencies = (
- None # Dictionary of dependency strong keys from the artifact
- )
- self._metadata_workspaced = (
- None # Boolean of whether it's a workspaced artifact
- )
- self._metadata_workspaced_dependencies = (
- None # List of which dependencies are workspaced from the artifact
- )
+ self._metadata_keys = None # Strong and weak key tuple extracted from the artifact
+ self._metadata_dependencies = None # Dictionary of dependency strong keys from the artifact
+ self._metadata_workspaced = None # Boolean of whether it's a workspaced artifact
+ self._metadata_workspaced_dependencies = None # List of which dependencies are workspaced from the artifact
self._cached = None # Boolean of whether the artifact is cached
# get_files():
@@ -202,10 +194,7 @@ class Artifact:
size += buildtreevdir.get_size()
os.makedirs(
- os.path.dirname(
- os.path.join(self._artifactdir, element.get_artifact_name())
- ),
- exist_ok=True,
+ os.path.dirname(os.path.join(self._artifactdir, element.get_artifact_name())), exist_ok=True,
)
keys = utils._deduplicate([self._cache_key, self._weak_cache_key])
for key in keys:
@@ -321,9 +310,7 @@ class Artifact:
# Extract proto
artifact = self._get_proto()
- self._metadata_dependencies = {
- dep.element_name: dep.cache_key for dep in artifact.build_deps
- }
+ self._metadata_dependencies = {dep.element_name: dep.cache_key for dep in artifact.build_deps}
return self._metadata_dependencies
@@ -388,11 +375,7 @@ class Artifact:
if deps == Scope.BUILD:
try:
dependency_refs = [
- os.path.join(
- dep.project_name,
- _get_normal_name(dep.element_name),
- dep.cache_key,
- )
+ os.path.join(dep.project_name, _get_normal_name(dep.element_name), dep.cache_key,)
for dep in artifact.build_deps
]
except AttributeError:
@@ -410,9 +393,7 @@ class Artifact:
# 1. It might trigger unnecessary rebuilds.
# 2. It would be impossible to support cyclic runtime dependencies
# in the future
- raise ArtifactError(
- "Dependency scope: {} is not supported for artifacts".format(deps)
- )
+ raise ArtifactError("Dependency scope: {} is not supported for artifacts".format(deps))
return dependency_refs
@@ -442,17 +423,13 @@ class Artifact:
# Determine whether directories are required
require_directories = context.require_artifact_directories
# Determine whether file contents are required as well
- require_files = (
- context.require_artifact_files or self._element._artifact_files_required()
- )
+ require_files = context.require_artifact_files or self._element._artifact_files_required()
# Check whether 'files' subdirectory is available, with or without file contents
if (
require_directories
and str(artifact.files)
- and not self._cas.contains_directory(
- artifact.files, with_files=require_files
- )
+ and not self._cas.contains_directory(artifact.files, with_files=require_files)
):
self._cached = False
return False
@@ -500,9 +477,7 @@ class Artifact:
key = self.get_extract_key()
- proto_path = os.path.join(
- self._artifactdir, self._element.get_artifact_name(key=key)
- )
+ proto_path = os.path.join(self._artifactdir, self._element.get_artifact_name(key=key))
artifact = ArtifactProto()
try:
with open(proto_path, mode="r+b") as f:
diff --git a/src/buildstream/_artifactcache.py b/src/buildstream/_artifactcache.py
index 2eb738db1..40b23e126 100644
--- a/src/buildstream/_artifactcache.py
+++ b/src/buildstream/_artifactcache.py
@@ -195,12 +195,7 @@ class ArtifactCache(BaseCache):
# ([str]) - A list of artifact names as generated in LRU order
#
def list_artifacts(self, *, glob=None):
- return [
- ref
- for _, ref in sorted(
- list(self._list_refs_mtimes(self.artifactdir, glob_expr=glob))
- )
- ]
+ return [ref for _, ref in sorted(list(self._list_refs_mtimes(self.artifactdir, glob_expr=glob)))]
# remove():
#
@@ -239,9 +234,7 @@ class ArtifactCache(BaseCache):
removed = []
modified = []
- self.cas.diff_trees(
- digest_a, digest_b, added=added, removed=removed, modified=modified
- )
+ self.cas.diff_trees(digest_a, digest_b, added=added, removed=removed, modified=modified)
return modified, removed, added
@@ -271,14 +264,10 @@ class ArtifactCache(BaseCache):
# can perform file checks on their end
for remote in storage_remotes:
remote.init()
- element.status(
- "Pushing data from artifact {} -> {}".format(display_key, remote)
- )
+ element.status("Pushing data from artifact {} -> {}".format(display_key, remote))
if self._push_artifact_blobs(artifact, remote):
- element.info(
- "Pushed data from artifact {} -> {}".format(display_key, remote)
- )
+ element.info("Pushed data from artifact {} -> {}".format(display_key, remote))
else:
element.info(
"Remote ({}) already has all data of artifact {} cached".format(
@@ -295,9 +284,7 @@ class ArtifactCache(BaseCache):
pushed = True
else:
element.info(
- "Remote ({}) already has artifact {} cached".format(
- remote, element._get_brief_display_key()
- )
+ "Remote ({}) already has artifact {} cached".format(remote, element._get_brief_display_key())
)
return pushed
@@ -331,19 +318,14 @@ class ArtifactCache(BaseCache):
element.info("Pulled artifact {} <- {}".format(display_key, remote))
break
else:
- element.info(
- "Remote ({}) does not have artifact {} cached".format(
- remote, display_key
- )
- )
+ element.info("Remote ({}) does not have artifact {} cached".format(remote, display_key))
except CASError as e:
element.warn("Could not pull from remote {}: {}".format(remote, e))
errors.append(e)
if errors and not artifact:
raise ArtifactError(
- "Failed to pull artifact {}".format(display_key),
- detail="\n".join(str(e) for e in errors),
+ "Failed to pull artifact {}".format(display_key), detail="\n".join(str(e) for e in errors),
)
# If we don't have an artifact, we can't exactly pull our
@@ -356,31 +338,20 @@ class ArtifactCache(BaseCache):
for remote in self._storage_remotes[project]:
remote.init()
try:
- element.status(
- "Pulling data for artifact {} <- {}".format(display_key, remote)
- )
+ element.status("Pulling data for artifact {} <- {}".format(display_key, remote))
- if self._pull_artifact_storage(
- element, artifact, remote, pull_buildtrees=pull_buildtrees
- ):
- element.info(
- "Pulled data for artifact {} <- {}".format(display_key, remote)
- )
+ if self._pull_artifact_storage(element, artifact, remote, pull_buildtrees=pull_buildtrees):
+ element.info("Pulled data for artifact {} <- {}".format(display_key, remote))
return True
- element.info(
- "Remote ({}) does not have artifact {} cached".format(
- remote, display_key
- )
- )
+ element.info("Remote ({}) does not have artifact {} cached".format(remote, display_key))
except CASError as e:
element.warn("Could not pull from remote {}: {}".format(remote, e))
errors.append(e)
if errors:
raise ArtifactError(
- "Failed to pull artifact {}".format(display_key),
- detail="\n".join(str(e) for e in errors),
+ "Failed to pull artifact {}".format(display_key), detail="\n".join(str(e) for e in errors),
)
return False
@@ -424,8 +395,7 @@ class ArtifactCache(BaseCache):
if not push_remotes:
raise ArtifactError(
- "push_message was called, but no remote artifact "
- + "servers are configured as push remotes."
+ "push_message was called, but no remote artifact " + "servers are configured as push remotes."
)
for remote in push_remotes:
@@ -448,8 +418,7 @@ class ArtifactCache(BaseCache):
if not os.path.exists(os.path.join(self.artifactdir, newref)):
os.link(
- os.path.join(self.artifactdir, oldref),
- os.path.join(self.artifactdir, newref),
+ os.path.join(self.artifactdir, oldref), os.path.join(self.artifactdir, newref),
)
# get_artifact_logs():
@@ -622,15 +591,11 @@ class ArtifactCache(BaseCache):
except CASRemoteError as cas_error:
if cas_error.reason != "cache-too-full":
- raise ArtifactError(
- "Failed to push artifact blobs: {}".format(cas_error)
- )
+ raise ArtifactError("Failed to push artifact blobs: {}".format(cas_error))
return False
except grpc.RpcError as e:
if e.code() != grpc.StatusCode.RESOURCE_EXHAUSTED:
- raise ArtifactError(
- "Failed to push artifact blobs: {}".format(e.details())
- )
+ raise ArtifactError("Failed to push artifact blobs: {}".format(e.details()))
return False
return True
@@ -655,9 +620,7 @@ class ArtifactCache(BaseCache):
artifact_proto = artifact._get_proto()
- keys = list(
- utils._deduplicate([artifact_proto.strong_key, artifact_proto.weak_key])
- )
+ keys = list(utils._deduplicate([artifact_proto.strong_key, artifact_proto.weak_key]))
# Check whether the artifact is on the server
for key in keys:
@@ -665,18 +628,14 @@ class ArtifactCache(BaseCache):
remote.get_artifact(element.get_artifact_name(key=key))
except grpc.RpcError as e:
if e.code() != grpc.StatusCode.NOT_FOUND:
- raise ArtifactError(
- "Error checking artifact cache: {}".format(e.details())
- )
+ raise ArtifactError("Error checking artifact cache: {}".format(e.details()))
else:
return False
# If not, we send the artifact proto
for key in keys:
try:
- remote.update_artifact(
- element.get_artifact_name(key=key), artifact_proto
- )
+ remote.update_artifact(element.get_artifact_name(key=key), artifact_proto)
except grpc.RpcError as e:
raise ArtifactError("Failed to push artifact: {}".format(e.details()))
diff --git a/src/buildstream/_artifactelement.py b/src/buildstream/_artifactelement.py
index dfdd751a3..1c1c5db46 100644
--- a/src/buildstream/_artifactelement.py
+++ b/src/buildstream/_artifactelement.py
@@ -173,15 +173,11 @@ class ArtifactElement(Element):
#
def verify_artifact_ref(ref):
try:
- project, element, key = ref.split(
- "/", 2
- ) # This will raise a Value error if unable to split
+ project, element, key = ref.split("/", 2) # This will raise a Value error if unable to split
# Explicitly raise a ValueError if the key length is not as expected
if not _cachekey.is_key(key):
raise ValueError
except ValueError:
- raise ArtifactElementError(
- "Artifact: {} is not of the expected format".format(ref)
- )
+ raise ArtifactElementError("Artifact: {} is not of the expected format".format(ref))
return project, element, key
diff --git a/src/buildstream/_basecache.py b/src/buildstream/_basecache.py
index d277fa504..f4b5c602f 100644
--- a/src/buildstream/_basecache.py
+++ b/src/buildstream/_basecache.py
@@ -68,9 +68,7 @@ class BaseCache:
# against fork() with open gRPC channels.
#
def has_open_grpc_channels(self):
- for project_remotes in chain(
- self._index_remotes.values(), self._storage_remotes.values()
- ):
+ for project_remotes in chain(self._index_remotes.values(), self._storage_remotes.values()):
for remote in project_remotes:
if remote.channel:
return True
@@ -82,9 +80,7 @@ class BaseCache:
#
def close_grpc_channels(self):
# Close all remotes and their gRPC channels
- for project_remotes in chain(
- self._index_remotes.values(), self._storage_remotes.values()
- ):
+ for project_remotes in chain(self._index_remotes.values(), self._storage_remotes.values()):
for remote in project_remotes:
remote.close()
@@ -152,9 +148,7 @@ class BaseCache:
project_specs = getattr(project, cls.spec_name)
context_specs = getattr(context, cls.spec_name)
- return list(
- utils._deduplicate(project_extra_specs + project_specs + context_specs)
- )
+ return list(utils._deduplicate(project_extra_specs + project_specs + context_specs))
# setup_remotes():
#
@@ -207,9 +201,7 @@ class BaseCache:
# on_failure (callable): Called if we fail to contact one of the caches.
#
def initialize_remotes(self, *, on_failure=None):
- index_remotes, storage_remotes = self._create_remote_instances(
- on_failure=on_failure
- )
+ index_remotes, storage_remotes = self._create_remote_instances(on_failure=on_failure)
# Assign remote instances to their respective projects
for project in self.context.get_projects():
@@ -232,12 +224,8 @@ class BaseCache:
yield remote_list[remote_spec]
- self._index_remotes[project] = list(
- get_remotes(index_remotes, remote_specs)
- )
- self._storage_remotes[project] = list(
- get_remotes(storage_remotes, remote_specs)
- )
+ self._index_remotes[project] = list(get_remotes(index_remotes, remote_specs))
+ self._storage_remotes[project] = list(get_remotes(storage_remotes, remote_specs))
# has_fetch_remotes():
#
@@ -409,13 +397,10 @@ class BaseCache:
def _initialize_remotes(self):
def remote_failed(remote, error):
self._message(
- MessageType.WARN,
- "Failed to initialize remote {}: {}".format(remote.url, error),
+ MessageType.WARN, "Failed to initialize remote {}: {}".format(remote.url, error),
)
- with self.context.messenger.timed_activity(
- "Initializing remote caches", silent_nested=True
- ):
+ with self.context.messenger.timed_activity("Initializing remote caches", silent_nested=True):
self.initialize_remotes(on_failure=remote_failed)
# _list_refs_mtimes()
@@ -442,9 +427,7 @@ class BaseCache:
for root, _, files in os.walk(path):
for filename in files:
ref_path = os.path.join(root, filename)
- relative_path = os.path.relpath(
- ref_path, base_path
- ) # Relative to refs head
+ relative_path = os.path.relpath(ref_path, base_path) # Relative to refs head
if not glob_expr or fnmatch(relative_path, glob_expr):
# Obtain the mtime (the time a file was last modified)
yield (os.path.getmtime(ref_path), relative_path)
diff --git a/src/buildstream/_cachekey.py b/src/buildstream/_cachekey.py
index 8c6382bd5..dd9207516 100644
--- a/src/buildstream/_cachekey.py
+++ b/src/buildstream/_cachekey.py
@@ -62,7 +62,5 @@ def is_key(key):
# (str): An sha256 hex digest of the given value
#
def generate_key(value):
- ustring = ujson.dumps(value, sort_keys=True, escape_forward_slashes=False).encode(
- "utf-8"
- )
+ ustring = ujson.dumps(value, sort_keys=True, escape_forward_slashes=False).encode("utf-8")
return hashlib.sha256(ustring).hexdigest()
diff --git a/src/buildstream/_cas/cascache.py b/src/buildstream/_cas/cascache.py
index 3caa745da..f1df9d1a2 100644
--- a/src/buildstream/_cas/cascache.py
+++ b/src/buildstream/_cas/cascache.py
@@ -73,13 +73,7 @@ class CASLogLevel(FastEnum):
#
class CASCache:
def __init__(
- self,
- path,
- *,
- casd=True,
- cache_quota=None,
- protect_session_blobs=True,
- log_level=CASLogLevel.WARNING
+ self, path, *, casd=True, cache_quota=None, protect_session_blobs=True, log_level=CASLogLevel.WARNING
):
self.casdir = os.path.join(path, "cas")
self.tmpdir = os.path.join(path, "tmp")
@@ -97,9 +91,7 @@ class CASCache:
# Place socket in global/user temporary directory to avoid hitting
# the socket path length limit.
self._casd_socket_tempdir = tempfile.mkdtemp(prefix="buildstream")
- self._casd_socket_path = os.path.join(
- self._casd_socket_tempdir, "casd.sock"
- )
+ self._casd_socket_path = os.path.join(self._casd_socket_tempdir, "casd.sock")
casd_args = [utils.get_host_tool("buildbox-casd")]
casd_args.append("--bind=unix:" + self._casd_socket_path)
@@ -155,24 +147,16 @@ class CASCache:
# casd is not ready yet, try again after a 10ms delay,
# but don't wait for more than 15s
if time.time() > self._casd_start_time + 15:
- raise CASCacheError(
- "Timed out waiting for buildbox-casd to become ready"
- )
+ raise CASCacheError("Timed out waiting for buildbox-casd to become ready")
time.sleep(0.01)
self._casd_channel = grpc.insecure_channel("unix:" + self._casd_socket_path)
- self._casd_cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(
- self._casd_channel
- )
- self._local_cas = local_cas_pb2_grpc.LocalContentAddressableStorageStub(
- self._casd_channel
- )
+ self._casd_cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(self._casd_channel)
+ self._local_cas = local_cas_pb2_grpc.LocalContentAddressableStorageStub(self._casd_channel)
# Call GetCapabilities() to establish connection to casd
- capabilities = remote_execution_pb2_grpc.CapabilitiesStub(
- self._casd_channel
- )
+ capabilities = remote_execution_pb2_grpc.CapabilitiesStub(self._casd_channel)
capabilities.GetCapabilities(remote_execution_pb2.GetCapabilitiesRequest())
# _get_cas():
@@ -201,9 +185,7 @@ class CASCache:
headdir = os.path.join(self.casdir, "refs", "heads")
objdir = os.path.join(self.casdir, "objects")
if not (os.path.isdir(headdir) and os.path.isdir(objdir)):
- raise CASCacheError(
- "CAS repository check failed for '{}'".format(self.casdir)
- )
+ raise CASCacheError("CAS repository check failed for '{}'".format(self.casdir))
# has_open_grpc_channels():
#
@@ -289,9 +271,7 @@ class CASCache:
if e.code() == grpc.StatusCode.NOT_FOUND:
return False
if e.code() == grpc.StatusCode.UNIMPLEMENTED:
- raise CASCacheError(
- "Unsupported buildbox-casd version: FetchTree unimplemented"
- ) from e
+ raise CASCacheError("Unsupported buildbox-casd version: FetchTree unimplemented") from e
raise
# checkout():
@@ -414,15 +394,7 @@ class CASCache:
#
# Either `path` or `buffer` must be passed, but not both.
#
- def add_object(
- self,
- *,
- digest=None,
- path=None,
- buffer=None,
- link_directly=False,
- instance_name=None
- ):
+ def add_object(self, *, digest=None, path=None, buffer=None, link_directly=False, instance_name=None):
# Exactly one of the two parameters has to be specified
assert (path is None) != (buffer is None)
@@ -450,21 +422,13 @@ class CASCache:
response = local_cas.CaptureFiles(request)
if len(response.responses) != 1:
- raise CASCacheError(
- "Expected 1 response from CaptureFiles, got {}".format(
- len(response.responses)
- )
- )
+ raise CASCacheError("Expected 1 response from CaptureFiles, got {}".format(len(response.responses)))
blob_response = response.responses[0]
if blob_response.status.code == code_pb2.RESOURCE_EXHAUSTED:
raise CASCacheError("Cache too full", reason="cache-too-full")
if blob_response.status.code != code_pb2.OK:
- raise CASCacheError(
- "Failed to capture blob {}: {}".format(
- path, blob_response.status.code
- )
- )
+ raise CASCacheError("Failed to capture blob {}: {}".format(path, blob_response.status.code))
digest.CopyFrom(blob_response.digest)
return digest
@@ -487,19 +451,13 @@ class CASCache:
response = local_cas.CaptureTree(request)
if len(response.responses) != 1:
- raise CASCacheError(
- "Expected 1 response from CaptureTree, got {}".format(
- len(response.responses)
- )
- )
+ raise CASCacheError("Expected 1 response from CaptureTree, got {}".format(len(response.responses)))
tree_response = response.responses[0]
if tree_response.status.code == code_pb2.RESOURCE_EXHAUSTED:
raise CASCacheError("Cache too full", reason="cache-too-full")
if tree_response.status.code != code_pb2.OK:
- raise CASCacheError(
- "Failed to capture tree {}: {}".format(path, tree_response.status.code)
- )
+ raise CASCacheError("Failed to capture tree {}: {}".format(path, tree_response.status.code))
treepath = self.objpath(tree_response.tree_digest)
tree = remote_execution_pb2.Tree()
@@ -547,9 +505,7 @@ class CASCache:
return digest
except FileNotFoundError as e:
- raise CASCacheError(
- "Attempt to access unavailable ref: {}".format(e)
- ) from e
+ raise CASCacheError("Attempt to access unavailable ref: {}".format(e)) from e
# update_mtime()
#
@@ -562,9 +518,7 @@ class CASCache:
try:
os.utime(self._refpath(ref))
except FileNotFoundError as e:
- raise CASCacheError(
- "Attempt to access unavailable ref: {}".format(e)
- ) from e
+ raise CASCacheError("Attempt to access unavailable ref: {}".format(e)) from e
# remove():
#
@@ -616,9 +570,7 @@ class CASCache:
missing_blobs = dict()
# Limit size of FindMissingBlobs request
for required_blobs_group in _grouper(iter(blobs), 512):
- request = remote_execution_pb2.FindMissingBlobsRequest(
- instance_name=instance_name
- )
+ request = remote_execution_pb2.FindMissingBlobsRequest(instance_name=instance_name)
for required_digest in required_blobs_group:
d = request.blob_digests.add()
@@ -627,12 +579,8 @@ class CASCache:
try:
response = cas.FindMissingBlobs(request)
except grpc.RpcError as e:
- if e.code() == grpc.StatusCode.INVALID_ARGUMENT and e.details().startswith(
- "Invalid instance name"
- ):
- raise CASCacheError(
- "Unsupported buildbox-casd version: FindMissingBlobs failed"
- ) from e
+ if e.code() == grpc.StatusCode.INVALID_ARGUMENT and e.details().startswith("Invalid instance name"):
+ raise CASCacheError("Unsupported buildbox-casd version: FindMissingBlobs failed") from e
raise
for missing_digest in response.missing_blob_digests:
@@ -698,14 +646,10 @@ class CASCache:
a = 0
b = 0
while a < len(dir_a.files) or b < len(dir_b.files):
- if b < len(dir_b.files) and (
- a >= len(dir_a.files) or dir_a.files[a].name > dir_b.files[b].name
- ):
+ if b < len(dir_b.files) and (a >= len(dir_a.files) or dir_a.files[a].name > dir_b.files[b].name):
added.append(os.path.join(path, dir_b.files[b].name))
b += 1
- elif a < len(dir_a.files) and (
- b >= len(dir_b.files) or dir_b.files[b].name > dir_a.files[a].name
- ):
+ elif a < len(dir_a.files) and (b >= len(dir_b.files) or dir_b.files[b].name > dir_a.files[a].name):
removed.append(os.path.join(path, dir_a.files[a].name))
a += 1
else:
@@ -719,8 +663,7 @@ class CASCache:
b = 0
while a < len(dir_a.directories) or b < len(dir_b.directories):
if b < len(dir_b.directories) and (
- a >= len(dir_a.directories)
- or dir_a.directories[a].name > dir_b.directories[b].name
+ a >= len(dir_a.directories) or dir_a.directories[a].name > dir_b.directories[b].name
):
self.diff_trees(
None,
@@ -732,8 +675,7 @@ class CASCache:
)
b += 1
elif a < len(dir_a.directories) and (
- b >= len(dir_b.directories)
- or dir_b.directories[b].name > dir_a.directories[a].name
+ b >= len(dir_b.directories) or dir_b.directories[b].name > dir_a.directories[a].name
):
self.diff_trees(
dir_a.directories[a].digest,
@@ -838,9 +780,7 @@ class CASCache:
break
# Something went wrong here
- raise CASCacheError(
- "System error while removing ref '{}': {}".format(ref, e)
- ) from e
+ raise CASCacheError("System error while removing ref '{}': {}".format(ref, e)) from e
def _get_subdir(self, tree, subdir):
head, name = os.path.split(subdir)
@@ -858,9 +798,7 @@ class CASCache:
raise CASCacheError("Subdirectory {} not found".format(name))
- def _reachable_refs_dir(
- self, reachable, tree, update_mtime=False, check_exists=False
- ):
+ def _reachable_refs_dir(self, reachable, tree, update_mtime=False, check_exists=False):
if tree.hash in reachable:
return
try:
@@ -891,10 +829,7 @@ class CASCache:
for dirnode in directory.directories:
self._reachable_refs_dir(
- reachable,
- dirnode.digest,
- update_mtime=update_mtime,
- check_exists=check_exists,
+ reachable, dirnode.digest, update_mtime=update_mtime, check_exists=check_exists,
)
# _temporary_object():
@@ -943,9 +878,7 @@ class CASCache:
return _CASBatchRead(remote)
# Helper function for _fetch_directory().
- def _fetch_directory_node(
- self, remote, digest, batch, fetch_queue, fetch_next_queue, *, recursive=False
- ):
+ def _fetch_directory_node(self, remote, digest, batch, fetch_queue, fetch_next_queue, *, recursive=False):
in_local_cache = os.path.exists(self.objpath(digest))
if in_local_cache:
@@ -985,9 +918,7 @@ class CASCache:
while len(fetch_queue) + len(fetch_next_queue) > 0:
if not fetch_queue:
- batch = self._fetch_directory_batch(
- remote, batch, fetch_queue, fetch_next_queue
- )
+ batch = self._fetch_directory_batch(remote, batch, fetch_queue, fetch_next_queue)
dir_digest = fetch_queue.pop(0)
@@ -999,12 +930,7 @@ class CASCache:
for dirnode in directory.directories:
batch = self._fetch_directory_node(
- remote,
- dirnode.digest,
- batch,
- fetch_queue,
- fetch_next_queue,
- recursive=True,
+ remote, dirnode.digest, batch, fetch_queue, fetch_next_queue, recursive=True,
)
# Fetch final batch
@@ -1116,10 +1042,7 @@ class CASCache:
if messenger:
messenger.message(
- Message(
- MessageType.WARN,
- "Buildbox-casd didn't exit in time and has been killed",
- )
+ Message(MessageType.WARN, "Buildbox-casd didn't exit in time and has been killed",)
)
self._casd_process = None
return
@@ -1155,9 +1078,7 @@ class CASCache:
# (subprocess.Process): The casd process that is used for the current cascache
#
def get_casd_process(self):
- assert (
- self._casd_process is not None
- ), "This should only be called with a running buildbox-casd process"
+ assert self._casd_process is not None, "This should only be called with a running buildbox-casd process"
return self._casd_process
diff --git a/src/buildstream/_cas/casremote.py b/src/buildstream/_cas/casremote.py
index f6be2cdab..a0308bdbf 100644
--- a/src/buildstream/_cas/casremote.py
+++ b/src/buildstream/_cas/casremote.py
@@ -83,9 +83,7 @@ class CASRemote(BaseRemote):
self.init()
- return self.cascache.add_object(
- buffer=message_buffer, instance_name=self.local_cas_instance_name
- )
+ return self.cascache.add_object(buffer=message_buffer, instance_name=self.local_cas_instance_name)
# Represents a batch of blobs queued for fetching.
@@ -125,25 +123,19 @@ class _CASBatchRead:
if missing_blobs is None:
raise BlobNotFound(
response.digest.hash,
- "Failed to download blob {}: {}".format(
- response.digest.hash, response.status.code
- ),
+ "Failed to download blob {}: {}".format(response.digest.hash, response.status.code),
)
missing_blobs.append(response.digest)
if response.status.code != code_pb2.OK:
raise CASRemoteError(
- "Failed to download blob {}: {}".format(
- response.digest.hash, response.status.code
- )
+ "Failed to download blob {}: {}".format(response.digest.hash, response.status.code)
)
if response.digest.size_bytes != len(response.data):
raise CASRemoteError(
"Failed to download blob {}: expected {} bytes, received {} bytes".format(
- response.digest.hash,
- response.digest.size_bytes,
- len(response.data),
+ response.digest.hash, response.digest.size_bytes, len(response.data),
)
)
@@ -188,8 +180,6 @@ class _CASBatchUpdate:
reason = None
raise CASRemoteError(
- "Failed to upload blob {}: {}".format(
- response.digest.hash, response.status.code
- ),
+ "Failed to upload blob {}: {}".format(response.digest.hash, response.status.code),
reason=reason,
)
diff --git a/src/buildstream/_cas/casserver.py b/src/buildstream/_cas/casserver.py
index 327b087c4..77f51256c 100644
--- a/src/buildstream/_cas/casserver.py
+++ b/src/buildstream/_cas/casserver.py
@@ -67,9 +67,7 @@ _MAX_PAYLOAD_BYTES = 1024 * 1024
#
@contextmanager
def create_server(repo, *, enable_push, quota, index_only):
- cas = CASCache(
- os.path.abspath(repo), cache_quota=quota, protect_session_blobs=False
- )
+ cas = CASCache(os.path.abspath(repo), cache_quota=quota, protect_session_blobs=False)
try:
artifactdir = os.path.join(os.path.abspath(repo), "artifacts", "refs")
@@ -88,9 +86,7 @@ def create_server(repo, *, enable_push, quota, index_only):
_ContentAddressableStorageServicer(cas, enable_push=enable_push), server
)
- remote_execution_pb2_grpc.add_CapabilitiesServicer_to_server(
- _CapabilitiesServicer(), server
- )
+ remote_execution_pb2_grpc.add_CapabilitiesServicer_to_server(_CapabilitiesServicer(), server)
buildstream_pb2_grpc.add_ReferenceStorageServicer_to_server(
_ReferenceStorageServicer(cas, enable_push=enable_push), server
@@ -100,22 +96,13 @@ def create_server(repo, *, enable_push, quota, index_only):
_ArtifactServicer(cas, artifactdir, update_cas=not index_only), server
)
- source_pb2_grpc.add_SourceServiceServicer_to_server(
- _SourceServicer(sourcedir), server
- )
+ source_pb2_grpc.add_SourceServiceServicer_to_server(_SourceServicer(sourcedir), server)
# Create up reference storage and artifact capabilities
- artifact_capabilities = buildstream_pb2.ArtifactCapabilities(
- allow_updates=enable_push
- )
- source_capabilities = buildstream_pb2.SourceCapabilities(
- allow_updates=enable_push
- )
+ artifact_capabilities = buildstream_pb2.ArtifactCapabilities(allow_updates=enable_push)
+ source_capabilities = buildstream_pb2.SourceCapabilities(allow_updates=enable_push)
buildstream_pb2_grpc.add_CapabilitiesServicer_to_server(
- _BuildStreamCapabilitiesServicer(
- artifact_capabilities, source_capabilities
- ),
- server,
+ _BuildStreamCapabilitiesServicer(artifact_capabilities, source_capabilities), server,
)
yield server
@@ -130,16 +117,10 @@ def create_server(repo, *, enable_push, quota, index_only):
@click.option("--server-cert", help="Public server certificate for TLS (PEM-encoded)")
@click.option("--client-certs", help="Public client certificates for TLS (PEM-encoded)")
@click.option(
- "--enable-push",
- is_flag=True,
- help="Allow clients to upload blobs and update artifact cache",
+ "--enable-push", is_flag=True, help="Allow clients to upload blobs and update artifact cache",
)
@click.option(
- "--quota",
- type=click.INT,
- default=10e9,
- show_default=True,
- help="Maximum disk usage in bytes",
+ "--quota", type=click.INT, default=10e9, show_default=True, help="Maximum disk usage in bytes",
)
@click.option(
"--index-only",
@@ -147,31 +128,24 @@ def create_server(repo, *, enable_push, quota, index_only):
help='Only provide the BuildStream artifact and source services ("index"), not the CAS ("storage")',
)
@click.argument("repo")
-def server_main(
- repo, port, server_key, server_cert, client_certs, enable_push, quota, index_only
-):
+def server_main(repo, port, server_key, server_cert, client_certs, enable_push, quota, index_only):
# Handle SIGTERM by calling sys.exit(0), which will raise a SystemExit exception,
# properly executing cleanup code in `finally` clauses and context managers.
# This is required to terminate buildbox-casd on SIGTERM.
signal.signal(signal.SIGTERM, lambda signalnum, frame: sys.exit(0))
- with create_server(
- repo, quota=quota, enable_push=enable_push, index_only=index_only
- ) as server:
+ with create_server(repo, quota=quota, enable_push=enable_push, index_only=index_only) as server:
use_tls = bool(server_key)
if bool(server_cert) != use_tls:
click.echo(
- "ERROR: --server-key and --server-cert are both required for TLS",
- err=True,
+ "ERROR: --server-key and --server-cert are both required for TLS", err=True,
)
sys.exit(-1)
if client_certs and not use_tls:
- click.echo(
- "ERROR: --client-certs can only be used with --server-key", err=True
- )
+ click.echo("ERROR: --client-certs can only be used with --server-key", err=True)
sys.exit(-1)
if use_tls:
@@ -274,9 +248,7 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
break
try:
- os.posix_fallocate(
- out.fileno(), 0, client_digest.size_bytes
- )
+ os.posix_fallocate(out.fileno(), 0, client_digest.size_bytes)
break
except OSError as e:
# Multiple upload can happen in the same time
@@ -322,9 +294,7 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
return response
-class _ContentAddressableStorageServicer(
- remote_execution_pb2_grpc.ContentAddressableStorageServicer
-):
+class _ContentAddressableStorageServicer(remote_execution_pb2_grpc.ContentAddressableStorageServicer):
def __init__(self, cas, *, enable_push):
super().__init__()
self.cas = cas
@@ -426,9 +396,7 @@ class _CapabilitiesServicer(remote_execution_pb2_grpc.CapabilitiesServicer):
cache_capabilities.digest_function.append(remote_execution_pb2.SHA256)
cache_capabilities.action_cache_update_capabilities.update_enabled = False
cache_capabilities.max_batch_total_size_bytes = _MAX_PAYLOAD_BYTES
- cache_capabilities.symlink_absolute_path_strategy = (
- remote_execution_pb2.CacheCapabilities.ALLOWED
- )
+ cache_capabilities.symlink_absolute_path_strategy = remote_execution_pb2.CacheCapabilities.ALLOWED
response.deprecated_api_version.major = 2
response.low_api_version.major = 2
@@ -574,20 +542,17 @@ class _ArtifactServicer(artifact_pb2_grpc.ArtifactServiceServicer):
directory.ParseFromString(f.read())
except FileNotFoundError:
context.abort(
- grpc.StatusCode.FAILED_PRECONDITION,
- "Artifact {} specified but no files found".format(name),
+ grpc.StatusCode.FAILED_PRECONDITION, "Artifact {} specified but no files found".format(name),
)
except DecodeError:
context.abort(
- grpc.StatusCode.FAILED_PRECONDITION,
- "Artifact {} specified but directory not found".format(name),
+ grpc.StatusCode.FAILED_PRECONDITION, "Artifact {} specified but directory not found".format(name),
)
def _check_file(self, name, digest, context):
if not os.path.exists(self.cas.objpath(digest)):
context.abort(
- grpc.StatusCode.FAILED_PRECONDITION,
- "Artifact {} specified but not found".format(name),
+ grpc.StatusCode.FAILED_PRECONDITION, "Artifact {} specified but not found".format(name),
)
diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index 17fe691d4..c550a1e62 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -215,9 +215,7 @@ class Context:
# a $XDG_CONFIG_HOME/buildstream.conf file
#
if not config:
- default_config = os.path.join(
- os.environ["XDG_CONFIG_HOME"], "buildstream.conf"
- )
+ default_config = os.path.join(os.environ["XDG_CONFIG_HOME"], "buildstream.conf")
if os.path.exists(default_config):
config = default_config
@@ -232,9 +230,7 @@ class Context:
# Give obsoletion warnings
if "builddir" in defaults:
- raise LoadError(
- "builddir is obsolete, use cachedir", LoadErrorReason.INVALID_DATA
- )
+ raise LoadError("builddir is obsolete, use cachedir", LoadErrorReason.INVALID_DATA)
if "artifactdir" in defaults:
raise LoadError("artifactdir is obsolete", LoadErrorReason.INVALID_DATA)
@@ -271,12 +267,9 @@ class Context:
# Relative paths don't make sense in user configuration. The exception is
# workspacedir where `.` is useful as it will be combined with the name
# specified on the command line.
- if not os.path.isabs(path) and not (
- directory == "workspacedir" and path == "."
- ):
+ if not os.path.isabs(path) and not (directory == "workspacedir" and path == "."):
raise LoadError(
- "{} must be an absolute path".format(directory),
- LoadErrorReason.INVALID_DATA,
+ "{} must be an absolute path".format(directory), LoadErrorReason.INVALID_DATA,
)
# add directories not set by users
@@ -287,11 +280,7 @@ class Context:
# Move old artifact cas to cas if it exists and create symlink
old_casdir = os.path.join(self.cachedir, "artifacts", "cas")
- if (
- os.path.exists(old_casdir)
- and not os.path.islink(old_casdir)
- and not os.path.exists(self.casdir)
- ):
+ if os.path.exists(old_casdir) and not os.path.islink(old_casdir) and not os.path.exists(self.casdir):
os.rename(old_casdir, self.casdir)
os.symlink(self.casdir, old_casdir)
@@ -316,9 +305,7 @@ class Context:
self.config_cache_quota_string = cache.get_str("quota")
try:
- self.config_cache_quota = utils._parse_size(
- self.config_cache_quota_string, cas_volume
- )
+ self.config_cache_quota = utils._parse_size(self.config_cache_quota_string, cas_volume)
except utils.UtilError as e:
raise LoadError(
"{}\nPlease specify the value in bytes or as a % of full disk space.\n"
@@ -335,9 +322,7 @@ class Context:
# Load remote execution config getting pull-artifact-files from it
remote_execution = defaults.get_mapping("remote-execution", default=None)
if remote_execution:
- self.pull_artifact_files = remote_execution.get_bool(
- "pull-artifact-files", default=True
- )
+ self.pull_artifact_files = remote_execution.get_bool("pull-artifact-files", default=True)
# This stops it being used in the remote service set up
remote_execution.safe_del("pull-artifact-files")
# Don't pass the remote execution settings if that was the only option
@@ -357,15 +342,7 @@ class Context:
# Load logging config
logging = defaults.get_mapping("logging")
logging.validate_keys(
- [
- "key-length",
- "verbose",
- "error-lines",
- "message-lines",
- "debug",
- "element-format",
- "message-format",
- ]
+ ["key-length", "verbose", "error-lines", "message-lines", "debug", "element-format", "message-format",]
)
self.log_key_length = logging.get_int("key-length")
self.log_debug = logging.get_bool("debug")
@@ -377,9 +354,7 @@ class Context:
# Load scheduler config
scheduler = defaults.get_mapping("scheduler")
- scheduler.validate_keys(
- ["on-error", "fetchers", "builders", "pushers", "network-retries"]
- )
+ scheduler.validate_keys(["on-error", "fetchers", "builders", "pushers", "network-retries"])
self.sched_error_action = scheduler.get_enum("on-error", _SchedulerErrorAction)
self.sched_fetchers = scheduler.get_int("fetchers")
self.sched_builders = scheduler.get_int("builders")
@@ -395,9 +370,7 @@ class Context:
if self.build_dependencies not in ["plan", "all"]:
provenance = build.get_scalar("dependencies").get_provenance()
raise LoadError(
- "{}: Invalid value for 'dependencies'. Choose 'plan' or 'all'.".format(
- provenance
- ),
+ "{}: Invalid value for 'dependencies'. Choose 'plan' or 'all'.".format(provenance),
LoadErrorReason.INVALID_DATA,
)
@@ -408,14 +381,7 @@ class Context:
# on the overrides are expected to validate elsewhere.
for overrides in self._project_overrides.values():
overrides.validate_keys(
- [
- "artifacts",
- "source-caches",
- "options",
- "strict",
- "default-mirror",
- "remote-execution",
- ]
+ ["artifacts", "source-caches", "options", "strict", "default-mirror", "remote-execution",]
)
@property
@@ -567,10 +533,7 @@ class Context:
log_level = CASLogLevel.WARNING
self._cascache = CASCache(
- self.cachedir,
- casd=self.use_casd,
- cache_quota=self.config_cache_quota,
- log_level=log_level,
+ self.cachedir, casd=self.use_casd, cache_quota=self.config_cache_quota, log_level=log_level,
)
return self._cascache
diff --git a/src/buildstream/_exceptions.py b/src/buildstream/_exceptions.py
index f05e38162..89dfb49be 100644
--- a/src/buildstream/_exceptions.py
+++ b/src/buildstream/_exceptions.py
@@ -52,9 +52,7 @@ def get_last_exception():
#
def get_last_task_error():
if "BST_TEST_SUITE" not in os.environ:
- raise BstError(
- "Getting the last task error is only supported when running tests"
- )
+ raise BstError("Getting the last task error is only supported when running tests")
global _last_task_error_domain
global _last_task_error_reason
@@ -109,9 +107,7 @@ class ErrorDomain(Enum):
# context can then be communicated back to the main process.
#
class BstError(Exception):
- def __init__(
- self, message, *, detail=None, domain=None, reason=None, temporary=False
- ):
+ def __init__(self, message, *, detail=None, domain=None, reason=None, temporary=False):
global _last_exception
super().__init__(message)
@@ -148,9 +144,7 @@ class BstError(Exception):
#
class PluginError(BstError):
def __init__(self, message, reason=None, temporary=False):
- super().__init__(
- message, domain=ErrorDomain.PLUGIN, reason=reason, temporary=False
- )
+ super().__init__(message, domain=ErrorDomain.PLUGIN, reason=reason, temporary=False)
# LoadErrorReason
@@ -266,9 +260,7 @@ class ImplError(BstError):
# Raised if the current platform is not supported.
class PlatformError(BstError):
def __init__(self, message, reason=None, detail=None):
- super().__init__(
- message, domain=ErrorDomain.PLATFORM, reason=reason, detail=detail
- )
+ super().__init__(message, domain=ErrorDomain.PLATFORM, reason=reason, detail=detail)
# SandboxError
@@ -277,9 +269,7 @@ class PlatformError(BstError):
#
class SandboxError(BstError):
def __init__(self, message, detail=None, reason=None):
- super().__init__(
- message, detail=detail, domain=ErrorDomain.SANDBOX, reason=reason
- )
+ super().__init__(message, detail=detail, domain=ErrorDomain.SANDBOX, reason=reason)
# SourceCacheError
@@ -288,9 +278,7 @@ class SandboxError(BstError):
#
class SourceCacheError(BstError):
def __init__(self, message, detail=None, reason=None):
- super().__init__(
- message, detail=detail, domain=ErrorDomain.SANDBOX, reason=reason
- )
+ super().__init__(message, detail=detail, domain=ErrorDomain.SANDBOX, reason=reason)
# ArtifactError
@@ -300,11 +288,7 @@ class SourceCacheError(BstError):
class ArtifactError(BstError):
def __init__(self, message, *, detail=None, reason=None, temporary=False):
super().__init__(
- message,
- detail=detail,
- domain=ErrorDomain.ARTIFACT,
- reason=reason,
- temporary=True,
+ message, detail=detail, domain=ErrorDomain.ARTIFACT, reason=reason, temporary=True,
)
@@ -314,9 +298,7 @@ class ArtifactError(BstError):
#
class RemoteError(BstError):
def __init__(self, message, *, detail=None, reason=None):
- super().__init__(
- message, detail=detail, domain=ErrorDomain.REMOTE, reason=reason
- )
+ super().__init__(message, detail=detail, domain=ErrorDomain.REMOTE, reason=reason)
# CASError
@@ -326,11 +308,7 @@ class RemoteError(BstError):
class CASError(BstError):
def __init__(self, message, *, detail=None, reason=None, temporary=False):
super().__init__(
- message,
- detail=detail,
- domain=ErrorDomain.CAS,
- reason=reason,
- temporary=True,
+ message, detail=detail, domain=ErrorDomain.CAS, reason=reason, temporary=True,
)
@@ -355,9 +333,7 @@ class CASCacheError(CASError):
#
class PipelineError(BstError):
def __init__(self, message, *, detail=None, reason=None):
- super().__init__(
- message, detail=detail, domain=ErrorDomain.PIPELINE, reason=reason
- )
+ super().__init__(message, detail=detail, domain=ErrorDomain.PIPELINE, reason=reason)
# StreamError
@@ -373,9 +349,7 @@ class StreamError(BstError):
if message is None:
message = ""
- super().__init__(
- message, detail=detail, domain=ErrorDomain.STREAM, reason=reason
- )
+ super().__init__(message, detail=detail, domain=ErrorDomain.STREAM, reason=reason)
self.terminated = terminated
@@ -404,6 +378,4 @@ class SkipJob(Exception):
#
class ArtifactElementError(BstError):
def __init__(self, message, *, detail=None, reason=None):
- super().__init__(
- message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason
- )
+ super().__init__(message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason)
diff --git a/src/buildstream/_frontend/app.py b/src/buildstream/_frontend/app.py
index 3be035c0c..9a12f3083 100644
--- a/src/buildstream/_frontend/app.py
+++ b/src/buildstream/_frontend/app.py
@@ -289,8 +289,7 @@ class App:
# don't stop them with an offer to create a project for them.
if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
click.echo(
- "No project found. You can create a new project like so:",
- err=True,
+ "No project found. You can create a new project like so:", err=True,
)
click.echo("", err=True)
click.echo(" bst init", err=True)
@@ -309,13 +308,9 @@ class App:
if session_name:
elapsed = self.stream.elapsed_time
- if (
- isinstance(e, StreamError) and e.terminated
- ): # pylint: disable=no-member
+ if isinstance(e, StreamError) and e.terminated: # pylint: disable=no-member
self._message(
- MessageType.WARN,
- session_name + " Terminated",
- elapsed=elapsed,
+ MessageType.WARN, session_name + " Terminated", elapsed=elapsed,
)
else:
self._message(MessageType.FAIL, session_name, elapsed=elapsed)
@@ -330,8 +325,7 @@ class App:
self._error_exit(e)
except RecursionError:
click.echo(
- "RecursionError: Dependency depth is too large. Maximum recursion depth exceeded.",
- err=True,
+ "RecursionError: Dependency depth is too large. Maximum recursion depth exceeded.", err=True,
)
sys.exit(-1)
@@ -339,9 +333,7 @@ class App:
# No exceptions occurred, print session time and summary
if session_name:
self._message(
- MessageType.SUCCESS,
- session_name,
- elapsed=self.stream.elapsed_time,
+ MessageType.SUCCESS, session_name, elapsed=self.stream.elapsed_time,
)
if self._started:
self._print_summary()
@@ -389,8 +381,7 @@ class App:
# Abort if the project.conf already exists, unless `--force` was specified in `bst init`
if not force and os.path.exists(project_path):
raise AppError(
- "A project.conf already exists at: {}".format(project_path),
- reason="project-exists",
+ "A project.conf already exists at: {}".format(project_path), reason="project-exists",
)
if project_name:
@@ -407,11 +398,7 @@ class App:
)
else:
# Collect the parameters using an interactive session
- (
- project_name,
- format_version,
- element_path,
- ) = self._init_project_interactive(
+ (project_name, format_version, element_path,) = self._init_project_interactive(
project_name, format_version, element_path
)
@@ -419,20 +406,14 @@ class App:
try:
os.makedirs(directory, exist_ok=True)
except IOError as e:
- raise AppError(
- "Error creating project directory {}: {}".format(directory, e)
- ) from e
+ raise AppError("Error creating project directory {}: {}".format(directory, e)) from e
# Create the elements sub-directory if it doesnt exist
elements_path = os.path.join(directory, element_path)
try:
os.makedirs(elements_path, exist_ok=True)
except IOError as e:
- raise AppError(
- "Error creating elements sub-directory {}: {}".format(
- elements_path, e
- )
- ) from e
+ raise AppError("Error creating elements sub-directory {}: {}".format(elements_path, e)) from e
# Dont use ruamel.yaml here, because it doesnt let
# us programatically insert comments or whitespace at
@@ -554,11 +535,7 @@ class App:
def _maybe_render_status(self):
# If we're suspended or terminating, then dont render the status area
- if (
- self._status
- and self.stream
- and not (self.stream.suspended or self.stream.terminated)
- ):
+ if self._status and self.stream and not (self.stream.suspended or self.stream.terminated):
self._status.render()
#
@@ -591,9 +568,7 @@ class App:
try:
choice = click.prompt(
"Choice:",
- value_proc=_prefix_choice_value_proc(
- ["continue", "quit", "terminate"]
- ),
+ value_proc=_prefix_choice_value_proc(["continue", "quit", "terminate"]),
default="continue",
err=True,
)
@@ -641,9 +616,7 @@ class App:
self._status.clear()
click.echo(
"\n\n\nBUG: Message handling out of sync, "
- + "unable to retrieve failure message for element {}\n\n\n\n\n".format(
- full_name
- ),
+ + "unable to retrieve failure message for element {}\n\n\n\n\n".format(full_name),
err=True,
)
else:
@@ -682,9 +655,7 @@ class App:
if failure.logfile:
summary += " (l)og - View the full log file\n"
if failure.sandbox:
- summary += (
- " (s)hell - Drop into a shell in the failed build sandbox\n"
- )
+ summary += " (s)hell - Drop into a shell in the failed build sandbox\n"
summary += "\nPressing ^C will terminate jobs and exit\n"
choices = ["continue", "quit", "terminate", "retry"]
@@ -698,16 +669,12 @@ class App:
click.echo(summary, err=True)
self._notify(
- "BuildStream failure",
- "{} on element {}".format(failure.action_name, full_name),
+ "BuildStream failure", "{} on element {}".format(failure.action_name, full_name),
)
try:
choice = click.prompt(
- "Choice:",
- default="continue",
- err=True,
- value_proc=_prefix_choice_value_proc(choices),
+ "Choice:", default="continue", err=True, value_proc=_prefix_choice_value_proc(choices),
)
except click.Abort:
# Ensure a newline after automatically printed '^C'
@@ -718,26 +685,17 @@ class App:
#
if choice == "shell":
click.echo(
- "\nDropping into an interactive shell in the failed build sandbox\n",
- err=True,
+ "\nDropping into an interactive shell in the failed build sandbox\n", err=True,
)
try:
unique_id, element_key = element
prompt = self.shell_prompt(full_name, element_key)
self.stream.shell(
- None,
- Scope.BUILD,
- prompt,
- isolate=True,
- usebuildtree="always",
- unique_id=unique_id,
+ None, Scope.BUILD, prompt, isolate=True, usebuildtree="always", unique_id=unique_id,
)
except BstError as e:
click.echo(
- "Error while attempting to create interactive shell: {}".format(
- e
- ),
- err=True,
+ "Error while attempting to create interactive shell: {}".format(e), err=True,
)
elif choice == "log":
with open(failure.logfile, "r") as logfile:
@@ -752,9 +710,7 @@ class App:
click.echo("\nCompleting ongoing tasks before quitting\n", err=True)
self.stream.quit()
elif choice == "continue":
- click.echo(
- "\nContinuing with other non failing elements\n", err=True
- )
+ click.echo("\nContinuing with other non failing elements\n", err=True)
elif choice == "retry":
click.echo("\nRetrying failed job\n", err=True)
unique_id = element[0]
@@ -768,10 +724,7 @@ class App:
self._started = True
if self._session_name:
self.logger.print_heading(
- self.project,
- self.stream,
- log_file=self._main_options["log_file"],
- styling=self.colors,
+ self.project, self.stream, log_file=self._main_options["log_file"], styling=self.colors,
)
#
@@ -779,9 +732,7 @@ class App:
#
def _print_summary(self):
click.echo("", err=True)
- self.logger.print_summary(
- self.stream, self._main_options["log_file"], styling=self.colors
- )
+ self.logger.print_summary(self.stream, self._main_options["log_file"], styling=self.colors)
# _error_exit()
#
@@ -797,9 +748,7 @@ class App:
def _error_exit(self, error, prefix=None):
click.echo("", err=True)
- if (
- self.context is None or self.context.log_debug is None
- ): # Context might not be initialized, default to cmd
+ if self.context is None or self.context.log_debug is None: # Context might not be initialized, default to cmd
debug = self._main_options["debug"]
else:
debug = self.context.log_debug
@@ -831,10 +780,7 @@ class App:
return
# Hold on to the failure messages
- if (
- message.message_type in [MessageType.FAIL, MessageType.BUG]
- and message.element_name is not None
- ):
+ if message.message_type in [MessageType.FAIL, MessageType.BUG] and message.element_name is not None:
self._fail_messages[message.element_name] = message
# Send to frontend if appropriate
@@ -866,9 +812,8 @@ class App:
# Some validation routines for project initialization
#
def _assert_format_version(self, format_version):
- message = (
- "The version must be supported by this "
- + "version of buildstream (0 - {})\n".format(BST_FORMAT_VERSION)
+ message = "The version must be supported by this " + "version of buildstream (0 - {})\n".format(
+ BST_FORMAT_VERSION
)
# Validate that it is an integer
@@ -911,9 +856,7 @@ class App:
# format_version (int): The user selected format version
# element_path (str): The user selected element path
#
- def _init_project_interactive(
- self, project_name, format_version=BST_FORMAT_VERSION, element_path="elements"
- ):
+ def _init_project_interactive(self, project_name, format_version=BST_FORMAT_VERSION, element_path="elements"):
def project_name_proc(user_input):
try:
node._assert_symbol_name(user_input, "project name")
@@ -940,12 +883,8 @@ class App:
# Collect project name
click.echo("", err=True)
- click.echo(
- self._content_profile.fmt("Choose a unique name for your project"), err=True
- )
- click.echo(
- self._format_profile.fmt("-------------------------------------"), err=True
- )
+ click.echo(self._content_profile.fmt("Choose a unique name for your project"), err=True)
+ click.echo(self._format_profile.fmt("-------------------------------------"), err=True)
click.echo("", err=True)
click.echo(
self._detail_profile.fmt(
@@ -969,25 +908,15 @@ class App:
err=True,
)
click.echo("", err=True)
- project_name = click.prompt(
- self._content_profile.fmt("Project name"),
- value_proc=project_name_proc,
- err=True,
- )
+ project_name = click.prompt(self._content_profile.fmt("Project name"), value_proc=project_name_proc, err=True,)
click.echo("", err=True)
# Collect format version
click.echo(
- self._content_profile.fmt(
- "Select the minimum required format version for your project"
- ),
- err=True,
+ self._content_profile.fmt("Select the minimum required format version for your project"), err=True,
)
click.echo(
- self._format_profile.fmt(
- "-----------------------------------------------------------"
- ),
- err=True,
+ self._format_profile.fmt("-----------------------------------------------------------"), err=True,
)
click.echo("", err=True)
click.echo(
@@ -1047,10 +976,7 @@ class App:
)
click.echo("", err=True)
element_path = click.prompt(
- self._content_profile.fmt("Element path"),
- value_proc=element_path_proc,
- default=element_path,
- err=True,
+ self._content_profile.fmt("Element path"), value_proc=element_path_proc, default=element_path, err=True,
)
return (project_name, format_version, element_path)
@@ -1070,9 +996,7 @@ class App:
#
def _prefix_choice_value_proc(choices):
def value_proc(user_input):
- remaining_candidate = [
- choice for choice in choices if choice.startswith(user_input)
- ]
+ remaining_candidate = [choice for choice in choices if choice.startswith(user_input)]
if not remaining_candidate:
raise UsageError("Expected one of {}, got {}".format(choices, user_input))
@@ -1080,10 +1004,6 @@ def _prefix_choice_value_proc(choices):
if len(remaining_candidate) == 1:
return remaining_candidate[0]
else:
- raise UsageError(
- "Ambiguous input. '{}' can refer to one of {}".format(
- user_input, remaining_candidate
- )
- )
+ raise UsageError("Ambiguous input. '{}' can refer to one of {}".format(user_input, remaining_candidate))
return value_proc
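
The _frontend/app.py hunks above all apply the same mechanical change: calls, conditions and signatures that were previously wrapped are collapsed onto a single line wherever they fit within 119 columns, with the trailing comma from the exploded form left in place (as in the collapsed click.echo(..., err=True,) calls and the (project_name, format_version, element_path,) = ... assignment). This is the kind of output a formatter such as Black produces at a 119-character limit (for example, black --line-length 119); the formatter configuration itself is not part of these hunks. Both spellings are equivalent Python, as this minimal standalone sketch (not taken from the BuildStream sources) shows:

    def shout(text, repeat=1):
        return (text.upper() + " ") * repeat

    # A trailing comma before the closing parenthesis of a call is legal and changes nothing:
    print(shout("no project found", repeat=2,))

    # The same holds for a parenthesised assignment target:
    (name, version, path,) = ("demo", 0, "elements")
    print(name, version, path)
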
diff --git a/src/buildstream/_frontend/cli.py b/src/buildstream/_frontend/cli.py
index 9a73ab375..bffcf3786 100644
--- a/src/buildstream/_frontend/cli.py
+++ b/src/buildstream/_frontend/cli.py
@@ -54,9 +54,7 @@ def search_command(args, *, context=None):
command = command_ctx.command.get_command(command_ctx, cmd)
if command is None:
return None
- command_ctx = command.make_context(
- command.name, [command.name], parent=command_ctx, resilient_parsing=True
- )
+ command_ctx = command.make_context(command.name, [command.name], parent=command_ctx, resilient_parsing=True)
return command_ctx
@@ -64,11 +62,7 @@ def search_command(args, *, context=None):
# Completion for completing command names as help arguments
def complete_commands(cmd, args, incomplete):
command_ctx = search_command(args[1:])
- if (
- command_ctx
- and command_ctx.command
- and isinstance(command_ctx.command, click.MultiCommand)
- ):
+ if command_ctx and command_ctx.command and isinstance(command_ctx.command, click.MultiCommand):
return [
subcommand + " "
for subcommand in command_ctx.command.list_commands(command_ctx)
@@ -108,9 +102,7 @@ def complete_target(args, incomplete):
else:
# Check if this directory or any of its parent directories
# contain a project config file
- base_directory, _ = utils._search_upward_for_files(
- base_directory, [project_conf]
- )
+ base_directory, _ = utils._search_upward_for_files(base_directory, [project_conf])
if base_directory is None:
# No project_conf was found in base_directory or its parents, no need
@@ -164,11 +156,7 @@ def complete_artifact(orig_args, args, incomplete):
# element targets are valid artifact names
complete_list = complete_target(args, incomplete)
- complete_list.extend(
- ref
- for ref in ctx.artifactcache.list_artifacts()
- if ref.startswith(incomplete)
- )
+ complete_list.extend(ref for ref in ctx.artifactcache.list_artifacts() if ref.startswith(incomplete))
return complete_list
@@ -216,10 +204,7 @@ def validate_output_streams():
flags = fcntl.fcntl(fileno, fcntl.F_GETFL)
if flags & os.O_NONBLOCK:
click.echo(
- "{} is currently set to O_NONBLOCK, try opening a new shell".format(
- stream.name
- ),
- err=True,
+ "{} is currently set to O_NONBLOCK, try opening a new shell".format(stream.name), err=True,
)
sys.exit(-1)
@@ -232,8 +217,7 @@ def handle_bst_force_start_method_env():
if existing_start_method is None:
multiprocessing.set_start_method(start_method)
print(
- bst_force_start_method_str
- + ": multiprocessing start method forced to:",
+ bst_force_start_method_str + ": multiprocessing start method forced to:",
start_method,
file=sys.stderr,
flush=True,
@@ -243,16 +227,14 @@ def handle_bst_force_start_method_env():
# multiple times in the same executable, so guard against that
# here.
print(
- bst_force_start_method_str
- + ": multiprocessing start method already set to:",
+ bst_force_start_method_str + ": multiprocessing start method already set to:",
existing_start_method,
file=sys.stderr,
flush=True,
)
else:
print(
- bst_force_start_method_str
- + ": cannot set multiprocessing start method to:",
+ bst_force_start_method_str + ": cannot set multiprocessing start method to:",
start_method,
", already set to:",
existing_start_method,
@@ -262,9 +244,7 @@ def handle_bst_force_start_method_env():
sys.exit(-1)
-def override_main(
- self, args=None, prog_name=None, complete_var=None, standalone_mode=True, **extra
-):
+def override_main(self, args=None, prog_name=None, complete_var=None, standalone_mode=True, **extra):
# Hook for the Bash completion. This only activates if the Bash
# completion is actually enabled, otherwise this is quite a fast
@@ -295,14 +275,7 @@ def override_main(
    # case of testing, our tests precede our entrypoint, so we do our best.
handle_bst_force_start_method_env()
- original_main(
- self,
- args=args,
- prog_name=prog_name,
- complete_var=None,
- standalone_mode=standalone_mode,
- **extra
- )
+ original_main(self, args=args, prog_name=prog_name, complete_var=None, standalone_mode=standalone_mode, **extra)
original_main = click.BaseCommand.main
@@ -325,14 +298,9 @@ def print_version(ctx, param, value):
@click.group(context_settings=dict(help_option_names=["-h", "--help"]))
+@click.option("--version", is_flag=True, callback=print_version, expose_value=False, is_eager=True)
@click.option(
- "--version", is_flag=True, callback=print_version, expose_value=False, is_eager=True
-)
-@click.option(
- "--config",
- "-c",
- type=click.Path(exists=True, dir_okay=False, readable=True),
- help="Configuration file to use",
+ "--config", "-c", type=click.Path(exists=True, dir_okay=False, readable=True), help="Configuration file to use",
)
@click.option(
"--directory",
@@ -348,47 +316,26 @@ def print_version(ctx, param, value):
help="What to do when an error is encountered",
)
@click.option(
- "--fetchers",
- type=click.INT,
- default=None,
- help="Maximum simultaneous download tasks",
+ "--fetchers", type=click.INT, default=None, help="Maximum simultaneous download tasks",
)
+@click.option("--builders", type=click.INT, default=None, help="Maximum simultaneous build tasks")
+@click.option("--pushers", type=click.INT, default=None, help="Maximum simultaneous upload tasks")
@click.option(
- "--builders", type=click.INT, default=None, help="Maximum simultaneous build tasks"
+ "--max-jobs", type=click.INT, default=None, help="Number of parallel jobs allowed for a given build task",
)
@click.option(
- "--pushers", type=click.INT, default=None, help="Maximum simultaneous upload tasks"
+ "--network-retries", type=click.INT, default=None, help="Maximum retries for network tasks",
)
@click.option(
- "--max-jobs",
- type=click.INT,
- default=None,
- help="Number of parallel jobs allowed for a given build task",
-)
-@click.option(
- "--network-retries",
- type=click.INT,
- default=None,
- help="Maximum retries for network tasks",
-)
-@click.option(
- "--no-interactive",
- is_flag=True,
- help="Force non interactive mode, otherwise this is automatically decided",
+ "--no-interactive", is_flag=True, help="Force non interactive mode, otherwise this is automatically decided",
)
@click.option("--verbose/--no-verbose", default=None, help="Be extra verbose")
@click.option("--debug/--no-debug", default=None, help="Print debugging output")
@click.option(
- "--error-lines",
- type=click.INT,
- default=None,
- help="Maximum number of lines to show from a task log",
+ "--error-lines", type=click.INT, default=None, help="Maximum number of lines to show from a task log",
)
@click.option(
- "--message-lines",
- type=click.INT,
- default=None,
- help="Maximum number of lines to show in a detailed message",
+ "--message-lines", type=click.INT, default=None, help="Maximum number of lines to show in a detailed message",
)
@click.option(
"--log-file",
@@ -396,9 +343,7 @@ def print_version(ctx, param, value):
help="A file to store the main log (allows storing the main log while in interactive mode)",
)
@click.option(
- "--colors/--no-colors",
- default=None,
- help="Force enable/disable ANSI color codes in output",
+ "--colors/--no-colors", default=None, help="Force enable/disable ANSI color codes in output",
)
@click.option(
"--strict/--no-strict",
@@ -415,9 +360,7 @@ def print_version(ctx, param, value):
help="Specify a project option",
)
@click.option(
- "--default-mirror",
- default=None,
- help="The mirror to fetch from first, before attempting other mirrors",
+ "--default-mirror", default=None, help="The mirror to fetch from first, before attempting other mirrors",
)
@click.option(
"--pull-buildtrees",
@@ -450,9 +393,7 @@ def cli(context, **kwargs):
# Help Command #
##################################################################
@cli.command(
- name="help",
- short_help="Print usage information",
- context_settings={"help_option_names": []},
+ name="help", short_help="Print usage information", context_settings={"help_option_names": []},
)
@click.argument("command", nargs=-1, metavar="COMMAND")
@click.pass_context
@@ -462,10 +403,7 @@ def help_command(ctx, command):
command_ctx = search_command(command, context=ctx.parent)
if not command_ctx:
click.echo(
- "Not a valid command: '{} {}'".format(
- ctx.parent.info_name, " ".join(command)
- ),
- err=True,
+ "Not a valid command: '{} {}'".format(ctx.parent.info_name, " ".join(command)), err=True,
)
sys.exit(-1)
@@ -477,10 +415,7 @@ def help_command(ctx, command):
if command:
detail = " {} ".format(" ".join(command))
click.echo(
- "\nFor usage on a specific command: {} help{}COMMAND".format(
- ctx.parent.info_name, detail
- ),
- err=True,
+ "\nFor usage on a specific command: {} help{}COMMAND".format(ctx.parent.info_name, detail), err=True,
)
@@ -503,14 +438,9 @@ def help_command(ctx, command):
show_default=True,
help="The subdirectory to store elements in",
)
-@click.option(
- "--force", "-f", is_flag=True, help="Allow overwriting an existing project.conf"
-)
+@click.option("--force", "-f", is_flag=True, help="Allow overwriting an existing project.conf")
@click.argument(
- "target-directory",
- nargs=1,
- required=False,
- type=click.Path(file_okay=False, writable=True),
+ "target-directory", nargs=1, required=False, type=click.Path(file_okay=False, writable=True),
)
@click.pass_obj
def init(app, project_name, format_version, element_path, force, target_directory):
@@ -522,9 +452,7 @@ def init(app, project_name, format_version, element_path, force, target_director
Unless `--project-name` is specified, this will be an
interactive session.
"""
- app.init_project(
- project_name, format_version, element_path, force, target_directory
- )
+ app.init_project(project_name, format_version, element_path, force, target_directory)
##################################################################
@@ -532,17 +460,10 @@ def init(app, project_name, format_version, element_path, force, target_director
##################################################################
@cli.command(short_help="Build elements in a pipeline")
@click.option(
- "--deps",
- "-d",
- default=None,
- type=click.Choice(["plan", "all"]),
- help="The dependencies to build",
+ "--deps", "-d", default=None, type=click.Choice(["plan", "all"]), help="The dependencies to build",
)
@click.option(
- "--remote",
- "-r",
- default=None,
- help="The URL of the remote cache (defaults to the first configured cache)",
+ "--remote", "-r", default=None, help="The URL of the remote cache (defaults to the first configured cache)",
)
@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
@@ -574,10 +495,7 @@ def build(app, elements, deps, remote):
ignore_junction_targets = True
app.stream.build(
- elements,
- selection=deps,
- ignore_junction_targets=ignore_junction_targets,
- remote=remote,
+ elements, selection=deps, ignore_junction_targets=ignore_junction_targets, remote=remote,
)
@@ -586,11 +504,7 @@ def build(app, elements, deps, remote):
##################################################################
@cli.command(short_help="Show elements in the pipeline")
@click.option(
- "--except",
- "except_",
- multiple=True,
- type=click.Path(readable=False),
- help="Except certain dependencies",
+ "--except", "except_", multiple=True, type=click.Path(readable=False), help="Except certain dependencies",
)
@click.option(
"--deps",
@@ -679,9 +593,7 @@ def show(app, elements, deps, except_, order, format_):
if not elements:
elements = app.project.get_default_targets()
- dependencies = app.stream.load_selection(
- elements, selection=deps, except_targets=except_
- )
+ dependencies = app.stream.load_selection(elements, selection=deps, except_targets=except_)
if order == "alpha":
dependencies = sorted(dependencies)
@@ -698,11 +610,7 @@ def show(app, elements, deps, except_, order, format_):
##################################################################
@cli.command(short_help="Shell into an element's sandbox environment")
@click.option(
- "--build",
- "-b",
- "build_",
- is_flag=True,
- help="Stage dependencies and sources to build",
+ "--build", "-b", "build_", is_flag=True, help="Stage dependencies and sources to build",
)
@click.option(
"--sysroot",
@@ -726,16 +634,10 @@ def show(app, elements, deps, except_, order, format_):
type=click.Choice(["ask", "try", "always", "never"]),
default="ask",
show_default=True,
- help=(
- "Use a buildtree. If `always` is set, will always fail to "
- "build if a buildtree is not available."
- ),
+ help=("Use a buildtree. If `always` is set, will always fail to " "build if a buildtree is not available."),
)
@click.option(
- "--pull",
- "pull_",
- is_flag=True,
- help="Attempt to pull missing or incomplete artifacts",
+ "--pull", "pull_", is_flag=True, help="Attempt to pull missing or incomplete artifacts",
)
@click.argument("element", required=False, type=click.Path(readable=False))
@click.argument("command", type=click.STRING, nargs=-1)
@@ -782,9 +684,7 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, c
if not element:
raise AppError('Missing argument "ELEMENT".')
- elements = app.stream.load_selection(
- (element,), selection=selection, use_artifact_config=True
- )
+ elements = app.stream.load_selection((element,), selection=selection, use_artifact_config=True)
# last one will be the element we want to stage, previous ones are
# elements to try and pull
@@ -811,20 +711,15 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, c
else:
if cli_buildtree == "always":
# Exit early if it won't be possible to even fetch a buildtree with always option
- raise AppError(
- "Artifact was created without buildtree, unable to launch shell with it"
- )
+ raise AppError("Artifact was created without buildtree, unable to launch shell with it")
click.echo(
- "WARNING: Artifact created without buildtree, shell will be loaded without it",
- err=True,
+ "WARNING: Artifact created without buildtree, shell will be loaded without it", err=True,
)
else:
# If the value has defaulted to ask and in non interactive mode, don't consider the buildtree, this
# being the default behaviour of the command
if app.interactive and cli_buildtree == "ask":
- if cached and bool(
- click.confirm("Do you want to use the cached buildtree?")
- ):
+ if cached and bool(click.confirm("Do you want to use the cached buildtree?")):
use_buildtree = "always"
elif buildtree_exists:
try:
@@ -858,9 +753,7 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, c
pull_dependencies=pull_dependencies,
)
except BstError as e:
- raise AppError(
- "Error launching shell: {}".format(e), detail=e.detail
- ) from e
+ raise AppError("Error launching shell: {}".format(e), detail=e.detail) from e
# If there were no errors, we return the shell's exit code here.
sys.exit(exitcode)
@@ -894,22 +787,13 @@ def source():
help="The dependencies to fetch",
)
@click.option(
- "--track",
- "track_",
- is_flag=True,
- help="Track new source references before fetching",
+ "--track", "track_", is_flag=True, help="Track new source references before fetching",
)
@click.option(
- "--track-cross-junctions",
- "-J",
- is_flag=True,
- help="Allow tracking to cross junction boundaries",
+ "--track-cross-junctions", "-J", is_flag=True, help="Allow tracking to cross junction boundaries",
)
@click.option(
- "--remote",
- "-r",
- default=None,
- help="The URL of the remote source cache (defaults to the first configured cache)",
+ "--remote", "-r", default=None, help="The URL of the remote source cache (defaults to the first configured cache)",
)
@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
@@ -939,8 +823,7 @@ def source_fetch(app, elements, deps, track_, except_, track_cross_junctions, re
if track_cross_junctions and not track_:
click.echo(
- "ERROR: The --track-cross-junctions option can only be used with --track",
- err=True,
+ "ERROR: The --track-cross-junctions option can only be used with --track", err=True,
)
sys.exit(-1)
@@ -985,9 +868,7 @@ def source_fetch(app, elements, deps, track_, except_, track_cross_junctions, re
type=click.Choice(["none", "all"]),
help="The dependencies to track",
)
-@click.option(
- "--cross-junctions", "-J", is_flag=True, help="Allow crossing junction boundaries"
-)
+@click.option("--cross-junctions", "-J", is_flag=True, help="Allow crossing junction boundaries")
@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def source_track(app, elements, deps, except_, cross_junctions):
@@ -1021,10 +902,7 @@ def source_track(app, elements, deps, except_, cross_junctions):
if deps == "none":
deps = "redirect"
app.stream.track(
- elements,
- selection=deps,
- except_targets=except_,
- cross_junctions=cross_junctions,
+ elements, selection=deps, except_targets=except_, cross_junctions=cross_junctions,
)
@@ -1034,11 +912,7 @@ def source_track(app, elements, deps, except_, cross_junctions):
@source.command(name="checkout", short_help="Checkout sources of an element")
@click.option("--force", "-f", is_flag=True, help="Allow files to be overwritten")
@click.option(
- "--except",
- "except_",
- multiple=True,
- type=click.Path(readable=False),
- help="Except certain dependencies",
+ "--except", "except_", multiple=True, type=click.Path(readable=False), help="Except certain dependencies",
)
@click.option(
"--deps",
@@ -1070,9 +944,7 @@ def source_track(app, elements, deps, except_, cross_junctions):
)
@click.argument("element", required=False, type=click.Path(readable=False))
@click.pass_obj
-def source_checkout(
- app, element, directory, force, deps, except_, tar, compression, build_scripts
-):
+def source_checkout(app, element, directory, force, deps, except_, tar, compression, build_scripts):
"""Checkout sources of an element to the specified location
When this command is executed from a workspace directory, the default
@@ -1122,9 +994,7 @@ def workspace():
##################################################################
@workspace.command(name="open", short_help="Open a new workspace")
@click.option(
- "--no-checkout",
- is_flag=True,
- help="Do not checkout the source, only link to the given directory",
+ "--no-checkout", is_flag=True, help="Do not checkout the source, only link to the given directory",
)
@click.option(
"--force",
@@ -1134,10 +1004,7 @@ def workspace():
+ "or if a workspace for that element already exists",
)
@click.option(
- "--track",
- "track_",
- is_flag=True,
- help="Track and fetch new source references before checking out the workspace",
+ "--track", "track_", is_flag=True, help="Track and fetch new source references before checking out the workspace",
)
@click.option(
"--directory",
@@ -1152,11 +1019,7 @@ def workspace_open(app, no_checkout, force, track_, directory, elements):
with app.initialized():
app.stream.workspace_open(
- elements,
- no_checkout=no_checkout,
- track_first=track_,
- force=force,
- custom_dir=directory,
+ elements, no_checkout=no_checkout, track_first=track_, force=force, custom_dir=directory,
)
@@ -1165,9 +1028,7 @@ def workspace_open(app, no_checkout, force, track_, directory, elements):
##################################################################
@workspace.command(name="close", short_help="Close workspaces")
@click.option(
- "--remove-dir",
- is_flag=True,
- help="Remove the path that contains the closed workspace",
+ "--remove-dir", is_flag=True, help="Remove the path that contains the closed workspace",
)
@click.option("--all", "-a", "all_", is_flag=True, help="Close all open workspaces")
@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@@ -1193,9 +1054,7 @@ def workspace_close(app, remove_dir, all_, elements):
sys.exit(0)
if all_:
- elements = [
- element_name for element_name, _ in app.context.get_workspaces().list()
- ]
+ elements = [element_name for element_name, _ in app.context.get_workspaces().list()]
elements = app.stream.redirect_element_names(elements)
@@ -1227,14 +1086,9 @@ def workspace_close(app, remove_dir, all_, elements):
# Workspace Reset Command #
##################################################################
@workspace.command(name="reset", short_help="Reset a workspace to its original state")
+@click.option("--soft", is_flag=True, help="Reset workspace state without affecting its contents")
@click.option(
- "--soft", is_flag=True, help="Reset workspace state without affecting its contents"
-)
-@click.option(
- "--track",
- "track_",
- is_flag=True,
- help="Track and fetch the latest source before resetting",
+ "--track", "track_", is_flag=True, help="Track and fetch the latest source before resetting",
)
@click.option("--all", "-a", "all_", is_flag=True, help="Reset all open workspaces")
@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@@ -1256,9 +1110,7 @@ def workspace_reset(app, soft, track_, all_, elements):
raise AppError("No open workspaces to reset")
if all_:
- elements = tuple(
- element_name for element_name, _ in app.context.get_workspaces().list()
- )
+ elements = tuple(element_name for element_name, _ in app.context.get_workspaces().list())
app.stream.workspace_reset(elements, soft=soft, track_first=track_)
@@ -1344,15 +1196,10 @@ def artifact_show(app, deps, artifacts):
help="The dependencies to checkout",
)
@click.option(
- "--integrate/--no-integrate",
- default=None,
- is_flag=True,
- help="Whether to run integration commands",
+ "--integrate/--no-integrate", default=None, is_flag=True, help="Whether to run integration commands",
)
@click.option(
- "--hardlinks",
- is_flag=True,
- help="Checkout hardlinks instead of copying if possible",
+ "--hardlinks", is_flag=True, help="Checkout hardlinks instead of copying if possible",
)
@click.option(
"--tar",
@@ -1370,22 +1217,14 @@ def artifact_show(app, deps, artifacts):
help="The compression option of the tarball created.",
)
@click.option(
- "--pull",
- "pull_",
- is_flag=True,
- help="Pull the artifact if it's missing or incomplete.",
+ "--pull", "pull_", is_flag=True, help="Pull the artifact if it's missing or incomplete.",
)
@click.option(
- "--directory",
- default=None,
- type=click.Path(file_okay=False),
- help="The directory to checkout the artifact to",
+ "--directory", default=None, type=click.Path(file_okay=False), help="The directory to checkout the artifact to",
)
@click.argument("target", required=False, type=click.Path(readable=False))
@click.pass_obj
-def artifact_checkout(
- app, force, deps, integrate, hardlinks, tar, compression, pull_, directory, target
-):
+def artifact_checkout(app, force, deps, integrate, hardlinks, tar, compression, pull_, directory, target):
"""Checkout contents of an artifact
When this command is executed from a workspace directory, the default
@@ -1402,8 +1241,7 @@ def artifact_checkout(
if not tar:
if compression:
click.echo(
- "ERROR: --compression can only be provided if --tar is provided",
- err=True,
+ "ERROR: --compression can only be provided if --tar is provided", err=True,
)
sys.exit(-1)
else:
@@ -1420,15 +1258,10 @@ def artifact_checkout(
inferred_compression = _get_compression(tar)
except UtilError as e:
click.echo(
- "ERROR: Invalid file extension given with '--tar': {}".format(e),
- err=True,
+ "ERROR: Invalid file extension given with '--tar': {}".format(e), err=True,
)
sys.exit(-1)
- if (
- compression
- and inferred_compression != ""
- and inferred_compression != compression
- ):
+ if compression and inferred_compression != "" and inferred_compression != compression:
click.echo(
"WARNING: File extension and compression differ."
"File extension has been overridden by --compression",
@@ -1469,10 +1302,7 @@ def artifact_checkout(
help="The dependency artifacts to pull",
)
@click.option(
- "--remote",
- "-r",
- default=None,
- help="The URL of the remote cache (defaults to the first configured cache)",
+ "--remote", "-r", default=None, help="The URL of the remote cache (defaults to the first configured cache)",
)
@click.argument("artifacts", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
@@ -1506,10 +1336,7 @@ def artifact_pull(app, artifacts, deps, remote):
ignore_junction_targets = True
app.stream.pull(
- artifacts,
- selection=deps,
- remote=remote,
- ignore_junction_targets=ignore_junction_targets,
+ artifacts, selection=deps, remote=remote, ignore_junction_targets=ignore_junction_targets,
)
@@ -1526,10 +1353,7 @@ def artifact_pull(app, artifacts, deps, remote):
help="The dependencies to push",
)
@click.option(
- "--remote",
- "-r",
- default=None,
- help="The URL of the remote cache (defaults to the first configured cache)",
+ "--remote", "-r", default=None, help="The URL of the remote cache (defaults to the first configured cache)",
)
@click.argument("artifacts", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
@@ -1565,10 +1389,7 @@ def artifact_push(app, artifacts, deps, remote):
ignore_junction_targets = True
app.stream.push(
- artifacts,
- selection=deps,
- remote=remote,
- ignore_junction_targets=ignore_junction_targets,
+ artifacts, selection=deps, remote=remote, ignore_junction_targets=ignore_junction_targets,
)
@@ -1624,11 +1445,7 @@ def artifact_log(app, artifacts, out):
################################################################
@artifact.command(name="list-contents", short_help="List the contents of an artifact")
@click.option(
- "--long",
- "-l",
- "long_",
- is_flag=True,
- help="Provide more information about the contents of the artifact.",
+ "--long", "-l", "long_", is_flag=True, help="Provide more information about the contents of the artifact.",
)
@click.argument("artifacts", type=click.Path(), nargs=-1)
@click.pass_obj
@@ -1698,23 +1515,15 @@ def artifact_delete(app, artifacts, deps):
help="The dependencies to fetch",
)
@click.option(
- "--track",
- "track_",
- is_flag=True,
- help="Track new source references before fetching",
+ "--track", "track_", is_flag=True, help="Track new source references before fetching",
)
@click.option(
- "--track-cross-junctions",
- "-J",
- is_flag=True,
- help="Allow tracking to cross junction boundaries",
+ "--track-cross-junctions", "-J", is_flag=True, help="Allow tracking to cross junction boundaries",
)
@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def fetch(app, elements, deps, track_, except_, track_cross_junctions):
- click.echo(
- "This command is now obsolete. Use `bst source fetch` instead.", err=True
- )
+ click.echo("This command is now obsolete. Use `bst source fetch` instead.", err=True)
sys.exit(1)
@@ -1737,15 +1546,11 @@ def fetch(app, elements, deps, track_, except_, track_cross_junctions):
type=click.Choice(["none", "all"]),
help="The dependencies to track",
)
-@click.option(
- "--cross-junctions", "-J", is_flag=True, help="Allow crossing junction boundaries"
-)
+@click.option("--cross-junctions", "-J", is_flag=True, help="Allow crossing junction boundaries")
@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def track(app, elements, deps, except_, cross_junctions):
- click.echo(
- "This command is now obsolete. Use `bst source track` instead.", err=True
- )
+ click.echo("This command is now obsolete. Use `bst source track` instead.", err=True)
sys.exit(1)
@@ -1763,14 +1568,10 @@ def track(app, elements, deps, except_, cross_junctions):
help="The dependencies to checkout",
)
@click.option(
- "--integrate/--no-integrate",
- default=True,
- help="Run integration commands (default is to run commands)",
+ "--integrate/--no-integrate", default=True, help="Run integration commands (default is to run commands)",
)
@click.option(
- "--hardlinks",
- is_flag=True,
- help="Checkout hardlinks instead of copies (handle with care)",
+ "--hardlinks", is_flag=True, help="Checkout hardlinks instead of copies (handle with care)",
)
@click.option(
"--tar",
@@ -1804,16 +1605,12 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
help="The dependency artifacts to pull",
)
@click.option(
- "--remote",
- "-r",
- help="The URL of the remote cache (defaults to the first configured cache)",
+ "--remote", "-r", help="The URL of the remote cache (defaults to the first configured cache)",
)
@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def pull(app, elements, deps, remote):
- click.echo(
- "This command is now obsolete. Use `bst artifact pull` instead.", err=True
- )
+ click.echo("This command is now obsolete. Use `bst artifact pull` instead.", err=True)
sys.exit(1)
@@ -1830,15 +1627,10 @@ def pull(app, elements, deps, remote):
help="The dependencies to push",
)
@click.option(
- "--remote",
- "-r",
- default=None,
- help="The URL of the remote cache (defaults to the first configured cache)",
+ "--remote", "-r", default=None, help="The URL of the remote cache (defaults to the first configured cache)",
)
@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def push(app, elements, deps, remote):
- click.echo(
- "This command is now obsolete. Use `bst artifact push` instead.", err=True
- )
+ click.echo("This command is now obsolete. Use `bst artifact push` instead.", err=True)
sys.exit(1)
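
The _frontend/cli.py hunks fold @click.option(...) decorators onto one line whenever their parameters fit within the 119-column limit, leaving longer option declarations in the multi-line form. A minimal self-contained sketch of the single-line style (an illustrative command, not part of the BuildStream CLI):

    import click

    @click.command()
    @click.option("--line-length", type=click.INT, default=119, show_default=True, help="Maximum line length to allow")
    @click.argument("paths", nargs=-1, type=click.Path(readable=False))
    def check(line_length, paths):
        # Echo the configured limit and the number of paths that would be checked.
        click.echo("checking {} path(s) at {} columns".format(len(paths), line_length))

    if __name__ == "__main__":
        check()
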
diff --git a/src/buildstream/_frontend/complete.py b/src/buildstream/_frontend/complete.py
index 35d1cb1a3..4ac596287 100644
--- a/src/buildstream/_frontend/complete.py
+++ b/src/buildstream/_frontend/complete.py
@@ -170,12 +170,7 @@ def resolve_ctx(cli, prog_name, args):
cmd = ctx.command.get_command(ctx, args_remaining[0])
if cmd is None:
return None
- ctx = cmd.make_context(
- args_remaining[0],
- args_remaining[1:],
- parent=ctx,
- resilient_parsing=True,
- )
+ ctx = cmd.make_context(args_remaining[0], args_remaining[1:], parent=ctx, resilient_parsing=True,)
args_remaining = ctx.protected_args + ctx.args
else:
ctx = ctx.parent
@@ -202,9 +197,7 @@ def is_incomplete_option(all_args, cmd_param):
if cmd_param.is_flag:
return False
last_option = None
- for index, arg_str in enumerate(
- reversed([arg for arg in all_args if arg != WORDBREAK])
- ):
+ for index, arg_str in enumerate(reversed([arg for arg in all_args if arg != WORDBREAK])):
if index + 1 > cmd_param.nargs:
break
if start_of_option(arg_str):
@@ -295,38 +288,22 @@ def get_choices(cli, prog_name, args, incomplete, override):
if not found_param:
# completion for option values by choices
for cmd_param in ctx.command.params:
- if isinstance(cmd_param, Option) and is_incomplete_option(
- all_args, cmd_param
- ):
- choices.extend(
- get_user_autocompletions(
- all_args, incomplete, ctx.command, cmd_param, override
- )
- )
+ if isinstance(cmd_param, Option) and is_incomplete_option(all_args, cmd_param):
+ choices.extend(get_user_autocompletions(all_args, incomplete, ctx.command, cmd_param, override))
found_param = True
break
if not found_param:
# completion for argument values by choices
for cmd_param in ctx.command.params:
- if isinstance(cmd_param, Argument) and is_incomplete_argument(
- ctx.params, cmd_param
- ):
- choices.extend(
- get_user_autocompletions(
- all_args, incomplete, ctx.command, cmd_param, override
- )
- )
+ if isinstance(cmd_param, Argument) and is_incomplete_argument(ctx.params, cmd_param):
+ choices.extend(get_user_autocompletions(all_args, incomplete, ctx.command, cmd_param, override))
found_param = True
break
if not found_param and isinstance(ctx.command, MultiCommand):
# completion for any subcommands
choices.extend(
- [
- cmd + " "
- for cmd in ctx.command.list_commands(ctx)
- if not ctx.command.get_command(ctx, cmd).hidden
- ]
+ [cmd + " " for cmd in ctx.command.list_commands(ctx) if not ctx.command.get_command(ctx, cmd).hidden]
)
if (
diff --git a/src/buildstream/_frontend/status.py b/src/buildstream/_frontend/status.py
index 577fd40c5..0fd44d09b 100644
--- a/src/buildstream/_frontend/status.py
+++ b/src/buildstream/_frontend/status.py
@@ -50,15 +50,7 @@ class Status:
_TERM_CAPABILITIES = {"move_up": "cuu1", "move_x": "hpa", "clear_eol": "el"}
def __init__(
- self,
- context,
- state,
- content_profile,
- format_profile,
- success_profile,
- error_profile,
- stream,
- colors=False,
+ self, context, state, content_profile, format_profile, success_profile, error_profile, stream, colors=False,
):
self._context = context
@@ -73,13 +65,7 @@ class Status:
self._spacing = 1
self._colors = colors
self._header = _StatusHeader(
- context,
- state,
- content_profile,
- format_profile,
- success_profile,
- error_profile,
- stream,
+ context, state, content_profile, format_profile, success_profile, error_profile, stream,
)
self._term_width, _ = click.get_terminal_size()
@@ -317,14 +303,7 @@ class Status:
def _add_job(self, action_name, full_name):
task = self._state.tasks[(action_name, full_name)]
elapsed = task.elapsed_offset
- job = _StatusJob(
- self._context,
- action_name,
- full_name,
- self._content_profile,
- self._format_profile,
- elapsed,
- )
+ job = _StatusJob(self._context, action_name, full_name, self._content_profile, self._format_profile, elapsed,)
self._jobs[(action_name, full_name)] = job
self._need_alloc = True
@@ -355,14 +334,7 @@ class Status:
#
class _StatusHeader:
def __init__(
- self,
- context,
- state,
- content_profile,
- format_profile,
- success_profile,
- error_profile,
- stream,
+ self, context, state, content_profile, format_profile, success_profile, error_profile, stream,
):
#
@@ -518,9 +490,7 @@ class _StatusHeader:
# elapsed (datetime): The offset into the session when this job is created
#
class _StatusJob:
- def __init__(
- self, context, action_name, full_name, content_profile, format_profile, elapsed
- ):
+ def __init__(self, context, action_name, full_name, content_profile, format_profile, elapsed):
#
# Public members
#
@@ -612,13 +582,9 @@ class _StatusJob:
)
if self._current_progress is not None:
- text += self._format_profile.fmt(":") + self._content_profile.fmt(
- str(self._current_progress)
- )
+ text += self._format_profile.fmt(":") + self._content_profile.fmt(str(self._current_progress))
if self._maximum_progress is not None:
- text += self._format_profile.fmt("/") + self._content_profile.fmt(
- str(self._maximum_progress)
- )
+ text += self._format_profile.fmt("/") + self._content_profile.fmt(str(self._maximum_progress))
# Add padding before terminating ']'
terminator = (" " * padding) + "]"
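
The _frontend/status.py hunks collapse the long __init__ signatures and progress-text expressions in the same way. A small sketch of how a reformatted file could be spot-checked against the new limit (illustrative only; this helper is not part of BuildStream):

    import sys

    LIMIT = 119  # line length used throughout this reformatting

    def long_lines(path, limit=LIMIT):
        # Yield (line number, length) for every line longer than the limit.
        with open(path, "r", encoding="utf-8") as source:
            for number, line in enumerate(source, start=1):
                length = len(line.rstrip("\n"))
                if length > limit:
                    yield number, length

    if __name__ == "__main__":
        for path in sys.argv[1:]:
            for number, length in long_lines(path):
                print("{}:{}: {} characters".format(path, number, length))
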
diff --git a/src/buildstream/_frontend/widget.py b/src/buildstream/_frontend/widget.py
index 8a605bb33..c7eac2b24 100644
--- a/src/buildstream/_frontend/widget.py
+++ b/src/buildstream/_frontend/widget.py
@@ -91,18 +91,12 @@ class WallclockTime(Widget):
fields = [
self.content_profile.fmt("{:02d}".format(x))
- for x in [
- message.creation_time.hour,
- message.creation_time.minute,
- message.creation_time.second,
- ]
+ for x in [message.creation_time.hour, message.creation_time.minute, message.creation_time.second,]
]
text = self.format_profile.fmt(":").join(fields)
if self._output_format == "us":
- text += self.content_profile.fmt(
- ".{:06d}".format(message.creation_time.microsecond)
- )
+ text += self.content_profile.fmt(".{:06d}".format(message.creation_time.microsecond))
return text
@@ -135,18 +129,13 @@ class TimeCode(Widget):
else:
hours, remainder = divmod(int(elapsed.total_seconds()), 60 * 60)
minutes, seconds = divmod(remainder, 60)
- fields = [
- self.content_profile.fmt("{0:02d}".format(field))
- for field in [hours, minutes, seconds]
- ]
+ fields = [self.content_profile.fmt("{0:02d}".format(field)) for field in [hours, minutes, seconds]]
text = self.format_profile.fmt(":").join(fields)
if self._microseconds:
if elapsed is not None:
- text += self.content_profile.fmt(
- ".{0:06d}".format(elapsed.microseconds)
- )
+ text += self.content_profile.fmt(".{0:06d}".format(elapsed.microseconds))
else:
text += self.content_profile.fmt(".------")
return text
@@ -270,17 +259,11 @@ class MessageOrLogFile(Widget):
def __init__(self, context, content_profile, format_profile, err_profile):
super().__init__(context, content_profile, format_profile)
self._message_widget = MessageText(context, content_profile, format_profile)
- self._logfile_widget = LogFile(
- context, content_profile, format_profile, err_profile
- )
+ self._logfile_widget = LogFile(context, content_profile, format_profile, err_profile)
def render(self, message):
# Show the log file only in the main start/success messages
- if (
- message.logfile
- and message.scheduler
- and message.message_type in [MessageType.START, MessageType.SUCCESS]
- ):
+ if message.logfile and message.scheduler and message.message_type in [MessageType.START, MessageType.SUCCESS]:
text = self._logfile_widget.render(message)
else:
text = self._message_widget.render(message)
@@ -303,15 +286,7 @@ class MessageOrLogFile(Widget):
#
class LogLine(Widget):
def __init__(
- self,
- context,
- state,
- content_profile,
- format_profile,
- success_profile,
- err_profile,
- detail_profile,
- indent=4,
+ self, context, state, content_profile, format_profile, success_profile, err_profile, detail_profile, indent=4,
):
super().__init__(context, content_profile, format_profile)
@@ -326,34 +301,22 @@ class LogLine(Widget):
self._resolved_keys = None
self._state = state
- self._logfile_widget = LogFile(
- context, content_profile, format_profile, err_profile
- )
+ self._logfile_widget = LogFile(context, content_profile, format_profile, err_profile)
if context.log_debug:
self._columns.extend([Debug(context, content_profile, format_profile)])
self.logfile_variable_names = {
- "elapsed": TimeCode(
- context, content_profile, format_profile, microseconds=False
- ),
- "elapsed-us": TimeCode(
- context, content_profile, format_profile, microseconds=True
- ),
+ "elapsed": TimeCode(context, content_profile, format_profile, microseconds=False),
+ "elapsed-us": TimeCode(context, content_profile, format_profile, microseconds=True),
"wallclock": WallclockTime(context, content_profile, format_profile),
- "wallclock-us": WallclockTime(
- context, content_profile, format_profile, output_format="us"
- ),
+ "wallclock-us": WallclockTime(context, content_profile, format_profile, output_format="us"),
"key": CacheKey(context, content_profile, format_profile, err_profile),
"element": ElementName(context, content_profile, format_profile),
"action": TypeName(context, content_profile, format_profile),
- "message": MessageOrLogFile(
- context, content_profile, format_profile, err_profile
- ),
+ "message": MessageOrLogFile(context, content_profile, format_profile, err_profile),
}
- logfile_tokens = self._parse_logfile_format(
- context.log_message_format, content_profile, format_profile
- )
+ logfile_tokens = self._parse_logfile_format(context.log_message_format, content_profile, format_profile)
self._columns.extend(logfile_tokens)
# show_pipeline()
@@ -379,9 +342,7 @@ class LogLine(Widget):
full_key, cache_key, dim_keys = element._get_display_key()
- line = p.fmt_subst(
- line, "name", element._get_full_name(), fg="blue", bold=True
- )
+ line = p.fmt_subst(line, "name", element._get_full_name(), fg="blue", bold=True)
line = p.fmt_subst(line, "key", cache_key, fg="yellow", dim=dim_keys)
line = p.fmt_subst(line, "full-key", full_key, fg="yellow", dim=dim_keys)
@@ -393,9 +354,7 @@ class LogLine(Widget):
line = p.fmt_subst(line, "state", "failed", fg="red")
elif element._cached_success():
line = p.fmt_subst(line, "state", "cached", fg="magenta")
- elif (
- consistency == Consistency.RESOLVED and not element._source_cached()
- ):
+ elif consistency == Consistency.RESOLVED and not element._source_cached():
line = p.fmt_subst(line, "state", "fetch needed", fg="red")
elif element._buildable():
line = p.fmt_subst(line, "state", "buildable", fg="green")
@@ -407,53 +366,34 @@ class LogLine(Widget):
line = p.fmt_subst(
line,
"config",
- yaml.round_trip_dump(
- element._Element__config,
- default_flow_style=False,
- allow_unicode=True,
- ),
+ yaml.round_trip_dump(element._Element__config, default_flow_style=False, allow_unicode=True,),
)
# Variables
if "%{vars" in format_:
variables = element._Element__variables.flat
line = p.fmt_subst(
- line,
- "vars",
- yaml.round_trip_dump(
- variables, default_flow_style=False, allow_unicode=True
- ),
+ line, "vars", yaml.round_trip_dump(variables, default_flow_style=False, allow_unicode=True),
)
# Environment
if "%{env" in format_:
environment = element._Element__environment
line = p.fmt_subst(
- line,
- "env",
- yaml.round_trip_dump(
- environment, default_flow_style=False, allow_unicode=True
- ),
+ line, "env", yaml.round_trip_dump(environment, default_flow_style=False, allow_unicode=True),
)
# Public
if "%{public" in format_:
environment = element._Element__public
line = p.fmt_subst(
- line,
- "public",
- yaml.round_trip_dump(
- environment, default_flow_style=False, allow_unicode=True
- ),
+ line, "public", yaml.round_trip_dump(environment, default_flow_style=False, allow_unicode=True),
)
# Workspaced
if "%{workspaced" in format_:
line = p.fmt_subst(
- line,
- "workspaced",
- "(workspaced)" if element._get_workspace() else "",
- fg="yellow",
+ line, "workspaced", "(workspaced)" if element._get_workspace() else "", fg="yellow",
)
# Workspace-dirs
@@ -463,39 +403,25 @@ class LogLine(Widget):
path = workspace.get_absolute_path()
if path.startswith("~/"):
path = os.path.join(os.getenv("HOME", "/root"), path[2:])
- line = p.fmt_subst(
- line, "workspace-dirs", "Workspace: {}".format(path)
- )
+ line = p.fmt_subst(line, "workspace-dirs", "Workspace: {}".format(path))
else:
line = p.fmt_subst(line, "workspace-dirs", "")
# Dependencies
if "%{deps" in format_:
deps = [e.name for e in element.dependencies(Scope.ALL, recurse=False)]
- line = p.fmt_subst(
- line, "deps", yaml.safe_dump(deps, default_style=None).rstrip("\n")
- )
+ line = p.fmt_subst(line, "deps", yaml.safe_dump(deps, default_style=None).rstrip("\n"))
# Build Dependencies
if "%{build-deps" in format_:
- build_deps = [
- e.name for e in element.dependencies(Scope.BUILD, recurse=False)
- ]
- line = p.fmt_subst(
- line,
- "build-deps",
- yaml.safe_dump(build_deps, default_style=False).rstrip("\n"),
- )
+ build_deps = [e.name for e in element.dependencies(Scope.BUILD, recurse=False)]
+ line = p.fmt_subst(line, "build-deps", yaml.safe_dump(build_deps, default_style=False).rstrip("\n"),)
# Runtime Dependencies
if "%{runtime-deps" in format_:
- runtime_deps = [
- e.name for e in element.dependencies(Scope.RUN, recurse=False)
- ]
+ runtime_deps = [e.name for e in element.dependencies(Scope.RUN, recurse=False)]
line = p.fmt_subst(
- line,
- "runtime-deps",
- yaml.safe_dump(runtime_deps, default_style=False).rstrip("\n"),
+ line, "runtime-deps", yaml.safe_dump(runtime_deps, default_style=False).rstrip("\n"),
)
report += line + "\n"
@@ -519,15 +445,11 @@ class LogLine(Widget):
starttime = datetime.datetime.now()
text = ""
- self._resolved_keys = {
- element: element._get_cache_key() for element in stream.session_elements
- }
+ self._resolved_keys = {element: element._get_cache_key() for element in stream.session_elements}
# Main invocation context
text += "\n"
- text += self.content_profile.fmt(
- "BuildStream Version {}\n".format(bst_version), bold=True
- )
+ text += self.content_profile.fmt("BuildStream Version {}\n".format(bst_version), bold=True)
values = OrderedDict()
values["Session Start"] = starttime.strftime("%A, %d-%m-%Y at %H:%M:%S")
values["Project"] = "{} ({})".format(project.name, project.directory)
@@ -538,11 +460,7 @@ class LogLine(Widget):
text += "\n"
text += self.content_profile.fmt("User Configuration\n", bold=True)
values = OrderedDict()
- values["Configuration File"] = (
- "Default Configuration"
- if not context.config_origin
- else context.config_origin
- )
+ values["Configuration File"] = "Default Configuration" if not context.config_origin else context.config_origin
values["Cache Directory"] = context.cachedir
values["Log Files"] = context.logdir
values["Source Mirrors"] = context.sourcedir
@@ -570,8 +488,7 @@ class LogLine(Widget):
)
if project.config.element_factory and project.config.source_factory:
text += self._format_plugins(
- project.config.element_factory.loaded_dependencies,
- project.config.source_factory.loaded_dependencies,
+ project.config.element_factory.loaded_dependencies, project.config.source_factory.loaded_dependencies,
)
# Pipeline state
@@ -606,9 +523,7 @@ class LogLine(Widget):
text = ""
assert self._resolved_keys is not None
- elements = sorted(
- e for (e, k) in self._resolved_keys.items() if k != e._get_cache_key()
- )
+ elements = sorted(e for (e, k) in self._resolved_keys.items() if k != e._get_cache_key())
if elements:
text += self.content_profile.fmt("Resolved key Summary\n", bold=True)
text += self.show_pipeline(elements, self.context.log_element_format)
@@ -622,9 +537,7 @@ class LogLine(Widget):
# Exclude the failure messages if the job didn't ultimately fail
# (e.g. succeeded on retry)
if element_name in group.failed_tasks:
- values[element_name] = "".join(
- self._render(v) for v in messages
- )
+ values[element_name] = "".join(self._render(v) for v in messages)
if values:
text += self.content_profile.fmt("Failure Summary\n", bold=True)
@@ -667,12 +580,7 @@ class LogLine(Widget):
+ skipped_align
)
- status_text += (
- self.content_profile.fmt("failed ")
- + self._err_profile.fmt(failed)
- + " "
- + failed_align
- )
+ status_text += self.content_profile.fmt("failed ") + self._err_profile.fmt(failed) + " " + failed_align
values["{} Queue".format(group.name)] = status_text
text += self._format_values(values, style_value=False)
@@ -701,9 +609,7 @@ class LogLine(Widget):
logfile_tokens = []
while format_string:
if format_string.startswith("%%"):
- logfile_tokens.append(
- FixedText(self.context, "%", content_profile, format_profile)
- )
+ logfile_tokens.append(FixedText(self.context, "%", content_profile, format_profile))
format_string = format_string[2:]
continue
m = re.search(r"^%\{([^\}]+)\}", format_string)
@@ -711,25 +617,17 @@ class LogLine(Widget):
variable = m.group(1)
format_string = format_string[m.end(0) :]
if variable not in self.logfile_variable_names:
- raise Exception(
- "'{0}' is not a valid log variable name.".format(variable)
- )
+ raise Exception("'{0}' is not a valid log variable name.".format(variable))
logfile_tokens.append(self.logfile_variable_names[variable])
else:
m = re.search("^[^%]+", format_string)
if m is not None:
- text = FixedText(
- self.context, m.group(0), content_profile, format_profile
- )
+ text = FixedText(self.context, m.group(0), content_profile, format_profile)
format_string = format_string[m.end(0) :]
logfile_tokens.append(text)
else:
# No idea what to do now
- raise Exception(
- "'{0}' could not be parsed into a valid logging format.".format(
- format_string
- )
- )
+ raise Exception("'{0}' could not be parsed into a valid logging format.".format(format_string))
return logfile_tokens
def _render(self, message):
@@ -754,11 +652,7 @@ class LogLine(Widget):
n_lines = len(lines)
abbrev = False
- if (
- message.message_type not in ERROR_MESSAGES
- and not frontend_message
- and n_lines > self._message_lines
- ):
+ if message.message_type not in ERROR_MESSAGES and not frontend_message and n_lines > self._message_lines:
lines = lines[0 : self._message_lines]
if self._message_lines > 0:
abbrev = True
@@ -775,10 +669,7 @@ class LogLine(Widget):
if abbrev:
text += self._indent + self.content_profile.fmt(
- "Message contains {} additional lines".format(
- n_lines - self._message_lines
- ),
- dim=True,
+ "Message contains {} additional lines".format(n_lines - self._message_lines), dim=True,
)
text += "\n"
@@ -793,18 +684,10 @@ class LogLine(Widget):
elif self._log_lines > 0:
text += (
self._indent
- + self._err_profile.fmt(
- "Printing the last {} lines from log file:".format(
- self._log_lines
- )
- )
- + "\n"
- )
- text += (
- self._indent
- + self._logfile_widget.render_abbrev(message, abbrev=False)
+ + self._err_profile.fmt("Printing the last {} lines from log file:".format(self._log_lines))
+ "\n"
)
+ text += self._indent + self._logfile_widget.render_abbrev(message, abbrev=False) + "\n"
text += self._indent + self._err_profile.fmt("=" * 70) + "\n"
log_content = self._read_last_lines(message.logfile)
@@ -891,9 +774,7 @@ class LogLine(Widget):
text += textwrap.indent(value, self._indent)
continue
- text += self.format_profile.fmt(
- " {}: {}".format(key, " " * (max_key_len - len(key)))
- )
+ text += self.format_profile.fmt(" {}: {}".format(key, " " * (max_key_len - len(key))))
if style_value:
text += self.content_profile.fmt(str(value))
else:
@@ -928,13 +809,9 @@ class LogLine(Widget):
text += textwrap.indent(value, self._indent)
continue
- text += self.format_profile.fmt(
- " {}:{}".format(key, " " * (max_key_len - len(key)))
- )
+ text += self.format_profile.fmt(" {}:{}".format(key, " " * (max_key_len - len(key))))
- value_list = "\n\t" + "\n\t".join(
- (self._get_filestats(v, list_long=long_) for v in value)
- )
+ value_list = "\n\t" + "\n\t".join((self._get_filestats(v, list_long=long_) for v in value))
if value == []:
message = "\n\tThis element has no associated artifacts"
if style_value:
diff --git a/src/buildstream/_fuse/fuse.py b/src/buildstream/_fuse/fuse.py
index 9bedb2d4b..62b2de871 100644
--- a/src/buildstream/_fuse/fuse.py
+++ b/src/buildstream/_fuse/fuse.py
@@ -73,9 +73,7 @@ _machine = machine()
if _system == "Darwin":
_libiconv = CDLL(find_library("iconv"), RTLD_GLOBAL) # libfuse dependency
- _libfuse_path = (
- find_library("fuse4x") or find_library("osxfuse") or find_library("fuse")
- )
+ _libfuse_path = find_library("fuse4x") or find_library("osxfuse") or find_library("fuse")
else:
_libfuse_path = find_library("fuse")
@@ -98,12 +96,8 @@ if _system in ("Darwin", "Darwin-MacFuse", "FreeBSD"):
c_off_t = c_int64
c_pid_t = c_int32
c_uid_t = c_uint32
- setxattr_t = CFUNCTYPE(
- c_int, c_char_p, c_char_p, POINTER(c_byte), c_size_t, c_int, c_uint32
- )
- getxattr_t = CFUNCTYPE(
- c_int, c_char_p, c_char_p, POINTER(c_byte), c_size_t, c_uint32
- )
+ setxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte), c_size_t, c_int, c_uint32)
+ getxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte), c_size_t, c_uint32)
if _system == "Darwin":
c_stat._fields_ = [
("st_dev", c_dev_t),
@@ -369,28 +363,8 @@ class fuse_operations(Structure):
("truncate", CFUNCTYPE(c_int, c_char_p, c_off_t)),
("utime", c_voidp), # Deprecated, use utimens
("open", CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
- (
- "read",
- CFUNCTYPE(
- c_int,
- c_char_p,
- POINTER(c_byte),
- c_size_t,
- c_off_t,
- POINTER(fuse_file_info),
- ),
- ),
- (
- "write",
- CFUNCTYPE(
- c_int,
- c_char_p,
- POINTER(c_byte),
- c_size_t,
- c_off_t,
- POINTER(fuse_file_info),
- ),
- ),
+ ("read", CFUNCTYPE(c_int, c_char_p, POINTER(c_byte), c_size_t, c_off_t, POINTER(fuse_file_info),),),
+ ("write", CFUNCTYPE(c_int, c_char_p, POINTER(c_byte), c_size_t, c_off_t, POINTER(fuse_file_info),),),
("statfs", CFUNCTYPE(c_int, c_char_p, POINTER(c_statvfs))),
("flush", CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
("release", CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
@@ -418,10 +392,7 @@ class fuse_operations(Structure):
("access", CFUNCTYPE(c_int, c_char_p, c_int)),
("create", CFUNCTYPE(c_int, c_char_p, c_mode_t, POINTER(fuse_file_info))),
("ftruncate", CFUNCTYPE(c_int, c_char_p, c_off_t, POINTER(fuse_file_info))),
- (
- "fgetattr",
- CFUNCTYPE(c_int, c_char_p, POINTER(c_stat), POINTER(fuse_file_info)),
- ),
+ ("fgetattr", CFUNCTYPE(c_int, c_char_p, POINTER(c_stat), POINTER(fuse_file_info)),),
("lock", CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info), c_int, c_voidp)),
("utimens", CFUNCTYPE(c_int, c_char_p, POINTER(c_utimbuf))),
("bmap", CFUNCTYPE(c_int, c_char_p, c_size_t, POINTER(c_ulonglong))),
@@ -475,9 +446,7 @@ class FUSE(object):
("nothreads", "-s"),
)
- def __init__(
- self, operations, mountpoint, raw_fi=False, encoding="utf-8", **kwargs
- ):
+ def __init__(self, operations, mountpoint, raw_fi=False, encoding="utf-8", **kwargs):
"""
Setting raw_fi to True will cause FUSE to pass the fuse_file_info
@@ -526,9 +495,7 @@ class FUSE(object):
except ValueError:
old_handler = SIG_DFL
- err = _libfuse.fuse_main_real(
- len(args), argv, pointer(fuse_ops), sizeof(fuse_ops), None
- )
+ err = _libfuse.fuse_main_real(len(args), argv, pointer(fuse_ops), sizeof(fuse_ops), None)
try:
signal(SIGINT, old_handler)
@@ -572,9 +539,7 @@ class FUSE(object):
return self.fgetattr(path, buf, None)
def readlink(self, path, buf, bufsize):
- ret = self.operations("readlink", path.decode(self.encoding)).encode(
- self.encoding
- )
+ ret = self.operations("readlink", path.decode(self.encoding)).encode(self.encoding)
# copies a string into the given buffer
# (null terminated and truncated if necessary)
@@ -597,21 +562,15 @@ class FUSE(object):
def symlink(self, source, target):
"creates a symlink `target -> source` (e.g. ln -s source target)"
- return self.operations(
- "symlink", target.decode(self.encoding), source.decode(self.encoding)
- )
+ return self.operations("symlink", target.decode(self.encoding), source.decode(self.encoding))
def rename(self, old, new):
- return self.operations(
- "rename", old.decode(self.encoding), new.decode(self.encoding)
- )
+ return self.operations("rename", old.decode(self.encoding), new.decode(self.encoding))
def link(self, source, target):
"creates a hard link `target -> source` (e.g. ln source target)"
- return self.operations(
- "link", target.decode(self.encoding), source.decode(self.encoding)
- )
+ return self.operations("link", target.decode(self.encoding), source.decode(self.encoding))
def chmod(self, path, mode):
return self.operations("chmod", path.decode(self.encoding), mode)
@@ -643,17 +602,13 @@ class FUSE(object):
else:
fh = fip.contents.fh
- ret = self.operations(
- "read", self._decode_optional_path(path), size, offset, fh
- )
+ ret = self.operations("read", self._decode_optional_path(path), size, offset, fh)
if not ret:
return 0
retsize = len(ret)
- assert (
- retsize <= size
- ), "actual amount read {:d} greater than expected {:d}".format(retsize, size)
+ assert retsize <= size, "actual amount read {:d} greater than expected {:d}".format(retsize, size)
data = create_string_buffer(ret, retsize)
memmove(buf, data, retsize)
@@ -667,9 +622,7 @@ class FUSE(object):
else:
fh = fip.contents.fh
- return self.operations(
- "write", self._decode_optional_path(path), data, offset, fh
- )
+ return self.operations("write", self._decode_optional_path(path), data, offset, fh)
def statfs(self, path, buf):
stv = buf.contents
@@ -706,18 +659,11 @@ class FUSE(object):
def setxattr(self, path, name, value, size, options, *args):
return self.operations(
- "setxattr",
- path.decode(self.encoding),
- name.decode(self.encoding),
- string_at(value, size),
- options,
- *args
+ "setxattr", path.decode(self.encoding), name.decode(self.encoding), string_at(value, size), options, *args
)
def getxattr(self, path, name, value, size, *args):
- ret = self.operations(
- "getxattr", path.decode(self.encoding), name.decode(self.encoding), *args
- )
+ ret = self.operations("getxattr", path.decode(self.encoding), name.decode(self.encoding), *args)
retsize = len(ret)
# allow size queries
@@ -754,9 +700,7 @@ class FUSE(object):
return retsize
def removexattr(self, path, name):
- return self.operations(
- "removexattr", path.decode(self.encoding), name.decode(self.encoding)
- )
+ return self.operations("removexattr", path.decode(self.encoding), name.decode(self.encoding))
def opendir(self, path, fip):
# Ignore raw_fi
@@ -766,9 +710,7 @@ class FUSE(object):
def readdir(self, path, buf, filler, offset, fip):
# Ignore raw_fi
- for item in self.operations(
- "readdir", self._decode_optional_path(path), fip.contents.fh
- ):
+ for item in self.operations("readdir", self._decode_optional_path(path), fip.contents.fh):
if isinstance(item, basestring):
name, st, offset = item, None, 0
@@ -787,15 +729,11 @@ class FUSE(object):
def releasedir(self, path, fip):
# Ignore raw_fi
- return self.operations(
- "releasedir", self._decode_optional_path(path), fip.contents.fh
- )
+ return self.operations("releasedir", self._decode_optional_path(path), fip.contents.fh)
def fsyncdir(self, path, datasync, fip):
# Ignore raw_fi
- return self.operations(
- "fsyncdir", self._decode_optional_path(path), datasync, fip.contents.fh
- )
+ return self.operations("fsyncdir", self._decode_optional_path(path), datasync, fip.contents.fh)
def init(self, conn):
return self.operations("init", "/")
diff --git a/src/buildstream/_fuse/mount.py b/src/buildstream/_fuse/mount.py
index 4df2ed603..d586ea2d5 100644
--- a/src/buildstream/_fuse/mount.py
+++ b/src/buildstream/_fuse/mount.py
@@ -89,9 +89,7 @@ class Mount:
################################################
def __init__(self, fuse_mount_options=None):
- self._fuse_mount_options = (
- {} if fuse_mount_options is None else fuse_mount_options
- )
+ self._fuse_mount_options = {} if fuse_mount_options is None else fuse_mount_options
# _mount():
#
@@ -110,18 +108,14 @@ class Mount:
# Ensure the child process does not inherit our signal handlers, if the
# child wants to handle a signal then it will first set its own
# handler, and then unblock it.
- with _signals.blocked(
- [signal.SIGTERM, signal.SIGTSTP, signal.SIGINT], ignore=False
- ):
+ with _signals.blocked([signal.SIGTERM, signal.SIGTSTP, signal.SIGINT], ignore=False):
self.__process.start()
while not os.path.ismount(mountpoint):
if not self.__process.is_alive():
self.__logfile.seek(0)
stderr = self.__logfile.read()
- raise FuseMountError(
- "Unable to mount {}: {}".format(mountpoint, stderr.decode().strip())
- )
+ raise FuseMountError("Unable to mount {}: {}".format(mountpoint, stderr.decode().strip()))
time.sleep(1 / 100)
@@ -185,11 +179,7 @@ class Mount:
# Returns:
# (Operations): A FUSE Operations implementation
def create_operations(self):
- raise ImplError(
- "Mount subclass '{}' did not implement create_operations()".format(
- type(self).__name__
- )
- )
+ raise ImplError("Mount subclass '{}' did not implement create_operations()".format(type(self).__name__))
################################################
# Child Process #
@@ -211,9 +201,7 @@ class Mount:
# Ask the subclass to give us an Operations object
#
- self.__operations = (
- self.create_operations()
- ) # pylint: disable=assignment-from-no-return
+ self.__operations = self.create_operations() # pylint: disable=assignment-from-no-return
# Run fuse in foreground in this child process, internally libfuse
# will handle SIGTERM and gracefully exit its own little main loop.
diff --git a/src/buildstream/_gitsourcebase.py b/src/buildstream/_gitsourcebase.py
index 11f1d6572..1fcfe335e 100644
--- a/src/buildstream/_gitsourcebase.py
+++ b/src/buildstream/_gitsourcebase.py
@@ -62,9 +62,7 @@ class _GitMirror(SourceFetcher):
self.ref = ref
self.tags = tags
self.primary = primary
- self.mirror = os.path.join(
- source.get_mirror_directory(), utils.url_directory_name(url)
- )
+ self.mirror = os.path.join(source.get_mirror_directory(), utils.url_directory_name(url))
self.mark_download_url(url)
# Ensures that the mirror exists
@@ -81,9 +79,7 @@ class _GitMirror(SourceFetcher):
# system configured tmpdir is not on the same partition.
#
with self.source.tempdir() as tmpdir:
- url = self.source.translate_url(
- self.url, alias_override=alias_override, primary=self.primary
- )
+ url = self.source.translate_url(self.url, alias_override=alias_override, primary=self.primary)
self.source.call(
[self.source.host_git, "clone", "--mirror", "-n", url, tmpdir],
fail="Failed to clone git repository {}".format(url),
@@ -95,9 +91,7 @@ class _GitMirror(SourceFetcher):
except DirectoryExistsError:
# Another process was quicker to download this repository.
# Let's discard our own
- self.source.status(
- "{}: Discarding duplicate clone of {}".format(self.source, url)
- )
+ self.source.status("{}: Discarding duplicate clone of {}".format(self.source, url))
except OSError as e:
raise SourceError(
"{}: Failed to move cloned git repository {} from '{}' to '{}': {}".format(
@@ -106,9 +100,7 @@ class _GitMirror(SourceFetcher):
) from e
def _fetch(self, alias_override=None):
- url = self.source.translate_url(
- self.url, alias_override=alias_override, primary=self.primary
- )
+ url = self.source.translate_url(self.url, alias_override=alias_override, primary=self.primary)
if alias_override:
remote_name = utils.url_directory_name(alias_override)
@@ -142,13 +134,9 @@ class _GitMirror(SourceFetcher):
def fetch(self, alias_override=None): # pylint: disable=arguments-differ
# Resolve the URL for the message
- resolved_url = self.source.translate_url(
- self.url, alias_override=alias_override, primary=self.primary
- )
+ resolved_url = self.source.translate_url(self.url, alias_override=alias_override, primary=self.primary)
- with self.source.timed_activity(
- "Fetching from {}".format(resolved_url), silent_nested=True
- ):
+ with self.source.timed_activity("Fetching from {}".format(resolved_url), silent_nested=True):
self.ensure(alias_override)
if not self.has_ref():
self._fetch(alias_override)
@@ -163,25 +151,19 @@ class _GitMirror(SourceFetcher):
return False
# Check if the ref is really there
- rc = self.source.call(
- [self.source.host_git, "cat-file", "-t", self.ref], cwd=self.mirror
- )
+ rc = self.source.call([self.source.host_git, "cat-file", "-t", self.ref], cwd=self.mirror)
return rc == 0
def assert_ref(self):
if not self.has_ref():
raise SourceError(
- "{}: expected ref '{}' was not found in git repository: '{}'".format(
- self.source, self.ref, self.url
- )
+ "{}: expected ref '{}' was not found in git repository: '{}'".format(self.source, self.ref, self.url)
)
def latest_commit_with_tags(self, tracking, track_tags=False):
_, output = self.source.check_output(
[self.source.host_git, "rev-parse", tracking],
- fail="Unable to find commit for specified branch name '{}'".format(
- tracking
- ),
+ fail="Unable to find commit for specified branch name '{}'".format(tracking),
cwd=self.mirror,
)
ref = output.rstrip("\n")
@@ -190,15 +172,7 @@ class _GitMirror(SourceFetcher):
# Prefix the ref with the closest tag, if available,
# to make the ref human readable
exit_code, output = self.source.check_output(
- [
- self.source.host_git,
- "describe",
- "--tags",
- "--abbrev=40",
- "--long",
- ref,
- ],
- cwd=self.mirror,
+ [self.source.host_git, "describe", "--tags", "--abbrev=40", "--long", ref,], cwd=self.mirror,
)
if exit_code == 0:
ref = output.rstrip("\n")
@@ -214,8 +188,7 @@ class _GitMirror(SourceFetcher):
["--tags", "--first-parent"],
]:
exit_code, output = self.source.check_output(
- [self.source.host_git, "describe", "--abbrev=0", ref, *options],
- cwd=self.mirror,
+ [self.source.host_git, "describe", "--abbrev=0", ref, *options], cwd=self.mirror,
)
if exit_code == 0:
tag = output.strip()
@@ -224,9 +197,7 @@ class _GitMirror(SourceFetcher):
fail="Unable to resolve tag '{}'".format(tag),
cwd=self.mirror,
)
- exit_code = self.source.call(
- [self.source.host_git, "cat-file", "tag", tag], cwd=self.mirror
- )
+ exit_code = self.source.call([self.source.host_git, "cat-file", "tag", tag], cwd=self.mirror)
annotated = exit_code == 0
tags.add((tag, commit_ref.strip(), annotated))
@@ -240,17 +211,8 @@ class _GitMirror(SourceFetcher):
# case we're just checking out a specific commit and then removing the .git/
# directory.
self.source.call(
- [
- self.source.host_git,
- "clone",
- "--no-checkout",
- "--shared",
- self.mirror,
- fullpath,
- ],
- fail="Failed to create git mirror {} in directory: {}".format(
- self.mirror, fullpath
- ),
+ [self.source.host_git, "clone", "--no-checkout", "--shared", self.mirror, fullpath,],
+ fail="Failed to create git mirror {} in directory: {}".format(self.mirror, fullpath),
fail_temporarily=True,
)
@@ -271,9 +233,7 @@ class _GitMirror(SourceFetcher):
self.source.call(
[self.source.host_git, "clone", "--no-checkout", self.mirror, fullpath],
- fail="Failed to clone git mirror {} in directory: {}".format(
- self.mirror, fullpath
- ),
+ fail="Failed to clone git mirror {} in directory: {}".format(self.mirror, fullpath),
fail_temporarily=True,
)
@@ -292,20 +252,14 @@ class _GitMirror(SourceFetcher):
# List the submodules (path/url tuples) present at the given ref of this repo
def submodule_list(self):
modules = "{}:{}".format(self.ref, GIT_MODULES)
- exit_code, output = self.source.check_output(
- [self.source.host_git, "show", modules], cwd=self.mirror
- )
+ exit_code, output = self.source.check_output([self.source.host_git, "show", modules], cwd=self.mirror)
# If git show reports error code 128 here, we take it to mean there is
# no .gitmodules file to display for the given revision.
if exit_code == 128:
return
elif exit_code != 0:
- raise SourceError(
- "{plugin}: Failed to show gitmodules at ref {ref}".format(
- plugin=self, ref=self.ref
- )
- )
+ raise SourceError("{plugin}: Failed to show gitmodules at ref {ref}".format(plugin=self, ref=self.ref))
content = "\n".join([l.strip() for l in output.splitlines()])
@@ -331,9 +285,7 @@ class _GitMirror(SourceFetcher):
# object that corresponds to the submodule
_, output = self.source.check_output(
[self.source.host_git, "ls-tree", ref, submodule],
- fail="ls-tree failed for commit {} and submodule: {}".format(
- ref, submodule
- ),
+ fail="ls-tree failed for commit {} and submodule: {}".format(ref, submodule),
cwd=self.mirror,
)
@@ -345,26 +297,20 @@ class _GitMirror(SourceFetcher):
# fail if the commit hash is invalid
if len(submodule_commit) != 40:
raise SourceError(
- "{}: Error reading commit information for submodule '{}'".format(
- self.source, submodule
- )
+ "{}: Error reading commit information for submodule '{}'".format(self.source, submodule)
)
return submodule_commit
else:
detail = (
- "The submodule '{}' is defined either in the BuildStream source\n".format(
- submodule
- )
+ "The submodule '{}' is defined either in the BuildStream source\n".format(submodule)
+ "definition, or in a .gitmodules file. But the submodule was never added to the\n"
+ "underlying git repository with `git submodule add`."
)
self.source.warn(
- "{}: Ignoring inconsistent submodule '{}'".format(
- self.source, submodule
- ),
+ "{}: Ignoring inconsistent submodule '{}'".format(self.source, submodule),
detail=detail,
warning_token=WARN_INCONSISTENT_SUBMODULE,
)
@@ -398,9 +344,7 @@ class _GitMirror(SourceFetcher):
fail_temporarily=True,
cwd=self.mirror,
)
- self.source.warn(
- "refs {}..{}: {}".format(commit_ref, self.ref, out.splitlines())
- )
+ self.source.warn("refs {}..{}: {}".format(commit_ref, self.ref, out.splitlines()))
for line in out.splitlines():
rev = line.lstrip("-")
if line[0] == "-":
@@ -427,14 +371,7 @@ class _GitMirror(SourceFetcher):
)
commit_file.seek(0, 0)
self.source.call(
- [
- self.source.host_git,
- "hash-object",
- "-w",
- "-t",
- "commit",
- "--stdin",
- ],
+ [self.source.host_git, "hash-object", "-w", "-t", "commit", "--stdin",],
stdin=commit_file,
fail="Failed to add commit object {}".format(rev),
cwd=fullpath,
@@ -447,20 +384,11 @@ class _GitMirror(SourceFetcher):
for tag, commit_ref, annotated in self.tags:
if annotated:
with TemporaryFile(dir=tmpdir) as tag_file:
- tag_data = "object {}\ntype commit\ntag {}\n".format(
- commit_ref, tag
- )
+ tag_data = "object {}\ntype commit\ntag {}\n".format(commit_ref, tag)
tag_file.write(tag_data.encode("ascii"))
tag_file.seek(0, 0)
_, tag_ref = self.source.check_output(
- [
- self.source.host_git,
- "hash-object",
- "-w",
- "-t",
- "tag",
- "--stdin",
- ],
+ [self.source.host_git, "hash-object", "-w", "-t", "tag", "--stdin",],
stdin=tag_file,
fail="Failed to add tag object {}".format(tag),
cwd=fullpath,
@@ -518,9 +446,7 @@ class _GitSourceBase(Source):
self.track_tags = node.get_bool("track-tags", default=False)
self.original_url = node.get_str("url")
- self.mirror = self.BST_MIRROR_CLASS(
- self, "", self.original_url, ref, tags=tags, primary=True
- )
+ self.mirror = self.BST_MIRROR_CLASS(self, "", self.original_url, ref, tags=tags, primary=True)
self.tracking = node.get_str("track", None)
self.ref_format = node.get_enum("ref-format", _RefFormat, _RefFormat.SHA1)
@@ -529,8 +455,7 @@ class _GitSourceBase(Source):
# If it is missing both then we will be unable to track or build.
if self.mirror.ref is None and self.tracking is None:
raise SourceError(
- "{}: Git sources require a ref and/or track".format(self),
- reason="missing-track-and-ref",
+ "{}: Git sources require a ref and/or track".format(self), reason="missing-track-and-ref",
)
self.checkout_submodules = node.get_bool("checkout-submodules", default=True)
@@ -566,9 +491,7 @@ class _GitSourceBase(Source):
# from another location, it should not affect the cache key.
key = [self.original_url, self.mirror.ref]
if self.mirror.tags:
- tags = {
- tag: (commit, annotated) for tag, commit, annotated in self.mirror.tags
- }
+ tags = {tag: (commit, annotated) for tag, commit, annotated in self.mirror.tags}
key.append({"tags": tags})
# Only modify the cache key with checkout_submodules if it's something
@@ -582,9 +505,7 @@ class _GitSourceBase(Source):
key.append(self.submodule_overrides)
if self.submodule_checkout_overrides:
- key.append(
- {"submodule_checkout_overrides": self.submodule_checkout_overrides}
- )
+ key.append({"submodule_checkout_overrides": self.submodule_checkout_overrides})
return key
@@ -631,22 +552,16 @@ class _GitSourceBase(Source):
if not self.tracking:
# Is there a better way to check if a ref is given.
if self.mirror.ref is None:
- detail = (
- "Without a tracking branch ref can not be updated. Please "
- + "provide a ref or a track."
- )
+ detail = "Without a tracking branch ref can not be updated. Please " + "provide a ref or a track."
raise SourceError(
- "{}: No track or ref".format(self),
- detail=detail,
- reason="track-attempt-no-track",
+ "{}: No track or ref".format(self), detail=detail, reason="track-attempt-no-track",
)
return None
# Resolve the URL for the message
resolved_url = self.translate_url(self.mirror.url)
with self.timed_activity(
- "Tracking {} from {}".format(self.tracking, resolved_url),
- silent_nested=True,
+ "Tracking {} from {}".format(self.tracking, resolved_url), silent_nested=True,
):
self.mirror.ensure()
self.mirror._fetch()
@@ -660,9 +575,7 @@ class _GitSourceBase(Source):
# XXX: may wish to refactor this as some code dupe with stage()
self._refresh_submodules()
- with self.timed_activity(
- 'Setting up workspace "{}"'.format(directory), silent_nested=True
- ):
+ with self.timed_activity('Setting up workspace "{}"'.format(directory), silent_nested=True):
self.mirror.init_workspace(directory)
for mirror in self.submodules:
mirror.init_workspace(directory)
@@ -678,9 +591,7 @@ class _GitSourceBase(Source):
# Stage the main repo in the specified directory
#
- with self.timed_activity(
- "Staging {}".format(self.mirror.url), silent_nested=True
- ):
+ with self.timed_activity("Staging {}".format(self.mirror.url), silent_nested=True):
self.mirror.stage(directory)
for mirror in self.submodules:
mirror.stage(directory)
@@ -719,8 +630,7 @@ class _GitSourceBase(Source):
"{}: Invalid submodules specified".format(self),
warning_token=WARN_INVALID_SUBMODULE,
detail="The following submodules are specified in the source "
- "description but do not exist according to the repository\n\n"
- + "\n".join(detail),
+ "description but do not exist according to the repository\n\n" + "\n".join(detail),
)
# Warn about submodules which exist but have not been explicitly configured
@@ -741,28 +651,14 @@ class _GitSourceBase(Source):
ref_in_track = False
if self.tracking:
_, branch = self.check_output(
- [
- self.host_git,
- "branch",
- "--list",
- self.tracking,
- "--contains",
- self.mirror.ref,
- ],
+ [self.host_git, "branch", "--list", self.tracking, "--contains", self.mirror.ref,],
cwd=self.mirror.mirror,
)
if branch:
ref_in_track = True
else:
_, tag = self.check_output(
- [
- self.host_git,
- "tag",
- "--list",
- self.tracking,
- "--contains",
- self.mirror.ref,
- ],
+ [self.host_git, "tag", "--list", self.tracking, "--contains", self.mirror.ref,],
cwd=self.mirror.mirror,
)
if tag:
@@ -772,13 +668,9 @@ class _GitSourceBase(Source):
detail = (
"The ref provided for the element does not exist locally "
+ "in the provided track branch / tag '{}'.\n".format(self.tracking)
- + "You may wish to track the element to update the ref from '{}' ".format(
- self.tracking
- )
+ + "You may wish to track the element to update the ref from '{}' ".format(self.tracking)
+ "with `bst source track`,\n"
- + "or examine the upstream at '{}' for the specific ref.".format(
- self.mirror.url
- )
+ + "or examine the upstream at '{}' for the specific ref.".format(self.mirror.url)
)
self.warn(
diff --git a/src/buildstream/_includes.py b/src/buildstream/_includes.py
index 860b1328f..f8737c1d1 100644
--- a/src/buildstream/_includes.py
+++ b/src/buildstream/_includes.py
@@ -32,14 +32,10 @@ class Includes:
if current_loader is None:
current_loader = self._loader
- includes_node = node.get_node(
- "(@)", allowed_types=[ScalarNode, SequenceNode], allow_none=True
- )
+ includes_node = node.get_node("(@)", allowed_types=[ScalarNode, SequenceNode], allow_none=True)
if includes_node:
- if (
- type(includes_node) is ScalarNode
- ): # pylint: disable=unidiomatic-typecheck
+ if type(includes_node) is ScalarNode: # pylint: disable=unidiomatic-typecheck
includes = [includes_node.as_str()]
else:
includes = includes_node.as_str_list()
@@ -50,9 +46,7 @@ class Includes:
if only_local and ":" in include:
continue
try:
- include_node, file_path, sub_loader = self._include_file(
- include, current_loader
- )
+ include_node, file_path, sub_loader = self._include_file(include, current_loader)
except LoadError as e:
include_provenance = includes_node.get_provenance()
if e.reason == LoadErrorReason.MISSING_FILE:
@@ -64,9 +58,7 @@ class Includes:
message = "{}: Include block references a directory instead of a file: '{}'.".format(
include_provenance, include
)
- raise LoadError(
- message, LoadErrorReason.LOADING_DIRECTORY
- ) from e
+ raise LoadError(message, LoadErrorReason.LOADING_DIRECTORY) from e
# Otherwise, we don't know the reason, so just raise
raise
@@ -74,9 +66,7 @@ class Includes:
if file_path in included:
include_provenance = includes_node.get_provenance()
raise LoadError(
- "{}: trying to recursively include {}".format(
- include_provenance, file_path
- ),
+ "{}: trying to recursively include {}".format(include_provenance, file_path),
LoadErrorReason.RECURSIVE_INCLUDE,
)
# Because the included node will be modified, we need
@@ -87,10 +77,7 @@ class Includes:
try:
included.add(file_path)
self.process(
- include_node,
- included=included,
- current_loader=sub_loader,
- only_local=only_local,
+ include_node, included=included, current_loader=sub_loader, only_local=only_local,
)
finally:
included.remove(file_path)
@@ -99,10 +86,7 @@ class Includes:
for value in node.values():
self._process_value(
- value,
- included=included,
- current_loader=current_loader,
- only_local=only_local,
+ value, included=included, current_loader=current_loader, only_local=only_local,
)
# _include_file()
@@ -126,12 +110,7 @@ class Includes:
file_path = os.path.join(directory, include)
key = (current_loader, file_path)
if key not in self._loaded:
- self._loaded[key] = _yaml.load(
- file_path,
- shortname=shortname,
- project=project,
- copy_tree=self._copy_tree,
- )
+ self._loaded[key] = _yaml.load(file_path, shortname=shortname, project=project, copy_tree=self._copy_tree,)
return self._loaded[key], file_path, current_loader
# _process_value()
@@ -143,23 +122,15 @@ class Includes:
# included (set): Fail for recursion if trying to load any files in this set
# current_loader (Loader): Use alternative loader (for junction files)
# only_local (bool): Whether to ignore junction files
- def _process_value(
- self, value, *, included=set(), current_loader=None, only_local=False
- ):
+ def _process_value(self, value, *, included=set(), current_loader=None, only_local=False):
value_type = type(value)
if value_type is MappingNode:
self.process(
- value,
- included=included,
- current_loader=current_loader,
- only_local=only_local,
+ value, included=included, current_loader=current_loader, only_local=only_local,
)
elif value_type is SequenceNode:
for v in value:
self._process_value(
- v,
- included=included,
- current_loader=current_loader,
- only_local=only_local,
+ v, included=included, current_loader=current_loader, only_local=only_local,
)
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index d703bd711..729b3c5e8 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -74,9 +74,7 @@ class Loader:
self._context = context
self._options = project.options # Project options (OptionPool)
self._basedir = basedir # Base project directory
- self._first_pass_options = (
- project.first_pass_config.options
- ) # Project options (OptionPool)
+ self._first_pass_options = project.first_pass_config.options # Project options (OptionPool)
self._parent = parent # The parent loader
self._fetch_subprojects = fetch_subprojects
@@ -101,9 +99,7 @@ class Loader:
# Raises: LoadError
#
# Returns: The toplevel LoadElement
- def load(
- self, targets, task, rewritable=False, ticker=None, ignore_workspaces=False
- ):
+ def load(self, targets, task, rewritable=False, ticker=None, ignore_workspaces=False):
for filename in targets:
if os.path.isabs(filename):
@@ -111,9 +107,7 @@ class Loader:
# Expect that the caller gives us the right thing at least ?
raise LoadError(
"Target '{}' was not specified as a relative "
- "path to the base project directory: {}".format(
- filename, self._basedir
- ),
+ "path to the base project directory: {}".format(filename, self._basedir),
LoadErrorReason.INVALID_DATA,
)
@@ -155,11 +149,7 @@ class Loader:
# Finally, wrap what we have into LoadElements and return the target
#
- ret.append(
- loader._collect_element(
- element, task, ignore_workspaces=ignore_workspaces
- )
- )
+ ret.append(loader._collect_element(element, task, ignore_workspaces=ignore_workspaces))
self._clean_caches()
@@ -241,9 +231,7 @@ class Loader:
# Load the data and process any conditional statements therein
fullpath = os.path.join(self._basedir, filename)
try:
- node = _yaml.load(
- fullpath, shortname=filename, copy_tree=rewritable, project=self.project
- )
+ node = _yaml.load(fullpath, shortname=filename, copy_tree=rewritable, project=self.project)
except LoadError as e:
if e.reason == LoadErrorReason.MISSING_FILE:
@@ -252,9 +240,7 @@ class Loader:
filename, self.project.junction.name
)
else:
- message = "Could not find element '{}' in elements directory '{}'".format(
- filename, self._basedir
- )
+ message = "Could not find element '{}' in elements directory '{}'".format(filename, self._basedir)
if provenance:
message = "{}: {}".format(provenance, message)
@@ -265,14 +251,10 @@ class Loader:
detail = None
elements_dir = os.path.relpath(self._basedir, self.project.directory)
element_relpath = os.path.relpath(filename, elements_dir)
- if filename.startswith(elements_dir) and os.path.exists(
- os.path.join(self._basedir, element_relpath)
- ):
+ if filename.startswith(elements_dir) and os.path.exists(os.path.join(self._basedir, element_relpath)):
detail = "Did you mean '{}'?".format(element_relpath)
- raise LoadError(
- message, LoadErrorReason.MISSING_FILE, detail=detail
- ) from e
+ raise LoadError(message, LoadErrorReason.MISSING_FILE, detail=detail) from e
if e.reason == LoadErrorReason.LOADING_DIRECTORY:
# If a <directory>.bst file exists in the element path,
@@ -284,9 +266,7 @@ class Loader:
if os.path.exists(os.path.join(self._basedir, filename + ".bst")):
element_name = filename + ".bst"
detail = "Did you mean '{}'?\n".format(element_name)
- raise LoadError(
- message, LoadErrorReason.LOADING_DIRECTORY, detail=detail
- ) from e
+ raise LoadError(message, LoadErrorReason.LOADING_DIRECTORY, detail=detail) from e
# Otherwise, we don't know the reason, so just raise
raise
@@ -355,14 +335,9 @@ class Loader:
if dep.junction:
self._load_file(dep.junction, rewritable, ticker, dep.provenance)
loader = self._get_loader(
- dep.junction,
- rewritable=rewritable,
- ticker=ticker,
- provenance=dep.provenance,
- )
- dep_element = loader._load_file(
- dep.name, rewritable, ticker, dep.provenance
+ dep.junction, rewritable=rewritable, ticker=ticker, provenance=dep.provenance,
)
+ dep_element = loader._load_file(dep.name, rewritable, ticker, dep.provenance)
else:
dep_element = self._elements.get(dep.name)
@@ -370,19 +345,14 @@ class Loader:
# The loader does not have this available so we need to
# either recursively cause it to be loaded, or else we
# need to push this onto the loader queue in this loader
- dep_element = self._load_file_no_deps(
- dep.name, rewritable, dep.provenance
- )
+ dep_element = self._load_file_no_deps(dep.name, rewritable, dep.provenance)
dep_deps = extract_depends_from_node(dep_element.node)
loader_queue.append((dep_element, list(reversed(dep_deps)), []))
# Pylint is not very happy about Cython and can't understand 'node' is a 'MappingNode'
- if (
- dep_element.node.get_str(Symbol.KIND) == "junction"
- ): # pylint: disable=no-member
+ if dep_element.node.get_str(Symbol.KIND) == "junction": # pylint: disable=no-member
raise LoadError(
- "{}: Cannot depend on junction".format(dep.provenance),
- LoadErrorReason.INVALID_DATA,
+ "{}: Cannot depend on junction".format(dep.provenance), LoadErrorReason.INVALID_DATA,
)
# All is well, push the dependency onto the LoadElement
@@ -429,16 +399,12 @@ class Loader:
# Create `chain`, the loop of element dependencies from this
# element back to itself, by trimming everything before this
# element from the sequence under consideration.
- chain = [
- element.full_name
- for element in sequence[sequence.index(element) :]
- ]
+ chain = [element.full_name for element in sequence[sequence.index(element) :]]
chain.append(element.full_name)
raise LoadError(
- (
- "Circular dependency detected at element: {}\n"
- + "Dependency chain: {}"
- ).format(element.full_name, " -> ".join(chain)),
+ ("Circular dependency detected at element: {}\n" + "Dependency chain: {}").format(
+ element.full_name, " -> ".join(chain)
+ ),
LoadErrorReason.CIRCULAR_DEPENDENCY,
)
if element not in validated:
@@ -488,9 +454,7 @@ class Loader:
if workspace and not ignore_workspaces:
workspace_node = {"kind": "workspace"}
workspace_node["path"] = workspace.get_absolute_path()
- workspace_node["ref"] = str(
- workspace.to_dict().get("last_successful", "ignored")
- )
+ workspace_node["ref"] = str(workspace.to_dict().get("last_successful", "ignored"))
node[Symbol.SOURCES] = [workspace_node]
skip_workspace = False
@@ -507,9 +471,7 @@ class Loader:
directory = source.get_str(Symbol.DIRECTORY, default=None)
if directory:
del source[Symbol.DIRECTORY]
- meta_source = MetaSource(
- element.name, index, element_kind, kind, source, directory
- )
+ meta_source = MetaSource(element.name, index, element_kind, kind, source, directory)
meta_sources.append(meta_source)
meta_element = MetaElement(
@@ -548,11 +510,7 @@ class Loader:
#
def _collect_element(self, top_element, task, ignore_workspaces=False):
element_queue = [top_element]
- meta_element_queue = [
- self._collect_element_no_deps(
- top_element, task, ignore_workspaces=ignore_workspaces
- )
- ]
+ meta_element_queue = [self._collect_element_no_deps(top_element, task, ignore_workspaces=ignore_workspaces)]
while element_queue:
element = element_queue.pop()
@@ -569,9 +527,7 @@ class Loader:
name = dep.element.name
if name not in loader._meta_elements:
- meta_dep = loader._collect_element_no_deps(
- dep.element, task, ignore_workspaces=ignore_workspaces
- )
+ meta_dep = loader._collect_element_no_deps(dep.element, task, ignore_workspaces=ignore_workspaces)
element_queue.append(dep.element)
meta_element_queue.append(meta_dep)
else:
@@ -598,9 +554,7 @@ class Loader:
# Raises: LoadError
#
# Returns: A Loader or None if specified junction does not exist
- def _get_loader(
- self, filename, *, rewritable=False, ticker=None, level=0, provenance=None
- ):
+ def _get_loader(self, filename, *, rewritable=False, ticker=None, level=0, provenance=None):
provenance_str = ""
if provenance is not None:
@@ -626,11 +580,7 @@ class Loader:
# junctions in the parent take precedence over junctions defined
# in subprojects
loader = self._parent._get_loader(
- filename,
- rewritable=rewritable,
- ticker=ticker,
- level=level + 1,
- provenance=provenance,
+ filename, rewritable=rewritable, ticker=ticker, level=level + 1, provenance=provenance,
)
if loader:
self._loaders[filename] = loader
@@ -662,14 +612,10 @@ class Loader:
#
# Any task counting *inside* the junction will be handled by
# its loader.
- meta_element = self._collect_element_no_deps(
- self._elements[filename], _NO_PROGRESS
- )
+ meta_element = self._collect_element_no_deps(self._elements[filename], _NO_PROGRESS)
if meta_element.kind != "junction":
raise LoadError(
- "{}{}: Expected junction but element kind is {}".format(
- provenance_str, filename, meta_element.kind
- ),
+ "{}{}: Expected junction but element kind is {}".format(provenance_str, filename, meta_element.kind),
LoadErrorReason.INVALID_DATA,
)
@@ -688,8 +634,7 @@ class Loader:
# but since we haven't loaded those yet that's impossible.
if self._elements[filename].dependencies:
raise LoadError(
- "Dependencies are forbidden for 'junction' elements",
- LoadErrorReason.INVALID_JUNCTION,
+ "Dependencies are forbidden for 'junction' elements", LoadErrorReason.INVALID_JUNCTION,
)
element = Element._new_from_meta(meta_element)
@@ -699,28 +644,17 @@ class Loader:
# find loader for that project.
if element.target:
subproject_loader = self._get_loader(
- element.target_junction,
- rewritable=rewritable,
- ticker=ticker,
- level=level,
- provenance=provenance,
+ element.target_junction, rewritable=rewritable, ticker=ticker, level=level, provenance=provenance,
)
loader = subproject_loader._get_loader(
- element.target_element,
- rewritable=rewritable,
- ticker=ticker,
- level=level,
- provenance=provenance,
+ element.target_element, rewritable=rewritable, ticker=ticker, level=level, provenance=provenance,
)
self._loaders[filename] = loader
return loader
# Handle the case where a subproject needs to be fetched
#
- if (
- element._get_consistency() >= Consistency.RESOLVED
- and not element._source_cached()
- ):
+ if element._get_consistency() >= Consistency.RESOLVED and not element._source_cached():
if ticker:
ticker(filename, "Fetching subproject")
self._fetch_subprojects([element])
@@ -728,13 +662,9 @@ class Loader:
# Handle the case where a subproject has no ref
#
elif element._get_consistency() == Consistency.INCONSISTENT:
- detail = "Try tracking the junction element with `bst source track {}`".format(
- filename
- )
+ detail = "Try tracking the junction element with `bst source track {}`".format(filename)
raise LoadError(
- "{}Subproject has no ref for junction: {}".format(
- provenance_str, filename
- ),
+ "{}Subproject has no ref for junction: {}".format(provenance_str, filename),
LoadErrorReason.SUBPROJECT_INCONSISTENT,
detail=detail,
)
@@ -747,11 +677,7 @@ class Loader:
# Stage sources
element._set_required()
basedir = os.path.join(
- self.project.directory,
- ".bst",
- "staged-junctions",
- filename,
- element._get_cache_key(),
+ self.project.directory, ".bst", "staged-junctions", filename, element._get_cache_key(),
)
if not os.path.exists(basedir):
os.makedirs(basedir, exist_ok=True)
@@ -773,17 +699,12 @@ class Loader:
except LoadError as e:
if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
message = (
- provenance_str
- + "Could not find the project.conf file in the project "
+ provenance_str + "Could not find the project.conf file in the project "
"referred to by junction element '{}'.".format(element.name)
)
if element.path:
- message += " Was expecting it at path '{}' in the junction's source.".format(
- element.path
- )
- raise LoadError(
- message=message, reason=LoadErrorReason.INVALID_JUNCTION
- ) from e
+ message += " Was expecting it at path '{}' in the junction's source.".format(element.path)
+ raise LoadError(message=message, reason=LoadErrorReason.INVALID_JUNCTION) from e
# Otherwise, we don't know the reason, so just raise
raise
@@ -817,9 +738,7 @@ class Loader:
return None, junction_path[-1], self
else:
self._load_file(junction_path[-2], rewritable, ticker)
- loader = self._get_loader(
- junction_path[-2], rewritable=rewritable, ticker=ticker
- )
+ loader = self._get_loader(junction_path[-2], rewritable=rewritable, ticker=ticker)
return junction_path[-2], junction_path[-1], loader
# Print a warning message, checks warning_token against project configuration
diff --git a/src/buildstream/_loader/metasource.py b/src/buildstream/_loader/metasource.py
index bb83a6bc8..5466d3aa5 100644
--- a/src/buildstream/_loader/metasource.py
+++ b/src/buildstream/_loader/metasource.py
@@ -32,9 +32,7 @@ class MetaSource:
# config: The configuration data for the source
# first_pass: This source will be used with first project pass configuration (used for junctions).
#
- def __init__(
- self, element_name, element_index, element_kind, kind, config, directory
- ):
+ def __init__(self, element_name, element_index, element_kind, kind, config, directory):
self.element_name = element_name
self.element_index = element_index
self.element_kind = element_kind
diff --git a/src/buildstream/_message.py b/src/buildstream/_message.py
index 79d71441c..d18590885 100644
--- a/src/buildstream/_message.py
+++ b/src/buildstream/_message.py
@@ -69,19 +69,13 @@ class Message:
):
self.message_type = message_type # Message type
self.message = message # The message string
- self.element_name = (
- element_name # The instance element name of the issuing plugin
- )
+ self.element_name = element_name # The instance element name of the issuing plugin
self.element_key = element_key # The display key of the issuing plugin element
self.detail = detail # An additional detail string
- self.action_name = (
- action_name # Name of the task queue (fetch, refresh, build, etc)
- )
+ self.action_name = action_name # Name of the task queue (fetch, refresh, build, etc)
self.elapsed = elapsed # The elapsed time, in timed messages
self.logfile = logfile # The log file path where commands took place
- self.sandbox = (
- sandbox # Whether the error that caused this message used a sandbox
- )
+ self.sandbox = sandbox # Whether the error that caused this message used a sandbox
self.pid = os.getpid() # The process pid
self.scheduler = scheduler # Whether this is a scheduler level message
self.creation_time = datetime.datetime.now()
diff --git a/src/buildstream/_messenger.py b/src/buildstream/_messenger.py
index 687d64ebf..bf27f5620 100644
--- a/src/buildstream/_messenger.py
+++ b/src/buildstream/_messenger.py
@@ -159,18 +159,11 @@ class Messenger:
# silent_nested (bool): If True, all but _message.unconditional_messages are silenced
#
@contextmanager
- def timed_activity(
- self, activity_name, *, element_name=None, detail=None, silent_nested=False
- ):
+ def timed_activity(self, activity_name, *, element_name=None, detail=None, silent_nested=False):
with self._timed_suspendable() as timedata:
try:
# Push activity depth for status messages
- message = Message(
- MessageType.START,
- activity_name,
- detail=detail,
- element_name=element_name,
- )
+ message = Message(MessageType.START, activity_name, detail=detail, element_name=element_name,)
self.message(message)
with self.silence(actually_silence=silent_nested):
yield
@@ -179,22 +172,12 @@ class Messenger:
# Note the failure in status messages and reraise, the scheduler
# expects an error when there is an error.
elapsed = datetime.datetime.now() - timedata.start_time
- message = Message(
- MessageType.FAIL,
- activity_name,
- elapsed=elapsed,
- element_name=element_name,
- )
+ message = Message(MessageType.FAIL, activity_name, elapsed=elapsed, element_name=element_name,)
self.message(message)
raise
elapsed = datetime.datetime.now() - timedata.start_time
- message = Message(
- MessageType.SUCCESS,
- activity_name,
- elapsed=elapsed,
- element_name=element_name,
- )
+ message = Message(MessageType.SUCCESS, activity_name, elapsed=elapsed, element_name=element_name,)
self.message(message)
# simple_task()
@@ -211,14 +194,10 @@ class Messenger:
# Task: A Task object that represents this activity, principally used to report progress
#
@contextmanager
- def simple_task(
- self, activity_name, *, element_name=None, full_name=None, silent_nested=False
- ):
+ def simple_task(self, activity_name, *, element_name=None, full_name=None, silent_nested=False):
# Bypass use of State when none exists (e.g. tests)
if not self._state:
- with self.timed_activity(
- activity_name, element_name=element_name, silent_nested=silent_nested
- ):
+ with self.timed_activity(activity_name, element_name=element_name, silent_nested=silent_nested):
yield
return
@@ -227,9 +206,7 @@ class Messenger:
with self._timed_suspendable() as timedata:
try:
- message = Message(
- MessageType.START, activity_name, element_name=element_name
- )
+ message = Message(MessageType.START, activity_name, element_name=element_name)
self.message(message)
task = self._state.add_task(activity_name, full_name)
@@ -243,12 +220,7 @@ class Messenger:
except BstError:
elapsed = datetime.datetime.now() - timedata.start_time
- message = Message(
- MessageType.FAIL,
- activity_name,
- elapsed=elapsed,
- element_name=element_name,
- )
+ message = Message(MessageType.FAIL, activity_name, elapsed=elapsed, element_name=element_name,)
self.message(message)
raise
finally:
@@ -262,17 +234,11 @@ class Messenger:
if task.current_progress is not None and elapsed > _DISPLAY_LIMIT:
if task.maximum_progress is not None:
- detail = "{} of {} subtasks processed".format(
- task.current_progress, task.maximum_progress
- )
+ detail = "{} of {} subtasks processed".format(task.current_progress, task.maximum_progress)
else:
detail = "{} subtasks processed".format(task.current_progress)
message = Message(
- MessageType.SUCCESS,
- activity_name,
- elapsed=elapsed,
- detail=detail,
- element_name=element_name,
+ MessageType.SUCCESS, activity_name, elapsed=elapsed, detail=detail, element_name=element_name,
)
self.message(message)
@@ -308,9 +274,7 @@ class Messenger:
# Create the fully qualified logfile in the log directory,
# appending the pid and .log extension at the end.
- self._log_filename = os.path.join(
- logdir, "{}.{}.log".format(filename, os.getpid())
- )
+ self._log_filename = os.path.join(logdir, "{}.{}.log".format(filename, os.getpid()))
# Ensure the directory exists first
directory = os.path.dirname(self._log_filename)
diff --git a/src/buildstream/_options/option.py b/src/buildstream/_options/option.py
index f039ca28a..71d2f12f3 100644
--- a/src/buildstream/_options/option.py
+++ b/src/buildstream/_options/option.py
@@ -67,9 +67,7 @@ class Option:
# Assert valid symbol name for variable name
if self.variable is not None:
- _assert_symbol_name(
- self.variable, "variable name", ref_node=node.get_node("variable")
- )
+ _assert_symbol_name(self.variable, "variable name", ref_node=node.get_node("variable"))
# load_value()
#
diff --git a/src/buildstream/_options/optionarch.py b/src/buildstream/_options/optionarch.py
index ed7656ea3..2d663f0ef 100644
--- a/src/buildstream/_options/optionarch.py
+++ b/src/buildstream/_options/optionarch.py
@@ -54,16 +54,12 @@ class OptionArch(OptionEnum):
# Do not terminate the loop early to ensure we validate
# all values in the list.
except PlatformError as e:
- provenance = (
- node.get_sequence("values").scalar_at(index).get_provenance()
- )
+ provenance = node.get_sequence("values").scalar_at(index).get_provenance()
prefix = ""
if provenance:
prefix = "{}: ".format(provenance)
raise LoadError(
- "{}Invalid value for {} option '{}': {}".format(
- prefix, self.OPTION_TYPE, self.name, e
- ),
+ "{}Invalid value for {} option '{}': {}".format(prefix, self.OPTION_TYPE, self.name, e),
LoadErrorReason.INVALID_DATA,
)
diff --git a/src/buildstream/_options/optionbool.py b/src/buildstream/_options/optionbool.py
index d8201de51..c0c1271e9 100644
--- a/src/buildstream/_options/optionbool.py
+++ b/src/buildstream/_options/optionbool.py
@@ -48,8 +48,7 @@ class OptionBool(Option):
self.value = False
else:
raise LoadError(
- "Invalid value for boolean option {}: {}".format(self.name, value),
- LoadErrorReason.INVALID_DATA,
+ "Invalid value for boolean option {}: {}".format(self.name, value), LoadErrorReason.INVALID_DATA,
)
def get_value(self):
diff --git a/src/buildstream/_options/optionenum.py b/src/buildstream/_options/optionenum.py
index 80d0fa156..d30f45696 100644
--- a/src/buildstream/_options/optionenum.py
+++ b/src/buildstream/_options/optionenum.py
@@ -81,9 +81,7 @@ class OptionEnum(Option):
else:
prefix = ""
raise LoadError(
- "{}Invalid value for {} option '{}': {}\n".format(
- prefix, self.OPTION_TYPE, self.name, value
- )
+ "{}Invalid value for {} option '{}': {}\n".format(prefix, self.OPTION_TYPE, self.name, value)
+ "Valid values: {}".format(", ".join(self.values)),
LoadErrorReason.INVALID_DATA,
)
diff --git a/src/buildstream/_options/optionflags.py b/src/buildstream/_options/optionflags.py
index 5977930d4..82ede5649 100644
--- a/src/buildstream/_options/optionflags.py
+++ b/src/buildstream/_options/optionflags.py
@@ -90,9 +90,7 @@ class OptionFlags(Option):
else:
prefix = ""
raise LoadError(
- "{}Invalid value for flags option '{}': {}\n".format(
- prefix, self.name, value
- )
+ "{}Invalid value for flags option '{}': {}\n".format(prefix, self.name, value)
+ "Valid values: {}".format(", ".join(self.values)),
LoadErrorReason.INVALID_DATA,
)
diff --git a/src/buildstream/_options/optionpool.py b/src/buildstream/_options/optionpool.py
index 3b58a5904..aa1c62a4f 100644
--- a/src/buildstream/_options/optionpool.py
+++ b/src/buildstream/_options/optionpool.py
@@ -88,10 +88,7 @@ class OptionPool:
# Assert that the option name is a valid symbol
_assert_symbol_name(
- option_name,
- "option name",
- ref_node=option_definition,
- allow_dashes=False,
+ option_name, "option name", ref_node=option_definition, allow_dashes=False,
)
opt_type_name = option_definition.get_enum("type", OptionTypes)
@@ -115,8 +112,7 @@ class OptionPool:
except KeyError as e:
p = option_value.get_provenance()
raise LoadError(
- "{}: Unknown option '{}' specified".format(p, option_name),
- LoadErrorReason.INVALID_DATA,
+ "{}: Unknown option '{}' specified".format(p, option_name), LoadErrorReason.INVALID_DATA,
) from e
option.load_value(node, transform=transform)
@@ -136,9 +132,7 @@ class OptionPool:
except KeyError as e:
if not ignore_unknown:
raise LoadError(
- "Unknown option '{}' specified on the command line".format(
- option_name
- ),
+ "Unknown option '{}' specified on the command line".format(option_name),
LoadErrorReason.INVALID_DATA,
) from e
else:
@@ -237,9 +231,7 @@ class OptionPool:
# Variables must be resolved at this point.
#
try:
- template_string = "{{% if {} %}} True {{% else %}} False {{% endif %}}".format(
- expression
- )
+ template_string = "{{% if {} %}} True {{% else %}} False {{% endif %}}".format(expression)
template = self._environment.from_string(template_string)
context = template.new_context(self._variables, shared=True)
result = template.root_render_func(context)
@@ -252,13 +244,11 @@ class OptionPool:
return False
else: # pragma: nocover
raise LoadError(
- "Failed to evaluate expression: {}".format(expression),
- LoadErrorReason.EXPRESSION_FAILED,
+ "Failed to evaluate expression: {}".format(expression), LoadErrorReason.EXPRESSION_FAILED,
)
except jinja2.exceptions.TemplateError as e:
raise LoadError(
- "Failed to evaluate expression ({}): {}".format(expression, e),
- LoadErrorReason.EXPRESSION_FAILED,
+ "Failed to evaluate expression ({}): {}".format(expression, e), LoadErrorReason.EXPRESSION_FAILED,
)
# Recursion assistent for lists, in case there
@@ -286,9 +276,7 @@ class OptionPool:
# it being overwritten by a later assertion which might also trigger.
if assertion is not None:
p = node.get_scalar("(!)").get_provenance()
- raise LoadError(
- "{}: {}".format(p, assertion.strip()), LoadErrorReason.USER_ASSERTION
- )
+ raise LoadError("{}: {}".format(p, assertion.strip()), LoadErrorReason.USER_ASSERTION)
if conditions is not None:
del node["(?)"]
@@ -298,9 +286,7 @@ class OptionPool:
if len(tuples) > 1:
provenance = condition.get_provenance()
raise LoadError(
- "{}: Conditional statement has more than one key".format(
- provenance
- ),
+ "{}: Conditional statement has more than one key".format(provenance),
LoadErrorReason.INVALID_DATA,
)
@@ -312,14 +298,10 @@ class OptionPool:
provenance = condition.get_provenance()
raise LoadError("{}: {}".format(provenance, e), e.reason) from e
- if (
- type(value) is not MappingNode
- ): # pylint: disable=unidiomatic-typecheck
+ if type(value) is not MappingNode: # pylint: disable=unidiomatic-typecheck
provenance = condition.get_provenance()
raise LoadError(
- "{}: Only values of type 'dict' can be composed.".format(
- provenance
- ),
+ "{}: Only values of type 'dict' can be composed.".format(provenance),
LoadErrorReason.ILLEGAL_COMPOSITE,
)
diff --git a/src/buildstream/_pipeline.py b/src/buildstream/_pipeline.py
index ace93acef..9dac30bf7 100644
--- a/src/buildstream/_pipeline.py
+++ b/src/buildstream/_pipeline.py
@@ -103,18 +103,12 @@ class Pipeline:
# First concatenate all the lists for the loader's sake
targets = list(itertools.chain(*target_groups))
- with PROFILER.profile(
- Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, "-") for t in targets)
- ):
- elements = self._project.load_elements(
- targets, rewritable=rewritable, ignore_workspaces=ignore_workspaces
- )
+ with PROFILER.profile(Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, "-") for t in targets)):
+ elements = self._project.load_elements(targets, rewritable=rewritable, ignore_workspaces=ignore_workspaces)
# Now create element groups to match the input target groups
elt_iter = iter(elements)
- element_groups = [
- [next(elt_iter) for i in range(len(group))] for group in target_groups
- ]
+ element_groups = [[next(elt_iter) for i in range(len(group))] for group in target_groups]
return tuple(element_groups)
@@ -141,9 +135,7 @@ class Pipeline:
# targets (list of Element): The list of toplevel element targets
#
def resolve_elements(self, targets):
- with self._context.messenger.simple_task(
- "Resolving cached state", silent_nested=True
- ) as task:
+ with self._context.messenger.simple_task("Resolving cached state", silent_nested=True) as task:
# We need to go through the project to access the loader
if task:
task.set_maximum_progress(self._project.loader.loaded)
@@ -174,9 +166,7 @@ class Pipeline:
# targets (list [Element]): The list of element targets
#
def check_remotes(self, targets):
- with self._context.messenger.simple_task(
- "Querying remotes for cached status", silent_nested=True
- ) as task:
+ with self._context.messenger.simple_task("Querying remotes for cached status", silent_nested=True) as task:
task.set_maximum_progress(len(targets))
for element in targets:
@@ -219,9 +209,7 @@ class Pipeline:
def plan(self, elements):
# Keep locally cached elements in the plan if remote artifact cache is used
# to allow pulling artifact with strict cache key, if available.
- plan_cached = (
- not self._context.get_strict() and self._artifacts.has_fetch_remotes()
- )
+ plan_cached = not self._context.get_strict() and self._artifacts.has_fetch_remotes()
return _Planner().plan(elements, plan_cached)
@@ -250,8 +238,7 @@ class Pipeline:
new_elm = t._get_source_element()
if new_elm != t and not silent:
self._message(
- MessageType.INFO,
- "Element '{}' redirected to '{}'".format(t.name, new_elm.name),
+ MessageType.INFO, "Element '{}' redirected to '{}'".format(t.name, new_elm.name),
)
if new_elm not in elements:
elements.append(new_elm)
@@ -307,11 +294,7 @@ class Pipeline:
# Build a list of 'intersection' elements, i.e. the set of
# elements that lie on the border closest to excepted elements
# between excepted and target elements.
- intersection = list(
- itertools.chain.from_iterable(
- find_intersection(element) for element in except_targets
- )
- )
+ intersection = list(itertools.chain.from_iterable(find_intersection(element) for element in except_targets))
# Now use this set of elements to traverse the targeted
# elements, except 'intersection' elements and their unique
@@ -432,30 +415,21 @@ class Pipeline:
if inconsistent:
detail = "Exact versions are missing for the following elements:\n\n"
for element in inconsistent:
- detail += " Element: {} is inconsistent\n".format(
- element._get_full_name()
- )
+ detail += " Element: {} is inconsistent\n".format(element._get_full_name())
for source in element.sources():
if source._get_consistency() == Consistency.INCONSISTENT:
detail += " {} is missing ref\n".format(source)
detail += "\n"
detail += "Try tracking these elements first with `bst source track`\n"
- raise PipelineError(
- "Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline"
- )
+ raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline")
if inconsistent_workspaced:
- detail = (
- "Some workspaces exist but are not closed\n"
- + "Try closing them with `bst workspace close`\n\n"
- )
+ detail = "Some workspaces exist but are not closed\n" + "Try closing them with `bst workspace close`\n\n"
for element in inconsistent_workspaced:
detail += " " + element._get_full_name() + "\n"
raise PipelineError(
- "Inconsistent pipeline",
- detail=detail,
- reason="inconsistent-pipeline-workspaced",
+ "Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline-workspaced",
)
# assert_sources_cached()
@@ -469,18 +443,13 @@ class Pipeline:
uncached = []
with self._context.messenger.timed_activity("Checking sources"):
for element in elements:
- if (
- element._get_consistency() < Consistency.CACHED
- and not element._source_cached()
- ):
+ if element._get_consistency() < Consistency.CACHED and not element._source_cached():
uncached.append(element)
if uncached:
detail = "Sources are not cached for the following elements:\n\n"
for element in uncached:
- detail += " Following sources for element: {} are not cached:\n".format(
- element._get_full_name()
- )
+ detail += " Following sources for element: {} are not cached:\n".format(element._get_full_name())
for source in element.sources():
if source._get_consistency() < Consistency.CACHED:
detail += " {}\n".format(source)
@@ -490,9 +459,7 @@ class Pipeline:
+ "or run this command with `--fetch` option\n"
)
- raise PipelineError(
- "Uncached sources", detail=detail, reason="uncached-sources"
- )
+ raise PipelineError("Uncached sources", detail=detail, reason="uncached-sources")
#############################################################
# Private Methods #
@@ -541,9 +508,7 @@ class Pipeline:
+ "in a project which does not use project.refs ref-storage."
)
- raise PipelineError(
- "Untrackable sources", detail=detail, reason="untrackable-sources"
- )
+ raise PipelineError("Untrackable sources", detail=detail, reason="untrackable-sources")
# _message()
#
@@ -601,8 +566,4 @@ class _Planner:
for index, item in enumerate(depth_sorted):
item[0]._set_depth(index)
- return [
- item[0]
- for item in depth_sorted
- if plan_cached or not item[0]._cached_success()
- ]
+ return [item[0] for item in depth_sorted if plan_cached or not item[0]._cached_success()]
diff --git a/src/buildstream/_platform/darwin.py b/src/buildstream/_platform/darwin.py
index adc858842..06491e8b4 100644
--- a/src/buildstream/_platform/darwin.py
+++ b/src/buildstream/_platform/darwin.py
@@ -60,8 +60,7 @@ class Darwin(Platform):
@staticmethod
def _create_dummy_sandbox(*args, **kwargs):
kwargs["dummy_reason"] = (
- "OSXFUSE is not supported and there are no supported sandbox "
- + "technologies for MacOS at this time"
+ "OSXFUSE is not supported and there are no supported sandbox " + "technologies for MacOS at this time"
)
return SandboxDummy(*args, **kwargs)
diff --git a/src/buildstream/_platform/platform.py b/src/buildstream/_platform/platform.py
index ebac66843..1fddbe82c 100644
--- a/src/buildstream/_platform/platform.py
+++ b/src/buildstream/_platform/platform.py
@@ -114,9 +114,7 @@ class Platform:
elif backend == "win32":
from .win32 import Win32 as PlatformImpl # pylint: disable=cyclic-import
elif backend == "fallback":
- from .fallback import (
- Fallback as PlatformImpl,
- ) # pylint: disable=cyclic-import
+ from .fallback import Fallback as PlatformImpl # pylint: disable=cyclic-import
else:
raise PlatformError("No such platform: '{}'".format(backend))
@@ -212,17 +210,11 @@ class Platform:
# (Sandbox) A sandbox
#
def create_sandbox(self, *args, **kwargs):
- raise ImplError(
- "Platform {platform} does not implement create_sandbox()".format(
- platform=type(self).__name__
- )
- )
+ raise ImplError("Platform {platform} does not implement create_sandbox()".format(platform=type(self).__name__))
def check_sandbox_config(self, config):
raise ImplError(
- "Platform {platform} does not implement check_sandbox_config()".format(
- platform=type(self).__name__
- )
+ "Platform {platform} does not implement check_sandbox_config()".format(platform=type(self).__name__)
)
def maximize_open_file_limit(self):
@@ -243,7 +235,5 @@ class Platform:
def _setup_dummy_sandbox(self):
raise ImplError(
- "Platform {platform} does not implement _setup_dummy_sandbox()".format(
- platform=type(self).__name__
- )
+ "Platform {platform} does not implement _setup_dummy_sandbox()".format(platform=type(self).__name__)
)
diff --git a/src/buildstream/_platform/win32.py b/src/buildstream/_platform/win32.py
index a6aaf1662..a2529d8f6 100644
--- a/src/buildstream/_platform/win32.py
+++ b/src/buildstream/_platform/win32.py
@@ -49,9 +49,7 @@ class Win32(Platform):
@staticmethod
def _create_dummy_sandbox(*args, **kwargs):
- kwargs[
- "dummy_reason"
- ] = "There are no supported sandbox technologies for Win32 at this time."
+ kwargs["dummy_reason"] = "There are no supported sandbox technologies for Win32 at this time."
return SandboxDummy(*args, **kwargs)
def _setup_dummy_sandbox(self):
diff --git a/src/buildstream/_plugincontext.py b/src/buildstream/_plugincontext.py
index f542f6fd4..95ac192dc 100644
--- a/src/buildstream/_plugincontext.py
+++ b/src/buildstream/_plugincontext.py
@@ -42,15 +42,7 @@ from . import utils
# Pipelines.
#
class PluginContext:
- def __init__(
- self,
- plugin_base,
- base_type,
- site_plugin_path,
- *,
- plugin_origins=None,
- format_versions={}
- ):
+ def __init__(self, plugin_base, base_type, site_plugin_path, *, plugin_origins=None, format_versions={}):
# For pickling across processes, make sure this context has a unique
# identifier, which we prepend to the identifier of each PluginSource.
@@ -140,9 +132,7 @@ class PluginContext:
def _get_local_plugin_source(self, path):
if ("local", path) not in self._alternate_sources:
# key by a tuple to avoid collision
- source = self._plugin_base.make_plugin_source(
- searchpath=[path], identifier=self._identifier + path,
- )
+ source = self._plugin_base.make_plugin_source(searchpath=[path], identifier=self._identifier + path,)
# Ensure that sources never get garbage collected,
# as they'll take the plugins with them.
self._alternate_sources[("local", path)] = source
@@ -157,22 +147,12 @@ class PluginContext:
# key by a tuple to avoid collision
try:
- package = pkg_resources.get_entry_info(
- package_name, "buildstream.plugins", kind
- )
+ package = pkg_resources.get_entry_info(package_name, "buildstream.plugins", kind)
except pkg_resources.DistributionNotFound as e:
- raise PluginError(
- "Failed to load {} plugin '{}': {}".format(
- self._base_type.__name__, kind, e
- )
- ) from e
+ raise PluginError("Failed to load {} plugin '{}': {}".format(self._base_type.__name__, kind, e)) from e
if package is None:
- raise PluginError(
- "Pip package {} does not contain a plugin named '{}'".format(
- package_name, kind
- )
- )
+ raise PluginError("Pip package {} does not contain a plugin named '{}'".format(package_name, kind))
location = package.dist.get_resource_filename(
pkg_resources._manager, package.module_name.replace(".", os.sep) + ".py"
@@ -182,16 +162,14 @@ class PluginContext:
# may need to extract the file.
try:
defaults = package.dist.get_resource_filename(
- pkg_resources._manager,
- package.module_name.replace(".", os.sep) + ".yaml",
+ pkg_resources._manager, package.module_name.replace(".", os.sep) + ".yaml",
)
except KeyError:
# The plugin didn't have an accompanying YAML file
defaults = None
source = self._plugin_base.make_plugin_source(
- searchpath=[os.path.dirname(location)],
- identifier=self._identifier + os.path.dirname(location),
+ searchpath=[os.path.dirname(location)], identifier=self._identifier + os.path.dirname(location),
)
self._alternate_sources[("pip", package_name)] = source
@@ -221,9 +199,7 @@ class PluginContext:
else:
raise PluginError(
"Failed to load plugin '{}': "
- "Unexpected plugin origin '{}'".format(
- kind, origin.get_str("origin")
- )
+ "Unexpected plugin origin '{}'".format(kind, origin.get_str("origin"))
)
loaded_dependency = True
break
@@ -231,11 +207,7 @@ class PluginContext:
# Fall back to getting the source from site
if not source:
if kind not in self._site_source.list_plugins():
- raise PluginError(
- "No {} type registered for kind '{}'".format(
- self._base_type.__name__, kind
- )
- )
+ raise PluginError("No {} type registered for kind '{}'".format(self._base_type.__name__, kind))
source = self._site_source
@@ -257,25 +229,17 @@ class PluginContext:
defaults = os.path.join(plugin_dir, plugin_conf_name)
except ImportError as e:
- raise PluginError(
- "Failed to load {} plugin '{}': {}".format(
- self._base_type.__name__, kind, e
- )
- ) from e
+ raise PluginError("Failed to load {} plugin '{}': {}".format(self._base_type.__name__, kind, e)) from e
try:
plugin_type = plugin.setup()
except AttributeError as e:
raise PluginError(
- "{} plugin '{}' did not provide a setup() function".format(
- self._base_type.__name__, kind
- )
+ "{} plugin '{}' did not provide a setup() function".format(self._base_type.__name__, kind)
) from e
except TypeError as e:
raise PluginError(
- "setup symbol in {} plugin '{}' is not a function".format(
- self._base_type.__name__, kind
- )
+ "setup symbol in {} plugin '{}' is not a function".format(self._base_type.__name__, kind)
) from e
self._assert_plugin(kind, plugin_type)
@@ -286,18 +250,13 @@ class PluginContext:
if kind in self._types:
raise PluginError(
"Tried to register {} plugin for existing kind '{}' "
- "(already registered {})".format(
- self._base_type.__name__, kind, self._types[kind].__name__
- )
+ "(already registered {})".format(self._base_type.__name__, kind, self._types[kind].__name__)
)
try:
if not issubclass(plugin_type, self._base_type):
raise PluginError(
"{} plugin '{}' returned type '{}', which is not a subclass of {}".format(
- self._base_type.__name__,
- kind,
- plugin_type.__name__,
- self._base_type.__name__,
+ self._base_type.__name__, kind, plugin_type.__name__, self._base_type.__name__,
)
)
except TypeError as e:
diff --git a/src/buildstream/_profile.py b/src/buildstream/_profile.py
index b8a9537a8..854c26e10 100644
--- a/src/buildstream/_profile.py
+++ b/src/buildstream/_profile.py
@@ -62,9 +62,7 @@ class _Profile:
filename_template = os.path.join(
os.getcwd(),
"profile-{}-{}".format(
- datetime.datetime.fromtimestamp(self.start_time).strftime(
- "%Y%m%dT%H%M%S"
- ),
+ datetime.datetime.fromtimestamp(self.start_time).strftime("%Y%m%dT%H%M%S"),
self.key.replace("/", "-").replace(".", "-"),
),
)
@@ -100,9 +98,7 @@ class _Profile:
)
with open(self.log_filename, "a") as fp:
- stats = pstats.Stats(
- self.profiler, *self._additional_pstats_files, stream=fp
- )
+ stats = pstats.Stats(self.profiler, *self._additional_pstats_files, stream=fp)
# Create the log file
fp.write(heading)
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index bc361d288..812d96d5a 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -118,10 +118,7 @@ class Project:
self._context = context # The invocation Context, a private member
if search_for_project:
- (
- self.directory,
- self._invoked_from_workspace_element,
- ) = self._find_project_dir(directory)
+ (self.directory, self._invoked_from_workspace_element,) = self._find_project_dir(directory)
else:
self.directory = directory
self._invoked_from_workspace_element = None
@@ -270,16 +267,14 @@ class Project:
if full_path.is_symlink():
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' must not point to "
- "symbolic links ".format(provenance, path_str),
+ "{}: Specified path '{}' must not point to " "symbolic links ".format(provenance, path_str),
LoadErrorReason.PROJ_PATH_INVALID_KIND,
)
if path.parts and path.parts[0] == "..":
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' first component must "
- "not be '..'".format(provenance, path_str),
+ "{}: Specified path '{}' first component must " "not be '..'".format(provenance, path_str),
LoadErrorReason.PROJ_PATH_INVALID,
)
@@ -287,14 +282,11 @@ class Project:
if sys.version_info[0] == 3 and sys.version_info[1] < 6:
full_resolved_path = full_path.resolve()
else:
- full_resolved_path = full_path.resolve(
- strict=True
- ) # pylint: disable=unexpected-keyword-arg
+ full_resolved_path = full_path.resolve(strict=True) # pylint: disable=unexpected-keyword-arg
except FileNotFoundError:
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' does not exist".format(provenance, path_str),
- LoadErrorReason.MISSING_FILE,
+ "{}: Specified path '{}' does not exist".format(provenance, path_str), LoadErrorReason.MISSING_FILE,
)
is_inside = self._absolute_directory_path in full_resolved_path.parents or (
@@ -313,37 +305,28 @@ class Project:
provenance = node.get_provenance()
raise LoadError(
"{}: Absolute path: '{}' invalid.\n"
- "Please specify a path relative to the project's root.".format(
- provenance, path
- ),
+ "Please specify a path relative to the project's root.".format(provenance, path),
LoadErrorReason.PROJ_PATH_INVALID,
)
- if full_resolved_path.is_socket() or (
- full_resolved_path.is_fifo() or full_resolved_path.is_block_device()
- ):
+ if full_resolved_path.is_socket() or (full_resolved_path.is_fifo() or full_resolved_path.is_block_device()):
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' points to an unsupported "
- "file kind".format(provenance, path_str),
+ "{}: Specified path '{}' points to an unsupported " "file kind".format(provenance, path_str),
LoadErrorReason.PROJ_PATH_INVALID_KIND,
)
if check_is_file and not full_resolved_path.is_file():
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' is not a regular file".format(
- provenance, path_str
- ),
+ "{}: Specified path '{}' is not a regular file".format(provenance, path_str),
LoadErrorReason.PROJ_PATH_INVALID_KIND,
)
if check_is_dir and not full_resolved_path.is_dir():
provenance = node.get_provenance()
raise LoadError(
- "{}: Specified path '{}' is not a directory".format(
- provenance, path_str
- ),
+ "{}: Specified path '{}' is not a directory".format(provenance, path_str),
LoadErrorReason.PROJ_PATH_INVALID_KIND,
)
@@ -391,9 +374,7 @@ class Project:
#
def create_element(self, meta, *, first_pass=False):
if first_pass:
- return self.first_pass_config.element_factory.create(
- self._context, self, meta
- )
+ return self.first_pass_config.element_factory.create(self._context, self, meta)
else:
return self.config.element_factory.create(self._context, self, meta)
@@ -423,9 +404,7 @@ class Project:
#
def create_source(self, meta, *, first_pass=False):
if first_pass:
- return self.first_pass_config.source_factory.create(
- self._context, self, meta
- )
+ return self.first_pass_config.source_factory.create(self._context, self, meta)
else:
return self.config.source_factory.create(self._context, self, meta)
@@ -461,9 +440,7 @@ class Project:
else:
config = self.config
- if (
- not alias or alias not in config._aliases
- ): # pylint: disable=unsupported-membership-test
+ if not alias or alias not in config._aliases: # pylint: disable=unsupported-membership-test
return [None]
mirror_list = []
@@ -490,15 +467,9 @@ class Project:
# (list): A list of loaded Element
#
def load_elements(self, targets, *, rewritable=False, ignore_workspaces=False):
- with self._context.messenger.simple_task(
- "Loading elements", silent_nested=True
- ) as task:
+ with self._context.messenger.simple_task("Loading elements", silent_nested=True) as task:
meta_elements = self.loader.load(
- targets,
- task,
- rewritable=rewritable,
- ticker=None,
- ignore_workspaces=ignore_workspaces,
+ targets, task, rewritable=rewritable, ticker=None, ignore_workspaces=ignore_workspaces,
)
with self._context.messenger.simple_task("Resolving elements") as task:
@@ -512,20 +483,11 @@ class Project:
# been discovered in the resolve() phase.
redundant_refs = Element._get_redundant_source_refs()
if redundant_refs:
- detail = (
- "The following inline specified source references will be ignored:\n\n"
- )
- lines = [
- "{}:{}".format(source._get_provenance(), ref)
- for source, ref in redundant_refs
- ]
+ detail = "The following inline specified source references will be ignored:\n\n"
+ lines = ["{}:{}".format(source._get_provenance(), ref) for source, ref in redundant_refs]
detail += "\n".join(lines)
self._context.messenger.message(
- Message(
- MessageType.WARN,
- "Ignoring redundant source references",
- detail=detail,
- )
+ Message(MessageType.WARN, "Ignoring redundant source references", detail=detail,)
)
return elements
@@ -551,9 +513,7 @@ class Project:
#
artifacts = []
for ref in targets:
- artifacts.append(
- ArtifactElement._new_from_artifact_ref(ref, self._context, task)
- )
+ artifacts.append(ArtifactElement._new_from_artifact_ref(ref, self._context, task))
ArtifactElement._clear_artifact_refs_cache()
@@ -667,9 +627,7 @@ class Project:
major, minor = utils.get_bst_version()
raise LoadError(
"Project requested format version {}, but BuildStream {}.{} only supports format version {} or above."
- "Use latest 1.x release".format(
- format_version, major, minor, BST_FORMAT_VERSION_MIN
- ),
+ "Use latest 1.x release".format(format_version, major, minor, BST_FORMAT_VERSION_MIN),
LoadErrorReason.UNSUPPORTED_PROJECT,
)
@@ -690,15 +648,10 @@ class Project:
self.name = self._project_conf.get_str("name")
# Validate that project name is a valid symbol name
- _assert_symbol_name(
- self.name, "project name", ref_node=pre_config_node.get_node("name")
- )
+ _assert_symbol_name(self.name, "project name", ref_node=pre_config_node.get_node("name"))
self.element_path = os.path.join(
- self.directory,
- self.get_path_from_node(
- pre_config_node.get_scalar("element-path"), check_is_dir=True
- ),
+ self.directory, self.get_path_from_node(pre_config_node.get_scalar("element-path"), check_is_dir=True),
)
self.config.options = OptionPool(self.element_path)
@@ -709,16 +662,9 @@ class Project:
self._default_targets = defaults.get_str_list("targets")
# Fatal warnings
- self._fatal_warnings = pre_config_node.get_str_list(
- "fatal-warnings", default=[]
- )
+ self._fatal_warnings = pre_config_node.get_str_list("fatal-warnings", default=[])
- self.loader = Loader(
- self._context,
- self,
- parent=parent_loader,
- fetch_subprojects=fetch_subprojects,
- )
+ self.loader = Loader(self._context, self, parent=parent_loader, fetch_subprojects=fetch_subprojects,)
self._project_includes = Includes(self.loader, copy_tree=False)
@@ -738,9 +684,7 @@ class Project:
]:
p = ref_storage_node.get_provenance()
raise LoadError(
- "{}: Invalid value '{}' specified for ref-storage".format(
- p, self.ref_storage
- ),
+ "{}: Invalid value '{}' specified for ref-storage".format(p, self.ref_storage),
LoadErrorReason.INVALID_DATA,
)
@@ -767,32 +711,24 @@ class Project:
#
# Load artifacts pull/push configuration for this project
- self.artifact_cache_specs = ArtifactCache.specs_from_config_node(
- config, self.directory
- )
+ self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config, self.directory)
# If there is a junction Element which specifies that we want to remotely cache
# its elements, append the junction's remotes to the artifact cache specs list
if self.junction:
parent = self.junction._get_project()
if self.junction.cache_junction_elements:
- self.artifact_cache_specs = (
- parent.artifact_cache_specs + self.artifact_cache_specs
- )
+ self.artifact_cache_specs = parent.artifact_cache_specs + self.artifact_cache_specs
if self.junction.ignore_junction_remotes:
self.artifact_cache_specs = []
# Load source caches with pull/push config
- self.source_cache_specs = SourceCache.specs_from_config_node(
- config, self.directory
- )
+ self.source_cache_specs = SourceCache.specs_from_config_node(config, self.directory)
# Load remote-execution configuration for this project
project_specs = SandboxRemote.specs_from_config_node(config, self.directory)
- override_specs = SandboxRemote.specs_from_config_node(
- self._context.get_overrides(self.name), self.directory
- )
+ override_specs = SandboxRemote.specs_from_config_node(self._context.get_overrides(self.name), self.directory)
if override_specs is not None:
self.remote_execution_specs = override_specs
@@ -824,9 +760,7 @@ class Project:
)
)
- if (
- CoreWarnings.OVERLAPS not in self._fatal_warnings
- ) and fail_on_overlap.as_bool():
+ if (CoreWarnings.OVERLAPS not in self._fatal_warnings) and fail_on_overlap.as_bool():
self._fatal_warnings.append(CoreWarnings.OVERLAPS)
# Load project.refs if it exists, this may be ignored.
@@ -889,18 +823,14 @@ class Project:
output.options.load(options_node)
if self.junction:
# load before user configuration
- output.options.load_yaml_values(
- self.junction.options, transform=self.junction.node_subst_vars
- )
+ output.options.load_yaml_values(self.junction.options, transform=self.junction.node_subst_vars)
# Collect option values specified in the user configuration
overrides = self._context.get_overrides(self.name)
override_options = overrides.get_mapping("options", default={})
output.options.load_yaml_values(override_options)
if self._cli_options:
- output.options.load_cli_values(
- self._cli_options, ignore_unknown=ignore_unknown
- )
+ output.options.load_cli_values(self._cli_options, ignore_unknown=ignore_unknown)
# We're done modifying options, now we can use them for substitutions
output.options.resolve()
@@ -938,9 +868,7 @@ class Project:
output.options.export_variables(output.base_variables)
# Override default_mirror if not set by command-line
- output.default_mirror = self._default_mirror or overrides.get_str(
- "default-mirror", default=None
- )
+ output.default_mirror = self._default_mirror or overrides.get_str("default-mirror", default=None)
mirrors = config.get_sequence("mirrors", default=[])
for mirror in mirrors:
@@ -949,9 +877,7 @@ class Project:
mirror_name = mirror.get_str("name")
alias_mappings = {}
for alias_mapping, uris in mirror.get_mapping("aliases").items():
- assert (
- type(uris) is SequenceNode
- ) # pylint: disable=unidiomatic-typecheck
+ assert type(uris) is SequenceNode # pylint: disable=unidiomatic-typecheck
alias_mappings[alias_mapping] = uris.as_str_list()
output.mirrors[mirror_name] = alias_mappings
if not output.default_mirror:
@@ -978,9 +904,7 @@ class Project:
def _find_project_dir(self, directory):
workspace_element = None
config_filenames = [_PROJECT_CONF_FILE, WORKSPACE_PROJECT_FILE]
- found_directory, filename = utils._search_upward_for_files(
- directory, config_filenames
- )
+ found_directory, filename = utils._search_upward_for_files(directory, config_filenames)
if filename == _PROJECT_CONF_FILE:
project_directory = found_directory
elif filename == WORKSPACE_PROJECT_FILE:
@@ -1022,8 +946,7 @@ class Project:
for key in source_versions.keys():
if key in source_format_versions:
raise LoadError(
- "Duplicate listing of source '{}'".format(key),
- LoadErrorReason.INVALID_YAML,
+ "Duplicate listing of source '{}'".format(key), LoadErrorReason.INVALID_YAML,
)
source_format_versions[key] = source_versions.get_int(key)
@@ -1032,8 +955,7 @@ class Project:
for key in element_versions.keys():
if key in element_format_versions:
raise LoadError(
- "Duplicate listing of element '{}'".format(key),
- LoadErrorReason.INVALID_YAML,
+ "Duplicate listing of element '{}'".format(key), LoadErrorReason.INVALID_YAML,
)
element_format_versions[key] = element_versions.get_int(key)
@@ -1047,14 +969,10 @@ class Project:
pluginbase = PluginBase(package="buildstream.plugins")
output.element_factory = ElementFactory(
- pluginbase,
- plugin_origins=plugin_element_origins,
- format_versions=element_format_versions,
+ pluginbase, plugin_origins=plugin_element_origins, format_versions=element_format_versions,
)
output.source_factory = SourceFactory(
- pluginbase,
- plugin_origins=plugin_source_origins,
- format_versions=source_format_versions,
+ pluginbase, plugin_origins=plugin_source_origins, format_versions=source_format_versions,
)
# _store_origin()
@@ -1074,9 +992,7 @@ class Project:
expected_groups = ["sources", "elements"]
if plugin_group not in expected_groups:
raise LoadError(
- "Unexpected plugin group: {}, expecting {}".format(
- plugin_group, expected_groups
- ),
+ "Unexpected plugin group: {}, expecting {}".format(plugin_group, expected_groups),
LoadErrorReason.INVALID_DATA,
)
if plugin_group in origin.keys():
@@ -1089,9 +1005,7 @@ class Project:
del origin_node[group]
if origin_node.get_enum("origin", PluginOrigins) == PluginOrigins.LOCAL:
- path = self.get_path_from_node(
- origin.get_scalar("path"), check_is_dir=True
- )
+ path = self.get_path_from_node(origin.get_scalar("path"), check_is_dir=True)
# paths are passed in relative to the project, but must be absolute
origin_node["path"] = os.path.join(self.directory, path)
destination.append(origin_node)
diff --git a/src/buildstream/_projectrefs.py b/src/buildstream/_projectrefs.py
index 4b25192e4..aca7c6712 100644
--- a/src/buildstream/_projectrefs.py
+++ b/src/buildstream/_projectrefs.py
@@ -62,9 +62,7 @@ class ProjectRefs:
#
def load(self, options):
try:
- self._toplevel_node = _yaml.load(
- self._fullpath, shortname=self._base_name, copy_tree=True
- )
+ self._toplevel_node = _yaml.load(self._fullpath, shortname=self._base_name, copy_tree=True)
provenance = self._toplevel_node.get_provenance()
self._toplevel_save = provenance._toplevel
@@ -114,9 +112,7 @@ class ProjectRefs:
# If we couldnt find the orignal, create a new one.
#
if node is None:
- node = self._lookup(
- self._toplevel_save, project, element, source_index, ensure=True
- )
+ node = self._lookup(self._toplevel_save, project, element, source_index, ensure=True)
return node
diff --git a/src/buildstream/_protos/buildstream/v2/artifact_pb2.py b/src/buildstream/_protos/buildstream/v2/artifact_pb2.py
index 5418aac18..334915dd3 100644
--- a/src/buildstream/_protos/buildstream/v2/artifact_pb2.py
+++ b/src/buildstream/_protos/buildstream/v2/artifact_pb2.py
@@ -18,9 +18,7 @@ _sym_db = _symbol_database.Default()
from buildstream._protos.build.bazel.remote.execution.v2 import (
remote_execution_pb2 as build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2,
)
-from buildstream._protos.google.api import (
- annotations_pb2 as google_dot_api_dot_annotations__pb2,
-)
+from buildstream._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
@@ -557,27 +555,19 @@ _UPDATEARTIFACTREQUEST = _descriptor.Descriptor(
_ARTIFACT_DEPENDENCY.containing_type = _ARTIFACT
_ARTIFACT_LOGFILE.fields_by_name[
"digest"
-].message_type = (
- build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-)
+].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
_ARTIFACT_LOGFILE.containing_type = _ARTIFACT
_ARTIFACT.fields_by_name[
"files"
-].message_type = (
- build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-)
+].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
_ARTIFACT.fields_by_name["build_deps"].message_type = _ARTIFACT_DEPENDENCY
_ARTIFACT.fields_by_name[
"public_data"
-].message_type = (
- build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-)
+].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
_ARTIFACT.fields_by_name["logs"].message_type = _ARTIFACT_LOGFILE
_ARTIFACT.fields_by_name[
"buildtree"
-].message_type = (
- build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-)
+].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
_UPDATEARTIFACTREQUEST.fields_by_name["artifact"].message_type = _ARTIFACT
DESCRIPTOR.message_types_by_name["Artifact"] = _ARTIFACT
DESCRIPTOR.message_types_by_name["GetArtifactRequest"] = _GETARTIFACTREQUEST
diff --git a/src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py b/src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py
index 694780e25..9e6dd4359 100644
--- a/src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py
+++ b/src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py
@@ -1,9 +1,7 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
-from buildstream._protos.buildstream.v2 import (
- artifact_pb2 as buildstream_dot_v2_dot_artifact__pb2,
-)
+from buildstream._protos.buildstream.v2 import artifact_pb2 as buildstream_dot_v2_dot_artifact__pb2
class ArtifactServiceStub(object):
@@ -66,7 +64,5 @@ def add_ArtifactServiceServicer_to_server(servicer, server):
response_serializer=buildstream_dot_v2_dot_artifact__pb2.Artifact.SerializeToString,
),
}
- generic_handler = grpc.method_handlers_generic_handler(
- "buildstream.v2.ArtifactService", rpc_method_handlers
- )
+ generic_handler = grpc.method_handlers_generic_handler("buildstream.v2.ArtifactService", rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
diff --git a/src/buildstream/_protos/buildstream/v2/buildstream_pb2.py b/src/buildstream/_protos/buildstream/v2/buildstream_pb2.py
index 50621e7bd..c26497cb5 100644
--- a/src/buildstream/_protos/buildstream/v2/buildstream_pb2.py
+++ b/src/buildstream/_protos/buildstream/v2/buildstream_pb2.py
@@ -18,9 +18,7 @@ _sym_db = _symbol_database.Default()
from buildstream._protos.build.bazel.remote.execution.v2 import (
remote_execution_pb2 as build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2,
)
-from buildstream._protos.google.api import (
- annotations_pb2 as google_dot_api_dot_annotations__pb2,
-)
+from buildstream._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
@@ -482,20 +480,12 @@ _SERVERCAPABILITIES = _descriptor.Descriptor(
_GETREFERENCERESPONSE.fields_by_name[
"digest"
-].message_type = (
- build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-)
+].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
_UPDATEREFERENCEREQUEST.fields_by_name[
"digest"
-].message_type = (
- build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-)
-_SERVERCAPABILITIES.fields_by_name[
- "artifact_capabilities"
-].message_type = _ARTIFACTCAPABILITIES
-_SERVERCAPABILITIES.fields_by_name[
- "source_capabilities"
-].message_type = _SOURCECAPABILITIES
+].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
+_SERVERCAPABILITIES.fields_by_name["artifact_capabilities"].message_type = _ARTIFACTCAPABILITIES
+_SERVERCAPABILITIES.fields_by_name["source_capabilities"].message_type = _SOURCECAPABILITIES
DESCRIPTOR.message_types_by_name["GetReferenceRequest"] = _GETREFERENCEREQUEST
DESCRIPTOR.message_types_by_name["GetReferenceResponse"] = _GETREFERENCERESPONSE
DESCRIPTOR.message_types_by_name["UpdateReferenceRequest"] = _UPDATEREFERENCEREQUEST
@@ -635,9 +625,7 @@ _REFERENCESTORAGE = _descriptor.ServiceDescriptor(
containing_service=None,
input_type=_GETREFERENCEREQUEST,
output_type=_GETREFERENCERESPONSE,
- serialized_options=_b(
- "\202\323\344\223\002/\022-/v2/{instance_name=**}/buildstream/refs/{key}"
- ),
+ serialized_options=_b("\202\323\344\223\002/\022-/v2/{instance_name=**}/buildstream/refs/{key}"),
),
_descriptor.MethodDescriptor(
name="UpdateReference",
@@ -657,9 +645,7 @@ _REFERENCESTORAGE = _descriptor.ServiceDescriptor(
containing_service=None,
input_type=_STATUSREQUEST,
output_type=_STATUSRESPONSE,
- serialized_options=_b(
- "\202\323\344\223\0020\032./v2/{instance_name=**}/buildstream/refs:status"
- ),
+ serialized_options=_b("\202\323\344\223\0020\032./v2/{instance_name=**}/buildstream/refs:status"),
),
],
)
@@ -684,9 +670,7 @@ _CAPABILITIES = _descriptor.ServiceDescriptor(
containing_service=None,
input_type=_GETCAPABILITIESREQUEST,
output_type=_SERVERCAPABILITIES,
- serialized_options=_b(
- "\202\323\344\223\002%\022#/v2/{instance_name=**}/capabilities"
- ),
+ serialized_options=_b("\202\323\344\223\002%\022#/v2/{instance_name=**}/capabilities"),
),
],
)
diff --git a/src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py b/src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py
index 7c4ca932b..ca890fa53 100644
--- a/src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py
+++ b/src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py
@@ -1,9 +1,7 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
-from buildstream._protos.buildstream.v2 import (
- buildstream_pb2 as buildstream_dot_v2_dot_buildstream__pb2,
-)
+from buildstream._protos.buildstream.v2 import buildstream_pb2 as buildstream_dot_v2_dot_buildstream__pb2
class ReferenceStorageStub(object):
@@ -86,9 +84,7 @@ def add_ReferenceStorageServicer_to_server(servicer, server):
response_serializer=buildstream_dot_v2_dot_buildstream__pb2.StatusResponse.SerializeToString,
),
}
- generic_handler = grpc.method_handlers_generic_handler(
- "buildstream.v2.ReferenceStorage", rpc_method_handlers
- )
+ generic_handler = grpc.method_handlers_generic_handler("buildstream.v2.ReferenceStorage", rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
@@ -129,7 +125,5 @@ def add_CapabilitiesServicer_to_server(servicer, server):
response_serializer=buildstream_dot_v2_dot_buildstream__pb2.ServerCapabilities.SerializeToString,
),
}
- generic_handler = grpc.method_handlers_generic_handler(
- "buildstream.v2.Capabilities", rpc_method_handlers
- )
+ generic_handler = grpc.method_handlers_generic_handler("buildstream.v2.Capabilities", rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
diff --git a/src/buildstream/_protos/buildstream/v2/source_pb2.py b/src/buildstream/_protos/buildstream/v2/source_pb2.py
index af1e456b9..04ac7356d 100644
--- a/src/buildstream/_protos/buildstream/v2/source_pb2.py
+++ b/src/buildstream/_protos/buildstream/v2/source_pb2.py
@@ -18,9 +18,7 @@ _sym_db = _symbol_database.Default()
from buildstream._protos.build.bazel.remote.execution.v2 import (
remote_execution_pb2 as build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2,
)
-from buildstream._protos.google.api import (
- annotations_pb2 as google_dot_api_dot_annotations__pb2,
-)
+from buildstream._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
@@ -228,9 +226,7 @@ _UPDATESOURCEREQUEST = _descriptor.Descriptor(
_SOURCE.fields_by_name[
"files"
-].message_type = (
- build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-)
+].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
_UPDATESOURCEREQUEST.fields_by_name["source"].message_type = _SOURCE
DESCRIPTOR.message_types_by_name["Source"] = _SOURCE
DESCRIPTOR.message_types_by_name["GetSourceRequest"] = _GETSOURCEREQUEST
diff --git a/src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py b/src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py
index 11958366b..4c3a84075 100644
--- a/src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py
+++ b/src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py
@@ -1,9 +1,7 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
-from buildstream._protos.buildstream.v2 import (
- source_pb2 as buildstream_dot_v2_dot_source__pb2,
-)
+from buildstream._protos.buildstream.v2 import source_pb2 as buildstream_dot_v2_dot_source__pb2
class SourceServiceStub(object):
@@ -67,7 +65,5 @@ def add_SourceServiceServicer_to_server(servicer, server):
response_serializer=buildstream_dot_v2_dot_source__pb2.Source.SerializeToString,
),
}
- generic_handler = grpc.method_handlers_generic_handler(
- "buildstream.v2.SourceService", rpc_method_handlers
- )
+ generic_handler = grpc.method_handlers_generic_handler("buildstream.v2.SourceService", rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
diff --git a/src/buildstream/_protos/google/api/annotations_pb2.py b/src/buildstream/_protos/google/api/annotations_pb2.py
index b68e2147c..ac997d36e 100644
--- a/src/buildstream/_protos/google/api/annotations_pb2.py
+++ b/src/buildstream/_protos/google/api/annotations_pb2.py
@@ -29,10 +29,7 @@ DESCRIPTOR = _descriptor.FileDescriptor(
serialized_pb=_b(
'\n\x1cgoogle/api/annotations.proto\x12\ngoogle.api\x1a\x15google/api/http.proto\x1a google/protobuf/descriptor.proto:E\n\x04http\x12\x1e.google.protobuf.MethodOptions\x18\xb0\xca\xbc" \x01(\x0b\x32\x14.google.api.HttpRuleBn\n\x0e\x63om.google.apiB\x10\x41nnotationsProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xa2\x02\x04GAPIb\x06proto3'
),
- dependencies=[
- google_dot_api_dot_http__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,
- ],
+ dependencies=[google_dot_api_dot_http__pb2.DESCRIPTOR, google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,],
)
diff --git a/src/buildstream/_protos/google/api/http_pb2.py b/src/buildstream/_protos/google/api/http_pb2.py
index b85402af0..5c3dbdf80 100644
--- a/src/buildstream/_protos/google/api/http_pb2.py
+++ b/src/buildstream/_protos/google/api/http_pb2.py
@@ -264,11 +264,7 @@ _HTTPRULE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
- name="pattern",
- full_name="google.api.HttpRule.pattern",
- index=0,
- containing_type=None,
- fields=[],
+ name="pattern", full_name="google.api.HttpRule.pattern", index=0, containing_type=None, fields=[],
),
],
serialized_start=124,
@@ -342,15 +338,11 @@ _HTTPRULE.fields_by_name["put"].containing_oneof = _HTTPRULE.oneofs_by_name["pat
_HTTPRULE.oneofs_by_name["pattern"].fields.append(_HTTPRULE.fields_by_name["post"])
_HTTPRULE.fields_by_name["post"].containing_oneof = _HTTPRULE.oneofs_by_name["pattern"]
_HTTPRULE.oneofs_by_name["pattern"].fields.append(_HTTPRULE.fields_by_name["delete"])
-_HTTPRULE.fields_by_name["delete"].containing_oneof = _HTTPRULE.oneofs_by_name[
- "pattern"
-]
+_HTTPRULE.fields_by_name["delete"].containing_oneof = _HTTPRULE.oneofs_by_name["pattern"]
_HTTPRULE.oneofs_by_name["pattern"].fields.append(_HTTPRULE.fields_by_name["patch"])
_HTTPRULE.fields_by_name["patch"].containing_oneof = _HTTPRULE.oneofs_by_name["pattern"]
_HTTPRULE.oneofs_by_name["pattern"].fields.append(_HTTPRULE.fields_by_name["custom"])
-_HTTPRULE.fields_by_name["custom"].containing_oneof = _HTTPRULE.oneofs_by_name[
- "pattern"
-]
+_HTTPRULE.fields_by_name["custom"].containing_oneof = _HTTPRULE.oneofs_by_name["pattern"]
DESCRIPTOR.message_types_by_name["Http"] = _HTTP
DESCRIPTOR.message_types_by_name["HttpRule"] = _HTTPRULE
DESCRIPTOR.message_types_by_name["CustomHttpPattern"] = _CUSTOMHTTPPATTERN
diff --git a/src/buildstream/_protos/google/bytestream/bytestream_pb2.py b/src/buildstream/_protos/google/bytestream/bytestream_pb2.py
index 4a0badd27..e472b9ffb 100644
--- a/src/buildstream/_protos/google/bytestream/bytestream_pb2.py
+++ b/src/buildstream/_protos/google/bytestream/bytestream_pb2.py
@@ -15,9 +15,7 @@ from google.protobuf import symbol_database as _symbol_database
_sym_db = _symbol_database.Default()
-from buildstream._protos.google.api import (
- annotations_pb2 as google_dot_api_dot_annotations__pb2,
-)
+from buildstream._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
@@ -31,10 +29,7 @@ DESCRIPTOR = _descriptor.FileDescriptor(
serialized_pb=_b(
'\n"google/bytestream/bytestream.proto\x12\x11google.bytestream\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/wrappers.proto"M\n\x0bReadRequest\x12\x15\n\rresource_name\x18\x01 \x01(\t\x12\x13\n\x0bread_offset\x18\x02 \x01(\x03\x12\x12\n\nread_limit\x18\x03 \x01(\x03"\x1c\n\x0cReadResponse\x12\x0c\n\x04\x64\x61ta\x18\n \x01(\x0c"_\n\x0cWriteRequest\x12\x15\n\rresource_name\x18\x01 \x01(\t\x12\x14\n\x0cwrite_offset\x18\x02 \x01(\x03\x12\x14\n\x0c\x66inish_write\x18\x03 \x01(\x08\x12\x0c\n\x04\x64\x61ta\x18\n \x01(\x0c"\'\n\rWriteResponse\x12\x16\n\x0e\x63ommitted_size\x18\x01 \x01(\x03"0\n\x17QueryWriteStatusRequest\x12\x15\n\rresource_name\x18\x01 \x01(\t"D\n\x18QueryWriteStatusResponse\x12\x16\n\x0e\x63ommitted_size\x18\x01 \x01(\x03\x12\x10\n\x08\x63omplete\x18\x02 \x01(\x08\x32\x92\x02\n\nByteStream\x12I\n\x04Read\x12\x1e.google.bytestream.ReadRequest\x1a\x1f.google.bytestream.ReadResponse0\x01\x12L\n\x05Write\x12\x1f.google.bytestream.WriteRequest\x1a .google.bytestream.WriteResponse(\x01\x12k\n\x10QueryWriteStatus\x12*.google.bytestream.QueryWriteStatusRequest\x1a+.google.bytestream.QueryWriteStatusResponseBe\n\x15\x63om.google.bytestreamB\x0f\x42yteStreamProtoZ;google.golang.org/genproto/googleapis/bytestream;bytestreamb\x06proto3'
),
- dependencies=[
- google_dot_api_dot_annotations__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,
- ],
+ dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,],
)
diff --git a/src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py b/src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py
index a7b5ac589..66ed25016 100644
--- a/src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py
+++ b/src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py
@@ -1,9 +1,7 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
-from buildstream._protos.google.bytestream import (
- bytestream_pb2 as google_dot_bytestream_dot_bytestream__pb2,
-)
+from buildstream._protos.google.bytestream import bytestream_pb2 as google_dot_bytestream_dot_bytestream__pb2
class ByteStreamStub(object):
@@ -157,7 +155,5 @@ def add_ByteStreamServicer_to_server(servicer, server):
response_serializer=google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusResponse.SerializeToString,
),
}
- generic_handler = grpc.method_handlers_generic_handler(
- "google.bytestream.ByteStream", rpc_method_handlers
- )
+ generic_handler = grpc.method_handlers_generic_handler("google.bytestream.ByteStream", rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
diff --git a/src/buildstream/_protos/google/longrunning/operations_pb2.py b/src/buildstream/_protos/google/longrunning/operations_pb2.py
index 0b30b7c11..ea2a6f674 100644
--- a/src/buildstream/_protos/google/longrunning/operations_pb2.py
+++ b/src/buildstream/_protos/google/longrunning/operations_pb2.py
@@ -15,9 +15,7 @@ from google.protobuf import symbol_database as _symbol_database
_sym_db = _symbol_database.Default()
-from buildstream._protos.google.api import (
- annotations_pb2 as google_dot_api_dot_annotations__pb2,
-)
+from buildstream._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from buildstream._protos.google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2
@@ -149,11 +147,7 @@ _OPERATION = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
- name="result",
- full_name="google.longrunning.Operation.result",
- index=0,
- containing_type=None,
- fields=[],
+ name="result", full_name="google.longrunning.Operation.result", index=0, containing_type=None, fields=[],
),
],
serialized_start=171,
@@ -427,21 +421,13 @@ _DELETEOPERATIONREQUEST = _descriptor.Descriptor(
serialized_end=652,
)
-_OPERATION.fields_by_name[
- "metadata"
-].message_type = google_dot_protobuf_dot_any__pb2._ANY
+_OPERATION.fields_by_name["metadata"].message_type = google_dot_protobuf_dot_any__pb2._ANY
_OPERATION.fields_by_name["error"].message_type = google_dot_rpc_dot_status__pb2._STATUS
-_OPERATION.fields_by_name[
- "response"
-].message_type = google_dot_protobuf_dot_any__pb2._ANY
+_OPERATION.fields_by_name["response"].message_type = google_dot_protobuf_dot_any__pb2._ANY
_OPERATION.oneofs_by_name["result"].fields.append(_OPERATION.fields_by_name["error"])
-_OPERATION.fields_by_name["error"].containing_oneof = _OPERATION.oneofs_by_name[
- "result"
-]
+_OPERATION.fields_by_name["error"].containing_oneof = _OPERATION.oneofs_by_name["result"]
_OPERATION.oneofs_by_name["result"].fields.append(_OPERATION.fields_by_name["response"])
-_OPERATION.fields_by_name["response"].containing_oneof = _OPERATION.oneofs_by_name[
- "result"
-]
+_OPERATION.fields_by_name["response"].containing_oneof = _OPERATION.oneofs_by_name["result"]
_LISTOPERATIONSRESPONSE.fields_by_name["operations"].message_type = _OPERATION
DESCRIPTOR.message_types_by_name["Operation"] = _OPERATION
DESCRIPTOR.message_types_by_name["GetOperationRequest"] = _GETOPERATIONREQUEST
@@ -536,9 +522,7 @@ _OPERATIONS = _descriptor.ServiceDescriptor(
containing_service=None,
input_type=_LISTOPERATIONSREQUEST,
output_type=_LISTOPERATIONSRESPONSE,
- serialized_options=_b(
- "\202\323\344\223\002\027\022\025/v1/{name=operations}"
- ),
+ serialized_options=_b("\202\323\344\223\002\027\022\025/v1/{name=operations}"),
),
_descriptor.MethodDescriptor(
name="GetOperation",
@@ -547,9 +531,7 @@ _OPERATIONS = _descriptor.ServiceDescriptor(
containing_service=None,
input_type=_GETOPERATIONREQUEST,
output_type=_OPERATION,
- serialized_options=_b(
- "\202\323\344\223\002\032\022\030/v1/{name=operations/**}"
- ),
+ serialized_options=_b("\202\323\344\223\002\032\022\030/v1/{name=operations/**}"),
),
_descriptor.MethodDescriptor(
name="DeleteOperation",
@@ -558,9 +540,7 @@ _OPERATIONS = _descriptor.ServiceDescriptor(
containing_service=None,
input_type=_DELETEOPERATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
- serialized_options=_b(
- "\202\323\344\223\002\032*\030/v1/{name=operations/**}"
- ),
+ serialized_options=_b("\202\323\344\223\002\032*\030/v1/{name=operations/**}"),
),
_descriptor.MethodDescriptor(
name="CancelOperation",
@@ -569,9 +549,7 @@ _OPERATIONS = _descriptor.ServiceDescriptor(
containing_service=None,
input_type=_CANCELOPERATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
- serialized_options=_b(
- '\202\323\344\223\002$"\037/v1/{name=operations/**}:cancel:\001*'
- ),
+ serialized_options=_b('\202\323\344\223\002$"\037/v1/{name=operations/**}:cancel:\001*'),
),
],
)
diff --git a/src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py b/src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py
index 24240730a..66c755056 100644
--- a/src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py
+++ b/src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py
@@ -1,9 +1,7 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
-from buildstream._protos.google.longrunning import (
- operations_pb2 as google_dot_longrunning_dot_operations__pb2,
-)
+from buildstream._protos.google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
@@ -129,7 +127,5 @@ def add_OperationsServicer_to_server(servicer, server):
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
}
- generic_handler = grpc.method_handlers_generic_handler(
- "google.longrunning.Operations", rpc_method_handlers
- )
+ generic_handler = grpc.method_handlers_generic_handler("google.longrunning.Operations", rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
diff --git a/src/buildstream/_protos/google/rpc/code_pb2.py b/src/buildstream/_protos/google/rpc/code_pb2.py
index 120fb0812..d6688c1e8 100644
--- a/src/buildstream/_protos/google/rpc/code_pb2.py
+++ b/src/buildstream/_protos/google/rpc/code_pb2.py
@@ -34,85 +34,37 @@ _CODE = _descriptor.EnumDescriptor(
filename=None,
file=DESCRIPTOR,
values=[
+ _descriptor.EnumValueDescriptor(name="OK", index=0, number=0, serialized_options=None, type=None),
+ _descriptor.EnumValueDescriptor(name="CANCELLED", index=1, number=1, serialized_options=None, type=None),
+ _descriptor.EnumValueDescriptor(name="UNKNOWN", index=2, number=2, serialized_options=None, type=None),
_descriptor.EnumValueDescriptor(
- name="OK", index=0, number=0, serialized_options=None, type=None
+ name="INVALID_ARGUMENT", index=3, number=3, serialized_options=None, type=None,
),
_descriptor.EnumValueDescriptor(
- name="CANCELLED", index=1, number=1, serialized_options=None, type=None
+ name="DEADLINE_EXCEEDED", index=4, number=4, serialized_options=None, type=None,
),
+ _descriptor.EnumValueDescriptor(name="NOT_FOUND", index=5, number=5, serialized_options=None, type=None),
+ _descriptor.EnumValueDescriptor(name="ALREADY_EXISTS", index=6, number=6, serialized_options=None, type=None),
_descriptor.EnumValueDescriptor(
- name="UNKNOWN", index=2, number=2, serialized_options=None, type=None
+ name="PERMISSION_DENIED", index=7, number=7, serialized_options=None, type=None,
),
_descriptor.EnumValueDescriptor(
- name="INVALID_ARGUMENT",
- index=3,
- number=3,
- serialized_options=None,
- type=None,
+ name="UNAUTHENTICATED", index=8, number=16, serialized_options=None, type=None,
),
_descriptor.EnumValueDescriptor(
- name="DEADLINE_EXCEEDED",
- index=4,
- number=4,
- serialized_options=None,
- type=None,
+ name="RESOURCE_EXHAUSTED", index=9, number=8, serialized_options=None, type=None,
),
_descriptor.EnumValueDescriptor(
- name="NOT_FOUND", index=5, number=5, serialized_options=None, type=None
+ name="FAILED_PRECONDITION", index=10, number=9, serialized_options=None, type=None,
),
+ _descriptor.EnumValueDescriptor(name="ABORTED", index=11, number=10, serialized_options=None, type=None),
+ _descriptor.EnumValueDescriptor(name="OUT_OF_RANGE", index=12, number=11, serialized_options=None, type=None),
_descriptor.EnumValueDescriptor(
- name="ALREADY_EXISTS", index=6, number=6, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="PERMISSION_DENIED",
- index=7,
- number=7,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="UNAUTHENTICATED",
- index=8,
- number=16,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="RESOURCE_EXHAUSTED",
- index=9,
- number=8,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="FAILED_PRECONDITION",
- index=10,
- number=9,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="ABORTED", index=11, number=10, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="OUT_OF_RANGE", index=12, number=11, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="UNIMPLEMENTED",
- index=13,
- number=12,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="INTERNAL", index=14, number=13, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="UNAVAILABLE", index=15, number=14, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="DATA_LOSS", index=16, number=15, serialized_options=None, type=None
+ name="UNIMPLEMENTED", index=13, number=12, serialized_options=None, type=None,
),
+ _descriptor.EnumValueDescriptor(name="INTERNAL", index=14, number=13, serialized_options=None, type=None),
+ _descriptor.EnumValueDescriptor(name="UNAVAILABLE", index=15, number=14, serialized_options=None, type=None),
+ _descriptor.EnumValueDescriptor(name="DATA_LOSS", index=16, number=15, serialized_options=None, type=None),
],
containing_type=None,
serialized_options=None,
diff --git a/src/buildstream/_remote.py b/src/buildstream/_remote.py
index 671adb95c..473d5c2a3 100644
--- a/src/buildstream/_remote.py
+++ b/src/buildstream/_remote.py
@@ -42,11 +42,7 @@ class RemoteType(FastEnum):
#
# Defines the basic structure of a remote specification.
#
-class RemoteSpec(
- namedtuple(
- "RemoteSpec", "url push server_cert client_key client_cert instance_name type"
- )
-):
+class RemoteSpec(namedtuple("RemoteSpec", "url push server_cert client_key client_cert instance_name type")):
# new_from_config_node
#
@@ -65,23 +61,14 @@ class RemoteSpec(
@classmethod
def new_from_config_node(cls, spec_node, basedir=None):
spec_node.validate_keys(
- [
- "url",
- "push",
- "server-cert",
- "client-key",
- "client-cert",
- "instance-name",
- "type",
- ]
+ ["url", "push", "server-cert", "client-key", "client-cert", "instance-name", "type",]
)
url = spec_node.get_str("url")
if not url:
provenance = spec_node.get_node("url").get_provenance()
raise LoadError(
- "{}: empty artifact cache URL".format(provenance),
- LoadErrorReason.INVALID_DATA,
+ "{}: empty artifact cache URL".format(provenance), LoadErrorReason.INVALID_DATA,
)
push = spec_node.get_bool("push", default=False)
@@ -94,33 +81,25 @@ class RemoteSpec(
return cert
cert_keys = ("server-cert", "client-key", "client-cert")
- server_cert, client_key, client_cert = tuple(
- parse_cert(key) for key in cert_keys
- )
+ server_cert, client_key, client_cert = tuple(parse_cert(key) for key in cert_keys)
if client_key and not client_cert:
provenance = spec_node.get_node("client-key").get_provenance()
raise LoadError(
- "{}: 'client-key' was specified without 'client-cert'".format(
- provenance
- ),
+ "{}: 'client-key' was specified without 'client-cert'".format(provenance),
LoadErrorReason.INVALID_DATA,
)
if client_cert and not client_key:
provenance = spec_node.get_node("client-cert").get_provenance()
raise LoadError(
- "{}: 'client-cert' was specified without 'client-key'".format(
- provenance
- ),
+ "{}: 'client-cert' was specified without 'client-key'".format(provenance),
LoadErrorReason.INVALID_DATA,
)
type_ = spec_node.get_enum("type", RemoteType, default=RemoteType.ALL)
- return cls(
- url, push, server_cert, client_key, client_cert, instance_name, type_
- )
+ return cls(url, push, server_cert, client_key, client_cert, instance_name, type_)
# FIXME: This can be made much nicer in python 3.7 through the use of
@@ -194,13 +173,9 @@ class BaseRemote:
self.client_key = client_key
self.client_cert = client_cert
credentials = grpc.ssl_channel_credentials(
- root_certificates=self.server_cert,
- private_key=self.client_key,
- certificate_chain=self.client_cert,
- )
- self.channel = grpc.secure_channel(
- "{}:{}".format(url.hostname, port), credentials
+ root_certificates=self.server_cert, private_key=self.client_key, certificate_chain=self.client_cert,
)
+ self.channel = grpc.secure_channel("{}:{}".format(url.hostname, port), credentials)
else:
raise RemoteError("Unsupported URL: {}".format(self.spec.url))
diff --git a/src/buildstream/_scheduler/jobs/elementjob.py b/src/buildstream/_scheduler/jobs/elementjob.py
index 2a9f935b5..f3136104f 100644
--- a/src/buildstream/_scheduler/jobs/elementjob.py
+++ b/src/buildstream/_scheduler/jobs/elementjob.py
@@ -81,9 +81,7 @@ class ElementJob(Job):
self._complete_cb(self, self._element, status, self._result)
def create_child_job(self, *args, **kwargs):
- return ChildElementJob(
- *args, element=self._element, action_cb=self._action_cb, **kwargs
- )
+ return ChildElementJob(*args, element=self._element, action_cb=self._action_cb, **kwargs)
class ChildElementJob(ChildJob):
@@ -98,13 +96,9 @@ class ChildElementJob(ChildJob):
#
# This should probably be omitted for non-build tasks but it's harmless here
elt_env = self._element.get_environment()
- env_dump = yaml.round_trip_dump(
- elt_env, default_flow_style=False, allow_unicode=True
- )
+ env_dump = yaml.round_trip_dump(elt_env, default_flow_style=False, allow_unicode=True)
self.message(
- MessageType.LOG,
- "Build environment for element {}".format(self._element.name),
- detail=env_dump,
+ MessageType.LOG, "Build environment for element {}".format(self._element.name), detail=env_dump,
)
# Run the action
diff --git a/src/buildstream/_scheduler/jobs/job.py b/src/buildstream/_scheduler/jobs/job.py
index 3a5694a71..8baf8fe1b 100644
--- a/src/buildstream/_scheduler/jobs/job.py
+++ b/src/buildstream/_scheduler/jobs/job.py
@@ -146,13 +146,9 @@ class Job:
self._terminated = False # Whether this job has been explicitly terminated
self._logfile = logfile
- self._message_element_name = (
- None # The plugin instance element name for messaging
- )
+ self._message_element_name = None # The plugin instance element name for messaging
self._message_element_key = None # The element key for messaging
- self._element = (
- None # The Element() passed to the Job() constructor, if applicable
- )
+ self._element = None # The Element() passed to the Job() constructor, if applicable
# set_name()
#
@@ -182,15 +178,9 @@ class Job:
self._message_element_key,
)
- if (
- self._scheduler.context.platform.does_multiprocessing_start_require_pickling()
- ):
- pickled = pickle_child_job(
- child_job, self._scheduler.context.get_projects(),
- )
- self._process = Process(
- target=do_pickled_child_job, args=[pickled, self._queue],
- )
+ if self._scheduler.context.platform.does_multiprocessing_start_require_pickling():
+ pickled = pickle_child_job(child_job, self._scheduler.context.get_projects(),)
+ self._process = Process(target=do_pickled_child_job, args=[pickled, self._queue],)
else:
self._process = Process(target=child_job.child_action, args=[self._queue],)
@@ -198,9 +188,7 @@ class Job:
# the child process does not inherit the parent's state, but the main
# process will be notified of any signal after we launch the child.
#
- with _signals.blocked(
- [signal.SIGINT, signal.SIGTSTP, signal.SIGTERM], ignore=False
- ):
+ with _signals.blocked([signal.SIGINT, signal.SIGTSTP, signal.SIGTERM], ignore=False):
self._process.start()
# Wait for the child task to complete.
@@ -282,8 +270,7 @@ class Job:
def kill(self):
# Force kill
self.message(
- MessageType.WARN,
- "{} did not terminate gracefully, killing".format(self.action_name),
+ MessageType.WARN, "{} did not terminate gracefully, killing".format(self.action_name),
)
utils._kill_process_tree(self._process.pid)
@@ -358,22 +345,14 @@ class Job:
# kwargs: Remaining Message() constructor arguments, note that you can
# override 'element_name' and 'element_key' this way.
#
- def message(
- self, message_type, message, element_name=None, element_key=None, **kwargs
- ):
+ def message(self, message_type, message, element_name=None, element_key=None, **kwargs):
kwargs["scheduler"] = True
# If default name & key values not provided, set as given job attributes
if element_name is None:
element_name = self._message_element_name
if element_key is None:
element_key = self._message_element_key
- message = Message(
- message_type,
- message,
- element_name=element_name,
- element_key=element_key,
- **kwargs
- )
+ message = Message(message_type, message, element_name=element_name, element_key=element_key, **kwargs)
self._scheduler.notify_messenger(message)
# get_element()
@@ -405,11 +384,7 @@ class Job:
# lists, dicts, numbers, but not Element instances).
#
def handle_message(self, message):
- raise ImplError(
- "Job '{kind}' does not implement handle_message()".format(
- kind=type(self).__name__
- )
- )
+ raise ImplError("Job '{kind}' does not implement handle_message()".format(kind=type(self).__name__))
# parent_complete()
#
@@ -421,11 +396,7 @@ class Job:
# result (any): The result returned by child_process().
#
def parent_complete(self, status, result):
- raise ImplError(
- "Job '{kind}' does not implement parent_complete()".format(
- kind=type(self).__name__
- )
- )
+ raise ImplError("Job '{kind}' does not implement parent_complete()".format(kind=type(self).__name__))
# create_child_job()
#
@@ -443,11 +414,7 @@ class Job:
# (ChildJob): An instance of a subclass of ChildJob.
#
def create_child_job(self, *args, **kwargs):
- raise ImplError(
- "Job '{kind}' does not implement create_child_job()".format(
- kind=type(self).__name__
- )
- )
+ raise ImplError("Job '{kind}' does not implement create_child_job()".format(kind=type(self).__name__))
#######################################################
# Local Private Methods #
@@ -480,9 +447,7 @@ class Job:
# An unexpected return code was returned; fail permanently and report
self.message(
MessageType.ERROR,
- "Internal job process unexpectedly died with exit code {}".format(
- returncode
- ),
+ "Internal job process unexpectedly died with exit code {}".format(returncode),
logfile=self._logfile,
)
returncode = _ReturnCode.PERM_FAIL
@@ -490,11 +455,7 @@ class Job:
# We don't want to retry if we got OK or a permanent fail.
retry_flag = returncode == _ReturnCode.FAIL
- if (
- retry_flag
- and (self._tries <= self._max_retries)
- and not self._scheduler.terminated
- ):
+ if retry_flag and (self._tries <= self._max_retries) and not self._scheduler.terminated:
self.start()
return
@@ -548,9 +509,7 @@ class Job:
elif envelope.message_type is _MessageType.SUBCLASS_CUSTOM_MESSAGE:
self.handle_message(envelope.message)
else:
- assert False, "Unhandled message type '{}': {}".format(
- envelope.message_type, envelope.message
- )
+ assert False, "Unhandled message type '{}': {}".format(envelope.message_type, envelope.message)
# _parent_process_queue()
#
@@ -587,9 +546,7 @@ class Job:
# http://bugs.python.org/issue3831
#
if not self._listening:
- self._scheduler.loop.add_reader(
- self._queue._reader.fileno(), self._parent_recv
- )
+ self._scheduler.loop.add_reader(self._queue._reader.fileno(), self._parent_recv)
self._listening = True
# _parent_stop_listening()
@@ -627,15 +584,7 @@ class Job:
#
class ChildJob:
def __init__(
- self,
- action_name,
- messenger,
- logdir,
- logfile,
- max_retries,
- tries,
- message_element_name,
- message_element_key,
+ self, action_name, messenger, logdir, logfile, max_retries, tries, message_element_name, message_element_key,
):
self.action_name = action_name
@@ -666,9 +615,7 @@ class ChildJob:
# for front end display if not already set or explicitly
# overridden here.
#
- def message(
- self, message_type, message, element_name=None, element_key=None, **kwargs
- ):
+ def message(self, message_type, message, element_name=None, element_key=None, **kwargs):
kwargs["scheduler"] = True
# If default name & key values not provided, set as given job attributes
if element_name is None:
@@ -676,13 +623,7 @@ class ChildJob:
if element_key is None:
element_key = self._message_element_key
self._messenger.message(
- Message(
- message_type,
- message,
- element_name=element_name,
- element_key=element_key,
- **kwargs
- )
+ Message(message_type, message, element_name=element_name, element_key=element_key, **kwargs)
)
# send_message()
@@ -720,11 +661,7 @@ class ChildJob:
# the result of the Job.
#
def child_process(self):
- raise ImplError(
- "ChildJob '{kind}' does not implement child_process()".format(
- kind=type(self).__name__
- )
- )
+ raise ImplError("ChildJob '{kind}' does not implement child_process()".format(kind=type(self).__name__))
# child_process_data()
#
@@ -782,22 +719,18 @@ class ChildJob:
# Time, log and run the action function
#
- with _signals.suspendable(
- stop_time, resume_time
- ), self._messenger.recorded_messages(self._logfile, self._logdir) as filename:
+ with _signals.suspendable(stop_time, resume_time), self._messenger.recorded_messages(
+ self._logfile, self._logdir
+ ) as filename:
self.message(MessageType.START, self.action_name, logfile=filename)
try:
# Try the task action
- result = (
- self.child_process()
- ) # pylint: disable=assignment-from-no-return
+ result = self.child_process() # pylint: disable=assignment-from-no-return
except SkipJob as e:
elapsed = datetime.datetime.now() - starttime
- self.message(
- MessageType.SKIPPED, str(e), elapsed=elapsed, logfile=filename
- )
+ self.message(MessageType.SKIPPED, str(e), elapsed=elapsed, logfile=filename)
# Alert parent of skip by return code
self._child_shutdown(_ReturnCode.SKIPPED)
@@ -829,9 +762,7 @@ class ChildJob:
# Set return code based on whether or not the error was temporary.
#
- self._child_shutdown(
- _ReturnCode.FAIL if retry_flag else _ReturnCode.PERM_FAIL
- )
+ self._child_shutdown(_ReturnCode.FAIL if retry_flag else _ReturnCode.PERM_FAIL)
except Exception: # pylint: disable=broad-except
@@ -840,16 +771,10 @@ class ChildJob:
# and print it to the log file.
#
elapsed = datetime.datetime.now() - starttime
- detail = "An unhandled exception occurred:\n\n{}".format(
- traceback.format_exc()
- )
+ detail = "An unhandled exception occurred:\n\n{}".format(traceback.format_exc())
self.message(
- MessageType.BUG,
- self.action_name,
- elapsed=elapsed,
- detail=detail,
- logfile=filename,
+ MessageType.BUG, self.action_name, elapsed=elapsed, detail=detail, logfile=filename,
)
# Unhandled exceptions should permanently fail
self._child_shutdown(_ReturnCode.PERM_FAIL)
@@ -861,10 +786,7 @@ class ChildJob:
elapsed = datetime.datetime.now() - starttime
self.message(
- MessageType.SUCCESS,
- self.action_name,
- elapsed=elapsed,
- logfile=filename,
+ MessageType.SUCCESS, self.action_name, elapsed=elapsed, logfile=filename,
)
# Shutdown needs to stay outside of the above context manager,
diff --git a/src/buildstream/_scheduler/jobs/jobpickler.py b/src/buildstream/_scheduler/jobs/jobpickler.py
index 0b482d080..1d47f67db 100644
--- a/src/buildstream/_scheduler/jobs/jobpickler.py
+++ b/src/buildstream/_scheduler/jobs/jobpickler.py
@@ -23,9 +23,7 @@ import io
import pickle
from ..._protos.buildstream.v2.artifact_pb2 import Artifact as ArtifactProto
-from ..._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import (
- Digest as DigestProto,
-)
+from ..._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import Digest as DigestProto
# BuildStream toplevel imports
from ..._loader import Loader
@@ -143,10 +141,7 @@ def _pickle_child_job_data(child_job_data, projects):
]
plugin_class_to_factory = {
- cls: factory
- for factory in factory_list
- if factory is not None
- for cls, _ in factory.all_loaded_plugins()
+ cls: factory for factory in factory_list if factory is not None for cls, _ in factory.all_loaded_plugins()
}
pickled_data = io.BytesIO()
diff --git a/src/buildstream/_scheduler/queues/queue.py b/src/buildstream/_scheduler/queues/queue.py
index 62ebcc003..d812a48d6 100644
--- a/src/buildstream/_scheduler/queues/queue.py
+++ b/src/buildstream/_scheduler/queues/queue.py
@@ -76,23 +76,16 @@ class Queue:
self._done_queue = deque() # Processed / Skipped elements
self._max_retries = 0
- self._required_element_check = (
- False # Whether we should check that elements are required before enqueuing
- )
+ self._required_element_check = False # Whether we should check that elements are required before enqueuing
# Assert the subclass has setup class data
assert self.action_name is not None
assert self.complete_name is not None
- if (
- ResourceType.UPLOAD in self.resources
- or ResourceType.DOWNLOAD in self.resources
- ):
+ if ResourceType.UPLOAD in self.resources or ResourceType.DOWNLOAD in self.resources:
self._max_retries = scheduler.context.sched_network_retries
- self._task_group = self._scheduler._state.add_task_group(
- self.action_name, self.complete_name
- )
+ self._task_group = self._scheduler._state.add_task_group(self.action_name, self.complete_name)
# destroy()
#
@@ -169,11 +162,7 @@ class Queue:
# element (Element): The element waiting to be pushed into the queue
#
def register_pending_element(self, element):
- raise ImplError(
- "Queue type: {} does not implement register_pending_element()".format(
- self.action_name
- )
- )
+ raise ImplError("Queue type: {} does not implement register_pending_element()".format(self.action_name))
#####################################################
# Scheduler / Pipeline facing APIs #
@@ -293,10 +282,7 @@ class Queue:
workspaces.save_config()
except BstError as e:
self._message(
- element,
- MessageType.ERROR,
- "Error saving workspaces",
- detail=str(e),
+ element, MessageType.ERROR, "Error saving workspaces", detail=str(e),
)
except Exception: # pylint: disable=broad-except
self._message(
@@ -334,9 +320,7 @@ class Queue:
# Report error and mark as failed
#
- self._message(
- element, MessageType.ERROR, "Post processing error", detail=str(e)
- )
+ self._message(element, MessageType.ERROR, "Post processing error", detail=str(e))
self._task_group.add_failed_task(element._get_full_name())
# Treat this as a task error as it's related to a task
@@ -351,10 +335,7 @@ class Queue:
# Report unhandled exceptions and mark as failed
#
self._message(
- element,
- MessageType.BUG,
- "Unhandled exception in post processing",
- detail=traceback.format_exc(),
+ element, MessageType.BUG, "Unhandled exception in post processing", detail=traceback.format_exc(),
)
self._task_group.add_failed_task(element._get_full_name())
else:
@@ -372,9 +353,7 @@ class Queue:
# Convenience wrapper for Queue implementations to send
# a message for the element they are processing
def _message(self, element, message_type, brief, **kwargs):
- message = Message(
- message_type, brief, element_name=element._get_full_name(), **kwargs
- )
+ message = Message(message_type, brief, element_name=element._get_full_name(), **kwargs)
self._scheduler.notify_messenger(message)
def _element_log_path(self, element):
diff --git a/src/buildstream/_scheduler/resources.py b/src/buildstream/_scheduler/resources.py
index 946a7f0b1..e76158779 100644
--- a/src/buildstream/_scheduler/resources.py
+++ b/src/buildstream/_scheduler/resources.py
@@ -90,10 +90,7 @@ class Resources:
# available. If we don't have enough, the job cannot be
# scheduled.
for resource in resources:
- if (
- self._max_resources[resource] > 0
- and self._used_resources[resource] >= self._max_resources[resource]
- ):
+ if self._max_resources[resource] > 0 and self._used_resources[resource] >= self._max_resources[resource]:
return False
# Now we register the fact that our job is using the resources
diff --git a/src/buildstream/_scheduler/scheduler.py b/src/buildstream/_scheduler/scheduler.py
index 0555b1103..6268ec169 100644
--- a/src/buildstream/_scheduler/scheduler.py
+++ b/src/buildstream/_scheduler/scheduler.py
@@ -122,16 +122,12 @@ class Scheduler:
#
self.queues = None # Exposed for the frontend to print summaries
self.context = context # The Context object shared with Queues
- self.terminated = (
- False # Whether the scheduler was asked to terminate or has terminated
- )
+ self.terminated = False # Whether the scheduler was asked to terminate or has terminated
self.suspended = False # Whether the scheduler is currently suspended
# These are shared with the Job, but should probably be removed or made private in some way.
self.loop = None # Shared for Job access to observe the message queue
- self.internal_stops = (
- 0 # Amount of SIGTSTP signals we've introduced, this is shared with job.py
- )
+ self.internal_stops = 0 # Amount of SIGTSTP signals we've introduced, this is shared with job.py
#
# Private members
@@ -147,9 +143,7 @@ class Scheduler:
self._notification_queue = notification_queue
self._notifier = notifier
- self.resources = Resources(
- context.sched_builders, context.sched_fetchers, context.sched_pushers
- )
+ self.resources = Resources(context.sched_builders, context.sched_fetchers, context.sched_pushers)
# run()
#
@@ -191,9 +185,7 @@ class Scheduler:
_watcher.add_child_handler(casd_process.pid, self._abort_on_casd_failure)
# Start the profiler
- with PROFILER.profile(
- Topics.SCHEDULER, "_".join(queue.action_name for queue in self.queues)
- ):
+ with PROFILER.profile(Topics.SCHEDULER, "_".join(queue.action_name for queue in self.queues)):
# Run the queues
self._sched()
self.loop.run_forever()
@@ -349,9 +341,7 @@ class Scheduler:
# returncode (int): the return code with which buildbox-casd exited
#
def _abort_on_casd_failure(self, pid, returncode):
- message = Message(
- MessageType.BUG, "buildbox-casd died while the pipeline was active."
- )
+ message = Message(MessageType.BUG, "buildbox-casd died while the pipeline was active.")
self._notify(Notification(NotificationType.MESSAGE, message=message))
self._casd_process.returncode = returncode
@@ -407,9 +397,7 @@ class Scheduler:
# to fetch tasks for elements which failed to pull, and
# thus need all the pulls to complete before ever starting
# a build
- ready.extend(
- chain.from_iterable(q.harvest_jobs() for q in reversed(self.queues))
- )
+ ready.extend(chain.from_iterable(q.harvest_jobs() for q in reversed(self.queues)))
# harvest_jobs() may have decided to skip some jobs, making
# them eligible for promotion to the next queue as a side effect.
@@ -419,11 +407,7 @@ class Scheduler:
# Make sure fork is allowed before starting jobs
if not self.context.prepare_fork():
- message = Message(
- MessageType.BUG,
- "Fork is not allowed",
- detail="Background threads are active",
- )
+ message = Message(MessageType.BUG, "Fork is not allowed", detail="Background threads are active",)
self._notify(Notification(NotificationType.MESSAGE, message=message))
self.terminate_jobs()
return
@@ -484,9 +468,7 @@ class Scheduler:
# Notify that we're unsuspended
self._notify(Notification(NotificationType.SUSPENDED))
self._starttime += datetime.datetime.now() - self._suspendtime
- self._notify(
- Notification(NotificationType.SCHED_START_TIME, time=self._starttime)
- )
+ self._notify(Notification(NotificationType.SCHED_START_TIME, time=self._starttime))
self._suspendtime = None
# _interrupt_event():
diff --git a/src/buildstream/_sourcecache.py b/src/buildstream/_sourcecache.py
index 66e1c1bb9..29d0bd5ef 100644
--- a/src/buildstream/_sourcecache.py
+++ b/src/buildstream/_sourcecache.py
@@ -190,9 +190,7 @@ class SourceCache(BaseCache):
vdir.import_files(self.export(previous_source))
if not source.BST_STAGE_VIRTUAL_DIRECTORY:
- with utils._tempdir(
- dir=self.context.tmpdir, prefix="staging-temp"
- ) as tmpdir:
+ with utils._tempdir(dir=self.context.tmpdir, prefix="staging-temp") as tmpdir:
if not vdir.is_empty():
vdir.export_files(tmpdir)
source._stage(tmpdir)
@@ -244,15 +242,11 @@ class SourceCache(BaseCache):
source_proto = self._pull_source(ref, remote)
if source_proto is None:
source.info(
- "Remote source service ({}) does not have source {} cached".format(
- remote, display_key
- )
+ "Remote source service ({}) does not have source {} cached".format(remote, display_key)
)
continue
except CASError as e:
- raise SourceCacheError(
- "Failed to pull source {}: {}".format(display_key, e)
- ) from e
+ raise SourceCacheError("Failed to pull source {}: {}".format(display_key, e)) from e
if not source_proto:
return False
@@ -260,40 +254,26 @@ class SourceCache(BaseCache):
for remote in storage_remotes:
try:
remote.init()
- source.status(
- "Pulling data for source {} <- {}".format(display_key, remote)
- )
+ source.status("Pulling data for source {} <- {}".format(display_key, remote))
# Fetch source blobs
self.cas._fetch_directory(remote, source_proto.files)
- required_blobs = self.cas.required_blobs_for_directory(
- source_proto.files
- )
+ required_blobs = self.cas.required_blobs_for_directory(source_proto.files)
missing_blobs = self.cas.local_missing_blobs(required_blobs)
missing_blobs = self.cas.fetch_blobs(remote, missing_blobs)
if missing_blobs:
- source.info(
- "Remote cas ({}) does not have source {} cached".format(
- remote, display_key
- )
- )
+ source.info("Remote cas ({}) does not have source {} cached".format(remote, display_key))
continue
source.info("Pulled source {} <- {}".format(display_key, remote))
return True
except BlobNotFound as e:
# Not all blobs are available on this remote
- source.info(
- "Remote cas ({}) does not have blob {} cached".format(
- remote, e.blob
- )
- )
+ source.info("Remote cas ({}) does not have blob {} cached".format(remote, e.blob))
continue
except CASError as e:
- raise SourceCacheError(
- "Failed to pull source {}: {}".format(display_key, e)
- ) from e
+ raise SourceCacheError("Failed to pull source {}: {}".format(display_key, e)) from e
return False
@@ -325,18 +305,14 @@ class SourceCache(BaseCache):
display_key = source._get_brief_display_key()
for remote in storage_remotes:
remote.init()
- source.status(
- "Pushing data for source {} -> {}".format(display_key, remote)
- )
+ source.status("Pushing data for source {} -> {}".format(display_key, remote))
source_proto = self._get_source(ref)
try:
self.cas._send_directory(remote, source_proto.files)
pushed_storage = True
except CASRemoteError:
- source.info(
- "Failed to push source files {} -> {}".format(display_key, remote)
- )
+ source.info("Failed to push source files {} -> {}".format(display_key, remote))
continue
for remote in index_remotes:
@@ -345,19 +321,11 @@ class SourceCache(BaseCache):
# check whether cache has files already
if self._pull_source(ref, remote) is not None:
- source.info(
- "Remote ({}) already has source {} cached".format(
- remote, display_key
- )
- )
+ source.info("Remote ({}) already has source {} cached".format(remote, display_key))
continue
if not self._push_source(ref, remote):
- source.info(
- "Failed to push source metadata {} -> {}".format(
- display_key, remote
- )
- )
+ source.info("Failed to push source metadata {} -> {}".format(display_key, remote))
continue
source.info("Pushed source {} -> {}".format(display_key, remote))
@@ -388,9 +356,7 @@ class SourceCache(BaseCache):
source_proto.ParseFromString(f.read())
return source_proto
except FileNotFoundError as e:
- raise SourceCacheError(
- "Attempted to access unavailable source: {}".format(e)
- ) from e
+ raise SourceCacheError("Attempted to access unavailable source: {}".format(e)) from e
def _source_path(self, ref):
return os.path.join(self.sourcerefdir, ref)
diff --git a/src/buildstream/_state.py b/src/buildstream/_state.py
index 07e1f8c9e..d85e348f2 100644
--- a/src/buildstream/_state.py
+++ b/src/buildstream/_state.py
@@ -244,9 +244,7 @@ class State:
# TaskGroup: The task group created
#
def add_task_group(self, name, complete_name=None):
- assert (
- name not in self.task_groups
- ), "Trying to add task group '{}' to '{}'".format(name, self.task_groups)
+ assert name not in self.task_groups, "Trying to add task group '{}' to '{}'".format(name, self.task_groups)
group = TaskGroup(name, self, complete_name)
self.task_groups[name] = group
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 402473e33..aa14f12c9 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -74,13 +74,7 @@ from . import Scope, Consistency
#
class Stream:
def __init__(
- self,
- context,
- session_start,
- *,
- session_start_callback=None,
- interrupt_callback=None,
- ticker_callback=None
+ self, context, session_start, *, session_start_callback=None, interrupt_callback=None, ticker_callback=None
):
#
@@ -101,26 +95,18 @@ class Stream:
self._pipeline = None
self._state = State(session_start) # Owned by Stream, used by Core to set state
self._notification_queue = deque()
- self._starttime = (
- session_start # Synchronised with Scheduler's relative start time
- )
+ self._starttime = session_start # Synchronised with Scheduler's relative start time
context.messenger.set_state(self._state)
self._scheduler = Scheduler(
- context,
- session_start,
- self._state,
- self._notification_queue,
- self._scheduler_notification_handler,
+ context, session_start, self._state, self._notification_queue, self._scheduler_notification_handler,
)
self._first_non_track_queue = None
self._session_start_callback = session_start_callback
self._ticker_callback = ticker_callback
self._interrupt_callback = interrupt_callback
- self._notifier = (
- self._scheduler._stream_notification_handler
- ) # Assign the schedulers notification handler
+ self._notifier = self._scheduler._stream_notification_handler # Assign the schedulers notification handler
self._scheduler_running = False
self._scheduler_terminated = False
self._scheduler_suspended = False
@@ -177,9 +163,7 @@ class Stream:
use_artifact_config=False,
load_refs=False
):
- with PROFILER.profile(
- Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, "-") for t in targets)
- ):
+ with PROFILER.profile(Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, "-") for t in targets)):
target_objects, _ = self._load(
targets,
(),
@@ -233,22 +217,15 @@ class Stream:
# in which case we just blindly trust the directory, using the element
# definitions to control the execution environment only.
if directory is None:
- missing_deps = [
- dep
- for dep in self._pipeline.dependencies([element], scope)
- if not dep._cached()
- ]
+ missing_deps = [dep for dep in self._pipeline.dependencies([element], scope) if not dep._cached()]
if missing_deps:
if not pull_dependencies:
raise StreamError(
"Elements need to be built or downloaded before staging a shell environment",
- detail="\n".join(
- list(map(lambda x: x._get_full_name(), missing_deps))
- ),
+ detail="\n".join(list(map(lambda x: x._get_full_name(), missing_deps))),
)
self._message(
- MessageType.INFO,
- "Attempting to fetch missing or incomplete artifacts",
+ MessageType.INFO, "Attempting to fetch missing or incomplete artifacts",
)
self._scheduler.clear_queues()
self._add_queue(PullQueue(self._scheduler))
@@ -264,8 +241,7 @@ class Stream:
# Attempt a pull queue for the given element if remote and context allow it
if require_buildtree:
self._message(
- MessageType.INFO,
- "Attempting to fetch missing artifact buildtree",
+ MessageType.INFO, "Attempting to fetch missing artifact buildtree",
)
self._scheduler.clear_queues()
self._add_queue(PullQueue(self._scheduler))
@@ -280,20 +256,12 @@ class Stream:
if usebuildtree == "always":
raise StreamError(message)
- self._message(
- MessageType.INFO, message + ", shell will be loaded without it"
- )
+ self._message(MessageType.INFO, message + ", shell will be loaded without it")
else:
buildtree = True
return element._shell(
- scope,
- directory,
- mounts=mounts,
- isolate=isolate,
- prompt=prompt,
- command=command,
- usebuildtree=buildtree,
+ scope, directory, mounts=mounts, isolate=isolate, prompt=prompt, command=command, usebuildtree=buildtree,
)
# build()
@@ -309,14 +277,7 @@ class Stream:
# If `remote` specified as None, then regular configuration will be used
# to determine where to push artifacts to.
#
- def build(
- self,
- targets,
- *,
- selection=PipelineSelection.PLAN,
- ignore_junction_targets=False,
- remote=None
- ):
+ def build(self, targets, *, selection=PipelineSelection.PLAN, ignore_junction_targets=False, remote=None):
use_config = True
if remote:
@@ -336,9 +297,7 @@ class Stream:
# Assert that the elements are consistent
self._pipeline.assert_consistent(elements)
- if all(
- project.remote_execution_specs for project in self._context.get_projects()
- ):
+ if all(project.remote_execution_specs for project in self._context.get_projects()):
# Remote execution is configured for all projects.
# Require artifact files only for target elements and their runtime dependencies.
self._context.set_artifact_files_optional()
@@ -434,14 +393,7 @@ class Stream:
# If no error is encountered while tracking, then the project files
# are rewritten inline.
#
- def track(
- self,
- targets,
- *,
- selection=PipelineSelection.REDIRECT,
- except_targets=None,
- cross_junctions=False
- ):
+ def track(self, targets, *, selection=PipelineSelection.REDIRECT, except_targets=None, cross_junctions=False):
# We pass no target to build. Only to track. Passing build targets
# would fully load project configuration which might not be
@@ -475,14 +427,7 @@ class Stream:
# If `remote` specified as None, then regular configuration will be used
# to determine where to pull artifacts from.
#
- def pull(
- self,
- targets,
- *,
- selection=PipelineSelection.NONE,
- ignore_junction_targets=False,
- remote=None
- ):
+ def pull(self, targets, *, selection=PipelineSelection.NONE, ignore_junction_targets=False, remote=None):
use_config = True
if remote:
@@ -524,14 +469,7 @@ class Stream:
# a pull queue will be created if user context and available remotes allow for
# attempting to fetch them.
#
- def push(
- self,
- targets,
- *,
- selection=PipelineSelection.NONE,
- ignore_junction_targets=False,
- remote=None
- ):
+ def push(self, targets, *, selection=PipelineSelection.NONE, ignore_junction_targets=False, remote=None):
use_config = True
if remote:
@@ -555,9 +493,7 @@ class Stream:
# Check if we require a pull queue, with given artifact state and context
require_buildtrees = self._buildtree_pull_required(elements)
if require_buildtrees:
- self._message(
- MessageType.INFO, "Attempting to fetch missing artifact buildtrees"
- )
+ self._message(MessageType.INFO, "Attempting to fetch missing artifact buildtrees")
self._add_queue(PullQueue(self._scheduler))
self._enqueue_plan(require_buildtrees)
@@ -589,10 +525,7 @@ class Stream:
# NOTE: Usually we check the _SchedulerErrorAction when a *job* has failed.
# However, we cannot create a PushQueue job unless we intentionally
# ready an uncached element in the PushQueue.
- if (
- self._context.sched_error_action == _SchedulerErrorAction.CONTINUE
- and uncached_elements
- ):
+ if self._context.sched_error_action == _SchedulerErrorAction.CONTINUE and uncached_elements:
names = [element.name for element in uncached_elements]
fail_str = (
"Error while pushing. The following elements were not pushed as they are "
@@ -635,9 +568,7 @@ class Stream:
tar=False
):
- elements, _ = self._load(
- (target,), (), selection=selection, use_artifact_config=True, load_refs=True
- )
+ elements, _ = self._load((target,), (), selection=selection, use_artifact_config=True, load_refs=True)
# self.targets contains a list of the loaded target objects
# if we specify --deps build, Stream._load() will return a list
@@ -649,9 +580,7 @@ class Stream:
uncached_elts = [elt for elt in elements if not elt._cached()]
if uncached_elts and pull:
- self._message(
- MessageType.INFO, "Attempting to fetch missing or incomplete artifact"
- )
+ self._message(MessageType.INFO, "Attempting to fetch missing or incomplete artifact")
self._scheduler.clear_queues()
self._add_queue(PullQueue(self._scheduler))
self._enqueue_plan(uncached_elts)
@@ -664,19 +593,13 @@ class Stream:
"none": Scope.NONE,
"all": Scope.ALL,
}
- with target._prepare_sandbox(
- scope=scope[selection], directory=None, integrate=integrate
- ) as sandbox:
+ with target._prepare_sandbox(scope=scope[selection], directory=None, integrate=integrate) as sandbox:
# Copy or move the sandbox to the target directory
virdir = sandbox.get_virtual_directory()
- self._export_artifact(
- tar, location, compression, target, hardlinks, virdir
- )
+ self._export_artifact(tar, location, compression, target, hardlinks, virdir)
except BstError as e:
raise StreamError(
- "Error while staging dependencies into a sandbox" ": '{}'".format(e),
- detail=e.detail,
- reason=e.reason,
+ "Error while staging dependencies into a sandbox" ": '{}'".format(e), detail=e.detail, reason=e.reason,
) from e
# _export_artifact()
@@ -729,9 +652,7 @@ class Stream:
#
def artifact_show(self, targets, *, selection=PipelineSelection.NONE):
# Obtain list of Element and/or ArtifactElement objects
- target_objects = self.load_selection(
- targets, selection=selection, use_artifact_config=True, load_refs=True
- )
+ target_objects = self.load_selection(targets, selection=selection, use_artifact_config=True, load_refs=True)
if self._artifacts.has_fetch_remotes():
self._pipeline.check_remotes(target_objects)
@@ -756,9 +677,7 @@ class Stream:
#
def artifact_log(self, targets):
# Return list of Element and/or ArtifactElement objects
- target_objects = self.load_selection(
- targets, selection=PipelineSelection.NONE, load_refs=True
- )
+ target_objects = self.load_selection(targets, selection=PipelineSelection.NONE, load_refs=True)
artifact_logs = {}
for obj in target_objects:
@@ -767,9 +686,7 @@ class Stream:
self._message(MessageType.WARN, "{} is not cached".format(ref))
continue
elif not obj._cached_logs():
- self._message(
- MessageType.WARN, "{} is cached without log files".format(ref)
- )
+ self._message(MessageType.WARN, "{} is cached without log files".format(ref))
continue
artifact_logs[obj.name] = obj.get_logs()
@@ -788,9 +705,7 @@ class Stream:
#
def artifact_list_contents(self, targets):
# Return list of Element and/or ArtifactElement objects
- target_objects = self.load_selection(
- targets, selection=PipelineSelection.NONE, load_refs=True
- )
+ target_objects = self.load_selection(targets, selection=PipelineSelection.NONE, load_refs=True)
elements_to_files = {}
for obj in target_objects:
@@ -814,9 +729,7 @@ class Stream:
#
def artifact_delete(self, targets, *, selection=PipelineSelection.NONE):
# Return list of Element and/or ArtifactElement objects
- target_objects = self.load_selection(
- targets, selection=selection, load_refs=True
- )
+ target_objects = self.load_selection(targets, selection=selection, load_refs=True)
# Some of the targets may refer to the same key, so first obtain a
# set of the refs to be removed.
@@ -869,9 +782,7 @@ class Stream:
self._check_location_writable(location, force=force, tar=tar)
- elements, _ = self._load(
- (target,), (), selection=deps, except_targets=except_targets
- )
+ elements, _ = self._load((target,), (), selection=deps, except_targets=except_targets)
# Assert all sources are cached in the source dir
self._fetch(elements)
@@ -879,14 +790,10 @@ class Stream:
# Stage all sources determined by scope
try:
- self._source_checkout(
- elements, location, force, deps, tar, compression, include_build_scripts
- )
+ self._source_checkout(elements, location, force, deps, tar, compression, include_build_scripts)
except BstError as e:
raise StreamError(
- "Error while writing sources" ": '{}'".format(e),
- detail=e.detail,
- reason=e.reason,
+ "Error while writing sources" ": '{}'".format(e), detail=e.detail, reason=e.reason,
) from e
self._message(MessageType.INFO, "Checked out sources to '{}'".format(location))
@@ -934,18 +841,12 @@ class Stream:
for target in elements:
if not list(target.sources()):
- build_depends = [
- x.name for x in target.dependencies(Scope.BUILD, recurse=False)
- ]
+ build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]
if not build_depends:
- raise StreamError(
- "The element {} has no sources".format(target.name)
- )
+ raise StreamError("The element {} has no sources".format(target.name))
detail = "Try opening a workspace on one of its dependencies instead:\n"
detail += " \n".join(build_depends)
- raise StreamError(
- "The element {} has no sources".format(target.name), detail=detail
- )
+ raise StreamError("The element {} has no sources".format(target.name), detail=detail)
# Check for workspace config
workspace = workspaces.get_workspace(target._get_full_name())
@@ -962,16 +863,10 @@ class Stream:
target.name, workspace.get_absolute_path()
)
)
- self.workspace_close(
- target._get_full_name(), remove_dir=not no_checkout
- )
+ self.workspace_close(target._get_full_name(), remove_dir=not no_checkout)
target_consistency = target._get_consistency()
- if (
- not no_checkout
- and target_consistency < Consistency.CACHED
- and target_consistency._source_cached()
- ):
+ if not no_checkout and target_consistency < Consistency.CACHED and target_consistency._source_cached():
raise StreamError(
"Could not stage uncached source. For {} ".format(target.name)
+ "Use `--track` to track and "
@@ -980,9 +875,7 @@ class Stream:
)
if not custom_dir:
- directory = os.path.abspath(
- os.path.join(self._context.workspacedir, target.name)
- )
+ directory = os.path.abspath(os.path.join(self._context.workspacedir, target.name))
if directory[-4:] == ".bst":
directory = directory[:-4]
expanded_directories.append(directory)
@@ -1006,17 +899,13 @@ class Stream:
if os.path.exists(directory):
if not os.path.isdir(directory):
raise StreamError(
- "For element '{}', Directory path is not a directory: {}".format(
- target.name, directory
- ),
+ "For element '{}', Directory path is not a directory: {}".format(target.name, directory),
reason="bad-directory",
)
if not (no_checkout or force) and os.listdir(directory):
raise StreamError(
- "For element '{}', Directory path is not empty: {}".format(
- target.name, directory
- ),
+ "For element '{}', Directory path is not empty: {}".format(target.name, directory),
reason="bad-directory",
)
if os.listdir(directory):
@@ -1028,8 +917,7 @@ class Stream:
targetGenerator = zip(elements, expanded_directories)
for target, directory in targetGenerator:
self._message(
- MessageType.INFO,
- "Creating workspace for element {}".format(target.name),
+ MessageType.INFO, "Creating workspace for element {}".format(target.name),
)
workspace = workspaces.get_workspace(target._get_full_name())
@@ -1040,22 +928,15 @@ class Stream:
try:
os.makedirs(directory, exist_ok=True)
except OSError as e:
- todo_elements = " ".join(
- [str(target.name) for target, directory_dict in targetGenerator]
- )
+ todo_elements = " ".join([str(target.name) for target, directory_dict in targetGenerator])
if todo_elements:
# This output should make creating the remaining workspaces as easy as possible.
- todo_elements = (
- "\nDid not try to create workspaces for " + todo_elements
- )
- raise StreamError(
- "Failed to create workspace directory: {}".format(e) + todo_elements
- ) from e
+ todo_elements = "\nDid not try to create workspaces for " + todo_elements
+ raise StreamError("Failed to create workspace directory: {}".format(e) + todo_elements) from e
workspaces.create_workspace(target, directory, checkout=not no_checkout)
self._message(
- MessageType.INFO,
- "Created a workspace for element: {}".format(target._get_full_name()),
+ MessageType.INFO, "Created a workspace for element: {}".format(target._get_full_name()),
)
# workspace_close
@@ -1078,11 +959,7 @@ class Stream:
try:
shutil.rmtree(workspace.get_absolute_path())
except OSError as e:
- raise StreamError(
- "Could not remove '{}': {}".format(
- workspace.get_absolute_path(), e
- )
- ) from e
+ raise StreamError("Could not remove '{}': {}".format(workspace.get_absolute_path(), e)) from e
# Delete the workspace and save the configuration
workspaces.delete_workspace(element_name)
@@ -1102,10 +979,7 @@ class Stream:
def workspace_reset(self, targets, *, soft, track_first):
elements, _ = self._load(
- targets,
- [],
- selection=PipelineSelection.REDIRECT,
- track_selection=PipelineSelection.REDIRECT,
+ targets, [], selection=PipelineSelection.REDIRECT, track_selection=PipelineSelection.REDIRECT,
)
nonexisting = []
@@ -1123,10 +997,7 @@ class Stream:
if soft:
workspace.prepared = False
self._message(
- MessageType.INFO,
- "Reset workspace state for {} at: {}".format(
- element.name, workspace_path
- ),
+ MessageType.INFO, "Reset workspace state for {} at: {}".format(element.name, workspace_path),
)
continue
@@ -1218,10 +1089,7 @@ class Stream:
output_elements.add(e)
if load_elements:
loaded_elements, _ = self._load(
- load_elements,
- (),
- selection=PipelineSelection.REDIRECT,
- track_selection=PipelineSelection.REDIRECT,
+ load_elements, (), selection=PipelineSelection.REDIRECT, track_selection=PipelineSelection.REDIRECT,
)
for e in loaded_elements:
@@ -1379,15 +1247,9 @@ class Stream:
if target_artifacts:
if not load_refs:
detail = "\n".join(target_artifacts)
- raise ArtifactElementError(
- "Cannot perform this operation with artifact refs:", detail=detail
- )
+ raise ArtifactElementError("Cannot perform this operation with artifact refs:", detail=detail)
if selection in (PipelineSelection.ALL, PipelineSelection.RUN):
- raise StreamError(
- "Error: '--deps {}' is not supported for artifact refs".format(
- selection
- )
- )
+ raise StreamError("Error: '--deps {}' is not supported for artifact refs".format(selection))
# Load rewritable if we have any tracking selection to make
rewritable = False
@@ -1402,12 +1264,7 @@ class Stream:
track_except_targets,
]
if any(loadable):
- (
- elements,
- except_elements,
- track_elements,
- track_except_elements,
- ) = self._pipeline.load(
+ (elements, except_elements, track_elements, track_except_elements,) = self._pipeline.load(
loadable, rewritable=rewritable, ignore_workspaces=ignore_workspaces
)
else:
@@ -1419,9 +1276,7 @@ class Stream:
)
# Load all target artifacts
- artifacts = (
- self._pipeline.load_artifacts(target_artifacts) if target_artifacts else []
- )
+ artifacts = self._pipeline.load_artifacts(target_artifacts) if target_artifacts else []
# Optionally filter out junction elements
if ignore_junction_targets:
@@ -1437,10 +1292,7 @@ class Stream:
# This can happen with `bst build --track`
#
if targets and not self._pipeline.targets_include(elements, track_elements):
- raise StreamError(
- "Specified tracking targets that are not "
- "within the scope of primary targets"
- )
+ raise StreamError("Specified tracking targets that are not " "within the scope of primary targets")
# First take care of marking tracking elements, this must be
# done before resolving element states.
@@ -1462,14 +1314,10 @@ class Stream:
for project, project_elements in track_projects.items():
selected = self._pipeline.get_selection(project_elements, track_selection)
- selected = self._pipeline.track_cross_junction_filter(
- project, selected, track_cross_junctions
- )
+ selected = self._pipeline.track_cross_junction_filter(project, selected, track_cross_junctions)
track_selected.extend(selected)
- track_selected = self._pipeline.except_elements(
- track_elements, track_selected, track_except_elements
- )
+ track_selected = self._pipeline.except_elements(track_elements, track_selected, track_except_elements)
for element in track_selected:
element._schedule_tracking()
@@ -1483,20 +1331,14 @@ class Stream:
project.ensure_fully_loaded()
# Connect to remote caches, this needs to be done before resolving element state
- self._artifacts.setup_remotes(
- use_config=use_artifact_config, remote_url=artifact_remote_url
- )
- self._sourcecache.setup_remotes(
- use_config=use_source_config, remote_url=source_remote_url
- )
+ self._artifacts.setup_remotes(use_config=use_artifact_config, remote_url=artifact_remote_url)
+ self._sourcecache.setup_remotes(use_config=use_source_config, remote_url=source_remote_url)
# Now move on to loading primary selection.
#
self._pipeline.resolve_elements(self.targets)
selected = self._pipeline.get_selection(self.targets, selection, silent=False)
- selected = self._pipeline.except_elements(
- self.targets, selected, except_elements
- )
+ selected = self._pipeline.except_elements(self.targets, selected, except_elements)
if selection == PipelineSelection.PLAN and dynamic_plan:
# We use a dynamic build plan, only request artifacts of top-level targets,
@@ -1557,9 +1399,7 @@ class Stream:
# unique_id (str): A unique_id to load an Element instance
#
def _failure_retry(self, action_name, unique_id):
- notification = Notification(
- NotificationType.RETRY, job_action=action_name, element=unique_id
- )
+ notification = Notification(NotificationType.RETRY, job_action=action_name, element=unique_id)
self._notify(notification)
# _run()
@@ -1576,9 +1416,7 @@ class Stream:
if self._session_start_callback is not None:
self._session_start_callback()
- status = self._scheduler.run(
- self.queues, self._context.get_cascache().get_casd_process()
- )
+ status = self._scheduler.run(self.queues, self._context.get_cascache().get_casd_process())
if status == SchedStatus.ERROR:
raise StreamError()
@@ -1643,17 +1481,11 @@ class Stream:
try:
os.makedirs(location, exist_ok=True)
except OSError as e:
- raise StreamError(
- "Failed to create destination directory: '{}'".format(e)
- ) from e
+ raise StreamError("Failed to create destination directory: '{}'".format(e)) from e
if not os.access(location, os.W_OK):
- raise StreamError(
- "Destination directory '{}' not writable".format(location)
- )
+ raise StreamError("Destination directory '{}' not writable".format(location))
if not force and os.listdir(location):
- raise StreamError(
- "Destination directory '{}' not empty".format(location)
- )
+ raise StreamError("Destination directory '{}' not empty".format(location))
elif os.path.exists(location) and location != "-":
if not os.access(location, os.W_OK):
raise StreamError("Output file '{}' not writable".format(location))
@@ -1666,9 +1498,7 @@ class Stream:
try:
utils.safe_remove(directory)
except OSError as e:
- raise StreamError(
- "Failed to remove checkout directory: {}".format(e)
- ) from e
+ raise StreamError("Failed to remove checkout directory: {}".format(e)) from e
sandbox_vroot.export_files(directory, can_link=True, can_destroy=True)
@@ -1698,9 +1528,7 @@ class Stream:
else:
self._move_directory(temp_source_dir.name, location, force)
except OSError as e:
- raise StreamError(
- "Failed to checkout sources to {}: {}".format(location, e)
- ) from e
+ raise StreamError("Failed to checkout sources to {}: {}".format(location, e)) from e
finally:
with suppress(FileNotFoundError):
temp_source_dir.cleanup()
@@ -1819,11 +1647,7 @@ class Stream:
for element in elements:
# Check if element is partially cached without its buildtree, as the element
# artifact may not be cached at all
- if (
- element._cached()
- and not element._cached_buildtree()
- and element._buildtree_exists()
- ):
+ if element._cached() and not element._cached_buildtree() and element._buildtree_exists():
required_list.append(element)
return required_list
@@ -1877,10 +1701,7 @@ class Stream:
artifact_refs.extend(self._artifacts.list_artifacts(glob=glob))
if not artifact_refs:
self._message(
- MessageType.WARN,
- "No artifacts found for globs: {}".format(
- ", ".join(artifact_globs)
- ),
+ MessageType.WARN, "No artifacts found for globs: {}".format(", ".join(artifact_globs)),
)
return element_targets, artifact_refs
@@ -1897,16 +1718,12 @@ class Stream:
elif notification.notification_type == NotificationType.TICK:
self._ticker_callback()
elif notification.notification_type == NotificationType.JOB_START:
- self._state.add_task(
- notification.job_action, notification.full_name, notification.time
- )
+ self._state.add_task(notification.job_action, notification.full_name, notification.time)
elif notification.notification_type == NotificationType.JOB_COMPLETE:
self._state.remove_task(notification.job_action, notification.full_name)
if notification.job_status == JobStatus.FAIL:
self._state.fail_task(
- notification.job_action,
- notification.full_name,
- notification.element,
+ notification.job_action, notification.full_name, notification.element,
)
elif notification.notification_type == NotificationType.SCHED_START_TIME:
self._starttime = notification.time
diff --git a/src/buildstream/_version.py b/src/buildstream/_version.py
index 12dde1df8..baeb00699 100644
--- a/src/buildstream/_version.py
+++ b/src/buildstream/_version.py
@@ -134,10 +134,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
root = os.path.dirname(root) # up a level
if verbose:
- print(
- "Tried directories %s but none started with prefix %s"
- % (str(rootdirs), parentdir_prefix)
- )
+ print("Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@@ -254,15 +251,7 @@ def git_pieces_from_vcs(tag_prefix, tag_regex, root, verbose, run_command=run_co
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(
GITS,
- [
- "describe",
- "--tags",
- "--dirty",
- "--always",
- "--long",
- "--match",
- "%s%s" % (tag_prefix, tag_regex),
- ],
+ ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s%s" % (tag_prefix, tag_regex),],
cwd=root,
)
# --long was added in git-1.5.5
@@ -305,10 +294,7 @@ def git_pieces_from_vcs(tag_prefix, tag_regex, root, verbose, run_command=run_co
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
- pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
- full_tag,
- tag_prefix,
- )
+ pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix,)
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix) :]
@@ -325,9 +311,7 @@ def git_pieces_from_vcs(tag_prefix, tag_regex, root, verbose, run_command=run_co
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[
- 0
- ].strip()
+ date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
diff --git a/src/buildstream/_workspaces.py b/src/buildstream/_workspaces.py
index 488630634..45ae87a87 100644
--- a/src/buildstream/_workspaces.py
+++ b/src/buildstream/_workspaces.py
@@ -92,9 +92,7 @@ class WorkspaceProject:
def from_dict(cls, directory, dictionary):
# Only know how to handle one format-version at the moment.
format_version = int(dictionary["format-version"])
- assert (
- format_version == BST_WORKSPACE_PROJECT_FORMAT_VERSION
- ), "Format version {} not found in {}".format(
+ assert format_version == BST_WORKSPACE_PROJECT_FORMAT_VERSION, "Format version {} not found in {}".format(
BST_WORKSPACE_PROJECT_FORMAT_VERSION, dictionary
)
@@ -149,9 +147,7 @@ class WorkspaceProject:
#
def add_project(self, project_path, element_name):
assert project_path and element_name
- self._projects.append(
- {"project-path": project_path, "element-name": element_name}
- )
+ self._projects.append({"project-path": project_path, "element-name": element_name})
# WorkspaceProjectCache()
@@ -222,8 +218,7 @@ class WorkspaceProjectCache:
workspace_project = self.get(directory)
if not workspace_project:
raise LoadError(
- "Failed to find a {} file to remove".format(WORKSPACE_PROJECT_FILE),
- LoadErrorReason.MISSING_FILE,
+ "Failed to find a {} file to remove".format(WORKSPACE_PROJECT_FILE), LoadErrorReason.MISSING_FILE,
)
path = workspace_project.get_filename()
try:
@@ -250,15 +245,7 @@ class WorkspaceProjectCache:
# made obsolete with failed build artifacts.
#
class Workspace:
- def __init__(
- self,
- toplevel_project,
- *,
- last_successful=None,
- path=None,
- prepared=False,
- running_files=None
- ):
+ def __init__(self, toplevel_project, *, last_successful=None, path=None, prepared=False, running_files=None):
self.prepared = prepared
self.last_successful = last_successful
self._path = path
@@ -336,9 +323,7 @@ class Workspace:
if os.path.isdir(fullpath):
utils.copy_files(fullpath, directory)
else:
- destfile = os.path.join(
- directory, os.path.basename(self.get_absolute_path())
- )
+ destfile = os.path.join(directory, os.path.basename(self.get_absolute_path()))
utils.safe_copy(fullpath, destfile)
# add_running_files()
@@ -381,8 +366,7 @@ class Workspace:
stat = os.lstat(filename)
except OSError as e:
raise LoadError(
- "Failed to stat file in workspace: {}".format(e),
- LoadErrorReason.MISSING_FILE,
+ "Failed to stat file in workspace: {}".format(e), LoadErrorReason.MISSING_FILE,
)
# Use the mtime of any file with sub second precision
@@ -397,16 +381,12 @@ class Workspace:
if os.path.isdir(fullpath):
filelist = utils.list_relative_paths(fullpath)
filelist = [
- (relpath, os.path.join(fullpath, relpath))
- for relpath in filelist
- if relpath not in excluded_files
+ (relpath, os.path.join(fullpath, relpath)) for relpath in filelist if relpath not in excluded_files
]
else:
filelist = [(self.get_absolute_path(), fullpath)]
- self._key = [
- (relpath, unique_key(fullpath)) for relpath, fullpath in filelist
- ]
+ self._key = [(relpath, unique_key(fullpath)) for relpath, fullpath in filelist]
return self._key
@@ -462,25 +442,17 @@ class Workspaces:
else:
workspace_path = path
- self._workspaces[element_name] = Workspace(
- self._toplevel_project, path=workspace_path
- )
+ self._workspaces[element_name] = Workspace(self._toplevel_project, path=workspace_path)
if checkout:
with target.timed_activity("Staging sources to {}".format(path)):
target._open_workspace()
- workspace_project = self._workspace_project_cache.add(
- path, project_dir, element_name
- )
+ workspace_project = self._workspace_project_cache.add(path, project_dir, element_name)
project_file_path = workspace_project.get_filename()
if os.path.exists(project_file_path):
- target.warn(
- "{} was staged from this element's sources".format(
- WORKSPACE_PROJECT_FILE
- )
- )
+ target.warn("{} was staged from this element's sources".format(WORKSPACE_PROJECT_FILE))
workspace_project.write()
self.save_config()
@@ -556,10 +528,7 @@ class Workspaces:
config = {
"format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {
- element: workspace.to_dict()
- for element, workspace in self._workspaces.items()
- },
+ "workspaces": {element: workspace.to_dict() for element, workspace in self._workspaces.items()},
}
os.makedirs(self._bst_directory, exist_ok=True)
_yaml.roundtrip_dump(config, self._get_filename())
@@ -605,8 +574,7 @@ class Workspaces:
version = workspaces.get_int("format-version", default=0)
except ValueError:
raise LoadError(
- "Format version is not an integer in workspace configuration",
- LoadErrorReason.INVALID_DATA,
+ "Format version is not an integer in workspace configuration", LoadErrorReason.INVALID_DATA,
)
if version == 0:
@@ -626,16 +594,14 @@ class Workspaces:
+ "Please remove this element from '{}'."
)
raise LoadError(
- detail.format(element, self._get_filename()),
- LoadErrorReason.INVALID_DATA,
+ detail.format(element, self._get_filename()), LoadErrorReason.INVALID_DATA,
)
workspaces[element] = sources[0]
else:
raise LoadError(
- "Workspace config is in unexpected format.",
- LoadErrorReason.INVALID_DATA,
+ "Workspace config is in unexpected format.", LoadErrorReason.INVALID_DATA,
)
res = {
@@ -645,10 +611,7 @@ class Workspaces:
elif 1 <= version <= BST_WORKSPACE_FORMAT_VERSION:
workspaces = workspaces.get_mapping("workspaces", default={})
- res = {
- element: self._load_workspace(node)
- for element, node in workspaces.items()
- }
+ res = {element: self._load_workspace(node) for element, node in workspaces.items()}
else:
raise LoadError(
diff --git a/src/buildstream/buildelement.py b/src/buildstream/buildelement.py
index 55f0dc0c3..4fa678932 100644
--- a/src/buildstream/buildelement.py
+++ b/src/buildstream/buildelement.py
@@ -243,9 +243,7 @@ class BuildElement(Element):
if not commands or command_name == "configure-commands":
continue
- with sandbox.batch(
- SandboxFlags.ROOT_READ_ONLY, label="Running {}".format(command_name)
- ):
+ with sandbox.batch(SandboxFlags.ROOT_READ_ONLY, label="Running {}".format(command_name)):
for cmd in commands:
self.__run_command(sandbox, cmd)
@@ -253,9 +251,7 @@ class BuildElement(Element):
# to - if an element later attempts to stage to a location
# that is not empty, we abort the build - in this case this
# will almost certainly happen.
- staged_build = os.path.join(
- self.get_variable("install-root"), self.get_variable("build-root")
- )
+ staged_build = os.path.join(self.get_variable("install-root"), self.get_variable("build-root"))
if os.path.isdir(staged_build) and os.listdir(staged_build):
self.warn(
@@ -272,9 +268,7 @@ class BuildElement(Element):
def prepare(self, sandbox):
commands = self.__commands["configure-commands"]
if commands:
- with sandbox.batch(
- SandboxFlags.ROOT_READ_ONLY, label="Running configure-commands"
- ):
+ with sandbox.batch(SandboxFlags.ROOT_READ_ONLY, label="Running configure-commands"):
for cmd in commands:
self.__run_command(sandbox, cmd)
@@ -299,6 +293,4 @@ class BuildElement(Element):
# Note the -e switch to 'sh' means to exit with an error
# if any untested command fails.
#
- sandbox.run(
- ["sh", "-c", "-e", cmd + "\n"], SandboxFlags.ROOT_READ_ONLY, label=cmd
- )
+ sandbox.run(["sh", "-c", "-e", cmd + "\n"], SandboxFlags.ROOT_READ_ONLY, label=cmd)
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index fe9993d41..fbde79d0e 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -145,20 +145,10 @@ class ElementError(BstError):
"""
def __init__(
- self,
- message: str,
- *,
- detail: str = None,
- reason: str = None,
- collect: str = None,
- temporary: bool = False
+ self, message: str, *, detail: str = None, reason: str = None, collect: str = None, temporary: bool = False
):
super().__init__(
- message,
- detail=detail,
- domain=ErrorDomain.ELEMENT,
- reason=reason,
- temporary=temporary,
+ message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason, temporary=temporary,
)
self.collect = collect
@@ -233,11 +223,7 @@ class Element(Plugin):
"""
def __init__(
- self,
- context: "Context",
- project: "Project",
- meta: "MetaElement",
- plugin_conf: Dict[str, Any],
+ self, context: "Context", project: "Project", meta: "MetaElement", plugin_conf: Dict[str, Any],
):
self.__cache_key_dict = None # Dict for cache key calculation
@@ -268,31 +254,15 @@ class Element(Plugin):
self.__reverse_build_deps = set() # type: Set[Element]
# Direct reverse runtime dependency Elements
self.__reverse_runtime_deps = set() # type: Set[Element]
- self.__build_deps_without_strict_cache_key = (
- None # Number of build dependencies without a strict key
- )
- self.__runtime_deps_without_strict_cache_key = (
- None # Number of runtime dependencies without a strict key
- )
- self.__build_deps_without_cache_key = (
- None # Number of build dependencies without a cache key
- )
- self.__runtime_deps_without_cache_key = (
- None # Number of runtime dependencies without a cache key
- )
+ self.__build_deps_without_strict_cache_key = None # Number of build dependencies without a strict key
+ self.__runtime_deps_without_strict_cache_key = None # Number of runtime dependencies without a strict key
+ self.__build_deps_without_cache_key = None # Number of build dependencies without a cache key
+ self.__runtime_deps_without_cache_key = None # Number of runtime dependencies without a cache key
self.__build_deps_uncached = None # Build dependencies which are not yet cached
- self.__runtime_deps_uncached = (
- None # Runtime dependencies which are not yet cached
- )
- self.__updated_strict_cache_keys_of_rdeps = (
- False # Whether we've updated strict cache keys of rdeps
- )
- self.__ready_for_runtime = (
- False # Whether the element and its runtime dependencies have cache keys
- )
- self.__ready_for_runtime_and_cached = (
- False # Whether all runtime deps are cached, as well as the element
- )
+ self.__runtime_deps_uncached = None # Runtime dependencies which are not yet cached
+ self.__updated_strict_cache_keys_of_rdeps = False # Whether we've updated strict cache keys of rdeps
+ self.__ready_for_runtime = False # Whether the element and its runtime dependencies have cache keys
+ self.__ready_for_runtime_and_cached = False # Whether all runtime deps are cached, as well as the element
self.__cached_remotely = None # Whether the element is cached remotely
# List of Sources
self.__sources = [] # type: List[Source]
@@ -300,37 +270,21 @@ class Element(Plugin):
self.__strict_cache_key = None # Our cached cache key for strict builds
self.__artifacts = context.artifactcache # Artifact cache
self.__sourcecache = context.sourcecache # Source cache
- self.__consistency = (
- Consistency.INCONSISTENT
- ) # Cached overall consistency state
+ self.__consistency = Consistency.INCONSISTENT # Cached overall consistency state
self.__assemble_scheduled = False # Element is scheduled to be assembled
self.__assemble_done = False # Element is assembled
self.__tracking_scheduled = False # Sources are scheduled to be tracked
self.__pull_done = False # Whether pull was attempted
- self.__cached_successfully = (
- None # If the Element is known to be successfully cached
- )
- self.__source_cached = (
- None # If the sources are known to be successfully cached
- )
+ self.__cached_successfully = None # If the Element is known to be successfully cached
+ self.__source_cached = None # If the sources are known to be successfully cached
self.__splits = None # Resolved regex objects for computing split domains
- self.__whitelist_regex = (
- None # Resolved regex object to check if file is allowed to overlap
- )
+ self.__whitelist_regex = None # Resolved regex object to check if file is allowed to overlap
# Location where Element.stage_sources() was called
self.__staged_sources_directory = None # type: Optional[str]
- self.__tainted = (
- None # Whether the artifact is tainted and should not be shared
- )
- self.__required = (
- False # Whether the artifact is required in the current session
- )
- self.__artifact_files_required = (
- False # Whether artifact files are required in the local cache
- )
- self.__build_result = (
- None # The result of assembling this Element (success, description, detail)
- )
+ self.__tainted = None # Whether the artifact is tainted and should not be shared
+ self.__required = False # Whether the artifact is required in the current session
+ self.__artifact_files_required = False # Whether artifact files are required in the local cache
+ self.__build_result = None # The result of assembling this Element (success, description, detail)
self._build_log_path = None # The path of the build log for this Element
# Artifact class for direct artifact composite interaction
self.__artifact = None # type: Optional[Artifact]
@@ -341,12 +295,8 @@ class Element(Plugin):
# sources for staging
self.__last_source_requires_previous_ix = None
- self.__batch_prepare_assemble = (
- False # Whether batching across prepare()/assemble() is configured
- )
- self.__batch_prepare_assemble_flags = (
- 0 # Sandbox flags for batching across prepare()/assemble()
- )
+ self.__batch_prepare_assemble = False # Whether batching across prepare()/assemble() is configured
+ self.__batch_prepare_assemble_flags = 0 # Sandbox flags for batching across prepare()/assemble()
# Collect dir for batching across prepare()/assemble()
self.__batch_prepare_assemble_collect = None # type: Optional[str]
@@ -356,9 +306,7 @@ class Element(Plugin):
self.__buildable_callback = None # Callback to BuildQueue
self._depth = None # Depth of Element in its current dependency graph
- self._resolved_initial_state = (
- False # Whether the initial state of the Element has been resolved
- )
+ self._resolved_initial_state = False # Whether the initial state of the Element has been resolved
# Ensure we have loaded this class's defaults
self.__init_defaults(project, plugin_conf, meta.kind, meta.is_junction)
@@ -421,11 +369,7 @@ class Element(Plugin):
Elements must implement this method to configure the sandbox object
for execution.
"""
- raise ImplError(
- "element plugin '{kind}' does not implement configure_sandbox()".format(
- kind=self.get_kind()
- )
- )
+ raise ImplError("element plugin '{kind}' does not implement configure_sandbox()".format(kind=self.get_kind()))
def stage(self, sandbox: "Sandbox") -> None:
"""Stage inputs into the sandbox directories
@@ -441,11 +385,7 @@ class Element(Plugin):
objects, by staging the artifacts of the elements this element depends
on, or both.
"""
- raise ImplError(
- "element plugin '{kind}' does not implement stage()".format(
- kind=self.get_kind()
- )
- )
+ raise ImplError("element plugin '{kind}' does not implement stage()".format(kind=self.get_kind()))
def prepare(self, sandbox: "Sandbox") -> None:
"""Run one-off preparation commands.
@@ -482,11 +422,7 @@ class Element(Plugin):
Elements must implement this method to create an output
artifact from its sources and dependencies.
"""
- raise ImplError(
- "element plugin '{kind}' does not implement assemble()".format(
- kind=self.get_kind()
- )
- )
+ raise ImplError("element plugin '{kind}' does not implement assemble()".format(kind=self.get_kind()))
def generate_script(self) -> str:
"""Generate a build (sh) script to build this element
@@ -507,11 +443,7 @@ class Element(Plugin):
If the script fails, it is expected to return with an exit
code != 0.
"""
- raise ImplError(
- "element plugin '{kind}' does not implement write_script()".format(
- kind=self.get_kind()
- )
- )
+ raise ImplError("element plugin '{kind}' does not implement write_script()".format(kind=self.get_kind()))
#############################################################
# Public Methods #
@@ -525,9 +457,7 @@ class Element(Plugin):
for source in self.__sources:
yield source
- def dependencies(
- self, scope: Scope, *, recurse: bool = True, visited=None
- ) -> Iterator["Element"]:
+ def dependencies(self, scope: Scope, *, recurse: bool = True, visited=None) -> Iterator["Element"]:
"""dependencies(scope, *, recurse=True)
A generator function which yields the dependencies of the given element.
@@ -560,13 +490,8 @@ class Element(Plugin):
visited[0].add(element._unique_id)
visited[1].add(element._unique_id)
- for dep in chain(
- element.__build_dependencies, element.__runtime_dependencies
- ):
- if (
- dep._unique_id not in visited[0]
- and dep._unique_id not in visited[1]
- ):
+ for dep in chain(element.__build_dependencies, element.__runtime_dependencies):
+ if dep._unique_id not in visited[0] and dep._unique_id not in visited[1]:
yield from visit(dep, Scope.ALL, visited)
yield element
@@ -640,9 +565,7 @@ class Element(Plugin):
return self.__variables.subst(node.as_str())
except LoadError as e:
provenance = node.get_provenance()
- raise LoadError(
- "{}: {}".format(provenance, e), e.reason, detail=e.detail
- ) from e
+ raise LoadError("{}: {}".format(provenance, e), e.reason, detail=e.detail) from e
def node_subst_sequence_vars(self, node: "SequenceNode[ScalarNode]") -> List[str]:
"""Substitute any variables in the given sequence
@@ -663,17 +586,11 @@ class Element(Plugin):
ret.append(self.__variables.subst(value.as_str()))
except LoadError as e:
provenance = value.get_provenance()
- raise LoadError(
- "{}: {}".format(provenance, e), e.reason, detail=e.detail
- ) from e
+ raise LoadError("{}: {}".format(provenance, e), e.reason, detail=e.detail) from e
return ret
def compute_manifest(
- self,
- *,
- include: Optional[List[str]] = None,
- exclude: Optional[List[str]] = None,
- orphans: bool = True
+ self, *, include: Optional[List[str]] = None, exclude: Optional[List[str]] = None, orphans: bool = True
) -> str:
"""Compute and return this element's selective manifest
@@ -769,9 +686,7 @@ class Element(Plugin):
+ "Try building the element first with `bst build`\n"
)
raise ElementError(
- "No artifacts to stage",
- detail=detail,
- reason="uncached-checkout-attempt",
+ "No artifacts to stage", detail=detail, reason="uncached-checkout-attempt",
)
if update_mtimes is None:
@@ -780,9 +695,7 @@ class Element(Plugin):
# Time to use the artifact, check once more that it's there
self.__assert_cached()
- with self.timed_activity(
- "Staging {}/{}".format(self.name, self._get_brief_display_key())
- ):
+ with self.timed_activity("Staging {}/{}".format(self.name, self._get_brief_display_key())):
# Disable type checking since we can't easily tell mypy that
# `self.__artifact` can't be None at this stage.
files_vdir = self.__artifact.get_files() # type: ignore
@@ -790,11 +703,7 @@ class Element(Plugin):
# Hard link it into the staging area
#
vbasedir = sandbox.get_virtual_directory()
- vstagedir = (
- vbasedir
- if path is None
- else vbasedir.descend(*path.lstrip(os.sep).split(os.sep))
- )
+ vstagedir = vbasedir if path is None else vbasedir.descend(*path.lstrip(os.sep).split(os.sep))
split_filter = self.__split_filter_func(include, exclude, orphans)
@@ -802,31 +711,21 @@ class Element(Plugin):
if update_mtimes:
def link_filter(path):
- return (
- split_filter is None or split_filter(path)
- ) and path not in update_mtimes
+ return (split_filter is None or split_filter(path)) and path not in update_mtimes
def copy_filter(path):
- return (
- split_filter is None or split_filter(path)
- ) and path in update_mtimes
+ return (split_filter is None or split_filter(path)) and path in update_mtimes
else:
link_filter = split_filter
result = vstagedir.import_files(
- files_vdir,
- filter_callback=link_filter,
- report_written=True,
- can_link=True,
+ files_vdir, filter_callback=link_filter, report_written=True, can_link=True,
)
if update_mtimes:
copy_result = vstagedir.import_files(
- files_vdir,
- filter_callback=copy_filter,
- report_written=True,
- update_mtime=True,
+ files_vdir, filter_callback=copy_filter, report_written=True, update_mtime=True,
)
result = result.combine(copy_result)
@@ -875,9 +774,7 @@ class Element(Plugin):
# build is still in the artifact cache
#
if self.__artifacts.contains(self, workspace.last_successful):
- last_successful = Artifact(
- self, context, strong_key=workspace.last_successful
- )
+ last_successful = Artifact(self, context, strong_key=workspace.last_successful)
# Get a dict of dependency strong keys
old_dep_keys = last_successful.get_metadata_dependencies()
else:
@@ -886,9 +783,7 @@ class Element(Plugin):
workspace.prepared = False
workspace.last_successful = None
- self.info(
- "Resetting workspace state, last successful build is no longer in the cache"
- )
+ self.info("Resetting workspace state, last successful build is no longer in the cache")
# In case we are staging in the main process
if utils._is_main_process():
@@ -920,12 +815,7 @@ class Element(Plugin):
context.get_workspaces().save_config()
result = dep.stage_artifact(
- sandbox,
- path=path,
- include=include,
- exclude=exclude,
- orphans=orphans,
- update_mtimes=to_update,
+ sandbox, path=path, include=include, exclude=exclude, orphans=orphans, update_mtimes=to_update,
)
if result.overwritten:
for overwrite in result.overwritten:
@@ -956,15 +846,11 @@ class Element(Plugin):
overlap_warning_elements.append(elm)
overlap_warning = True
- warning_detail += _overlap_error_detail(
- f, overlap_warning_elements, elements
- )
+ warning_detail += _overlap_error_detail(f, overlap_warning_elements, elements)
if overlap_warning:
self.warn(
- "Non-whitelisted overlaps detected",
- detail=warning_detail,
- warning_token=CoreWarnings.OVERLAPS,
+ "Non-whitelisted overlaps detected", detail=warning_detail, warning_token=CoreWarnings.OVERLAPS,
)
if ignored:
@@ -995,9 +881,7 @@ class Element(Plugin):
for command in commands:
cmd = self.node_subst_vars(command)
- sandbox.run(
- ["sh", "-e", "-c", cmd], 0, env=environment, cwd="/", label=cmd
- )
+ sandbox.run(["sh", "-e", "-c", cmd], 0, env=environment, cwd="/", label=cmd)
def stage_sources(self, sandbox: "Sandbox", directory: str) -> None:
"""Stage this element's sources to a directory in the sandbox
@@ -1083,9 +967,7 @@ class Element(Plugin):
# Flat is not recognized correctly by Pylint as being a dictionary
return self.__variables.flat.get(varname) # pylint: disable=no-member
- def batch_prepare_assemble(
- self, flags: int, *, collect: Optional[str] = None
- ) -> None:
+ def batch_prepare_assemble(self, flags: int, *, collect: Optional[str] = None) -> None:
""" Configure command batching across prepare() and assemble()
Args:
@@ -1097,11 +979,7 @@ class Element(Plugin):
to enable batching of all sandbox commands issued in prepare() and assemble().
"""
if self.__batch_prepare_assemble:
- raise ElementError(
- "{}: Command batching for prepare/assemble is already configured".format(
- self
- )
- )
+ raise ElementError("{}: Command batching for prepare/assemble is already configured".format(self))
self.__batch_prepare_assemble = True
self.__batch_prepare_assemble_flags = flags
@@ -1500,8 +1378,7 @@ class Element(Plugin):
+ "To start using the new reference, please close the existing workspace."
)
source.warn(
- "Updated reference will be ignored as source has open workspace",
- detail=detail,
+ "Updated reference will be ignored as source has open workspace", detail=detail,
)
return refs
@@ -1512,16 +1389,11 @@ class Element(Plugin):
# is used to stage things by the `bst artifact checkout` codepath
#
@contextmanager
- def _prepare_sandbox(
- self, scope, directory, shell=False, integrate=True, usebuildtree=False
- ):
+ def _prepare_sandbox(self, scope, directory, shell=False, integrate=True, usebuildtree=False):
# bst shell and bst artifact checkout require a local sandbox.
bare_directory = bool(directory)
with self.__sandbox(
- directory,
- config=self.__sandbox_config,
- allow_remote=False,
- bare_directory=bare_directory,
+ directory, config=self.__sandbox_config, allow_remote=False, bare_directory=bare_directory,
) as sandbox:
sandbox._usebuildtree = usebuildtree
@@ -1534,9 +1406,7 @@ class Element(Plugin):
self.stage(sandbox)
else:
# Stage deps in the sandbox root
- with self.timed_activity(
- "Staging dependencies", silent_nested=True
- ):
+ with self.timed_activity("Staging dependencies", silent_nested=True):
self.stage_dependency_artifacts(sandbox, scope)
# Run any integration commands provided by the dependencies
@@ -1565,9 +1435,7 @@ class Element(Plugin):
# Stage all sources that need to be copied
sandbox_vroot = sandbox.get_virtual_directory()
- host_vdirectory = sandbox_vroot.descend(
- *directory.lstrip(os.sep).split(os.sep), create=True
- )
+ host_vdirectory = sandbox_vroot.descend(*directory.lstrip(os.sep).split(os.sep), create=True)
self._stage_sources_at(host_vdirectory, usebuildtree=sandbox._usebuildtree)
# _stage_sources_at():
@@ -1593,9 +1461,7 @@ class Element(Plugin):
if not isinstance(vdirectory, Directory):
vdirectory = FileBasedDirectory(vdirectory)
if not vdirectory.is_empty():
- raise ElementError(
- "Staging directory '{}' is not empty".format(vdirectory)
- )
+ raise ElementError("Staging directory '{}' is not empty".format(vdirectory))
# Check if we have a cached buildtree to use
if usebuildtree:
@@ -1603,10 +1469,7 @@ class Element(Plugin):
if import_dir.is_empty():
detail = "Element type either does not expect a buildtree or it was explictily cached without one."
self.warn(
- "WARNING: {} Artifact contains an empty buildtree".format(
- self.name
- ),
- detail=detail,
+ "WARNING: {} Artifact contains an empty buildtree".format(self.name), detail=detail,
)
# No cached buildtree, stage source from source cache
@@ -1628,16 +1491,10 @@ class Element(Plugin):
import_dir.import_files(source_dir)
except SourceCacheError as e:
- raise ElementError(
- "Error trying to export source for {}: {}".format(
- self.name, e
- )
- )
+ raise ElementError("Error trying to export source for {}: {}".format(self.name, e))
except VirtualDirectoryError as e:
raise ElementError(
- "Error trying to import sources together for {}: {}".format(
- self.name, e
- ),
+ "Error trying to import sources together for {}: {}".format(self.name, e),
reason="import-source-files-fail",
)
@@ -1747,9 +1604,7 @@ class Element(Plugin):
self._update_ready_for_runtime_and_cached()
if self._get_workspace() and self._cached_success():
- assert (
- utils._is_main_process()
- ), "Attempted to save workspace configuration from child process"
+ assert utils._is_main_process(), "Attempted to save workspace configuration from child process"
#
# Note that this block can only happen in the
# main process, since `self._cached_success()` cannot
@@ -1794,9 +1649,7 @@ class Element(Plugin):
# Explicitly clean it up, keep the build dir around if exceptions are raised
os.makedirs(context.builddir, exist_ok=True)
- rootdir = tempfile.mkdtemp(
- prefix="{}-".format(self.normal_name), dir=context.builddir
- )
+ rootdir = tempfile.mkdtemp(prefix="{}-".format(self.normal_name), dir=context.builddir)
# Cleanup the build directory on explicit SIGTERM
def cleanup_rootdir():
@@ -1812,12 +1665,8 @@ class Element(Plugin):
buildroot = self.get_variable("build-root")
cache_buildtrees = context.cache_buildtrees
if cache_buildtrees != _CacheBuildTrees.NEVER:
- always_cache_buildtrees = (
- cache_buildtrees == _CacheBuildTrees.ALWAYS
- )
- sandbox._set_build_directory(
- buildroot, always=always_cache_buildtrees
- )
+ always_cache_buildtrees = cache_buildtrees == _CacheBuildTrees.ALWAYS
+ sandbox._set_build_directory(buildroot, always=always_cache_buildtrees)
if not self.BST_RUN_COMMANDS:
# Element doesn't need to run any commands in the sandbox.
@@ -1839,8 +1688,7 @@ class Element(Plugin):
try:
if self.__batch_prepare_assemble:
cm = sandbox.batch(
- self.__batch_prepare_assemble_flags,
- collect=self.__batch_prepare_assemble_collect,
+ self.__batch_prepare_assemble_flags, collect=self.__batch_prepare_assemble_collect,
)
else:
cm = contextlib.suppress()
@@ -1849,18 +1697,14 @@ class Element(Plugin):
# Step 3 - Prepare
self.__prepare(sandbox)
# Step 4 - Assemble
- collect = self.assemble(
- sandbox
- ) # pylint: disable=assignment-from-no-return
+ collect = self.assemble(sandbox) # pylint: disable=assignment-from-no-return
self.__set_build_result(success=True, description="succeeded")
except (ElementError, SandboxCommandError) as e:
# Shelling into a sandbox is useful to debug this error
e.sandbox = True
- self.__set_build_result(
- success=False, description=str(e), detail=e.detail
- )
+ self.__set_build_result(success=False, description=str(e), detail=e.detail)
self._cache_artifact(rootdir, sandbox, e.collect)
raise
@@ -1903,9 +1747,7 @@ class Element(Plugin):
if collect is not None:
try:
- collectvdir = sandbox_vroot.descend(
- *collect.lstrip(os.sep).split(os.sep)
- )
+ collectvdir = sandbox_vroot.descend(*collect.lstrip(os.sep).split(os.sep))
sandbox._fetch_missing_blobs(collectvdir)
except VirtualDirectoryError:
pass
@@ -1914,9 +1756,7 @@ class Element(Plugin):
self._assemble_done()
with self.timed_activity("Caching artifact"):
- artifact_size = self.__artifact.cache(
- rootdir, sandbox_build_dir, collectvdir, buildresult, publicdata
- )
+ artifact_size = self.__artifact.cache(rootdir, sandbox_build_dir, collectvdir, buildresult, publicdata)
if collect is not None and collectvdir is None:
raise ElementError(
@@ -2023,9 +1863,7 @@ class Element(Plugin):
def _skip_source_push(self):
if not self.__sources or self._get_workspace():
return True
- return not (
- self.__sourcecache.has_push_remotes(plugin=self) and self._source_cached()
- )
+ return not (self.__sourcecache.has_push_remotes(plugin=self) and self._source_cached())
def _source_push(self):
# try and push sources if we've got them
@@ -2101,20 +1939,10 @@ class Element(Plugin):
#
# If directory is not specified, one will be staged using scope
def _shell(
- self,
- scope=None,
- directory=None,
- *,
- mounts=None,
- isolate=False,
- prompt=None,
- command=None,
- usebuildtree=False
+ self, scope=None, directory=None, *, mounts=None, isolate=False, prompt=None, command=None, usebuildtree=False
):
- with self._prepare_sandbox(
- scope, directory, shell=True, usebuildtree=usebuildtree
- ) as sandbox:
+ with self._prepare_sandbox(scope, directory, shell=True, usebuildtree=usebuildtree) as sandbox:
environment = self.get_environment()
environment = copy.copy(environment)
flags = SandboxFlags.INTERACTIVE | SandboxFlags.ROOT_READ_ONLY
@@ -2123,11 +1951,7 @@ class Element(Plugin):
# subproject, we want to use the rules defined by the main one.
context = self._get_context()
project = context.get_toplevel_project()
- (
- shell_command,
- shell_environment,
- shell_host_files,
- ) = project.get_shell_config()
+ (shell_command, shell_environment, shell_host_files,) = project.get_shell_config()
if prompt is not None:
environment["PS1"] = prompt
@@ -2150,11 +1974,7 @@ class Element(Plugin):
for mount in shell_host_files + mounts:
if not os.path.exists(mount.host_path):
if not mount.optional:
- self.warn(
- "Not mounting non-existing host file: {}".format(
- mount.host_path
- )
- )
+ self.warn("Not mounting non-existing host file: {}".format(mount.host_path))
else:
sandbox.mark_directory(mount.path)
sandbox._set_mount_source(mount.path, mount.host_path)
@@ -2190,9 +2010,7 @@ class Element(Plugin):
# additional support from Source implementations.
#
os.makedirs(context.builddir, exist_ok=True)
- with utils._tempdir(
- dir=context.builddir, prefix="workspace-{}".format(self.normal_name)
- ) as temp:
+ with utils._tempdir(dir=context.builddir, prefix="workspace-{}".format(self.normal_name)) as temp:
for source in self.sources():
source._init_workspace(temp)
@@ -2309,10 +2127,7 @@ class Element(Plugin):
continue
# try and fetch from source cache
- if (
- source._get_consistency() < Consistency.CACHED
- and self.__sourcecache.has_fetch_remotes()
- ):
+ if source._get_consistency() < Consistency.CACHED and self.__sourcecache.has_fetch_remotes():
if self.__sourcecache.pull(source):
continue
@@ -2345,11 +2160,7 @@ class Element(Plugin):
# Generate dict that is used as base for all cache keys
if self.__cache_key_dict is None:
# Filter out nocache variables from the element's environment
- cache_env = {
- key: value
- for key, value in self.__environment.items()
- if key not in self.__env_nocache
- }
+ cache_env = {key: value for key, value in self.__environment.items() if key not in self.__env_nocache}
project = self._get_project()
@@ -2491,11 +2302,7 @@ class Element(Plugin):
#
def _update_ready_for_runtime_and_cached(self):
if not self.__ready_for_runtime_and_cached:
- if (
- self.__runtime_deps_uncached == 0
- and self._cached_success()
- and self.__cache_key
- ):
+ if self.__runtime_deps_uncached == 0 and self._cached_success() and self.__cache_key:
self.__ready_for_runtime_and_cached = True
# Notify reverse dependencies
@@ -2661,40 +2468,30 @@ class Element(Plugin):
def __preflight(self):
if self.BST_FORBID_RDEPENDS and self.BST_FORBID_BDEPENDS:
- if any(self.dependencies(Scope.RUN, recurse=False)) or any(
- self.dependencies(Scope.BUILD, recurse=False)
- ):
+ if any(self.dependencies(Scope.RUN, recurse=False)) or any(self.dependencies(Scope.BUILD, recurse=False)):
raise ElementError(
- "{}: Dependencies are forbidden for '{}' elements".format(
- self, self.get_kind()
- ),
+ "{}: Dependencies are forbidden for '{}' elements".format(self, self.get_kind()),
reason="element-forbidden-depends",
)
if self.BST_FORBID_RDEPENDS:
if any(self.dependencies(Scope.RUN, recurse=False)):
raise ElementError(
- "{}: Runtime dependencies are forbidden for '{}' elements".format(
- self, self.get_kind()
- ),
+ "{}: Runtime dependencies are forbidden for '{}' elements".format(self, self.get_kind()),
reason="element-forbidden-rdepends",
)
if self.BST_FORBID_BDEPENDS:
if any(self.dependencies(Scope.BUILD, recurse=False)):
raise ElementError(
- "{}: Build dependencies are forbidden for '{}' elements".format(
- self, self.get_kind()
- ),
+ "{}: Build dependencies are forbidden for '{}' elements".format(self, self.get_kind()),
reason="element-forbidden-bdepends",
)
if self.BST_FORBID_SOURCES:
if any(self.sources()):
raise ElementError(
- "{}: Sources are forbidden for '{}' elements".format(
- self, self.get_kind()
- ),
+ "{}: Sources are forbidden for '{}' elements".format(self, self.get_kind()),
reason="element-forbidden-sources",
)
@@ -2702,17 +2499,13 @@ class Element(Plugin):
self.preflight()
except BstError as e:
# Prepend provenance to the error
- raise ElementError(
- "{}: {}".format(self, e), reason=e.reason, detail=e.detail
- ) from e
+ raise ElementError("{}: {}".format(self, e), reason=e.reason, detail=e.detail) from e
# Ensure that the first source does not need access to previous sources
if self.__sources and self.__sources[0]._requires_previous_sources():
raise ElementError(
"{}: {} cannot be the first source of an element "
- "as it requires access to previous sources".format(
- self, self.__sources[0]
- )
+ "as it requires access to previous sources".format(self, self.__sources[0])
)
# Preflight the sources
@@ -2724,9 +2517,7 @@ class Element(Plugin):
# Raises an error if the artifact is not cached.
#
def __assert_cached(self):
- assert self._cached(), "{}: Missing artifact {}".format(
- self, self._get_brief_display_key()
- )
+ assert self._cached(), "{}: Missing artifact {}".format(self, self._get_brief_display_key())
# __get_tainted():
#
@@ -2749,16 +2540,10 @@ class Element(Plugin):
workspaced = self.__artifact.get_metadata_workspaced()
# Whether this artifact's dependencies have workspaces
- workspaced_dependencies = (
- self.__artifact.get_metadata_workspaced_dependencies()
- )
+ workspaced_dependencies = self.__artifact.get_metadata_workspaced_dependencies()
# Other conditions should be or-ed
- self.__tainted = (
- workspaced
- or workspaced_dependencies
- or not self.__sandbox_config_supported
- )
+ self.__tainted = workspaced or workspaced_dependencies or not self.__sandbox_config_supported
return self.__tainted
@@ -2790,13 +2575,7 @@ class Element(Plugin):
#
@contextmanager
def __sandbox(
- self,
- directory,
- stdout=None,
- stderr=None,
- config=None,
- allow_remote=True,
- bare_directory=False,
+ self, directory, stdout=None, stderr=None, config=None, allow_remote=True, bare_directory=False,
):
context = self._get_context()
project = self._get_project()
@@ -2809,20 +2588,12 @@ class Element(Plugin):
"Element {} is configured to use remote execution but plugin does not support it.".format(
self.name
),
- detail="Plugin '{kind}' does not support virtual directories.".format(
- kind=self.get_kind()
- ),
+ detail="Plugin '{kind}' does not support virtual directories.".format(kind=self.get_kind()),
)
- self.info(
- "Using a remote sandbox for artifact {} with directory '{}'".format(
- self.name, directory
- )
- )
+ self.info("Using a remote sandbox for artifact {} with directory '{}'".format(self.name, directory))
- output_files_required = (
- context.require_artifact_files or self._artifact_files_required()
- )
+ output_files_required = context.require_artifact_files or self._artifact_files_required()
sandbox = SandboxRemote(
context,
@@ -2856,18 +2627,11 @@ class Element(Plugin):
else:
os.makedirs(context.builddir, exist_ok=True)
- rootdir = tempfile.mkdtemp(
- prefix="{}-".format(self.normal_name), dir=context.builddir
- )
+ rootdir = tempfile.mkdtemp(prefix="{}-".format(self.normal_name), dir=context.builddir)
# Recursive contextmanager...
with self.__sandbox(
- rootdir,
- stdout=stdout,
- stderr=stderr,
- config=config,
- allow_remote=allow_remote,
- bare_directory=False,
+ rootdir, stdout=stdout, stderr=stderr, config=config, allow_remote=allow_remote, bare_directory=False,
) as sandbox:
yield sandbox
@@ -2997,9 +2761,7 @@ class Element(Plugin):
provenance = node.get_provenance()
if not provenance._is_synthetic:
raise LoadError(
- "{}: invalid redefinition of protected variable '{}'".format(
- provenance, var
- ),
+ "{}: invalid redefinition of protected variable '{}'".format(provenance, var),
LoadErrorReason.PROTECTED_VARIABLE_REDEFINED,
)
@@ -3043,9 +2805,7 @@ class Element(Plugin):
sandbox_config._assert_fully_composited()
# Sandbox config, unlike others, has fixed members so we should validate them
- sandbox_config.validate_keys(
- ["build-uid", "build-gid", "build-os", "build-arch"]
- )
+ sandbox_config.validate_keys(["build-uid", "build-gid", "build-os", "build-arch"])
build_arch = sandbox_config.get_str("build-arch", default=None)
if build_arch:
@@ -3093,9 +2853,7 @@ class Element(Plugin):
# Resolve any variables in the public split rules directly
for domain, splits in element_splits.items():
- splits = [
- self.__variables.subst(split.strip()) for split in splits.as_str_list()
- ]
+ splits = [self.__variables.subst(split.strip()) for split in splits.as_str_list()]
element_splits[domain] = splits
return element_public
@@ -3104,11 +2862,7 @@ class Element(Plugin):
bstdata = self.get_public_data("bst")
splits = bstdata.get_mapping("split-rules")
self.__splits = {
- domain: re.compile(
- "^(?:"
- + "|".join([utils._glob2re(r) for r in rules.as_str_list()])
- + ")$"
- )
+ domain: re.compile("^(?:" + "|".join([utils._glob2re(r) for r in rules.as_str_list()]) + ")$")
for domain, rules in splits.items()
}
@@ -3188,9 +2942,7 @@ class Element(Plugin):
return partial(self.__split_filter, element_domains, include, exclude, orphans)
def __compute_splits(self, include=None, exclude=None, orphans=True):
- filter_func = self.__split_filter_func(
- include=include, exclude=exclude, orphans=orphans
- )
+ filter_func = self.__split_filter_func(include=include, exclude=exclude, orphans=orphans)
files_vdir = self.__artifact.get_files()
@@ -3213,9 +2965,7 @@ class Element(Plugin):
if not self.__whitelist_regex:
bstdata = self.get_public_data("bst")
whitelist = bstdata.get_str_list("overlap-whitelist", default=[])
- whitelist_expressions = [
- utils._glob2re(self.__variables.subst(exp.strip())) for exp in whitelist
- ]
+ whitelist_expressions = [utils._glob2re(self.__variables.subst(exp.strip())) for exp in whitelist]
expression = "^(?:" + "|".join(whitelist_expressions) + ")$"
self.__whitelist_regex = re.compile(expression)
return self.__whitelist_regex.match(os.path.join(os.sep, path))
@@ -3296,9 +3046,7 @@ class Element(Plugin):
# commit all other sources by themselves
for ix, source in enumerate(self.__sources):
if source.BST_REQUIRES_PREVIOUS_SOURCES_STAGE:
- self.__sourcecache.commit(
- source, self.__sources[last_requires_previous:ix]
- )
+ self.__sourcecache.commit(source, self.__sources[last_requires_previous:ix])
last_requires_previous = ix
else:
self.__sourcecache.commit(source, [])
@@ -3386,9 +3134,7 @@ class Element(Plugin):
if self.__strict_cache_key is None:
dependencies = [
- [e.project_name, e.name, e.__strict_cache_key]
- if e.__strict_cache_key is not None
- else None
+ [e.project_name, e.name, e.__strict_cache_key] if e.__strict_cache_key is not None else None
for e in self.dependencies(Scope.BUILD)
]
self.__strict_cache_key = self._calculate_cache_key(dependencies)
@@ -3404,10 +3150,7 @@ class Element(Plugin):
else:
self.__update_strict_cache_key_of_rdeps()
- if (
- self.__strict_cache_key is not None
- and self.__can_query_cache_callback is not None
- ):
+ if self.__strict_cache_key is not None and self.__can_query_cache_callback is not None:
self.__can_query_cache_callback(self)
self.__can_query_cache_callback = None
@@ -3435,10 +3178,7 @@ class Element(Plugin):
if not self.__strict_artifact:
self.__strict_artifact = Artifact(
- self,
- context,
- strong_key=self.__strict_cache_key,
- weak_key=self.__weak_cache_key,
+ self, context, strong_key=self.__strict_cache_key, weak_key=self.__weak_cache_key,
)
if context.get_strict():
@@ -3471,9 +3211,7 @@ class Element(Plugin):
self.__cache_key = strong_key
elif self.__assemble_scheduled or self.__assemble_done:
# Artifact will or has been built, not downloaded
- dependencies = [
- e._get_cache_key() for e in self.dependencies(Scope.BUILD)
- ]
+ dependencies = [e._get_cache_key() for e in self.dependencies(Scope.BUILD)]
self.__cache_key = self._calculate_cache_key(dependencies)
if self.__cache_key is None:
@@ -3495,10 +3233,7 @@ class Element(Plugin):
#
def __update_strict_cache_key_of_rdeps(self):
if not self.__updated_strict_cache_keys_of_rdeps:
- if (
- self.__runtime_deps_without_strict_cache_key == 0
- and self.__strict_cache_key is not None
- ):
+ if self.__runtime_deps_without_strict_cache_key == 0 and self.__strict_cache_key is not None:
self.__updated_strict_cache_keys_of_rdeps = True
# Notify reverse dependencies
@@ -3532,10 +3267,7 @@ class Element(Plugin):
#
def __update_ready_for_runtime(self):
if not self.__ready_for_runtime:
- if (
- self.__runtime_deps_without_cache_key == 0
- and self.__cache_key is not None
- ):
+ if self.__runtime_deps_without_cache_key == 0 and self.__cache_key is not None:
self.__ready_for_runtime = True
# Notify reverse dependencies
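The element.py hunks above repeatedly reformat the ImplError raises for the abstract hooks (configure_sandbox(), stage(), assemble()) and the batch_prepare_assemble() machinery. For orientation only, a minimal sketch of an element plugin that fills those hooks in; the class name, the "command" config key and the shell command are invented for the example and are not part of this change.

from buildstream import Element, Scope, SandboxFlags


class ExampleElement(Element):
    # Illustrative sketch only; not part of this reformatting commit.
    def configure(self, node):
        node.validate_keys(["command"])
        self.command = self.node_subst_vars(node.get_scalar("command"))

    def preflight(self):
        pass

    def get_unique_key(self):
        return {"command": self.command}

    def configure_sandbox(self, sandbox):
        # Ensure the install location exists in the otherwise read-only root,
        # then opt in to command batching across prepare()/assemble().
        sandbox.mark_directory(self.get_variable("install-root"))
        self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root"))

    def stage(self, sandbox):
        # Stage the artifacts of build dependencies into the sandbox root.
        self.stage_dependency_artifacts(sandbox, Scope.BUILD)

    def assemble(self, sandbox):
        # Run the configured command and collect the install root as the artifact.
        sandbox.run(["sh", "-e", "-c", self.command], SandboxFlags.ROOT_READ_ONLY, cwd="/", label=self.command)
        return self.get_variable("install-root")


def setup():
    return ExampleElement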
diff --git a/src/buildstream/plugin.py b/src/buildstream/plugin.py
index 2e34106de..6a7bd78e1 100644
--- a/src/buildstream/plugin.py
+++ b/src/buildstream/plugin.py
@@ -273,9 +273,7 @@ class Plugin:
# If this plugin has been deprecated, emit a warning.
if self.BST_PLUGIN_DEPRECATED and not self.__deprecation_warning_silenced():
- detail = "Using deprecated plugin {}: {}".format(
- self.__kind, self.BST_PLUGIN_DEPRECATION_MESSAGE
- )
+ detail = "Using deprecated plugin {}: {}".format(self.__kind, self.BST_PLUGIN_DEPRECATION_MESSAGE)
self.__message(MessageType.WARN, detail)
def __del__(self):
@@ -316,9 +314,7 @@ class Plugin:
method can be used.
"""
raise ImplError(
- "{tag} plugin '{kind}' does not implement configure()".format(
- tag=self.__type_tag, kind=self.get_kind()
- )
+ "{tag} plugin '{kind}' does not implement configure()".format(tag=self.__type_tag, kind=self.get_kind())
)
def preflight(self) -> None:
@@ -340,9 +336,7 @@ class Plugin:
will raise an error automatically informing the user that a host tool is needed.
"""
raise ImplError(
- "{tag} plugin '{kind}' does not implement preflight()".format(
- tag=self.__type_tag, kind=self.get_kind()
- )
+ "{tag} plugin '{kind}' does not implement preflight()".format(tag=self.__type_tag, kind=self.get_kind())
)
def get_unique_key(self) -> SourceRef:
@@ -419,9 +413,7 @@ class Plugin:
"""
- return self.__project.get_path_from_node(
- node, check_is_file=check_is_file, check_is_dir=check_is_dir
- )
+ return self.__project.get_path_from_node(node, check_is_file=check_is_file, check_is_dir=check_is_dir)
def debug(self, brief: str, *, detail: Optional[str] = None) -> None:
"""Print a debugging message
@@ -459,13 +451,7 @@ class Plugin:
"""
self.__message(MessageType.INFO, brief, detail=detail)
- def warn(
- self,
- brief: str,
- *,
- detail: Optional[str] = None,
- warning_token: Optional[str] = None
- ) -> None:
+ def warn(self, brief: str, *, detail: Optional[str] = None, warning_token: Optional[str] = None) -> None:
"""Print a warning message, checks warning_token against project configuration
Args:
@@ -485,9 +471,7 @@ class Plugin:
if project._warning_is_fatal(warning_token):
detail = detail if detail else ""
- raise PluginError(
- message="{}\n{}".format(brief, detail), reason=warning_token
- )
+ raise PluginError(message="{}\n{}".format(brief, detail), reason=warning_token)
self.__message(MessageType.WARN, brief=brief, detail=detail)
@@ -505,11 +489,7 @@ class Plugin:
@contextmanager
def timed_activity(
- self,
- activity_name: str,
- *,
- detail: Optional[str] = None,
- silent_nested: bool = False
+ self, activity_name: str, *, detail: Optional[str] = None, silent_nested: bool = False
) -> Generator[None, None, None]:
"""Context manager for performing timed activities in plugins
@@ -533,20 +513,11 @@ class Plugin:
self.call(... command which takes time ...)
"""
with self.__context.messenger.timed_activity(
- activity_name,
- element_name=self._get_full_name(),
- detail=detail,
- silent_nested=silent_nested,
+ activity_name, element_name=self._get_full_name(), detail=detail, silent_nested=silent_nested,
):
yield
- def call(
- self,
- *popenargs,
- fail: Optional[str] = None,
- fail_temporarily: bool = False,
- **kwargs
- ) -> int:
+ def call(self, *popenargs, fail: Optional[str] = None, fail_temporarily: bool = False, **kwargs) -> int:
"""A wrapper for subprocess.call()
Args:
@@ -577,14 +548,10 @@ class Plugin:
"Failed to download ponies from {}".format(
self.mirror_directory))
"""
- exit_code, _ = self.__call(
- *popenargs, fail=fail, fail_temporarily=fail_temporarily, **kwargs
- )
+ exit_code, _ = self.__call(*popenargs, fail=fail, fail_temporarily=fail_temporarily, **kwargs)
return exit_code
- def check_output(
- self, *popenargs, fail=None, fail_temporarily=False, **kwargs
- ) -> Tuple[int, str]:
+ def check_output(self, *popenargs, fail=None, fail_temporarily=False, **kwargs) -> Tuple[int, str]:
"""A wrapper for subprocess.check_output()
Args:
@@ -630,13 +597,7 @@ class Plugin:
raise SourceError(
fmt.format(plugin=self, track=tracking)) from e
"""
- return self.__call(
- *popenargs,
- collect_stdout=True,
- fail=fail,
- fail_temporarily=fail_temporarily,
- **kwargs
- )
+ return self.__call(*popenargs, collect_stdout=True, fail=fail, fail_temporarily=fail_temporarily, **kwargs)
#############################################################
# Private Methods used in BuildStream #
@@ -773,14 +734,7 @@ class Plugin:
# Internal subprocess implementation for the call() and check_output() APIs
#
- def __call(
- self,
- *popenargs,
- collect_stdout=False,
- fail=None,
- fail_temporarily=False,
- **kwargs
- ):
+ def __call(self, *popenargs, collect_stdout=False, fail=None, fail_temporarily=False, **kwargs):
with self._output_file() as output_file:
if "stdout" not in kwargs:
@@ -796,16 +750,13 @@ class Plugin:
if fail and exit_code:
raise PluginError(
- "{plugin}: {message}".format(plugin=self, message=fail),
- temporary=fail_temporarily,
+ "{plugin}: {message}".format(plugin=self, message=fail), temporary=fail_temporarily,
)
return (exit_code, output)
def __message(self, message_type, brief, **kwargs):
- message = Message(
- message_type, brief, element_name=self._get_full_name(), **kwargs
- )
+ message = Message(message_type, brief, element_name=self._get_full_name(), **kwargs)
self.__context.messenger.message(message)
def __note_command(self, output, *popenargs, **kwargs):
@@ -834,9 +785,7 @@ class Plugin:
def __get_full_name(self):
project = self.__project
# Set the name, depending on element or source plugin type
- name = (
- self._element_name if self.__type_tag == "source" else self.name
- ) # pylint: disable=no-member
+ name = self._element_name if self.__type_tag == "source" else self.name # pylint: disable=no-member
if project.junction:
return "{}:{}".format(project.junction.name, name)
else:
@@ -845,9 +794,7 @@ class Plugin:
# A local table for _prefix_warning()
#
-__CORE_WARNINGS = [
- value for name, value in CoreWarnings.__dict__.items() if not name.startswith("__")
-]
+__CORE_WARNINGS = [value for name, value in CoreWarnings.__dict__.items() if not name.startswith("__")]
# _prefix_warning():
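The plugin.py hunks above reformat the call(), check_output() and timed_activity() wrappers. A minimal sketch of a Source plugin exercising them; the "exampletool" host command, its subcommands and the YAML keys are invented for the example and are not part of this change.

import os

from buildstream import Consistency, Source, SourceError, utils


class ExampleToolSource(Source):
    # Illustrative sketch only; not part of this reformatting commit.
    def configure(self, node):
        node.validate_keys(["url", "ref", *Source.COMMON_CONFIG_KEYS])
        self.original_url = node.get_str("url")
        self.ref = node.get_str("ref", None)
        self.url = self.translate_url(self.original_url)
        self.mirror_dir = os.path.join(self.get_mirror_directory(), utils.url_directory_name(self.original_url))

    def preflight(self):
        # Raises automatically if the host tool is missing.
        self.host_tool = utils.get_host_tool("exampletool")

    def get_unique_key(self):
        return [self.original_url, self.ref]

    def get_consistency(self):
        if self.ref is None:
            return Consistency.INCONSISTENT
        return Consistency.CACHED if os.path.isdir(self.mirror_dir) else Consistency.RESOLVED

    def get_ref(self):
        return self.ref

    def set_ref(self, ref, node):
        node["ref"] = self.ref = ref

    def track(self):
        # check_output() returns (exit_code, output) and raises when 'fail' is given and the command fails.
        _, output = self.check_output(
            [self.host_tool, "latest-ref", self.url], fail="Failed to track {}".format(self.url)
        )
        return output.strip()

    def fetch(self):
        with self.timed_activity("Fetching {}".format(self.url), silent_nested=True):
            self.call(
                [self.host_tool, "mirror", self.url, self.ref, self.mirror_dir],
                fail="Failed to fetch {}".format(self.url),
                fail_temporarily=True,
            )

    def stage(self, directory):
        if not os.path.isdir(self.mirror_dir):
            raise SourceError("{}: Nothing has been fetched for {}".format(self, self.url))
        utils.copy_files(self.mirror_dir, directory)


def setup():
    return ExampleToolSource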
diff --git a/src/buildstream/plugins/elements/autotools.py b/src/buildstream/plugins/elements/autotools.py
index 71b2e5854..089c9bca0 100644
--- a/src/buildstream/plugins/elements/autotools.py
+++ b/src/buildstream/plugins/elements/autotools.py
@@ -66,9 +66,7 @@ class AutotoolsElement(BuildElement):
# Enable command batching across prepare() and assemble()
def configure_sandbox(self, sandbox):
super().configure_sandbox(sandbox)
- self.batch_prepare_assemble(
- SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root")
- )
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root"))
# Plugin entry point
diff --git a/src/buildstream/plugins/elements/compose.py b/src/buildstream/plugins/elements/compose.py
index 461320008..c54c317b0 100644
--- a/src/buildstream/plugins/elements/compose.py
+++ b/src/buildstream/plugins/elements/compose.py
@@ -102,9 +102,7 @@ class ComposeElement(Element):
with self.timed_activity("Computing split", silent_nested=True):
for dep in self.dependencies(Scope.BUILD):
files = dep.compute_manifest(
- include=self.include,
- exclude=self.exclude,
- orphans=self.include_orphans,
+ include=self.include, exclude=self.exclude, orphans=self.include_orphans,
)
manifest.update(files)
@@ -186,13 +184,9 @@ class ComposeElement(Element):
def import_filter(path):
return path in manifest
- with self.timed_activity(
- "Creating composition", detail=detail, silent_nested=True
- ):
+ with self.timed_activity("Creating composition", detail=detail, silent_nested=True):
self.info("Composing {} files".format(len(manifest)))
- installdir.import_files(
- vbasedir, filter_callback=import_filter, can_link=True
- )
+ installdir.import_files(vbasedir, filter_callback=import_filter, can_link=True)
# And we're done
return os.path.join(os.sep, "buildstream", "install")
diff --git a/src/buildstream/plugins/elements/filter.py b/src/buildstream/plugins/elements/filter.py
index 17e15c80c..49bebd5a9 100644
--- a/src/buildstream/plugins/elements/filter.py
+++ b/src/buildstream/plugins/elements/filter.py
@@ -167,9 +167,7 @@ class FilterElement(Element):
BST_RUN_COMMANDS = False
def configure(self, node):
- node.validate_keys(
- ["include", "exclude", "include-orphans", "pass-integration"]
- )
+ node.validate_keys(["include", "exclude", "include-orphans", "pass-integration"])
self.include_node = node.get_sequence("include")
self.exclude_node = node.get_sequence("exclude")
@@ -211,13 +209,9 @@ class FilterElement(Element):
# If a parent does not produce an artifact, fail and inform user that the dependency
# must produce artifacts
if not build_deps[0].BST_ELEMENT_HAS_ARTIFACT:
- detail = "{} does not produce an artifact, so there is nothing to filter".format(
- build_deps[0].name
- )
+ detail = "{} does not produce an artifact, so there is nothing to filter".format(build_deps[0].name)
raise ElementError(
- "{}: {} element's build dependency must produce an artifact".format(
- self, type(self).__name__
- ),
+ "{}: {} element's build dependency must produce an artifact".format(self, type(self).__name__),
detail=detail,
reason="filter-bdepend-no-artifact",
)
@@ -253,34 +247,19 @@ class FilterElement(Element):
detail = []
if unfound_includes:
- detail.append(
- "Unknown domains were used in {}".format(
- self.include_node.get_provenance()
- )
- )
- detail.extend(
- [" - {}".format(domain) for domain in unfound_includes]
- )
+ detail.append("Unknown domains were used in {}".format(self.include_node.get_provenance()))
+ detail.extend([" - {}".format(domain) for domain in unfound_includes])
if unfound_excludes:
- detail.append(
- "Unknown domains were used in {}".format(
- self.exclude_node.get_provenance()
- )
- )
- detail.extend(
- [" - {}".format(domain) for domain in unfound_excludes]
- )
+ detail.append("Unknown domains were used in {}".format(self.exclude_node.get_provenance()))
+ detail.extend([" - {}".format(domain) for domain in unfound_excludes])
if detail:
detail = "\n".join(detail)
raise ElementError("Unknown domains declared.", detail=detail)
dep.stage_artifact(
- sandbox,
- include=self.include,
- exclude=self.exclude,
- orphans=self.include_orphans,
+ sandbox, include=self.include, exclude=self.exclude, orphans=self.include_orphans,
)
return ""
diff --git a/src/buildstream/plugins/elements/import.py b/src/buildstream/plugins/elements/import.py
index b7318b131..2b68197a7 100644
--- a/src/buildstream/plugins/elements/import.py
+++ b/src/buildstream/plugins/elements/import.py
@@ -55,9 +55,7 @@ class ImportElement(Element):
sources = list(self.sources())
if not sources:
- raise ElementError(
- "{}: An import element must have at least one source.".format(self)
- )
+ raise ElementError("{}: An import element must have at least one source.".format(self))
def get_unique_key(self):
return {"source": self.source, "target": self.target}
@@ -81,16 +79,10 @@ class ImportElement(Element):
inputdir = inputdir.descend(*self.source.strip(os.sep).split(os.sep))
# The output target directory
- outputdir = outputdir.descend(
- *self.target.strip(os.sep).split(os.sep), create=True
- )
+ outputdir = outputdir.descend(*self.target.strip(os.sep).split(os.sep), create=True)
if inputdir.is_empty():
- raise ElementError(
- "{}: No files were found inside directory '{}'".format(
- self, self.source
- )
- )
+ raise ElementError("{}: No files were found inside directory '{}'".format(self, self.source))
# Move it over
outputdir.import_files(inputdir)
@@ -104,9 +96,7 @@ class ImportElement(Element):
commands = []
# The directory to grab
- inputdir = os.path.join(
- build_root, self.normal_name, self.source.lstrip(os.sep)
- )
+ inputdir = os.path.join(build_root, self.normal_name, self.source.lstrip(os.sep))
inputdir = inputdir.rstrip(os.sep)
# The output target directory
@@ -115,9 +105,7 @@ class ImportElement(Element):
# Ensure target directory parent exists but target directory doesn't
commands.append("mkdir -p {}".format(os.path.dirname(outputdir)))
- commands.append(
- "[ ! -e {outputdir} ] || rmdir {outputdir}".format(outputdir=outputdir)
- )
+ commands.append("[ ! -e {outputdir} ] || rmdir {outputdir}".format(outputdir=outputdir))
# Move it over
commands.append("mv {} {}".format(inputdir, outputdir))
diff --git a/src/buildstream/plugins/elements/junction.py b/src/buildstream/plugins/elements/junction.py
index f9327352e..42b9ef08e 100644
--- a/src/buildstream/plugins/elements/junction.py
+++ b/src/buildstream/plugins/elements/junction.py
@@ -192,12 +192,8 @@ class JunctionElement(Element):
self.target = node.get_str("target", default=None)
self.target_element = None
self.target_junction = None
- self.cache_junction_elements = node.get_bool(
- "cache-junction-elements", default=False
- )
- self.ignore_junction_remotes = node.get_bool(
- "ignore-junction-remotes", default=False
- )
+ self.cache_junction_elements = node.get_bool("cache-junction-elements", default=False)
+ self.ignore_junction_remotes = node.get_bool("ignore-junction-remotes", default=False)
def preflight(self):
# "target" cannot be used in conjunction with:
@@ -205,33 +201,23 @@ class JunctionElement(Element):
# 2. config['options']
# 3. config['path']
if self.target and any(self.sources()):
- raise ElementError(
- "junction elements cannot define both 'sources' and 'target' config option"
- )
+ raise ElementError("junction elements cannot define both 'sources' and 'target' config option")
if self.target and any(self.options.items()):
- raise ElementError(
- "junction elements cannot define both 'options' and 'target'"
- )
+ raise ElementError("junction elements cannot define both 'options' and 'target'")
if self.target and self.path:
- raise ElementError(
- "junction elements cannot define both 'path' and 'target'"
- )
+ raise ElementError("junction elements cannot define both 'path' and 'target'")
# Validate format of target, if defined
if self.target:
try:
self.target_junction, self.target_element = self.target.split(":")
except ValueError:
- raise ElementError(
- "'target' option must be in format '{junction-name}:{element-name}'"
- )
+ raise ElementError("'target' option must be in format '{junction-name}:{element-name}'")
# We cannot target a junction that has the same name as us, since that
# will cause an infinite recursion while trying to load it.
if self.name == self.target_element:
- raise ElementError(
- "junction elements cannot target an element with the same name"
- )
+ raise ElementError("junction elements cannot target an element with the same name")
def get_unique_key(self):
# Junctions do not produce artifacts. get_unique_key() implementation
diff --git a/src/buildstream/plugins/elements/manual.py b/src/buildstream/plugins/elements/manual.py
index 4e9fded17..97da41615 100644
--- a/src/buildstream/plugins/elements/manual.py
+++ b/src/buildstream/plugins/elements/manual.py
@@ -42,9 +42,7 @@ class ManualElement(BuildElement):
# Enable command batching across prepare() and assemble()
def configure_sandbox(self, sandbox):
super().configure_sandbox(sandbox)
- self.batch_prepare_assemble(
- SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root")
- )
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root"))
# Plugin entry point
diff --git a/src/buildstream/plugins/elements/pip.py b/src/buildstream/plugins/elements/pip.py
index 175568955..93303748d 100644
--- a/src/buildstream/plugins/elements/pip.py
+++ b/src/buildstream/plugins/elements/pip.py
@@ -42,9 +42,7 @@ class PipElement(BuildElement):
# Enable command batching across prepare() and assemble()
def configure_sandbox(self, sandbox):
super().configure_sandbox(sandbox)
- self.batch_prepare_assemble(
- SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root")
- )
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root"))
# Plugin entry point
diff --git a/src/buildstream/plugins/sources/_downloadablefilesource.py b/src/buildstream/plugins/sources/_downloadablefilesource.py
index 2db4274f2..6449bc7d5 100644
--- a/src/buildstream/plugins/sources/_downloadablefilesource.py
+++ b/src/buildstream/plugins/sources/_downloadablefilesource.py
@@ -79,9 +79,7 @@ class DownloadableFileSource(Source):
self.original_url = node.get_str("url")
self.ref = node.get_str("ref", None)
self.url = self.translate_url(self.original_url)
- self._mirror_dir = os.path.join(
- self.get_mirror_directory(), utils.url_directory_name(self.original_url)
- )
+ self._mirror_dir = os.path.join(self.get_mirror_directory(), utils.url_directory_name(self.original_url))
self._warn_deprecated_etag(node)
def preflight(self):
@@ -143,9 +141,7 @@ class DownloadableFileSource(Source):
sha256 = self._ensure_mirror()
if sha256 != self.ref:
raise SourceError(
- "File downloaded from {} has sha256sum '{}', not '{}'!".format(
- self.url, sha256, self.ref
- )
+ "File downloaded from {} has sha256sum '{}', not '{}'!".format(self.url, sha256, self.ref)
)
def _warn_deprecated_etag(self, node):
@@ -217,21 +213,12 @@ class DownloadableFileSource(Source):
# Because we use etag only for matching ref, currently specified ref is what
# we would have downloaded.
return self.ref
- raise SourceError(
- "{}: Error mirroring {}: {}".format(self, self.url, e), temporary=True
- ) from e
-
- except (
- urllib.error.URLError,
- urllib.error.ContentTooShortError,
- OSError,
- ValueError,
- ) as e:
+ raise SourceError("{}: Error mirroring {}: {}".format(self, self.url, e), temporary=True) from e
+
+ except (urllib.error.URLError, urllib.error.ContentTooShortError, OSError, ValueError,) as e:
# Note that urllib.request.Request in the try block may throw a
# ValueError for unknown url types, so we handle it here.
- raise SourceError(
- "{}: Error mirroring {}: {}".format(self, self.url, e), temporary=True
- ) from e
+ raise SourceError("{}: Error mirroring {}: {}".format(self, self.url, e), temporary=True) from e
def _get_mirror_file(self, sha=None):
if sha is not None:
@@ -261,7 +248,5 @@ class DownloadableFileSource(Source):
netrc_pw_mgr = _NetrcPasswordManager(netrc_config)
http_auth = urllib.request.HTTPBasicAuthHandler(netrc_pw_mgr)
ftp_handler = _NetrcFTPOpener(netrc_config)
- DownloadableFileSource.__urlopener = urllib.request.build_opener(
- http_auth, ftp_handler
- )
+ DownloadableFileSource.__urlopener = urllib.request.build_opener(http_auth, ftp_handler)
return DownloadableFileSource.__urlopener
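The _downloadablefilesource.py hunks above centre on _ensure_mirror(), which downloads the file and compares its sha256 against the stored ref. A standard-library-only sketch of that download-then-hash pattern; the function name and on-disk file name are simplified placeholders rather than BuildStream API.

import hashlib
import os
import urllib.request


def mirror_file(url, mirror_dir):
    # Stream the remote file into the mirror directory and return its sha256,
    # which a caller can compare against an expected ref.
    os.makedirs(mirror_dir, exist_ok=True)
    digest = hashlib.sha256()
    target = os.path.join(mirror_dir, "download")
    with urllib.request.urlopen(url) as response, open(target, "wb") as out:
        while True:
            chunk = response.read(65536)
            if not chunk:
                break
            digest.update(chunk)
            out.write(chunk)
    return digest.hexdigest()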
diff --git a/src/buildstream/plugins/sources/bzr.py b/src/buildstream/plugins/sources/bzr.py
index 657a885c2..ec3317649 100644
--- a/src/buildstream/plugins/sources/bzr.py
+++ b/src/buildstream/plugins/sources/bzr.py
@@ -102,33 +102,19 @@ class BzrSource(Source):
node["ref"] = self.ref = ref
def track(self):
- with self.timed_activity(
- "Tracking {}".format(self.url), silent_nested=True
- ), self._locked():
+ with self.timed_activity("Tracking {}".format(self.url), silent_nested=True), self._locked():
self._ensure_mirror(skip_ref_check=True)
ret, out = self.check_output(
- [
- self.host_bzr,
- "version-info",
- "--custom",
- "--template={revno}",
- self._get_branch_dir(),
- ],
- fail="Failed to read the revision number at '{}'".format(
- self._get_branch_dir()
- ),
+ [self.host_bzr, "version-info", "--custom", "--template={revno}", self._get_branch_dir(),],
+ fail="Failed to read the revision number at '{}'".format(self._get_branch_dir()),
)
if ret != 0:
- raise SourceError(
- "{}: Failed to get ref for tracking {}".format(self, self.tracking)
- )
+ raise SourceError("{}: Failed to get ref for tracking {}".format(self, self.tracking))
return out
def fetch(self):
- with self.timed_activity(
- "Fetching {}".format(self.url), silent_nested=True
- ), self._locked():
+ with self.timed_activity("Fetching {}".format(self.url), silent_nested=True), self._locked():
self._ensure_mirror()
def stage(self, directory):
@@ -150,9 +136,7 @@ class BzrSource(Source):
def init_workspace(self, directory):
url = os.path.join(self.url, self.tracking)
- with self.timed_activity(
- 'Setting up workspace "{}"'.format(directory), silent_nested=True
- ):
+ with self.timed_activity('Setting up workspace "{}"'.format(directory), silent_nested=True):
# Checkout from the cache
self.call(
[
@@ -181,9 +165,7 @@ class BzrSource(Source):
@contextmanager
def _locked(self):
lockdir = os.path.join(self.get_mirror_directory(), "locks")
- lockfile = os.path.join(
- lockdir, utils.url_directory_name(self.original_url) + ".lock"
- )
+ lockfile = os.path.join(lockdir, utils.url_directory_name(self.original_url) + ".lock")
os.makedirs(lockdir, exist_ok=True)
with open(lockfile, "w") as lock:
fcntl.flock(lock, fcntl.LOCK_EX)
@@ -198,32 +180,21 @@ class BzrSource(Source):
return False
return (
- self.call(
- [
- self.host_bzr,
- "revno",
- "--revision=revno:{}".format(self.ref),
- self._get_branch_dir(),
- ]
- )
- == 0
+ self.call([self.host_bzr, "revno", "--revision=revno:{}".format(self.ref), self._get_branch_dir(),]) == 0
)
def _get_branch_dir(self):
return os.path.join(self._get_mirror_dir(), self.tracking)
def _get_mirror_dir(self):
- return os.path.join(
- self.get_mirror_directory(), utils.url_directory_name(self.original_url)
- )
+ return os.path.join(self.get_mirror_directory(), utils.url_directory_name(self.original_url))
def _ensure_mirror(self, skip_ref_check=False):
mirror_dir = self._get_mirror_dir()
bzr_metadata_dir = os.path.join(mirror_dir, ".bzr")
if not os.path.exists(bzr_metadata_dir):
self.call(
- [self.host_bzr, "init-repo", "--no-trees", mirror_dir],
- fail="Failed to initialize bzr repository",
+ [self.host_bzr, "init-repo", "--no-trees", mirror_dir], fail="Failed to initialize bzr repository",
)
branch_dir = os.path.join(mirror_dir, self.tracking)
@@ -240,19 +211,13 @@ class BzrSource(Source):
# `bzr pull` the branch if it does exist
# to get any changes to the upstream code
self.call(
- [
- self.host_bzr,
- "pull",
- "--directory={}".format(branch_dir),
- branch_url,
- ],
+ [self.host_bzr, "pull", "--directory={}".format(branch_dir), branch_url,],
fail="Failed to pull new changes for {}".format(branch_dir),
)
if not skip_ref_check and not self._check_ref():
raise SourceError(
- "Failed to ensure ref '{}' was mirrored".format(self.ref),
- reason="ref-not-mirrored",
+ "Failed to ensure ref '{}' was mirrored".format(self.ref), reason="ref-not-mirrored",
)
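The bzr.py hunks above touch _locked(), which serialises access to the shared mirror with an fcntl file lock so that concurrent fetch and track jobs do not corrupt it. A stripped-down, standard-library-only sketch of that pattern; the lock directory and lock name are placeholders.

import fcntl
import os
from contextlib import contextmanager


@contextmanager
def locked(lockdir, name):
    # Take an exclusive advisory lock on a per-mirror lock file, blocking
    # until any other holder releases it, then release it on exit.
    os.makedirs(lockdir, exist_ok=True)
    lockfile = os.path.join(lockdir, name + ".lock")
    with open(lockfile, "w") as lock:
        fcntl.flock(lock, fcntl.LOCK_EX)
        try:
            yield
        finally:
            fcntl.flock(lock, fcntl.LOCK_UN)

A caller would wrap the critical section, e.g. with locked("/path/to/locks", "mirror-name"): do_mirror_work().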
diff --git a/src/buildstream/plugins/sources/deb.py b/src/buildstream/plugins/sources/deb.py
index 407241689..a7437b150 100644
--- a/src/buildstream/plugins/sources/deb.py
+++ b/src/buildstream/plugins/sources/deb.py
@@ -71,9 +71,7 @@ class DebSource(TarSource):
with open(self._get_mirror_file(), "rb") as deb_file:
arpy_archive = arpy.Archive(fileobj=deb_file)
arpy_archive.read_all_headers()
- data_tar_arpy = [
- v for k, v in arpy_archive.archived_files.items() if b"data.tar" in k
- ][0]
+ data_tar_arpy = [v for k, v in arpy_archive.archived_files.items() if b"data.tar" in k][0]
# ArchiveFileData is not enough like a file object for tarfile to use.
# Monkey-patching a seekable method makes it close enough for TarFile to open.
data_tar_arpy.seekable = lambda *args: True
diff --git a/src/buildstream/plugins/sources/local.py b/src/buildstream/plugins/sources/local.py
index 471992af9..4e85f6659 100644
--- a/src/buildstream/plugins/sources/local.py
+++ b/src/buildstream/plugins/sources/local.py
@@ -90,8 +90,7 @@ class LocalSource(Source):
if result.overwritten or result.ignored:
raise SourceError(
- "Failed to stage source: files clash with existing directory",
- reason="ensure-stage-dir-fail",
+ "Failed to stage source: files clash with existing directory", reason="ensure-stage-dir-fail",
)
def _get_local_path(self):
diff --git a/src/buildstream/plugins/sources/patch.py b/src/buildstream/plugins/sources/patch.py
index 2be4ee2f7..e9c4ff050 100644
--- a/src/buildstream/plugins/sources/patch.py
+++ b/src/buildstream/plugins/sources/patch.py
@@ -56,9 +56,7 @@ class PatchSource(Source):
def configure(self, node):
node.validate_keys(["path", "strip-level", *Source.COMMON_CONFIG_KEYS])
- self.path = self.node_get_project_path(
- node.get_scalar("path"), check_is_file=True
- )
+ self.path = self.node_get_project_path(node.get_scalar("path"), check_is_file=True)
self.strip_level = node.get_int("strip-level", default=1)
self.fullpath = os.path.join(self.get_project_directory(), self.path)
@@ -91,20 +89,12 @@ class PatchSource(Source):
# Bail out with a comprehensive message if the target directory is empty
if not os.listdir(directory):
raise SourceError(
- "Nothing to patch in directory '{}'".format(directory),
- reason="patch-no-files",
+ "Nothing to patch in directory '{}'".format(directory), reason="patch-no-files",
)
strip_level_option = "-p{}".format(self.strip_level)
self.call(
- [
- self.host_patch,
- strip_level_option,
- "-i",
- self.fullpath,
- "-d",
- directory,
- ],
+ [self.host_patch, strip_level_option, "-i", self.fullpath, "-d", directory,],
fail="Failed to apply patch {}".format(self.path),
)
diff --git a/src/buildstream/plugins/sources/pip.py b/src/buildstream/plugins/sources/pip.py
index 41e414855..253ac2078 100644
--- a/src/buildstream/plugins/sources/pip.py
+++ b/src/buildstream/plugins/sources/pip.py
@@ -95,10 +95,7 @@ _PYTHON_VERSIONS = [
# https://docs.python.org/3/distutils/sourcedist.html.
# Names of source distribution archives must be of the form
# '%{package-name}-%{version}.%{extension}'.
-_SDIST_RE = re.compile(
- r"^([\w.-]+?)-((?:[\d.]+){2,})\.(?:tar|tar.bz2|tar.gz|tar.xz|tar.Z|zip)$",
- re.IGNORECASE,
-)
+_SDIST_RE = re.compile(r"^([\w.-]+?)-((?:[\d.]+){2,})\.(?:tar|tar.bz2|tar.gz|tar.xz|tar.Z|zip)$", re.IGNORECASE,)
class PipSource(Source):
@@ -110,9 +107,7 @@ class PipSource(Source):
BST_REQUIRES_PREVIOUS_SOURCES_TRACK = True
def configure(self, node):
- node.validate_keys(
- ["url", "packages", "ref", "requirements-files"] + Source.COMMON_CONFIG_KEYS
- )
+ node.validate_keys(["url", "packages", "ref", "requirements-files"] + Source.COMMON_CONFIG_KEYS)
self.ref = node.get_str("ref", None)
self.original_url = node.get_str("url", _PYPI_INDEX_URL)
self.index_url = self.translate_url(self.original_url)
@@ -120,11 +115,7 @@ class PipSource(Source):
self.requirements_files = node.get_str_list("requirements-files", [])
if not (self.packages or self.requirements_files):
- raise SourceError(
- "{}: Either 'packages' or 'requirements-files' must be specified".format(
- self
- )
- )
+ raise SourceError("{}: Either 'packages' or 'requirements-files' must be specified".format(self))
def preflight(self):
# Try to find a pip version that supports download command
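
The collapsed _SDIST_RE above splits a source-distribution filename into a package name and a version. A quick demonstration against a made-up sdist name:

import re

_SDIST_RE = re.compile(r"^([\w.-]+?)-((?:[\d.]+){2,})\.(?:tar|tar.bz2|tar.gz|tar.xz|tar.Z|zip)$", re.IGNORECASE)

match = _SDIST_RE.match("example-pkg-1.2.3.tar.gz")  # hypothetical filename
if match:
    print(match.group(1), match.group(2))  # example-pkg 1.2.3
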
diff --git a/src/buildstream/plugins/sources/remote.py b/src/buildstream/plugins/sources/remote.py
index 6705d20e5..af5b4f266 100644
--- a/src/buildstream/plugins/sources/remote.py
+++ b/src/buildstream/plugins/sources/remote.py
@@ -67,12 +67,9 @@ class RemoteSource(DownloadableFileSource):
if os.sep in self.filename:
raise SourceError(
- "{}: filename parameter cannot contain directories".format(self),
- reason="filename-contains-directory",
+ "{}: filename parameter cannot contain directories".format(self), reason="filename-contains-directory",
)
- node.validate_keys(
- DownloadableFileSource.COMMON_CONFIG_KEYS + ["filename", "executable"]
- )
+ node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ["filename", "executable"])
def get_unique_key(self):
return super().get_unique_key() + [self.filename, self.executable]
diff --git a/src/buildstream/plugins/sources/tar.py b/src/buildstream/plugins/sources/tar.py
index 7e5868baa..8bc0cc743 100644
--- a/src/buildstream/plugins/sources/tar.py
+++ b/src/buildstream/plugins/sources/tar.py
@@ -115,9 +115,7 @@ class TarSource(DownloadableFileSource):
def _get_tar(self):
if self.url.endswith(".lz"):
with self._run_lzip() as lzip_dec:
- with tarfile.open(
- fileobj=lzip_dec, mode="r:", tarinfo=ReadableTarInfo
- ) as tar:
+ with tarfile.open(fileobj=lzip_dec, mode="r:", tarinfo=ReadableTarInfo) as tar:
yield tar
else:
with tarfile.open(self._get_mirror_file(), tarinfo=ReadableTarInfo) as tar:
@@ -132,8 +130,7 @@ class TarSource(DownloadableFileSource):
if base_dir:
tar.extractall(
- path=directory,
- members=self._extract_members(tar, base_dir, directory),
+ path=directory, members=self._extract_members(tar, base_dir, directory),
)
else:
tar.extractall(path=directory)
@@ -244,11 +241,7 @@ class TarSource(DownloadableFileSource):
paths = self._list_tar_paths(tar)
matches = sorted(list(utils.glob(paths, pattern)))
if not matches:
- raise SourceError(
- "{}: Could not find base directory matching pattern: {}".format(
- self, pattern
- )
- )
+ raise SourceError("{}: Could not find base directory matching pattern: {}".format(self, pattern))
return matches[0]
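
tar.py picks its base directory by globbing the list of member paths and taking the first sorted match; utils.glob() there is BuildStream's own helper. As a rough stand-in, fnmatch shows the same idea (the archive path and pattern are hypothetical, and fnmatch is not a drop-in replacement for the real helper):

import fnmatch
import tarfile

def find_base_dir(tar_path, pattern):
    with tarfile.open(tar_path) as tar:
        paths = [member.name for member in tar.getmembers()]
    matches = sorted(p for p in paths if fnmatch.fnmatch(p, pattern))
    if not matches:
        raise RuntimeError("Could not find base directory matching pattern: {}".format(pattern))
    return matches[0]

# find_base_dir("example-1.0.tar.gz", "*")  # hypothetical archive
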
diff --git a/src/buildstream/plugins/sources/workspace.py b/src/buildstream/plugins/sources/workspace.py
index 56b4db1a4..0db3a6ffa 100644
--- a/src/buildstream/plugins/sources/workspace.py
+++ b/src/buildstream/plugins/sources/workspace.py
@@ -98,8 +98,7 @@ class WorkspaceSource(Source):
if result.overwritten or result.ignored:
raise SourceError(
- "Failed to stage source: files clash with existing directory",
- reason="ensure-stage-dir-fail",
+ "Failed to stage source: files clash with existing directory", reason="ensure-stage-dir-fail",
)
def _get_local_path(self) -> str:
diff --git a/src/buildstream/plugins/sources/zip.py b/src/buildstream/plugins/sources/zip.py
index 69324b29d..47933c8eb 100644
--- a/src/buildstream/plugins/sources/zip.py
+++ b/src/buildstream/plugins/sources/zip.py
@@ -79,9 +79,7 @@ class ZipSource(DownloadableFileSource):
return super().get_unique_key() + [self.base_dir]
def stage(self, directory):
- exec_rights = (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) & ~(
- stat.S_IWGRP | stat.S_IWOTH
- )
+ exec_rights = (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) & ~(stat.S_IWGRP | stat.S_IWOTH)
noexec_rights = exec_rights & ~(stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
try:
@@ -173,11 +171,7 @@ class ZipSource(DownloadableFileSource):
paths = self._list_archive_paths(archive)
matches = sorted(list(utils.glob(paths, pattern)))
if not matches:
- raise SourceError(
- "{}: Could not find base directory matching pattern: {}".format(
- self, pattern
- )
- )
+ raise SourceError("{}: Could not find base directory matching pattern: {}".format(self, pattern))
return matches[0]
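
zip.py builds two permission masks: one that keeps execute bits and one that strips them, both with group/other write removed. Seen in isolation, the collapsed expression evaluates to the familiar octal modes:

import stat

exec_rights = (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) & ~(stat.S_IWGRP | stat.S_IWOTH)
noexec_rights = exec_rights & ~(stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

print(oct(exec_rights), oct(noexec_rights))  # 0o755 0o644
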
diff --git a/src/buildstream/sandbox/_mount.py b/src/buildstream/sandbox/_mount.py
index b182a6adc..18751dde5 100644
--- a/src/buildstream/sandbox/_mount.py
+++ b/src/buildstream/sandbox/_mount.py
@@ -38,9 +38,7 @@ class Mount:
self.mount_point = mount_point
self.safe_hardlinks = safe_hardlinks
- self._fuse_mount_options = (
- {} if fuse_mount_options is None else fuse_mount_options
- )
+ self._fuse_mount_options = {} if fuse_mount_options is None else fuse_mount_options
# FIXME: When the criteria for mounting something and its parent
# mount is identical, then there is no need to mount an additional
@@ -55,9 +53,7 @@ class Mount:
scratch_directory = sandbox._get_scratch_directory()
# Redirected mount
self.mount_origin = os.path.join(root_directory, mount_point.lstrip(os.sep))
- self.mount_base = os.path.join(
- scratch_directory, utils.url_directory_name(mount_point)
- )
+ self.mount_base = os.path.join(scratch_directory, utils.url_directory_name(mount_point))
self.mount_source = os.path.join(self.mount_base, "mount")
self.mount_tempdir = os.path.join(self.mount_base, "temp")
os.makedirs(self.mount_origin, exist_ok=True)
@@ -87,9 +83,7 @@ class Mount:
@contextmanager
def mounted(self, sandbox):
if self.safe_hardlinks:
- mount = SafeHardlinks(
- self.mount_origin, self.mount_tempdir, self._fuse_mount_options
- )
+ mount = SafeHardlinks(self.mount_origin, self.mount_tempdir, self._fuse_mount_options)
with mount.mounted(self.mount_source):
yield
else:
@@ -122,9 +116,7 @@ class MountMap:
# We want safe hardlinks for any non-root directory where
# artifacts will be staged to
- self.mounts[directory] = Mount(
- sandbox, directory, artifact, fuse_mount_options
- )
+ self.mounts[directory] = Mount(sandbox, directory, artifact, fuse_mount_options)
# get_mount_source()
#
diff --git a/src/buildstream/sandbox/_mounter.py b/src/buildstream/sandbox/_mounter.py
index 57f35bdec..38b9b9c1c 100644
--- a/src/buildstream/sandbox/_mounter.py
+++ b/src/buildstream/sandbox/_mounter.py
@@ -28,14 +28,7 @@ from .. import utils, _signals
class Mounter:
@classmethod
def _mount(
- cls,
- dest,
- src=None,
- mount_type=None,
- stdout=None,
- stderr=None,
- options=None,
- flags=None,
+ cls, dest, src=None, mount_type=None, stdout=None, stderr=None, options=None, flags=None,
):
if stdout is None:
@@ -58,9 +51,7 @@ class Mounter:
status, _ = utils._call(argv, terminate=True, stdout=stdout, stderr=stderr)
if status != 0:
- raise SandboxError(
- "`{}` failed with exit code {}".format(" ".join(argv), status)
- )
+ raise SandboxError("`{}` failed with exit code {}".format(" ".join(argv), status))
return dest
@@ -75,9 +66,7 @@ class Mounter:
status, _ = utils._call(cmd, terminate=True, stdout=stdout, stderr=stderr)
if status != 0:
- raise SandboxError(
- "`{}` failed with exit code {}".format(" ".join(cmd), status)
- )
+ raise SandboxError("`{}` failed with exit code {}".format(" ".join(cmd), status))
# mount()
#
@@ -108,9 +97,7 @@ class Mounter:
options = ",".join([key for key, val in kwargs.items() if val])
- path = cls._mount(
- dest, src, mount_type, stdout=stdout, stderr=stderr, options=options
- )
+ path = cls._mount(dest, src, mount_type, stdout=stdout, stderr=stderr, options=options)
try:
with _signals.terminator(kill_proc):
yield path
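
Mounter._mount assembles a mount(8) command line and raises when the exit status is non-zero; the hunk shows only the collapsed signature and the error path, so the argv construction below is an assumption about how such a wrapper is typically put together, not a copy of the real helper:

import subprocess

def run_mount(dest, src=None, mount_type=None, options=None, flags=None):
    argv = ["mount"]
    if mount_type is not None:
        argv.extend(["-t", mount_type])
    if options is not None:
        argv.extend(["-o", options])
    argv.extend(flags or [])  # e.g. ["--bind"] (assumed flag style)
    if src is not None:
        argv.append(src)
    argv.append(dest)
    status = subprocess.call(argv)
    if status != 0:
        raise RuntimeError("`{}` failed with exit code {}".format(" ".join(argv), status))
    return dest
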
diff --git a/src/buildstream/sandbox/_sandboxbuildbox.py b/src/buildstream/sandbox/_sandboxbuildbox.py
index 15e45a4df..6016fffe3 100644
--- a/src/buildstream/sandbox/_sandboxbuildbox.py
+++ b/src/buildstream/sandbox/_sandboxbuildbox.py
@@ -47,9 +47,7 @@ class SandboxBuildBox(Sandbox):
utils.get_host_tool("buildbox")
except utils.ProgramNotFoundError as Error:
cls._dummy_reasons += ["buildbox not found"]
- raise SandboxError(
- " and ".join(cls._dummy_reasons), reason="unavailable-local-sandbox"
- ) from Error
+ raise SandboxError(" and ".join(cls._dummy_reasons), reason="unavailable-local-sandbox") from Error
@classmethod
def check_sandbox_config(cls, platform, config):
@@ -74,8 +72,7 @@ class SandboxBuildBox(Sandbox):
if not self._has_command(command[0], env):
raise SandboxCommandError(
- "Staged artifacts do not provide command " "'{}'".format(command[0]),
- reason="missing-command",
+ "Staged artifacts do not provide command " "'{}'".format(command[0]), reason="missing-command",
)
# Grab the full path of the buildbox binary
@@ -83,10 +80,7 @@ class SandboxBuildBox(Sandbox):
buildbox_command = [utils.get_host_tool("buildbox")]
except ProgramNotFoundError as Err:
raise SandboxError(
- (
- "BuildBox not on path, you are using the BuildBox sandbox because "
- "BST_FORCE_SANDBOX=buildbox"
- )
+ ("BuildBox not on path, you are using the BuildBox sandbox because " "BST_FORCE_SANDBOX=buildbox")
) from Err
for mark in self._get_marked_directories():
@@ -109,9 +103,7 @@ class SandboxBuildBox(Sandbox):
if not flags & SandboxFlags.NETWORK_ENABLED:
# TODO
- self._issue_warning(
- "BuildBox sandbox does not have Networking yet", detail=common_details
- )
+ self._issue_warning("BuildBox sandbox does not have Networking yet", detail=common_details)
if cwd is not None:
buildbox_command += ["--chdir=" + cwd]
@@ -124,23 +116,20 @@ class SandboxBuildBox(Sandbox):
if flags & SandboxFlags.INTERACTIVE:
# TODO
self._issue_warning(
- "BuildBox sandbox does not fully support BuildStream shells yet",
- detail=common_details,
+ "BuildBox sandbox does not fully support BuildStream shells yet", detail=common_details,
)
if flags & SandboxFlags.ROOT_READ_ONLY:
# TODO
self._issue_warning(
- "BuildBox sandbox does not fully support BuildStream `Read only Root`",
- detail=common_details,
+ "BuildBox sandbox does not fully support BuildStream `Read only Root`", detail=common_details,
)
# Set UID and GID
if not flags & SandboxFlags.INHERIT_UID:
# TODO
self._issue_warning(
- "BuildBox sandbox does not fully support BuildStream Inherit UID",
- detail=common_details,
+ "BuildBox sandbox does not fully support BuildStream Inherit UID", detail=common_details,
)
os.makedirs(os.path.join(scratch_directory, "mnt"), exist_ok=True)
@@ -179,14 +168,10 @@ class SandboxBuildBox(Sandbox):
)
if exit_code == 0:
- with open(
- os.path.join(scratch_directory, "out"), "rb"
- ) as output_digest_file:
+ with open(os.path.join(scratch_directory, "out"), "rb") as output_digest_file:
output_digest = remote_execution_pb2.Digest()
output_digest.ParseFromString(output_digest_file.read())
- self._vdir = CasBasedDirectory(
- root_directory.cas_cache, digest=output_digest
- )
+ self._vdir = CasBasedDirectory(root_directory.cas_cache, digest=output_digest)
return exit_code
@@ -210,9 +195,7 @@ class SandboxBuildBox(Sandbox):
group_id = os.getpgid(process.pid)
os.killpg(group_id, signal.SIGCONT)
- with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(
- kill_proc
- ):
+ with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(kill_proc):
process = subprocess.Popen(
argv,
close_fds=True,
diff --git a/src/buildstream/sandbox/_sandboxbwrap.py b/src/buildstream/sandbox/_sandboxbwrap.py
index d17139293..0e114d9f1 100644
--- a/src/buildstream/sandbox/_sandboxbwrap.py
+++ b/src/buildstream/sandbox/_sandboxbwrap.py
@@ -68,9 +68,7 @@ class SandboxBwrap(Sandbox):
cls._die_with_parent_available = False
cls._json_status_available = False
cls._dummy_reasons += ["Bubblewrap not found"]
- raise SandboxError(
- " and ".join(cls._dummy_reasons), reason="unavailable-local-sandbox"
- ) from Error
+ raise SandboxError(" and ".join(cls._dummy_reasons), reason="unavailable-local-sandbox") from Error
bwrap_version = _site.get_bwrap_version()
@@ -97,18 +95,7 @@ class SandboxBwrap(Sandbox):
try:
whoami = utils.get_host_tool("whoami")
output = subprocess.check_output(
- [
- bwrap,
- "--ro-bind",
- "/",
- "/",
- "--unshare-user",
- "--uid",
- "0",
- "--gid",
- "0",
- whoami,
- ],
+ [bwrap, "--ro-bind", "/", "/", "--unshare-user", "--uid", "0", "--gid", "0", whoami,],
universal_newlines=True,
).strip()
except subprocess.CalledProcessError:
@@ -123,10 +110,7 @@ class SandboxBwrap(Sandbox):
if cls.user_ns_available:
# User namespace support allows arbitrary build UID/GID settings.
pass
- elif (
- config.build_uid != local_platform._uid
- or config.build_gid != local_platform._gid
- ):
+ elif config.build_uid != local_platform._uid or config.build_gid != local_platform._gid:
# Without user namespace support, the UID/GID in the sandbox
# will match the host UID/GID.
return False
@@ -136,9 +120,7 @@ class SandboxBwrap(Sandbox):
if config.build_os != host_os:
raise SandboxError("Configured and host OS don't match.")
if config.build_arch != host_arch and not local_platform.can_crossbuild(config):
- raise SandboxError(
- "Configured architecture and host architecture don't match."
- )
+ raise SandboxError("Configured architecture and host architecture don't match.")
return True
@@ -150,8 +132,7 @@ class SandboxBwrap(Sandbox):
if not self._has_command(command[0], env):
raise SandboxCommandError(
- "Staged artifacts do not provide command " "'{}'".format(command[0]),
- reason="missing-command",
+ "Staged artifacts do not provide command " "'{}'".format(command[0]), reason="missing-command",
)
# NOTE: MountMap transitively imports `_fuse/fuse.py` which raises an
@@ -218,9 +199,7 @@ class SandboxBwrap(Sandbox):
mount_source_overrides = self._get_mount_sources()
for mark in marked_directories:
mount_point = mark["directory"]
- if (
- mount_point in mount_source_overrides
- ): # pylint: disable=consider-using-get
+ if mount_point in mount_source_overrides: # pylint: disable=consider-using-get
mount_source = mount_source_overrides[mount_point]
else:
mount_source = mount_map.get_mount_source(mount_point)
@@ -287,12 +266,7 @@ class SandboxBwrap(Sandbox):
# Run bubblewrap !
exit_code = self.run_bwrap(
- bwrap_command,
- stdin,
- stdout,
- stderr,
- (flags & SandboxFlags.INTERACTIVE),
- pass_fds,
+ bwrap_command, stdin, stdout, stderr, (flags & SandboxFlags.INTERACTIVE), pass_fds,
)
# Cleanup things which bwrap might have left behind, while
@@ -355,9 +329,7 @@ class SandboxBwrap(Sandbox):
break
if child_exit_code is None:
raise SandboxError(
- "`bwrap' terminated during sandbox setup with exitcode {}".format(
- exit_code
- ),
+ "`bwrap' terminated during sandbox setup with exitcode {}".format(exit_code),
reason="bwrap-sandbox-fail",
)
exit_code = child_exit_code
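
The bwrap hunk collapses a probe that decides whether user namespaces are usable: run bubblewrap read-only over / with --unshare-user --uid 0 --gid 0 and see what whoami reports. A standalone version of that probe; the final comparison against "root" is an assumption about what the surrounding (unshown) code checks:

import subprocess

def user_namespaces_available(bwrap="bwrap", whoami="whoami"):
    try:
        output = subprocess.check_output(
            [bwrap, "--ro-bind", "/", "/", "--unshare-user", "--uid", "0", "--gid", "0", whoami],
            universal_newlines=True,
        ).strip()
    except (subprocess.CalledProcessError, FileNotFoundError):
        return False
    return output == "root"
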
diff --git a/src/buildstream/sandbox/_sandboxchroot.py b/src/buildstream/sandbox/_sandboxchroot.py
index ad76bf998..b26f468be 100644
--- a/src/buildstream/sandbox/_sandboxchroot.py
+++ b/src/buildstream/sandbox/_sandboxchroot.py
@@ -81,17 +81,14 @@ class SandboxChroot(Sandbox):
if not self._has_command(command[0], env):
raise SandboxCommandError(
- "Staged artifacts do not provide command " "'{}'".format(command[0]),
- reason="missing-command",
+ "Staged artifacts do not provide command " "'{}'".format(command[0]), reason="missing-command",
)
stdout, stderr = self._get_output()
# Create the mount map, this will tell us where
# each mount point needs to be mounted from and to
- self.mount_map = MountMap(
- self, flags & SandboxFlags.ROOT_READ_ONLY, self._FUSE_MOUNT_OPTIONS
- )
+ self.mount_map = MountMap(self, flags & SandboxFlags.ROOT_READ_ONLY, self._FUSE_MOUNT_OPTIONS)
# Create a sysroot and run the command inside it
with ExitStack() as stack:
@@ -121,9 +118,7 @@ class SandboxChroot(Sandbox):
if cwd is not None:
workdir = os.path.join(rootfs, cwd.lstrip(os.sep))
os.makedirs(workdir, exist_ok=True)
- status = self.chroot(
- rootfs, command, stdin, stdout, stderr, cwd, env, flags
- )
+ status = self.chroot(rootfs, command, stdin, stdout, stderr, cwd, env, flags)
self._vdir._mark_changed()
return status
@@ -166,9 +161,7 @@ class SandboxChroot(Sandbox):
os.killpg(group_id, signal.SIGCONT)
try:
- with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(
- kill_proc
- ):
+ with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(kill_proc):
process = subprocess.Popen( # pylint: disable=subprocess-popen-preexec-fn
command,
close_fds=True,
@@ -224,9 +217,7 @@ class SandboxChroot(Sandbox):
if str(e) == "Exception occurred in preexec_fn.":
raise SandboxError(
"Could not chroot into {} or chdir into {}. "
- "Ensure you are root and that the relevant directory exists.".format(
- rootfs, cwd
- )
+ "Ensure you are root and that the relevant directory exists.".format(rootfs, cwd)
) from e
# Otherwise, raise a more general error
@@ -262,9 +253,7 @@ class SandboxChroot(Sandbox):
except OSError as err:
if err.errno == 1:
raise SandboxError(
- "Permission denied while creating device node: {}.".format(
- err
- )
+ "Permission denied while creating device node: {}.".format(err)
+ " BuildStream requires root permissions for these settings."
)
@@ -300,9 +289,7 @@ class SandboxChroot(Sandbox):
mount_source = self.mount_map.get_mount_source(point)
mount_point = os.path.join(rootfs, point.lstrip(os.sep))
- with Mounter.bind_mount(
- mount_point, src=mount_source, stdout=stdout, stderr=stderr, **kwargs
- ):
+ with Mounter.bind_mount(mount_point, src=mount_source, stdout=stdout, stderr=stderr, **kwargs):
yield
@contextmanager
@@ -310,9 +297,7 @@ class SandboxChroot(Sandbox):
mount_point = os.path.join(rootfs, src.lstrip(os.sep))
os.makedirs(mount_point, exist_ok=True)
- with Mounter.bind_mount(
- mount_point, src=src, stdout=stdout, stderr=stderr, **kwargs
- ):
+ with Mounter.bind_mount(mount_point, src=src, stdout=stdout, stderr=stderr, **kwargs):
yield
with ExitStack() as stack:
@@ -331,14 +316,7 @@ class SandboxChroot(Sandbox):
# Remount root RO if necessary
if flags & SandboxFlags.ROOT_READ_ONLY:
- root_mount = Mounter.mount(
- rootfs,
- stdout=stdout,
- stderr=stderr,
- remount=True,
- ro=True,
- bind=True,
- )
+ root_mount = Mounter.mount(rootfs, stdout=stdout, stderr=stderr, remount=True, ro=True, bind=True,)
# Since the exit stack has already registered a mount
# for this path, we do not need to register another
# umount call.
@@ -368,13 +346,9 @@ class SandboxChroot(Sandbox):
os.mknod(target, mode=stat.S_IFCHR | dev.st_mode, device=target_dev)
except PermissionError as e:
- raise SandboxError(
- "Could not create device {}, ensure that you have root permissions: {}"
- )
+ raise SandboxError("Could not create device {}, ensure that you have root permissions: {}".format(target, e)) from e
except OSError as e:
- raise SandboxError(
- "Could not create device {}: {}".format(target, e)
- ) from e
+ raise SandboxError("Could not create device {}: {}".format(target, e)) from e
return target
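
The chroot sandbox recreates host device nodes inside the rootfs with os.mknod, which needs root and therefore carries explicit permission errors. A minimal sketch of cloning one node, following the mknod call shown above (the target path is hypothetical and the code must run as root):

import os
import stat

def clone_device(source, target):
    dev = os.stat(source)
    target_dev = os.makedev(os.major(dev.st_rdev), os.minor(dev.st_rdev))
    try:
        os.mknod(target, mode=stat.S_IFCHR | dev.st_mode, device=target_dev)
    except PermissionError as e:
        raise RuntimeError("Could not create device {}, ensure that you have root permissions: {}".format(target, e)) from e
    except OSError as e:
        raise RuntimeError("Could not create device {}: {}".format(target, e)) from e
    return target

# clone_device("/dev/null", "/tmp/sandbox-root/dev/null")  # hypothetical target
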
diff --git a/src/buildstream/sandbox/_sandboxdummy.py b/src/buildstream/sandbox/_sandboxdummy.py
index 78c08035d..f9272f007 100644
--- a/src/buildstream/sandbox/_sandboxdummy.py
+++ b/src/buildstream/sandbox/_sandboxdummy.py
@@ -29,11 +29,9 @@ class SandboxDummy(Sandbox):
if not self._has_command(command[0], env):
raise SandboxCommandError(
- "Staged artifacts do not provide command " "'{}'".format(command[0]),
- reason="missing-command",
+ "Staged artifacts do not provide command " "'{}'".format(command[0]), reason="missing-command",
)
raise SandboxError(
- "This platform does not support local builds: {}".format(self._reason),
- reason="unavailable-local-sandbox",
+ "This platform does not support local builds: {}".format(self._reason), reason="unavailable-local-sandbox",
)
diff --git a/src/buildstream/sandbox/_sandboxremote.py b/src/buildstream/sandbox/_sandboxremote.py
index 72b0f8f1a..308be2c3b 100644
--- a/src/buildstream/sandbox/_sandboxremote.py
+++ b/src/buildstream/sandbox/_sandboxremote.py
@@ -44,9 +44,7 @@ from .._cas import CASRemote
from .._remote import RemoteSpec
-class RemoteExecutionSpec(
- namedtuple("RemoteExecutionSpec", "exec_service storage_service action_service")
-):
+class RemoteExecutionSpec(namedtuple("RemoteExecutionSpec", "exec_service storage_service action_service")):
pass
@@ -126,9 +124,7 @@ class SandboxRemote(Sandbox):
provenance = remote_config.get_provenance()
raise _yaml.LoadError(
"{}: '{}' was not present in the remote "
- "execution configuration (remote-execution). ".format(
- str(provenance), keyname
- ),
+ "execution configuration (remote-execution). ".format(str(provenance), keyname),
_yaml.LoadErrorReason.INVALID_DATA,
)
return val
@@ -190,9 +186,7 @@ class SandboxRemote(Sandbox):
config[tls_key] = resolve_path(config.get_str(tls_key))
# TODO: we should probably not be stripping node info and rather load files the safe way
- return RemoteExecutionSpec(
- *[conf.strip_node_info() for conf in service_configs]
- )
+ return RemoteExecutionSpec(*[conf.strip_node_info() for conf in service_configs])
def run_remote_command(self, channel, action_digest):
# Sends an execution request to the remote execution server.
@@ -202,9 +196,7 @@ class SandboxRemote(Sandbox):
# Try to create a communication channel to the BuildGrid server.
stub = remote_execution_pb2_grpc.ExecutionStub(channel)
request = remote_execution_pb2.ExecuteRequest(
- instance_name=self.exec_instance,
- action_digest=action_digest,
- skip_cache_lookup=False,
+ instance_name=self.exec_instance, action_digest=action_digest, skip_cache_lookup=False,
)
def __run_remote_command(stub, execute_request=None, running_operation=None):
@@ -213,9 +205,7 @@ class SandboxRemote(Sandbox):
if execute_request is not None:
operation_iterator = stub.Execute(execute_request)
else:
- request = remote_execution_pb2.WaitExecutionRequest(
- name=running_operation.name
- )
+ request = remote_execution_pb2.WaitExecutionRequest(name=running_operation.name)
operation_iterator = stub.WaitExecution(request)
for operation in operation_iterator:
@@ -229,11 +219,7 @@ class SandboxRemote(Sandbox):
except grpc.RpcError as e:
status_code = e.code()
if status_code == grpc.StatusCode.UNAVAILABLE:
- raise SandboxError(
- "Failed contacting remote execution server at {}.".format(
- self.exec_url
- )
- )
+ raise SandboxError("Failed contacting remote execution server at {}.".format(self.exec_url))
if status_code in (
grpc.StatusCode.INVALID_ARGUMENT,
@@ -278,15 +264,11 @@ class SandboxRemote(Sandbox):
try:
stub.CancelOperation(request)
except grpc.RpcError as e:
- if (
- e.code() == grpc.StatusCode.UNIMPLEMENTED
- or e.code() == grpc.StatusCode.INVALID_ARGUMENT
- ):
+ if e.code() == grpc.StatusCode.UNIMPLEMENTED or e.code() == grpc.StatusCode.INVALID_ARGUMENT:
pass
else:
raise SandboxError(
- "Failed trying to send CancelOperation request: "
- "{} ({})".format(e.details(), e.code().name)
+ "Failed trying to send CancelOperation request: " "{} ({})".format(e.details(), e.code().name)
)
def process_job_output(self, output_directories, output_files, *, failure):
@@ -304,9 +286,7 @@ class SandboxRemote(Sandbox):
error_text = "No output directory was returned from the build server."
raise SandboxError(error_text)
if len(output_directories) > 1:
- error_text = (
- "More than one output directory was returned from the build server: {}."
- )
+ error_text = "More than one output directory was returned from the build server: {}."
raise SandboxError(error_text.format(output_directories))
tree_digest = output_directories[0].tree_digest
@@ -352,20 +332,14 @@ class SandboxRemote(Sandbox):
# however, artifact push remotes will need them.
# Only fetch blobs that are missing on one or multiple
# artifact servers.
- blobs_to_fetch = artifactcache.find_missing_blobs(
- project, local_missing_blobs
- )
+ blobs_to_fetch = artifactcache.find_missing_blobs(project, local_missing_blobs)
with CASRemote(self.storage_remote_spec, cascache) as casremote:
- remote_missing_blobs = cascache.fetch_blobs(
- casremote, blobs_to_fetch
- )
+ remote_missing_blobs = cascache.fetch_blobs(casremote, blobs_to_fetch)
if remote_missing_blobs:
raise SandboxError(
- "{} output files are missing on the CAS server".format(
- len(remote_missing_blobs)
- )
+ "{} output files are missing on the CAS server".format(len(remote_missing_blobs))
)
def _run(self, command, flags, *, cwd, env):
@@ -391,9 +365,7 @@ class SandboxRemote(Sandbox):
input_root_digest = upload_vdir._get_digest()
command_proto = self._create_command(command, cwd, env)
command_digest = utils._message_digest(command_proto.SerializeToString())
- action = remote_execution_pb2.Action(
- command_digest=command_digest, input_root_digest=input_root_digest
- )
+ action = remote_execution_pb2.Action(command_digest=command_digest, input_root_digest=input_root_digest)
action_digest = utils._message_digest(action.SerializeToString())
# check action cache download and download if there
@@ -405,20 +377,14 @@ class SandboxRemote(Sandbox):
casremote.init()
except grpc.RpcError as e:
raise SandboxError(
- "Failed to contact remote execution CAS endpoint at {}: {}".format(
- self.storage_url, e
- )
+ "Failed to contact remote execution CAS endpoint at {}: {}".format(self.storage_url, e)
) from e
# Determine blobs missing on remote
try:
- missing_blobs = cascache.remote_missing_blobs_for_directory(
- casremote, input_root_digest
- )
+ missing_blobs = cascache.remote_missing_blobs_for_directory(casremote, input_root_digest)
except grpc.RpcError as e:
- raise SandboxError(
- "Failed to determine missing blobs: {}".format(e)
- ) from e
+ raise SandboxError("Failed to determine missing blobs: {}".format(e)) from e
# Check if any blobs are also missing locally (partial artifact)
# and pull them from the artifact cache.
@@ -427,17 +393,13 @@ class SandboxRemote(Sandbox):
if local_missing_blobs:
artifactcache.fetch_missing_blobs(project, local_missing_blobs)
except (grpc.RpcError, BstError) as e:
- raise SandboxError(
- "Failed to pull missing blobs from artifact cache: {}".format(e)
- ) from e
+ raise SandboxError("Failed to pull missing blobs from artifact cache: {}".format(e)) from e
# Now, push the missing blobs to the remote.
try:
cascache.send_blobs(casremote, missing_blobs)
except grpc.RpcError as e:
- raise SandboxError(
- "Failed to push source directory to remote: {}".format(e)
- ) from e
+ raise SandboxError("Failed to push source directory to remote: {}".format(e)) from e
# Push command and action
try:
@@ -460,9 +422,7 @@ class SandboxRemote(Sandbox):
if url.scheme == "http":
channel = grpc.insecure_channel("{}:{}".format(url.hostname, url.port))
elif url.scheme == "https":
- channel = grpc.secure_channel(
- "{}:{}".format(url.hostname, url.port), self.exec_credentials
- )
+ channel = grpc.secure_channel("{}:{}".format(url.hostname, url.port), self.exec_credentials)
else:
raise SandboxError(
"Remote execution currently only supports the 'http' protocol "
@@ -476,9 +436,7 @@ class SandboxRemote(Sandbox):
# Get output of build
self.process_job_output(
- action_result.output_directories,
- action_result.output_files,
- failure=action_result.exit_code != 0,
+ action_result.output_directories, action_result.output_files, failure=action_result.exit_code != 0,
)
if stdout:
@@ -511,9 +469,7 @@ class SandboxRemote(Sandbox):
if url.scheme == "http":
channel = grpc.insecure_channel("{}:{}".format(url.hostname, url.port))
elif url.scheme == "https":
- channel = grpc.secure_channel(
- "{}:{}".format(url.hostname, url.port), self.action_credentials
- )
+ channel = grpc.secure_channel("{}:{}".format(url.hostname, url.port), self.action_credentials)
with channel:
request = remote_execution_pb2.GetActionResultRequest(
@@ -524,11 +480,7 @@ class SandboxRemote(Sandbox):
result = stub.GetActionResult(request)
except grpc.RpcError as e:
if e.code() != grpc.StatusCode.NOT_FOUND:
- raise SandboxError(
- "Failed to query action cache: {} ({})".format(
- e.code(), e.details()
- )
- )
+ raise SandboxError("Failed to query action cache: {} ({})".format(e.code(), e.details()))
return None
else:
self.info("Action result found in action cache")
@@ -537,8 +489,7 @@ class SandboxRemote(Sandbox):
def _create_command(self, command, working_directory, environment):
# Creates a command proto
environment_variables = [
- remote_execution_pb2.Command.EnvironmentVariable(name=k, value=v)
- for (k, v) in environment.items()
+ remote_execution_pb2.Command.EnvironmentVariable(name=k, value=v) for (k, v) in environment.items()
]
# Request the whole directory tree as output
@@ -604,16 +555,7 @@ class _SandboxRemoteBatch(_SandboxBatch):
self.main_group.execute(self)
first = self.first_command
- if (
- first
- and self.sandbox.run(
- ["sh", "-c", "-e", self.script],
- self.flags,
- cwd=first.cwd,
- env=first.env,
- )
- != 0
- ):
+ if first and self.sandbox.run(["sh", "-c", "-e", self.script], self.flags, cwd=first.cwd, env=first.env,) != 0:
raise SandboxCommandError("Command execution failed", collect=self.collect)
def execute_group(self, group):
@@ -650,11 +592,7 @@ class _SandboxRemoteBatch(_SandboxBatch):
# Error handling
label = command.label or cmdline
quoted_label = shlex.quote("'{}'".format(label))
- self.script += " || (echo Command {} failed with exitcode $? >&2 ; exit 1)\n".format(
- quoted_label
- )
+ self.script += " || (echo Command {} failed with exitcode $? >&2 ; exit 1)\n".format(quoted_label)
def execute_call(self, call):
- raise SandboxError(
- "SandboxRemote does not support callbacks in command batches"
- )
+ raise SandboxError("SandboxRemote does not support callbacks in command batches")
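
The remote-execution hunks repeatedly choose between an insecure and a TLS gRPC channel based on the scheme of the service URL. A compact sketch of that selection; the endpoint below is a placeholder and the fallback to default SSL credentials is an assumption:

from urllib.parse import urlparse

import grpc

def open_channel(url_string, credentials=None):
    url = urlparse(url_string)
    target = "{}:{}".format(url.hostname, url.port)
    if url.scheme == "http":
        return grpc.insecure_channel(target)
    if url.scheme == "https":
        return grpc.secure_channel(target, credentials or grpc.ssl_channel_credentials())
    raise ValueError("Unsupported scheme for remote execution endpoint: {}".format(url_string))

# channel = open_channel("http://buildgrid.example.com:50051")  # hypothetical endpoint
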
diff --git a/src/buildstream/sandbox/sandbox.py b/src/buildstream/sandbox/sandbox.py
index 2ce556ab8..e661cd31e 100644
--- a/src/buildstream/sandbox/sandbox.py
+++ b/src/buildstream/sandbox/sandbox.py
@@ -118,9 +118,7 @@ class Sandbox:
DEVICES = ["/dev/urandom", "/dev/random", "/dev/zero", "/dev/null"]
_dummy_reasons = [] # type: List[str]
- def __init__(
- self, context: "Context", project: "Project", directory: str, **kwargs
- ):
+ def __init__(self, context: "Context", project: "Project", directory: str, **kwargs):
self.__context = context
self.__project = project
self.__directories = [] # type: List[Dict[str, Union[int, str]]]
@@ -312,9 +310,7 @@ class Sandbox:
command = [command]
if self.__batch:
- assert (
- flags == self.__batch.flags
- ), "Inconsistent sandbox flags in single command batch"
+ assert flags == self.__batch.flags, "Inconsistent sandbox flags in single command batch"
batch_command = _SandboxBatchCommand(command, cwd=cwd, env=env, label=label)
@@ -325,9 +321,7 @@ class Sandbox:
return self._run(command, flags, cwd=cwd, env=env)
@contextmanager
- def batch(
- self, flags: int, *, label: str = None, collect: str = None
- ) -> Generator[None, None, None]:
+ def batch(self, flags: int, *, label: str = None, collect: str = None) -> Generator[None, None, None]:
"""Context manager for command batching
This provides a batch context that defers execution of commands until
@@ -353,9 +347,7 @@ class Sandbox:
if self.__batch:
# Nested batch
- assert (
- flags == self.__batch.flags
- ), "Inconsistent sandbox flags in single command batch"
+ assert flags == self.__batch.flags, "Inconsistent sandbox flags in single command batch"
parent_group = self.__batch.current_group
parent_group.append(group)
@@ -396,9 +388,7 @@ class Sandbox:
# (int): The program exit code.
#
def _run(self, command, flags, *, cwd, env):
- raise ImplError(
- "Sandbox of type '{}' does not implement _run()".format(type(self).__name__)
- )
+ raise ImplError("Sandbox of type '{}' does not implement _run()".format(type(self).__name__))
# _create_batch()
#
@@ -545,9 +535,7 @@ class Sandbox:
# Returns:
# (str): The sandbox scratch directory
def _get_scratch_directory(self):
- assert (
- not self.__bare_directory
- ), "Scratch is not going to work with bare directories"
+ assert not self.__bare_directory, "Scratch is not going to work with bare directories"
return self.__scratch
# _get_output()
@@ -654,9 +642,7 @@ class Sandbox:
# message (str): A message to issue
# detail (str): optional, more details
def _issue_warning(self, message, detail=None):
- self.__context.messenger.message(
- Message(MessageType.WARN, message, detail=detail)
- )
+ self.__context.messenger.message(Message(MessageType.WARN, message, detail=detail))
# _SandboxBatch()
@@ -677,9 +663,7 @@ class _SandboxBatch:
def execute_group(self, group):
if group.label:
context = self.sandbox._get_context()
- cm = context.messenger.timed_activity(
- group.label, element_name=self.sandbox._get_element_name()
- )
+ cm = context.messenger.timed_activity(group.label, element_name=self.sandbox._get_element_name())
else:
cm = contextlib.suppress()
@@ -697,16 +681,12 @@ class _SandboxBatch:
)
context.messenger.message(message)
- exitcode = self.sandbox._run(
- command.command, self.flags, cwd=command.cwd, env=command.env
- )
+ exitcode = self.sandbox._run(command.command, self.flags, cwd=command.cwd, env=command.env)
if exitcode != 0:
cmdline = " ".join(shlex.quote(cmd) for cmd in command.command)
label = command.label or cmdline
raise SandboxCommandError(
- "Command failed with exitcode {}".format(exitcode),
- detail=label,
- collect=self.collect,
+ "Command failed with exitcode {}".format(exitcode), detail=label, collect=self.collect,
)
def execute_call(self, call):
diff --git a/src/buildstream/scriptelement.py b/src/buildstream/scriptelement.py
index d90e8b6ba..b93c36d1f 100644
--- a/src/buildstream/scriptelement.py
+++ b/src/buildstream/scriptelement.py
@@ -221,16 +221,12 @@ class ScriptElement(Element):
if not self.__layout:
# if no layout set, stage all dependencies into /
for build_dep in self.dependencies(Scope.BUILD, recurse=False):
- with self.timed_activity(
- "Staging {} at /".format(build_dep.name), silent_nested=True
- ):
+ with self.timed_activity("Staging {} at /".format(build_dep.name), silent_nested=True):
build_dep.stage_dependency_artifacts(sandbox, Scope.RUN, path="/")
with sandbox.batch(SandboxFlags.NONE):
for build_dep in self.dependencies(Scope.BUILD, recurse=False):
- with self.timed_activity(
- "Integrating {}".format(build_dep.name), silent_nested=True
- ):
+ with self.timed_activity("Integrating {}".format(build_dep.name), silent_nested=True):
for dep in build_dep.dependencies(Scope.RUN):
dep.integrate(sandbox)
else:
@@ -243,23 +239,15 @@ class ScriptElement(Element):
element = self.search(Scope.BUILD, item["element"])
if item["destination"] == "/":
- with self.timed_activity(
- "Staging {} at /".format(element.name), silent_nested=True
- ):
+ with self.timed_activity("Staging {} at /".format(element.name), silent_nested=True):
element.stage_dependency_artifacts(sandbox, Scope.RUN)
else:
with self.timed_activity(
- "Staging {} at {}".format(element.name, item["destination"]),
- silent_nested=True,
+ "Staging {} at {}".format(element.name, item["destination"]), silent_nested=True,
):
virtual_dstdir = sandbox.get_virtual_directory()
- virtual_dstdir.descend(
- *item["destination"].lstrip(os.sep).split(os.sep),
- create=True
- )
- element.stage_dependency_artifacts(
- sandbox, Scope.RUN, path=item["destination"]
- )
+ virtual_dstdir.descend(*item["destination"].lstrip(os.sep).split(os.sep), create=True)
+ element.stage_dependency_artifacts(sandbox, Scope.RUN, path=item["destination"])
with sandbox.batch(SandboxFlags.NONE):
for item in self.__layout:
@@ -272,16 +260,12 @@ class ScriptElement(Element):
# Integration commands can only be run for elements staged to /
if item["destination"] == "/":
- with self.timed_activity(
- "Integrating {}".format(element.name), silent_nested=True
- ):
+ with self.timed_activity("Integrating {}".format(element.name), silent_nested=True):
for dep in element.dependencies(Scope.RUN):
dep.integrate(sandbox)
install_root_path_components = self.__install_root.lstrip(os.sep).split(os.sep)
- sandbox.get_virtual_directory().descend(
- *install_root_path_components, create=True
- )
+ sandbox.get_virtual_directory().descend(*install_root_path_components, create=True)
def assemble(self, sandbox):
@@ -307,13 +291,9 @@ class ScriptElement(Element):
def __validate_layout(self):
if self.__layout:
# Cannot proceed if layout is used, but none are for "/"
- root_defined = any(
- [(entry["destination"] == "/") for entry in self.__layout]
- )
+ root_defined = any([(entry["destination"] == "/") for entry in self.__layout])
if not root_defined:
- raise ElementError(
- "{}: Using layout, but none are staged as '/'".format(self)
- )
+ raise ElementError("{}: Using layout, but none are staged as '/'".format(self))
# Cannot proceed if layout specifies an element that isn't part
# of the dependencies.
@@ -321,9 +301,7 @@ class ScriptElement(Element):
if item["element"]:
if not self.search(Scope.BUILD, item["element"]):
raise ElementError(
- "{}: '{}' in layout not found in dependencies".format(
- self, item["element"]
- )
+ "{}: '{}' in layout not found in dependencies".format(self, item["element"])
)
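
__validate_layout enforces two rules that are easier to read once collapsed: at least one layout entry must stage at "/", and every element named in the layout must be a known build dependency. A small standalone version of those checks (the layout entries and dependency names are made up):

def validate_layout(layout, dependency_names):
    # At least one entry must be staged at the sysroot.
    if not any(entry["destination"] == "/" for entry in layout):
        raise ValueError("Using layout, but none are staged as '/'")
    # Every named element must be a known dependency.
    for entry in layout:
        element = entry.get("element")
        if element and element not in dependency_names:
            raise ValueError("'{}' in layout not found in dependencies".format(element))

validate_layout(
    [{"element": "base.bst", "destination": "/"}, {"element": "app.bst", "destination": "/app"}],
    {"base.bst", "app.bst"},
)
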
diff --git a/src/buildstream/source.py b/src/buildstream/source.py
index f8de12bc7..59d78ba6e 100644
--- a/src/buildstream/source.py
+++ b/src/buildstream/source.py
@@ -200,19 +200,10 @@ class SourceError(BstError):
"""
def __init__(
- self,
- message: str,
- *,
- detail: Optional[str] = None,
- reason: Optional[str] = None,
- temporary: bool = False
+ self, message: str, *, detail: Optional[str] = None, reason: Optional[str] = None, temporary: bool = False
):
super().__init__(
- message,
- detail=detail,
- domain=ErrorDomain.SOURCE,
- reason=reason,
- temporary=temporary,
+ message, detail=detail, domain=ErrorDomain.SOURCE, reason=reason, temporary=temporary,
)
@@ -254,9 +245,7 @@ class SourceFetcher:
Implementors should raise :class:`.SourceError` if there is some
network error or if the source reference could not be matched.
"""
- raise ImplError(
- "SourceFetcher '{}' does not implement fetch()".format(type(self))
- )
+ raise ImplError("SourceFetcher '{}' does not implement fetch()".format(type(self)))
#############################################################
# Public Methods #
@@ -356,9 +345,7 @@ class Source(Plugin):
):
provenance = meta.config.get_provenance()
# Set element_name member before parent init, as needed for debug messaging
- self.__element_name = (
- meta.element_name
- ) # The name of the element owning this source
+ self.__element_name = meta.element_name # The name of the element owning this source
super().__init__(
"{}-{}".format(meta.element_name, meta.element_index),
context,
@@ -368,12 +355,8 @@ class Source(Plugin):
unique_id=unique_id,
)
- self.__element_index = (
- meta.element_index
- ) # The index of the source in the owning element's source list
- self.__element_kind = (
- meta.element_kind
- ) # The kind of the element owning this source
+ self.__element_index = meta.element_index # The index of the source in the owning element's source list
+ self.__element_kind = meta.element_kind # The kind of the element owning this source
self.__directory = meta.directory # Staging relative directory
self.__consistency = Consistency.INCONSISTENT # Cached consistency state
self.__meta_kind = meta.kind # The kind of this source, required for unpickling
@@ -381,9 +364,7 @@ class Source(Plugin):
self.__key = None # Cache key for source
# The alias_override is only set on a re-instantiated Source
- self.__alias_override = (
- alias_override # Tuple of alias and its override to use instead
- )
+ self.__alias_override = alias_override # Tuple of alias and its override to use instead
self.__expected_alias = None # The primary alias
# Set of marked download URLs
self.__marked_urls = set() # type: Set[str]
@@ -416,11 +397,7 @@ class Source(Plugin):
Returns:
(:class:`.Consistency`): The source consistency
"""
- raise ImplError(
- "Source plugin '{}' does not implement get_consistency()".format(
- self.get_kind()
- )
- )
+ raise ImplError("Source plugin '{}' does not implement get_consistency()".format(self.get_kind()))
def load_ref(self, node: MappingNode) -> None:
"""Loads the *ref* for this Source from the specified *node*.
@@ -438,9 +415,7 @@ class Source(Plugin):
*Since: 1.2*
"""
- raise ImplError(
- "Source plugin '{}' does not implement load_ref()".format(self.get_kind())
- )
+ raise ImplError("Source plugin '{}' does not implement load_ref()".format(self.get_kind()))
def get_ref(self) -> SourceRef:
"""Fetch the internal ref, however it is represented
@@ -458,9 +433,7 @@ class Source(Plugin):
Implementations *must* return a ``None`` value in the case that
the ref was not loaded. E.g. a ``(None, None)`` tuple is not acceptable.
"""
- raise ImplError(
- "Source plugin '{}' does not implement get_ref()".format(self.get_kind())
- )
+ raise ImplError("Source plugin '{}' does not implement get_ref()".format(self.get_kind()))
def set_ref(self, ref: SourceRef, node: MappingNode) -> None:
"""Applies the internal ref, however it is represented
@@ -478,9 +451,7 @@ class Source(Plugin):
Implementors must support the special ``None`` value here to
allow clearing any existing ref.
"""
- raise ImplError(
- "Source plugin '{}' does not implement set_ref()".format(self.get_kind())
- )
+ raise ImplError("Source plugin '{}' does not implement set_ref()".format(self.get_kind()))
def track(self, **kwargs) -> SourceRef:
"""Resolve a new ref from the plugin's track option
@@ -526,9 +497,7 @@ class Source(Plugin):
Implementors should raise :class:`.SourceError` if there is some
network error or if the source reference could not be matched.
"""
- raise ImplError(
- "Source plugin '{}' does not implement fetch()".format(self.get_kind())
- )
+ raise ImplError("Source plugin '{}' does not implement fetch()".format(self.get_kind()))
def stage(self, directory: Union[str, Directory]) -> None:
"""Stage the sources to a directory
@@ -545,9 +514,7 @@ class Source(Plugin):
Implementors should raise :class:`.SourceError` when encountering
some system error.
"""
- raise ImplError(
- "Source plugin '{}' does not implement stage()".format(self.get_kind())
- )
+ raise ImplError("Source plugin '{}' does not implement stage()".format(self.get_kind()))
def init_workspace(self, directory: str) -> None:
"""Initialises a new workspace
@@ -622,9 +589,7 @@ class Source(Plugin):
return self.__mirror_directory
- def translate_url(
- self, url: str, *, alias_override: Optional[str] = None, primary: bool = True
- ) -> str:
+ def translate_url(self, url: str, *, alias_override: Optional[str] = None, primary: bool = True) -> str:
"""Translates the given url which may be specified with an alias
into a fully qualified url.
@@ -689,8 +654,7 @@ class Source(Plugin):
expected_alias = _extract_alias(url)
assert (
- self.__expected_alias is None
- or self.__expected_alias == expected_alias
+ self.__expected_alias is None or self.__expected_alias == expected_alias
), "Primary URL marked twice with different URLs"
self.__expected_alias = expected_alias
@@ -801,9 +765,7 @@ class Source(Plugin):
# Source consistency interrogations are silent.
context = self._get_context()
with context.messenger.silence():
- self.__consistency = (
- self.get_consistency()
- ) # pylint: disable=assignment-from-no-return
+ self.__consistency = self.get_consistency() # pylint: disable=assignment-from-no-return
# Give the Source an opportunity to validate the cached
# sources as soon as the Source becomes Consistency.CACHED.
@@ -826,9 +788,7 @@ class Source(Plugin):
if self.BST_REQUIRES_PREVIOUS_SOURCES_FETCH:
self.__ensure_previous_sources(previous_sources)
with self.__stage_previous_sources(previous_sources) as staging_directory:
- self.__do_fetch(
- previous_sources_dir=self.__ensure_directory(staging_directory)
- )
+ self.__do_fetch(previous_sources_dir=self.__ensure_directory(staging_directory))
else:
self.__do_fetch()
@@ -842,9 +802,7 @@ class Source(Plugin):
if self.BST_KEY_REQUIRES_STAGE:
# _get_unique_key should be called before _stage
assert self.__digest is not None
- cas_dir = CasBasedDirectory(
- self._get_context().get_cascache(), digest=self.__digest
- )
+ cas_dir = CasBasedDirectory(self._get_context().get_cascache(), digest=self.__digest)
directory.import_files(cas_dir)
else:
self.stage(directory)
@@ -868,9 +826,7 @@ class Source(Plugin):
if self.BST_KEY_REQUIRES_STAGE:
key["unique"] = self._stage_into_cas()
else:
- key[
- "unique"
- ] = self.get_unique_key() # pylint: disable=assignment-from-no-return
+ key["unique"] = self.get_unique_key() # pylint: disable=assignment-from-no-return
return key
# _project_refs():
@@ -919,17 +875,12 @@ class Source(Plugin):
self.load_ref(ref_node)
except ImplError as e:
raise SourceError(
- "{}: Storing refs in project.refs is not supported by '{}' sources".format(
- self, self.get_kind()
- ),
+ "{}: Storing refs in project.refs is not supported by '{}' sources".format(self, self.get_kind()),
reason="unsupported-load-ref",
) from e
# If the main project overrides the ref, use the override
- if (
- project is not toplevel
- and toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS
- ):
+ if project is not toplevel and toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS:
refs = self._project_refs(toplevel)
ref_node = refs.lookup_ref(project.name, element_name, element_idx)
if ref_node is not None:
@@ -987,35 +938,22 @@ class Source(Plugin):
#
node = {}
if toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS:
- node = toplevel_refs.lookup_ref(
- project.name, element_name, element_idx, write=True
- )
+ node = toplevel_refs.lookup_ref(project.name, element_name, element_idx, write=True)
if project is toplevel and not node:
node = provenance._node
# Ensure the node is not from a junction
- if (
- not toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS
- and provenance._project is not toplevel
- ):
+ if not toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS and provenance._project is not toplevel:
if provenance._project is project:
- self.warn(
- "{}: Not persisting new reference in junctioned project".format(
- self
- )
- )
+ self.warn("{}: Not persisting new reference in junctioned project".format(self))
elif provenance._project is None:
assert provenance._filename == ""
assert provenance._shortname == ""
- raise SourceError(
- "{}: Error saving source reference to synthetic node.".format(self)
- )
+ raise SourceError("{}: Error saving source reference to synthetic node.".format(self))
else:
raise SourceError(
- "{}: Cannot track source in a fragment from a junction".format(
- provenance._shortname
- ),
+ "{}: Cannot track source in a fragment from a junction".format(provenance._shortname),
reason="tracking-junction-fragment",
)
@@ -1061,9 +999,7 @@ class Source(Plugin):
if type(step) is str: # pylint: disable=unidiomatic-typecheck
# handle dict container
if step not in container:
- if (
- type(next_step) is str
- ): # pylint: disable=unidiomatic-typecheck
+ if type(next_step) is str: # pylint: disable=unidiomatic-typecheck
container[step] = {}
else:
container[step] = []
@@ -1107,9 +1043,9 @@ class Source(Plugin):
roundtrip_file = roundtrip_cache.get(provenance._filename)
if not roundtrip_file:
- roundtrip_file = roundtrip_cache[
- provenance._filename
- ] = _yaml.roundtrip_load(provenance._filename, allow_missing=True)
+ roundtrip_file = roundtrip_cache[provenance._filename] = _yaml.roundtrip_load(
+ provenance._filename, allow_missing=True
+ )
# Get the value of the round trip file that we need to change
process_value(action, roundtrip_file, path, key, to_modify.get(key))
@@ -1123,10 +1059,7 @@ class Source(Plugin):
_yaml.roundtrip_dump(data, filename)
except OSError as e:
raise SourceError(
- "{}: Error saving source reference to '{}': {}".format(
- self, filename, e
- ),
- reason="save-ref-error",
+ "{}: Error saving source reference to '{}': {}".format(self, filename, e), reason="save-ref-error",
) from e
return True
@@ -1145,9 +1078,7 @@ class Source(Plugin):
if self.BST_REQUIRES_PREVIOUS_SOURCES_TRACK:
self.__ensure_previous_sources(previous_sources)
with self.__stage_previous_sources(previous_sources) as staging_directory:
- new_ref = self.__do_track(
- previous_sources_dir=self.__ensure_directory(staging_directory)
- )
+ new_ref = self.__do_track(previous_sources_dir=self.__ensure_directory(staging_directory))
else:
new_ref = self.__do_track()
@@ -1174,10 +1105,7 @@ class Source(Plugin):
# (bool): Whether this source requires access to previous sources
#
def _requires_previous_sources(self):
- return (
- self.BST_REQUIRES_PREVIOUS_SOURCES_TRACK
- or self.BST_REQUIRES_PREVIOUS_SOURCES_FETCH
- )
+ return self.BST_REQUIRES_PREVIOUS_SOURCES_TRACK or self.BST_REQUIRES_PREVIOUS_SOURCES_FETCH
# Returns the alias if it's defined in the project
def _get_alias(self):
@@ -1283,13 +1211,7 @@ class Source(Plugin):
meta.first_pass = self.__first_pass
- clone = source_kind(
- context,
- project,
- meta,
- alias_override=(alias, uri),
- unique_id=self._unique_id,
- )
+ clone = source_kind(context, project, meta, alias_override=(alias, uri), unique_id=self._unique_id,)
# Do the necessary post instantiation routines here
#
@@ -1407,9 +1329,7 @@ class Source(Plugin):
# NOTE: We are assuming here that tracking only requires substituting the
# first alias used
- for uri in reversed(
- project.get_alias_uris(alias, first_pass=self.__first_pass)
- ):
+ for uri in reversed(project.get_alias_uris(alias, first_pass=self.__first_pass)):
new_source = self.__clone_for_uri(uri)
try:
ref = new_source.track(**kwargs) # pylint: disable=assignment-from-none
@@ -1432,20 +1352,16 @@ class Source(Plugin):
os.makedirs(directory, exist_ok=True)
except OSError as e:
raise SourceError(
- "Failed to create staging directory: {}".format(e),
- reason="ensure-stage-dir-fail",
+ "Failed to create staging directory: {}".format(e), reason="ensure-stage-dir-fail",
) from e
else:
if self.__directory is not None:
try:
- directory = directory.descend(
- *self.__directory.lstrip(os.sep).split(os.sep), create=True
- )
+ directory = directory.descend(*self.__directory.lstrip(os.sep).split(os.sep), create=True)
except VirtualDirectoryError as e:
raise SourceError(
- "Failed to descend into staging directory: {}".format(e),
- reason="ensure-stage-dir-fail",
+ "Failed to descend into staging directory: {}".format(e), reason="ensure-stage-dir-fail",
) from e
return directory
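
A pattern repeated throughout source.py is a base-class method whose only body raises an implementation error naming the plugin kind, so a plugin that forgets an override fails with a precise message rather than a bare NotImplementedError. A generic sketch of the idea outside the BuildStream class hierarchy:

class ImplError(Exception):
    pass

class SourcePluginBase:
    def get_kind(self):
        return type(self).__name__.lower()

    def fetch(self, **kwargs):
        raise ImplError("Source plugin '{}' does not implement fetch()".format(self.get_kind()))

    def stage(self, directory):
        raise ImplError("Source plugin '{}' does not implement stage()".format(self.get_kind()))

class BrokenSource(SourcePluginBase):
    pass

try:
    BrokenSource().fetch()
except ImplError as e:
    print(e)  # Source plugin 'brokensource' does not implement fetch()
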
diff --git a/src/buildstream/storage/_casbaseddirectory.py b/src/buildstream/storage/_casbaseddirectory.py
index 3b248f3ae..9c5c179b0 100644
--- a/src/buildstream/storage/_casbaseddirectory.py
+++ b/src/buildstream/storage/_casbaseddirectory.py
@@ -99,15 +99,7 @@ class CasBasedDirectory(Directory):
_pb2_path_sep = "/"
_pb2_absolute_path_prefix = "/"
- def __init__(
- self,
- cas_cache,
- *,
- digest=None,
- parent=None,
- common_name="untitled",
- filename=None
- ):
+ def __init__(self, cas_cache, *, digest=None, parent=None, common_name="untitled", filename=None):
self.filename = filename
self.common_name = common_name
self.cas_cache = cas_cache
@@ -123,25 +115,16 @@ class CasBasedDirectory(Directory):
with open(self.cas_cache.objpath(digest), "rb") as f:
pb2_directory.ParseFromString(f.read())
except FileNotFoundError as e:
- raise VirtualDirectoryError(
- "Directory not found in local cache: {}".format(e)
- ) from e
+ raise VirtualDirectoryError("Directory not found in local cache: {}".format(e)) from e
for entry in pb2_directory.directories:
- self.index[entry.name] = IndexEntry(
- entry.name, _FileType.DIRECTORY, digest=entry.digest
- )
+ self.index[entry.name] = IndexEntry(entry.name, _FileType.DIRECTORY, digest=entry.digest)
for entry in pb2_directory.files:
self.index[entry.name] = IndexEntry(
- entry.name,
- _FileType.REGULAR_FILE,
- digest=entry.digest,
- is_executable=entry.is_executable,
+ entry.name, _FileType.REGULAR_FILE, digest=entry.digest, is_executable=entry.is_executable,
)
for entry in pb2_directory.symlinks:
- self.index[entry.name] = IndexEntry(
- entry.name, _FileType.SYMLINK, target=entry.target
- )
+ self.index[entry.name] = IndexEntry(entry.name, _FileType.SYMLINK, target=entry.target)
def _find_self_in_parent(self):
assert self.parent is not None
@@ -156,20 +139,14 @@ class CasBasedDirectory(Directory):
newdir = CasBasedDirectory(self.cas_cache, parent=self, filename=name)
- self.index[name] = IndexEntry(
- name, _FileType.DIRECTORY, buildstream_object=newdir
- )
+ self.index[name] = IndexEntry(name, _FileType.DIRECTORY, buildstream_object=newdir)
self.__invalidate_digest()
return newdir
def _add_file(self, basename, filename, modified=False, can_link=False):
- entry = IndexEntry(
- filename,
- _FileType.REGULAR_FILE,
- modified=modified or filename in self.index,
- )
+ entry = IndexEntry(filename, _FileType.REGULAR_FILE, modified=modified or filename in self.index,)
path = os.path.join(basename, filename)
entry.digest = self.cas_cache.add_object(path=path, link_directly=can_link)
entry.is_executable = os.access(path, os.X_OK)
@@ -178,14 +155,10 @@ class CasBasedDirectory(Directory):
self.__invalidate_digest()
def _copy_link_from_filesystem(self, basename, filename):
- self._add_new_link_direct(
- filename, os.readlink(os.path.join(basename, filename))
- )
+ self._add_new_link_direct(filename, os.readlink(os.path.join(basename, filename)))
def _add_new_link_direct(self, name, target):
- self.index[name] = IndexEntry(
- name, _FileType.SYMLINK, target=target, modified=name in self.index
- )
+ self.index[name] = IndexEntry(name, _FileType.SYMLINK, target=target, modified=name in self.index)
self.__invalidate_digest()
@@ -237,20 +210,13 @@ class CasBasedDirectory(Directory):
linklocation = entry.target
newpaths = linklocation.split(os.path.sep)
if os.path.isabs(linklocation):
- current_dir = current_dir.find_root().descend(
- *newpaths, follow_symlinks=True
- )
+ current_dir = current_dir.find_root().descend(*newpaths, follow_symlinks=True)
else:
- current_dir = current_dir.descend(
- *newpaths, follow_symlinks=True
- )
+ current_dir = current_dir.descend(*newpaths, follow_symlinks=True)
else:
- error = (
- "Cannot descend into {}, which is a '{}' in the directory {}"
- )
+ error = "Cannot descend into {}, which is a '{}' in the directory {}"
raise VirtualDirectoryError(
- error.format(path, current_dir.index[path].type, current_dir),
- reason="not-a-directory",
+ error.format(path, current_dir.index[path].type, current_dir), reason="not-a-directory",
)
else:
if path == ".":
@@ -265,8 +231,7 @@ class CasBasedDirectory(Directory):
else:
error = "'{}' not found in {}"
raise VirtualDirectoryError(
- error.format(path, str(current_dir)),
- reason="directory-not-found",
+ error.format(path, str(current_dir)), reason="directory-not-found",
)
return current_dir
@@ -297,9 +262,7 @@ class CasBasedDirectory(Directory):
fileListResult.overwritten.append(relative_pathname)
return True
- def _partial_import_cas_into_cas(
- self, source_directory, filter_callback, *, path_prefix="", origin=None, result
- ):
+ def _partial_import_cas_into_cas(self, source_directory, filter_callback, *, path_prefix="", origin=None, result):
""" Import files from a CAS-based directory. """
if origin is None:
origin = self
@@ -318,9 +281,7 @@ class CasBasedDirectory(Directory):
# we can import the whole source directory by digest instead
# of importing each directory entry individually.
subdir_digest = entry.get_digest()
- dest_entry = IndexEntry(
- name, _FileType.DIRECTORY, digest=subdir_digest
- )
+ dest_entry = IndexEntry(name, _FileType.DIRECTORY, digest=subdir_digest)
self.index[name] = dest_entry
self.__invalidate_digest()
@@ -337,9 +298,7 @@ class CasBasedDirectory(Directory):
else:
subdir = dest_entry.get_directory(self)
- subdir.__add_files_to_result(
- path_prefix=relative_pathname, result=result
- )
+ subdir.__add_files_to_result(path_prefix=relative_pathname, result=result)
else:
src_subdir = source_directory.descend(name)
if src_subdir == origin:
@@ -350,17 +309,11 @@ class CasBasedDirectory(Directory):
except VirtualDirectoryError:
filetype = self.index[name].type
raise VirtualDirectoryError(
- "Destination is a {}, not a directory: /{}".format(
- filetype, relative_pathname
- )
+ "Destination is a {}, not a directory: /{}".format(filetype, relative_pathname)
)
dest_subdir._partial_import_cas_into_cas(
- src_subdir,
- filter_callback,
- path_prefix=relative_pathname,
- origin=origin,
- result=result,
+ src_subdir, filter_callback, path_prefix=relative_pathname, origin=origin, result=result,
)
if filter_callback and not filter_callback(relative_pathname):
@@ -388,13 +341,7 @@ class CasBasedDirectory(Directory):
result.files_written.append(relative_pathname)
def import_files(
- self,
- external_pathspec,
- *,
- filter_callback=None,
- report_written=True,
- update_mtime=False,
- can_link=False
+ self, external_pathspec, *, filter_callback=None, report_written=True, update_mtime=False, can_link=False
):
""" See superclass Directory for arguments """
@@ -413,9 +360,7 @@ class CasBasedDirectory(Directory):
external_pathspec = CasBasedDirectory(self.cas_cache, digest=digest)
assert isinstance(external_pathspec, CasBasedDirectory)
- self._partial_import_cas_into_cas(
- external_pathspec, filter_callback, result=result
- )
+ self._partial_import_cas_into_cas(external_pathspec, filter_callback, result=result)
# TODO: No notice is taken of report_written or update_mtime.
# Current behaviour is to fully populate the report, which is inefficient,
@@ -425,11 +370,7 @@ class CasBasedDirectory(Directory):
def import_single_file(self, external_pathspec):
result = FileListResult()
- if self._check_replacement(
- os.path.basename(external_pathspec),
- os.path.dirname(external_pathspec),
- result,
- ):
+ if self._check_replacement(os.path.basename(external_pathspec), os.path.dirname(external_pathspec), result,):
self._add_file(
os.path.dirname(external_pathspec),
os.path.basename(external_pathspec),
@@ -495,9 +436,7 @@ class CasBasedDirectory(Directory):
f = StringIO(entry.target)
tarfile.addfile(tarinfo, f)
else:
- raise VirtualDirectoryError(
- "can not export file type {} to tar".format(entry.type)
- )
+ raise VirtualDirectoryError("can not export file type {} to tar".format(entry.type))
def _mark_changed(self):
""" It should not be possible to externally modify a CAS-based
@@ -588,12 +527,8 @@ class CasBasedDirectory(Directory):
"""
- file_list = list(
- filter(lambda i: i[1].type != _FileType.DIRECTORY, self.index.items())
- )
- directory_list = filter(
- lambda i: i[1].type == _FileType.DIRECTORY, self.index.items()
- )
+ file_list = list(filter(lambda i: i[1].type != _FileType.DIRECTORY, self.index.items()))
+ directory_list = filter(lambda i: i[1].type == _FileType.DIRECTORY, self.index.items())
if prefix != "":
yield prefix
@@ -603,9 +538,7 @@ class CasBasedDirectory(Directory):
for (k, v) in sorted(directory_list):
subdir = v.get_directory(self)
- yield from subdir._list_prefixed_relative_paths(
- prefix=os.path.join(prefix, k)
- )
+ yield from subdir._list_prefixed_relative_paths(prefix=os.path.join(prefix, k))
def walk(self):
"""Provide a list of dictionaries containing information about the files.
@@ -673,8 +606,7 @@ class CasBasedDirectory(Directory):
""" There is no underlying directory for a CAS-backed directory, so
throw an exception. """
raise VirtualDirectoryError(
- "_get_underlying_directory was called on a CAS-backed directory,"
- + " which has no underlying directory."
+ "_get_underlying_directory was called on a CAS-backed directory," + " which has no underlying directory."
)
# _get_digest():
@@ -712,9 +644,7 @@ class CasBasedDirectory(Directory):
symlinknode.name = name
symlinknode.target = entry.target
- self.__digest = self.cas_cache.add_object(
- buffer=pb2_directory.SerializeToString()
- )
+ self.__digest = self.cas_cache.add_object(buffer=pb2_directory.SerializeToString())
return self.__digest
@@ -729,9 +659,7 @@ class CasBasedDirectory(Directory):
linklocation = target.target
newpath = linklocation.split(os.path.sep)
if os.path.isabs(linklocation):
- return subdir.find_root()._exists(
- *newpath, follow_symlinks=True
- )
+ return subdir.find_root()._exists(*newpath, follow_symlinks=True)
return subdir._exists(*newpath, follow_symlinks=True)
return False
except VirtualDirectoryError:
@@ -750,8 +678,6 @@ class CasBasedDirectory(Directory):
if entry.type == _FileType.DIRECTORY:
subdir = self.descend(name)
- subdir.__add_files_to_result(
- path_prefix=relative_pathname, result=result
- )
+ subdir.__add_files_to_result(path_prefix=relative_pathname, result=result)
else:
result.files_written.append(relative_pathname)
diff --git a/src/buildstream/storage/_filebaseddirectory.py b/src/buildstream/storage/_filebaseddirectory.py
index 21515649d..0926c1b3a 100644
--- a/src/buildstream/storage/_filebaseddirectory.py
+++ b/src/buildstream/storage/_filebaseddirectory.py
@@ -58,9 +58,7 @@ class FileBasedDirectory(Directory):
""" See superclass Directory for arguments """
if follow_symlinks:
- ImplError(
- "FileBasedDirectory.Decend dose not implement follow_symlinks=True"
- )
+ ImplError("FileBasedDirectory.Decend dose not implement follow_symlinks=True")
current_dir = self
@@ -74,38 +72,24 @@ class FileBasedDirectory(Directory):
st = os.lstat(new_path)
if not stat.S_ISDIR(st.st_mode):
raise VirtualDirectoryError(
- "Cannot descend into '{}': '{}' is not a directory".format(
- path, new_path
- )
+ "Cannot descend into '{}': '{}' is not a directory".format(path, new_path)
)
except FileNotFoundError:
if create:
os.mkdir(new_path)
else:
- raise VirtualDirectoryError(
- "Cannot descend into '{}': '{}' does not exist".format(
- path, new_path
- )
- )
+ raise VirtualDirectoryError("Cannot descend into '{}': '{}' does not exist".format(path, new_path))
current_dir = FileBasedDirectory(new_path)
return current_dir
def import_files(
- self,
- external_pathspec,
- *,
- filter_callback=None,
- report_written=True,
- update_mtime=False,
- can_link=False
+ self, external_pathspec, *, filter_callback=None, report_written=True, update_mtime=False, can_link=False
):
""" See superclass Directory for arguments """
- from ._casbaseddirectory import (
- CasBasedDirectory,
- ) # pylint: disable=cyclic-import
+ from ._casbaseddirectory import CasBasedDirectory # pylint: disable=cyclic-import
if isinstance(external_pathspec, CasBasedDirectory):
if can_link and not update_mtime:
@@ -114,9 +98,7 @@ class FileBasedDirectory(Directory):
actionfunc = utils.safe_copy
import_result = FileListResult()
- self._import_files_from_cas(
- external_pathspec, actionfunc, filter_callback, result=import_result
- )
+ self._import_files_from_cas(external_pathspec, actionfunc, filter_callback, result=import_result)
else:
if isinstance(external_pathspec, Directory):
source_directory = external_pathspec.external_directory
@@ -144,15 +126,11 @@ class FileBasedDirectory(Directory):
cur_time = time.time()
for f in import_result.files_written:
- os.utime(
- os.path.join(self.external_directory, f), times=(cur_time, cur_time)
- )
+ os.utime(os.path.join(self.external_directory, f), times=(cur_time, cur_time))
return import_result
def import_single_file(self, external_pathspec):
- dstpath = os.path.join(
- self.external_directory, os.path.basename(external_pathspec)
- )
+ dstpath = os.path.join(self.external_directory, os.path.basename(external_pathspec))
result = FileListResult()
if os.path.exists(dstpath):
result.ignored.append(dstpath)
@@ -206,9 +184,7 @@ class FileBasedDirectory(Directory):
tarfile.addfile(tarinfo, f)
elif tarinfo.isdir():
tarfile.addfile(tarinfo)
- self.descend(*filename.split(os.path.sep)).export_to_tar(
- tarfile, arcname, mtime
- )
+ self.descend(*filename.split(os.path.sep)).export_to_tar(tarfile, arcname, mtime)
else:
tarfile.addfile(tarinfo)
@@ -230,8 +206,7 @@ class FileBasedDirectory(Directory):
return [
f
for f in list_relative_paths(self.external_directory)
- if _get_link_mtime(os.path.join(self.external_directory, f))
- != BST_ARBITRARY_TIMESTAMP
+ if _get_link_mtime(os.path.join(self.external_directory, f)) != BST_ARBITRARY_TIMESTAMP
]
def list_relative_paths(self):
@@ -272,9 +247,7 @@ class FileBasedDirectory(Directory):
else:
return _FileType.SPECIAL_FILE
- def _import_files_from_cas(
- self, source_directory, actionfunc, filter_callback, *, path_prefix="", result
- ):
+ def _import_files_from_cas(self, source_directory, actionfunc, filter_callback, *, path_prefix="", result):
""" Import files from a CAS-based directory. """
for name, entry in source_directory.index.items():
@@ -295,17 +268,11 @@ class FileBasedDirectory(Directory):
except VirtualDirectoryError:
filetype = self._get_filetype(name)
raise VirtualDirectoryError(
- "Destination is a {}, not a directory: /{}".format(
- filetype, relative_pathname
- )
+ "Destination is a {}, not a directory: /{}".format(filetype, relative_pathname)
)
dest_subdir._import_files_from_cas(
- src_subdir,
- actionfunc,
- filter_callback,
- path_prefix=relative_pathname,
- result=result,
+ src_subdir, actionfunc, filter_callback, path_prefix=relative_pathname, result=result,
)
if filter_callback and not filter_callback(relative_pathname):
diff --git a/src/buildstream/testing/__init__.py b/src/buildstream/testing/__init__.py
index 2fd882e18..67e96885a 100644
--- a/src/buildstream/testing/__init__.py
+++ b/src/buildstream/testing/__init__.py
@@ -32,10 +32,7 @@ try:
import pytest
except ImportError:
module_name = globals()["__name__"]
- msg = (
- "Could not import pytest:\n"
- "To use the {} module, you must have pytest installed.".format(module_name)
- )
+ msg = "Could not import pytest:\n" "To use the {} module, you must have pytest installed.".format(module_name)
raise ImportError(msg)
@@ -120,9 +117,7 @@ def sourcetests_collection_hook(session):
# Add the location of the source tests to the session's
# python_files config. Without this, pytest may filter out these
# tests during collection.
- session.config.addinivalue_line(
- "python_files", os.path.join(source_test_path, "*.py")
- )
+ session.config.addinivalue_line("python_files", os.path.join(source_test_path, "*.py"))
# If test invocation has specified specic tests, don't
# automatically collect templated tests.
if should_collect_tests(session.config):
diff --git a/src/buildstream/testing/_sourcetests/build_checkout.py b/src/buildstream/testing/_sourcetests/build_checkout.py
index e673702e2..d0abb0345 100644
--- a/src/buildstream/testing/_sourcetests/build_checkout.py
+++ b/src/buildstream/testing/_sourcetests/build_checkout.py
@@ -64,10 +64,7 @@ def test_fetch_build_checkout(cli, tmpdir, datafiles, strict, kind):
# Now check it out
result = cli.run(
- project=project,
- args=strict_args(
- ["artifact", "checkout", element_name, "--directory", checkout], strict
- ),
+ project=project, args=strict_args(["artifact", "checkout", element_name, "--directory", checkout], strict),
)
result.assert_success()
diff --git a/src/buildstream/testing/_sourcetests/fetch.py b/src/buildstream/testing/_sourcetests/fetch.py
index fc95c6e5b..e07bf8824 100644
--- a/src/buildstream/testing/_sourcetests/fetch.py
+++ b/src/buildstream/testing/_sourcetests/fetch.py
@@ -85,19 +85,13 @@ def test_fetch_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
update_project_configuration(project, {"ref-storage": ref_storage})
- generate_junction(
- tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline")
- )
+ generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline"))
if ref_storage == "project.refs":
result = cli.run(project=project, args=["source", "track", "junction.bst"])
result.assert_success()
- result = cli.run(
- project=project, args=["source", "track", "junction.bst:import-etc.bst"]
- )
+ result = cli.run(project=project, args=["source", "track", "junction.bst:import-etc.bst"])
result.assert_success()
- result = cli.run(
- project=project, args=["source", "fetch", "junction.bst:import-etc.bst"]
- )
+ result = cli.run(project=project, args=["source", "fetch", "junction.bst:import-etc.bst"])
result.assert_success()
diff --git a/src/buildstream/testing/_sourcetests/mirror.py b/src/buildstream/testing/_sourcetests/mirror.py
index a28bf3c00..b907a6ee4 100644
--- a/src/buildstream/testing/_sourcetests/mirror.py
+++ b/src/buildstream/testing/_sourcetests/mirror.py
@@ -163,17 +163,11 @@ def test_mirror_from_includes(cli, tmpdir, datafiles, kind):
config_project_dir = str(tmpdir.join("config"))
os.makedirs(config_project_dir, exist_ok=True)
config_project = {"name": "config"}
- _yaml.roundtrip_dump(
- config_project, os.path.join(config_project_dir, "project.conf")
- )
- extra_mirrors = {
- "mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],}}]
- }
+ _yaml.roundtrip_dump(config_project, os.path.join(config_project_dir, "project.conf"))
+ extra_mirrors = {"mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],}}]}
_yaml.roundtrip_dump(extra_mirrors, os.path.join(config_project_dir, "mirrors.yml"))
generate_junction(
- str(tmpdir.join("config_repo")),
- config_project_dir,
- os.path.join(element_dir, "config.bst"),
+ str(tmpdir.join("config_repo")), config_project_dir, os.path.join(element_dir, "config.bst"),
)
_set_project_includes_and_aliases(
@@ -217,22 +211,14 @@ def test_mirror_junction_from_includes(cli, tmpdir, datafiles, kind):
config_project_dir = str(tmpdir.join("config"))
os.makedirs(config_project_dir, exist_ok=True)
config_project = {"name": "config"}
- _yaml.roundtrip_dump(
- config_project, os.path.join(config_project_dir, "project.conf")
- )
- extra_mirrors = {
- "mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],}}]
- }
+ _yaml.roundtrip_dump(config_project, os.path.join(config_project_dir, "project.conf"))
+ extra_mirrors = {"mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],}}]}
_yaml.roundtrip_dump(extra_mirrors, os.path.join(config_project_dir, "mirrors.yml"))
generate_junction(
- str(tmpdir.join("config_repo")),
- config_project_dir,
- os.path.join(element_dir, "config.bst"),
+ str(tmpdir.join("config_repo")), config_project_dir, os.path.join(element_dir, "config.bst"),
)
- _set_project_includes_and_aliases(
- project_dir, ["config.bst:mirrors.yml"], {alias: upstream_map + "/"}
- )
+ _set_project_includes_and_aliases(project_dir, ["config.bst:mirrors.yml"], {alias: upstream_map + "/"})
# Now make the upstream unavailable.
os.rename(upstream_repo.repo, "{}.bak".format(upstream_repo.repo))
diff --git a/src/buildstream/testing/_sourcetests/source_determinism.py b/src/buildstream/testing/_sourcetests/source_determinism.py
index 465afa23b..d829984a8 100644
--- a/src/buildstream/testing/_sourcetests/source_determinism.py
+++ b/src/buildstream/testing/_sourcetests/source_determinism.py
@@ -49,12 +49,8 @@ def create_test_directory(*path, mode=0o644):
@pytest.mark.integration
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
-@pytest.mark.skipif(
- HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox, Must Fix"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
+@pytest.mark.skipif(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox, Must Fix")
def test_deterministic_source_umask(cli, tmpdir, datafiles, kind):
project = str(datafiles)
element_name = "list.bst"
@@ -96,10 +92,7 @@ def test_deterministic_source_umask(cli, tmpdir, datafiles, kind):
result = cli.run(project=project, args=["build", element_name])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkoutdir],)
result.assert_success()
with open(os.path.join(checkoutdir, "ls-l"), "r") as f:
diff --git a/src/buildstream/testing/_sourcetests/track.py b/src/buildstream/testing/_sourcetests/track.py
index c857e246d..8c54f6259 100644
--- a/src/buildstream/testing/_sourcetests/track.py
+++ b/src/buildstream/testing/_sourcetests/track.py
@@ -138,18 +138,14 @@ def test_track_recurse(cli, tmpdir, datafiles, kind, amount):
assert states[element_name] == "no reference"
# Now first try to track it
- result = cli.run(
- project=project, args=["source", "track", "--deps", "all", last_element_name]
- )
+ result = cli.run(project=project, args=["source", "track", "--deps", "all", last_element_name])
result.assert_success()
# And now fetch it: The Source has probably already cached the
# latest ref locally, but it is not required to have cached
# the associated content of the latest ref at track time, that
# is the job of fetch.
- result = cli.run(
- project=project, args=["source", "fetch", "--deps", "all", last_element_name]
- )
+ result = cli.run(project=project, args=["source", "fetch", "--deps", "all", last_element_name])
result.assert_success()
# Assert that the base is buildable and the rest are waiting
@@ -177,9 +173,7 @@ def test_track_recurse_except(cli, tmpdir, datafiles, kind):
# Write out our test targets
generate_element(repo, os.path.join(element_path, element_dep_name))
- generate_element(
- repo, os.path.join(element_path, element_target_name), dep_name=element_dep_name
- )
+ generate_element(repo, os.path.join(element_path, element_target_name), dep_name=element_dep_name)
# Assert that a fetch is needed
states = cli.get_element_states(project, [element_target_name])
@@ -188,16 +182,7 @@ def test_track_recurse_except(cli, tmpdir, datafiles, kind):
# Now first try to track it
result = cli.run(
- project=project,
- args=[
- "source",
- "track",
- "--deps",
- "all",
- "--except",
- element_dep_name,
- element_target_name,
- ],
+ project=project, args=["source", "track", "--deps", "all", "--except", element_dep_name, element_target_name,],
)
result.assert_success()
@@ -205,9 +190,7 @@ def test_track_recurse_except(cli, tmpdir, datafiles, kind):
# latest ref locally, but it is not required to have cached
# the associated content of the latest ref at track time, that
# is the job of fetch.
- result = cli.run(
- project=project, args=["source", "fetch", "--deps", "none", element_target_name]
- )
+ result = cli.run(project=project, args=["source", "fetch", "--deps", "none", element_target_name])
result.assert_success()
# Assert that the dependency is buildable and the target is waiting
@@ -233,25 +216,17 @@ def test_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
generate_element(repo, repo_element_path)
generate_junction(
- str(tmpdir.join("junction_repo")),
- subproject_path,
- junction_path,
- store_ref=False,
+ str(tmpdir.join("junction_repo")), subproject_path, junction_path, store_ref=False,
)
# Track the junction itself first.
result = cli.run(project=project, args=["source", "track", "junction.bst"])
result.assert_success()
- assert (
- cli.get_element_state(project, "junction.bst:import-etc-repo.bst")
- == "no reference"
- )
+ assert cli.get_element_state(project, "junction.bst:import-etc-repo.bst") == "no reference"
# Track the cross junction element. -J is not given, it is implied.
- result = cli.run(
- project=project, args=["source", "track", "junction.bst:import-etc-repo.bst"]
- )
+ result = cli.run(project=project, args=["source", "track", "junction.bst:import-etc-repo.bst"])
if ref_storage == "inline":
# This is not allowed to track cross junction without project.refs.
@@ -259,10 +234,7 @@ def test_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
else:
result.assert_success()
- assert (
- cli.get_element_state(project, "junction.bst:import-etc-repo.bst")
- == "buildable"
- )
+ assert cli.get_element_state(project, "junction.bst:import-etc-repo.bst") == "buildable"
assert os.path.exists(os.path.join(project, "project.refs"))
@@ -354,10 +326,7 @@ def test_track_include_junction(cli, tmpdir, datafiles, ref_storage, kind):
_yaml.roundtrip_dump(sources, os.path.join(sub_element_path, "sources.yml"))
generate_junction(
- str(tmpdir.join("junction_repo")),
- subproject_path,
- junction_path,
- store_ref=True,
+ str(tmpdir.join("junction_repo")), subproject_path, junction_path, store_ref=True,
)
result = cli.run(project=project, args=["source", "track", "junction.bst"])
@@ -374,10 +343,7 @@ def test_track_include_junction(cli, tmpdir, datafiles, ref_storage, kind):
# FIXME: We should expect an error. But only a warning is emitted
# result.assert_main_error(ErrorDomain.SOURCE, 'tracking-junction-fragment')
- assert (
- "junction.bst:elements/sources.yml: Cannot track source in a fragment from a junction"
- in result.stderr
- )
+ assert "junction.bst:elements/sources.yml: Cannot track source in a fragment from a junction" in result.stderr
else:
assert os.path.exists(os.path.join(project, "project.refs"))
@@ -401,15 +367,10 @@ def test_track_junction_included(cli, tmpdir, datafiles, ref_storage, kind):
subproject_path = os.path.join(project, "files", "sub-project")
junction_path = os.path.join(element_path, "junction.bst")
- update_project_configuration(
- project, {"ref-storage": ref_storage, "(@)": ["junction.bst:test.yml"]}
- )
+ update_project_configuration(project, {"ref-storage": ref_storage, "(@)": ["junction.bst:test.yml"]})
generate_junction(
- str(tmpdir.join("junction_repo")),
- subproject_path,
- junction_path,
- store_ref=False,
+ str(tmpdir.join("junction_repo")), subproject_path, junction_path, store_ref=False,
)
result = cli.run(project=project, args=["source", "track", "junction.bst"])
diff --git a/src/buildstream/testing/_sourcetests/track_cross_junction.py b/src/buildstream/testing/_sourcetests/track_cross_junction.py
index 470b67e8d..e69e25be9 100644
--- a/src/buildstream/testing/_sourcetests/track_cross_junction.py
+++ b/src/buildstream/testing/_sourcetests/track_cross_junction.py
@@ -115,15 +115,11 @@ def test_cross_junction_multiple_projects(cli, tmpdir, kind):
# Create junctions for projects a and b in main.
junction_a = "{}.bst".format(project_a)
junction_a_path = os.path.join(project, "elements", junction_a)
- generate_junction(
- tmpdir.join("repo_a"), project_a_path, junction_a_path, store_ref=False
- )
+ generate_junction(tmpdir.join("repo_a"), project_a_path, junction_a_path, store_ref=False)
junction_b = "{}.bst".format(project_b)
junction_b_path = os.path.join(project, "elements", junction_b)
- generate_junction(
- tmpdir.join("repo_b"), project_b_path, junction_b_path, store_ref=False
- )
+ generate_junction(tmpdir.join("repo_b"), project_b_path, junction_b_path, store_ref=False)
# Track the junctions.
result = cli.run(project=project, args=["source", "track", junction_a, junction_b])
@@ -138,15 +134,7 @@ def test_cross_junction_multiple_projects(cli, tmpdir, kind):
# Track without following junctions. But explicitly also track the elements in project a.
result = cli.run(
- project=project,
- args=[
- "source",
- "track",
- "--deps",
- "all",
- all_bst,
- "{}:{}".format(junction_a, stack_a),
- ],
+ project=project, args=["source", "track", "--deps", "all", all_bst, "{}:{}".format(junction_a, stack_a),],
)
result.assert_success()
@@ -169,9 +157,7 @@ def test_track_exceptions(cli, tmpdir, kind):
junction_a = "{}.bst".format(project_a)
junction_a_path = os.path.join(project, "elements", junction_a)
- generate_junction(
- tmpdir.join("repo_a"), project_a_path, junction_a_path, store_ref=False
- )
+ generate_junction(tmpdir.join("repo_a"), project_a_path, junction_a_path, store_ref=False)
result = cli.run(project=project, args=["source", "track", junction_a])
result.assert_success()
diff --git a/src/buildstream/testing/_sourcetests/utils.py b/src/buildstream/testing/_sourcetests/utils.py
index ca245a57d..1ceefa3ce 100644
--- a/src/buildstream/testing/_sourcetests/utils.py
+++ b/src/buildstream/testing/_sourcetests/utils.py
@@ -28,10 +28,7 @@ try:
import pytest
except ImportError:
module_name = globals()["__name__"]
- msg = (
- "Could not import pytest:\n"
- "To use the {} module, you must have pytest installed.".format(module_name)
- )
+ msg = "Could not import pytest:\n" "To use the {} module, you must have pytest installed.".format(module_name)
raise ImportError(msg)
from buildstream import _yaml
@@ -77,11 +74,7 @@ def add_plugins_conf(project, plugin_kind):
if plugin_package is not None:
project_conf["plugins"] = [
- {
- "origin": "pip",
- "package-name": plugin_package,
- "sources": {plugin_kind: 0,},
- },
+ {"origin": "pip", "package-name": plugin_package, "sources": {plugin_kind: 0,},},
]
_yaml.roundtrip_dump(project_conf, project_conf_file)
diff --git a/src/buildstream/testing/_sourcetests/workspace.py b/src/buildstream/testing/_sourcetests/workspace.py
index 7cc308006..149723069 100644
--- a/src/buildstream/testing/_sourcetests/workspace.py
+++ b/src/buildstream/testing/_sourcetests/workspace.py
@@ -49,9 +49,7 @@ class WorkspaceCreator:
self.workspace_cmd = os.path.join(self.project_path, "workspace_cmd")
- def create_workspace_element(
- self, kind, track, suffix="", workspace_dir=None, element_attrs=None
- ):
+ def create_workspace_element(self, kind, track, suffix="", workspace_dir=None, element_attrs=None):
element_name = "workspace-test-{}{}.bst".format(kind, suffix)
element_path = os.path.join(self.project_path, "elements")
if not workspace_dir:
@@ -73,9 +71,7 @@ class WorkspaceCreator:
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
return element_name, element_path, workspace_dir
- def create_workspace_elements(
- self, kinds, track, suffixs=None, workspace_dir_usr=None, element_attrs=None
- ):
+ def create_workspace_elements(self, kinds, track, suffixs=None, workspace_dir_usr=None, element_attrs=None):
element_tuples = []
@@ -92,9 +88,7 @@ class WorkspaceCreator:
element_tuples.append((element_name, workspace_dir))
# Assert that there is no reference, a track & fetch is needed
- states = self.cli.get_element_states(
- self.project_path, [e for e, _ in element_tuples]
- )
+ states = self.cli.get_element_states(self.project_path, [e for e, _ in element_tuples])
if track:
assert not any(states[e] != "no reference" for e, _ in element_tuples)
else:
@@ -103,18 +97,10 @@ class WorkspaceCreator:
return element_tuples
def open_workspaces(
- self,
- kinds,
- track,
- suffixs=None,
- workspace_dir=None,
- element_attrs=None,
- no_checkout=False,
+ self, kinds, track, suffixs=None, workspace_dir=None, element_attrs=None, no_checkout=False,
):
- element_tuples = self.create_workspace_elements(
- kinds, track, suffixs, workspace_dir, element_attrs
- )
+ element_tuples = self.create_workspace_elements(kinds, track, suffixs, workspace_dir, element_attrs)
os.makedirs(self.workspace_cmd, exist_ok=True)
# Now open the workspace, this should have the effect of automatically
@@ -129,20 +115,14 @@ class WorkspaceCreator:
_, workspace_dir = element_tuples[0]
args.extend(["--directory", workspace_dir])
- args.extend(
- [element_name for element_name, workspace_dir_suffix in element_tuples]
- )
- result = self.cli.run(
- cwd=self.workspace_cmd, project=self.project_path, args=args
- )
+ args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
+ result = self.cli.run(cwd=self.workspace_cmd, project=self.project_path, args=args)
result.assert_success()
if not no_checkout:
# Assert that we are now buildable because the source is now cached.
- states = self.cli.get_element_states(
- self.project_path, [e for e, _ in element_tuples]
- )
+ states = self.cli.get_element_states(self.project_path, [e for e, _ in element_tuples])
assert not any(states[e] != "buildable" for e, _ in element_tuples)
# Check that the executable hello file is found in each workspace
@@ -166,9 +146,7 @@ def open_workspace(
no_checkout=False,
):
workspace_object = WorkspaceCreator(cli, tmpdir, datafiles, project_path)
- workspaces = workspace_object.open_workspaces(
- (kind,), track, (suffix,), workspace_dir, element_attrs, no_checkout
- )
+ workspaces = workspace_object.open_workspaces((kind,), track, (suffix,), workspace_dir, element_attrs, no_checkout)
assert len(workspaces) == 1
element_name, workspace = workspaces[0]
return element_name, workspace_object.project_path, workspace
diff --git a/src/buildstream/testing/_utils/junction.py b/src/buildstream/testing/_utils/junction.py
index ddfbead55..cfc5898a9 100644
--- a/src/buildstream/testing/_utils/junction.py
+++ b/src/buildstream/testing/_utils/junction.py
@@ -49,9 +49,7 @@ class _SimpleGit(Repo):
return self.latest_commit()
def latest_commit(self):
- return self._run_git(
- "rev-parse", "HEAD", stdout=subprocess.PIPE, universal_newlines=True,
- ).stdout.strip()
+ return self._run_git("rev-parse", "HEAD", stdout=subprocess.PIPE, universal_newlines=True,).stdout.strip()
def source_config(self, ref=None):
return self.source_config_extra(ref)
diff --git a/src/buildstream/testing/integration.py b/src/buildstream/testing/integration.py
index 5734c6c82..584d7da1b 100644
--- a/src/buildstream/testing/integration.py
+++ b/src/buildstream/testing/integration.py
@@ -51,9 +51,7 @@ def assert_contains(directory, expected):
missing = set(expected)
missing.difference_update(walk_dir(directory))
if missing:
- raise AssertionError(
- "Missing {} expected elements from list: {}".format(len(missing), missing)
- )
+ raise AssertionError("Missing {} expected elements from list: {}".format(len(missing), missing))
class IntegrationCache:
diff --git a/src/buildstream/testing/runcli.py b/src/buildstream/testing/runcli.py
index 9cded9f9e..6c9197d0d 100644
--- a/src/buildstream/testing/runcli.py
+++ b/src/buildstream/testing/runcli.py
@@ -62,9 +62,7 @@ from buildstream._protos.buildstream.v2 import artifact_pb2
# Wrapper for the click.testing result
class Result:
- def __init__(
- self, exit_code=None, exception=None, exc_info=None, output=None, stderr=None
- ):
+ def __init__(self, exit_code=None, exception=None, exc_info=None, output=None, stderr=None):
self.exit_code = exit_code
self.exc = exception
self.exc_info = exc_info
@@ -126,9 +124,7 @@ class Result:
# Raises:
# (AssertionError): If any of the assertions fail
#
- def assert_main_error(
- self, error_domain, error_reason, fail_message="", *, debug=False
- ):
+ def assert_main_error(self, error_domain, error_reason, fail_message="", *, debug=False):
if debug:
print(
"""
@@ -137,10 +133,7 @@ class Result:
Domain: {}
Reason: {}
""".format(
- self.exit_code,
- self.exception,
- self.exception.domain,
- self.exception.reason,
+ self.exit_code, self.exception, self.exception.domain, self.exception.reason,
)
)
assert self.exit_code == -1, fail_message
@@ -203,9 +196,7 @@ class Result:
# (list): A list of element names in the order which they first appeared in the result
#
def get_start_order(self, activity):
- results = re.findall(
- r"\[\s*{}:(\S+)\s*\]\s*START\s*.*\.log".format(activity), self.stderr
- )
+ results = re.findall(r"\[\s*{}:(\S+)\s*\]\s*START\s*.*\.log".format(activity), self.stderr)
if results is None:
return []
return list(results)
@@ -228,18 +219,14 @@ class Result:
return list(tracked)
def get_pushed_elements(self):
- pushed = re.findall(
- r"\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed artifact", self.stderr
- )
+ pushed = re.findall(r"\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed artifact", self.stderr)
if pushed is None:
return []
return list(pushed)
def get_pulled_elements(self):
- pulled = re.findall(
- r"\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled artifact", self.stderr
- )
+ pulled = re.findall(r"\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled artifact", self.stderr)
if pulled is None:
return []
@@ -345,9 +332,7 @@ class Cli:
bst_args += ["--no-verbose"]
if configure:
- config_file = stack.enter_context(
- configured(self.directory, self.config)
- )
+ config_file = stack.enter_context(configured(self.directory, self.config))
bst_args += ["--config", config_file]
if project:
@@ -377,11 +362,7 @@ class Cli:
# Some informative stdout we can observe when anything fails
if self.verbose:
command = "bst " + " ".join(bst_args)
- print(
- "BuildStream exited with code {} for invocation:\n\t{}".format(
- result.exit_code, command
- )
- )
+ print("BuildStream exited with code {} for invocation:\n\t{}".format(result.exit_code, command))
if result.output:
print("Program output was:\n{}".format(result.output))
if result.stderr:
@@ -431,13 +412,7 @@ class Cli:
out, err = capture.readouterr()
capture.stop_capturing()
- return Result(
- exit_code=exit_code,
- exception=exception,
- exc_info=exc_info,
- output=out,
- stderr=err,
- )
+ return Result(exit_code=exit_code, exception=exception, exc_info=exc_info, output=out, stderr=err,)
# Fetch an element state by name by
# invoking bst show on the project with the CLI
@@ -447,9 +422,7 @@ class Cli:
#
def get_element_state(self, project, element_name):
result = self.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{state}", element_name],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{state}", element_name],
)
result.assert_success()
return result.output.strip()
@@ -460,9 +433,7 @@ class Cli:
#
def get_element_states(self, project, targets, deps="all"):
result = self.run(
- project=project,
- silent=True,
- args=["show", "--deps", deps, "--format", "%{name}||%{state}", *targets],
+ project=project, silent=True, args=["show", "--deps", deps, "--format", "%{name}||%{state}", *targets],
)
result.assert_success()
lines = result.output.splitlines()
@@ -477,9 +448,7 @@ class Cli:
#
def get_element_key(self, project, element_name):
result = self.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{full-key}", element_name],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{full-key}", element_name],
)
result.assert_success()
return result.output.strip()
@@ -488,9 +457,7 @@ class Cli:
#
def get_element_config(self, project, element_name):
result = self.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{config}", element_name],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{config}", element_name],
)
result.assert_success()
@@ -504,9 +471,7 @@ class Cli:
except_ = []
args = ["show", "--deps", scope, "--format", "%{name}"]
- args += list(
- itertools.chain.from_iterable(zip(itertools.repeat("--except"), except_))
- )
+ args += list(itertools.chain.from_iterable(zip(itertools.repeat("--except"), except_)))
result = self.run(project=project, silent=True, args=args + elements)
result.assert_success()
@@ -634,9 +599,7 @@ class CliRemote(CliIntegration):
#
# Returns a list of configured services (by names).
#
- def ensure_services(
- self, actions=True, execution=True, storage=True, artifacts=False, sources=False
- ):
+ def ensure_services(self, actions=True, execution=True, storage=True, artifacts=False, sources=False):
# Build a list of configured services by name:
configured_services = []
if not self.config:
@@ -712,9 +675,7 @@ class TestArtifact:
# cas = CASCache(str(cache_dir))
artifact_ref = element.get_artifact_name(element_key)
- return os.path.exists(
- os.path.join(cache_dir, "artifacts", "refs", artifact_ref)
- )
+ return os.path.exists(os.path.join(cache_dir, "artifacts", "refs", artifact_ref))
# get_digest():
#
@@ -815,10 +776,7 @@ def cli_integration(tmpdir, integration_cache):
# We want to cache sources for integration tests more permanently,
# to avoid downloading the huge base-sdk repeatedly
fixture.configure(
- {
- "cachedir": integration_cache.cachedir,
- "sourcedir": integration_cache.sources,
- }
+ {"cachedir": integration_cache.cachedir, "sourcedir": integration_cache.sources,}
)
yield fixture
diff --git a/src/buildstream/types.py b/src/buildstream/types.py
index 2a27891ba..180044dbd 100644
--- a/src/buildstream/types.py
+++ b/src/buildstream/types.py
@@ -68,18 +68,14 @@ class FastEnum(metaclass=MetaFastEnum):
def __eq__(self, other):
if self.__class__ is not other.__class__:
- raise ValueError(
- "Unexpected comparison between {} and {}".format(self, repr(other))
- )
+ raise ValueError("Unexpected comparison between {} and {}".format(self, repr(other)))
# Enums instances are unique, so creating an instance with the same value as another will just
# send back the other one, hence we can use an identity comparison, which is much faster than '=='
return self is other
def __ne__(self, other):
if self.__class__ is not other.__class__:
- raise ValueError(
- "Unexpected comparison between {} and {}".format(self, repr(other))
- )
+ raise ValueError("Unexpected comparison between {} and {}".format(self, repr(other)))
return self is not other
def __hash__(self):
@@ -146,16 +142,12 @@ class Consistency(FastEnum):
def __ge__(self, other):
if self.__class__ is not other.__class__:
- raise ValueError(
- "Unexpected comparison between {} and {}".format(self, repr(other))
- )
+ raise ValueError("Unexpected comparison between {} and {}".format(self, repr(other)))
return self.value >= other.value
def __lt__(self, other):
if self.__class__ is not other.__class__:
- raise ValueError(
- "Unexpected comparison between {} and {}".format(self, repr(other))
- )
+ raise ValueError("Unexpected comparison between {} and {}".format(self, repr(other)))
return self.value < other.value
diff --git a/src/buildstream/utils.py b/src/buildstream/utils.py
index ec57b7f7f..1f16837df 100644
--- a/src/buildstream/utils.py
+++ b/src/buildstream/utils.py
@@ -250,9 +250,7 @@ def sha256sum(filename: str) -> str:
h.update(chunk)
except OSError as e:
- raise UtilError(
- "Failed to get a checksum of file '{}': {}".format(filename, e)
- ) from e
+ raise UtilError("Failed to get a checksum of file '{}': {}".format(filename, e)) from e
return h.hexdigest()
@@ -277,9 +275,7 @@ def safe_copy(src: str, dest: str, *, result: Optional[FileListResult] = None) -
os.unlink(dest)
except OSError as e:
if e.errno != errno.ENOENT:
- raise UtilError(
- "Failed to remove destination file '{}': {}".format(dest, e)
- ) from e
+ raise UtilError("Failed to remove destination file '{}': {}".format(dest, e)) from e
shutil.copyfile(src, dest)
try:
@@ -298,9 +294,7 @@ def safe_copy(src: str, dest: str, *, result: Optional[FileListResult] = None) -
raise UtilError("Failed to copy '{} -> {}': {}".format(src, dest, e)) from e
-def safe_link(
- src: str, dest: str, *, result: Optional[FileListResult] = None, _unlink=False
-) -> None:
+def safe_link(src: str, dest: str, *, result: Optional[FileListResult] = None, _unlink=False) -> None:
"""Try to create a hardlink, but resort to copying in the case of cross device links.
Args:
@@ -318,9 +312,7 @@ def safe_link(
os.unlink(dest)
except OSError as e:
if e.errno != errno.ENOENT:
- raise UtilError(
- "Failed to remove destination file '{}': {}".format(dest, e)
- ) from e
+ raise UtilError("Failed to remove destination file '{}': {}".format(dest, e)) from e
# If we can't link it due to cross-device hardlink, copy
try:
@@ -493,9 +485,7 @@ def get_host_tool(name: str) -> str:
program_path = shutil.which(name, path=search_path)
if not program_path:
- raise ProgramNotFoundError(
- "Did not find '{}' in PATH: {}".format(name, search_path)
- )
+ raise ProgramNotFoundError("Did not find '{}' in PATH: {}".format(name, search_path))
return program_path
@@ -532,12 +522,7 @@ def get_bst_version() -> Tuple[int, int]:
)
-def move_atomic(
- source: Union[Path, str],
- destination: Union[Path, str],
- *,
- ensure_parents: bool = True
-) -> None:
+def move_atomic(source: Union[Path, str], destination: Union[Path, str], *, ensure_parents: bool = True) -> None:
"""Move the source to the destination using atomic primitives.
This uses `os.rename` to move a file or directory to a new destination.
@@ -613,9 +598,7 @@ def save_file_atomic(
# This feature has been proposed for upstream Python in the past, e.g.:
# https://bugs.python.org/issue8604
- assert os.path.isabs(
- filename
- ), "The utils.save_file_atomic() parameter ``filename`` must be an absolute path"
+ assert os.path.isabs(filename), "The utils.save_file_atomic() parameter ``filename`` must be an absolute path"
if tempdir is None:
tempdir = os.path.dirname(filename)
fd, tempname = tempfile.mkstemp(dir=tempdir)
@@ -639,9 +622,7 @@ def save_file_atomic(
except FileNotFoundError:
pass
except OSError as e:
- raise UtilError(
- "Failed to cleanup temporary file {}: {}".format(tempname, e)
- ) from e
+ raise UtilError("Failed to cleanup temporary file {}: {}".format(tempname, e)) from e
try:
with _signals.terminator(cleanup_tempfile):
@@ -702,9 +683,7 @@ def _get_volume_size(path):
try:
usage = shutil.disk_usage(path)
except OSError as e:
- raise UtilError(
- "Failed to retrieve stats on volume for path '{}': {}".format(path, e)
- ) from e
+ raise UtilError("Failed to retrieve stats on volume for path '{}': {}".format(path, e)) from e
return usage.total, usage.free
@@ -794,11 +773,7 @@ def _force_rmtree(rootpath, **kwargs):
try:
os.chmod(path, 0o755)
except OSError as e:
- raise UtilError(
- "Failed to ensure write permission on file '{}': {}".format(
- path, e
- )
- )
+ raise UtilError("Failed to ensure write permission on file '{}': {}".format(path, e))
try:
shutil.rmtree(rootpath, **kwargs)
@@ -824,10 +799,7 @@ def _copy_directories(srcdir, destdir, target):
os.makedirs(new_dir)
yield (new_dir, mode)
else:
- raise UtilError(
- "Source directory tree has file where "
- "directory expected: {}".format(old_dir)
- )
+ raise UtilError("Source directory tree has file where " "directory expected: {}".format(old_dir))
else:
if not os.access(new_dir, os.W_OK):
# If the destination directory is not writable, change permissions to make it
@@ -862,9 +834,7 @@ def _ensure_real_directory(root, path):
else:
filetype = "special file"
- raise UtilError(
- "Destination is a {}, not a directory: {}".format(filetype, relpath)
- )
+ raise UtilError("Destination is a {}, not a directory: {}".format(filetype, relpath))
except FileNotFoundError:
os.makedirs(destpath)
@@ -886,13 +856,7 @@ def _ensure_real_directory(root, path):
#
#
def _process_list(
- srcdir,
- destdir,
- actionfunc,
- result,
- filter_callback=None,
- ignore_missing=False,
- report_written=False,
+ srcdir, destdir, actionfunc, result, filter_callback=None, ignore_missing=False, report_written=False,
):
# Keep track of directory permissions, since these need to be set
@@ -976,9 +940,7 @@ def _process_list(
else:
# Unsupported type.
- raise UtilError(
- "Cannot extract {} into staging-area. Unsupported type.".format(srcpath)
- )
+ raise UtilError("Cannot extract {} into staging-area. Unsupported type.".format(srcpath))
# Write directory permissions now that all files have been written
for d, perms in permissions:
@@ -1085,9 +1047,7 @@ def _tempdir(suffix="", prefix="tmp", dir=None): # pylint: disable=redefined-bu
# on SIGTERM.
#
@contextmanager
-def _tempnamedfile(
- suffix="", prefix="tmp", dir=None
-): # pylint: disable=redefined-builtin
+def _tempnamedfile(suffix="", prefix="tmp", dir=None): # pylint: disable=redefined-builtin
temp = None
def close_tempfile():
@@ -1261,9 +1221,7 @@ def _call(*popenargs, terminate=False, **kwargs):
group_id = os.getpgid(process.pid)
os.killpg(group_id, signal.SIGCONT)
- with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(
- kill_proc
- ):
+ with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(kill_proc):
process = subprocess.Popen( # pylint: disable=subprocess-popen-preexec-fn
*popenargs, preexec_fn=preexec_fn, universal_newlines=True, **kwargs
)
@@ -1469,9 +1427,7 @@ def _get_compression(tar):
if suffix == ".tar":
raise UtilError(
"Expected compression with unknown file extension ('{}'), "
- "supported extensions are ('.tar'), ('.gz'), ('.xz'), ('.bz2')".format(
- ext
- )
+ "supported extensions are ('.tar'), ('.gz'), ('.xz'), ('.bz2')".format(ext)
)
# Assume just an unconventional name was provided, default to uncompressed
diff --git a/tests/artifactcache/artifactservice.py b/tests/artifactcache/artifactservice.py
index dafbc8fc9..67dd80942 100644
--- a/tests/artifactcache/artifactservice.py
+++ b/tests/artifactcache/artifactservice.py
@@ -28,9 +28,7 @@ from buildstream._protos.buildstream.v2.artifact_pb2 import (
UpdateArtifactRequest,
)
from buildstream._protos.buildstream.v2.artifact_pb2_grpc import ArtifactServiceStub
-from buildstream._protos.build.bazel.remote.execution.v2 import (
- remote_execution_pb2 as re_pb2,
-)
+from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2 as re_pb2
from buildstream import utils
from tests.testutils.artifactshare import create_artifact_share
@@ -100,14 +98,9 @@ def test_update_artifact(tmpdir, files):
except grpc.RpcError as e:
assert e.code() == grpc.StatusCode.FAILED_PRECONDITION
if files == "absent":
- assert (
- e.details() == "Artifact files specified but no files found"
- )
+ assert e.details() == "Artifact files specified but no files found"
elif files == "invalid":
- assert (
- e.details()
- == "Artifact files specified but directory not found"
- )
+ assert e.details() == "Artifact files specified but directory not found"
return
# If we uploaded the artifact check GetArtifact
diff --git a/tests/artifactcache/config.py b/tests/artifactcache/config.py
index d2df0fd79..204bc7398 100644
--- a/tests/artifactcache/config.py
+++ b/tests/artifactcache/config.py
@@ -25,12 +25,8 @@ cache3 = RemoteSpec(url="https://example.com/cache3", push=False)
cache4 = RemoteSpec(url="https://example.com/cache4", push=False)
cache5 = RemoteSpec(url="https://example.com/cache5", push=False)
cache6 = RemoteSpec(url="https://example.com/cache6", push=True, type=RemoteType.ALL)
-cache7 = RemoteSpec(
- url="https://index.example.com/cache1", push=True, type=RemoteType.INDEX
-)
-cache8 = RemoteSpec(
- url="https://storage.example.com/cache1", push=True, type=RemoteType.STORAGE
-)
+cache7 = RemoteSpec(url="https://index.example.com/cache1", push=True, type=RemoteType.INDEX)
+cache8 = RemoteSpec(url="https://storage.example.com/cache1", push=True, type=RemoteType.STORAGE)
# Generate cache configuration fragments for the user config and project config files.
@@ -57,8 +53,7 @@ def configure_remote_caches(override_caches, project_caches=None, user_caches=No
}
elif len(user_caches) > 1:
user_config["artifacts"] = [
- {"url": cache.url, "push": cache.push, "type": type_strings[cache.type]}
- for cache in user_caches
+ {"url": cache.url, "push": cache.push, "type": type_strings[cache.type]} for cache in user_caches
]
if len(override_caches) == 1:
@@ -75,11 +70,7 @@ def configure_remote_caches(override_caches, project_caches=None, user_caches=No
user_config["projects"] = {
"test": {
"artifacts": [
- {
- "url": cache.url,
- "push": cache.push,
- "type": type_strings[cache.type],
- }
+ {"url": cache.url, "push": cache.push, "type": type_strings[cache.type],}
for cache in override_caches
]
}
@@ -101,11 +92,7 @@ def configure_remote_caches(override_caches, project_caches=None, user_caches=No
project_config.update(
{
"artifacts": [
- {
- "url": cache.url,
- "push": cache.push,
- "type": type_strings[cache.type],
- }
+ {"url": cache.url, "push": cache.push, "type": type_strings[cache.type],}
for cache in project_caches
]
}
@@ -123,25 +110,15 @@ def configure_remote_caches(override_caches, project_caches=None, user_caches=No
pytest.param([], [], [], id="empty-config"),
pytest.param([], [], [cache1, cache2], id="user-config"),
pytest.param([], [cache1, cache2], [cache3], id="project-config"),
- pytest.param(
- [cache1], [cache2], [cache3], id="project-override-in-user-config"
- ),
- pytest.param(
- [cache1, cache2], [cache3, cache4], [cache5, cache6], id="list-order"
- ),
- pytest.param(
- [cache1, cache2, cache1], [cache2], [cache2, cache1], id="duplicates"
- ),
+ pytest.param([cache1], [cache2], [cache3], id="project-override-in-user-config"),
+ pytest.param([cache1, cache2], [cache3, cache4], [cache5, cache6], id="list-order"),
+ pytest.param([cache1, cache2, cache1], [cache2], [cache2, cache1], id="duplicates"),
pytest.param([cache7, cache8], [], [cache1], id="split-caches"),
],
)
-def test_artifact_cache_precedence(
- tmpdir, override_caches, project_caches, user_caches
-):
+def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user_caches):
# Produce a fake user and project config with the cache configuration.
- user_config, project_config = configure_remote_caches(
- override_caches, project_caches, user_caches
- )
+ user_config, project_config = configure_remote_caches(override_caches, project_caches, user_caches)
project_config["name"] = "test"
user_config_file = str(tmpdir.join("buildstream.conf"))
@@ -156,14 +133,10 @@ def test_artifact_cache_precedence(
project.ensure_fully_loaded()
# Use the helper from the artifactcache module to parse our configuration.
- parsed_cache_specs = ArtifactCache._configured_remote_cache_specs(
- context, project
- )
+ parsed_cache_specs = ArtifactCache._configured_remote_cache_specs(context, project)
# Verify that it was correctly read.
- expected_cache_specs = list(
- _deduplicate(itertools.chain(override_caches, project_caches, user_caches))
- )
+ expected_cache_specs = list(_deduplicate(itertools.chain(override_caches, project_caches, user_caches)))
assert parsed_cache_specs == expected_cache_specs
@@ -172,19 +145,14 @@ def test_artifact_cache_precedence(
# instead of an unhandled exception.
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize(
- "config_key, config_value",
- [("client-cert", "client.crt"), ("client-key", "client.key")],
+ "config_key, config_value", [("client-cert", "client.crt"), ("client-key", "client.key")],
)
def test_missing_certs(cli, datafiles, config_key, config_value):
project = os.path.join(datafiles.dirname, datafiles.basename, "missing-certs")
project_conf = {
"name": "test",
- "artifacts": {
- "url": "https://cache.example.com:12345",
- "push": "true",
- config_key: config_value,
- },
+ "artifacts": {"url": "https://cache.example.com:12345", "push": "true", config_key: config_value,},
}
project_conf_file = os.path.join(project, "project.conf")
_yaml.roundtrip_dump(project_conf, project_conf_file)
@@ -215,9 +183,7 @@ def test_only_one(cli, datafiles, override_caches, project_caches, user_caches):
project = os.path.join(datafiles.dirname, datafiles.basename, "only-one")
# Produce a fake user and project config with the cache configuration.
- user_config, project_config = configure_remote_caches(
- override_caches, project_caches, user_caches
- )
+ user_config, project_config = configure_remote_caches(override_caches, project_caches, user_caches)
project_config["name"] = "test"
cli.configure(user_config)
diff --git a/tests/artifactcache/expiry.py b/tests/artifactcache/expiry.py
index 1bc7b9781..030f4a023 100644
--- a/tests/artifactcache/expiry.py
+++ b/tests/artifactcache/expiry.py
@@ -138,10 +138,7 @@ def test_expiry_order(cli, datafiles):
wait_for_cache_granularity()
# Now extract dep.bst
- res = cli.run(
- project=project,
- args=["artifact", "checkout", "dep.bst", "--directory", checkout],
- )
+ res = cli.run(project=project, args=["artifact", "checkout", "dep.bst", "--directory", checkout],)
res.assert_success()
# Finally, build something that will cause the cache to overflow
@@ -203,9 +200,7 @@ def test_keep_dependencies(cli, datafiles):
# duplicating artifacts (bad!) we need to make this equal in size
# or smaller than half the size of its dependencies.
#
- create_element_size(
- "target.bst", project, element_path, ["dependency.bst"], 2000000
- )
+ create_element_size("target.bst", project, element_path, ["dependency.bst"], 2000000)
res = cli.run(project=project, args=["build", "target.bst"])
res.assert_success()
@@ -221,9 +216,7 @@ def test_never_delete_required(cli, datafiles):
project = str(datafiles)
element_path = "elements"
- cli.configure(
- {"cache": {"quota": 10000000}, "scheduler": {"fetchers": 1, "builders": 1}}
- )
+ cli.configure({"cache": {"quota": 10000000}, "scheduler": {"fetchers": 1, "builders": 1}})
# Create a linear build tree
create_element_size("dep1.bst", project, element_path, [], 8000000)
@@ -314,9 +307,7 @@ def test_cleanup_first(cli, datafiles):
#
# Fix the fetchers and builders just to ensure a predictable
# sequence of events (although it does not effect this test)
- cli.configure(
- {"cache": {"quota": 5000000,}, "scheduler": {"fetchers": 1, "builders": 1}}
- )
+ cli.configure({"cache": {"quota": 5000000,}, "scheduler": {"fetchers": 1, "builders": 1}})
# Our cache is now more than full, BuildStream
create_element_size("target2.bst", project, element_path, [], 4000000)
diff --git a/tests/artifactcache/junctions.py b/tests/artifactcache/junctions.py
index 91cc01fff..76ba85fb5 100644
--- a/tests/artifactcache/junctions.py
+++ b/tests/artifactcache/junctions.py
@@ -26,9 +26,7 @@ def test_push_pull(cli, tmpdir, datafiles):
project = os.path.join(str(datafiles), "parent")
base_project = os.path.join(str(project), "base")
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare-parent")
- ) as share, create_artifact_share(
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare-parent")) as share, create_artifact_share(
os.path.join(str(tmpdir), "artifactshare-base")
) as base_share:
@@ -46,9 +44,7 @@ def test_push_pull(cli, tmpdir, datafiles):
project_set_artifacts(base_project, base_share.repo)
# Now try bst artifact push
- result = cli.run(
- project=project, args=["artifact", "push", "--deps", "all", "target.bst"]
- )
+ result = cli.run(project=project, args=["artifact", "push", "--deps", "all", "target.bst"])
assert result.exit_code == 0
# And finally assert that the artifacts are in the right shares
@@ -56,16 +52,12 @@ def test_push_pull(cli, tmpdir, datafiles):
# In the parent project's cache
assert_shared(cli, share, project, "target.bst", project_name="parent")
assert_shared(cli, share, project, "app.bst", project_name="parent")
- assert_not_shared(
- cli, share, base_project, "base-element.bst", project_name="base"
- )
+ assert_not_shared(cli, share, base_project, "base-element.bst", project_name="base")
# In the junction project's cache
assert_not_shared(cli, base_share, project, "target.bst", project_name="parent")
assert_not_shared(cli, base_share, project, "app.bst", project_name="parent")
- assert_shared(
- cli, base_share, base_project, "base-element.bst", project_name="base"
- )
+ assert_shared(cli, base_share, base_project, "base-element.bst", project_name="base")
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
@@ -82,9 +74,7 @@ def test_push_pull(cli, tmpdir, datafiles):
assert state != "cached"
# Now try bst artifact pull
- result = cli.run(
- project=project, args=["artifact", "pull", "--deps", "all", "target.bst"]
- )
+ result = cli.run(project=project, args=["artifact", "pull", "--deps", "all", "target.bst"])
assert result.exit_code == 0
# And assert that they are again in the local cache, without having built
@@ -107,9 +97,7 @@ def test_caching_junction_elements(cli, tmpdir, datafiles):
junction_data["config"] = {"cache-junction-elements": True}
_yaml.roundtrip_dump(junction_data, junction_element)
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare-parent")
- ) as share, create_artifact_share(
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare-parent")) as share, create_artifact_share(
os.path.join(str(tmpdir), "artifactshare-base")
) as base_share:
@@ -127,9 +115,7 @@ def test_caching_junction_elements(cli, tmpdir, datafiles):
project_set_artifacts(base_project, base_share.repo)
# Now try bst artifact push
- result = cli.run(
- project=project, args=["artifact", "push", "--deps", "all", "target.bst"]
- )
+ result = cli.run(project=project, args=["artifact", "push", "--deps", "all", "target.bst"])
assert result.exit_code == 0
# And finally assert that the artifacts are in the right shares
@@ -142,9 +128,7 @@ def test_caching_junction_elements(cli, tmpdir, datafiles):
# The junction project's cache should only contain elements in the junction project
assert_not_shared(cli, base_share, project, "target.bst", project_name="parent")
assert_not_shared(cli, base_share, project, "app.bst", project_name="parent")
- assert_shared(
- cli, base_share, base_project, "base-element.bst", project_name="base"
- )
+ assert_shared(cli, base_share, base_project, "base-element.bst", project_name="base")
@pytest.mark.datafiles(DATA_DIR)
@@ -156,9 +140,7 @@ def test_ignore_junction_remotes(cli, tmpdir, datafiles):
junction_element = os.path.join(project, "base.bst")
junction_data = _yaml.roundtrip_load(junction_element)
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare-parent")
- ) as share, create_artifact_share(
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare-parent")) as share, create_artifact_share(
os.path.join(str(tmpdir), "artifactshare-base")
) as base_share:
@@ -175,16 +157,12 @@ def test_ignore_junction_remotes(cli, tmpdir, datafiles):
# The parent project's cache should only contain project elements
assert_shared(cli, share, project, "target.bst", project_name="parent")
assert_shared(cli, share, project, "app.bst", project_name="parent")
- assert_not_shared(
- cli, share, base_project, "base-element.bst", project_name="base"
- )
+ assert_not_shared(cli, share, base_project, "base-element.bst", project_name="base")
# The junction project's cache should only contain elements in the junction project
assert_not_shared(cli, base_share, project, "target.bst", project_name="parent")
assert_not_shared(cli, base_share, project, "app.bst", project_name="parent")
- assert_shared(
- cli, base_share, base_project, "base-element.bst", project_name="base"
- )
+ assert_shared(cli, base_share, base_project, "base-element.bst", project_name="base")
# Ensure that, from now on, we ignore junction element remotes
junction_data["config"] = {"ignore-junction-remotes": True}
@@ -205,9 +183,7 @@ def test_ignore_junction_remotes(cli, tmpdir, datafiles):
assert state != "cached"
# Now try bst artifact pull
- result = cli.run(
- project=project, args=["artifact", "pull", "--deps", "all", "target.bst"]
- )
+ result = cli.run(project=project, args=["artifact", "pull", "--deps", "all", "target.bst"])
assert result.exit_code == 0
# And assert that they are again in the local cache, without having built
diff --git a/tests/artifactcache/pull.py b/tests/artifactcache/pull.py
index 6c4134b0b..188f4cf97 100644
--- a/tests/artifactcache/pull.py
+++ b/tests/artifactcache/pull.py
@@ -57,9 +57,7 @@ def test_pull(cli, tmpdir, datafiles):
# Assert that we are now cached locally
assert cli.get_element_state(project_dir, "target.bst") == "cached"
# Assert that we shared/pushed the cached artifact
- assert share.get_artifact(
- cli.get_artifact_name(project_dir, "test", "target.bst")
- )
+ assert share.get_artifact(cli.get_artifact_name(project_dir, "test", "target.bst"))
# Delete the artifact locally
cli.remove_artifact_from_cache(project_dir, "target.bst")
@@ -91,9 +89,7 @@ def test_pull(cli, tmpdir, datafiles):
# Manually set up the CAS remote
artifactcache.setup_remotes(use_config=True)
- assert artifactcache.has_push_remotes(
- plugin=element
- ), "No remote configured for element target.bst"
+ assert artifactcache.has_push_remotes(plugin=element), "No remote configured for element target.bst"
assert artifactcache.pull(element, element_key), "Pull operation failed"
assert cli.artifact.is_cached(cache_dir, element, element_key)
@@ -126,9 +122,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
# Assert that we are now cached locally
assert cli.get_element_state(project_dir, "target.bst") == "cached"
# Assert that we shared/pushed the cached artifact
- assert share.get_artifact(
- cli.get_artifact_name(project_dir, "test", "target.bst")
- )
+ assert share.get_artifact(cli.get_artifact_name(project_dir, "test", "target.bst"))
with dummy_context(config=user_config_file) as context:
# Load the project and CAS cache
@@ -142,9 +136,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
assert cli.artifact.is_cached(rootcache_dir, element, element_key)
# Retrieve the Directory object from the cached artifact
- artifact_digest = cli.artifact.get_digest(
- rootcache_dir, element, element_key
- )
+ artifact_digest = cli.artifact.get_digest(rootcache_dir, element, element_key)
artifactcache = context.artifactcache
# Manually set up the CAS remote
@@ -173,9 +165,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
cas.close_grpc_channels()
assert cli.get_element_state(project_dir, "target.bst") != "cached"
- tree_digest = remote_execution_pb2.Digest(
- hash=tree_hash, size_bytes=tree_size
- )
+ tree_digest = remote_execution_pb2.Digest(hash=tree_hash, size_bytes=tree_size)
# Pull the artifact using the Tree object
directory_digest = artifactcache.pull_tree(project, artifact_digest)
@@ -187,9 +177,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
# Directory size now zero with AaaP and stack element commit #1cbc5e63dc
assert directory_hash and not directory_size
- directory_digest = remote_execution_pb2.Digest(
- hash=directory_hash, size_bytes=directory_size
- )
+ directory_digest = remote_execution_pb2.Digest(hash=directory_hash, size_bytes=directory_size)
# Ensure the entire Tree structure has been pulled
assert os.path.exists(cas.objpath(directory_digest))
diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index dded57563..238d5f7ef 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -43,9 +43,7 @@ def _push(cli, cache_dir, project_dir, config_file, target):
artifactcache.setup_remotes(use_config=True)
artifactcache.initialize_remotes()
- assert artifactcache.has_push_remotes(
- plugin=element
- ), "No remote configured for element target.bst"
+ assert artifactcache.has_push_remotes(plugin=element), "No remote configured for element target.bst"
assert element._push(), "Push operation failed"
return element_key
@@ -75,14 +73,8 @@ def test_push(cli, tmpdir, datafiles):
# Write down the user configuration file
_yaml.roundtrip_dump(user_config, file=user_config_file)
- element_key = _push(
- cli, rootcache_dir, project_dir, user_config_file, "target.bst"
- )
- assert share.get_artifact(
- cli.get_artifact_name(
- project_dir, "test", "target.bst", cache_key=element_key
- )
- )
+ element_key = _push(cli, rootcache_dir, project_dir, user_config_file, "target.bst")
+ assert share.get_artifact(cli.get_artifact_name(project_dir, "test", "target.bst", cache_key=element_key))
@pytest.mark.datafiles(DATA_DIR)
@@ -115,9 +107,7 @@ def test_push_split(cli, tmpdir, datafiles):
element_key = _push(cli, rootcache_dir, project_dir, config_path, "target.bst")
proto = index.get_artifact_proto(
- cli.get_artifact_name(
- project_dir, "test", "target.bst", cache_key=element_key
- )
+ cli.get_artifact_name(project_dir, "test", "target.bst", cache_key=element_key)
)
assert storage.get_cas_files(proto) is not None
@@ -165,7 +155,5 @@ def test_push_message(tmpdir, datafiles):
message_hash, message_size = command_digest.hash, command_digest.size_bytes
assert message_hash and message_size
- message_digest = remote_execution_pb2.Digest(
- hash=message_hash, size_bytes=message_size
- )
+ message_digest = remote_execution_pb2.Digest(hash=message_hash, size_bytes=message_size)
assert share.has_object(message_digest)
diff --git a/tests/cachekey/cachekey.py b/tests/cachekey/cachekey.py
index 882d07240..eb248b9ed 100644
--- a/tests/cachekey/cachekey.py
+++ b/tests/cachekey/cachekey.py
@@ -153,9 +153,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
# The cache key test uses a project which exercises all plugins,
# so we can't run it at all if we don't have them installed.
#
-@pytest.mark.skipif(
- MACHINE_ARCH != "x86-64", reason="Cache keys depend on architecture"
-)
+@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Cache keys depend on architecture")
@pytest.mark.skipif(not IS_LINUX, reason="Only available on linux")
@pytest.mark.skipif(HAVE_BZR is False, reason="bzr is not available")
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
@@ -174,11 +172,7 @@ def test_cache_key(datafiles, cli):
# https://github.com/omarkohl/pytest-datafiles/issues/11
os.chmod(goodbye_link, 0o755)
- result = cli.run(
- project=project,
- silent=True,
- args=["show", "--format", "%{name}::%{full-key}", "target.bst"],
- )
+ result = cli.run(project=project, silent=True, args=["show", "--format", "%{name}::%{full-key}", "target.bst"],)
result.assert_success()
assert_cache_keys(project, result.output)
@@ -198,9 +192,7 @@ def test_cache_key(datafiles, cli):
],
],
)
-def test_cache_key_fatal_warnings(
- cli, tmpdir, first_warnings, second_warnings, identical_keys
-):
+def test_cache_key_fatal_warnings(cli, tmpdir, first_warnings, second_warnings, identical_keys):
# Builds project, runs bst show, gathers cache keys
def run_get_cache_key(project_name, warnings):
@@ -218,10 +210,7 @@ def test_cache_key_fatal_warnings(
element_file = str(elem_dir.join("stack.bst"))
_yaml.roundtrip_dump({"kind": "stack"}, file=element_file)
- result = cli.run(
- project=str(project_dir),
- args=["show", "--format", "%{name}::%{full-key}", "stack.bst"],
- )
+ result = cli.run(project=str(project_dir), args=["show", "--format", "%{name}::%{full-key}", "stack.bst"],)
return result.output
# Returns true if all keys are identical
@@ -241,23 +230,15 @@ def test_keys_stable_over_targets(cli, datafiles):
target2 = "elements/key-stability/t2.bst"
project = str(datafiles)
- full_graph_result = cli.run(
- project=project, args=["show", "--format", "%{name}::%{full-key}", root_element]
- )
+ full_graph_result = cli.run(project=project, args=["show", "--format", "%{name}::%{full-key}", root_element])
full_graph_result.assert_success()
all_cache_keys = parse_output_keys(full_graph_result.output)
- ordering1_result = cli.run(
- project=project,
- args=["show", "--format", "%{name}::%{full-key}", target1, target2],
- )
+ ordering1_result = cli.run(project=project, args=["show", "--format", "%{name}::%{full-key}", target1, target2],)
ordering1_result.assert_success()
ordering1_cache_keys = parse_output_keys(ordering1_result.output)
- ordering2_result = cli.run(
- project=project,
- args=["show", "--format", "%{name}::%{full-key}", target2, target1],
- )
+ ordering2_result = cli.run(project=project, args=["show", "--format", "%{name}::%{full-key}", target2, target1],)
ordering2_result.assert_success()
ordering2_cache_keys = parse_output_keys(ordering2_result.output)
diff --git a/tests/cachekey/update.py b/tests/cachekey/update.py
index ae8b368c5..2dd4085c2 100755
--- a/tests/cachekey/update.py
+++ b/tests/cachekey/update.py
@@ -45,13 +45,7 @@ def update_keys():
result = cli.run(
project=PROJECT_DIR,
silent=True,
- args=[
- "--no-colors",
- "show",
- "--format",
- "%{name}::%{full-key}",
- "target.bst",
- ],
+ args=["--no-colors", "show", "--format", "%{name}::%{full-key}", "target.bst",],
)
# Load the actual keys, and the expected ones if they exist
diff --git a/tests/conftest.py b/tests/conftest.py
index 05a4853f6..610423443 100755
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -28,9 +28,7 @@ from buildstream.testing._fixtures import (
reset_global_node_state,
thread_check,
) # pylint: disable=unused-import
-from buildstream.testing.integration import (
- integration_cache,
-) # pylint: disable=unused-import
+from buildstream.testing.integration import integration_cache # pylint: disable=unused-import
from tests.testutils.repo.git import Git
@@ -51,17 +49,11 @@ from tests.testutils.repo.zip import Zip
#################################################
def pytest_addoption(parser):
parser.addoption(
- "--integration",
- action="store_true",
- default=False,
- help="Run integration tests",
+ "--integration", action="store_true", default=False, help="Run integration tests",
)
parser.addoption(
- "--remote-execution",
- action="store_true",
- default=False,
- help="Run remote-execution tests only",
+ "--remote-execution", action="store_true", default=False, help="Run remote-execution tests only",
)
diff --git a/tests/elements/filter.py b/tests/elements/filter.py
index 54ddf216a..3b0be378a 100644
--- a/tests/elements/filter.py
+++ b/tests/elements/filter.py
@@ -22,10 +22,7 @@ def test_filter_include(datafiles, cli, tmpdir):
result.assert_success()
checkout = os.path.join(tmpdir.dirname, tmpdir.basename, "checkout")
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "output-include.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "output-include.bst", "--directory", checkout],)
result.assert_success()
assert os.path.exists(os.path.join(checkout, "foo"))
assert not os.path.exists(os.path.join(checkout, "bar"))
@@ -40,14 +37,7 @@ def test_filter_include_dynamic(datafiles, cli, tmpdir):
checkout = os.path.join(tmpdir.dirname, tmpdir.basename, "checkout")
result = cli.run(
- project=project,
- args=[
- "artifact",
- "checkout",
- "output-dynamic-include.bst",
- "--directory",
- checkout,
- ],
+ project=project, args=["artifact", "checkout", "output-dynamic-include.bst", "--directory", checkout,],
)
result.assert_success()
assert os.path.exists(os.path.join(checkout, "foo"))
@@ -61,10 +51,7 @@ def test_filter_exclude(datafiles, cli, tmpdir):
result.assert_success()
checkout = os.path.join(tmpdir.dirname, tmpdir.basename, "checkout")
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "output-exclude.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "output-exclude.bst", "--directory", checkout],)
result.assert_success()
assert not os.path.exists(os.path.join(checkout, "foo"))
assert os.path.exists(os.path.join(checkout, "bar"))
@@ -77,10 +64,7 @@ def test_filter_orphans(datafiles, cli, tmpdir):
result.assert_success()
checkout = os.path.join(tmpdir.dirname, tmpdir.basename, "checkout")
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "output-orphans.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "output-orphans.bst", "--directory", checkout],)
result.assert_success()
assert os.path.exists(os.path.join(checkout, "baz"))
@@ -91,10 +75,7 @@ def test_filter_deps_ok(datafiles, cli):
result = cli.run(project=project, args=["build", "deps-permitted.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["show", "--deps=run", "--format='%{name}'", "deps-permitted.bst"],
- )
+ result = cli.run(project=project, args=["show", "--deps=run", "--format='%{name}'", "deps-permitted.bst"],)
result.assert_success()
assert "output-exclude.bst" in result.output
@@ -133,10 +114,7 @@ def test_filter_forbid_also_rdep(datafiles, cli):
def test_filter_workspace_open(datafiles, cli, tmpdir):
project = str(datafiles)
workspace_dir = os.path.join(tmpdir.dirname, tmpdir.basename, "workspace")
- result = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace_dir, "deps-permitted.bst"],
- )
+ result = cli.run(project=project, args=["workspace", "open", "--directory", workspace_dir, "deps-permitted.bst"],)
result.assert_success()
assert os.path.exists(os.path.join(workspace_dir, "foo"))
assert os.path.exists(os.path.join(workspace_dir, "bar"))
@@ -147,9 +125,7 @@ def test_filter_workspace_open(datafiles, cli, tmpdir):
def test_filter_workspace_open_multi(datafiles, cli):
project = str(datafiles)
result = cli.run(
- cwd=project,
- project=project,
- args=["workspace", "open", "deps-permitted.bst", "output-orphans.bst"],
+ cwd=project, project=project, args=["workspace", "open", "deps-permitted.bst", "output-orphans.bst"],
)
result.assert_success()
assert os.path.exists(os.path.join(project, "input"))
@@ -160,10 +136,7 @@ def test_filter_workspace_build(datafiles, cli, tmpdir):
project = str(datafiles)
tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
workspace_dir = os.path.join(tempdir, "workspace")
- result = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace_dir, "output-orphans.bst"],
- )
+ result = cli.run(project=project, args=["workspace", "open", "--directory", workspace_dir, "output-orphans.bst"],)
result.assert_success()
src = os.path.join(workspace_dir, "foo")
dst = os.path.join(workspace_dir, "quux")
@@ -172,14 +145,7 @@ def test_filter_workspace_build(datafiles, cli, tmpdir):
result.assert_success()
checkout_dir = os.path.join(tempdir, "checkout")
result = cli.run(
- project=project,
- args=[
- "artifact",
- "checkout",
- "output-orphans.bst",
- "--directory",
- checkout_dir,
- ],
+ project=project, args=["artifact", "checkout", "output-orphans.bst", "--directory", checkout_dir,],
)
result.assert_success()
assert os.path.exists(os.path.join(checkout_dir, "quux"))
@@ -190,10 +156,7 @@ def test_filter_workspace_close(datafiles, cli, tmpdir):
project = str(datafiles)
tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
workspace_dir = os.path.join(tempdir, "workspace")
- result = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace_dir, "output-orphans.bst"],
- )
+ result = cli.run(project=project, args=["workspace", "open", "--directory", workspace_dir, "output-orphans.bst"],)
result.assert_success()
src = os.path.join(workspace_dir, "foo")
dst = os.path.join(workspace_dir, "quux")
@@ -204,14 +167,7 @@ def test_filter_workspace_close(datafiles, cli, tmpdir):
result.assert_success()
checkout_dir = os.path.join(tempdir, "checkout")
result = cli.run(
- project=project,
- args=[
- "artifact",
- "checkout",
- "output-orphans.bst",
- "--directory",
- checkout_dir,
- ],
+ project=project, args=["artifact", "checkout", "output-orphans.bst", "--directory", checkout_dir,],
)
result.assert_success()
assert not os.path.exists(os.path.join(checkout_dir, "quux"))
@@ -222,10 +178,7 @@ def test_filter_workspace_reset(datafiles, cli, tmpdir):
project = str(datafiles)
tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
workspace_dir = os.path.join(tempdir, "workspace")
- result = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace_dir, "output-orphans.bst"],
- )
+ result = cli.run(project=project, args=["workspace", "open", "--directory", workspace_dir, "output-orphans.bst"],)
result.assert_success()
src = os.path.join(workspace_dir, "foo")
dst = os.path.join(workspace_dir, "quux")
@@ -236,14 +189,7 @@ def test_filter_workspace_reset(datafiles, cli, tmpdir):
result.assert_success()
checkout_dir = os.path.join(tempdir, "checkout")
result = cli.run(
- project=project,
- args=[
- "artifact",
- "checkout",
- "output-orphans.bst",
- "--directory",
- checkout_dir,
- ],
+ project=project, args=["artifact", "checkout", "output-orphans.bst", "--directory", checkout_dir,],
)
result.assert_success()
assert not os.path.exists(os.path.join(checkout_dir, "quux"))
@@ -341,10 +287,7 @@ def test_filter_track_excepted(datafiles, cli, tmpdir):
assert cli.get_element_state(project, input_name) == "no reference"
# Now try to track it
- result = cli.run(
- project=project,
- args=["source", "track", "filter2.bst", "--except", "input.bst"],
- )
+ result = cli.run(project=project, args=["source", "track", "filter2.bst", "--except", "input.bst"],)
result.assert_success()
# Now check that a ref field exists
@@ -394,9 +337,7 @@ def test_filter_track_multi_to_one(datafiles, cli, tmpdir):
assert cli.get_element_state(project, input_name) == "no reference"
# Now try to track it
- result = cli.run(
- project=project, args=["source", "track", "filter1.bst", "filter2.bst"]
- )
+ result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst"])
result.assert_success()
# Now check that a ref field exists
@@ -457,9 +398,7 @@ def test_filter_track_multi(datafiles, cli, tmpdir):
}
# Now try to track it
- result = cli.run(
- project=project, args=["source", "track", "filter1.bst", "filter2.bst"]
- )
+ result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst"])
result.assert_success()
# Now check that a ref field exists
@@ -524,10 +463,7 @@ def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
}
# Now try to track it
- result = cli.run(
- project=project,
- args=["source", "track", "filter1.bst", "filter2.bst", "--except", input_name],
- )
+ result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst", "--except", input_name],)
result.assert_success()
# Now check that a ref field exists
@@ -544,21 +480,13 @@ def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_include_with_indirect_deps(datafiles, cli, tmpdir):
project = str(datafiles)
- result = cli.run(
- project=project, args=["build", "output-include-with-indirect-deps.bst"]
- )
+ result = cli.run(project=project, args=["build", "output-include-with-indirect-deps.bst"])
result.assert_success()
checkout = os.path.join(tmpdir.dirname, tmpdir.basename, "checkout")
result = cli.run(
project=project,
- args=[
- "artifact",
- "checkout",
- "output-include-with-indirect-deps.bst",
- "--directory",
- checkout,
- ],
+ args=["artifact", "checkout", "output-include-with-indirect-deps.bst", "--directory", checkout,],
)
result.assert_success()
@@ -573,9 +501,7 @@ def test_filter_include_with_indirect_deps(datafiles, cli, tmpdir):
@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_fails_for_nonexisting_domain(datafiles, cli):
project = str(datafiles)
- result = cli.run(
- project=project, args=["build", "output-include-nonexistent-domain.bst"]
- )
+ result = cli.run(project=project, args=["build", "output-include-nonexistent-domain.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
error = "Unknown domains were used in output-include-nonexistent-domain.bst [line 7 column 2]"
@@ -599,14 +525,7 @@ def test_filter_pass_integration(datafiles, cli):
checkout_dir = os.path.join(project, "no-pass")
result = cli.run(
project=project,
- args=[
- "artifact",
- "checkout",
- "--integrate",
- "--directory",
- checkout_dir,
- "no-pass-integration.bst",
- ],
+ args=["artifact", "checkout", "--integrate", "--directory", checkout_dir, "no-pass-integration.bst",],
)
result.assert_success()
@@ -615,14 +534,7 @@ def test_filter_pass_integration(datafiles, cli):
checkout_dir = os.path.join(project, "pass")
result = cli.run(
project=project,
- args=[
- "artifact",
- "checkout",
- "--integrate",
- "--directory",
- checkout_dir,
- "pass-integration.bst",
- ],
+ args=["artifact", "checkout", "--integrate", "--directory", checkout_dir, "pass-integration.bst",],
)
result.assert_main_error(ErrorDomain.STREAM, "missing-command")
diff --git a/tests/elements/filter/basic/element_plugins/dynamic.py b/tests/elements/filter/basic/element_plugins/dynamic.py
index 6cd6b1093..bf079111f 100644
--- a/tests/elements/filter/basic/element_plugins/dynamic.py
+++ b/tests/elements/filter/basic/element_plugins/dynamic.py
@@ -5,10 +5,7 @@ from buildstream import Element, Scope
class DynamicElement(Element):
def configure(self, node):
node.validate_keys(["split-rules"])
- self.split_rules = {
- key: value.as_str_list()
- for key, value in node.get_mapping("split-rules").items()
- }
+ self.split_rules = {key: value.as_str_list() for key, value in node.get_mapping("split-rules").items()}
def preflight(self):
pass
diff --git a/tests/examples/autotools.py b/tests/examples/autotools.py
index e684fd43c..bd5d530ef 100644
--- a/tests/examples/autotools.py
+++ b/tests/examples/autotools.py
@@ -10,24 +10,14 @@ from buildstream.testing._utils.site import IS_LINUX, MACHINE_ARCH, HAVE_SANDBOX
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "..",
- "..",
- "doc",
- "examples",
- "autotools",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "..", "doc", "examples", "autotools",)
# Tests a build of the autotools amhello project on an alpine-linux base runtime
@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
+@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with sandbox")
@pytest.mark.skipif(
- not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with sandbox"
-)
-@pytest.mark.skipif(
- HAVE_SANDBOX == "chroot",
- reason="This test is not meant to work with chroot sandbox",
+ HAVE_SANDBOX == "chroot", reason="This test is not meant to work with chroot sandbox",
)
@pytest.mark.datafiles(DATA_DIR)
def test_autotools_build(cli, datafiles):
@@ -38,10 +28,7 @@ def test_autotools_build(cli, datafiles):
result = cli.run(project=project, args=["build", "hello.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "hello.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "hello.bst", "--directory", checkout],)
result.assert_success()
assert_contains(
@@ -61,13 +48,10 @@ def test_autotools_build(cli, datafiles):
# Test running an executable built with autotools.
@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
-@pytest.mark.skipif(
- not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with sandbox"
-)
+@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with sandbox")
@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
@pytest.mark.skipif(
- HAVE_SANDBOX == "chroot",
- reason="This test is not meant to work with chroot sandbox",
+ HAVE_SANDBOX == "chroot", reason="This test is not meant to work with chroot sandbox",
)
@pytest.mark.datafiles(DATA_DIR)
def test_autotools_run(cli, datafiles):
diff --git a/tests/examples/developing.py b/tests/examples/developing.py
index df6e82623..4d7d8ab69 100644
--- a/tests/examples/developing.py
+++ b/tests/examples/developing.py
@@ -11,24 +11,13 @@ import tests.testutils.patch as patch
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "..",
- "..",
- "doc",
- "examples",
- "developing",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "..", "doc", "examples", "developing",)
# Test that the project builds successfully
@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
-@pytest.mark.skipif(
- not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with SANDBOX"
-)
-@pytest.mark.skipif(
- HAVE_SANDBOX == "chroot", reason="This is not meant to work with chroot"
-)
+@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with SANDBOX")
+@pytest.mark.skipif(HAVE_SANDBOX == "chroot", reason="This is not meant to work with chroot")
@pytest.mark.datafiles(DATA_DIR)
def test_autotools_build(cli, datafiles):
project = str(datafiles)
@@ -38,25 +27,16 @@ def test_autotools_build(cli, datafiles):
result = cli.run(project=project, args=["build", "hello.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "hello.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "hello.bst", "--directory", checkout],)
result.assert_success()
- assert_contains(
- checkout, ["/usr", "/usr/lib", "/usr/bin", "/usr/share", "/usr/bin/hello"]
- )
+ assert_contains(checkout, ["/usr", "/usr/lib", "/usr/bin", "/usr/share", "/usr/bin/hello"])
# Test the unmodified hello command works as expected.
@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
-@pytest.mark.skipif(
- not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with SANDBOX"
-)
-@pytest.mark.skipif(
- HAVE_SANDBOX == "chroot", reason="This is not meant to work with chroot"
-)
+@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with SANDBOX")
+@pytest.mark.skipif(HAVE_SANDBOX == "chroot", reason="This is not meant to work with chroot")
@pytest.mark.datafiles(DATA_DIR)
def test_run_unmodified_hello(cli, datafiles):
project = str(datafiles)
@@ -76,38 +56,26 @@ def test_open_workspace(cli, tmpdir, datafiles):
project = str(datafiles)
workspace_dir = os.path.join(str(tmpdir), "workspace_hello")
- result = cli.run(
- project=project,
- args=["workspace", "open", "-f", "--directory", workspace_dir, "hello.bst",],
- )
+ result = cli.run(project=project, args=["workspace", "open", "-f", "--directory", workspace_dir, "hello.bst",],)
result.assert_success()
result = cli.run(project=project, args=["workspace", "list"])
result.assert_success()
- result = cli.run(
- project=project, args=["workspace", "close", "--remove-dir", "hello.bst"]
- )
+ result = cli.run(project=project, args=["workspace", "close", "--remove-dir", "hello.bst"])
result.assert_success()
# Test making a change using the workspace
@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
-@pytest.mark.skipif(
- not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with SANDBOX"
-)
-@pytest.mark.skipif(
- HAVE_SANDBOX == "chroot", reason="This is not meant to work with chroot"
-)
+@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with SANDBOX")
+@pytest.mark.skipif(HAVE_SANDBOX == "chroot", reason="This is not meant to work with chroot")
@pytest.mark.datafiles(DATA_DIR)
def test_make_change_in_workspace(cli, tmpdir, datafiles):
project = str(datafiles)
workspace_dir = os.path.join(str(tmpdir), "workspace_hello")
- result = cli.run(
- project=project,
- args=["workspace", "open", "-f", "--directory", workspace_dir, "hello.bst"],
- )
+ result = cli.run(project=project, args=["workspace", "open", "-f", "--directory", workspace_dir, "hello.bst"],)
result.assert_success()
result = cli.run(project=project, args=["workspace", "list"])
@@ -124,7 +92,5 @@ def test_make_change_in_workspace(cli, tmpdir, datafiles):
result.assert_success()
assert result.output == "Hello World\nWe can use workspaces!\n"
- result = cli.run(
- project=project, args=["workspace", "close", "--remove-dir", "hello.bst"]
- )
+ result = cli.run(project=project, args=["workspace", "close", "--remove-dir", "hello.bst"])
result.assert_success()
diff --git a/tests/examples/first-project.py b/tests/examples/first-project.py
index 4a378df62..906bb326d 100644
--- a/tests/examples/first-project.py
+++ b/tests/examples/first-project.py
@@ -12,14 +12,7 @@ from buildstream.testing._utils.site import IS_LINUX
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "..",
- "..",
- "doc",
- "examples",
- "first-project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "..", "doc", "examples", "first-project",)
@pytest.mark.skipif(not IS_LINUX, reason="Only available on linux")
@@ -31,10 +24,7 @@ def test_first_project_build_checkout(cli, datafiles):
result = cli.run(project=project, args=["build", "hello.bst"])
assert result.exit_code == 0
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "hello.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "hello.bst", "--directory", checkout],)
assert result.exit_code == 0
assert_contains(checkout, ["/hello.world"])
diff --git a/tests/examples/flatpak-autotools.py b/tests/examples/flatpak-autotools.py
index 4e7a9e36f..9e9ee8827 100644
--- a/tests/examples/flatpak-autotools.py
+++ b/tests/examples/flatpak-autotools.py
@@ -13,19 +13,12 @@ pytestmark = pytest.mark.integration
DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "..",
- "..",
- "doc",
- "examples",
- "flatpak-autotools",
+ os.path.dirname(os.path.realpath(__file__)), "..", "..", "doc", "examples", "flatpak-autotools",
)
try:
- from bst_plugins_experimental.sources import (
- _ostree,
- ) # pylint: disable=unused-import
+ from bst_plugins_experimental.sources import _ostree # pylint: disable=unused-import
# Even when we have the plugin, it might be missing dependencies. This requires
# bst_plugins_experimental to be fully installed, with host ostree dependencies
@@ -41,15 +34,9 @@ except (ImportError, ValueError):
def workaround_setuptools_bug(project):
os.makedirs(os.path.join(project, "files", "links"), exist_ok=True)
try:
- os.symlink(
- os.path.join("usr", "lib"), os.path.join(project, "files", "links", "lib")
- )
- os.symlink(
- os.path.join("usr", "bin"), os.path.join(project, "files", "links", "bin")
- )
- os.symlink(
- os.path.join("usr", "etc"), os.path.join(project, "files", "links", "etc")
- )
+ os.symlink(os.path.join("usr", "lib"), os.path.join(project, "files", "links", "lib"))
+ os.symlink(os.path.join("usr", "bin"), os.path.join(project, "files", "links", "bin"))
+ os.symlink(os.path.join("usr", "etc"), os.path.join(project, "files", "links", "etc"))
except FileExistsError:
# If the files exist, we're running from a git checkout and
# not a source distribution, no need to complain
@@ -59,9 +46,7 @@ def workaround_setuptools_bug(project):
# Test that a build upon flatpak runtime 'works' - we use the autotools sample
# amhello project for this.
@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
-@pytest.mark.skipif(
- not IS_LINUX or not HAVE_OSTREE_PLUGIN, reason="Only available on linux with ostree"
-)
+@pytest.mark.skipif(not IS_LINUX or not HAVE_OSTREE_PLUGIN, reason="Only available on linux with ostree")
@pytest.mark.datafiles(DATA_DIR)
def test_autotools_build(cli, datafiles):
project = str(datafiles)
@@ -71,10 +56,7 @@ def test_autotools_build(cli, datafiles):
result = cli.run(project=project, args=["build", "hello.bst"])
assert result.exit_code == 0
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "hello.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "hello.bst", "--directory", checkout],)
assert result.exit_code == 0
assert_contains(
@@ -94,9 +76,7 @@ def test_autotools_build(cli, datafiles):
# Test running an executable built with autotools
@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
-@pytest.mark.skipif(
- not IS_LINUX or not HAVE_OSTREE_PLUGIN, reason="Only available on linux with ostree"
-)
+@pytest.mark.skipif(not IS_LINUX or not HAVE_OSTREE_PLUGIN, reason="Only available on linux with ostree")
@pytest.mark.datafiles(DATA_DIR)
def test_autotools_run(cli, datafiles):
project = str(datafiles)
diff --git a/tests/examples/integration-commands.py b/tests/examples/integration-commands.py
index fac45fd22..257ecc44f 100644
--- a/tests/examples/integration-commands.py
+++ b/tests/examples/integration-commands.py
@@ -10,22 +10,14 @@ from buildstream.testing._utils.site import IS_LINUX, MACHINE_ARCH, HAVE_SANDBOX
pytestmark = pytest.mark.integration
DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "..",
- "..",
- "doc",
- "examples",
- "integration-commands",
+ os.path.dirname(os.path.realpath(__file__)), "..", "..", "doc", "examples", "integration-commands",
)
@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
+@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with sandbox")
@pytest.mark.skipif(
- not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with sandbox"
-)
-@pytest.mark.skipif(
- HAVE_SANDBOX == "chroot",
- reason="This test is not meant to work with chroot sandbox",
+ HAVE_SANDBOX == "chroot", reason="This test is not meant to work with chroot sandbox",
)
@pytest.mark.datafiles(DATA_DIR)
def test_integration_commands_build(cli, datafiles):
@@ -37,12 +29,9 @@ def test_integration_commands_build(cli, datafiles):
# Test running the executable
@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
+@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with sandbox")
@pytest.mark.skipif(
- not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with sandbox"
-)
-@pytest.mark.skipif(
- HAVE_SANDBOX == "chroot",
- reason="This test is not meant to work with chroot sandbox",
+ HAVE_SANDBOX == "chroot", reason="This test is not meant to work with chroot sandbox",
)
@pytest.mark.datafiles(DATA_DIR)
def test_integration_commands_run(cli, datafiles):
@@ -51,8 +40,6 @@ def test_integration_commands_run(cli, datafiles):
result = cli.run(project=project, args=["build", "hello.bst"])
assert result.exit_code == 0
- result = cli.run(
- project=project, args=["shell", "hello.bst", "--", "hello", "pony"]
- )
+ result = cli.run(project=project, args=["shell", "hello.bst", "--", "hello", "pony"])
assert result.exit_code == 0
assert result.output == "Hello pony\n"
diff --git a/tests/examples/junctions.py b/tests/examples/junctions.py
index e93db8a68..c0a83a254 100644
--- a/tests/examples/junctions.py
+++ b/tests/examples/junctions.py
@@ -9,24 +9,14 @@ from buildstream.testing._utils.site import IS_LINUX, MACHINE_ARCH, HAVE_SANDBOX
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "..",
- "..",
- "doc",
- "examples",
- "junctions",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "..", "doc", "examples", "junctions",)
# Test that the project builds successfully
@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
+@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with bubblewrap")
@pytest.mark.skipif(
- not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with bubblewrap"
-)
-@pytest.mark.skipif(
- HAVE_SANDBOX == "chroot",
- reason="This test is not meant to work with chroot sandbox",
+ HAVE_SANDBOX == "chroot", reason="This test is not meant to work with chroot sandbox",
)
@pytest.mark.datafiles(DATA_DIR)
def test_build(cli, datafiles):
@@ -38,12 +28,9 @@ def test_build(cli, datafiles):
# Test the callHello script works as expected.
@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
+@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with bubblewrap")
@pytest.mark.skipif(
- not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with bubblewrap"
-)
-@pytest.mark.skipif(
- HAVE_SANDBOX == "chroot",
- reason="This test is not meant to work with chroot sandbox",
+ HAVE_SANDBOX == "chroot", reason="This test is not meant to work with chroot sandbox",
)
@pytest.mark.datafiles(DATA_DIR)
def test_shell_call_hello(cli, datafiles):
@@ -52,10 +39,7 @@ def test_shell_call_hello(cli, datafiles):
result = cli.run(project=project, args=["build", "callHello.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["shell", "callHello.bst", "--", "/bin/sh", "callHello.sh"],
- )
+ result = cli.run(project=project, args=["shell", "callHello.bst", "--", "/bin/sh", "callHello.sh"],)
result.assert_success()
assert result.output == "Calling hello:\nHello World!\nThis is amhello 1.0.\n"
@@ -68,19 +52,9 @@ def test_open_cross_junction_workspace(cli, tmpdir, datafiles):
workspace_dir = os.path.join(str(tmpdir), "workspace_hello_junction")
result = cli.run(
- project=project,
- args=[
- "workspace",
- "open",
- "--directory",
- workspace_dir,
- "hello-junction.bst:hello.bst",
- ],
+ project=project, args=["workspace", "open", "--directory", workspace_dir, "hello-junction.bst:hello.bst",],
)
result.assert_success()
- result = cli.run(
- project=project,
- args=["workspace", "close", "--remove-dir", "hello-junction.bst:hello.bst"],
- )
+ result = cli.run(project=project, args=["workspace", "close", "--remove-dir", "hello-junction.bst:hello.bst"],)
result.assert_success()
diff --git a/tests/examples/running-commands.py b/tests/examples/running-commands.py
index 177f4e3cc..3d6fd0d26 100644
--- a/tests/examples/running-commands.py
+++ b/tests/examples/running-commands.py
@@ -10,23 +10,15 @@ from buildstream.testing._utils.site import IS_LINUX, MACHINE_ARCH, HAVE_SANDBOX
pytestmark = pytest.mark.integration
DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "..",
- "..",
- "doc",
- "examples",
- "running-commands",
+ os.path.dirname(os.path.realpath(__file__)), "..", "..", "doc", "examples", "running-commands",
)
@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with sandbox")
@pytest.mark.skipif(
- not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with sandbox"
-)
-@pytest.mark.skipif(
- HAVE_SANDBOX == "chroot",
- reason="This test is not meant to work with chroot sandbox",
+ HAVE_SANDBOX == "chroot", reason="This test is not meant to work with chroot sandbox",
)
def test_running_commands_build(cli, datafiles):
project = str(datafiles)
@@ -37,12 +29,9 @@ def test_running_commands_build(cli, datafiles):
# Test running the executable
@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
+@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with sandbox")
@pytest.mark.skipif(
- not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with sandbox"
-)
-@pytest.mark.skipif(
- HAVE_SANDBOX == "chroot",
- reason="This test is not meant to work with chroot sandbox",
+ HAVE_SANDBOX == "chroot", reason="This test is not meant to work with chroot sandbox",
)
@pytest.mark.datafiles(DATA_DIR)
def test_running_commands_run(cli, datafiles):
diff --git a/tests/external_plugins.py b/tests/external_plugins.py
index 3e5684ea5..2123b846b 100644
--- a/tests/external_plugins.py
+++ b/tests/external_plugins.py
@@ -31,17 +31,7 @@ class ExternalPluginRepo:
def clone(self, location):
self._clone_location = os.path.join(location, self.name)
subprocess.run(
- [
- "git",
- "clone",
- "--single-branch",
- "--branch",
- self.ref,
- "--depth",
- "1",
- self.url,
- self._clone_location,
- ]
+ ["git", "clone", "--single-branch", "--branch", self.ref, "--depth", "1", self.url, self._clone_location,]
)
return self._clone_location
@@ -61,9 +51,7 @@ class ExternalPluginRepo:
match_list.extend(matches)
if not match_list:
- raise ValueError(
- "No matches found for patterns {}".format(self._test_match_patterns)
- )
+ raise ValueError("No matches found for patterns {}".format(self._test_match_patterns))
return match_list
diff --git a/tests/format/include.py b/tests/format/include.py
index 9aec83ff5..d61754d82 100644
--- a/tests/format/include.py
+++ b/tests/format/include.py
@@ -18,10 +18,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "include")
@pytest.mark.datafiles(DATA_DIR)
def test_include_project_file(cli, datafiles):
project = os.path.join(str(datafiles), "file")
- result = cli.run(
- project=project,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
- )
+ result = cli.run(project=project, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],)
result.assert_success()
loaded = _yaml.load_data(result.output)
assert loaded.get_bool("included")
@@ -79,16 +76,10 @@ def test_include_junction_file(cli, tmpdir, datafiles):
project = os.path.join(str(datafiles), "junction")
generate_junction(
- tmpdir,
- os.path.join(project, "subproject"),
- os.path.join(project, "junction.bst"),
- store_ref=True,
+ tmpdir, os.path.join(project, "subproject"), os.path.join(project, "junction.bst"), store_ref=True,
)
- result = cli.run(
- project=project,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
- )
+ result = cli.run(project=project, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],)
result.assert_success()
loaded = _yaml.load_data(result.output)
assert loaded.get_bool("included")
@@ -100,17 +91,7 @@ def test_include_junction_options(cli, datafiles):
result = cli.run(
project=project,
- args=[
- "-o",
- "build_arch",
- "x86_64",
- "show",
- "--deps",
- "none",
- "--format",
- "%{vars}",
- "element.bst",
- ],
+ args=["-o", "build_arch", "x86_64", "show", "--deps", "none", "--format", "%{vars}", "element.bst",],
)
result.assert_success()
loaded = _yaml.load_data(result.output)
@@ -135,10 +116,7 @@ def test_junction_element_partial_project_project(cli, tmpdir, datafiles):
element = {"kind": "junction", "sources": [repo.source_config(ref=ref)]}
_yaml.roundtrip_dump(element, junction_path)
- result = cli.run(
- project=project,
- args=["show", "--deps", "none", "--format", "%{vars}", "junction.bst"],
- )
+ result = cli.run(project=project, args=["show", "--deps", "none", "--format", "%{vars}", "junction.bst"],)
result.assert_success()
loaded = _yaml.load_data(result.output)
assert loaded.get_str("included", default=None) is None
@@ -162,10 +140,7 @@ def test_junction_element_not_partial_project_file(cli, tmpdir, datafiles):
element = {"kind": "junction", "sources": [repo.source_config(ref=ref)]}
_yaml.roundtrip_dump(element, junction_path)
- result = cli.run(
- project=project,
- args=["show", "--deps", "none", "--format", "%{vars}", "junction.bst"],
- )
+ result = cli.run(project=project, args=["show", "--deps", "none", "--format", "%{vars}", "junction.bst"],)
result.assert_success()
loaded = _yaml.load_data(result.output)
assert loaded.get_str("included", default=None) is not None
@@ -175,10 +150,7 @@ def test_junction_element_not_partial_project_file(cli, tmpdir, datafiles):
def test_include_element_overrides(cli, datafiles):
project = os.path.join(str(datafiles), "overrides")
- result = cli.run(
- project=project,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
- )
+ result = cli.run(project=project, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],)
result.assert_success()
loaded = _yaml.load_data(result.output)
assert loaded.get_str("manual_main_override", default=None) is not None
@@ -189,10 +161,7 @@ def test_include_element_overrides(cli, datafiles):
def test_include_element_overrides_composition(cli, datafiles):
project = os.path.join(str(datafiles), "overrides")
- result = cli.run(
- project=project,
- args=["show", "--deps", "none", "--format", "%{config}", "element.bst"],
- )
+ result = cli.run(project=project, args=["show", "--deps", "none", "--format", "%{config}", "element.bst"],)
result.assert_success()
loaded = _yaml.load_data(result.output)
assert loaded.get_str_list("build-commands") == ["first", "second"]
@@ -202,10 +171,7 @@ def test_include_element_overrides_composition(cli, datafiles):
def test_list_overide_does_not_fail_upon_first_composition(cli, datafiles):
project = os.path.join(str(datafiles), "eventual_overrides")
- result = cli.run(
- project=project,
- args=["show", "--deps", "none", "--format", "%{public}", "element.bst"],
- )
+ result = cli.run(project=project, args=["show", "--deps", "none", "--format", "%{public}", "element.bst"],)
result.assert_success()
loaded = _yaml.load_data(result.output)
@@ -219,10 +185,7 @@ def test_list_overide_does_not_fail_upon_first_composition(cli, datafiles):
def test_include_element_overrides_sub_include(cli, datafiles):
project = os.path.join(str(datafiles), "sub-include")
- result = cli.run(
- project=project,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
- )
+ result = cli.run(project=project, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],)
result.assert_success()
loaded = _yaml.load_data(result.output)
assert loaded.get_str("included", default=None) is not None
@@ -233,16 +196,10 @@ def test_junction_do_not_use_included_overrides(cli, tmpdir, datafiles):
project = os.path.join(str(datafiles), "overrides-junction")
generate_junction(
- tmpdir,
- os.path.join(project, "subproject"),
- os.path.join(project, "junction.bst"),
- store_ref=True,
+ tmpdir, os.path.join(project, "subproject"), os.path.join(project, "junction.bst"), store_ref=True,
)
- result = cli.run(
- project=project,
- args=["show", "--deps", "none", "--format", "%{vars}", "junction.bst"],
- )
+ result = cli.run(project=project, args=["show", "--deps", "none", "--format", "%{vars}", "junction.bst"],)
result.assert_success()
loaded = _yaml.load_data(result.output)
assert loaded.get_str("main_override", default=None) is not None
@@ -255,17 +212,7 @@ def test_conditional_in_fragment(cli, datafiles):
result = cli.run(
project=project,
- args=[
- "-o",
- "build_arch",
- "x86_64",
- "show",
- "--deps",
- "none",
- "--format",
- "%{vars}",
- "element.bst",
- ],
+ args=["-o", "build_arch", "x86_64", "show", "--deps", "none", "--format", "%{vars}", "element.bst",],
)
result.assert_success()
loaded = _yaml.load_data(result.output)
@@ -277,17 +224,7 @@ def test_inner(cli, datafiles):
project = os.path.join(str(datafiles), "inner")
result = cli.run(
project=project,
- args=[
- "-o",
- "build_arch",
- "x86_64",
- "show",
- "--deps",
- "none",
- "--format",
- "%{vars}",
- "element.bst",
- ],
+ args=["-o", "build_arch", "x86_64", "show", "--deps", "none", "--format", "%{vars}", "element.bst",],
)
result.assert_success()
loaded = _yaml.load_data(result.output)
@@ -298,10 +235,7 @@ def test_inner(cli, datafiles):
def test_recursive_include(cli, datafiles):
project = os.path.join(str(datafiles), "recursive")
- result = cli.run(
- project=project,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
- )
+ result = cli.run(project=project, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.RECURSIVE_INCLUDE)
assert "line 2 column 2" in result.stderr
@@ -311,16 +245,10 @@ def test_local_to_junction(cli, tmpdir, datafiles):
project = os.path.join(str(datafiles), "local_to_junction")
generate_junction(
- tmpdir,
- os.path.join(project, "subproject"),
- os.path.join(project, "junction.bst"),
- store_ref=True,
+ tmpdir, os.path.join(project, "subproject"), os.path.join(project, "junction.bst"), store_ref=True,
)
- result = cli.run(
- project=project,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
- )
+ result = cli.run(project=project, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],)
result.assert_success()
loaded = _yaml.load_data(result.output)
assert loaded.get_bool("included")
diff --git a/tests/format/include_composition.py b/tests/format/include_composition.py
index a840b6bad..4369ecbe8 100644
--- a/tests/format/include_composition.py
+++ b/tests/format/include_composition.py
@@ -21,9 +21,7 @@ def make_includes(basedir):
def test_main_has_priority(tmpdir):
with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump(
- {"(@)": ["a.yml"], "test": ["main"]}, str(tmpdir.join("main.yml"))
- )
+ _yaml.roundtrip_dump({"(@)": ["a.yml"], "test": ["main"]}, str(tmpdir.join("main.yml")))
main = _yaml.load(str(tmpdir.join("main.yml")))
@@ -37,9 +35,7 @@ def test_main_has_priority(tmpdir):
def test_include_cannot_append(tmpdir):
with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump(
- {"(@)": ["a.yml"], "test": ["main"]}, str(tmpdir.join("main.yml"))
- )
+ _yaml.roundtrip_dump({"(@)": ["a.yml"], "test": ["main"]}, str(tmpdir.join("main.yml")))
main = _yaml.load(str(tmpdir.join("main.yml")))
_yaml.roundtrip_dump({"test": {"(>)": ["a"]}}, str(tmpdir.join("a.yml")))
@@ -52,9 +48,7 @@ def test_include_cannot_append(tmpdir):
def test_main_can_append(tmpdir):
with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump(
- {"(@)": ["a.yml"], "test": {"(>)": ["main"]}}, str(tmpdir.join("main.yml"))
- )
+ _yaml.roundtrip_dump({"(@)": ["a.yml"], "test": {"(>)": ["main"]}}, str(tmpdir.join("main.yml")))
main = _yaml.load(str(tmpdir.join("main.yml")))
_yaml.roundtrip_dump({"test": ["a"]}, str(tmpdir.join("a.yml")))
@@ -109,9 +103,7 @@ def test_lastest_sibling_has_priority(tmpdir):
def test_main_keeps_keys(tmpdir):
with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump(
- {"(@)": ["a.yml"], "something": "else"}, str(tmpdir.join("main.yml"))
- )
+ _yaml.roundtrip_dump({"(@)": ["a.yml"], "something": "else"}, str(tmpdir.join("main.yml")))
main = _yaml.load(str(tmpdir.join("main.yml")))
_yaml.roundtrip_dump({"test": ["a"]}, str(tmpdir.join("a.yml")))
@@ -126,18 +118,14 @@ def test_overwrite_directive_on_later_composite(tmpdir):
with make_includes(str(tmpdir)) as includes:
_yaml.roundtrip_dump(
- {"(@)": ["a.yml", "b.yml"], "test": {"(=)": ["Overwritten"]}},
- str(tmpdir.join("main.yml")),
+ {"(@)": ["a.yml", "b.yml"], "test": {"(=)": ["Overwritten"]}}, str(tmpdir.join("main.yml")),
)
main = _yaml.load(str(tmpdir.join("main.yml")))
# a.yml
_yaml.roundtrip_dump(
- {
- "test": ["some useless", "list", "to be overwritten"],
- "foo": "should not be present",
- },
+ {"test": ["some useless", "list", "to be overwritten"], "foo": "should not be present",},
str(tmpdir.join("a.yml")),
)
diff --git a/tests/format/invalid_keys.py b/tests/format/invalid_keys.py
index 40a7b7c34..ce1e2e487 100644
--- a/tests/format/invalid_keys.py
+++ b/tests/format/invalid_keys.py
@@ -25,9 +25,4 @@ def test_compositied_node_fails_usefully(cli, datafiles, element, location):
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
assert "synthetic node" not in result.stderr
- assert (
- "{} [{}]: Dictionary did not contain expected key 'path'".format(
- element, location
- )
- in result.stderr
- )
+ assert "{} [{}]: Dictionary did not contain expected key 'path'".format(element, location) in result.stderr
diff --git a/tests/format/junctions.py b/tests/format/junctions.py
index eedf4d69b..269f2a525 100644
--- a/tests/format/junctions.py
+++ b/tests/format/junctions.py
@@ -19,8 +19,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "junctions"
def copy_subprojects(project, datafiles, subprojects):
for subproject in subprojects:
shutil.copytree(
- os.path.join(str(datafiles), subproject),
- os.path.join(str(project), subproject),
+ os.path.join(str(datafiles), subproject), os.path.join(str(project), subproject),
)
@@ -44,10 +43,7 @@ def test_simple_build(cli, tmpdir, datafiles):
# Build, checkout
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the checkout contains the expected files from both projects
@@ -96,10 +92,7 @@ def test_workspaced_junction_missing_project_conf(cli, datafiles):
workspace_dir = project / "base_workspace"
copy_subprojects(project, datafiles, ["base"])
- result = cli.run(
- project=project,
- args=["workspace", "open", "base.bst", "--directory", workspace_dir],
- )
+ result = cli.run(project=project, args=["workspace", "open", "base.bst", "--directory", workspace_dir],)
print(result)
result.assert_success()
@@ -166,10 +159,7 @@ def test_nested_simple(cli, tmpdir, datafiles):
# Build, checkout
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the checkout contains the expected files from all subprojects
@@ -193,10 +183,7 @@ def test_nested_double(cli, tmpdir, datafiles):
# Build, checkout
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the checkout contains the expected files from all subprojects
@@ -284,10 +271,7 @@ def test_options_default(cli, tmpdir, datafiles):
# Build, checkout
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
assert os.path.exists(os.path.join(checkoutdir, "pony.txt"))
@@ -304,10 +288,7 @@ def test_options(cli, tmpdir, datafiles):
# Build, checkout
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
assert not os.path.exists(os.path.join(checkoutdir, "pony.txt"))
@@ -324,10 +305,7 @@ def test_options_inherit(cli, tmpdir, datafiles):
# Build, checkout
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
assert not os.path.exists(os.path.join(checkoutdir, "pony.txt"))
@@ -370,10 +348,7 @@ def test_git_build(cli, tmpdir, datafiles):
# Build (with implicit fetch of subproject), checkout
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the checkout contains the expected files from both projects
@@ -434,14 +409,7 @@ def test_build_git_cross_junction_names(cli, tmpdir, datafiles):
result = cli.run(project=project, args=["build", "base.bst:target.bst"])
result.assert_success()
result = cli.run(
- project=project,
- args=[
- "artifact",
- "checkout",
- "base.bst:target.bst",
- "--directory",
- checkoutdir,
- ],
+ project=project, args=["artifact", "checkout", "base.bst:target.bst", "--directory", checkoutdir,],
)
result.assert_success()
@@ -457,10 +425,7 @@ def test_config_target(cli, tmpdir, datafiles):
# Build, checkout
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the checkout contains the expected files from sub-sub-project
@@ -474,10 +439,7 @@ def test_invalid_sources_and_target(cli, tmpdir, datafiles):
result = cli.run(project=project, args=["show", "invalid-source-target.bst"])
result.assert_main_error(ErrorDomain.ELEMENT, None)
- assert (
- "junction elements cannot define both 'sources' and 'target' config option"
- in result.stderr
- )
+ assert "junction elements cannot define both 'sources' and 'target' config option" in result.stderr
@pytest.mark.datafiles(DATA_DIR)
@@ -493,9 +455,7 @@ def test_invalid_target_name(cli, tmpdir, datafiles):
result = cli.run(project=project, args=["show", "subsubproject-junction.bst"])
result.assert_main_error(ErrorDomain.ELEMENT, None)
- assert (
- "junction elements cannot target an element with the same name" in result.stderr
- )
+ assert "junction elements cannot target an element with the same name" in result.stderr
# We cannot exhaustively test all possible ways in which this can go wrong, so
@@ -508,7 +468,4 @@ def test_invalid_target_format(cli, tmpdir, datafiles, target):
result = cli.run(project=project, args=["show", target])
result.assert_main_error(ErrorDomain.ELEMENT, None)
- assert (
- "'target' option must be in format '{junction-name}:{element-name}'"
- in result.stderr
- )
+ assert "'target' option must be in format '{junction-name}:{element-name}'" in result.stderr
diff --git a/tests/format/listdirectiveerrors.py b/tests/format/listdirectiveerrors.py
index e17dd7e8c..66b6c738b 100644
--- a/tests/format/listdirectiveerrors.py
+++ b/tests/format/listdirectiveerrors.py
@@ -12,44 +12,28 @@ DATA_DIR = os.path.dirname(os.path.realpath(__file__))
@pytest.mark.datafiles(DATA_DIR)
def test_project_error(cli, datafiles):
- project = os.path.join(
- datafiles.dirname, datafiles.basename, "list-directive-error-project"
- )
+ project = os.path.join(datafiles.dirname, datafiles.basename, "list-directive-error-project")
result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.TRAILING_LIST_DIRECTIVE)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize(
- "target", [("variables.bst"), ("environment.bst"), ("config.bst"), ("public.bst")]
-)
+@pytest.mark.parametrize("target", [("variables.bst"), ("environment.bst"), ("config.bst"), ("public.bst")])
def test_element_error(cli, datafiles, target):
- project = os.path.join(
- datafiles.dirname, datafiles.basename, "list-directive-error-element"
- )
- result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", target],
- )
+ project = os.path.join(datafiles.dirname, datafiles.basename, "list-directive-error-element")
+ result = cli.run(project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", target],)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.TRAILING_LIST_DIRECTIVE)
@pytest.mark.datafiles(DATA_DIR)
def test_project_composite_error(cli, datafiles):
- project = os.path.join(
- datafiles.dirname, datafiles.basename, "list-directive-type-error"
- )
+ project = os.path.join(datafiles.dirname, datafiles.basename, "list-directive-type-error")
result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.ILLEGAL_COMPOSITE)
diff --git a/tests/format/optionarch.py b/tests/format/optionarch.py
index 69faee347..1d2cdc627 100644
--- a/tests/format/optionarch.py
+++ b/tests/format/optionarch.py
@@ -53,9 +53,7 @@ def test_unsupported_arch(cli, datafiles):
with override_platform_uname(machine="x86_64"):
project = os.path.join(datafiles.dirname, datafiles.basename, "option-arch")
result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@@ -65,13 +63,9 @@ def test_unsupported_arch(cli, datafiles):
def test_alias(cli, datafiles):
with override_platform_uname(machine="arm"):
- project = os.path.join(
- datafiles.dirname, datafiles.basename, "option-arch-alias"
- )
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-arch-alias")
result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
)
result.assert_success()
@@ -83,9 +77,7 @@ def test_unknown_host_arch(cli, datafiles):
with override_platform_uname(machine="x86_128"):
project = os.path.join(datafiles.dirname, datafiles.basename, "option-arch")
result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
)
result.assert_main_error(ErrorDomain.PLATFORM, None)
@@ -96,9 +88,7 @@ def test_unknown_project_arch(cli, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename, "option-arch-unknown")
result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
diff --git a/tests/format/optionbool.py b/tests/format/optionbool.py
index 275be61cf..58d353a2d 100644
--- a/tests/format/optionbool.py
+++ b/tests/format/optionbool.py
@@ -36,17 +36,7 @@ def test_conditional_cli(cli, datafiles, target, option, expected):
result = cli.run(
project=project,
silent=True,
- args=[
- "--option",
- "pony",
- option,
- "show",
- "--deps",
- "none",
- "--format",
- "%{vars}",
- target,
- ],
+ args=["--option", "pony", option, "show", "--deps", "none", "--format", "%{vars}", target,],
)
result.assert_success()
@@ -58,17 +48,12 @@ def test_conditional_cli(cli, datafiles, target, option, expected):
#
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize(
- "target,option,expected",
- [("element.bst", True, "a pony"), ("element.bst", False, "not pony"),],
+ "target,option,expected", [("element.bst", True, "a pony"), ("element.bst", False, "not pony"),],
)
def test_conditional_config(cli, datafiles, target, option, expected):
project = os.path.join(datafiles.dirname, datafiles.basename, "option-bool")
cli.configure({"projects": {"test": {"options": {"pony": option}}}})
- result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", target],
- )
+ result = cli.run(project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", target],)
result.assert_success()
loaded = _yaml.load_data(result.output)
@@ -82,31 +67,17 @@ def test_invalid_value_cli(cli, datafiles, cli_option):
result = cli.run(
project=project,
silent=True,
- args=[
- "--option",
- "pony",
- cli_option,
- "show",
- "--deps",
- "none",
- "--format",
- "%{vars}",
- "element.bst",
- ],
+ args=["--option", "pony", cli_option, "show", "--deps", "none", "--format", "%{vars}", "element.bst",],
)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize(
- "config_option", [("pony"), (["its", "a", "list"]), ({"dic": "tionary"})]
-)
+@pytest.mark.parametrize("config_option", [("pony"), (["its", "a", "list"]), ({"dic": "tionary"})])
def test_invalid_value_config(cli, datafiles, config_option):
project = os.path.join(datafiles.dirname, datafiles.basename, "option-bool")
cli.configure({"projects": {"test": {"options": {"pony": config_option}}}})
result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
diff --git a/tests/format/optioneltmask.py b/tests/format/optioneltmask.py
index 2530999bf..77eaf2c9b 100644
--- a/tests/format/optioneltmask.py
+++ b/tests/format/optioneltmask.py
@@ -25,17 +25,7 @@ def test_conditional_cli(cli, datafiles, target, value, expected):
result = cli.run(
project=project,
silent=True,
- args=[
- "--option",
- "debug_elements",
- value,
- "show",
- "--deps",
- "none",
- "--format",
- "%{vars}",
- target,
- ],
+ args=["--option", "debug_elements", value, "show", "--deps", "none", "--format", "%{vars}", target,],
)
result.assert_success()
@@ -55,11 +45,7 @@ def test_conditional_cli(cli, datafiles, target, value, expected):
def test_conditional_config(cli, datafiles, target, value, expected):
project = os.path.join(datafiles.dirname, datafiles.basename, "option-element-mask")
cli.configure({"projects": {"test": {"options": {"debug_elements": value}}}})
- result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", target],
- )
+ result = cli.run(project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", target],)
result.assert_success()
loaded = _yaml.load_data(result.output)
@@ -68,14 +54,8 @@ def test_conditional_config(cli, datafiles, target, value, expected):
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_declaration(cli, datafiles):
- project = os.path.join(
- datafiles.dirname, datafiles.basename, "option-element-mask-invalid"
- )
- result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", "pony.bst"],
- )
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-element-mask-invalid")
+ result = cli.run(project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", "pony.bst"],)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
diff --git a/tests/format/optionenum.py b/tests/format/optionenum.py
index ee6a4fa0e..89d2d0cd9 100644
--- a/tests/format/optionenum.py
+++ b/tests/format/optionenum.py
@@ -30,17 +30,7 @@ def test_conditional_cli(cli, datafiles, target, option, value, expected):
result = cli.run(
project=project,
silent=True,
- args=[
- "--option",
- option,
- value,
- "show",
- "--deps",
- "none",
- "--format",
- "%{vars}",
- target,
- ],
+ args=["--option", option, value, "show", "--deps", "none", "--format", "%{vars}", target,],
)
result.assert_success()
@@ -65,11 +55,7 @@ def test_conditional_cli(cli, datafiles, target, option, value, expected):
def test_conditional_config(cli, datafiles, target, option, value, expected):
project = os.path.join(datafiles.dirname, datafiles.basename, "option-enum")
cli.configure({"projects": {"test": {"options": {option: value}}}})
- result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", target],
- )
+ result = cli.run(project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", target],)
result.assert_success()
loaded = _yaml.load_data(result.output)
@@ -82,32 +68,18 @@ def test_invalid_value_cli(cli, datafiles):
result = cli.run(
project=project,
silent=True,
- args=[
- "--option",
- "brother",
- "giraffy",
- "show",
- "--deps",
- "none",
- "--format",
- "%{vars}",
- "element.bst",
- ],
+ args=["--option", "brother", "giraffy", "show", "--deps", "none", "--format", "%{vars}", "element.bst",],
)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize(
- "config_option", [("giraffy"), (["its", "a", "list"]), ({"dic": "tionary"})]
-)
+@pytest.mark.parametrize("config_option", [("giraffy"), (["its", "a", "list"]), ({"dic": "tionary"})])
def test_invalid_value_config(cli, datafiles, config_option):
project = os.path.join(datafiles.dirname, datafiles.basename, "option-enum")
cli.configure({"projects": {"test": {"options": {"brother": config_option}}}})
result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@@ -116,8 +88,6 @@ def test_invalid_value_config(cli, datafiles, config_option):
def test_missing_values(cli, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename, "option-enum-missing")
result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
diff --git a/tests/format/optionexports.py b/tests/format/optionexports.py
index 90bbace97..486562f01 100644
--- a/tests/format/optionexports.py
+++ b/tests/format/optionexports.py
@@ -30,17 +30,7 @@ def test_export(cli, datafiles, option_name, option_value, var_name, var_value):
result = cli.run(
project=project,
silent=True,
- args=[
- "--option",
- option_name,
- option_value,
- "show",
- "--deps",
- "none",
- "--format",
- "%{vars}",
- "element.bst",
- ],
+ args=["--option", option_name, option_value, "show", "--deps", "none", "--format", "%{vars}", "element.bst",],
)
result.assert_success()
diff --git a/tests/format/optionflags.py b/tests/format/optionflags.py
index 72d175bf8..f2ea129a7 100644
--- a/tests/format/optionflags.py
+++ b/tests/format/optionflags.py
@@ -33,17 +33,7 @@ def test_conditional_cli(cli, datafiles, target, option, value, expected):
result = cli.run(
project=project,
silent=True,
- args=[
- "--option",
- option,
- value,
- "show",
- "--deps",
- "none",
- "--format",
- "%{vars}",
- target,
- ],
+ args=["--option", option, value, "show", "--deps", "none", "--format", "%{vars}", target,],
)
result.assert_success()
@@ -65,11 +55,7 @@ def test_conditional_cli(cli, datafiles, target, option, value, expected):
def test_conditional_config(cli, datafiles, target, option, value, expected):
project = os.path.join(datafiles.dirname, datafiles.basename, "option-flags")
cli.configure({"projects": {"test": {"options": {option: value}}}})
- result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", target],
- )
+ result = cli.run(project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", target],)
result.assert_success()
loaded = _yaml.load_data(result.output)
@@ -79,27 +65,14 @@ def test_conditional_config(cli, datafiles, target, option, value, expected):
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize(
"cli_option",
- [
- ("giraffy"), # Not a valid animal for the farm option
- ("horsy pony"), # Does not include comma separators
- ],
+ [("giraffy"), ("horsy pony"),], # Not a valid animal for the farm option # Does not include comma separators
)
def test_invalid_value_cli(cli, datafiles, cli_option):
project = os.path.join(datafiles.dirname, datafiles.basename, "option-flags")
result = cli.run(
project=project,
silent=True,
- args=[
- "--option",
- "farm",
- cli_option,
- "show",
- "--deps",
- "none",
- "--format",
- "%{vars}",
- "element.bst",
- ],
+ args=["--option", "farm", cli_option, "show", "--deps", "none", "--format", "%{vars}", "element.bst",],
)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@@ -117,21 +90,15 @@ def test_invalid_value_config(cli, datafiles, config_option):
project = os.path.join(datafiles.dirname, datafiles.basename, "option-flags")
cli.configure({"projects": {"test": {"options": {"farm": config_option}}}})
result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(DATA_DIR)
def test_missing_values(cli, datafiles):
- project = os.path.join(
- datafiles.dirname, datafiles.basename, "option-flags-missing"
- )
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-flags-missing")
result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
diff --git a/tests/format/optionos.py b/tests/format/optionos.py
index cb75db71f..b0f9cbd09 100644
--- a/tests/format/optionos.py
+++ b/tests/format/optionos.py
@@ -52,9 +52,7 @@ def test_unsupported_arch(cli, datafiles):
with override_platform_uname(system="AIX"):
project = os.path.join(datafiles.dirname, datafiles.basename, "option-os")
result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
diff --git a/tests/format/options.py b/tests/format/options.py
index c2f4584d4..9376cd0d2 100644
--- a/tests/format/options.py
+++ b/tests/format/options.py
@@ -14,12 +14,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "options")
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize(
"project_dir",
- [
- ("invalid-name-spaces"),
- ("invalid-name-dashes"),
- ("invalid-name-plus"),
- ("invalid-name-leading-number"),
- ],
+ [("invalid-name-spaces"), ("invalid-name-dashes"), ("invalid-name-plus"), ("invalid-name-leading-number"),],
)
def test_invalid_option_name(cli, datafiles, project_dir):
project = os.path.join(datafiles.dirname, datafiles.basename, project_dir)
@@ -28,9 +23,7 @@ def test_invalid_option_name(cli, datafiles, project_dir):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize(
- "project_dir", [("invalid-variable-name-spaces"), ("invalid-variable-name-plus"),]
-)
+@pytest.mark.parametrize("project_dir", [("invalid-variable-name-spaces"), ("invalid-variable-name-plus"),])
def test_invalid_variable_name(cli, datafiles, project_dir):
project = os.path.join(datafiles.dirname, datafiles.basename, project_dir)
result = cli.run(project=project, silent=True, args=["show", "element.bst"])
@@ -45,17 +38,7 @@ def test_invalid_option_type(cli, datafiles):
result = cli.run(
project=project,
silent=True,
- args=[
- "--option",
- "opt",
- "funny",
- "show",
- "--deps",
- "none",
- "--format",
- "%{vars}",
- "element.bst",
- ],
+ args=["--option", "opt", "funny", "show", "--deps", "none", "--format", "%{vars}", "element.bst",],
)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@@ -68,17 +51,7 @@ def test_invalid_option_cli(cli, datafiles):
result = cli.run(
project=project,
silent=True,
- args=[
- "--option",
- "fart",
- "funny",
- "show",
- "--deps",
- "none",
- "--format",
- "%{vars}",
- "element.bst",
- ],
+ args=["--option", "fart", "funny", "show", "--deps", "none", "--format", "%{vars}", "element.bst",],
)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@@ -88,9 +61,7 @@ def test_invalid_option_config(cli, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename, "simple-condition")
cli.configure({"projects": {"test": {"options": {"fart": "Hello"}}}})
result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@@ -99,9 +70,7 @@ def test_invalid_option_config(cli, datafiles):
def test_invalid_expression(cli, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename, "invalid-expression")
result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.EXPRESSION_FAILED)
@@ -110,9 +79,7 @@ def test_invalid_expression(cli, datafiles):
def test_undefined(cli, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename, "undefined-variable")
result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.EXPRESSION_FAILED)
@@ -121,17 +88,13 @@ def test_undefined(cli, datafiles):
def test_invalid_condition(cli, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename, "invalid-condition")
result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize(
- "opt_option,expected_prefix", [("False", "/usr"), ("True", "/opt"),]
-)
+@pytest.mark.parametrize("opt_option,expected_prefix", [("False", "/usr"), ("True", "/opt"),])
def test_simple_conditional(cli, datafiles, opt_option, expected_prefix):
project = os.path.join(datafiles.dirname, datafiles.basename, "simple-condition")
@@ -139,17 +102,7 @@ def test_simple_conditional(cli, datafiles, opt_option, expected_prefix):
result = cli.run(
project=project,
silent=True,
- args=[
- "--option",
- "opt",
- opt_option,
- "show",
- "--deps",
- "none",
- "--format",
- "%{vars}",
- "element.bst",
- ],
+ args=["--option", "opt", opt_option, "show", "--deps", "none", "--format", "%{vars}", "element.bst",],
)
result.assert_success()
loaded = _yaml.load_data(result.output)
@@ -159,12 +112,7 @@ def test_simple_conditional(cli, datafiles, opt_option, expected_prefix):
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize(
"debug,logging,expected",
- [
- ("False", "False", "False"),
- ("True", "False", "False"),
- ("False", "True", "False"),
- ("True", "True", "True"),
- ],
+ [("False", "False", "False"), ("True", "False", "False"), ("False", "True", "False"), ("True", "True", "True"),],
)
def test_nested_conditional(cli, datafiles, debug, logging, expected):
project = os.path.join(datafiles.dirname, datafiles.basename, "nested-condition")
@@ -196,17 +144,10 @@ def test_nested_conditional(cli, datafiles, debug, logging, expected):
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize(
"debug,logging,expected",
- [
- ("False", "False", "False"),
- ("True", "False", "False"),
- ("False", "True", "False"),
- ("True", "True", "True"),
- ],
+ [("False", "False", "False"), ("True", "False", "False"), ("False", "True", "False"), ("True", "True", "True"),],
)
def test_compound_and_conditional(cli, datafiles, debug, logging, expected):
- project = os.path.join(
- datafiles.dirname, datafiles.basename, "compound-and-condition"
- )
+ project = os.path.join(datafiles.dirname, datafiles.basename, "compound-and-condition")
# Test with the opt option set
result = cli.run(
@@ -235,17 +176,10 @@ def test_compound_and_conditional(cli, datafiles, debug, logging, expected):
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize(
"debug,logging,expected",
- [
- ("False", "False", "False"),
- ("True", "False", "True"),
- ("False", "True", "True"),
- ("True", "True", "True"),
- ],
+ [("False", "False", "False"), ("True", "False", "True"), ("False", "True", "True"), ("True", "True", "True"),],
)
def test_compound_or_conditional(cli, datafiles, debug, logging, expected):
- project = os.path.join(
- datafiles.dirname, datafiles.basename, "compound-or-condition"
- )
+ project = os.path.join(datafiles.dirname, datafiles.basename, "compound-or-condition")
# Test with the opt option set
result = cli.run(
@@ -278,17 +212,7 @@ def test_deep_nesting_level1(cli, datafiles, option, expected):
result = cli.run(
project=project,
silent=True,
- args=[
- "--option",
- "pony",
- option,
- "show",
- "--deps",
- "none",
- "--format",
- "%{public}",
- "element.bst",
- ],
+ args=["--option", "pony", option, "show", "--deps", "none", "--format", "%{public}", "element.bst",],
)
result.assert_success()
loaded = _yaml.load_data(result.output)
@@ -305,17 +229,7 @@ def test_deep_nesting_level2(cli, datafiles, option, expected):
result = cli.run(
project=project,
silent=True,
- args=[
- "--option",
- "pony",
- option,
- "show",
- "--deps",
- "none",
- "--format",
- "%{public}",
- "element-deeper.bst",
- ],
+ args=["--option", "pony", option, "show", "--deps", "none", "--format", "%{public}", "element-deeper.bst",],
)
result.assert_success()
loaded = _yaml.load_data(result.output)
diff --git a/tests/format/project.py b/tests/format/project.py
index b9171865e..8934ff5d0 100644
--- a/tests/format/project.py
+++ b/tests/format/project.py
@@ -85,9 +85,7 @@ def test_load_default_project(cli, datafiles):
def test_load_project_from_subdir(cli, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename, "project-from-subdir")
result = cli.run(
- project=project,
- cwd=os.path.join(project, "subdirectory"),
- args=["show", "--format", "%{env}", "manual.bst"],
+ project=project, cwd=os.path.join(project, "subdirectory"), args=["show", "--format", "%{env}", "manual.bst"],
)
result.assert_success()
@@ -130,9 +128,7 @@ def test_element_path_not_a_directory(cli, datafiles):
for _file_type in filetypegenerator.generate_file_types(path):
result = cli.run(project=project, args=["workspace", "list"])
if not os.path.isdir(path):
- result.assert_main_error(
- ErrorDomain.LOAD, LoadErrorReason.PROJ_PATH_INVALID_KIND
- )
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.PROJ_PATH_INVALID_KIND)
else:
result.assert_success()
@@ -151,9 +147,7 @@ def test_local_plugin_not_directory(cli, datafiles):
for _file_type in filetypegenerator.generate_file_types(path):
result = cli.run(project=project, args=["workspace", "list"])
if not os.path.isdir(path):
- result.assert_main_error(
- ErrorDomain.LOAD, LoadErrorReason.PROJ_PATH_INVALID_KIND
- )
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.PROJ_PATH_INVALID_KIND)
else:
result.assert_success()
@@ -182,9 +176,7 @@ def test_plugin_no_load_ref(cli, datafiles, ref_storage):
config = {
"name": "test",
"ref-storage": ref_storage,
- "plugins": [
- {"origin": "local", "path": "plugins", "sources": {"noloadref": 0}}
- ],
+ "plugins": [{"origin": "local", "path": "plugins", "sources": {"noloadref": 0}}],
}
_yaml.roundtrip_dump(config, os.path.join(project, "project.conf"))
@@ -200,9 +192,7 @@ def test_plugin_no_load_ref(cli, datafiles, ref_storage):
@pytest.mark.datafiles(DATA_DIR)
def test_plugin_preflight_error(cli, datafiles):
- project = os.path.join(
- datafiles.dirname, datafiles.basename, "plugin-preflight-error"
- )
+ project = os.path.join(datafiles.dirname, datafiles.basename, "plugin-preflight-error")
result = cli.run(project=project, args=["source", "fetch", "error.bst"])
result.assert_main_error(ErrorDomain.SOURCE, "the-preflight-error")
@@ -224,34 +214,14 @@ def test_project_refs_options(cli, datafiles):
result1 = cli.run(
project=project,
silent=True,
- args=[
- "--option",
- "test",
- "True",
- "show",
- "--deps",
- "none",
- "--format",
- "%{key}",
- "target.bst",
- ],
+ args=["--option", "test", "True", "show", "--deps", "none", "--format", "%{key}", "target.bst",],
)
result1.assert_success()
result2 = cli.run(
project=project,
silent=True,
- args=[
- "--option",
- "test",
- "False",
- "show",
- "--deps",
- "none",
- "--format",
- "%{key}",
- "target.bst",
- ],
+ args=["--option", "test", "False", "show", "--deps", "none", "--format", "%{key}", "target.bst",],
)
result2.assert_success()
diff --git a/tests/format/project/plugin-preflight-error/errorplugin/preflighterror.py b/tests/format/project/plugin-preflight-error/errorplugin/preflighterror.py
index f0d66e3c7..762be8f36 100644
--- a/tests/format/project/plugin-preflight-error/errorplugin/preflighterror.py
+++ b/tests/format/project/plugin-preflight-error/errorplugin/preflighterror.py
@@ -9,8 +9,7 @@ class PreflightErrorSource(Source):
# Raise a preflight error unconditionally
raise SourceError(
- "Unsatisfied requirements in preflight, raising this error",
- reason="the-preflight-error",
+ "Unsatisfied requirements in preflight, raising this error", reason="the-preflight-error",
)
def get_unique_key(self):
diff --git a/tests/format/projectoverrides.py b/tests/format/projectoverrides.py
index bba630c54..e2aaa4173 100644
--- a/tests/format/projectoverrides.py
+++ b/tests/format/projectoverrides.py
@@ -7,20 +7,14 @@ from buildstream import _yaml
from buildstream.testing.runcli import cli # pylint: disable=unused-import
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)), "project-overrides"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project-overrides")
@pytest.mark.datafiles(DATA_DIR)
def test_prepend_configure_commands(cli, datafiles):
- project = os.path.join(
- datafiles.dirname, datafiles.basename, "prepend-configure-commands"
- )
+ project = os.path.join(datafiles.dirname, datafiles.basename, "prepend-configure-commands")
result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{config}", "element.bst"],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{config}", "element.bst"],
)
result.assert_success()
diff --git a/tests/format/variables.py b/tests/format/variables.py
index 31f969f4b..343fe7237 100644
--- a/tests/format/variables.py
+++ b/tests/format/variables.py
@@ -29,11 +29,7 @@ def print_warning(msg):
@pytest.mark.parametrize(
"target,varname,expected",
[
- (
- "autotools.bst",
- "make-install",
- 'make -j1 DESTDIR="/buildstream-install" install',
- ),
+ ("autotools.bst", "make-install", 'make -j1 DESTDIR="/buildstream-install" install',),
(
"cmake.bst",
"cmake",
@@ -44,34 +40,17 @@ def print_warning(msg):
(
"distutils.bst",
"python-install",
- 'python3 ./setup.py install --prefix "/usr" \\\n'
- + '--root "/buildstream-install"',
- ),
- (
- "makemaker.bst",
- "configure",
- "perl Makefile.PL PREFIX=/buildstream-install/usr",
- ),
- (
- "modulebuild.bst",
- "configure",
- 'perl Build.PL --prefix "/buildstream-install/usr"',
- ),
- (
- "qmake.bst",
- "make-install",
- 'make -j1 INSTALL_ROOT="/buildstream-install" install',
+ 'python3 ./setup.py install --prefix "/usr" \\\n' + '--root "/buildstream-install"',
),
+ ("makemaker.bst", "configure", "perl Makefile.PL PREFIX=/buildstream-install/usr",),
+ ("modulebuild.bst", "configure", 'perl Build.PL --prefix "/buildstream-install/usr"',),
+ ("qmake.bst", "make-install", 'make -j1 INSTALL_ROOT="/buildstream-install" install',),
],
)
@pytest.mark.datafiles(os.path.join(DATA_DIR, "defaults"))
def test_defaults(cli, datafiles, target, varname, expected):
project = str(datafiles)
- result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", target],
- )
+ result = cli.run(project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", target],)
result.assert_success()
result_vars = _yaml.load_data(result.output)
assert result_vars.get_str(varname) == expected
@@ -83,11 +62,7 @@ def test_defaults(cli, datafiles, target, varname, expected):
@pytest.mark.parametrize(
"target,varname,expected",
[
- (
- "autotools.bst",
- "make-install",
- 'make -j1 DESTDIR="/custom/install/root" install',
- ),
+ ("autotools.bst", "make-install", 'make -j1 DESTDIR="/custom/install/root" install',),
(
"cmake.bst",
"cmake",
@@ -98,34 +73,17 @@ def test_defaults(cli, datafiles, target, varname, expected):
(
"distutils.bst",
"python-install",
- 'python3 ./setup.py install --prefix "/opt" \\\n'
- + '--root "/custom/install/root"',
- ),
- (
- "makemaker.bst",
- "configure",
- "perl Makefile.PL PREFIX=/custom/install/root/opt",
- ),
- (
- "modulebuild.bst",
- "configure",
- 'perl Build.PL --prefix "/custom/install/root/opt"',
- ),
- (
- "qmake.bst",
- "make-install",
- 'make -j1 INSTALL_ROOT="/custom/install/root" install',
+ 'python3 ./setup.py install --prefix "/opt" \\\n' + '--root "/custom/install/root"',
),
+ ("makemaker.bst", "configure", "perl Makefile.PL PREFIX=/custom/install/root/opt",),
+ ("modulebuild.bst", "configure", 'perl Build.PL --prefix "/custom/install/root/opt"',),
+ ("qmake.bst", "make-install", 'make -j1 INSTALL_ROOT="/custom/install/root" install',),
],
)
@pytest.mark.datafiles(os.path.join(DATA_DIR, "overrides"))
def test_overrides(cli, datafiles, target, varname, expected):
project = str(datafiles)
- result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{vars}", target],
- )
+ result = cli.run(project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", target],)
result.assert_success()
result_vars = _yaml.load_data(result.output)
assert result_vars.get_str(varname) == expected
@@ -135,21 +93,14 @@ def test_overrides(cli, datafiles, target, varname, expected):
@pytest.mark.datafiles(os.path.join(DATA_DIR, "missing_variables"))
def test_missing_variable(cli, datafiles, element):
project = str(datafiles)
- result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{config}", element],
- )
+ result = cli.run(project=project, silent=True, args=["show", "--deps", "none", "--format", "%{config}", element],)
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.UNRESOLVED_VARIABLE)
@pytest.mark.timeout(3, method="signal")
@pytest.mark.datafiles(os.path.join(DATA_DIR, "cyclic_variables"))
def test_cyclic_variables(cli, datafiles):
- print_warning(
- "Performing cyclic test, if this test times out it will "
- + "exit the test sequence"
- )
+ print_warning("Performing cyclic test, if this test times out it will " + "exit the test sequence")
project = str(datafiles)
result = cli.run(project=project, silent=True, args=["build", "cyclic.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.RECURSIVE_VARIABLE)
@@ -169,9 +120,7 @@ def test_use_of_protected_var_project_conf(cli, datafiles, protected_var):
_yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
result = cli.run(project=project, args=["build", "target.bst"])
- result.assert_main_error(
- ErrorDomain.LOAD, LoadErrorReason.PROTECTED_VARIABLE_REDEFINED
- )
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.PROTECTED_VARIABLE_REDEFINED)
@pytest.mark.parametrize("protected_var", PROTECTED_VARIABLES)
@@ -191,9 +140,7 @@ def test_use_of_protected_var_element_overrides(cli, datafiles, protected_var):
_yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
result = cli.run(project=project, args=["build", "target.bst"])
- result.assert_main_error(
- ErrorDomain.LOAD, LoadErrorReason.PROTECTED_VARIABLE_REDEFINED
- )
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.PROTECTED_VARIABLE_REDEFINED)
@pytest.mark.parametrize("protected_var", PROTECTED_VARIABLES)
@@ -208,6 +155,4 @@ def test_use_of_protected_var_in_element(cli, datafiles, protected_var):
_yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
result = cli.run(project=project, args=["build", "target.bst"])
- result.assert_main_error(
- ErrorDomain.LOAD, LoadErrorReason.PROTECTED_VARIABLE_REDEFINED
- )
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.PROTECTED_VARIABLE_REDEFINED)
diff --git a/tests/frontend/artifact_delete.py b/tests/frontend/artifact_delete.py
index 9389788b3..a93d99ef6 100644
--- a/tests/frontend/artifact_delete.py
+++ b/tests/frontend/artifact_delete.py
@@ -74,9 +74,7 @@ def test_artifact_delete_artifact(cli, tmpdir, datafiles):
result.assert_success()
# Check that the ARTIFACT is no longer in the cache
- assert not os.path.exists(
- os.path.join(local_cache, "cas", "refs", "heads", artifact)
- )
+ assert not os.path.exists(os.path.join(local_cache, "cas", "refs", "heads", artifact))
# Test the `bst artifact delete` command with multiple, different arguments.
@@ -190,9 +188,7 @@ def test_artifact_delete_elements_build_deps(cli, tmpdir, datafiles):
for state in bdep_states.values():
assert state == "cached"
- result = cli.run(
- project=project, args=["artifact", "delete", "--deps", "build", element]
- )
+ result = cli.run(project=project, args=["artifact", "delete", "--deps", "build", element])
result.assert_success()
# Assert that the build deps have been deleted and that the artifact remains cached
@@ -227,20 +223,14 @@ def test_artifact_delete_artifacts_build_deps(cli, tmpdir, datafiles):
bdep_refs = []
bdep_states = cli.get_element_states(project, [element], deps="build")
for bdep in bdep_states.keys():
- bdep_refs.append(
- os.path.join(
- "test", _get_normal_name(bdep), cli.get_element_key(project, bdep)
- )
- )
+ bdep_refs.append(os.path.join("test", _get_normal_name(bdep), cli.get_element_key(project, bdep)))
# Assert build dependencies are cached
for ref in bdep_refs:
assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", ref))
# Delete the artifact
- result = cli.run(
- project=project, args=["artifact", "delete", "--deps", "build", artifact]
- )
+ result = cli.run(project=project, args=["artifact", "delete", "--deps", "build", artifact])
result.assert_success()
# Check that the artifact's build deps are no longer in the cache
@@ -265,9 +255,7 @@ def test_artifact_delete_artifact_with_deps_all_fails(cli, tmpdir, datafiles):
artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)
# Try to delete the artifact with all of its dependencies
- result = cli.run(
- project=project, args=["artifact", "delete", "--deps", "all", artifact]
- )
+ result = cli.run(project=project, args=["artifact", "delete", "--deps", "all", artifact])
result.assert_main_error(ErrorDomain.STREAM, None)
assert "Error: '--deps all' is not supported for artifact refs" in result.stderr
diff --git a/tests/frontend/artifact_list_contents.py b/tests/frontend/artifact_list_contents.py
index ddd2d50a6..7e8bb6508 100644
--- a/tests/frontend/artifact_list_contents.py
+++ b/tests/frontend/artifact_list_contents.py
@@ -37,9 +37,7 @@ def test_artifact_list_exact_contents_element(cli, datafiles):
assert result.exit_code == 0
# List the contents via the element name
- result = cli.run(
- project=project, args=["artifact", "list-contents", "import-bin.bst"]
- )
+ result = cli.run(project=project, args=["artifact", "list-contents", "import-bin.bst"])
assert result.exit_code == 0
expected_output = "import-bin.bst:\n" "\tusr\n" "\tusr/bin\n" "\tusr/bin/hello\n\n"
assert expected_output in result.output
@@ -57,14 +55,10 @@ def test_artifact_list_exact_contents_ref(cli, datafiles):
assert result.exit_code == 0
# List the contents via the key
- result = cli.run(
- project=project, args=["artifact", "list-contents", "test/import-bin/" + key]
- )
+ result = cli.run(project=project, args=["artifact", "list-contents", "test/import-bin/" + key])
assert result.exit_code == 0
- expected_output = (
- "test/import-bin/" + key + ":\n" "\tusr\n" "\tusr/bin\n" "\tusr/bin/hello\n\n"
- )
+ expected_output = "test/import-bin/" + key + ":\n" "\tusr\n" "\tusr/bin\n" "\tusr/bin/hello\n\n"
assert expected_output in result.output
@@ -106,9 +100,7 @@ def test_artifact_list_exact_contents_element_long(cli, datafiles):
assert result.exit_code == 0
# List the contents via the element name
- result = cli.run(
- project=project, args=["artifact", "list-contents", "--long", "import-bin.bst"]
- )
+ result = cli.run(project=project, args=["artifact", "list-contents", "--long", "import-bin.bst"])
assert result.exit_code == 0
expected_output = (
"import-bin.bst:\n"
@@ -132,10 +124,7 @@ def test_artifact_list_exact_contents_ref_long(cli, datafiles):
assert result.exit_code == 0
# List the contents via the key
- result = cli.run(
- project=project,
- args=["artifact", "list-contents", "-l", "test/import-bin/" + key],
- )
+ result = cli.run(project=project, args=["artifact", "list-contents", "-l", "test/import-bin/" + key],)
assert result.exit_code == 0
expected_output = (
diff --git a/tests/frontend/artifact_log.py b/tests/frontend/artifact_log.py
index 44c35aa3d..806a3b437 100644
--- a/tests/frontend/artifact_log.py
+++ b/tests/frontend/artifact_log.py
@@ -36,15 +36,7 @@ def test_artifact_log(cli, datafiles):
result = cli.run(
project=project,
silent=True,
- args=[
- "--no-colors",
- "show",
- "--deps",
- "none",
- "--format",
- "%{full-key}",
- "target.bst",
- ],
+ args=["--no-colors", "show", "--deps", "none", "--format", "%{full-key}", "target.bst",],
)
key = result.output.strip()
@@ -89,10 +81,7 @@ def test_artifact_log_files(cli, datafiles):
assert not os.path.exists(import_bin)
# Run the command and ensure the file now exists
- result = cli.run(
- project=project,
- args=["artifact", "log", "--out", logfiles, "target.bst", "import-bin.bst"],
- )
+ result = cli.run(project=project, args=["artifact", "log", "--out", logfiles, "target.bst", "import-bin.bst"],)
assert result.exit_code == 0
assert os.path.exists(logfiles)
assert os.path.exists(target)
diff --git a/tests/frontend/artifact_show.py b/tests/frontend/artifact_show.py
index c47222e18..6f824c0e4 100644
--- a/tests/frontend/artifact_show.py
+++ b/tests/frontend/artifact_show.py
@@ -79,9 +79,7 @@ def test_artifact_show_element_missing_deps(cli, tmpdir, datafiles):
result = cli.run(project=project, args=["artifact", "delete", dependency])
result.assert_success()
- result = cli.run(
- project=project, args=["artifact", "show", "--deps", "all", element]
- )
+ result = cli.run(project=project, args=["artifact", "show", "--deps", "all", element])
result.assert_success()
assert "not cached {}".format(dependency) in result.output
assert "cached {}".format(element) in result.output
diff --git a/tests/frontend/buildcheckout.py b/tests/frontend/buildcheckout.py
index f3080269d..7772c48ef 100644
--- a/tests/frontend/buildcheckout.py
+++ b/tests/frontend/buildcheckout.py
@@ -33,12 +33,7 @@ def strict_args(args, strict):
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize(
"strict,hardlinks",
- [
- ("strict", "copies"),
- ("strict", "hardlinks"),
- ("non-strict", "copies"),
- ("non-strict", "hardlinks"),
- ],
+ [("strict", "copies"), ("strict", "hardlinks"), ("non-strict", "copies"), ("non-strict", "hardlinks"),],
)
def test_build_checkout(datafiles, cli, strict, hardlinks):
project = str(datafiles)
@@ -115,9 +110,7 @@ def test_build_invalid_suffix_dep(datafiles, cli, strict, hardlinks):
project = str(datafiles)
# target2.bst depends on an element called target.foo
- result = cli.run(
- project=project, args=strict_args(["build", "target2.bst"], strict)
- )
+ result = cli.run(project=project, args=strict_args(["build", "target2.bst"], strict))
result.assert_main_error(ErrorDomain.LOAD, "bad-element-suffix")
@@ -134,9 +127,7 @@ def test_build_invalid_filename_chars(datafiles, cli):
}
_yaml.roundtrip_dump(element, os.path.join(project, "elements", element_name))
- result = cli.run(
- project=project, args=strict_args(["build", element_name], "non-strict")
- )
+ result = cli.run(project=project, args=strict_args(["build", element_name], "non-strict"))
result.assert_main_error(ErrorDomain.LOAD, "bad-characters-in-name")
@@ -154,10 +145,7 @@ def test_build_invalid_filename_chars_dep(datafiles, cli):
}
_yaml.roundtrip_dump(element, os.path.join(project, "elements", element_name))
- result = cli.run(
- project=project,
- args=strict_args(["build", "invalid-chars-in-dep.bst"], "non-strict"),
- )
+ result = cli.run(project=project, args=strict_args(["build", "invalid-chars-in-dep.bst"], "non-strict"),)
result.assert_main_error(ErrorDomain.LOAD, "bad-characters-in-name")
@@ -179,16 +167,7 @@ def test_build_checkout_deps(datafiles, cli, deps):
# Now check it out
result = cli.run(
- project=project,
- args=[
- "artifact",
- "checkout",
- element_name,
- "--deps",
- deps,
- "--directory",
- checkout,
- ],
+ project=project, args=["artifact", "checkout", element_name, "--deps", deps, "--directory", checkout,],
)
result.assert_success()
@@ -220,10 +199,7 @@ def test_build_checkout_unbuilt(datafiles, cli):
checkout = os.path.join(cli.directory, "checkout")
# Check that checking out an unbuilt element fails nicely
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkout],)
result.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
@@ -246,10 +222,7 @@ def test_build_checkout_compression_no_tar(datafiles, cli):
]
result = cli.run(project=project, args=checkout_args)
- assert (
- "ERROR: --compression can only be provided if --tar is provided"
- in result.stderr
- )
+ assert "ERROR: --compression can only be provided if --tar is provided" in result.stderr
assert result.exit_code != 0
@@ -466,10 +439,7 @@ def test_build_checkout_invalid_ref(datafiles, cli):
]
result = cli.run(project=project, args=checkout_args)
- assert (
- "Error while staging dependencies into a sandbox: 'No artifacts to stage'"
- in result.stderr
- )
+ assert "Error while staging dependencies into a sandbox: 'No artifacts to stage'" in result.stderr
@pytest.mark.datafiles(DATA_DIR)
@@ -613,9 +583,7 @@ def test_build_checkout_tarball_links(datafiles, cli):
# of the symlink and the test therefore doesn't have the correct content
os.symlink(
os.path.join("..", "basicfile"),
- os.path.join(
- project, "files", "files-and-links", "basicfolder", "basicsymlink"
- ),
+ os.path.join(project, "files", "files-and-links", "basicfolder", "basicsymlink"),
)
result = cli.run(project=project, args=["build", "import-links.bst"])
@@ -632,10 +600,7 @@ def test_build_checkout_tarball_links(datafiles, cli):
tar = tarfile.open(name=checkout, mode="r:")
tar.extractall(extract)
- assert (
- open(os.path.join(extract, "basicfolder", "basicsymlink")).read()
- == "file contents\n"
- )
+ assert open(os.path.join(extract, "basicfolder", "basicsymlink")).read() == "file contents\n"
@pytest.mark.datafiles(DATA_DIR)
@@ -648,9 +613,7 @@ def test_build_checkout_links(datafiles, cli):
# of the symlink and the test therefore doesn't have the correct content
os.symlink(
os.path.join("..", "basicfile"),
- os.path.join(
- project, "files", "files-and-links", "basicfolder", "basicsymlink"
- ),
+ os.path.join(project, "files", "files-and-links", "basicfolder", "basicsymlink"),
)
result = cli.run(project=project, args=["build", "import-links.bst"])
@@ -671,10 +634,7 @@ def test_build_checkout_links(datafiles, cli):
result = cli.run(project=project, args=checkout_args)
result.assert_success()
- assert (
- open(os.path.join(checkout, "basicfolder", "basicsymlink")).read()
- == "file contents\n"
- )
+ assert open(os.path.join(checkout, "basicfolder", "basicsymlink")).read() == "file contents\n"
@pytest.mark.datafiles(DATA_DIR)
@@ -836,9 +796,7 @@ def test_unfetched_junction(cli, tmpdir, datafiles, ref_storage):
configure_project(project, {"ref-storage": ref_storage})
# Create a repo to hold the subproject and generate a junction element for it
- ref = generate_junction(
- tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline")
- )
+ ref = generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline"))
# Create a stack element to depend on a cross junction element
#
@@ -891,10 +849,7 @@ def test_build_checkout_junction(cli, tmpdir, datafiles):
assert cli.get_element_state(project, "junction-dep.bst") == "cached"
# Now check it out
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],)
result.assert_success()
# Assert the content of /etc/animal.conf
@@ -934,10 +889,7 @@ def test_build_checkout_junction_default_targets(cli, tmpdir, datafiles):
assert cli.get_element_state(project, "junction-dep.bst") == "cached"
# Now check it out
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],)
result.assert_success()
# Assert the content of /etc/animal.conf
@@ -970,10 +922,7 @@ def test_build_checkout_workspaced_junction(cli, tmpdir, datafiles):
# Now open a workspace on the junction
#
- result = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace, "junction.bst"],
- )
+ result = cli.run(project=project, args=["workspace", "open", "--directory", workspace, "junction.bst"],)
result.assert_success()
filename = os.path.join(workspace, "files", "etc-files", "etc", "animal.conf")
@@ -996,10 +945,7 @@ def test_build_checkout_workspaced_junction(cli, tmpdir, datafiles):
assert cli.get_element_state(project, "junction-dep.bst") == "cached"
# Now check it out
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],)
result.assert_success()
# Assert the workspace modified content of /etc/animal.conf
@@ -1023,14 +969,7 @@ def test_build_checkout_cross_junction(datafiles, cli, tmpdir):
result.assert_success()
result = cli.run(
- project=project,
- args=[
- "artifact",
- "checkout",
- "junction.bst:import-etc.bst",
- "--directory",
- checkout,
- ],
+ project=project, args=["artifact", "checkout", "junction.bst:import-etc.bst", "--directory", checkout,],
)
result.assert_success()
@@ -1063,10 +1002,7 @@ def test_build_junction_short_notation(cli, tmpdir, datafiles):
assert cli.get_element_state(project, "junction-dep.bst") == "cached"
# Now check it out
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],)
result.assert_success()
# Assert the content of /etc/animal.conf
@@ -1105,10 +1041,7 @@ def test_build_junction_short_notation_filename(cli, tmpdir, datafiles):
assert cli.get_element_state(project, "junction-dep.bst") == "cached"
# Now check it out
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],)
result.assert_success()
# Assert the content of /etc/animal.conf
@@ -1133,9 +1066,7 @@ def test_build_junction_short_notation_with_junction(cli, tmpdir, datafiles):
# colon (:) as the separator
element = {
"kind": "stack",
- "depends": [
- {"filename": "junction.bst:import-etc.bst", "junction": "junction.bst",}
- ],
+ "depends": [{"filename": "junction.bst:import-etc.bst", "junction": "junction.bst",}],
}
_yaml.roundtrip_dump(element, element_path)
@@ -1202,30 +1133,17 @@ def test_partial_artifact_checkout_fetch(cli, datafiles, tmpdir):
# A push artifact cache means we have to pull to push to them, so
# delete some blobs from that CAS such that we have to fetch
- digest = utils.sha256sum(
- os.path.join(project, "files", "bin-files", "usr", "bin", "hello")
- )
+ digest = utils.sha256sum(os.path.join(project, "files", "bin-files", "usr", "bin", "hello"))
objpath = os.path.join(cli.directory, "cas", "objects", digest[:2], digest[2:])
os.unlink(objpath)
# Verify that the build-only dependency is not (complete) in the local cache
- result = cli.run(
- project=project,
- args=["artifact", "checkout", input_name, "--directory", checkout_dir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", input_name, "--directory", checkout_dir],)
result.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
# Verify that the pull method fetches relevant artifacts in order to stage
result = cli.run(
- project=project,
- args=[
- "artifact",
- "checkout",
- "--pull",
- input_name,
- "--directory",
- checkout_dir,
- ],
+ project=project, args=["artifact", "checkout", "--pull", input_name, "--directory", checkout_dir,],
)
result.assert_success()
@@ -1244,17 +1162,7 @@ def test_partial_checkout_fail(tmpdir, datafiles, cli):
cli.configure({"artifacts": {"url": share.repo, "push": True}})
res = cli.run(
- project=project,
- args=[
- "artifact",
- "checkout",
- "--pull",
- build_elt,
- "--directory",
- checkout_dir,
- ],
+ project=project, args=["artifact", "checkout", "--pull", build_elt, "--directory", checkout_dir,],
)
res.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
- assert re.findall(
- r"Remote \((\S+)\) does not have artifact (\S+) cached", res.stderr
- )
+ assert re.findall(r"Remote \((\S+)\) does not have artifact (\S+) cached", res.stderr)
diff --git a/tests/frontend/completions.py b/tests/frontend/completions.py
index 075fd70f1..3603543c7 100644
--- a/tests/frontend/completions.py
+++ b/tests/frontend/completions.py
@@ -84,13 +84,7 @@ MIXED_ELEMENTS = PROJECT_ELEMENTS + INVALID_ELEMENTS
def assert_completion(cli, cmd, word_idx, expected, cwd=None):
result = cli.run(
- project=".",
- cwd=cwd,
- env={
- "_BST_COMPLETION": "complete",
- "COMP_WORDS": cmd,
- "COMP_CWORD": str(word_idx),
- },
+ project=".", cwd=cwd, env={"_BST_COMPLETION": "complete", "COMP_WORDS": cmd, "COMP_CWORD": str(word_idx),},
)
words = []
if result.output:
@@ -105,14 +99,7 @@ def assert_completion(cli, cmd, word_idx, expected, cwd=None):
def assert_completion_failed(cli, cmd, word_idx, expected, cwd=None):
- result = cli.run(
- cwd=cwd,
- env={
- "_BST_COMPLETION": "complete",
- "COMP_WORDS": cmd,
- "COMP_CWORD": str(word_idx),
- },
- )
+ result = cli.run(cwd=cwd, env={"_BST_COMPLETION": "complete", "COMP_WORDS": cmd, "COMP_CWORD": str(word_idx),},)
words = []
if result.output:
words = result.output.splitlines()
@@ -182,29 +169,14 @@ def test_option_choice(cli, cmd, word_idx, expected):
# Note that elements/ and files/ are partial completions and
# as such do not come with trailing whitespace
("bst --config ", 2, ["cache/", "elements/", "files/", "project.conf "], None),
- (
- "bst --log-file ",
- 2,
- ["cache/", "elements/", "files/", "project.conf "],
- None,
- ),
+ ("bst --log-file ", 2, ["cache/", "elements/", "files/", "project.conf "], None,),
("bst --config f", 2, ["files/"], None),
("bst --log-file f", 2, ["files/"], None),
("bst --config files", 2, ["files/bin-files/", "files/dev-files/"], None),
("bst --log-file files", 2, ["files/bin-files/", "files/dev-files/"], None),
("bst --config files/", 2, ["files/bin-files/", "files/dev-files/"], None),
- (
- "bst --log-file elements/",
- 2,
- [os.path.join("elements", e) + " " for e in PROJECT_ELEMENTS],
- None,
- ),
- (
- "bst --config ../",
- 2,
- ["../cache/", "../elements/", "../files/", "../project.conf "],
- "files",
- ),
+ ("bst --log-file elements/", 2, [os.path.join("elements", e) + " " for e in PROJECT_ELEMENTS], None,),
+ ("bst --config ../", 2, ["../cache/", "../elements/", "../files/", "../project.conf "], "files",),
(
"bst --config ../elements/",
2,
@@ -251,11 +223,7 @@ def test_option_directory(datafiles, cli, cmd, word_idx, expected, subdir):
"project",
"bst build com",
2,
- [
- "compose-all.bst ",
- "compose-include-bin.bst ",
- "compose-exclude-dev.bst ",
- ],
+ ["compose-all.bst ", "compose-include-bin.bst ", "compose-exclude-dev.bst ",],
None,
),
# When running from the files subdir
@@ -264,83 +232,37 @@ def test_option_directory(datafiles, cli, cmd, word_idx, expected, subdir):
"project",
"bst build com",
2,
- [
- "compose-all.bst ",
- "compose-include-bin.bst ",
- "compose-exclude-dev.bst ",
- ],
+ ["compose-all.bst ", "compose-include-bin.bst ", "compose-exclude-dev.bst ",],
"files",
),
# When passing the project directory
- (
- "project",
- "bst --directory ../ show ",
- 4,
- [e + " " for e in PROJECT_ELEMENTS],
- "files",
- ),
+ ("project", "bst --directory ../ show ", 4, [e + " " for e in PROJECT_ELEMENTS], "files",),
(
"project",
"bst --directory ../ build com",
4,
- [
- "compose-all.bst ",
- "compose-include-bin.bst ",
- "compose-exclude-dev.bst ",
- ],
+ ["compose-all.bst ", "compose-include-bin.bst ", "compose-exclude-dev.bst ",],
"files",
),
# Also try multi arguments together
- (
- "project",
- "bst --directory ../ artifact checkout t ",
- 5,
- ["target.bst "],
- "files",
- ),
- (
- "project",
- "bst --directory ../ artifact checkout --directory ",
- 6,
- ["bin-files/", "dev-files/"],
- "files",
- ),
+ ("project", "bst --directory ../ artifact checkout t ", 5, ["target.bst "], "files",),
+ ("project", "bst --directory ../ artifact checkout --directory ", 6, ["bin-files/", "dev-files/"], "files",),
# When running in the project directory
- (
- "no-element-path",
- "bst show ",
- 2,
- [e + " " for e in PROJECT_ELEMENTS] + ["files/"],
- None,
- ),
+ ("no-element-path", "bst show ", 2, [e + " " for e in PROJECT_ELEMENTS] + ["files/"], None,),
(
"no-element-path",
"bst build com",
2,
- [
- "compose-all.bst ",
- "compose-include-bin.bst ",
- "compose-exclude-dev.bst ",
- ],
+ ["compose-all.bst ", "compose-include-bin.bst ", "compose-exclude-dev.bst ",],
None,
),
# When running from the files subdir
- (
- "no-element-path",
- "bst show ",
- 2,
- [e + " " for e in PROJECT_ELEMENTS] + ["files/"],
- "files",
- ),
+ ("no-element-path", "bst show ", 2, [e + " " for e in PROJECT_ELEMENTS] + ["files/"], "files",),
(
"no-element-path",
"bst build com",
2,
- [
- "compose-all.bst ",
- "compose-include-bin.bst ",
- "compose-exclude-dev.bst ",
- ],
+ ["compose-all.bst ", "compose-include-bin.bst ", "compose-exclude-dev.bst ",],
"files",
),
# When passing the project directory
@@ -352,32 +274,16 @@ def test_option_directory(datafiles, cli, cmd, word_idx, expected, subdir):
"files",
),
("no-element-path", "bst --directory ../ show f", 4, ["files/"], "files"),
- (
- "no-element-path",
- "bst --directory ../ show files/",
- 4,
- ["files/bin-files/", "files/dev-files/"],
- "files",
- ),
+ ("no-element-path", "bst --directory ../ show files/", 4, ["files/bin-files/", "files/dev-files/"], "files",),
(
"no-element-path",
"bst --directory ../ build com",
4,
- [
- "compose-all.bst ",
- "compose-include-bin.bst ",
- "compose-exclude-dev.bst ",
- ],
+ ["compose-all.bst ", "compose-include-bin.bst ", "compose-exclude-dev.bst ",],
"files",
),
# Also try multi arguments together
- (
- "no-element-path",
- "bst --directory ../ artifact checkout t ",
- 5,
- ["target.bst "],
- "files",
- ),
+ ("no-element-path", "bst --directory ../ artifact checkout t ", 5, ["target.bst "], "files",),
(
"no-element-path",
"bst --directory ../ artifact checkout --directory ",
@@ -402,18 +308,10 @@ def test_argument_element(datafiles, cli, project, cmd, word_idx, expected, subd
"project,cmd,word_idx,expected,subdir",
[
# When element has invalid suffix
- (
- "project",
- "bst --directory ../ show ",
- 4,
- [e + " " for e in MIXED_ELEMENTS],
- "files",
- )
+ ("project", "bst --directory ../ show ", 4, [e + " " for e in MIXED_ELEMENTS], "files",)
],
)
-def test_argument_element_invalid(
- datafiles, cli, project, cmd, word_idx, expected, subdir
-):
+def test_argument_element_invalid(datafiles, cli, project, cmd, word_idx, expected, subdir):
cwd = os.path.join(str(datafiles), project)
if subdir:
cwd = os.path.join(cwd, subdir)
@@ -442,9 +340,7 @@ def test_argument_artifact(cli, datafiles):
project = str(datafiles)
# Build an import element with no dependencies (as there will only be ONE cache key)
- result = cli.run(
- project=project, args=["build", "import-bin.bst"]
- ) # Has no dependencies
+ result = cli.run(project=project, args=["build", "import-bin.bst"]) # Has no dependencies
result.assert_success()
# Get the key and the artifact ref ($project/$element_name/$key)
@@ -459,23 +355,15 @@ def test_argument_artifact(cli, datafiles):
result = cli.run(
project=project,
cwd=project,
- env={
- "_BST_COMPLETION": "complete",
- "COMP_WORDS": cmd,
- "COMP_CWORD": str(word_idx),
- },
+ env={"_BST_COMPLETION": "complete", "COMP_WORDS": cmd, "COMP_CWORD": str(word_idx),},
)
if result.output:
- words = (
- result.output.splitlines()
- ) # This leaves an extra space on each e.g. ['foo.bst ']
+ words = result.output.splitlines() # This leaves an extra space on each e.g. ['foo.bst ']
words = [word.strip() for word in words]
if i == 0:
- expected = PROJECT_ELEMENTS + [
- artifact
- ] # We should now be able to see the artifact
+ expected = PROJECT_ELEMENTS + [artifact] # We should now be able to see the artifact
elif i == 1:
expected = ["target.bst", artifact]
elif i == 2:
diff --git a/tests/frontend/compose_splits.py b/tests/frontend/compose_splits.py
index 3a308a9f5..d333b031e 100644
--- a/tests/frontend/compose_splits.py
+++ b/tests/frontend/compose_splits.py
@@ -9,9 +9,7 @@ from buildstream.testing.runcli import cli # pylint: disable=unused-import
DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
-@pytest.mark.parametrize(
- "target", [("compose-include-bin.bst"), ("compose-exclude-dev.bst")]
-)
+@pytest.mark.parametrize("target", [("compose-include-bin.bst"), ("compose-exclude-dev.bst")])
@pytest.mark.datafiles(DATA_DIR)
def test_compose_splits(datafiles, cli, target):
project = str(datafiles)
@@ -22,9 +20,7 @@ def test_compose_splits(datafiles, cli, target):
result.assert_success()
# Now check it out
- result = cli.run(
- project=project, args=["artifact", "checkout", target, "--directory", checkout]
- )
+ result = cli.run(project=project, args=["artifact", "checkout", target, "--directory", checkout])
result.assert_success()
# Check that the executable hello file is found in the checkout
diff --git a/tests/frontend/configurable_warnings.py b/tests/frontend/configurable_warnings.py
index f756aae2b..52cb03cec 100644
--- a/tests/frontend/configurable_warnings.py
+++ b/tests/frontend/configurable_warnings.py
@@ -19,11 +19,7 @@ def get_project(fatal_warnings):
"name": "test",
"element-path": "elements",
"plugins": [
- {
- "origin": "local",
- "path": "plugins",
- "elements": {"warninga": 0, "warningb": 0, "corewarn": 0,},
- }
+ {"origin": "local", "path": "plugins", "elements": {"warninga": 0, "warningb": 0, "corewarn": 0,},}
],
"fatal-warnings": fatal_warnings,
}
@@ -53,9 +49,7 @@ def build_project(datafiles, fatal_warnings):
("warningb.bst", [CoreWarnings.OVERLAPS], False, None),
],
)
-def test_fatal_warnings(
- cli, datafiles, element_name, fatal_warnings, expect_fatal, error_domain
-):
+def test_fatal_warnings(cli, datafiles, element_name, fatal_warnings, expect_fatal, error_domain):
if HAVE_SANDBOX == "buildbox" and error_domain != ErrorDomain.STREAM:
pytest.xfail()
project_path = build_project(datafiles, fatal_warnings)
diff --git a/tests/frontend/configuredwarning/plugins/corewarn.py b/tests/frontend/configuredwarning/plugins/corewarn.py
index 5e43115f7..7ca8daed9 100644
--- a/tests/frontend/configuredwarning/plugins/corewarn.py
+++ b/tests/frontend/configuredwarning/plugins/corewarn.py
@@ -20,8 +20,7 @@ class CoreWarn(Element):
def assemble(self, sandbox):
self.warn(
- "Testing: CoreWarning produced during assemble",
- warning_token=CoreWarnings.OVERLAPS,
+ "Testing: CoreWarning produced during assemble", warning_token=CoreWarnings.OVERLAPS,
)
diff --git a/tests/frontend/configuredwarning/plugins/warninga.py b/tests/frontend/configuredwarning/plugins/warninga.py
index dde90bb42..9fd8dc61b 100644
--- a/tests/frontend/configuredwarning/plugins/warninga.py
+++ b/tests/frontend/configuredwarning/plugins/warninga.py
@@ -20,9 +20,7 @@ class WarningA(Element):
pass
def assemble(self, sandbox):
- self.warn(
- "Testing: warning-a produced during assemble", warning_token=WARNING_A
- )
+ self.warn("Testing: warning-a produced during assemble", warning_token=WARNING_A)
def setup():
diff --git a/tests/frontend/configuredwarning/plugins/warningb.py b/tests/frontend/configuredwarning/plugins/warningb.py
index d9229f0d0..64d25ef39 100644
--- a/tests/frontend/configuredwarning/plugins/warningb.py
+++ b/tests/frontend/configuredwarning/plugins/warningb.py
@@ -20,9 +20,7 @@ class WarningB(Element):
pass
def assemble(self, sandbox):
- self.warn(
- "Testing: warning-b produced during assemble", warning_token=WARNING_B
- )
+ self.warn("Testing: warning-b produced during assemble", warning_token=WARNING_B)
def setup():
diff --git a/tests/frontend/consistencyerror/plugins/consistencyerror.py b/tests/frontend/consistencyerror/plugins/consistencyerror.py
index 656bd981c..125baf39c 100644
--- a/tests/frontend/consistencyerror/plugins/consistencyerror.py
+++ b/tests/frontend/consistencyerror/plugins/consistencyerror.py
@@ -14,9 +14,7 @@ class ConsistencyErrorSource(Source):
def get_consistency(self):
# Raise an error unconditionally
- raise SourceError(
- "Something went terribly wrong", reason="the-consistency-error"
- )
+ raise SourceError("Something went terribly wrong", reason="the-consistency-error")
def get_ref(self):
return None
diff --git a/tests/frontend/cross_junction_workspace.py b/tests/frontend/cross_junction_workspace.py
index 90e68d8ac..3ac3e8814 100644
--- a/tests/frontend/cross_junction_workspace.py
+++ b/tests/frontend/cross_junction_workspace.py
@@ -27,8 +27,7 @@ def prepare_junction_project(cli, tmpdir):
import_ref = import_repo.create(str(import_dir))
_yaml.roundtrip_dump(
- {"kind": "import", "sources": [import_repo.source_config(ref=import_ref)]},
- str(sub_project.join("data.bst")),
+ {"kind": "import", "sources": [import_repo.source_config(ref=import_ref)]}, str(sub_project.join("data.bst")),
)
sub_repo_dir = tmpdir.join("sub_repo")
@@ -37,8 +36,7 @@ def prepare_junction_project(cli, tmpdir):
sub_ref = sub_repo.create(str(sub_project))
_yaml.roundtrip_dump(
- {"kind": "junction", "sources": [sub_repo.source_config(ref=sub_ref)]},
- str(main_project.join("sub.bst")),
+ {"kind": "junction", "sources": [sub_repo.source_config(ref=sub_ref)]}, str(main_project.join("sub.bst")),
)
args = ["source", "fetch", "sub.bst"]
diff --git a/tests/frontend/fetch.py b/tests/frontend/fetch.py
index d34764d13..10a420ddd 100644
--- a/tests/frontend/fetch.py
+++ b/tests/frontend/fetch.py
@@ -85,9 +85,7 @@ def test_unfetched_junction(cli, tmpdir, datafiles, strict, ref_storage):
cli.configure({"projects": {"test": {"strict": strict}}})
# Create a repo to hold the subproject and generate a junction element for it
- ref = generate_junction(
- tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline")
- )
+ ref = generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline"))
# Create a stack element to depend on a cross junction element
#
diff --git a/tests/frontend/help.py b/tests/frontend/help.py
index 3bbae44f5..de3b0e678 100644
--- a/tests/frontend/help.py
+++ b/tests/frontend/help.py
@@ -9,8 +9,7 @@ def assert_help(cli_output):
expected_start = "Usage: "
if not cli_output.startswith(expected_start):
raise AssertionError(
- "Help output expected to begin with '{}',".format(expected_start)
- + " output was: {}".format(cli_output)
+ "Help output expected to begin with '{}',".format(expected_start) + " output was: {}".format(cli_output)
)
@@ -21,16 +20,7 @@ def test_help_main(cli):
@pytest.mark.parametrize(
- "command",
- [
- ("artifact"),
- ("build"),
- ("checkout"),
- ("shell"),
- ("show"),
- ("source"),
- ("workspace"),
- ],
+ "command", [("artifact"), ("build"), ("checkout"), ("shell"), ("show"), ("source"), ("workspace"),],
)
def test_help(cli, command):
result = cli.run(args=[command, "--help"])
diff --git a/tests/frontend/init.py b/tests/frontend/init.py
index 01686b7c6..aef9d148e 100644
--- a/tests/frontend/init.py
+++ b/tests/frontend/init.py
@@ -29,16 +29,7 @@ def test_all_options(cli, tmpdir):
project_path = os.path.join(project, "project.conf")
result = cli.run(
- args=[
- "init",
- "--project-name",
- "foo",
- "--format-version",
- "2",
- "--element-path",
- "ponies",
- project,
- ]
+ args=["init", "--project-name", "foo", "--format-version", "2", "--element-path", "ponies", project,]
)
result.assert_success()
@@ -96,9 +87,7 @@ def test_relative_path_directory_as_argument(cli, tmpdir):
def test_set_directory_and_directory_as_argument(cli, tmpdir):
- result = cli.run(
- args=["-C", "/foo/bar", "init", "--project-name", "foo", "/boo/far"]
- )
+ result = cli.run(args=["-C", "/foo/bar", "init", "--project-name", "foo", "/boo/far"])
result.assert_main_error(ErrorDomain.APP, "init-with-set-directory")
@@ -110,33 +99,13 @@ def test_bad_project_name(cli, tmpdir, project_name):
@pytest.mark.parametrize("format_version", [(str(-1)), (str(BST_FORMAT_VERSION + 1))])
def test_bad_format_version(cli, tmpdir, format_version):
- result = cli.run(
- args=[
- "init",
- "--project-name",
- "foo",
- "--format-version",
- format_version,
- str(tmpdir),
- ]
- )
+ result = cli.run(args=["init", "--project-name", "foo", "--format-version", format_version, str(tmpdir),])
result.assert_main_error(ErrorDomain.APP, "invalid-format-version")
-@pytest.mark.parametrize(
- "element_path", [("/absolute/path"), ("../outside/of/project")]
-)
+@pytest.mark.parametrize("element_path", [("/absolute/path"), ("../outside/of/project")])
def test_bad_element_path(cli, tmpdir, element_path):
- result = cli.run(
- args=[
- "init",
- "--project-name",
- "foo",
- "--element-path",
- element_path,
- str(tmpdir),
- ]
- )
+ result = cli.run(args=["init", "--project-name", "foo", "--element-path", element_path, str(tmpdir),])
result.assert_main_error(ErrorDomain.APP, "invalid-element-path")
@@ -154,9 +123,7 @@ def test_element_path_interactive(cli, tmp_path, monkeypatch, element_path):
def create(cls, *args, **kwargs):
return DummyInteractiveApp(*args, **kwargs)
- def _init_project_interactive(
- self, *args, **kwargs
- ): # pylint: disable=arguments-differ
+ def _init_project_interactive(self, *args, **kwargs): # pylint: disable=arguments-differ
return ("project_name", "0", element_path)
monkeypatch.setattr(App, "create", DummyInteractiveApp.create)
diff --git a/tests/frontend/large_directory.py b/tests/frontend/large_directory.py
index e01d5f3c6..ea29fd1ca 100644
--- a/tests/frontend/large_directory.py
+++ b/tests/frontend/large_directory.py
@@ -37,9 +37,7 @@ def limit_grpc_message_length(limit):
orig_insecure_channel = grpc.insecure_channel
def new_insecure_channel(target):
- return orig_insecure_channel(
- target, options=(("grpc.max_send_message_length", limit),)
- )
+ return orig_insecure_channel(target, options=(("grpc.max_send_message_length", limit),))
grpc.insecure_channel = new_insecure_channel
try:
@@ -71,9 +69,7 @@ def test_large_directory(cli, tmpdir, datafiles):
# Enforce 1 MB gRPC message limit
with limit_grpc_message_length(MAX_MESSAGE_LENGTH):
# Build and push
- result = cli.run(
- project=project, args=["build", "import-large-directory.bst"]
- )
+ result = cli.run(project=project, args=["build", "import-large-directory.bst"])
result.assert_success()
# Assert that we are now cached locally
diff --git a/tests/frontend/logging.py b/tests/frontend/logging.py
index d4f8d0d23..27ff88352 100644
--- a/tests/frontend/logging.py
+++ b/tests/frontend/logging.py
@@ -37,9 +37,7 @@ def test_default_logging(cli, tmpdir, datafiles):
result = cli.run(project=project, args=["source", "fetch", element_name])
result.assert_success()
- m = re.search(
- r"\[\d\d:\d\d:\d\d\]\[\s*\]\[.*\] SUCCESS Checking sources", result.stderr
- )
+ m = re.search(r"\[\d\d:\d\d:\d\d\]\[\s*\]\[.*\] SUCCESS Checking sources", result.stderr)
assert m is not None
@@ -51,8 +49,7 @@ def test_custom_logging(cli, tmpdir, datafiles):
element_name = "fetch-test-git.bst"
custom_log_format = (
- "%{elapsed},%{elapsed-us},%{wallclock},%{wallclock-us},"
- "%{key},%{element},%{action},%{message}"
+ "%{elapsed},%{elapsed-us},%{wallclock},%{wallclock-us}," "%{key},%{element},%{action},%{message}"
)
user_config = {"logging": {"message-format": custom_log_format}}
cli.configure(user_config)
@@ -72,8 +69,7 @@ def test_custom_logging(cli, tmpdir, datafiles):
result.assert_success()
m = re.search(
- r"\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6},\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6}\s*,.*"
- r",SUCCESS,Checking sources",
+ r"\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6},\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6}\s*,.*" r",SUCCESS,Checking sources",
result.stderr,
)
assert m is not None
@@ -89,9 +85,7 @@ def test_failed_build_listing(cli, datafiles):
element = {"kind": "script", "config": {"commands": ["false"]}}
_yaml.roundtrip_dump(element, os.path.join(project, element_path))
element_names.append(element_name)
- result = cli.run(
- project=project, args=["--on-error=continue", "build", *element_names]
- )
+ result = cli.run(project=project, args=["--on-error=continue", "build", *element_names])
result.assert_main_error(ErrorDomain.STREAM, None)
# Check that we re-print the failure summaries only in the "Failure Summary"
@@ -102,12 +96,8 @@ def test_failed_build_listing(cli, datafiles):
# testfail-0.bst:
# [00:00:00][44f1b8c3][ build:testfail-0.bst ] FAILURE Running 'commands'
#
- failure_heading_pos = re.search(
- r"^Failure Summary$", result.stderr, re.MULTILINE
- ).start()
- pipeline_heading_pos = re.search(
- r"^Pipeline Summary$", result.stderr, re.MULTILINE
- ).start()
+ failure_heading_pos = re.search(r"^Failure Summary$", result.stderr, re.MULTILINE).start()
+ pipeline_heading_pos = re.search(r"^Pipeline Summary$", result.stderr, re.MULTILINE).start()
failure_summary_range = range(failure_heading_pos, pipeline_heading_pos)
matches = tuple(re.finditer(r"^\s+testfail-.\.bst:$", result.stderr, re.MULTILINE))
for m in matches:
@@ -119,6 +109,4 @@ def test_failed_build_listing(cli, datafiles):
# with the name of the relevant element, e.g. 'testfail-1.bst'. Check that
# they have the name as expected.
pattern = r"\[..:..:..\] FAILURE testfail-.\.bst: Staged artifacts do not provide command 'sh'"
- assert (
- len(re.findall(pattern, result.stderr, re.MULTILINE)) == 6
- ) # each element should be matched twice.
+ assert len(re.findall(pattern, result.stderr, re.MULTILINE)) == 6 # each element should be matched twice.
diff --git a/tests/frontend/mirror.py b/tests/frontend/mirror.py
index dbd21e1e9..1146893cd 100644
--- a/tests/frontend/mirror.py
+++ b/tests/frontend/mirror.py
@@ -48,9 +48,7 @@ def generate_project():
{"name": "arrakis", "aliases": {"foo": ["OFO/"], "bar": ["RBA/"],},},
{"name": "oz", "aliases": {"foo": ["ooF/"], "bar": ["raB/"],}},
],
- "plugins": [
- {"origin": "local", "path": "sources", "sources": {"fetch_source": 0}}
- ],
+ "plugins": [{"origin": "local", "path": "sources", "sources": {"fetch_source": 0}}],
}
return project
@@ -75,11 +73,7 @@ def test_mirror_fetch_ref_storage(cli, tmpdir, datafiles, ref_storage, mirror):
element = {
"kind": "import",
- "sources": [
- upstream_repo.source_config(
- ref=upstream_ref if ref_storage == "inline" else None
- )
- ],
+ "sources": [upstream_repo.source_config(ref=upstream_ref if ref_storage == "inline" else None)],
}
element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
@@ -109,11 +103,7 @@ def test_mirror_fetch_ref_storage(cli, tmpdir, datafiles, ref_storage, mirror):
mirror_data = [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"]}}]
if mirror == "unrelated-mirror":
mirror_data.insert(
- 0,
- {
- "name": "narnia",
- "aliases": {"frob": ["http://www.example.com/repo"]},
- },
+ 0, {"name": "narnia", "aliases": {"frob": ["http://www.example.com/repo"]},},
)
project["mirrors"] = mirror_data
@@ -164,10 +154,7 @@ def test_mirror_fetch_default_cmdline(cli, tmpdir):
project = generate_project()
_yaml.roundtrip_dump(project, project_file)
- result = cli.run(
- project=project_dir,
- args=["--default-mirror", "arrakis", "source", "fetch", element_name],
- )
+ result = cli.run(project=project_dir, args=["--default-mirror", "arrakis", "source", "fetch", element_name],)
result.assert_success()
with open(output_file) as f:
contents = f.read()
@@ -179,9 +166,7 @@ def test_mirror_fetch_default_cmdline(cli, tmpdir):
me_str = "OOF/repo1"
me_pos = contents.find(me_str)
assert me_pos != -1, "'{}' wasn't found".format(me_str)
- assert arrakis_pos < me_pos, "'{}' wasn't found before '{}'".format(
- arrakis_str, me_str
- )
+ assert arrakis_pos < me_pos, "'{}' wasn't found before '{}'".format(arrakis_str, me_str)
@pytest.mark.datafiles(DATA_DIR)
@@ -237,10 +222,7 @@ def test_mirror_fetch_default_cmdline_overrides_config(cli, tmpdir):
userconfig = {"projects": {"test": {"default-mirror": "oz"}}}
cli.configure(userconfig)
- result = cli.run(
- project=project_dir,
- args=["--default-mirror", "arrakis", "source", "fetch", element_name],
- )
+ result = cli.run(project=project_dir, args=["--default-mirror", "arrakis", "source", "fetch", element_name],)
result.assert_success()
with open(output_file) as f:
contents = f.read()
@@ -252,9 +234,7 @@ def test_mirror_fetch_default_cmdline_overrides_config(cli, tmpdir):
me_str = "OOF/repo1"
me_pos = contents.find(me_str)
assert me_pos != -1, "'{}' wasn't found".format(me_str)
- assert arrakis_pos < me_pos, "'{}' wasn't found before '{}'".format(
- arrakis_str, me_str
- )
+ assert arrakis_pos < me_pos, "'{}' wasn't found before '{}'".format(arrakis_str, me_str)
@pytest.mark.datafiles(DATA_DIR)
@@ -317,9 +297,7 @@ def test_mirror_git_submodule_fetch(cli, tmpdir, datafiles):
"name": "test",
"element-path": "elements",
"aliases": {alias: "http://www.example.com/"},
- "mirrors": [
- {"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],},},
- ],
+ "mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],},},],
}
project_file = os.path.join(project_dir, "project.conf")
_yaml.roundtrip_dump(project, project_file)
@@ -382,9 +360,7 @@ def test_mirror_fallback_git_only_submodules(cli, tmpdir, datafiles):
element = {
"kind": "import",
- "sources": [
- main_repo.source_config_extra(ref=main_ref, checkout_submodules=True)
- ],
+ "sources": [main_repo.source_config_extra(ref=main_ref, checkout_submodules=True)],
}
element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
@@ -409,10 +385,7 @@ def test_mirror_fallback_git_only_submodules(cli, tmpdir, datafiles):
result.assert_success()
checkout = os.path.join(str(tmpdir), "checkout")
- result = cli.run(
- project=project_dir,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project_dir, args=["artifact", "checkout", element_name, "--directory", checkout],)
result.assert_success()
assert os.path.exists(os.path.join(checkout, "bin", "bin", "hello"))
@@ -471,11 +444,7 @@ def test_mirror_fallback_git_with_submodules(cli, tmpdir, datafiles):
element = {
"kind": "import",
- "sources": [
- upstream_main_repo.source_config_extra(
- ref=upstream_main_ref, checkout_submodules=True
- )
- ],
+ "sources": [upstream_main_repo.source_config_extra(ref=upstream_main_ref, checkout_submodules=True)],
}
element["sources"][0]["url"] = aliased_repo
element_name = "test.bst"
@@ -501,10 +470,7 @@ def test_mirror_fallback_git_with_submodules(cli, tmpdir, datafiles):
result.assert_success()
checkout = os.path.join(str(tmpdir), "checkout")
- result = cli.run(
- project=project_dir,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project_dir, args=["artifact", "checkout", element_name, "--directory", checkout],)
result.assert_success()
assert os.path.exists(os.path.join(checkout, "bin", "bin", "hello"))
diff --git a/tests/frontend/order.py b/tests/frontend/order.py
index a66064694..9032379ef 100644
--- a/tests/frontend/order.py
+++ b/tests/frontend/order.py
@@ -57,12 +57,7 @@ def create_element(project, name, dependencies):
# First simple test
(
"3.bst",
- {
- "0.bst": ["1.bst"],
- "1.bst": [],
- "2.bst": ["0.bst"],
- "3.bst": ["0.bst", "1.bst", "2.bst"],
- },
+ {"0.bst": ["1.bst"], "1.bst": [], "2.bst": ["0.bst"], "3.bst": ["0.bst", "1.bst", "2.bst"],},
["1.bst", "0.bst", "2.bst", "3.bst"],
),
# A more complicated test with build of build dependencies
@@ -74,22 +69,9 @@ def create_element(project, name, dependencies):
"timezones.bst": [],
"middleware.bst": [{"filename": "base.bst", "type": "build"}],
"app.bst": [{"filename": "middleware.bst", "type": "build"}],
- "target.bst": [
- "a.bst",
- "base.bst",
- "middleware.bst",
- "app.bst",
- "timezones.bst",
- ],
+ "target.bst": ["a.bst", "base.bst", "middleware.bst", "app.bst", "timezones.bst",],
},
- [
- "base.bst",
- "middleware.bst",
- "a.bst",
- "app.bst",
- "timezones.bst",
- "target.bst",
- ],
+ ["base.bst", "middleware.bst", "a.bst", "app.bst", "timezones.bst", "target.bst",],
),
],
)
@@ -109,18 +91,12 @@ def test_order(cli, datafiles, operation, target, template, expected):
# Run test and collect results
if operation == "show":
- result = cli.run(
- args=["show", "--deps", "plan", "--format", "%{name}", target],
- project=project,
- silent=True,
- )
+ result = cli.run(args=["show", "--deps", "plan", "--format", "%{name}", target], project=project, silent=True,)
result.assert_success()
results = result.output.splitlines()
else:
if operation == "fetch":
- result = cli.run(
- args=["source", "fetch", target], project=project, silent=True
- )
+ result = cli.run(args=["source", "fetch", target], project=project, silent=True)
else:
result = cli.run(args=[operation, target], project=project, silent=True)
result.assert_success()
diff --git a/tests/frontend/overlaps.py b/tests/frontend/overlaps.py
index 4f6f72af5..d3e0c9d60 100644
--- a/tests/frontend/overlaps.py
+++ b/tests/frontend/overlaps.py
@@ -13,9 +13,7 @@ from tests.testutils import generate_junction
DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "overlaps")
-def gen_project(
- project_dir, fail_on_overlap, use_fatal_warnings=True, project_name="test"
-):
+def gen_project(project_dir, fail_on_overlap, use_fatal_warnings=True, project_name="test"):
template = {"name": project_name}
if use_fatal_warnings:
template["fatal-warnings"] = [CoreWarnings.OVERLAPS] if fail_on_overlap else []
@@ -48,9 +46,7 @@ def test_overlaps_error(cli, datafiles, use_fatal_warnings):
def test_overlaps_whitelist(cli, datafiles):
project_dir = str(datafiles)
gen_project(project_dir, True)
- result = cli.run(
- project=project_dir, silent=True, args=["build", "collect-whitelisted.bst"]
- )
+ result = cli.run(project=project_dir, silent=True, args=["build", "collect-whitelisted.bst"])
result.assert_success()
@@ -58,9 +54,7 @@ def test_overlaps_whitelist(cli, datafiles):
def test_overlaps_whitelist_ignored(cli, datafiles):
project_dir = str(datafiles)
gen_project(project_dir, False)
- result = cli.run(
- project=project_dir, silent=True, args=["build", "collect-whitelisted.bst"]
- )
+ result = cli.run(project=project_dir, silent=True, args=["build", "collect-whitelisted.bst"])
result.assert_success()
@@ -71,11 +65,7 @@ def test_overlaps_whitelist_on_overlapper(cli, datafiles):
# it'll still fail because A doesn't permit overlaps.
project_dir = str(datafiles)
gen_project(project_dir, True)
- result = cli.run(
- project=project_dir,
- silent=True,
- args=["build", "collect-partially-whitelisted.bst"],
- )
+ result = cli.run(project=project_dir, silent=True, args=["build", "collect-partially-whitelisted.bst"],)
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.PLUGIN, CoreWarnings.OVERLAPS)
@@ -100,9 +90,7 @@ def test_overlap_subproject(cli, tmpdir, datafiles, project_policy, subproject_p
junction_path = os.path.join(project_dir, "sub-project.bst")
gen_project(project_dir, bool(project_policy == "fail"), project_name="test")
- gen_project(
- subproject_dir, bool(subproject_policy == "fail"), project_name="subtest"
- )
+ gen_project(subproject_dir, bool(subproject_policy == "fail"), project_name="subtest")
generate_junction(tmpdir, subproject_dir, junction_path)
# Here we have a dependency chain where the project element
@@ -111,9 +99,7 @@ def test_overlap_subproject(cli, tmpdir, datafiles, project_policy, subproject_p
# Test that overlap error vs warning policy for this overlap
# is always controlled by the project and not the subproject.
#
- result = cli.run(
- project=project_dir, silent=True, args=["build", "sub-collect.bst"]
- )
+ result = cli.run(project=project_dir, silent=True, args=["build", "sub-collect.bst"])
if project_policy == "fail":
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.PLUGIN, CoreWarnings.OVERLAPS)
diff --git a/tests/frontend/progress.py b/tests/frontend/progress.py
index 3ca81f543..86abe830c 100644
--- a/tests/frontend/progress.py
+++ b/tests/frontend/progress.py
@@ -43,9 +43,7 @@ def test_junction_tally(cli, tmpdir, datafiles):
}
_yaml.roundtrip_dump(element, element_path)
- result = cli.run(
- project=project, silent=True, args=["source", "fetch", "junction.bst"]
- )
+ result = cli.run(project=project, silent=True, args=["source", "fetch", "junction.bst"])
result.assert_success()
# Assert the correct progress tallies are in the logging
@@ -62,9 +60,7 @@ def test_nested_junction_tally(cli, tmpdir, datafiles):
sub1_path = os.path.join(project, "files", "sub-project")
sub2_path = os.path.join(project, "files", "sub2-project")
# A junction element which pulls sub1 into sub2
- sub1_element = os.path.join(
- project, "files", "sub2-project", "elements", "sub-junction.bst"
- )
+ sub1_element = os.path.join(project, "files", "sub2-project", "elements", "sub-junction.bst")
# A junction element which pulls sub2 into the main project
sub2_element = os.path.join(project, "elements", "junction.bst")
element_path = os.path.join(project, "elements", "junction-dep.bst")
@@ -80,9 +76,7 @@ def test_nested_junction_tally(cli, tmpdir, datafiles):
}
_yaml.roundtrip_dump(element, element_path)
- result = cli.run(
- project=project, silent=True, args=["source", "fetch", "junction.bst"]
- )
+ result = cli.run(project=project, silent=True, args=["source", "fetch", "junction.bst"])
result.assert_success()
# Assert the correct progress tallies are in the logging
@@ -116,9 +110,7 @@ def test_junction_dep_tally(cli, tmpdir, datafiles):
}
_yaml.roundtrip_dump(element, element_path)
- result = cli.run(
- project=project, silent=True, args=["source", "fetch", "junction-dep.bst"]
- )
+ result = cli.run(project=project, silent=True, args=["source", "fetch", "junction-dep.bst"])
# Since we aren't allowed to specify any dependencies on a
# junction, we should fail
diff --git a/tests/frontend/project/sources/fetch_source.py b/tests/frontend/project/sources/fetch_source.py
index cb3ab024e..51bfe1049 100644
--- a/tests/frontend/project/sources/fetch_source.py
+++ b/tests/frontend/project/sources/fetch_source.py
@@ -22,16 +22,10 @@ class FetchFetcher(SourceFetcher):
self.mark_download_url(url)
def fetch(self, alias_override=None):
- url = self.source.translate_url(
- self.original_url, alias_override=alias_override, primary=self.primary
- )
+ url = self.source.translate_url(self.original_url, alias_override=alias_override, primary=self.primary)
with open(self.source.output_file, "a") as f:
- success = (
- url in self.source.fetch_succeeds and self.source.fetch_succeeds[url]
- )
- message = "Fetch {} {} from {}\n".format(
- self.original_url, "succeeded" if success else "failed", url
- )
+ success = url in self.source.fetch_succeeds and self.source.fetch_succeeds[url]
+ message = "Fetch {} {} from {}\n".format(self.original_url, "succeeded" if success else "failed", url)
f.write(message)
if not success:
raise SourceError("Failed to fetch {}".format(url))
@@ -42,10 +36,7 @@ class FetchSource(Source):
def configure(self, node):
self.original_urls = node.get_str_list("urls")
self.output_file = node.get_str("output-text")
- self.fetch_succeeds = {
- key: value.as_bool()
- for key, value in node.get_mapping("fetch-succeeds", {}).items()
- }
+ self.fetch_succeeds = {key: value.as_bool() for key, value in node.get_mapping("fetch-succeeds", {}).items()}
# First URL is the primary one for this test
#
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index 970987d36..100a9a914 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -60,9 +60,7 @@ def test_push_pull_all(cli, tmpdir, datafiles):
assert not any(states[e] == "cached" for e in all_elements)
# Now try bst artifact pull
- result = cli.run(
- project=project, args=["artifact", "pull", "--deps", "all", "target.bst"]
- )
+ result = cli.run(project=project, args=["artifact", "pull", "--deps", "all", "target.bst"])
result.assert_success()
# And assert that it's again in the local cache, without having built
@@ -132,21 +130,12 @@ def test_push_pull_default_targets(cli, tmpdir, datafiles):
def test_pull_secondary_cache(cli, tmpdir, datafiles):
project = str(datafiles)
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare1")
- ) as share1, create_artifact_share(
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as share1, create_artifact_share(
os.path.join(str(tmpdir), "artifactshare2")
) as share2:
# Build the target and push it to share2 only.
- cli.configure(
- {
- "artifacts": [
- {"url": share1.repo, "push": False},
- {"url": share2.repo, "push": True},
- ]
- }
- )
+ cli.configure({"artifacts": [{"url": share1.repo, "push": False}, {"url": share2.repo, "push": True},]})
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
@@ -180,9 +169,7 @@ def test_pull_secondary_cache(cli, tmpdir, datafiles):
def test_push_pull_specific_remote(cli, tmpdir, datafiles):
project = str(datafiles)
- with create_artifact_share(
- os.path.join(str(tmpdir), "goodartifactshare")
- ) as good_share, create_artifact_share(
+ with create_artifact_share(os.path.join(str(tmpdir), "goodartifactshare")) as good_share, create_artifact_share(
os.path.join(str(tmpdir), "badartifactshare")
) as bad_share:
@@ -200,10 +187,7 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles):
)
# Now try `bst artifact push` to the good_share.
- result = cli.run(
- project=project,
- args=["artifact", "push", "target.bst", "--remote", good_share.repo],
- )
+ result = cli.run(project=project, args=["artifact", "push", "target.bst", "--remote", good_share.repo],)
result.assert_success()
# Assert that all the artifacts are in the share we pushed
@@ -219,10 +203,7 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles):
artifactdir = os.path.join(cli.directory, "artifacts")
shutil.rmtree(artifactdir)
- result = cli.run(
- project=project,
- args=["artifact", "pull", "target.bst", "--remote", good_share.repo],
- )
+ result = cli.run(project=project, args=["artifact", "pull", "target.bst", "--remote", good_share.repo],)
result.assert_success()
# And assert that it's again in the local cache, without having built
@@ -240,10 +221,7 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# First build the target element and push to the remote.
cli.configure(
- {
- "artifacts": {"url": share.repo, "push": True},
- "projects": {"test": {"strict": False}},
- }
+ {"artifacts": {"url": share.repo, "push": True}, "projects": {"test": {"strict": False}},}
)
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
@@ -272,9 +250,7 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
assert cli.get_element_state(project, element_name) != "cached"
# Add a file to force change in strict cache key of import-bin.bst
- with open(
- os.path.join(str(project), "files", "bin-files", "usr", "bin", "world"), "w"
- ) as f:
+ with open(os.path.join(str(project), "files", "bin-files", "usr", "bin", "world"), "w") as f:
f.write("world")
# Assert that the workspaced element requires a rebuild
@@ -283,9 +259,7 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
assert cli.get_element_state(project, "target.bst") == "waiting"
# Now try bst artifact pull
- result = cli.run(
- project=project, args=["artifact", "pull", "--deps", "all", "target.bst"]
- )
+ result = cli.run(project=project, args=["artifact", "pull", "--deps", "all", "target.bst"])
result.assert_success()
# And assert that the target is again in the local cache, without having built
@@ -313,14 +287,10 @@ def test_push_pull_cross_junction(cli, tmpdir, datafiles):
artifact_dir = os.path.join(project, "cache", "artifacts")
shutil.rmtree(artifact_dir)
- assert (
- cli.get_element_state(project, "junction.bst:import-etc.bst") == "buildable"
- )
+ assert cli.get_element_state(project, "junction.bst:import-etc.bst") == "buildable"
# Now try bst artifact pull
- result = cli.run(
- project=project, args=["artifact", "pull", "junction.bst:import-etc.bst"]
- )
+ result = cli.run(project=project, args=["artifact", "pull", "junction.bst:import-etc.bst"])
result.assert_success()
# And assert that it's again in the local cache, without having built
@@ -418,9 +388,7 @@ def test_pull_missing_local_blob(cli, tmpdir, datafiles):
# This is a placeholder to test partial CAS handling until we support
# partial artifact pulling (or blob-based CAS expiry).
#
- digest = utils.sha256sum(
- os.path.join(project, "files", "bin-files", "usr", "bin", "hello")
- )
+ digest = utils.sha256sum(os.path.join(project, "files", "bin-files", "usr", "bin", "hello"))
objpath = os.path.join(cli.directory, "cas", "objects", digest[:2], digest[2:])
os.unlink(objpath)
@@ -443,9 +411,7 @@ def test_pull_missing_notifies_user(caplog, cli, tmpdir, datafiles):
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- assert (
- not result.get_pulled_elements()
- ), "No elements should have been pulled since the cache was empty"
+ assert not result.get_pulled_elements(), "No elements should have been pulled since the cache was empty"
assert "INFO Remote ({}) does not have".format(share.repo) in result.stderr
assert "SKIPPED Pull" in result.stderr
@@ -456,19 +422,13 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
project = str(datafiles)
caplog.set_level(1)
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare1")
- ) as shareuser, create_artifact_share(
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as shareuser, create_artifact_share(
os.path.join(str(tmpdir), "artifactshare2")
- ) as shareproject, create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare3")
- ) as sharecli:
+ ) as shareproject, create_artifact_share(os.path.join(str(tmpdir), "artifactshare3")) as sharecli:
# Add shareproject repo url to project.conf
with open(os.path.join(project, "project.conf"), "a") as projconf:
- projconf.write(
- "artifacts:\n url: {}\n push: True".format(shareproject.repo)
- )
+ projconf.write("artifacts:\n url: {}\n push: True".format(shareproject.repo))
# Configure shareuser remote in user conf
cli.configure({"artifacts": {"url": shareuser.repo, "push": True}})
@@ -489,9 +449,7 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
# Now check that a build with cli set as sharecli results in nothing being pulled,
# as it doesn't have them cached and shareuser/shareproject should be ignored. This
# will however result in the artifacts being built and pushed to it
- result = cli.run(
- project=project, args=["build", "--remote", sharecli.repo, "target.bst"]
- )
+ result = cli.run(project=project, args=["build", "--remote", sharecli.repo, "target.bst"])
result.assert_success()
for element_name in all_elements:
assert element_name not in result.get_pulled_elements()
@@ -500,9 +458,7 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
# Now check that a clean build with cli set as sharecli should result in artifacts only
# being pulled from it, as that was provided via the cli and is populated
- result = cli.run(
- project=project, args=["build", "--remote", sharecli.repo, "target.bst"]
- )
+ result = cli.run(project=project, args=["build", "--remote", sharecli.repo, "target.bst"])
result.assert_success()
for element_name in all_elements:
assert cli.get_element_state(project, element_name) == "cached"
@@ -616,9 +572,7 @@ def test_pull_artifact(cli, tmpdir, datafiles):
# Assert that the *artifact* is cached locally
cache_key = cli.get_element_key(project, element)
artifact_ref = os.path.join("test", os.path.splitext(element)[0], cache_key)
- assert os.path.exists(
- os.path.join(local_cache, "artifacts", "refs", artifact_ref)
- )
+ assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))
# Assert that the target is shared (note that assert shared will use the artifact name)
assert_shared(cli, share, project, element)
@@ -627,15 +581,11 @@ def test_pull_artifact(cli, tmpdir, datafiles):
shutil.rmtree(os.path.join(local_cache, "artifacts"))
# Assert that nothing is cached locally anymore
- assert not os.path.exists(
- os.path.join(local_cache, "artifacts", "refs", artifact_ref)
- )
+ assert not os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))
# Now try bst artifact pull
result = cli.run(project=project, args=["artifact", "pull", artifact_ref])
result.assert_success()
# And assert that it's again in the local cache, without having built
- assert os.path.exists(
- os.path.join(local_cache, "artifacts", "refs", artifact_ref)
- )
+ assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 21a47838c..6e2e283cd 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -61,9 +61,7 @@ def test_push(cli, tmpdir, datafiles):
# Set up two artifact shares.
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as share1:
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare2")
- ) as share2:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare2")) as share2:
# Try pushing with no remotes configured. This should fail.
result = cli.run(project=project, args=["artifact", "push", "target.bst"])
@@ -78,14 +76,7 @@ def test_push(cli, tmpdir, datafiles):
result.assert_main_error(ErrorDomain.STREAM, None)
# Configure bst to push to one of the caches and run `bst artifact push`. This works.
- cli.configure(
- {
- "artifacts": [
- {"url": share1.repo, "push": False},
- {"url": share2.repo, "push": True},
- ]
- }
- )
+ cli.configure({"artifacts": [{"url": share1.repo, "push": False}, {"url": share2.repo, "push": True},]})
cli.run(project=project, args=["artifact", "push", "target.bst"])
assert_not_shared(cli, share1, project, "target.bst")
@@ -93,17 +84,8 @@ def test_push(cli, tmpdir, datafiles):
# Now try pushing to both
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare2")
- ) as share2:
- cli.configure(
- {
- "artifacts": [
- {"url": share1.repo, "push": True},
- {"url": share2.repo, "push": True},
- ]
- }
- )
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare2")) as share2:
+ cli.configure({"artifacts": [{"url": share1.repo, "push": True}, {"url": share2.repo, "push": True},]})
cli.run(project=project, args=["artifact", "push", "target.bst"])
assert_shared(cli, share1, project, "target.bst")
@@ -129,9 +111,7 @@ def test_push_artifact(cli, tmpdir, datafiles):
# Assert that the *artifact* is cached locally
cache_key = cli.get_element_key(project, element)
artifact_ref = os.path.join("test", os.path.splitext(element)[0], cache_key)
- assert os.path.exists(
- os.path.join(local_cache, "artifacts", "refs", artifact_ref)
- )
+ assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))
# Configure artifact share
cli.configure(
@@ -215,15 +195,7 @@ def test_push_fails_with_on_error_continue(cli, tmpdir, datafiles):
# Now try and push the target with its deps using --on-error continue
# and assert that push failed, but what could be pushed was pushed
result = cli.run(
- project=project,
- args=[
- "--on-error=continue",
- "artifact",
- "push",
- "--deps",
- "all",
- "target.bst",
- ],
+ project=project, args=["--on-error=continue", "artifact", "push", "--deps", "all", "target.bst",],
)
# The overall process should return as failed
@@ -279,9 +251,7 @@ def test_push_all(cli, tmpdir, datafiles):
)
# Now try bst artifact push all the deps
- result = cli.run(
- project=project, args=["artifact", "push", "target.bst", "--deps", "all"]
- )
+ result = cli.run(project=project, args=["artifact", "push", "target.bst", "--deps", "all"])
result.assert_success()
# And finally assert that all the artifacts are in the share
@@ -310,9 +280,7 @@ def test_push_artifacts_all_deps_fails(cli, tmpdir, datafiles):
# Assert that the *artifact* is cached locally
cache_key = cli.get_element_key(project, element)
artifact_ref = os.path.join("test", os.path.splitext(element)[0], cache_key)
- assert os.path.exists(
- os.path.join(local_cache, "artifacts", "refs", artifact_ref)
- )
+ assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))
# Configure artifact share
cli.configure(
@@ -331,9 +299,7 @@ def test_push_artifacts_all_deps_fails(cli, tmpdir, datafiles):
)
# Now try bst artifact push all the deps
- result = cli.run(
- project=project, args=["artifact", "push", "--deps", "all", artifact_ref]
- )
+ result = cli.run(project=project, args=["artifact", "push", "--deps", "all", artifact_ref])
result.assert_main_error(ErrorDomain.STREAM, None)
assert "Error: '--deps all' is not supported for artifact refs" in result.stderr
@@ -347,9 +313,7 @@ def test_push_after_pull(cli, tmpdir, datafiles):
project = str(datafiles)
# Set up two artifact shares.
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare1")
- ) as share1, create_artifact_share(
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as share1, create_artifact_share(
os.path.join(str(tmpdir), "artifactshare2")
) as share2:
@@ -381,14 +345,7 @@ def test_push_after_pull(cli, tmpdir, datafiles):
# Now we add share2 into the mix as a second push remote. This time,
# `bst build` should push to share2 after pulling from share1.
- cli.configure(
- {
- "artifacts": [
- {"url": share1.repo, "push": True},
- {"url": share2.repo, "push": True},
- ]
- }
- )
+ cli.configure({"artifacts": [{"url": share1.repo, "push": True}, {"url": share2.repo, "push": True},]})
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
assert result.get_pulled_elements() == ["target.bst"]
@@ -405,9 +362,7 @@ def test_artifact_expires(cli, datafiles, tmpdir):
# Create an artifact share (remote artifact cache) in the tmpdir/artifactshare
# Set a 22 MB quota
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)
- ) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)) as share:
# Configure bst to push to the cache
cli.configure(
@@ -459,9 +414,7 @@ def test_artifact_too_large(cli, datafiles, tmpdir):
# Create an artifact share (remote cache) in tmpdir/artifactshare
# Mock a file system with 5 MB total space
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare"), quota=int(5e6)
- ) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(5e6)) as share:
# Configure bst to push to the remote cache
cli.configure(
@@ -488,9 +441,7 @@ def test_artifact_too_large(cli, datafiles, tmpdir):
result.assert_main_error(ErrorDomain.STREAM, None)
# Ensure that the small artifact is still in the share
- states = cli.get_element_states(
- project, ["small_element.bst", "large_element.bst"]
- )
+ states = cli.get_element_states(project, ["small_element.bst", "large_element.bst"])
assert states["small_element.bst"] == "cached"
assert_shared(cli, share, project, "small_element.bst")
@@ -507,9 +458,7 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
# Create an artifact share (remote cache) in tmpdir/artifactshare
# Set a 22 MB quota
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)
- ) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)) as share:
# Configure bst to push to the cache
cli.configure(
@@ -541,10 +490,7 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
assert cli.get_element_state(project, "element1.bst") != "cached"
# Pull the element1 from the remote cache (this should update its mtime)
- result = cli.run(
- project=project,
- args=["artifact", "pull", "element1.bst", "--remote", share.repo],
- )
+ result = cli.run(project=project, args=["artifact", "pull", "element1.bst", "--remote", share.repo],)
result.assert_success()
# Ensure element1 is cached locally
@@ -583,16 +529,10 @@ def test_push_cross_junction(cli, tmpdir, datafiles):
cli.configure(
{"artifacts": {"url": share.repo, "push": True},}
)
- cli.run(
- project=project, args=["artifact", "push", "junction.bst:import-etc.bst"]
- )
+ cli.run(project=project, args=["artifact", "push", "junction.bst:import-etc.bst"])
cache_key = cli.get_element_key(project, "junction.bst:import-etc.bst")
- assert share.get_artifact(
- cli.get_artifact_name(
- project, "subtest", "import-etc.bst", cache_key=cache_key
- )
- )
+ assert share.get_artifact(cli.get_artifact_name(project, "subtest", "import-etc.bst", cache_key=cache_key))
@pytest.mark.datafiles(DATA_DIR)
@@ -611,9 +551,7 @@ def test_push_already_cached(caplog, cli, tmpdir, datafiles):
result = cli.run(project=project, args=["artifact", "push", "target.bst"])
result.assert_success()
- assert (
- not result.get_pushed_elements()
- ), "No elements should have been pushed since the cache was populated"
+ assert not result.get_pushed_elements(), "No elements should have been pushed since the cache was populated"
assert "INFO Remote ({}) already has ".format(share.repo) in result.stderr
assert "SKIPPED Push" in result.stderr
@@ -623,26 +561,18 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
project = str(datafiles)
caplog.set_level(1)
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare1")
- ) as shareuser, create_artifact_share(
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as shareuser, create_artifact_share(
os.path.join(str(tmpdir), "artifactshare2")
- ) as shareproject, create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare3")
- ) as sharecli:
+ ) as shareproject, create_artifact_share(os.path.join(str(tmpdir), "artifactshare3")) as sharecli:
# Add shareproject repo url to project.conf
with open(os.path.join(project, "project.conf"), "a") as projconf:
- projconf.write(
- "artifacts:\n url: {}\n push: True".format(shareproject.repo)
- )
+ projconf.write("artifacts:\n url: {}\n push: True".format(shareproject.repo))
# Configure shareuser remote in user conf
cli.configure({"artifacts": {"url": shareuser.repo, "push": True}})
- result = cli.run(
- project=project, args=["build", "--remote", sharecli.repo, "target.bst"]
- )
+ result = cli.run(project=project, args=["build", "--remote", sharecli.repo, "target.bst"])
# Artifacts should have only been pushed to sharecli, as that was provided via the cli
result.assert_success()
@@ -668,10 +598,7 @@ def test_push_no_strict(caplog, cli, tmpdir, datafiles, buildtrees):
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
cli.configure(
- {
- "artifacts": {"url": share.repo, "push": True},
- "projects": {"test": {"strict": False}},
- }
+ {"artifacts": {"url": share.repo, "push": True}, "projects": {"test": {"strict": False}},}
)
# First get us a build
diff --git a/tests/frontend/rebuild.py b/tests/frontend/rebuild.py
index d3e36e6f4..1aef8e423 100644
--- a/tests/frontend/rebuild.py
+++ b/tests/frontend/rebuild.py
@@ -25,15 +25,11 @@ def test_rebuild(datafiles, cli, strict):
result.assert_success()
# Modify base import
- with open(
- os.path.join(project, "files", "dev-files", "usr", "include", "new.h"), "w"
- ) as f:
+ with open(os.path.join(project, "files", "dev-files", "usr", "include", "new.h"), "w") as f:
f.write("#define NEW")
# Rebuild base import and build top-level rebuild-target.bst
# In non-strict mode, this does not rebuild intermediate target.bst,
# which means that a weakly cached target.bst will be staged as dependency.
- result = cli.run(
- project=project, args=strict_args(["build", "rebuild-target.bst"], strict)
- )
+ result = cli.run(project=project, args=strict_args(["build", "rebuild-target.bst"], strict))
result.assert_success()
diff --git a/tests/frontend/show.py b/tests/frontend/show.py
index a54d625ea..a686dbd2d 100644
--- a/tests/frontend/show.py
+++ b/tests/frontend/show.py
@@ -29,24 +29,14 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)),)
)
def test_show(cli, datafiles, target, fmt, expected):
project = str(datafiles)
- result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", fmt, target],
- )
+ result = cli.run(project=project, silent=True, args=["show", "--deps", "none", "--format", fmt, target],)
result.assert_success()
if result.output.strip() != expected:
- raise AssertionError(
- "Expected output:\n{}\nInstead received output:\n{}".format(
- expected, result.output
- )
- )
+ raise AssertionError("Expected output:\n{}\nInstead received output:\n{}".format(expected, result.output))
-@pytest.mark.datafiles(
- os.path.join(os.path.dirname(os.path.realpath(__file__)), "invalid_element_path",)
-)
+@pytest.mark.datafiles(os.path.join(os.path.dirname(os.path.realpath(__file__)), "invalid_element_path",))
def test_show_invalid_element_path(cli, datafiles):
project = str(datafiles)
cli.run(project=project, silent=True, args=["show", "foo.bst"])
@@ -77,16 +67,8 @@ def test_show_fail(cli, datafiles):
@pytest.mark.parametrize(
"target,except_,expected",
[
- (
- "target.bst",
- "import-bin.bst",
- ["import-dev.bst", "compose-all.bst", "target.bst"],
- ),
- (
- "target.bst",
- "import-dev.bst",
- ["import-bin.bst", "compose-all.bst", "target.bst"],
- ),
+ ("target.bst", "import-bin.bst", ["import-dev.bst", "compose-all.bst", "target.bst"],),
+ ("target.bst", "import-dev.bst", ["import-bin.bst", "compose-all.bst", "target.bst"],),
("target.bst", "compose-all.bst", ["import-bin.bst", "target.bst"]),
("compose-all.bst", "import-bin.bst", ["import-dev.bst", "compose-all.bst"]),
],
@@ -96,27 +78,14 @@ def test_show_except_simple(cli, datafiles, target, except_, expected):
result = cli.run(
project=project,
silent=True,
- args=[
- "show",
- "--deps",
- "all",
- "--format",
- "%{name}",
- "--except",
- except_,
- target,
- ],
+ args=["show", "--deps", "all", "--format", "%{name}", "--except", except_, target,],
)
result.assert_success()
results = result.output.strip().splitlines()
if results != expected:
- raise AssertionError(
- "Expected elements:\n{}\nInstead received elements:\n{}".format(
- expected, results
- )
- )
+ raise AssertionError("Expected elements:\n{}\nInstead received elements:\n{}".format(expected, results))
# This test checks various constructions of a pipeline
@@ -200,22 +169,14 @@ def test_show_except_simple(cli, datafiles, target, except_, expected):
],
),
# Test one target and excepting two elements
- (
- ["build.bst"],
- ["unrelated-1.bst", "unrelated-2.bst"],
- ["first-level-1.bst", "build.bst",],
- ),
+ (["build.bst"], ["unrelated-1.bst", "unrelated-2.bst"], ["first-level-1.bst", "build.bst",],),
],
)
def test_show_except(cli, datafiles, targets, exceptions, expected):
basedir = str(datafiles)
results = cli.get_pipeline(basedir, targets, except_=exceptions, scope="all")
if results != expected:
- raise AssertionError(
- "Expected elements:\n{}\nInstead received elements:\n{}".format(
- expected, results
- )
- )
+ raise AssertionError("Expected elements:\n{}\nInstead received elements:\n{}".format(expected, results))
###############################################################
@@ -271,13 +232,9 @@ def test_target_is_dependency(cli, datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
-@pytest.mark.parametrize(
- "element_name", ["junction-dep.bst", "junction.bst:import-etc.bst"]
-)
+@pytest.mark.parametrize("element_name", ["junction-dep.bst", "junction.bst:import-etc.bst"])
@pytest.mark.parametrize("workspaced", [True, False], ids=["workspace", "no-workspace"])
-def test_unfetched_junction(
- cli, tmpdir, datafiles, ref_storage, element_name, workspaced
-):
+def test_unfetched_junction(cli, tmpdir, datafiles, ref_storage, element_name, workspaced):
project = str(datafiles)
subproject_path = os.path.join(project, "files", "sub-project")
junction_path = os.path.join(project, "elements", "junction.bst")
@@ -286,9 +243,7 @@ def test_unfetched_junction(
configure_project(project, {"ref-storage": ref_storage})
# Create a repo to hold the subproject and generate a junction element for it
- ref = generate_junction(
- tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline")
- )
+ ref = generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline"))
# Create a stack element to depend on a cross junction element
#
@@ -309,14 +264,7 @@ def test_unfetched_junction(
result = cli.run(
project=project,
silent=True,
- args=[
- "workspace",
- "open",
- "--no-checkout",
- "--directory",
- subproject_path,
- "junction.bst",
- ],
+ args=["workspace", "open", "--no-checkout", "--directory", subproject_path, "junction.bst",],
)
result.assert_success()
@@ -352,26 +300,15 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage, workspaced):
result = cli.run(
project=project,
silent=True,
- args=[
- "workspace",
- "open",
- "--no-checkout",
- "--directory",
- subproject_path,
- "junction.bst",
- ],
+ args=["workspace", "open", "--no-checkout", "--directory", subproject_path, "junction.bst",],
)
result.assert_success()
# Assert the correct error when trying to show the pipeline
- dep_result = cli.run(
- project=project, silent=True, args=["show", "junction-dep.bst"]
- )
+ dep_result = cli.run(project=project, silent=True, args=["show", "junction-dep.bst"])
# Assert the correct error when trying to show the pipeline
- etc_result = cli.run(
- project=project, silent=True, args=["show", "junction.bst:import-etc.bst"]
- )
+ etc_result = cli.run(project=project, silent=True, args=["show", "junction.bst:import-etc.bst"])
# If a workspace is open, no ref is needed
if workspaced:
@@ -384,18 +321,12 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage, workspaced):
provenance = ref_node.get_provenance()
assert str(provenance) in dep_result.stderr
- dep_result.assert_main_error(
- ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT
- )
- etc_result.assert_main_error(
- ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT
- )
+ dep_result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
+ etc_result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
-@pytest.mark.parametrize(
- "element_name", ["junction-dep.bst", "junction.bst:import-etc.bst"]
-)
+@pytest.mark.parametrize("element_name", ["junction-dep.bst", "junction.bst:import-etc.bst"])
@pytest.mark.parametrize("workspaced", [True, False], ids=["workspace", "no-workspace"])
def test_fetched_junction(cli, tmpdir, datafiles, element_name, workspaced):
project = str(datafiles)
@@ -415,9 +346,7 @@ def test_fetched_junction(cli, tmpdir, datafiles, element_name, workspaced):
}
_yaml.roundtrip_dump(element, element_path)
- result = cli.run(
- project=project, silent=True, args=["source", "fetch", "junction.bst"]
- )
+ result = cli.run(project=project, silent=True, args=["source", "fetch", "junction.bst"])
result.assert_success()
# Open a workspace if we're testing workspaced behavior
@@ -425,23 +354,12 @@ def test_fetched_junction(cli, tmpdir, datafiles, element_name, workspaced):
result = cli.run(
project=project,
silent=True,
- args=[
- "workspace",
- "open",
- "--no-checkout",
- "--directory",
- subproject_path,
- "junction.bst",
- ],
+ args=["workspace", "open", "--no-checkout", "--directory", subproject_path, "junction.bst",],
)
result.assert_success()
# Assert the correct error when trying to show the pipeline
- result = cli.run(
- project=project,
- silent=True,
- args=["show", "--format", "%{name}-%{state}", element_name],
- )
+ result = cli.run(project=project, silent=True, args=["show", "--format", "%{name}-%{state}", element_name],)
results = result.output.strip().splitlines()
assert "junction.bst:import-etc.bst-buildable" in results
@@ -464,9 +382,7 @@ def test_exceed_max_recursion_depth(cli, tmpdir, dependency_depth):
"""
os.mkdir(project_path)
- result = cli.run(
- silent=True, args=["init", "--project-name", project_name, project_path]
- )
+ result = cli.run(silent=True, args=["init", "--project-name", project_name, project_path])
result.assert_success()
sourcefiles_path = os.path.join(project_path, "files")
@@ -481,20 +397,14 @@ def test_exceed_max_recursion_depth(cli, tmpdir, dependency_depth):
}
if i == 0:
del element["depends"]
- _yaml.roundtrip_dump(
- element, os.path.join(element_path, "element{}.bst".format(str(i)))
- )
+ _yaml.roundtrip_dump(element, os.path.join(element_path, "element{}.bst".format(str(i))))
source = os.path.join(sourcefiles_path, "source{}".format(str(i)))
open(source, "x").close()
assert os.path.exists(source)
setup_test()
- result = cli.run(
- project=project_path,
- silent=True,
- args=["show", "element{}.bst".format(str(dependency_depth))],
- )
+ result = cli.run(project=project_path, silent=True, args=["show", "element{}.bst".format(str(dependency_depth))],)
recursion_limit = sys.getrecursionlimit()
if dependency_depth <= recursion_limit:
@@ -523,19 +433,13 @@ def test_format_deps(cli, datafiles, dep_kind, expected_deps):
project = str(datafiles)
target = "checkout-deps.bst"
result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{name}: " + dep_kind, target],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{name}: " + dep_kind, target],
)
result.assert_success()
expected = "{name}: {deps}".format(name=target, deps=expected_deps)
if result.output.strip() != expected:
- raise AssertionError(
- "Expected output:\n{}\nInstead received output:\n{}".format(
- expected, result.output
- )
- )
+ raise AssertionError("Expected output:\n{}\nInstead received output:\n{}".format(expected, result.output))
# This tests the resolved value of the 'max-jobs' variable,
@@ -544,8 +448,7 @@ def test_format_deps(cli, datafiles, dep_kind, expected_deps):
#
@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
@pytest.mark.parametrize(
- "cli_value, config_value",
- [(None, None), (None, "16"), ("16", None), ("5", "16"), ("0", "16"), ("16", "0"),],
+ "cli_value, config_value", [(None, None), (None, "16"), ("16", None), ("5", "16"), ("0", "16"), ("16", "0"),],
)
def test_max_jobs(cli, datafiles, cli_value, config_value):
project = str(datafiles)
@@ -599,8 +502,7 @@ def test_max_jobs(cli, datafiles, cli_value, config_value):
#
@pytest.mark.datafiles(os.path.join(DATA_DIR, "strict-depends"))
@pytest.mark.parametrize(
- "target, expected_state",
- [("non-strict-depends.bst", "cached"), ("strict-depends.bst", "waiting"),],
+ "target, expected_state", [("non-strict-depends.bst", "cached"), ("strict-depends.bst", "waiting"),],
)
def test_strict_dependencies(cli, datafiles, target, expected_state):
project = str(datafiles)
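The hunks above and below all apply the same mechanical rewrite: a call or literal that previously spanned several lines is joined onto one line whenever it fits within the wider column limit, with the original trailing commas left in place. As a hedged reproduction sketch only (assuming the reflow was produced by black at a 119-column line length, that black is installed, and that the path below is illustrative; the exact black version also affects how trailing commas are handled, so output may differ slightly), the same pass could be rerun non-interactively:

# Sketch: re-run the formatter over the test suite (assumptions noted above).
import subprocess

subprocess.run(["black", "--line-length", "119", "tests"], check=True)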
diff --git a/tests/frontend/source_checkout.py b/tests/frontend/source_checkout.py
index f10c24c5d..ff897b1cf 100644
--- a/tests/frontend/source_checkout.py
+++ b/tests/frontend/source_checkout.py
@@ -44,10 +44,7 @@ def test_source_checkout(datafiles, cli, tmpdir_factory, with_workspace, guess_e
if with_workspace:
ws_cmd = ["-C", workspace]
- result = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace, target],
- )
+ result = cli.run(project=project, args=["workspace", "open", "--directory", workspace, target],)
result.assert_success()
else:
ws_cmd = []
@@ -64,9 +61,7 @@ def test_source_checkout(datafiles, cli, tmpdir_factory, with_workspace, guess_e
result = cli.run(project=project, args=args)
result.assert_success()
- assert os.path.exists(
- os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config")
- )
+ assert os.path.exists(os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config"))
@pytest.mark.datafiles(DATA_DIR)
@@ -80,23 +75,11 @@ def test_source_checkout_force(datafiles, cli, force_flag):
os.makedirs(os.path.join(checkout, "some-thing"))
result = cli.run(
- project=project,
- args=[
- "source",
- "checkout",
- force_flag,
- "--deps",
- "none",
- "--directory",
- checkout,
- target,
- ],
+ project=project, args=["source", "checkout", force_flag, "--deps", "none", "--directory", checkout, target,],
)
result.assert_success()
- assert os.path.exists(
- os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config")
- )
+ assert os.path.exists(os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config"))
@pytest.mark.datafiles(DATA_DIR)
@@ -105,17 +88,12 @@ def test_source_checkout_tar(datafiles, cli):
tar = os.path.join(cli.directory, "source-checkout.tar")
target = "checkout-deps.bst"
- result = cli.run(
- project=project,
- args=["source", "checkout", "--tar", tar, "--deps", "none", target],
- )
+ result = cli.run(project=project, args=["source", "checkout", "--tar", tar, "--deps", "none", target],)
result.assert_success()
assert os.path.exists(tar)
with tarfile.open(tar) as tf:
- expected_content = os.path.join(
- tar, "checkout-deps", "etc", "buildstream", "config"
- )
+ expected_content = os.path.join(tar, "checkout-deps", "etc", "buildstream", "config")
tar_members = [f.name for f in tf]
for member in tar_members:
assert member in expected_content
@@ -131,23 +109,11 @@ def test_source_checkout_compressed_tar(datafiles, cli, compression):
result = cli.run(
project=project,
- args=[
- "source",
- "checkout",
- "--tar",
- tar,
- "--compression",
- compression,
- "--deps",
- "none",
- target,
- ],
+ args=["source", "checkout", "--tar", tar, "--compression", compression, "--deps", "none", target,],
)
result.assert_success()
tar = tarfile.open(name=tar, mode="r:" + compression)
- assert (
- os.path.join("checkout-deps", "etc", "buildstream", "config") in tar.getnames()
- )
+ assert os.path.join("checkout-deps", "etc", "buildstream", "config") in tar.getnames()
@pytest.mark.datafiles(DATA_DIR)
@@ -157,33 +123,24 @@ def test_source_checkout_deps(datafiles, cli, deps):
checkout = os.path.join(cli.directory, "source-checkout")
target = "checkout-deps.bst"
- result = cli.run(
- project=project,
- args=["source", "checkout", "--directory", checkout, "--deps", deps, target],
- )
+ result = cli.run(project=project, args=["source", "checkout", "--directory", checkout, "--deps", deps, target],)
result.assert_success()
# Sources of the target
if deps == "build":
assert not os.path.exists(os.path.join(checkout, "checkout-deps"))
else:
- assert os.path.exists(
- os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config")
- )
+ assert os.path.exists(os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config"))
# Sources of the target's build dependencies
if deps in ("build", "all"):
- assert os.path.exists(
- os.path.join(checkout, "import-dev", "usr", "include", "pony.h")
- )
+ assert os.path.exists(os.path.join(checkout, "import-dev", "usr", "include", "pony.h"))
else:
assert not os.path.exists(os.path.join(checkout, "import-dev"))
# Sources of the target's runtime dependencies
if deps in ("run", "all"):
- assert os.path.exists(
- os.path.join(checkout, "import-bin", "usr", "bin", "hello")
- )
+ assert os.path.exists(os.path.join(checkout, "import-bin", "usr", "bin", "hello"))
else:
assert not os.path.exists(os.path.join(checkout, "import-bin"))
@@ -196,32 +153,18 @@ def test_source_checkout_except(datafiles, cli):
result = cli.run(
project=project,
- args=[
- "source",
- "checkout",
- "--directory",
- checkout,
- "--deps",
- "all",
- "--except",
- "import-bin.bst",
- target,
- ],
+ args=["source", "checkout", "--directory", checkout, "--deps", "all", "--except", "import-bin.bst", target,],
)
result.assert_success()
# Sources for the target should be present
- assert os.path.exists(
- os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config")
- )
+ assert os.path.exists(os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config"))
# Sources for import-bin.bst should not be present
assert not os.path.exists(os.path.join(checkout, "import-bin"))
# Sources for other dependencies should be present
- assert os.path.exists(
- os.path.join(checkout, "import-dev", "usr", "include", "pony.h")
- )
+ assert os.path.exists(os.path.join(checkout, "import-dev", "usr", "include", "pony.h"))
@pytest.mark.datafiles(DATA_DIR)
@@ -233,8 +176,7 @@ def test_source_checkout_fetch(datafiles, cli):
# Create an element with remote source
element = generate_remote_import_element(
- os.path.join(project, "files", "dev-files", "usr", "include", "pony.h"),
- "pony.h",
+ os.path.join(project, "files", "dev-files", "usr", "include", "pony.h"), "pony.h",
)
_yaml.roundtrip_dump(element, target_path)
@@ -244,9 +186,7 @@ def test_source_checkout_fetch(datafiles, cli):
args = ["source", "checkout"]
args += [target, checkout]
- result = cli.run(
- project=project, args=["source", "checkout", "--directory", checkout, target]
- )
+ result = cli.run(project=project, args=["source", "checkout", "--directory", checkout, target])
result.assert_success()
assert os.path.exists(os.path.join(checkout, "remote-import-dev", "pony.h"))
@@ -309,10 +249,7 @@ def test_source_checkout_options_tar_and_dir_conflict(cli, tmpdir, datafiles):
tar_file = os.path.join(str(tmpdir), "source-checkout.tar")
target = "checkout-deps.bst"
- result = cli.run(
- project=project,
- args=["source", "checkout", "--directory", checkout, "--tar", tar_file, target],
- )
+ result = cli.run(project=project, args=["source", "checkout", "--directory", checkout, "--tar", tar_file, target],)
assert result.exit_code != 0
assert "ERROR: options --directory and --tar conflict" in result.stderr
@@ -326,16 +263,7 @@ def test_source_checkout_compression_without_tar(cli, tmpdir, datafiles):
target = "checkout-deps.bst"
result = cli.run(
- project=project,
- args=[
- "source",
- "checkout",
- "--directory",
- checkout,
- "--compression",
- "xz",
- target,
- ],
+ project=project, args=["source", "checkout", "--directory", checkout, "--compression", "xz", target,],
)
assert result.exit_code != 0
diff --git a/tests/frontend/track.py b/tests/frontend/track.py
index 6d9e3bb3f..02a19787c 100644
--- a/tests/frontend/track.py
+++ b/tests/frontend/track.py
@@ -41,9 +41,7 @@ def test_track_single(cli, tmpdir, datafiles):
# Write out our test targets
generate_element(repo, os.path.join(element_path, element_dep_name))
- generate_element(
- repo, os.path.join(element_path, element_target_name), dep_name=element_dep_name
- )
+ generate_element(repo, os.path.join(element_path, element_target_name), dep_name=element_dep_name)
# Assert that tracking is needed for both elements
states = cli.get_element_states(project, [element_target_name])
@@ -53,15 +51,11 @@ def test_track_single(cli, tmpdir, datafiles):
}
# Now first try to track only one element
- result = cli.run(
- project=project, args=["source", "track", "--deps", "none", element_target_name]
- )
+ result = cli.run(project=project, args=["source", "track", "--deps", "none", element_target_name])
result.assert_success()
# And now fetch it
- result = cli.run(
- project=project, args=["source", "fetch", "--deps", "none", element_target_name]
- )
+ result = cli.run(project=project, args=["source", "fetch", "--deps", "none", element_target_name])
result.assert_success()
# Assert that the dependency is waiting and the target has still never been tracked
@@ -75,9 +69,7 @@ def test_track_single(cli, tmpdir, datafiles):
@pytest.mark.datafiles(os.path.join(TOP_DIR))
@pytest.mark.parametrize("ref_storage", [("inline"), ("project-refs")])
def test_track_optional(cli, tmpdir, datafiles, ref_storage):
- project = os.path.join(
- datafiles.dirname, datafiles.basename, "track-optional-" + ref_storage
- )
+ project = os.path.join(datafiles.dirname, datafiles.basename, "track-optional-" + ref_storage)
dev_files_path = os.path.join(project, "files")
element_path = os.path.join(project, "target.bst")
@@ -104,49 +96,23 @@ def test_track_optional(cli, tmpdir, datafiles, ref_storage):
#
# We want to track and persist the ref separately in this test
#
- result = cli.run(
- project=project,
- args=["--option", "test", "False", "source", "track", "target.bst"],
- )
+ result = cli.run(project=project, args=["--option", "test", "False", "source", "track", "target.bst"],)
result.assert_success()
- result = cli.run(
- project=project,
- args=["--option", "test", "True", "source", "track", "target.bst"],
- )
+ result = cli.run(project=project, args=["--option", "test", "True", "source", "track", "target.bst"],)
result.assert_success()
# Now fetch the key for both options
#
result = cli.run(
project=project,
- args=[
- "--option",
- "test",
- "False",
- "show",
- "--deps",
- "none",
- "--format",
- "%{key}",
- "target.bst",
- ],
+ args=["--option", "test", "False", "show", "--deps", "none", "--format", "%{key}", "target.bst",],
)
result.assert_success()
master_key = result.output
result = cli.run(
project=project,
- args=[
- "--option",
- "test",
- "True",
- "show",
- "--deps",
- "none",
- "--format",
- "%{key}",
- "target.bst",
- ],
+ args=["--option", "test", "True", "show", "--deps", "none", "--format", "%{key}", "target.bst",],
)
result.assert_success()
test_key = result.output
@@ -187,15 +153,7 @@ def test_track_cross_junction(cli, tmpdir, datafiles, cross_junction, ref_storag
#
def get_subproject_element_state():
result = cli.run(
- project=project,
- args=[
- "show",
- "--deps",
- "all",
- "--format",
- "%{name}|%{state}",
- "target.bst",
- ],
+ project=project, args=["show", "--deps", "all", "--format", "%{name}|%{state}", "target.bst",],
)
result.assert_success()
diff --git a/tests/frontend/workspace.py b/tests/frontend/workspace.py
index ba4e9577f..f469939d1 100644
--- a/tests/frontend/workspace.py
+++ b/tests/frontend/workspace.py
@@ -68,9 +68,7 @@ class WorkspaceCreator:
self.workspace_cmd = os.path.join(self.project_path, "workspace_cmd")
- def create_workspace_element(
- self, kind, suffix="", workspace_dir=None, element_attrs=None
- ):
+ def create_workspace_element(self, kind, suffix="", workspace_dir=None, element_attrs=None):
element_name = "workspace-test-{}{}.bst".format(kind, suffix)
element_path = os.path.join(self.project_path, "elements")
if not workspace_dir:
@@ -90,9 +88,7 @@ class WorkspaceCreator:
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
return element_name, element_path, workspace_dir
- def create_workspace_elements(
- self, kinds, suffixs=None, workspace_dir_usr=None, element_attrs=None
- ):
+ def create_workspace_elements(self, kinds, suffixs=None, workspace_dir_usr=None, element_attrs=None):
element_tuples = []
@@ -109,25 +105,16 @@ class WorkspaceCreator:
element_tuples.append((element_name, workspace_dir))
# Assert that a fetch is needed
- states = self.cli.get_element_states(
- self.project_path, [e for e, _ in element_tuples]
- )
+ states = self.cli.get_element_states(self.project_path, [e for e, _ in element_tuples])
assert not any(states[e] != "fetch needed" for e, _ in element_tuples)
return element_tuples
def open_workspaces(
- self,
- kinds,
- suffixs=None,
- workspace_dir=None,
- element_attrs=None,
- no_checkout=False,
+ self, kinds, suffixs=None, workspace_dir=None, element_attrs=None, no_checkout=False,
):
- element_tuples = self.create_workspace_elements(
- kinds, suffixs, workspace_dir, element_attrs
- )
+ element_tuples = self.create_workspace_elements(kinds, suffixs, workspace_dir, element_attrs)
os.makedirs(self.workspace_cmd, exist_ok=True)
# Now open the workspace, this should have the effect of automatically
@@ -140,20 +127,14 @@ class WorkspaceCreator:
_, workspace_dir = element_tuples[0]
args.extend(["--directory", workspace_dir])
- args.extend(
- [element_name for element_name, workspace_dir_suffix in element_tuples]
- )
- result = self.cli.run(
- cwd=self.workspace_cmd, project=self.project_path, args=args
- )
+ args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
+ result = self.cli.run(cwd=self.workspace_cmd, project=self.project_path, args=args)
result.assert_success()
if not no_checkout:
# Assert that we are now buildable because the source is now cached.
- states = self.cli.get_element_states(
- self.project_path, [e for e, _ in element_tuples]
- )
+ states = self.cli.get_element_states(self.project_path, [e for e, _ in element_tuples])
assert not any(states[e] != "buildable" for e, _ in element_tuples)
# Check that the executable hello file is found in each workspace
@@ -176,9 +157,7 @@ def open_workspace(
no_checkout=False,
):
workspace_object = WorkspaceCreator(cli, tmpdir, datafiles, project_path)
- workspaces = workspace_object.open_workspaces(
- (kind,), (suffix,), workspace_dir, element_attrs, no_checkout
- )
+ workspaces = workspace_object.open_workspaces((kind,), (suffix,), workspace_dir, element_attrs, no_checkout)
assert len(workspaces) == 1
element_name, workspace = workspaces[0]
return element_name, workspace_object.project_path, workspace
@@ -197,9 +176,7 @@ def test_open_bzr_customize(cli, tmpdir, datafiles):
source_config = element_config.get_sequence("sources").mapping_at(0)
output = subprocess.check_output(["bzr", "info"], cwd=workspace)
stripped_url = source_config.get_str("url").lstrip("file:///")
- expected_output_str = "checkout of branch: /{}/{}".format(
- stripped_url, source_config.get_str("track")
- )
+ expected_output_str = "checkout of branch: /{}/{}".format(stripped_url, source_config.get_str("track"))
assert expected_output_str in str(output)
@@ -221,9 +198,7 @@ def test_open_multi(cli, tmpdir, datafiles):
assert ".bzr" not in workspace_lsdir
-@pytest.mark.skipif(
- os.geteuid() == 0, reason="root may have CAP_DAC_OVERRIDE and ignore permissions"
-)
+@pytest.mark.skipif(os.geteuid() == 0, reason="root may have CAP_DAC_OVERRIDE and ignore permissions")
@pytest.mark.datafiles(DATA_DIR)
def test_open_multi_unwritable(cli, tmpdir, datafiles):
workspace_object = WorkspaceCreator(cli, tmpdir, datafiles)
@@ -240,9 +215,7 @@ def test_open_multi_unwritable(cli, tmpdir, datafiles):
cwdstat = os.stat(workspace_object.workspace_cmd)
try:
os.chmod(workspace_object.workspace_cmd, cwdstat.st_mode - stat.S_IWRITE)
- result = workspace_object.cli.run(
- project=workspace_object.project_path, args=args
- )
+ result = workspace_object.cli.run(project=workspace_object.project_path, args=args)
finally:
# Using this finally to make sure we always put things back how they should be.
os.chmod(workspace_object.workspace_cmd, cwdstat.st_mode)
@@ -250,12 +223,7 @@ def test_open_multi_unwritable(cli, tmpdir, datafiles):
result.assert_main_error(ErrorDomain.STREAM, None)
# Normally we avoid checking stderr in favour of using the machine-readable result.assert_main_error
# But Tristan was very keen that the names of the elements left needing workspaces were present in the output
- assert (
- " ".join(
- [element_name for element_name, workspace_dir_suffix in element_tuples[1:]]
- )
- in result.stderr
- )
+ assert " ".join([element_name for element_name, workspace_dir_suffix in element_tuples[1:]]) in result.stderr
@pytest.mark.datafiles(DATA_DIR)
@@ -272,9 +240,7 @@ def test_open_multi_with_directory(cli, tmpdir, datafiles):
args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
result = workspace_object.cli.run(
- cwd=workspace_object.workspace_cmd,
- project=workspace_object.project_path,
- args=args,
+ cwd=workspace_object.workspace_cmd, project=workspace_object.project_path, args=args,
)
result.assert_main_error(ErrorDomain.STREAM, "directory-with-multiple-elements")
@@ -285,9 +251,7 @@ def test_open_defaultlocation(cli, tmpdir, datafiles):
workspace_object = WorkspaceCreator(cli, tmpdir, datafiles)
# pylint: disable=unbalanced-tuple-unpacking
- ((element_name, workspace_dir),) = workspace_object.create_workspace_elements(
- ["git"], ["git"]
- )
+ ((element_name, workspace_dir),) = workspace_object.create_workspace_elements(["git"], ["git"])
os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
# Now open the workspace, this should have the effect of automatically
@@ -304,10 +268,7 @@ def test_open_defaultlocation(cli, tmpdir, datafiles):
result.assert_success()
- assert (
- cli.get_element_state(workspace_object.project_path, element_name)
- == "buildable"
- )
+ assert cli.get_element_state(workspace_object.project_path, element_name) == "buildable"
# Check that the executable hello file is found in the workspace
# even though the cli.run function was not run with cwd = workspace_object.workspace_cmd
@@ -322,9 +283,7 @@ def test_open_defaultlocation_exists(cli, tmpdir, datafiles):
workspace_object = WorkspaceCreator(cli, tmpdir, datafiles)
# pylint: disable=unbalanced-tuple-unpacking
- ((element_name, workspace_dir),) = workspace_object.create_workspace_elements(
- ["git"], ["git"]
- )
+ ((element_name, workspace_dir),) = workspace_object.create_workspace_elements(["git"], ["git"])
os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
with open(workspace_dir, "w") as fl:
@@ -362,10 +321,7 @@ def test_open_force(cli, tmpdir, datafiles):
assert os.path.exists(workspace)
# Now open the workspace again with --force, this should happily succeed
- result = cli.run(
- project=project,
- args=["workspace", "open", "--force", "--directory", workspace, element_name],
- )
+ result = cli.run(project=project, args=["workspace", "open", "--force", "--directory", workspace, element_name],)
result.assert_success()
@@ -377,10 +333,7 @@ def test_open_force_open(cli, tmpdir, datafiles):
assert os.path.exists(workspace)
# Now open the workspace again with --force, this should happily succeed
- result = cli.run(
- project=project,
- args=["workspace", "open", "--force", "--directory", workspace, element_name],
- )
+ result = cli.run(project=project, args=["workspace", "open", "--force", "--directory", workspace, element_name],)
result.assert_success()
@@ -400,15 +353,7 @@ def test_open_force_open_no_checkout(cli, tmpdir, datafiles):
# Now open the workspace again with --force and --no-checkout
result = cli.run(
project=project,
- args=[
- "workspace",
- "open",
- "--force",
- "--no-checkout",
- "--directory",
- workspace,
- element_name,
- ],
+ args=["workspace", "open", "--force", "--no-checkout", "--directory", workspace, element_name,],
)
result.assert_success()
@@ -430,9 +375,7 @@ def test_open_force_different_workspace(cli, tmpdir, datafiles):
tmpdir = os.path.join(str(tmpdir), "-beta")
shutil.move(hello_path, hello1_path)
- element_name2, _, workspace2 = open_workspace(
- cli, tmpdir, datafiles, "git", "-beta"
- )
+ element_name2, _, workspace2 = open_workspace(cli, tmpdir, datafiles, "git", "-beta")
# Assert the workspace dir exists
assert os.path.exists(workspace2)
@@ -444,10 +387,7 @@ def test_open_force_different_workspace(cli, tmpdir, datafiles):
assert os.path.exists(os.path.join(workspace2, "usr", "bin", "hello"))
# Now open the workspace again with --force, this should happily succeed
- result = cli.run(
- project=project,
- args=["workspace", "open", "--force", "--directory", workspace, element_name2],
- )
+ result = cli.run(project=project, args=["workspace", "open", "--force", "--directory", workspace, element_name2],)
# Assert that the file in workspace 1 has been replaced
# With the file from workspace 2
@@ -462,9 +402,7 @@ def test_close(cli, tmpdir, datafiles):
element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git")
# Close the workspace
- result = cli.run(
- project=project, args=["workspace", "close", "--remove-dir", element_name]
- )
+ result = cli.run(project=project, args=["workspace", "close", "--remove-dir", element_name])
result.assert_success()
# Assert the workspace dir has been deleted
@@ -475,18 +413,14 @@ def test_close(cli, tmpdir, datafiles):
def test_close_external_after_move_project(cli, tmpdir, datafiles):
workspace_dir = os.path.join(str(tmpdir), "workspace")
project_path = os.path.join(str(tmpdir), "initial_project")
- element_name, _, _ = open_workspace(
- cli, tmpdir, datafiles, "git", "", workspace_dir, project_path
- )
+ element_name, _, _ = open_workspace(cli, tmpdir, datafiles, "git", "", workspace_dir, project_path)
assert os.path.exists(workspace_dir)
moved_dir = os.path.join(str(tmpdir), "external_project")
shutil.move(project_path, moved_dir)
assert os.path.exists(moved_dir)
# Close the workspace
- result = cli.run(
- project=moved_dir, args=["workspace", "close", "--remove-dir", element_name]
- )
+ result = cli.run(project=moved_dir, args=["workspace", "close", "--remove-dir", element_name])
result.assert_success()
# Assert the workspace dir has been deleted
@@ -498,21 +432,14 @@ def test_close_internal_after_move_project(cli, tmpdir, datafiles):
initial_dir = os.path.join(str(tmpdir), "initial_project")
initial_workspace = os.path.join(initial_dir, "workspace")
element_name, _, _ = open_workspace(
- cli,
- tmpdir,
- datafiles,
- "git",
- workspace_dir=initial_workspace,
- project_path=initial_dir,
+ cli, tmpdir, datafiles, "git", workspace_dir=initial_workspace, project_path=initial_dir,
)
moved_dir = os.path.join(str(tmpdir), "internal_project")
shutil.move(initial_dir, moved_dir)
assert os.path.exists(moved_dir)
# Close the workspace
- result = cli.run(
- project=moved_dir, args=["workspace", "close", "--remove-dir", element_name]
- )
+ result = cli.run(project=moved_dir, args=["workspace", "close", "--remove-dir", element_name])
result.assert_success()
# Assert the workspace dir has been deleted
@@ -538,9 +465,7 @@ def test_close_removed(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_close_nonexistant_element(cli, tmpdir, datafiles):
element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git")
- element_path = os.path.join(
- datafiles.dirname, datafiles.basename, "elements", element_name
- )
+ element_path = os.path.join(datafiles.dirname, datafiles.basename, "elements", element_name)
# First brutally remove the element.bst file, ensuring that
# the element does not exist anymore in the project where
@@ -548,9 +473,7 @@ def test_close_nonexistant_element(cli, tmpdir, datafiles):
os.remove(element_path)
# Close the workspace
- result = cli.run(
- project=project, args=["workspace", "close", "--remove-dir", element_name]
- )
+ result = cli.run(project=project, args=["workspace", "close", "--remove-dir", element_name])
result.assert_success()
# Assert the workspace dir has been deleted
@@ -561,17 +484,11 @@ def test_close_nonexistant_element(cli, tmpdir, datafiles):
def test_close_multiple(cli, tmpdir, datafiles):
tmpdir_alpha = os.path.join(str(tmpdir), "alpha")
tmpdir_beta = os.path.join(str(tmpdir), "beta")
- alpha, project, workspace_alpha = open_workspace(
- cli, tmpdir_alpha, datafiles, "git", suffix="-alpha"
- )
- beta, project, workspace_beta = open_workspace(
- cli, tmpdir_beta, datafiles, "git", suffix="-beta"
- )
+ alpha, project, workspace_alpha = open_workspace(cli, tmpdir_alpha, datafiles, "git", suffix="-alpha")
+ beta, project, workspace_beta = open_workspace(cli, tmpdir_beta, datafiles, "git", suffix="-beta")
# Close the workspaces
- result = cli.run(
- project=project, args=["workspace", "close", "--remove-dir", alpha, beta]
- )
+ result = cli.run(project=project, args=["workspace", "close", "--remove-dir", alpha, beta])
result.assert_success()
# Assert the workspace dirs have been deleted
@@ -583,17 +500,11 @@ def test_close_multiple(cli, tmpdir, datafiles):
def test_close_all(cli, tmpdir, datafiles):
tmpdir_alpha = os.path.join(str(tmpdir), "alpha")
tmpdir_beta = os.path.join(str(tmpdir), "beta")
- _, project, workspace_alpha = open_workspace(
- cli, tmpdir_alpha, datafiles, "git", suffix="-alpha"
- )
- _, project, workspace_beta = open_workspace(
- cli, tmpdir_beta, datafiles, "git", suffix="-beta"
- )
+ _, project, workspace_alpha = open_workspace(cli, tmpdir_alpha, datafiles, "git", suffix="-alpha")
+ _, project, workspace_beta = open_workspace(cli, tmpdir_beta, datafiles, "git", suffix="-beta")
# Close the workspaces
- result = cli.run(
- project=project, args=["workspace", "close", "--remove-dir", "--all"]
- )
+ result = cli.run(project=project, args=["workspace", "close", "--remove-dir", "--all"])
result.assert_success()
# Assert the workspace dirs have been deleted
@@ -657,9 +568,7 @@ def test_reset_soft(cli, tmpdir, datafiles):
assert os.path.exists(pony_path)
# Now soft-reset the open workspace, this should not revert the changes
- result = cli.run(
- project=project, args=["workspace", "reset", "--soft", element_name]
- )
+ result = cli.run(project=project, args=["workspace", "reset", "--soft", element_name])
result.assert_success()
# we removed this dir
assert not os.path.exists(os.path.join(workspace, "usr", "bin"))
@@ -677,12 +586,8 @@ def test_reset_multiple(cli, tmpdir, datafiles):
# Open the workspaces
tmpdir_alpha = os.path.join(str(tmpdir), "alpha")
tmpdir_beta = os.path.join(str(tmpdir), "beta")
- alpha, project, workspace_alpha = open_workspace(
- cli, tmpdir_alpha, datafiles, "git", suffix="-alpha"
- )
- beta, project, workspace_beta = open_workspace(
- cli, tmpdir_beta, datafiles, "git", suffix="-beta"
- )
+ alpha, project, workspace_alpha = open_workspace(cli, tmpdir_alpha, datafiles, "git", suffix="-alpha")
+ beta, project, workspace_beta = open_workspace(cli, tmpdir_beta, datafiles, "git", suffix="-beta")
# Modify workspaces
shutil.rmtree(os.path.join(workspace_alpha, "usr", "bin"))
@@ -703,12 +608,8 @@ def test_reset_all(cli, tmpdir, datafiles):
# Open the workspaces
tmpdir_alpha = os.path.join(str(tmpdir), "alpha")
tmpdir_beta = os.path.join(str(tmpdir), "beta")
- _, project, workspace_alpha = open_workspace(
- cli, tmpdir_alpha, datafiles, "git", suffix="-alpha"
- )
- _, project, workspace_beta = open_workspace(
- cli, tmpdir_beta, datafiles, "git", suffix="-beta"
- )
+ _, project, workspace_alpha = open_workspace(cli, tmpdir_alpha, datafiles, "git", suffix="-alpha")
+ _, project, workspace_beta = open_workspace(cli, tmpdir_beta, datafiles, "git", suffix="-beta")
# Modify workspaces
shutil.rmtree(os.path.join(workspace_alpha, "usr", "bin"))
@@ -749,13 +650,9 @@ def test_list(cli, tmpdir, datafiles):
[(False, False), (True, True), (True, False)],
ids=["project-no-guess", "workspace-guess", "workspace-no-guess"],
)
-def test_build(
- cli, tmpdir_factory, datafiles, kind, strict, from_workspace, guess_element
-):
+def test_build(cli, tmpdir_factory, datafiles, kind, strict, from_workspace, guess_element):
tmpdir = tmpdir_factory.mktemp("")
- element_name, project, workspace = open_workspace(
- cli, tmpdir, datafiles, kind, False
- )
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
checkout = os.path.join(str(tmpdir), "checkout")
args_dir = ["-C", workspace] if from_workspace else []
args_elm = [element_name] if not guess_element else []
@@ -786,10 +683,7 @@ def test_build(
assert key_1 == key_2
# Checkout the result
- result = cli.run(
- project=project,
- args=args_dir + ["artifact", "checkout", "--directory", checkout, *args_elm],
- )
+ result = cli.run(project=project, args=args_dir + ["artifact", "checkout", "--directory", checkout, *args_elm],)
result.assert_success()
# Check that the pony.conf from the modified workspace exists
@@ -896,10 +790,7 @@ def test_detect_modifications(cli, tmpdir, datafiles, modification, strict):
assert key_1 != key_3
# Checkout the result
- result = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout],)
result.assert_success()
# Check the result for the changes we made
@@ -927,10 +818,7 @@ def test_detect_modifications(cli, tmpdir, datafiles, modification, strict):
# Test loading a negative workspace version
{"format-version": -1},
# Test loading version 0 with two sources
- {
- "format-version": 0,
- "alpha.bst": {0: "/workspaces/bravo", 1: "/workspaces/charlie",},
- },
+ {"format-version": 0, "alpha.bst": {0: "/workspaces/bravo", 1: "/workspaces/charlie",},},
# Test loading a version with decimals
{"format-version": 0.5},
# Test loading a future version
@@ -959,13 +847,7 @@ def test_list_unsupported_workspace(cli, datafiles, workspace_cfg):
{"alpha.bst": "/workspaces/bravo"},
{
"format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {
- "alpha.bst": {
- "prepared": False,
- "path": "/workspaces/bravo",
- "running_files": {},
- }
- },
+ "workspaces": {"alpha.bst": {"prepared": False, "path": "/workspaces/bravo", "running_files": {},}},
},
),
# Test loading version 0 with only one source
@@ -973,30 +855,15 @@ def test_list_unsupported_workspace(cli, datafiles, workspace_cfg):
{"alpha.bst": {0: "/workspaces/bravo"}},
{
"format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {
- "alpha.bst": {
- "prepared": False,
- "path": "/workspaces/bravo",
- "running_files": {},
- }
- },
+ "workspaces": {"alpha.bst": {"prepared": False, "path": "/workspaces/bravo", "running_files": {},}},
},
),
# Test loading version 1
(
- {
- "format-version": 1,
- "workspaces": {"alpha.bst": {"path": "/workspaces/bravo"}},
- },
+ {"format-version": 1, "workspaces": {"alpha.bst": {"path": "/workspaces/bravo"}},},
{
"format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {
- "alpha.bst": {
- "prepared": False,
- "path": "/workspaces/bravo",
- "running_files": {},
- }
- },
+ "workspaces": {"alpha.bst": {"prepared": False, "path": "/workspaces/bravo", "running_files": {},}},
},
),
# Test loading version 2
@@ -1027,23 +894,11 @@ def test_list_unsupported_workspace(cli, datafiles, workspace_cfg):
(
{
"format-version": 3,
- "workspaces": {
- "alpha.bst": {
- "prepared": True,
- "path": "/workspaces/bravo",
- "running_files": {},
- }
- },
+ "workspaces": {"alpha.bst": {"prepared": True, "path": "/workspaces/bravo", "running_files": {},}},
},
{
"format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {
- "alpha.bst": {
- "prepared": True,
- "path": "/workspaces/bravo",
- "running_files": {},
- }
- },
+ "workspaces": {"alpha.bst": {"prepared": True, "path": "/workspaces/bravo", "running_files": {},}},
},
),
],
@@ -1087,14 +942,9 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expecte
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# Make a change to the workspaces file
- result = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace, element_name],
- )
+ result = cli.run(project=project, args=["workspace", "open", "--directory", workspace, element_name],)
result.assert_success()
- result = cli.run(
- project=project, args=["workspace", "close", "--remove-dir", element_name]
- )
+ result = cli.run(project=project, args=["workspace", "close", "--remove-dir", element_name])
result.assert_success()
# Check that workspace config is converted correctly if necessary
@@ -1116,9 +966,7 @@ def test_inconsitent_pipeline_message(cli, tmpdir, datafiles):
@pytest.mark.parametrize("strict", [("strict"), ("non-strict")])
def test_cache_key_workspace_in_dependencies(cli, tmpdir, datafiles, strict):
checkout = os.path.join(str(tmpdir), "checkout")
- element_name, project, workspace = open_workspace(
- cli, os.path.join(str(tmpdir), "repo-a"), datafiles, "git"
- )
+ element_name, project, workspace = open_workspace(cli, os.path.join(str(tmpdir), "repo-a"), datafiles, "git")
element_path = os.path.join(project, "elements")
back_dep_element_name = "workspace-test-back-dep.bst"
@@ -1165,10 +1013,7 @@ def test_cache_key_workspace_in_dependencies(cli, tmpdir, datafiles, strict):
assert key_b1 == key_b2
# Checkout the result
- result = cli.run(
- project=project,
- args=["artifact", "checkout", back_dep_element_name, "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", back_dep_element_name, "--directory", checkout],)
result.assert_success()
# Check that the pony.conf from the modified workspace exists
@@ -1185,9 +1030,7 @@ def test_multiple_failed_builds(cli, tmpdir, datafiles):
"kind": "manual",
"config": {"configure-commands": ["unknown_command_that_will_fail"]},
}
- element_name, project, _ = open_workspace(
- cli, tmpdir, datafiles, "git", element_attrs=element_config
- )
+ element_name, project, _ = open_workspace(cli, tmpdir, datafiles, "git", element_attrs=element_config)
for _ in range(2):
result = cli.run(project=project, args=["build", element_name])
@@ -1208,12 +1051,7 @@ def test_external_fetch(cli, datafiles, tmpdir_factory, subdir, guess_element):
create_element_size(depend_element, str(datafiles), "elements", [], 1024)
element_name, project, workspace = open_workspace(
- cli,
- tmpdir,
- datafiles,
- "git",
- no_checkout=True,
- element_attrs={"depends": [depend_element]},
+ cli, tmpdir, datafiles, "git", no_checkout=True, element_attrs={"depends": [depend_element]},
)
arg_elm = [element_name] if not guess_element else []
@@ -1227,9 +1065,7 @@ def test_external_fetch(cli, datafiles, tmpdir_factory, subdir, guess_element):
assert cli.get_element_state(str(datafiles), depend_element) == "fetch needed"
# Fetch the workspaced element
- result = cli.run(
- project=project, args=["-C", call_dir, "source", "fetch", *arg_elm]
- )
+ result = cli.run(project=project, args=["-C", call_dir, "source", "fetch", *arg_elm])
result.assert_success()
# Assert that the depended element has now been fetched
@@ -1250,15 +1086,10 @@ def test_external_push_pull(cli, datafiles, tmpdir_factory, guess_element):
cli.configure({"artifacts": {"url": share.repo, "push": True}})
- result = cli.run(
- project=project, args=["-C", workspace, "artifact", "push", *arg_elm]
- )
+ result = cli.run(project=project, args=["-C", workspace, "artifact", "push", *arg_elm])
result.assert_success()
- result = cli.run(
- project=project,
- args=["-C", workspace, "artifact", "pull", "--deps", "all", *arg_elm],
- )
+ result = cli.run(project=project, args=["-C", workspace, "artifact", "pull", "--deps", "all", *arg_elm],)
result.assert_success()
@@ -1280,9 +1111,7 @@ def test_external_track(cli, datafiles, tmpdir_factory, guess_element):
del element_contents.get_sequence("sources").mapping_at(0)["ref"]
_yaml.roundtrip_dump(element_contents, element_file)
- result = cli.run(
- project=project, args=["-C", workspace, "source", "track", *arg_elm]
- )
+ result = cli.run(project=project, args=["-C", workspace, "source", "track", *arg_elm])
result.assert_success()
# Element is not tracked now
@@ -1290,9 +1119,7 @@ def test_external_track(cli, datafiles, tmpdir_factory, guess_element):
assert "ref" not in element_contents.get_sequence("sources").mapping_at(0)
# close the workspace
- result = cli.run(
- project=project, args=["-C", workspace, "workspace", "close", *arg_elm]
- )
+ result = cli.run(project=project, args=["-C", workspace, "workspace", "close", *arg_elm])
result.assert_success()
# and retrack the element
@@ -1311,12 +1138,8 @@ def test_external_open_other(cli, datafiles, tmpdir_factory):
tmpdir1 = tmpdir_factory.mktemp("")
tmpdir2 = tmpdir_factory.mktemp("")
# Making use of the assumption that it's the same project in both invocations of open_workspace
- _, project, alpha_workspace = open_workspace(
- cli, tmpdir1, datafiles, "git", suffix="-alpha"
- )
- beta_element, _, beta_workspace = open_workspace(
- cli, tmpdir2, datafiles, "git", suffix="-beta"
- )
+ _, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", suffix="-alpha")
+ beta_element, _, beta_workspace = open_workspace(cli, tmpdir2, datafiles, "git", suffix="-beta")
# Closing the other element first, because I'm too lazy to create an
# element without opening it
@@ -1325,16 +1148,7 @@ def test_external_open_other(cli, datafiles, tmpdir_factory):
result = cli.run(
project=project,
- args=[
- "-C",
- alpha_workspace,
- "workspace",
- "open",
- "--force",
- "--directory",
- beta_workspace,
- beta_element,
- ],
+ args=["-C", alpha_workspace, "workspace", "open", "--force", "--directory", beta_workspace, beta_element,],
)
result.assert_success()
@@ -1345,15 +1159,10 @@ def test_external_close_other(cli, datafiles, tmpdir_factory):
tmpdir1 = tmpdir_factory.mktemp("")
tmpdir2 = tmpdir_factory.mktemp("")
# Making use of the assumption that it's the same project in both invocations of open_workspace
- _, project, alpha_workspace = open_workspace(
- cli, tmpdir1, datafiles, "git", suffix="-alpha"
- )
+ _, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", suffix="-alpha")
beta_element, _, _ = open_workspace(cli, tmpdir2, datafiles, "git", suffix="-beta")
- result = cli.run(
- project=project,
- args=["-C", alpha_workspace, "workspace", "close", beta_element],
- )
+ result = cli.run(project=project, args=["-C", alpha_workspace, "workspace", "close", beta_element],)
result.assert_success()
assert "you can no longer run BuildStream" not in result.stderr
@@ -1365,15 +1174,11 @@ def test_external_close_self(cli, datafiles, tmpdir_factory, guess_element):
tmpdir1 = tmpdir_factory.mktemp("")
tmpdir2 = tmpdir_factory.mktemp("")
# Making use of the assumption that it's the same project in both invocations of open_workspace
- alpha_element, project, alpha_workspace = open_workspace(
- cli, tmpdir1, datafiles, "git", suffix="-alpha"
- )
+ alpha_element, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", suffix="-alpha")
_, _, _ = open_workspace(cli, tmpdir2, datafiles, "git", suffix="-beta")
arg_elm = [alpha_element] if not guess_element else []
- result = cli.run(
- project=project, args=["-C", alpha_workspace, "workspace", "close", *arg_elm]
- )
+ result = cli.run(project=project, args=["-C", alpha_workspace, "workspace", "close", *arg_elm])
result.assert_success()
assert "you can no longer run BuildStream" in result.stderr
@@ -1383,15 +1188,10 @@ def test_external_reset_other(cli, datafiles, tmpdir_factory):
tmpdir1 = tmpdir_factory.mktemp("")
tmpdir2 = tmpdir_factory.mktemp("")
# Making use of the assumption that it's the same project in both invocations of open_workspace
- _, project, alpha_workspace = open_workspace(
- cli, tmpdir1, datafiles, "git", suffix="-alpha"
- )
+ _, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", suffix="-alpha")
beta_element, _, _ = open_workspace(cli, tmpdir2, datafiles, "git", suffix="-beta")
- result = cli.run(
- project=project,
- args=["-C", alpha_workspace, "workspace", "reset", beta_element],
- )
+ result = cli.run(project=project, args=["-C", alpha_workspace, "workspace", "reset", beta_element],)
result.assert_success()
@@ -1402,9 +1202,7 @@ def test_external_reset_self(cli, datafiles, tmpdir, guess_element):
arg_elm = [element] if not guess_element else []
# Command succeeds
- result = cli.run(
- project=project, args=["-C", workspace, "workspace", "reset", *arg_elm]
- )
+ result = cli.run(project=project, args=["-C", workspace, "workspace", "reset", *arg_elm])
result.assert_success()
# Successive commands still work (i.e. .bstproject.yaml hasn't been deleted)
@@ -1430,19 +1228,13 @@ def test_multisource_workspace(cli, datafiles, tmpdir):
element_name = "multisource.bst"
element = {
"kind": "import",
- "sources": [
- {"kind": "local", "path": "files/bin-files"},
- {"kind": "local", "path": "files/dev-files"},
- ],
+ "sources": [{"kind": "local", "path": "files/bin-files"}, {"kind": "local", "path": "files/dev-files"},],
}
element_path = os.path.join(project, "elements", element_name)
_yaml.roundtrip_dump(element, element_path)
workspace_dir = os.path.join(str(tmpdir), "multisource")
- res = cli.run(
- project=project,
- args=["workspace", "open", "multisource.bst", "--directory", workspace_dir],
- )
+ res = cli.run(project=project, args=["workspace", "open", "multisource.bst", "--directory", workspace_dir],)
res.assert_success()
directories = os.listdir(os.path.join(workspace_dir, "usr"))
@@ -1462,14 +1254,8 @@ TEST_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)))
@pytest.mark.parametrize(
["case", "non_workspaced_elements_state"],
[
- (
- "workspaced-build-dep",
- ["waiting", "waiting", "waiting", "waiting", "waiting"],
- ),
- (
- "workspaced-runtime-dep",
- ["buildable", "buildable", "waiting", "waiting", "waiting"],
- ),
+ ("workspaced-build-dep", ["waiting", "waiting", "waiting", "waiting", "waiting"],),
+ ("workspaced-runtime-dep", ["buildable", "buildable", "waiting", "waiting", "waiting"],),
],
)
@pytest.mark.parametrize("strict", [("strict"), ("non-strict")])
@@ -1492,10 +1278,7 @@ def test_build_all(cli, tmpdir, datafiles, case, strict, non_workspaced_elements
cli.configure({"projects": {"test": {"strict": strict_mode}}})
# First open the workspace
- result = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace, "elem1.bst"],
- )
+ result = cli.run(project=project, args=["workspace", "open", "--directory", workspace, "elem1.bst"],)
result.assert_success()
# Ensure all elements are waiting build the first
@@ -1508,9 +1291,7 @@ def test_build_all(cli, tmpdir, datafiles, case, strict, non_workspaced_elements
result.assert_success()
# Assert that the target is built
- assert cli.get_element_states(project, all_elements) == {
- elem: "cached" for elem in all_elements
- }
+ assert cli.get_element_states(project, all_elements) == {elem: "cached" for elem in all_elements}
@pytest.mark.datafiles(DATA_DIR)
@@ -1527,9 +1308,7 @@ def test_show_workspace_logs(cli, tmpdir, datafiles, strict):
cli.configure({"projects": {"test": {"strict": strict_mode}}})
# First open the workspace
- result = cli.run(
- project=project, args=["workspace", "open", "--directory", workspace, target]
- )
+ result = cli.run(project=project, args=["workspace", "open", "--directory", workspace, target])
result.assert_success()
# Build the element
diff --git a/tests/integration/artifact.py b/tests/integration/artifact.py
index 67565b803..d66c86ba9 100644
--- a/tests/integration/artifact.py
+++ b/tests/integration/artifact.py
@@ -45,25 +45,17 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
# Does this really need a sandbox?
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_cache_buildtrees(cli, tmpdir, datafiles):
project = str(datafiles)
element_name = "autotools/amhello.bst"
cwd = str(tmpdir)
# Create artifact shares for pull & push testing
- with create_artifact_share(
- os.path.join(str(tmpdir), "share1")
- ) as share1, create_artifact_share(
+ with create_artifact_share(os.path.join(str(tmpdir), "share1")) as share1, create_artifact_share(
os.path.join(str(tmpdir), "share2")
- ) as share2, create_artifact_share(
- os.path.join(str(tmpdir), "share3")
- ) as share3:
- cli.configure(
- {"artifacts": {"url": share1.repo, "push": True}, "cachedir": str(tmpdir)}
- )
+ ) as share2, create_artifact_share(os.path.join(str(tmpdir), "share3")) as share3:
+ cli.configure({"artifacts": {"url": share1.repo, "push": True}, "cachedir": str(tmpdir)})
# Build autotools element with the default behavior of caching buildtrees
# only when necessary. The artifact should be successfully pushed to the share1 remote
@@ -86,10 +78,7 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
shutil.rmtree(os.path.join(str(tmpdir), "cas"))
shutil.rmtree(os.path.join(str(tmpdir), "artifacts"))
assert cli.get_element_state(project, element_name) != "cached"
- result = cli.run(
- project=project,
- args=["--pull-buildtrees", "artifact", "pull", element_name],
- )
+ result = cli.run(project=project, args=["--pull-buildtrees", "artifact", "pull", element_name],)
assert element_name in result.get_pulled_elements()
with cli.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert not buildtreedir
@@ -109,13 +98,8 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
# Repeat building the artifacts, this time with cache-buildtrees set to
# 'always' via the cli, as such the buildtree dir should not be empty
- cli.configure(
- {"artifacts": {"url": share2.repo, "push": True}, "cachedir": str(tmpdir)}
- )
- result = cli.run(
- project=project,
- args=["--cache-buildtrees", "always", "build", element_name],
- )
+ cli.configure({"artifacts": {"url": share2.repo, "push": True}, "cachedir": str(tmpdir)})
+ result = cli.run(project=project, args=["--cache-buildtrees", "always", "build", element_name],)
assert result.exit_code == 0
assert cli.get_element_state(project, element_name) == "cached"
assert share2.get_artifact(cli.get_artifact_name(project, "test", element_name))
@@ -130,10 +114,7 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
shutil.rmtree(os.path.join(str(tmpdir), "cas"))
shutil.rmtree(os.path.join(str(tmpdir), "artifacts"))
assert cli.get_element_state(project, element_name) != "cached"
- result = cli.run(
- project=project,
- args=["--pull-buildtrees", "artifact", "pull", element_name],
- )
+ result = cli.run(project=project, args=["--pull-buildtrees", "artifact", "pull", element_name],)
assert element_name in result.get_pulled_elements()
with cli.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert os.path.isdir(buildtreedir)
diff --git a/tests/integration/autotools.py b/tests/integration/autotools.py
index d270b2a77..47eb9cdd8 100644
--- a/tests/integration/autotools.py
+++ b/tests/integration/autotools.py
@@ -18,9 +18,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
# Test that an autotools build 'works' - we use the autotools sample
# amhello project for this.
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_autotools_build(cli, datafiles):
project = str(datafiles)
checkout = os.path.join(cli.directory, "checkout")
@@ -29,10 +27,7 @@ def test_autotools_build(cli, datafiles):
result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout],)
assert result.exit_code == 0
assert_contains(
@@ -53,9 +48,7 @@ def test_autotools_build(cli, datafiles):
# Test that an autotools build 'works' - we use the autotools sample
# amhello project for this.
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
def test_autotools_confroot_build(cli, datafiles):
project = str(datafiles)
@@ -65,10 +58,7 @@ def test_autotools_confroot_build(cli, datafiles):
result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout],)
assert result.exit_code == 0
assert_contains(
@@ -88,9 +78,7 @@ def test_autotools_confroot_build(cli, datafiles):
# Test running an executable built with autotools
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_autotools_run(cli, datafiles):
project = str(datafiles)
element_name = "autotools/amhello.bst"
diff --git a/tests/integration/build-uid.py b/tests/integration/build-uid.py
index 367cf0248..c38dc4514 100644
--- a/tests/integration/build-uid.py
+++ b/tests/integration/build-uid.py
@@ -14,8 +14,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.skipif(
- not IS_LINUX or HAVE_SANDBOX != "bwrap",
- reason="Only available on linux with bubblewrap",
+ not IS_LINUX or HAVE_SANDBOX != "bwrap", reason="Only available on linux with bubblewrap",
)
@pytest.mark.datafiles(DATA_DIR)
def test_build_uid_overridden(cli, datafiles):
@@ -27,15 +26,12 @@ def test_build_uid_overridden(cli, datafiles):
"sandbox": {"build-uid": 800, "build-gid": 900},
}
- result = cli.run_project_config(
- project=project, project_config=project_config, args=["build", element_name]
- )
+ result = cli.run_project_config(project=project, project_config=project_config, args=["build", element_name])
assert result.exit_code == 0
@pytest.mark.skipif(
- not IS_LINUX or HAVE_SANDBOX != "bwrap",
- reason="Only available on linux with bubbelwrap",
+ not IS_LINUX or HAVE_SANDBOX != "bwrap", reason="Only available on linux with bubblewrap",
)
@pytest.mark.datafiles(DATA_DIR)
def test_build_uid_in_project(cli, datafiles):
@@ -47,16 +43,12 @@ def test_build_uid_in_project(cli, datafiles):
"sandbox": {"build-uid": 1023, "build-gid": 3490},
}
- result = cli.run_project_config(
- project=project, project_config=project_config, args=["build", element_name]
- )
+ result = cli.run_project_config(project=project, project_config=project_config, args=["build", element_name])
assert result.exit_code == 0
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- HAVE_SANDBOX != "bwrap", reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(HAVE_SANDBOX != "bwrap", reason="Only available with a functioning sandbox")
def test_build_uid_default(cli, datafiles):
project = str(datafiles)
element_name = "build-uid/build-uid-default.bst"
diff --git a/tests/integration/cachedfail.py b/tests/integration/cachedfail.py
index 366346e2b..142e8e15f 100644
--- a/tests/integration/cachedfail.py
+++ b/tests/integration/cachedfail.py
@@ -35,9 +35,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_build_checkout_cached_fail(cli, datafiles):
project = str(datafiles)
element_path = os.path.join(project, "elements", "element.bst")
@@ -59,10 +57,7 @@ def test_build_checkout_cached_fail(cli, datafiles):
assert cli.get_element_state(project, "element.bst") == "failed"
# Now check it out
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "element.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "element.bst", "--directory", checkout],)
result.assert_success()
# Check that the checkout contains the file created before failure
@@ -71,9 +66,7 @@ def test_build_checkout_cached_fail(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_build_depend_on_cached_fail(cli, datafiles):
project = str(datafiles)
dep_path = os.path.join(project, "elements", "dep.bst")
@@ -87,10 +80,7 @@ def test_build_depend_on_cached_fail(cli, datafiles):
_yaml.roundtrip_dump(dep, dep_path)
target = {
"kind": "script",
- "depends": [
- {"filename": "base.bst", "type": "build",},
- {"filename": "dep.bst", "type": "build",},
- ],
+ "depends": [{"filename": "base.bst", "type": "build",}, {"filename": "dep.bst", "type": "build",},],
"config": {"commands": ["test -e /foo",],},
}
_yaml.roundtrip_dump(target, target_path)
@@ -110,9 +100,7 @@ def test_build_depend_on_cached_fail(cli, datafiles):
assert cli.get_element_state(project, "target.bst") == "waiting"
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("on_error", ("continue", "quit"))
def test_push_cached_fail(cli, tmpdir, datafiles, on_error):
@@ -142,10 +130,7 @@ def test_push_cached_fail(cli, tmpdir, datafiles, on_error):
)
# Build the element, continuing to finish active jobs on error.
- result = cli.run(
- project=project,
- args=["--on-error={}".format(on_error), "build", "element.bst"],
- )
+ result = cli.run(project=project, args=["--on-error={}".format(on_error), "build", "element.bst"],)
result.assert_main_error(ErrorDomain.STREAM, None)
# This element should have failed
@@ -154,9 +139,7 @@ def test_push_cached_fail(cli, tmpdir, datafiles, on_error):
assert share.get_artifact(cli.get_artifact_name(project, "test", "element.bst"))
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("on_error", ("continue", "quit"))
def test_push_failed_missing_shell(cli, tmpdir, datafiles, on_error):
@@ -190,10 +173,7 @@ def test_push_failed_missing_shell(cli, tmpdir, datafiles, on_error):
)
# Build the element, continuing to finish active jobs on error.
- result = cli.run(
- project=project,
- args=["--on-error={}".format(on_error), "build", "element.bst"],
- )
+ result = cli.run(project=project, args=["--on-error={}".format(on_error), "build", "element.bst"],)
result.assert_main_error(ErrorDomain.STREAM, None)
# This element should have failed
@@ -202,9 +182,7 @@ def test_push_failed_missing_shell(cli, tmpdir, datafiles, on_error):
assert share.get_artifact(cli.get_artifact_name(project, "test", "element.bst"))
-@pytest.mark.skipif(
- HAVE_SANDBOX != "bwrap", reason="Only available with bubblewrap on Linux"
-)
+@pytest.mark.skipif(HAVE_SANDBOX != "bwrap", reason="Only available with bubblewrap on Linux")
@pytest.mark.datafiles(DATA_DIR)
def test_host_tools_errors_are_not_cached(cli, datafiles, tmp_path):
# Create symlink to buildbox-casd to work with custom PATH
diff --git a/tests/integration/cmake.py b/tests/integration/cmake.py
index 14ee7c967..50e0bf296 100644
--- a/tests/integration/cmake.py
+++ b/tests/integration/cmake.py
@@ -16,9 +16,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_cmake_build(cli, datafiles):
project = str(datafiles)
checkout = os.path.join(cli.directory, "checkout")
@@ -27,19 +25,14 @@ def test_cmake_build(cli, datafiles):
result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout],)
assert result.exit_code == 0
assert_contains(checkout, ["/usr", "/usr/bin", "/usr/bin/hello"])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
def test_cmake_confroot_build(cli, datafiles):
project = str(datafiles)
@@ -49,19 +42,14 @@ def test_cmake_confroot_build(cli, datafiles):
result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout],)
assert result.exit_code == 0
assert_contains(checkout, ["/usr", "/usr/bin", "/usr/bin/hello"])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_cmake_run(cli, datafiles):
project = str(datafiles)
element_name = "cmake/cmakehello.bst"
diff --git a/tests/integration/compose.py b/tests/integration/compose.py
index 2d68327e4..55d6674f9 100644
--- a/tests/integration/compose.py
+++ b/tests/integration/compose.py
@@ -107,9 +107,7 @@ def create_compose_element(name, path, config=None):
),
],
)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_compose_include(cli, datafiles, include_domains, exclude_domains, expected):
project = str(datafiles)
checkout = os.path.join(cli.directory, "checkout")
@@ -127,10 +125,7 @@ def test_compose_include(cli, datafiles, include_domains, exclude_domains, expec
result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout],)
assert result.exit_code == 0
assert set(walk_dir(checkout)) == set(expected)
diff --git a/tests/integration/filter.py b/tests/integration/filter.py
index 6e95915ee..ee20ceb25 100644
--- a/tests/integration/filter.py
+++ b/tests/integration/filter.py
@@ -17,9 +17,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(os.path.join(DATA_DIR))
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_filter_pass_integration(datafiles, cli):
project = str(datafiles)
@@ -31,15 +29,7 @@ def test_filter_pass_integration(datafiles, cli):
checkout_dir = os.path.join(project, "filter")
result = cli.run(
project=project,
- args=[
- "artifact",
- "checkout",
- "--integrate",
- "--hardlinks",
- "--directory",
- checkout_dir,
- "filter/filter.bst",
- ],
+ args=["artifact", "checkout", "--integrate", "--hardlinks", "--directory", checkout_dir, "filter/filter.bst",],
)
result.assert_success()
diff --git a/tests/integration/import.py b/tests/integration/import.py
index b7f056bac..4faaba687 100644
--- a/tests/integration/import.py
+++ b/tests/integration/import.py
@@ -37,12 +37,7 @@ def create_import_element(name, path, source, target, source_path):
"/",
"/output",
"files/import-source",
- [
- "/output",
- "/output/test.txt",
- "/output/subdir",
- "/output/subdir/test.txt",
- ],
+ ["/output", "/output/test.txt", "/output/subdir", "/output/subdir/test.txt",],
),
],
)
@@ -58,8 +53,7 @@ def test_import(cli, datafiles, source, target, path, expected):
assert res.exit_code == 0
cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
+ project=project, args=["artifact", "checkout", element_name, "--directory", checkout],
)
assert res.exit_code == 0
diff --git a/tests/integration/make.py b/tests/integration/make.py
index 78f4ba8d7..1b303f4b0 100644
--- a/tests/integration/make.py
+++ b/tests/integration/make.py
@@ -18,9 +18,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
# Test that a make build 'works' - we use the make sample
# makehello project for this.
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_make_build(cli, datafiles):
project = str(datafiles)
checkout = os.path.join(cli.directory, "checkout")
@@ -29,10 +27,7 @@ def test_make_build(cli, datafiles):
result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout],)
assert result.exit_code == 0
assert_contains(checkout, ["/usr", "/usr/bin", "/usr/bin/hello"])
@@ -40,9 +35,7 @@ def test_make_build(cli, datafiles):
# Test running an executable built with make
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_make_run(cli, datafiles):
project = str(datafiles)
element_name = "make/makehello.bst"
diff --git a/tests/integration/manual.py b/tests/integration/manual.py
index 8db8b9671..b6b35600c 100644
--- a/tests/integration/manual.py
+++ b/tests/integration/manual.py
@@ -29,9 +29,7 @@ def create_manual_element(name, path, config, variables, environment):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_manual_element(cli, datafiles):
project = str(datafiles)
checkout = os.path.join(cli.directory, "checkout")
@@ -44,10 +42,7 @@ def test_manual_element(cli, datafiles):
{
"configure-commands": ["echo './configure' >> test"],
"build-commands": ["echo 'make' >> test"],
- "install-commands": [
- "echo 'make install' >> test",
- "cp test %{install-root}",
- ],
+ "install-commands": ["echo 'make install' >> test", "cp test %{install-root}",],
"strip-commands": ["echo 'strip' >> %{install-root}/test"],
},
{},
@@ -58,8 +53,7 @@ def test_manual_element(cli, datafiles):
assert res.exit_code == 0
cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
+ project=project, args=["artifact", "checkout", element_name, "--directory", checkout],
)
assert res.exit_code == 0
@@ -77,9 +71,7 @@ strip
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_manual_element_environment(cli, datafiles):
project = str(datafiles)
checkout = os.path.join(cli.directory, "checkout")
@@ -87,19 +79,14 @@ def test_manual_element_environment(cli, datafiles):
element_name = "import/import.bst"
create_manual_element(
- element_name,
- element_path,
- {"install-commands": ["echo $V >> test", "cp test %{install-root}"]},
- {},
- {"V": 2},
+ element_name, element_path, {"install-commands": ["echo $V >> test", "cp test %{install-root}"]}, {}, {"V": 2},
)
res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
+ project=project, args=["artifact", "checkout", element_name, "--directory", checkout],
)
assert res.exit_code == 0
@@ -110,9 +97,7 @@ def test_manual_element_environment(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_manual_element_noparallel(cli, datafiles):
project = str(datafiles)
checkout = os.path.join(cli.directory, "checkout")
@@ -122,13 +107,7 @@ def test_manual_element_noparallel(cli, datafiles):
create_manual_element(
element_name,
element_path,
- {
- "install-commands": [
- "echo $MAKEFLAGS >> test",
- "echo $V >> test",
- "cp test %{install-root}",
- ]
- },
+ {"install-commands": ["echo $MAKEFLAGS >> test", "echo $V >> test", "cp test %{install-root}",]},
{"notparallel": True},
{"MAKEFLAGS": "-j%{max-jobs} -Wall", "V": 2},
)
@@ -137,8 +116,7 @@ def test_manual_element_noparallel(cli, datafiles):
assert res.exit_code == 0
cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
+ project=project, args=["artifact", "checkout", element_name, "--directory", checkout],
)
assert res.exit_code == 0
@@ -154,9 +132,7 @@ def test_manual_element_noparallel(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_manual_element_logging(cli, datafiles):
project = str(datafiles)
element_path = os.path.join(project, "elements")
diff --git a/tests/integration/messages.py b/tests/integration/messages.py
index 8210664e5..66696772e 100644
--- a/tests/integration/messages.py
+++ b/tests/integration/messages.py
@@ -37,9 +37,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_disable_message_lines(cli, datafiles):
project = str(datafiles)
element_path = os.path.join(project, "elements")
@@ -51,9 +49,7 @@ def test_disable_message_lines(cli, datafiles):
"config": {"build-commands": ['echo "Silly message"'], "strip-commands": []},
}
- os.makedirs(
- os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True
- )
+ os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# First we check that we get the "Silly message"
@@ -63,17 +59,13 @@ def test_disable_message_lines(cli, datafiles):
# Let's now build it again, but with --message-lines 0
cli.remove_artifact_from_cache(project, element_name)
- result = cli.run(
- project=project, args=["--message-lines", "0", "build", element_name]
- )
+ result = cli.run(project=project, args=["--message-lines", "0", "build", element_name])
result.assert_success()
assert "Message contains " not in result.stderr
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_disable_error_lines(cli, datafiles):
project = str(datafiles)
element_path = os.path.join(project, "elements")
@@ -82,28 +74,19 @@ def test_disable_error_lines(cli, datafiles):
element = {
"kind": "manual",
"depends": [{"filename": "base.bst"}],
- "config": {
- "build-commands": ["This is a syntax error > >"],
- "strip-commands": [],
- },
+ "config": {"build-commands": ["This is a syntax error > >"], "strip-commands": [],},
}
- os.makedirs(
- os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True
- )
+ os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# First we check that we get the syntax error
- result = cli.run(
- project=project, args=["--error-lines", "0", "build", element_name]
- )
+ result = cli.run(project=project, args=["--error-lines", "0", "build", element_name])
result.assert_main_error(ErrorDomain.STREAM, None)
assert "This is a syntax error" in result.stderr
# Let's now build it again, but with --error-lines 0
cli.remove_artifact_from_cache(project, element_name)
- result = cli.run(
- project=project, args=["--error-lines", "0", "build", element_name]
- )
+ result = cli.run(project=project, args=["--error-lines", "0", "build", element_name])
result.assert_main_error(ErrorDomain.STREAM, None)
assert "Printing the last" not in result.stderr
diff --git a/tests/integration/pip_element.py b/tests/integration/pip_element.py
index d85cb5f03..5ad6040a5 100644
--- a/tests/integration/pip_element.py
+++ b/tests/integration/pip_element.py
@@ -21,9 +21,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_pip_build(cli, datafiles):
project = str(datafiles)
checkout = os.path.join(cli.directory, "checkout")
@@ -42,31 +40,23 @@ def test_pip_build(cli, datafiles):
}
],
}
- os.makedirs(
- os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True
- )
+ os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout],)
assert result.exit_code == 0
assert_contains(
- checkout,
- ["/usr", "/usr/lib", "/usr/bin", "/usr/bin/hello", "/usr/lib/python3.6"],
+ checkout, ["/usr", "/usr/lib", "/usr/bin", "/usr/bin/hello", "/usr/lib/python3.6"],
)
# Test running an executable built with pip
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_pip_run(cli, datafiles):
# Create and build our test element
test_pip_build(cli, datafiles)
@@ -80,9 +70,7 @@ def test_pip_run(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_pip_element_should_install_pip_deps(cli, datafiles, setup_pypi_repo):
project = str(datafiles)
elements_path = os.path.join(project, "elements")
@@ -104,9 +92,7 @@ def test_pip_element_should_install_pip_deps(cli, datafiles, setup_pypi_repo):
# set up directories
pypi_repo = os.path.join(project, "files", "pypi-repo")
os.makedirs(pypi_repo, exist_ok=True)
- os.makedirs(
- os.path.dirname(os.path.join(elements_path, element_name)), exist_ok=True
- )
+ os.makedirs(os.path.dirname(os.path.join(elements_path, element_name)), exist_ok=True)
setup_pypi_repo(mock_packages, pypi_repo)
# create pip element
@@ -121,11 +107,7 @@ def test_pip_element_should_install_pip_deps(cli, datafiles, setup_pypi_repo):
# FIXME: remove hardcoded ref once issue #1010 is closed
"ref": "ad96570b552498807abec33c06210bf68378d854ced6753b77916c5ed517610d",
},
- {
- "kind": "pip",
- "url": "file://{}".format(os.path.realpath(pypi_repo)),
- "packages": [myreqs_packages],
- },
+ {"kind": "pip", "url": "file://{}".format(os.path.realpath(pypi_repo)), "packages": [myreqs_packages],},
],
}
_yaml.roundtrip_dump(element, os.path.join(elements_path, element_name))
@@ -138,13 +120,8 @@ def test_pip_element_should_install_pip_deps(cli, datafiles, setup_pypi_repo):
# get installed packages in sandbox
installed_packages = set(
- cli.run(
- project=project, args=["shell", element_name, "pip3", "freeze"]
- ).output.split("\n")
+ cli.run(project=project, args=["shell", element_name, "pip3", "freeze"]).output.split("\n")
)
# compare with packages that are expected to be installed
- pip_source_packages = {
- package.replace("_", "-") + "==0.1"
- for package in dependencies + [myreqs_packages]
- }
+ pip_source_packages = {package.replace("_", "-") + "==0.1" for package in dependencies + [myreqs_packages]}
assert pip_source_packages.issubset(installed_packages)
diff --git a/tests/integration/pip_source.py b/tests/integration/pip_source.py
index bc9a4d94c..3a5d64632 100644
--- a/tests/integration/pip_source.py
+++ b/tests/integration/pip_source.py
@@ -48,16 +48,10 @@ def test_pip_source_import_packages(cli, datafiles, setup_pypi_repo):
"kind": "import",
"sources": [
{"kind": "local", "path": "files/pip-source"},
- {
- "kind": "pip",
- "url": "file://{}".format(os.path.realpath(pypi_repo)),
- "packages": [myreqs_packages],
- },
+ {"kind": "pip", "url": "file://{}".format(os.path.realpath(pypi_repo)), "packages": [myreqs_packages],},
],
}
- os.makedirs(
- os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True
- )
+ os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
result = cli.run(project=project, args=["source", "track", element_name])
@@ -66,10 +60,7 @@ def test_pip_source_import_packages(cli, datafiles, setup_pypi_repo):
result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout],)
assert result.exit_code == 0
assert_contains(
@@ -124,9 +115,7 @@ def test_pip_source_import_requirements_files(cli, datafiles, setup_pypi_repo):
},
],
}
- os.makedirs(
- os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True
- )
+ os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
result = cli.run(project=project, args=["source", "track", element_name])
@@ -135,10 +124,7 @@ def test_pip_source_import_requirements_files(cli, datafiles, setup_pypi_repo):
result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout],)
assert result.exit_code == 0
assert_contains(
@@ -158,9 +144,7 @@ def test_pip_source_import_requirements_files(cli, datafiles, setup_pypi_repo):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_pip_source_build(cli, datafiles, setup_pypi_repo):
project = str(datafiles)
element_path = os.path.join(project, "elements")
@@ -203,9 +187,7 @@ def test_pip_source_build(cli, datafiles, setup_pypi_repo):
]
},
}
- os.makedirs(
- os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True
- )
+ os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
result = cli.run(project=project, args=["source", "track", element_name])
diff --git a/tests/integration/pullbuildtrees.py b/tests/integration/pullbuildtrees.py
index f0cf22f59..e56823f31 100644
--- a/tests/integration/pullbuildtrees.py
+++ b/tests/integration/pullbuildtrees.py
@@ -41,22 +41,16 @@ def default_state(cli, tmpdir, share):
# directory of an element.
@pytest.mark.integration
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_pullbuildtrees(cli2, tmpdir, datafiles):
project = str(datafiles)
element_name = "autotools/amhello.bst"
cwd = str(tmpdir)
# Create artifact shares for pull & push testing
- with create_artifact_share(
- os.path.join(str(tmpdir), "share1")
- ) as share1, create_artifact_share(
+ with create_artifact_share(os.path.join(str(tmpdir), "share1")) as share1, create_artifact_share(
os.path.join(str(tmpdir), "share2")
- ) as share2, create_artifact_share(
- os.path.join(str(tmpdir), "share3")
- ) as share3:
+ ) as share2, create_artifact_share(os.path.join(str(tmpdir), "share3")) as share3:
cli2.configure(
{
"artifacts": {"url": share1.repo, "push": True},
@@ -69,9 +63,7 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
result = cli2.run(project=project, args=["build", element_name])
assert result.exit_code == 0
assert cli2.get_element_state(project, element_name) == "cached"
- assert share1.get_artifact(
- cli2.get_artifact_name(project, "test", element_name)
- )
+ assert share1.get_artifact(cli2.get_artifact_name(project, "test", element_name))
default_state(cli2, tmpdir, share1)
# Pull artifact with default config, assert that pulling again
@@ -94,10 +86,7 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
artifact_name = cli2.get_artifact_name(project, "test", element_name)
with cli2.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert not buildtreedir
- result = cli2.run(
- project=project,
- args=["--pull-buildtrees", "artifact", "pull", element_name],
- )
+ result = cli2.run(project=project, args=["--pull-buildtrees", "artifact", "pull", element_name],)
assert element_name in result.get_pulled_elements()
with cli2.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert os.path.isdir(buildtreedir)
@@ -111,20 +100,14 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
assert element_name in result.get_pulled_elements()
result = cli2.run(project=project, args=["artifact", "pull", element_name])
assert element_name not in result.get_pulled_elements()
- result = cli2.run(
- project=project,
- args=["--pull-buildtrees", "artifact", "pull", element_name],
- )
+ result = cli2.run(project=project, args=["--pull-buildtrees", "artifact", "pull", element_name],)
assert element_name not in result.get_pulled_elements()
default_state(cli2, tmpdir, share1)
# Pull artifact with default config and buildtrees cli flag set, then assert
# that pulling with pullbuildtrees set in user config doesn't create a pull
# job.
- result = cli2.run(
- project=project,
- args=["--pull-buildtrees", "artifact", "pull", element_name],
- )
+ result = cli2.run(project=project, args=["--pull-buildtrees", "artifact", "pull", element_name],)
assert element_name in result.get_pulled_elements()
cli2.configure({"cache": {"pull-buildtrees": True}})
result = cli2.run(project=project, args=["artifact", "pull", element_name])
@@ -140,25 +123,18 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
cli2.configure({"artifacts": {"url": share2.repo, "push": True}})
result = cli2.run(project=project, args=["artifact", "push", element_name])
assert element_name not in result.get_pushed_elements()
- assert not share2.get_artifact(
- cli2.get_artifact_name(project, "test", element_name)
- )
+ assert not share2.get_artifact(cli2.get_artifact_name(project, "test", element_name))
# Assert that after pulling the missing buildtree the element artifact can be
# successfully pushed to the remote. This will attempt to pull the buildtree
# from share1 and then a 'complete' push to share2
cli2.configure({"artifacts": {"url": share1.repo, "push": False}})
- result = cli2.run(
- project=project,
- args=["--pull-buildtrees", "artifact", "pull", element_name],
- )
+ result = cli2.run(project=project, args=["--pull-buildtrees", "artifact", "pull", element_name],)
assert element_name in result.get_pulled_elements()
cli2.configure({"artifacts": {"url": share2.repo, "push": True}})
result = cli2.run(project=project, args=["artifact", "push", element_name])
assert element_name in result.get_pushed_elements()
- assert share2.get_artifact(
- cli2.get_artifact_name(project, "test", element_name)
- )
+ assert share2.get_artifact(cli2.get_artifact_name(project, "test", element_name))
default_state(cli2, tmpdir, share1)
# Assert that bst artifact push will automatically attempt to pull a missing buildtree
@@ -168,50 +144,31 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
result = cli2.run(project=project, args=["artifact", "pull", element_name])
assert element_name in result.get_pulled_elements()
cli2.configure({"artifacts": {"url": share3.repo, "push": True}})
- result = cli2.run(
- project=project,
- args=["--pull-buildtrees", "artifact", "push", element_name],
- )
+ result = cli2.run(project=project, args=["--pull-buildtrees", "artifact", "push", element_name],)
assert "Attempting to fetch missing artifact buildtrees" in result.stderr
assert element_name not in result.get_pulled_elements()
with cli2.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert not buildtreedir
assert element_name not in result.get_pushed_elements()
- assert not share3.get_artifact(
- cli2.get_artifact_name(project, "test", element_name)
- )
+ assert not share3.get_artifact(cli2.get_artifact_name(project, "test", element_name))
 # Assert that if we add an extra remote that has the buildtree artifact cached, bst artifact push will
 # automatically attempt to pull it and will be successful, leading to the full artifact being pushed
 # to the empty share3. This gives the ability to attempt to push currently partial artifacts to a remote,
 # without explicitly requiring a bst artifact pull.
- cli2.configure(
- {
- "artifacts": [
- {"url": share1.repo, "push": False},
- {"url": share3.repo, "push": True},
- ]
- }
- )
- result = cli2.run(
- project=project,
- args=["--pull-buildtrees", "artifact", "push", element_name],
- )
+ cli2.configure({"artifacts": [{"url": share1.repo, "push": False}, {"url": share3.repo, "push": True},]})
+ result = cli2.run(project=project, args=["--pull-buildtrees", "artifact", "push", element_name],)
assert "Attempting to fetch missing artifact buildtrees" in result.stderr
assert element_name in result.get_pulled_elements()
with cli2.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert os.path.isdir(buildtreedir)
assert element_name in result.get_pushed_elements()
- assert share3.get_artifact(
- cli2.get_artifact_name(project, "test", element_name)
- )
+ assert share3.get_artifact(cli2.get_artifact_name(project, "test", element_name))
# Ensure that only valid pull-buildtrees boolean options make it through the loading
# process.
-@pytest.mark.parametrize(
- "value,success", [(True, True), (False, True), ("pony", False), ("1", False)]
-)
+@pytest.mark.parametrize("value,success", [(True, True), (False, True), ("pony", False), ("1", False)])
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_cache_pullbuildtrees(cli, datafiles, value, success):
project = str(datafiles)
diff --git a/tests/integration/sandbox-bwrap.py b/tests/integration/sandbox-bwrap.py
index 6f33275e9..0c84ba888 100644
--- a/tests/integration/sandbox-bwrap.py
+++ b/tests/integration/sandbox-bwrap.py
@@ -33,8 +33,7 @@ def test_sandbox_bwrap_cleanup_build(cli, datafiles):
@pytest.mark.skipif(HAVE_SANDBOX != "bwrap", reason="Only available with bubblewrap")
@pytest.mark.skipif(
- not HAVE_BWRAP_JSON_STATUS,
- reason="Only available with bubblewrap supporting --json-status-fd",
+ not HAVE_BWRAP_JSON_STATUS, reason="Only available with bubblewrap supporting --json-status-fd",
)
@pytest.mark.datafiles(DATA_DIR)
def test_sandbox_bwrap_distinguish_setup_error(cli, datafiles):
@@ -42,9 +41,7 @@ def test_sandbox_bwrap_distinguish_setup_error(cli, datafiles):
element_name = "sandbox-bwrap/non-executable-shell.bst"
result = cli.run(project=project, args=["build", element_name])
- result.assert_task_error(
- error_domain=ErrorDomain.SANDBOX, error_reason="bwrap-sandbox-fail"
- )
+ result.assert_task_error(error_domain=ErrorDomain.SANDBOX, error_reason="bwrap-sandbox-fail")
@pytest.mark.skipif(HAVE_SANDBOX != "bwrap", reason="Only available with bubblewrap")
@@ -58,10 +55,5 @@ def test_sandbox_bwrap_return_subprocess(cli, datafiles):
)
result = cli.run(project=project, args=["build", element_name])
- result.assert_task_error(
- error_domain=ErrorDomain.SANDBOX, error_reason="command-failed"
- )
- assert (
- "sandbox-bwrap/command-exit-42.bst|Command failed with exitcode 42"
- in result.stderr
- )
+ result.assert_task_error(error_domain=ErrorDomain.SANDBOX, error_reason="command-failed")
+ assert "sandbox-bwrap/command-exit-42.bst|Command failed with exitcode 42" in result.stderr
diff --git a/tests/integration/script.py b/tests/integration/script.py
index 4f44feae4..964cab384 100644
--- a/tests/integration/script.py
+++ b/tests/integration/script.py
@@ -33,9 +33,7 @@ def create_script_element(name, path, config=None, variables=None):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_script(cli, datafiles):
project = str(datafiles)
checkout = os.path.join(cli.directory, "checkout")
@@ -45,21 +43,13 @@ def test_script(cli, datafiles):
create_script_element(
element_name,
element_path,
- config={
- "commands": [
- "mkdir -p %{install-root}",
- "echo 'Hi' > %{install-root}/test",
- ],
- },
+ config={"commands": ["mkdir -p %{install-root}", "echo 'Hi' > %{install-root}/test",],},
)
res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- res = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ res = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout],)
assert res.exit_code == 0
with open(os.path.join(checkout, "test")) as f:
@@ -69,9 +59,7 @@ def test_script(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_script_root(cli, datafiles):
project = str(datafiles)
checkout = os.path.join(cli.directory, "checkout")
@@ -96,10 +84,7 @@ def test_script_root(cli, datafiles):
res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- res = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ res = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout],)
assert res.exit_code == 0
with open(os.path.join(checkout, "test")) as f:
@@ -109,9 +94,7 @@ def test_script_root(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
def test_script_no_root(cli, datafiles):
project = str(datafiles)
@@ -138,9 +121,7 @@ def test_script_no_root(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
def test_script_cwd(cli, datafiles):
project = str(datafiles)
@@ -151,19 +132,14 @@ def test_script_cwd(cli, datafiles):
create_script_element(
element_name,
element_path,
- config={
- "commands": ["echo 'test' > test", "cp /buildstream/test %{install-root}"],
- },
+ config={"commands": ["echo 'test' > test", "cp /buildstream/test %{install-root}"],},
variables={"cwd": "/buildstream"},
)
res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- res = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ res = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout],)
assert res.exit_code == 0
with open(os.path.join(checkout, "test")) as f:
@@ -173,9 +149,7 @@ def test_script_cwd(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_script_layout(cli, datafiles):
project = str(datafiles)
checkout = os.path.join(cli.directory, "checkout")
@@ -185,8 +159,7 @@ def test_script_layout(cli, datafiles):
assert res.exit_code == 0
cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
+ project=project, args=["artifact", "checkout", element_name, "--directory", checkout],
)
assert res.exit_code == 0
@@ -197,9 +170,7 @@ def test_script_layout(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_regression_cache_corruption(cli, datafiles):
project = str(datafiles)
checkout_original = os.path.join(cli.directory, "checkout-original")
@@ -211,14 +182,7 @@ def test_regression_cache_corruption(cli, datafiles):
assert res.exit_code == 0
res = cli.run(
- project=project,
- args=[
- "artifact",
- "checkout",
- canary_element_name,
- "--directory",
- checkout_original,
- ],
+ project=project, args=["artifact", "checkout", canary_element_name, "--directory", checkout_original,],
)
assert res.exit_code == 0
@@ -228,16 +192,7 @@ def test_regression_cache_corruption(cli, datafiles):
res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- res = cli.run(
- project=project,
- args=[
- "artifact",
- "checkout",
- canary_element_name,
- "--directory",
- checkout_after,
- ],
- )
+ res = cli.run(project=project, args=["artifact", "checkout", canary_element_name, "--directory", checkout_after,],)
assert res.exit_code == 0
with open(os.path.join(checkout_after, "canary")) as f:
@@ -245,9 +200,7 @@ def test_regression_cache_corruption(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_regression_tmpdir(cli, datafiles):
project = str(datafiles)
element_name = "script/tmpdir.bst"
@@ -257,9 +210,7 @@ def test_regression_tmpdir(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_regression_cache_corruption_2(cli, datafiles):
project = str(datafiles)
checkout_original = os.path.join(cli.directory, "checkout-original")
@@ -271,14 +222,7 @@ def test_regression_cache_corruption_2(cli, datafiles):
assert res.exit_code == 0
res = cli.run(
- project=project,
- args=[
- "artifact",
- "checkout",
- canary_element_name,
- "--directory",
- checkout_original,
- ],
+ project=project, args=["artifact", "checkout", canary_element_name, "--directory", checkout_original,],
)
assert res.exit_code == 0
@@ -288,16 +232,7 @@ def test_regression_cache_corruption_2(cli, datafiles):
res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- res = cli.run(
- project=project,
- args=[
- "artifact",
- "checkout",
- canary_element_name,
- "--directory",
- checkout_after,
- ],
- )
+ res = cli.run(project=project, args=["artifact", "checkout", canary_element_name, "--directory", checkout_after,],)
assert res.exit_code == 0
with open(os.path.join(checkout_after, "canary")) as f:
diff --git a/tests/integration/shell.py b/tests/integration/shell.py
index 124770aad..e03b38563 100644
--- a/tests/integration/shell.py
+++ b/tests/integration/shell.py
@@ -32,13 +32,9 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
# element (str): The element to build and run a shell with
# isolate (bool): Whether to pass --isolate to `bst shell`
#
-def execute_shell(
- cli, project, command, *, config=None, mount=None, element="base.bst", isolate=False
-):
+def execute_shell(cli, project, command, *, config=None, mount=None, element="base.bst", isolate=False):
# Ensure the element is built
- result = cli.run_project_config(
- project=project, project_config=config, args=["build", element]
- )
+ result = cli.run_project_config(project=project, project_config=config, args=["build", element])
assert result.exit_code == 0
args = ["shell"]
@@ -55,9 +51,7 @@ def execute_shell(
# Test running something through a shell, allowing it to find the
# executable
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_shell(cli, datafiles):
project = str(datafiles)
@@ -68,9 +62,7 @@ def test_shell(cli, datafiles):
# Test running an executable directly
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_executable(cli, datafiles):
project = str(datafiles)
@@ -82,19 +74,14 @@ def test_executable(cli, datafiles):
# Test shell environment variable explicit assignments
@pytest.mark.parametrize("animal", [("Horse"), ("Pony")])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
 # This test seems to fail or pass depending on whether this file is run alone or the whole test suite
def test_env_assign(cli, datafiles, animal):
project = str(datafiles)
expected = animal + "\n"
result = execute_shell(
- cli,
- project,
- ["/bin/sh", "-c", "echo ${ANIMAL}"],
- config={"shell": {"environment": {"ANIMAL": animal}}},
+ cli, project, ["/bin/sh", "-c", "echo ${ANIMAL}"], config={"shell": {"environment": {"ANIMAL": animal}}},
)
assert result.exit_code == 0
@@ -104,9 +91,7 @@ def test_env_assign(cli, datafiles, animal):
# Test shell environment variable explicit assignments with host env var expansion
@pytest.mark.parametrize("animal", [("Horse"), ("Pony")])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
 # This test seems to fail or pass depending on whether this file is run alone or the whole test suite
def test_env_assign_expand_host_environ(cli, datafiles, animal):
project = str(datafiles)
@@ -129,9 +114,7 @@ def test_env_assign_expand_host_environ(cli, datafiles, animal):
# when running an isolated shell
@pytest.mark.parametrize("animal", [("Horse"), ("Pony")])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
 # This test seems to fail or pass depending on whether this file is run alone or the whole test suite
def test_env_assign_isolated(cli, datafiles, animal):
project = str(datafiles)
@@ -150,9 +133,7 @@ def test_env_assign_isolated(cli, datafiles, animal):
# Test running an executable in a runtime with no shell (i.e., no
# /bin/sh)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_no_shell(cli, datafiles):
project = str(datafiles)
element_path = os.path.join(project, "elements")
@@ -165,14 +146,10 @@ def test_no_shell(cli, datafiles):
"variables": {"install-root": "/"},
"config": {"commands": ["rm /bin/sh"]},
}
- os.makedirs(
- os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True
- )
+ os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
- result = execute_shell(
- cli, project, ["/bin/echo", "Pegasissies!"], element=element_name
- )
+ result = execute_shell(cli, project, ["/bin/echo", "Pegasissies!"], element=element_name)
assert result.exit_code == 0
assert result.output == "Pegasissies!\n"
@@ -180,18 +157,13 @@ def test_no_shell(cli, datafiles):
# Test that bind mounts defined in project.conf work
@pytest.mark.parametrize("path", [("/etc/pony.conf"), ("/usr/share/pony/pony.txt")])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
def test_host_files(cli, datafiles, path):
project = str(datafiles)
ponyfile = os.path.join(project, "files", "shell-mount", "pony.txt")
result = execute_shell(
- cli,
- project,
- ["cat", path],
- config={"shell": {"host-files": [{"host_path": ponyfile, "path": path}]}},
+ cli, project, ["cat", path], config={"shell": {"host-files": [{"host_path": ponyfile, "path": path}]}},
)
assert result.exit_code == 0
assert result.output == "pony\n"
@@ -200,9 +172,7 @@ def test_host_files(cli, datafiles, path):
# Test that bind mounts defined in project.conf work
@pytest.mark.parametrize("path", [("/etc"), ("/usr/share/pony")])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
def test_host_files_expand_environ(cli, datafiles, path):
project = str(datafiles)
@@ -217,14 +187,7 @@ def test_host_files_expand_environ(cli, datafiles, path):
project,
["cat", fullpath],
config={
- "shell": {
- "host-files": [
- {
- "host_path": "${HOST_PONY_PATH}/pony.txt",
- "path": "${BASE_PONY}/pony.txt",
- }
- ]
- }
+ "shell": {"host-files": [{"host_path": "${HOST_PONY_PATH}/pony.txt", "path": "${BASE_PONY}/pony.txt",}]}
},
)
assert result.exit_code == 0
@@ -234,9 +197,7 @@ def test_host_files_expand_environ(cli, datafiles, path):
 # Test that bind mounts defined in project.conf don't mount in isolation
@pytest.mark.parametrize("path", [("/etc/pony.conf"), ("/usr/share/pony/pony.txt")])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_isolated_no_mount(cli, datafiles, path):
project = str(datafiles)
ponyfile = os.path.join(project, "files", "shell-mount", "pony.txt")
@@ -256,9 +217,7 @@ def test_isolated_no_mount(cli, datafiles, path):
# declared as optional, and that there is no warning if it is optional
@pytest.mark.parametrize("optional", [("mandatory"), ("optional")])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_host_files_missing(cli, datafiles, optional):
project = str(datafiles)
ponyfile = os.path.join(project, "files", "shell-mount", "horsy.txt")
@@ -270,17 +229,7 @@ def test_host_files_missing(cli, datafiles, optional):
cli,
project,
["echo", "Hello"],
- config={
- "shell": {
- "host-files": [
- {
- "host_path": ponyfile,
- "path": "/etc/pony.conf",
- "optional": option,
- }
- ]
- }
- },
+ config={"shell": {"host-files": [{"host_path": ponyfile, "path": "/etc/pony.conf", "optional": option,}]}},
)
assert result.exit_code == 0
assert result.output == "Hello\n"
@@ -296,9 +245,7 @@ def test_host_files_missing(cli, datafiles, optional):
# Test that bind mounts defined in project.conf work
@pytest.mark.parametrize("path", [("/etc/pony.conf"), ("/usr/share/pony/pony.txt")])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
def test_cli_mount(cli, datafiles, path):
project = str(datafiles)
@@ -311,9 +258,7 @@ def test_cli_mount(cli, datafiles, path):
# Test that we can see the workspace files in a shell
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_workspace_visible(cli, datafiles):
project = str(datafiles)
workspace = os.path.join(cli.directory, "workspace")
@@ -321,10 +266,7 @@ def test_workspace_visible(cli, datafiles):
# Open a workspace on our build failing element
#
- res = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace, element_name],
- )
+ res = cli.run(project=project, args=["workspace", "open", "--directory", workspace, element_name],)
assert res.exit_code == 0
# Ensure the dependencies of our build failing element are built
@@ -341,18 +283,14 @@ def test_workspace_visible(cli, datafiles):
# Cat the hello.c file from a bst shell command, and assert
# that we got the same content here
#
- result = cli.run(
- project=project, args=["shell", "--build", element_name, "--", "cat", "hello.c"]
- )
+ result = cli.run(project=project, args=["shell", "--build", element_name, "--", "cat", "hello.c"])
assert result.exit_code == 0
assert result.output == workspace_hello
# Test that '--sysroot' works
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
def test_sysroot(cli, tmpdir, datafiles):
project = str(datafiles)
@@ -365,10 +303,7 @@ def test_sysroot(cli, tmpdir, datafiles):
# Build and check out a sysroot
res = cli.run(project=project, args=["build", base_element])
res.assert_success()
- res = cli.run(
- project=project,
- args=["artifact", "checkout", base_element, "--directory", checkout_dir],
- )
+ res = cli.run(project=project, args=["artifact", "checkout", base_element, "--directory", checkout_dir],)
res.assert_success()
# Mutate the sysroot
@@ -397,9 +332,7 @@ def test_sysroot(cli, tmpdir, datafiles):
# Test system integration commands can access devices in /dev
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_integration_devices(cli, datafiles):
project = str(datafiles)
element_name = "integration.bst"
@@ -412,12 +345,8 @@ def test_integration_devices(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("build_shell", [("build"), ("nobuild")])
@pytest.mark.parametrize("guess_element", [True, False], ids=["guess", "no-guess"])
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
-def test_integration_external_workspace(
- cli, tmpdir_factory, datafiles, build_shell, guess_element
-):
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
+def test_integration_external_workspace(cli, tmpdir_factory, datafiles, build_shell, guess_element):
tmpdir = tmpdir_factory.mktemp("")
project = str(datafiles)
element_name = "autotools/amhello.bst"
@@ -430,10 +359,7 @@ def test_integration_external_workspace(
with open(project_file, "a") as f:
f.write(config_text)
- result = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace_dir, element_name],
- )
+ result = cli.run(project=project, args=["workspace", "open", "--directory", workspace_dir, element_name],)
result.assert_success()
result = cli.run(project=project, args=["-C", workspace_dir, "build", element_name])
@@ -449,9 +375,7 @@ def test_integration_external_workspace(
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_integration_partial_artifact(cli, datafiles, tmpdir, integration_cache):
project = str(datafiles)
@@ -484,9 +408,7 @@ def test_integration_partial_artifact(cli, datafiles, tmpdir, integration_cache)
],
)
result.assert_success()
- digest = utils.sha256sum(
- os.path.join(str(tmpdir), "tmp", "usr", "bin", "hello")
- )
+ digest = utils.sha256sum(os.path.join(str(tmpdir), "tmp", "usr", "bin", "hello"))
# Remove the binary from the CAS
cachedir = cli.config["cachedir"]
@@ -498,8 +420,6 @@ def test_integration_partial_artifact(cli, datafiles, tmpdir, integration_cache)
result.assert_main_error(ErrorDomain.APP, None)
# check the artifact gets completed with '--pull' specified
- result = cli.run(
- project=project, args=["shell", "--pull", element_name, "--", "hello"]
- )
+ result = cli.run(project=project, args=["shell", "--pull", element_name, "--", "hello"])
result.assert_success()
assert "autotools/amhello.bst" in result.get_pulled_elements()
diff --git a/tests/integration/shellbuildtrees.py b/tests/integration/shellbuildtrees.py
index 6ed6770a4..7144d4bb2 100644
--- a/tests/integration/shellbuildtrees.py
+++ b/tests/integration/shellbuildtrees.py
@@ -20,9 +20,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_buildtree_staged(cli_integration, datafiles):
# We can only test the non-interactive case
# The non-interactive case defaults to not using buildtrees
@@ -30,52 +28,32 @@ def test_buildtree_staged(cli_integration, datafiles):
project = str(datafiles)
element_name = "build-shell/buildtree.bst"
- res = cli_integration.run(
- project=project, args=["--cache-buildtrees", "always", "build", element_name]
- )
+ res = cli_integration.run(project=project, args=["--cache-buildtrees", "always", "build", element_name])
res.assert_success()
- res = cli_integration.run(
- project=project, args=["shell", "--build", element_name, "--", "cat", "test"]
- )
+ res = cli_integration.run(project=project, args=["shell", "--build", element_name, "--", "cat", "test"])
res.assert_shell_error()
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_buildtree_staged_forced_true(cli_integration, datafiles):
# Test that if we ask for a build tree it is there.
project = str(datafiles)
element_name = "build-shell/buildtree.bst"
- res = cli_integration.run(
- project=project, args=["--cache-buildtrees", "always", "build", element_name]
- )
+ res = cli_integration.run(project=project, args=["--cache-buildtrees", "always", "build", element_name])
res.assert_success()
res = cli_integration.run(
- project=project,
- args=[
- "shell",
- "--build",
- "--use-buildtree",
- "always",
- element_name,
- "--",
- "cat",
- "test",
- ],
+ project=project, args=["shell", "--build", "--use-buildtree", "always", element_name, "--", "cat", "test",],
)
res.assert_success()
assert "Hi" in res.output
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_buildtree_staged_warn_empty_cached(cli_integration, tmpdir, datafiles):
# Test that if we stage a cached and empty buildtree, we warn the user.
project = str(datafiles)
@@ -89,103 +67,49 @@ def test_buildtree_staged_warn_empty_cached(cli_integration, tmpdir, datafiles):
res.assert_success()
res = cli_integration.run(
- project=project,
- args=[
- "shell",
- "--build",
- "--use-buildtree",
- "always",
- element_name,
- "--",
- "cat",
- "test",
- ],
+ project=project, args=["shell", "--build", "--use-buildtree", "always", element_name, "--", "cat", "test",],
)
res.assert_main_error(ErrorDomain.APP, None)
- assert (
- "Artifact was created without buildtree, unable to launch shell with it"
- in res.stderr
- )
+ assert "Artifact was created without buildtree, unable to launch shell with it" in res.stderr
# Now attempt the same with the try option, this should not attempt to find a buildtree
# and just launch the shell, however the cat should still fail.
res = cli_integration.run(
- project=project,
- args=[
- "shell",
- "--build",
- "--use-buildtree",
- "try",
- element_name,
- "--",
- "cat",
- "test",
- ],
- )
- assert (
- "Artifact created without buildtree, shell will be loaded without it"
- in res.stderr
+ project=project, args=["shell", "--build", "--use-buildtree", "try", element_name, "--", "cat", "test",],
)
+ assert "Artifact created without buildtree, shell will be loaded without it" in res.stderr
assert "Hi" not in res.output
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_buildtree_staged_if_available(cli_integration, datafiles):
# Test that a build tree can be correctly detected.
project = str(datafiles)
element_name = "build-shell/buildtree.bst"
- res = cli_integration.run(
- project=project, args=["--cache-buildtrees", "always", "build", element_name]
- )
+ res = cli_integration.run(project=project, args=["--cache-buildtrees", "always", "build", element_name])
res.assert_success()
res = cli_integration.run(
- project=project,
- args=[
- "shell",
- "--build",
- "--use-buildtree",
- "try",
- element_name,
- "--",
- "cat",
- "test",
- ],
+ project=project, args=["shell", "--build", "--use-buildtree", "try", element_name, "--", "cat", "test",],
)
res.assert_success()
assert "Hi" in res.output
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_buildtree_staged_forced_false(cli_integration, datafiles):
# Test that if we ask not to have a build tree it is not there
project = str(datafiles)
element_name = "build-shell/buildtree.bst"
- res = cli_integration.run(
- project=project, args=["--cache-buildtrees", "always", "build", element_name]
- )
+ res = cli_integration.run(project=project, args=["--cache-buildtrees", "always", "build", element_name])
res.assert_success()
res = cli_integration.run(
- project=project,
- args=[
- "shell",
- "--build",
- "--use-buildtree",
- "never",
- element_name,
- "--",
- "cat",
- "test",
- ],
+ project=project, args=["shell", "--build", "--use-buildtree", "never", element_name, "--", "cat", "test",],
)
res.assert_shell_error()
@@ -193,9 +117,7 @@ def test_buildtree_staged_forced_false(cli_integration, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_buildtree_from_failure(cli_integration, datafiles):
# Test that we can use a build tree after a failure
project = str(datafiles)
@@ -206,17 +128,7 @@ def test_buildtree_from_failure(cli_integration, datafiles):
# Assert that file has expected contents
res = cli_integration.run(
- project=project,
- args=[
- "shell",
- "--build",
- element_name,
- "--use-buildtree",
- "always",
- "--",
- "cat",
- "test",
- ],
+ project=project, args=["shell", "--build", element_name, "--use-buildtree", "always", "--", "cat", "test",],
)
res.assert_success()
assert "WARNING: using a buildtree from a failed build" in res.stderr
@@ -224,9 +136,7 @@ def test_buildtree_from_failure(cli_integration, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_buildtree_from_failure_option_never(cli_integration, tmpdir, datafiles):
project = str(datafiles)
@@ -236,35 +146,18 @@ def test_buildtree_from_failure_option_never(cli_integration, tmpdir, datafiles)
# without caching a buildtree explicitly
cli_integration.configure({"cachedir": str(tmpdir)})
- res = cli_integration.run(
- project=project, args=["--cache-buildtrees", "never", "build", element_name]
- )
+ res = cli_integration.run(project=project, args=["--cache-buildtrees", "never", "build", element_name])
res.assert_main_error(ErrorDomain.STREAM, None)
res = cli_integration.run(
- project=project,
- args=[
- "shell",
- "--build",
- element_name,
- "--use-buildtree",
- "always",
- "--",
- "cat",
- "test",
- ],
+ project=project, args=["shell", "--build", element_name, "--use-buildtree", "always", "--", "cat", "test",],
)
res.assert_main_error(ErrorDomain.APP, None)
- assert (
- "Artifact was created without buildtree, unable to launch shell with it"
- in res.stderr
- )
+ assert "Artifact was created without buildtree, unable to launch shell with it" in res.stderr
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_buildtree_from_failure_option_always(cli_integration, tmpdir, datafiles):
project = str(datafiles)
@@ -275,23 +168,11 @@ def test_buildtree_from_failure_option_always(cli_integration, tmpdir, datafiles
# cached with content.
cli_integration.configure({"cachedir": str(tmpdir)})
- res = cli_integration.run(
- project=project, args=["--cache-buildtrees", "always", "build", element_name]
- )
+ res = cli_integration.run(project=project, args=["--cache-buildtrees", "always", "build", element_name])
res.assert_main_error(ErrorDomain.STREAM, None)
res = cli_integration.run(
- project=project,
- args=[
- "shell",
- "--build",
- element_name,
- "--use-buildtree",
- "always",
- "--",
- "cat",
- "test",
- ],
+ project=project, args=["shell", "--build", element_name, "--use-buildtree", "always", "--", "cat", "test",],
)
res.assert_success()
assert "WARNING: using a buildtree from a failed build" in res.stderr
@@ -301,9 +182,7 @@ def test_buildtree_from_failure_option_always(cli_integration, tmpdir, datafiles
# Check that build shells work when pulled from a remote cache
# This is to roughly simulate remote execution
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_buildtree_pulled(cli, tmpdir, datafiles):
project = str(datafiles)
element_name = "build-shell/buildtree.bst"
@@ -311,10 +190,7 @@ def test_buildtree_pulled(cli, tmpdir, datafiles):
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# Build the element to push it to cache
cli.configure({"artifacts": {"url": share.repo, "push": True}})
- result = cli.run(
- project=project,
- args=["--cache-buildtrees", "always", "build", element_name],
- )
+ result = cli.run(project=project, args=["--cache-buildtrees", "always", "build", element_name],)
result.assert_success()
assert cli.get_element_state(project, element_name) == "cached"
@@ -325,40 +201,21 @@ def test_buildtree_pulled(cli, tmpdir, datafiles):
# Pull from cache, ensuring the cli option is set to pull the buildtree
result = cli.run(
- project=project,
- args=[
- "--pull-buildtrees",
- "artifact",
- "pull",
- "--deps",
- "all",
- element_name,
- ],
+ project=project, args=["--pull-buildtrees", "artifact", "pull", "--deps", "all", element_name,],
)
result.assert_success()
# Check it's using the cached build tree
res = cli.run(
project=project,
- args=[
- "shell",
- "--build",
- element_name,
- "--use-buildtree",
- "always",
- "--",
- "cat",
- "test",
- ],
+ args=["shell", "--build", element_name, "--use-buildtree", "always", "--", "cat", "test",],
)
res.assert_success()
# This test checks for correct behaviour if a buildtree is not present in the local cache.
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_buildtree_options(cli, tmpdir, datafiles):
project = str(datafiles)
element_name = "build-shell/buildtree.bst"
@@ -366,10 +223,7 @@ def test_buildtree_options(cli, tmpdir, datafiles):
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# Build the element to push it to cache
cli.configure({"artifacts": {"url": share.repo, "push": True}})
- result = cli.run(
- project=project,
- args=["--cache-buildtrees", "always", "build", element_name],
- )
+ result = cli.run(project=project, args=["--cache-buildtrees", "always", "build", element_name],)
result.assert_success()
assert cli.get_element_state(project, element_name) == "cached"
assert share.get_artifact(cli.get_artifact_name(project, "test", element_name))
@@ -380,51 +234,26 @@ def test_buildtree_options(cli, tmpdir, datafiles):
assert cli.get_element_state(project, element_name) != "cached"
# Pull from cache, but do not include buildtrees.
- result = cli.run(
- project=project, args=["artifact", "pull", "--deps", "all", element_name]
- )
+ result = cli.run(project=project, args=["artifact", "pull", "--deps", "all", element_name])
result.assert_success()
# Check it's not using the cached build tree
res = cli.run(
- project=project,
- args=[
- "shell",
- "--build",
- element_name,
- "--use-buildtree",
- "never",
- "--",
- "cat",
- "test",
- ],
+ project=project, args=["shell", "--build", element_name, "--use-buildtree", "never", "--", "cat", "test",],
)
res.assert_shell_error()
assert "Hi" not in res.output
# Check it's not using the cached build tree, default is to ask, and fall back to not
# for non interactive behavior
- res = cli.run(
- project=project,
- args=["shell", "--build", element_name, "--", "cat", "test"],
- )
+ res = cli.run(project=project, args=["shell", "--build", element_name, "--", "cat", "test"],)
res.assert_shell_error()
assert "Hi" not in res.output
# Check correctly handling the lack of buildtree, with 'try' not attempting to
# pull the buildtree as the user context is by default set to not pull them
res = cli.run(
- project=project,
- args=[
- "shell",
- "--build",
- element_name,
- "--use-buildtree",
- "try",
- "--",
- "cat",
- "test",
- ],
+ project=project, args=["shell", "--build", element_name, "--use-buildtree", "try", "--", "cat", "test",],
)
assert "Hi" not in res.output
assert "Attempting to fetch missing artifact buildtrees" not in res.stderr
@@ -454,22 +283,11 @@ def test_buildtree_options(cli, tmpdir, datafiles):
# Check it's not loading the shell at all with always set for the buildtree, when the
# user context does not allow for buildtree pulling
- result = cli.run(
- project=project, args=["artifact", "pull", "--deps", "all", element_name]
- )
+ result = cli.run(project=project, args=["artifact", "pull", "--deps", "all", element_name])
result.assert_success()
res = cli.run(
project=project,
- args=[
- "shell",
- "--build",
- element_name,
- "--use-buildtree",
- "always",
- "--",
- "cat",
- "test",
- ],
+ args=["shell", "--build", element_name, "--use-buildtree", "always", "--", "cat", "test",],
)
res.assert_main_error(ErrorDomain.APP, None)
assert "Buildtree is not cached locally or in available remotes" in res.stderr
@@ -493,18 +311,13 @@ def test_buildtree_options(cli, tmpdir, datafiles):
],
)
assert "Hi" in res.output
- assert (
- "buildtree is not cached locally, will attempt to pull from available remotes"
- in res.stderr
- )
+ assert "buildtree is not cached locally, will attempt to pull from available remotes" in res.stderr
assert "Attempting to fetch missing artifact buildtree" in res.stderr
# Tests running pull and pull-buildtree options at the same time.
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_pull_buildtree_pulled(cli, tmpdir, datafiles):
project = str(datafiles)
element_name = "build-shell/buildtree.bst"
@@ -512,10 +325,7 @@ def test_pull_buildtree_pulled(cli, tmpdir, datafiles):
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# Build the element to push it to cache
cli.configure({"artifacts": {"url": share.repo, "push": True}})
- result = cli.run(
- project=project,
- args=["--cache-buildtrees", "always", "build", element_name],
- )
+ result = cli.run(project=project, args=["--cache-buildtrees", "always", "build", element_name],)
result.assert_success()
assert cli.get_element_state(project, element_name) == "cached"
diff --git a/tests/integration/sockets.py b/tests/integration/sockets.py
index 6f0757ff4..3fb656e95 100644
--- a/tests/integration/sockets.py
+++ b/tests/integration/sockets.py
@@ -14,9 +14,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_builddir_socket_ignored(cli, datafiles):
project = str(datafiles)
element_name = "sockets/make-builddir-socket.bst"
@@ -26,9 +24,7 @@ def test_builddir_socket_ignored(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_install_root_socket_ignored(cli, datafiles):
project = str(datafiles)
element_name = "sockets/make-install-root-socket.bst"
diff --git a/tests/integration/source-determinism.py b/tests/integration/source-determinism.py
index b3a4dd96b..657ad0a67 100644
--- a/tests/integration/source-determinism.py
+++ b/tests/integration/source-determinism.py
@@ -28,9 +28,7 @@ def create_test_directory(*path, mode=0o644):
@pytest.mark.integration
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_deterministic_source_local(cli, tmpdir, datafiles):
"""Only user rights should be considered for local source.
"""
@@ -63,10 +61,7 @@ def test_deterministic_source_local(cli, tmpdir, datafiles):
result = cli.run(project=project, args=["build", element_name])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkoutdir],)
result.assert_success()
with open(os.path.join(checkoutdir, "ls-l"), "r") as f:
diff --git a/tests/integration/stack.py b/tests/integration/stack.py
index d208a8ce1..d17bd9fd2 100644
--- a/tests/integration/stack.py
+++ b/tests/integration/stack.py
@@ -15,9 +15,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_stack(cli, datafiles):
project = str(datafiles)
checkout = os.path.join(cli.directory, "checkout")
@@ -27,8 +25,7 @@ def test_stack(cli, datafiles):
assert res.exit_code == 0
cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
+ project=project, args=["artifact", "checkout", element_name, "--directory", checkout],
)
assert res.exit_code == 0
diff --git a/tests/integration/symlinks.py b/tests/integration/symlinks.py
index bc9675ecf..6904f4b65 100644
--- a/tests/integration/symlinks.py
+++ b/tests/integration/symlinks.py
@@ -15,9 +15,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_absolute_symlinks(cli, datafiles):
project = str(datafiles)
checkout = os.path.join(cli.directory, "checkout")
@@ -26,10 +24,7 @@ def test_absolute_symlinks(cli, datafiles):
result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout],)
assert result.exit_code == 0
symlink = os.path.join(checkout, "opt", "orgname")
@@ -41,9 +36,7 @@ def test_absolute_symlinks(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_disallow_overlaps_inside_symlink_with_dangling_target(cli, datafiles):
project = str(datafiles)
checkout = os.path.join(cli.directory, "checkout")
@@ -52,18 +45,13 @@ def test_disallow_overlaps_inside_symlink_with_dangling_target(cli, datafiles):
result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout],)
assert result.exit_code == -1
assert "Destination is a symlink, not a directory: /opt/orgname" in result.stderr
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_detect_symlink_overlaps_pointing_outside_sandbox(cli, datafiles):
project = str(datafiles)
checkout = os.path.join(cli.directory, "checkout")
@@ -76,20 +64,13 @@ def test_detect_symlink_overlaps_pointing_outside_sandbox(cli, datafiles):
# ...but when we compose them together, the overlaps create paths that
# point outside the sandbox which BuildStream needs to detect before it
# tries to actually write there.
- result = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout],)
assert result.exit_code == -1
- assert (
- "Destination is a symlink, not a directory: /opt/escape-hatch" in result.stderr
- )
+ assert "Destination is a symlink, not a directory: /opt/escape-hatch" in result.stderr
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_symlink_in_sandbox_path(cli, datafiles):
project = str(datafiles)
element_name = "symlinks/link-on-path-use.bst"
diff --git a/tests/integration/workspace.py b/tests/integration/workspace.py
index 127a9358d..a5f9eded5 100644
--- a/tests/integration/workspace.py
+++ b/tests/integration/workspace.py
@@ -17,18 +17,13 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_workspace_stages_once(cli, datafiles):
project = str(datafiles)
workspace = os.path.join(cli.directory, "workspace")
element_name = "workspace/workspace-mount.bst"
- res = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace, element_name],
- )
+ res = cli.run(project=project, args=["workspace", "open", "--directory", workspace, element_name],)
assert res.exit_code == 0
assert cli.get_element_key(project, element_name) != "{:?<64}".format("")
res = cli.run(project=project, args=["build", element_name])
@@ -36,18 +31,13 @@ def test_workspace_stages_once(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_workspace_mount(cli, datafiles):
project = str(datafiles)
workspace = os.path.join(cli.directory, "workspace")
element_name = "workspace/workspace-mount.bst"
- res = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace, element_name],
- )
+ res = cli.run(project=project, args=["workspace", "open", "--directory", workspace, element_name],)
assert res.exit_code == 0
res = cli.run(project=project, args=["build", element_name])
@@ -66,30 +56,20 @@ def test_workspace_mount_on_read_only_directory(cli, datafiles):
# make directory RO
os.chmod(workspace, 0o555)
- res = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace, element_name],
- )
+ res = cli.run(project=project, args=["workspace", "open", "--directory", workspace, element_name],)
assert res.exit_code == 0
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
-@pytest.mark.xfail(
- reason="Incremental builds are currently incompatible with workspace source plugin."
-)
+@pytest.mark.xfail(reason="Incremental builds are currently incompatible with workspace source plugin.")
def test_workspace_commanddir(cli, datafiles):
project = str(datafiles)
workspace = os.path.join(cli.directory, "workspace")
element_name = "workspace/workspace-commanddir.bst"
- res = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace, element_name],
- )
+ res = cli.run(project=project, args=["workspace", "open", "--directory", workspace, element_name],)
assert res.exit_code == 0
res = cli.run(project=project, args=["build", element_name])
@@ -100,9 +80,7 @@ def test_workspace_commanddir(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_workspace_updated_dependency(cli, datafiles):
project = str(datafiles)
workspace = os.path.join(cli.directory, "workspace")
@@ -124,10 +102,7 @@ def test_workspace_updated_dependency(cli, datafiles):
_yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))
# First open the workspace
- res = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace, element_name],
- )
+ res = cli.run(project=project, args=["workspace", "open", "--directory", workspace, element_name],)
assert res.exit_code == 0
# We build the workspaced element, so that we have an artifact
@@ -156,9 +131,7 @@ def test_workspace_updated_dependency(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_workspace_update_dependency_failed(cli, datafiles):
project = str(datafiles)
workspace = os.path.join(cli.directory, "workspace")
@@ -181,10 +154,7 @@ def test_workspace_update_dependency_failed(cli, datafiles):
_yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))
# First open the workspace
- res = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace, element_name],
- )
+ res = cli.run(project=project, args=["workspace", "open", "--directory", workspace, element_name],)
assert res.exit_code == 0
# We build the workspaced element, so that we have an artifact
@@ -233,9 +203,7 @@ def test_workspace_update_dependency_failed(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_updated_dependency_nested(cli, datafiles):
project = str(datafiles)
workspace = os.path.join(cli.directory, "workspace")
@@ -258,10 +226,7 @@ def test_updated_dependency_nested(cli, datafiles):
_yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))
# First open the workspace
- res = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace, element_name],
- )
+ res = cli.run(project=project, args=["workspace", "open", "--directory", workspace, element_name],)
assert res.exit_code == 0
# We build the workspaced element, so that we have an artifact
@@ -288,13 +253,9 @@ def test_updated_dependency_nested(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
-@pytest.mark.xfail(
- reason="Incremental builds are currently incompatible with workspace source plugin."
-)
+@pytest.mark.xfail(reason="Incremental builds are currently incompatible with workspace source plugin.")
def test_incremental_configure_commands_run_only_once(cli, datafiles):
project = str(datafiles)
workspace = os.path.join(cli.directory, "workspace")
@@ -310,10 +271,7 @@ def test_incremental_configure_commands_run_only_once(cli, datafiles):
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# We open a workspace on the above element
- res = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace, element_name],
- )
+ res = cli.run(project=project, args=["workspace", "open", "--directory", workspace, element_name],)
res.assert_success()
# Then we build, and check whether the configure step succeeded
@@ -339,9 +297,7 @@ def test_incremental_configure_commands_run_only_once(cli, datafiles):
# part of a cleanup job.
#
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
def test_workspace_missing_last_successful(cli, datafiles):
project = str(datafiles)
@@ -349,10 +305,7 @@ def test_workspace_missing_last_successful(cli, datafiles):
element_name = "workspace/workspace-commanddir.bst"
# Open workspace
- res = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace, element_name],
- )
+ res = cli.run(project=project, args=["workspace", "open", "--directory", workspace, element_name],)
assert res.exit_code == 0
# Build first, this will record the last successful build in local state
@@ -370,19 +323,14 @@ def test_workspace_missing_last_successful(cli, datafiles):
# Check that we can still read failed workspace logs
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_workspace_failed_logs(cli, datafiles):
project = str(datafiles)
workspace = os.path.join(cli.directory, "failing_amhello")
element_name = "autotools/amhello-failure.bst"
# Open workspace
- res = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace, element_name],
- )
+ res = cli.run(project=project, args=["workspace", "open", "--directory", workspace, element_name],)
res.assert_success()
# Try to build and ensure the build fails
diff --git a/tests/internals/cascache.py b/tests/internals/cascache.py
index a35d50040..81273aeaf 100644
--- a/tests/internals/cascache.py
+++ b/tests/internals/cascache.py
@@ -47,9 +47,7 @@ def test_report_when_cascache_exist_not_cleanly(tmp_path, monkeypatch):
def test_report_when_cascache_is_forcefully_killed(tmp_path, monkeypatch):
dummy_buildbox_casd = tmp_path.joinpath("buildbox-casd")
- dummy_buildbox_casd.write_text(
- "#!/bin/bash\ntrap 'echo hello' SIGTERM\nwhile :\ndo\nsleep 60\ndone"
- )
+ dummy_buildbox_casd.write_text("#!/bin/bash\ntrap 'echo hello' SIGTERM\nwhile :\ndo\nsleep 60\ndone")
dummy_buildbox_casd.chmod(0o777)
monkeypatch.setenv("PATH", str(tmp_path), prepend=os.pathsep)
diff --git a/tests/internals/context.py b/tests/internals/context.py
index c219d5f5d..c2ee1efb5 100644
--- a/tests/internals/context.py
+++ b/tests/internals/context.py
@@ -53,9 +53,7 @@ def test_context_load_envvar(context_fixture):
assert isinstance(context, Context)
context.load(config=os.devnull)
- assert context.sourcedir == os.path.join(
- "/", "some", "path", "buildstream", "sources"
- )
+ assert context.sourcedir == os.path.join("/", "some", "path", "buildstream", "sources")
assert context.builddir == os.path.join("/", "some", "path", "buildstream", "build")
assert context.cachedir == os.path.join("/", "some", "path", "buildstream")
assert context.logdir == os.path.join("/", "some", "path", "buildstream", "logs")
diff --git a/tests/internals/pluginfactory.py b/tests/internals/pluginfactory.py
index 13c204752..f8d02ccc5 100644
--- a/tests/internals/pluginfactory.py
+++ b/tests/internals/pluginfactory.py
@@ -44,11 +44,7 @@ def test_element_factory(plugin_fixture):
def test_custom_source(plugin_fixture, datafiles):
plugins = [
Node.from_dict(
- {
- "origin": "local",
- "path": os.path.join(datafiles.dirname, datafiles.basename),
- "plugins": ["foo"],
- }
+ {"origin": "local", "path": os.path.join(datafiles.dirname, datafiles.basename), "plugins": ["foo"],}
)
]
factory = SourceFactory(plugin_fixture["base"], plugin_origins=plugins)
@@ -62,11 +58,7 @@ def test_custom_source(plugin_fixture, datafiles):
def test_custom_element(plugin_fixture, datafiles):
plugins = [
Node.from_dict(
- {
- "origin": "local",
- "path": os.path.join(datafiles.dirname, datafiles.basename),
- "plugins": ["foo"],
- }
+ {"origin": "local", "path": os.path.join(datafiles.dirname, datafiles.basename), "plugins": ["foo"],}
)
]
factory = ElementFactory(plugin_fixture["base"], plugin_origins=plugins)
@@ -102,11 +94,7 @@ def test_missing_element(plugin_fixture):
def test_source_notatype(plugin_fixture, datafiles):
plugins = [
Node.from_dict(
- {
- "origin": "local",
- "path": os.path.join(datafiles.dirname, datafiles.basename),
- "plugins": ["foo"],
- }
+ {"origin": "local", "path": os.path.join(datafiles.dirname, datafiles.basename), "plugins": ["foo"],}
)
]
factory = SourceFactory(plugin_fixture["base"], plugin_origins=plugins)
@@ -119,11 +107,7 @@ def test_source_notatype(plugin_fixture, datafiles):
def test_element_notatype(plugin_fixture, datafiles):
plugins = [
Node.from_dict(
- {
- "origin": "local",
- "path": os.path.join(datafiles.dirname, datafiles.basename),
- "plugins": ["foo"],
- }
+ {"origin": "local", "path": os.path.join(datafiles.dirname, datafiles.basename), "plugins": ["foo"],}
)
]
factory = ElementFactory(plugin_fixture["base"], plugin_origins=plugins)
@@ -137,11 +121,7 @@ def test_element_notatype(plugin_fixture, datafiles):
def test_source_wrongtype(plugin_fixture, datafiles):
plugins = [
Node.from_dict(
- {
- "origin": "local",
- "path": os.path.join(datafiles.dirname, datafiles.basename),
- "plugins": ["foo"],
- }
+ {"origin": "local", "path": os.path.join(datafiles.dirname, datafiles.basename), "plugins": ["foo"],}
)
]
factory = SourceFactory(plugin_fixture["base"], plugin_origins=plugins)
@@ -155,11 +135,7 @@ def test_source_wrongtype(plugin_fixture, datafiles):
def test_element_wrongtype(plugin_fixture, datafiles):
plugins = [
Node.from_dict(
- {
- "origin": "local",
- "path": os.path.join(datafiles.dirname, datafiles.basename),
- "plugins": ["foo"],
- }
+ {"origin": "local", "path": os.path.join(datafiles.dirname, datafiles.basename), "plugins": ["foo"],}
)
]
factory = ElementFactory(plugin_fixture["base"], plugin_origins=plugins)
@@ -172,11 +148,7 @@ def test_element_wrongtype(plugin_fixture, datafiles):
def test_source_missing_setup(plugin_fixture, datafiles):
plugins = [
Node.from_dict(
- {
- "origin": "local",
- "path": os.path.join(datafiles.dirname, datafiles.basename),
- "plugins": ["foo"],
- }
+ {"origin": "local", "path": os.path.join(datafiles.dirname, datafiles.basename), "plugins": ["foo"],}
)
]
factory = SourceFactory(plugin_fixture["base"], plugin_origins=plugins)
@@ -189,11 +161,7 @@ def test_source_missing_setup(plugin_fixture, datafiles):
def test_element_missing_setup(plugin_fixture, datafiles):
plugins = [
Node.from_dict(
- {
- "origin": "local",
- "path": os.path.join(datafiles.dirname, datafiles.basename),
- "plugins": ["foo"],
- }
+ {"origin": "local", "path": os.path.join(datafiles.dirname, datafiles.basename), "plugins": ["foo"],}
)
]
factory = ElementFactory(plugin_fixture["base"], plugin_origins=plugins)
@@ -207,11 +175,7 @@ def test_element_missing_setup(plugin_fixture, datafiles):
def test_source_bad_setup(plugin_fixture, datafiles):
plugins = [
Node.from_dict(
- {
- "origin": "local",
- "path": os.path.join(datafiles.dirname, datafiles.basename),
- "plugins": ["foo"],
- }
+ {"origin": "local", "path": os.path.join(datafiles.dirname, datafiles.basename), "plugins": ["foo"],}
)
]
factory = SourceFactory(plugin_fixture["base"], plugin_origins=plugins)
@@ -225,11 +189,7 @@ def test_source_bad_setup(plugin_fixture, datafiles):
def test_element_bad_setup(plugin_fixture, datafiles):
plugins = [
Node.from_dict(
- {
- "origin": "local",
- "path": os.path.join(datafiles.dirname, datafiles.basename),
- "plugins": ["foo"],
- }
+ {"origin": "local", "path": os.path.join(datafiles.dirname, datafiles.basename), "plugins": ["foo"],}
)
]
factory = ElementFactory(plugin_fixture["base"], plugin_origins=plugins)
@@ -243,11 +203,7 @@ def test_element_bad_setup(plugin_fixture, datafiles):
def test_source_badversion(plugin_fixture, datafiles):
plugins = [
Node.from_dict(
- {
- "origin": "local",
- "path": os.path.join(datafiles.dirname, datafiles.basename),
- "plugins": ["foo"],
- }
+ {"origin": "local", "path": os.path.join(datafiles.dirname, datafiles.basename), "plugins": ["foo"],}
)
]
factory = SourceFactory(plugin_fixture["base"], plugin_origins=plugins)
@@ -261,11 +217,7 @@ def test_source_badversion(plugin_fixture, datafiles):
def test_element_badversion(plugin_fixture, datafiles):
plugins = [
Node.from_dict(
- {
- "origin": "local",
- "path": os.path.join(datafiles.dirname, datafiles.basename),
- "plugins": ["foo"],
- }
+ {"origin": "local", "path": os.path.join(datafiles.dirname, datafiles.basename), "plugins": ["foo"],}
)
]
factory = ElementFactory(plugin_fixture["base"], plugin_origins=plugins)
@@ -290,9 +242,7 @@ def test_source_multicontext(plugin_fixture, datafiles):
plugins2 = Node.from_dict(
{
"origin": "local",
- "path": os.path.join(
- datafiles.dirname, datafiles.basename, "anothersource"
- ),
+ "path": os.path.join(datafiles.dirname, datafiles.basename, "anothersource"),
"plugins": ["foo"],
}
)
@@ -314,18 +264,14 @@ def test_element_multicontext(plugin_fixture, datafiles):
plugins1 = Node.from_dict(
{
"origin": "local",
- "path": os.path.join(
- datafiles.dirname, datafiles.basename, "customelement"
- ),
+ "path": os.path.join(datafiles.dirname, datafiles.basename, "customelement"),
"plugins": ["foo"],
}
)
plugins2 = Node.from_dict(
{
"origin": "local",
- "path": os.path.join(
- datafiles.dirname, datafiles.basename, "anotherelement"
- ),
+ "path": os.path.join(datafiles.dirname, datafiles.basename, "anotherelement"),
"plugins": ["foo"],
}
)
diff --git a/tests/internals/pluginloading.py b/tests/internals/pluginloading.py
index 5527bf5cc..83944bbd9 100644
--- a/tests/internals/pluginloading.py
+++ b/tests/internals/pluginloading.py
@@ -43,9 +43,7 @@ def test_customelement(datafiles, tmpdir):
def test_badversionsource(datafiles, tmpdir):
basedir = str(datafiles)
- with pytest.raises(LoadError) as exc, create_pipeline(
- tmpdir, basedir, "simple.bst"
- ):
+ with pytest.raises(LoadError) as exc, create_pipeline(tmpdir, basedir, "simple.bst"):
pass
assert exc.value.reason == LoadErrorReason.UNSUPPORTED_PLUGIN
@@ -55,9 +53,7 @@ def test_badversionsource(datafiles, tmpdir):
def test_badversionelement(datafiles, tmpdir):
basedir = str(datafiles)
- with pytest.raises(LoadError) as exc, create_pipeline(
- tmpdir, basedir, "simple.bst"
- ):
+ with pytest.raises(LoadError) as exc, create_pipeline(tmpdir, basedir, "simple.bst"):
pass
assert exc.value.reason == LoadErrorReason.UNSUPPORTED_PLUGIN
diff --git a/tests/internals/storage.py b/tests/internals/storage.py
index 27dd7e88b..8aa7f4a17 100644
--- a/tests/internals/storage.py
+++ b/tests/internals/storage.py
@@ -47,9 +47,7 @@ def test_modified_file_list(tmpdir, datafiles, backend):
c.import_files(overlay)
- print(
- "List of all paths in imported results: {}".format(c.list_relative_paths())
- )
+ print("List of all paths in imported results: {}".format(c.list_relative_paths()))
assert "bin/bash" in c.list_relative_paths()
assert "bin/bash" in c.list_modified_paths()
assert "bin/hello" not in c.list_modified_paths()
diff --git a/tests/internals/storage_vdir_import.py b/tests/internals/storage_vdir_import.py
index 225191b43..63ab8bc2f 100644
--- a/tests/internals/storage_vdir_import.py
+++ b/tests/internals/storage_vdir_import.py
@@ -194,9 +194,7 @@ def _import_test(tmpdir, original, overlay, generator_function, verify_contents=
d2 = create_new_casdir(overlay, cas_cache, tmpdir)
d.import_files(d2)
export_dir = os.path.join(tmpdir, "output-{}-{}".format(original, overlay))
- roundtrip_dir = os.path.join(
- tmpdir, "roundtrip-{}-{}".format(original, overlay)
- )
+ roundtrip_dir = os.path.join(tmpdir, "roundtrip-{}-{}".format(original, overlay))
d2.export_files(roundtrip_dir)
d.export_files(export_dir)
@@ -209,9 +207,7 @@ def _import_test(tmpdir, original, overlay, generator_function, verify_contents=
# The file should not have overwritten the directory in this case.
pass
else:
- assert os.path.isfile(
- realpath
- ), "{} did not exist in the combined virtual directory".format(
+ assert os.path.isfile(realpath), "{} did not exist in the combined virtual directory".format(
path
)
assert file_contents_are(realpath, content)
@@ -241,17 +237,13 @@ def _import_test(tmpdir, original, overlay, generator_function, verify_contents=
@pytest.mark.parametrize("original", range(1, len(root_filesets) + 1))
@pytest.mark.parametrize("overlay", range(1, len(root_filesets) + 1))
def test_fixed_cas_import(tmpdir, original, overlay):
- _import_test(
- str(tmpdir), original, overlay, generate_import_roots, verify_contents=True
- )
+ _import_test(str(tmpdir), original, overlay, generate_import_roots, verify_contents=True)
@pytest.mark.parametrize("original", range(1, NUM_RANDOM_TESTS + 1))
@pytest.mark.parametrize("overlay", range(1, NUM_RANDOM_TESTS + 1))
def test_random_cas_import(tmpdir, original, overlay):
- _import_test(
- str(tmpdir), original, overlay, generate_random_root, verify_contents=False
- )
+ _import_test(str(tmpdir), original, overlay, generate_random_root, verify_contents=False)
def _listing_test(tmpdir, root, generator_function):
diff --git a/tests/internals/yaml.py b/tests/internals/yaml.py
index e25cf70b3..0958bc156 100644
--- a/tests/internals/yaml.py
+++ b/tests/internals/yaml.py
@@ -197,44 +197,12 @@ def test_composite_preserve_originals(datafiles):
("listappend.yaml", 0, 9, "silly", "basics.yaml", 8, 8),
("listappend.yaml", 6, 9, "sleepy", "basics.yaml", 20, 8),
# Test results of compositing with both (<) and (>) directives
- (
- "listappendprepend.yaml",
- 0,
- 11,
- "prepended1",
- "listappendprepend.yaml",
- 5,
- 10,
- ),
- (
- "listappendprepend.yaml",
- 1,
- 11,
- "prepended2",
- "listappendprepend.yaml",
- 7,
- 10,
- ),
+ ("listappendprepend.yaml", 0, 11, "prepended1", "listappendprepend.yaml", 5, 10,),
+ ("listappendprepend.yaml", 1, 11, "prepended2", "listappendprepend.yaml", 7, 10,),
("listappendprepend.yaml", 2, 11, "silly", "basics.yaml", 8, 8),
("listappendprepend.yaml", 8, 11, "sleepy", "basics.yaml", 20, 8),
- (
- "listappendprepend.yaml",
- 9,
- 11,
- "appended1",
- "listappendprepend.yaml",
- 10,
- 10,
- ),
- (
- "listappendprepend.yaml",
- 10,
- 11,
- "appended2",
- "listappendprepend.yaml",
- 12,
- 10,
- ),
+ ("listappendprepend.yaml", 9, 11, "appended1", "listappendprepend.yaml", 10, 10,),
+ ("listappendprepend.yaml", 10, 11, "appended2", "listappendprepend.yaml", 12, 10,),
# Test results of compositing with the (=) overwrite directive
("listoverwrite.yaml", 0, 2, "overwrite1", "listoverwrite.yaml", 5, 10),
("listoverwrite.yaml", 1, 2, "overwrite2", "listoverwrite.yaml", 7, 10),
@@ -243,9 +211,7 @@ def test_composite_preserve_originals(datafiles):
("implicitoverwrite.yaml", 1, 2, "overwrite2", "implicitoverwrite.yaml", 6, 8),
],
)
-def test_list_composition(
- datafiles, filename, tmpdir, index, length, mood, prov_file, prov_line, prov_col
-):
+def test_list_composition(datafiles, filename, tmpdir, index, length, mood, prov_file, prov_line, prov_col):
base_file = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
overlay_file = os.path.join(datafiles.dirname, datafiles.basename, filename)
@@ -266,9 +232,7 @@ def test_list_composition(
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_list_deletion(datafiles):
base = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
- overlay = os.path.join(
- datafiles.dirname, datafiles.basename, "listoverwriteempty.yaml"
- )
+ overlay = os.path.join(datafiles.dirname, datafiles.basename, "listoverwriteempty.yaml")
base = _yaml.load(base, shortname="basics.yaml")
overlay = _yaml.load(overlay, shortname="listoverwriteempty.yaml")
@@ -309,362 +273,56 @@ def test_list_deletion(datafiles):
"filename1,filename2,index,length,mood,prov_file,prov_line,prov_col",
[
# Test results of compositing literal list with (>) and then (<)
- (
- "listprepend.yaml",
- "listappend.yaml",
- 0,
- 11,
- "prepended1",
- "listprepend.yaml",
- 5,
- 10,
- ),
- (
- "listprepend.yaml",
- "listappend.yaml",
- 1,
- 11,
- "prepended2",
- "listprepend.yaml",
- 7,
- 10,
- ),
+ ("listprepend.yaml", "listappend.yaml", 0, 11, "prepended1", "listprepend.yaml", 5, 10,),
+ ("listprepend.yaml", "listappend.yaml", 1, 11, "prepended2", "listprepend.yaml", 7, 10,),
("listprepend.yaml", "listappend.yaml", 2, 11, "silly", "basics.yaml", 8, 8),
("listprepend.yaml", "listappend.yaml", 8, 11, "sleepy", "basics.yaml", 20, 8),
- (
- "listprepend.yaml",
- "listappend.yaml",
- 9,
- 11,
- "appended1",
- "listappend.yaml",
- 5,
- 10,
- ),
- (
- "listprepend.yaml",
- "listappend.yaml",
- 10,
- 11,
- "appended2",
- "listappend.yaml",
- 7,
- 10,
- ),
+ ("listprepend.yaml", "listappend.yaml", 9, 11, "appended1", "listappend.yaml", 5, 10,),
+ ("listprepend.yaml", "listappend.yaml", 10, 11, "appended2", "listappend.yaml", 7, 10,),
# Test results of compositing literal list with (<) and then (>)
- (
- "listappend.yaml",
- "listprepend.yaml",
- 0,
- 11,
- "prepended1",
- "listprepend.yaml",
- 5,
- 10,
- ),
- (
- "listappend.yaml",
- "listprepend.yaml",
- 1,
- 11,
- "prepended2",
- "listprepend.yaml",
- 7,
- 10,
- ),
+ ("listappend.yaml", "listprepend.yaml", 0, 11, "prepended1", "listprepend.yaml", 5, 10,),
+ ("listappend.yaml", "listprepend.yaml", 1, 11, "prepended2", "listprepend.yaml", 7, 10,),
("listappend.yaml", "listprepend.yaml", 2, 11, "silly", "basics.yaml", 8, 8),
("listappend.yaml", "listprepend.yaml", 8, 11, "sleepy", "basics.yaml", 20, 8),
- (
- "listappend.yaml",
- "listprepend.yaml",
- 9,
- 11,
- "appended1",
- "listappend.yaml",
- 5,
- 10,
- ),
- (
- "listappend.yaml",
- "listprepend.yaml",
- 10,
- 11,
- "appended2",
- "listappend.yaml",
- 7,
- 10,
- ),
+ ("listappend.yaml", "listprepend.yaml", 9, 11, "appended1", "listappend.yaml", 5, 10,),
+ ("listappend.yaml", "listprepend.yaml", 10, 11, "appended2", "listappend.yaml", 7, 10,),
# Test results of compositing literal list with (>) and then (>)
("listappend.yaml", "secondappend.yaml", 0, 11, "silly", "basics.yaml", 8, 8),
("listappend.yaml", "secondappend.yaml", 6, 11, "sleepy", "basics.yaml", 20, 8),
- (
- "listappend.yaml",
- "secondappend.yaml",
- 7,
- 11,
- "appended1",
- "listappend.yaml",
- 5,
- 10,
- ),
- (
- "listappend.yaml",
- "secondappend.yaml",
- 8,
- 11,
- "appended2",
- "listappend.yaml",
- 7,
- 10,
- ),
- (
- "listappend.yaml",
- "secondappend.yaml",
- 9,
- 11,
- "secondappend1",
- "secondappend.yaml",
- 5,
- 10,
- ),
- (
- "listappend.yaml",
- "secondappend.yaml",
- 10,
- 11,
- "secondappend2",
- "secondappend.yaml",
- 7,
- 10,
- ),
+ ("listappend.yaml", "secondappend.yaml", 7, 11, "appended1", "listappend.yaml", 5, 10,),
+ ("listappend.yaml", "secondappend.yaml", 8, 11, "appended2", "listappend.yaml", 7, 10,),
+ ("listappend.yaml", "secondappend.yaml", 9, 11, "secondappend1", "secondappend.yaml", 5, 10,),
+ ("listappend.yaml", "secondappend.yaml", 10, 11, "secondappend2", "secondappend.yaml", 7, 10,),
# Test results of compositing literal list with (>) and then (>)
- (
- "listprepend.yaml",
- "secondprepend.yaml",
- 0,
- 11,
- "secondprepend1",
- "secondprepend.yaml",
- 5,
- 10,
- ),
- (
- "listprepend.yaml",
- "secondprepend.yaml",
- 1,
- 11,
- "secondprepend2",
- "secondprepend.yaml",
- 7,
- 10,
- ),
- (
- "listprepend.yaml",
- "secondprepend.yaml",
- 2,
- 11,
- "prepended1",
- "listprepend.yaml",
- 5,
- 10,
- ),
- (
- "listprepend.yaml",
- "secondprepend.yaml",
- 3,
- 11,
- "prepended2",
- "listprepend.yaml",
- 7,
- 10,
- ),
+ ("listprepend.yaml", "secondprepend.yaml", 0, 11, "secondprepend1", "secondprepend.yaml", 5, 10,),
+ ("listprepend.yaml", "secondprepend.yaml", 1, 11, "secondprepend2", "secondprepend.yaml", 7, 10,),
+ ("listprepend.yaml", "secondprepend.yaml", 2, 11, "prepended1", "listprepend.yaml", 5, 10,),
+ ("listprepend.yaml", "secondprepend.yaml", 3, 11, "prepended2", "listprepend.yaml", 7, 10,),
("listprepend.yaml", "secondprepend.yaml", 4, 11, "silly", "basics.yaml", 8, 8),
- (
- "listprepend.yaml",
- "secondprepend.yaml",
- 10,
- 11,
- "sleepy",
- "basics.yaml",
- 20,
- 8,
- ),
+ ("listprepend.yaml", "secondprepend.yaml", 10, 11, "sleepy", "basics.yaml", 20, 8,),
# Test results of compositing literal list with (>) or (<) and then another literal list
- (
- "listappend.yaml",
- "implicitoverwrite.yaml",
- 0,
- 2,
- "overwrite1",
- "implicitoverwrite.yaml",
- 4,
- 8,
- ),
- (
- "listappend.yaml",
- "implicitoverwrite.yaml",
- 1,
- 2,
- "overwrite2",
- "implicitoverwrite.yaml",
- 6,
- 8,
- ),
- (
- "listprepend.yaml",
- "implicitoverwrite.yaml",
- 0,
- 2,
- "overwrite1",
- "implicitoverwrite.yaml",
- 4,
- 8,
- ),
- (
- "listprepend.yaml",
- "implicitoverwrite.yaml",
- 1,
- 2,
- "overwrite2",
- "implicitoverwrite.yaml",
- 6,
- 8,
- ),
+ ("listappend.yaml", "implicitoverwrite.yaml", 0, 2, "overwrite1", "implicitoverwrite.yaml", 4, 8,),
+ ("listappend.yaml", "implicitoverwrite.yaml", 1, 2, "overwrite2", "implicitoverwrite.yaml", 6, 8,),
+ ("listprepend.yaml", "implicitoverwrite.yaml", 0, 2, "overwrite1", "implicitoverwrite.yaml", 4, 8,),
+ ("listprepend.yaml", "implicitoverwrite.yaml", 1, 2, "overwrite2", "implicitoverwrite.yaml", 6, 8,),
# Test results of compositing literal list with (>) or (<) and then an explicit (=) overwrite
- (
- "listappend.yaml",
- "listoverwrite.yaml",
- 0,
- 2,
- "overwrite1",
- "listoverwrite.yaml",
- 5,
- 10,
- ),
- (
- "listappend.yaml",
- "listoverwrite.yaml",
- 1,
- 2,
- "overwrite2",
- "listoverwrite.yaml",
- 7,
- 10,
- ),
- (
- "listprepend.yaml",
- "listoverwrite.yaml",
- 0,
- 2,
- "overwrite1",
- "listoverwrite.yaml",
- 5,
- 10,
- ),
- (
- "listprepend.yaml",
- "listoverwrite.yaml",
- 1,
- 2,
- "overwrite2",
- "listoverwrite.yaml",
- 7,
- 10,
- ),
+ ("listappend.yaml", "listoverwrite.yaml", 0, 2, "overwrite1", "listoverwrite.yaml", 5, 10,),
+ ("listappend.yaml", "listoverwrite.yaml", 1, 2, "overwrite2", "listoverwrite.yaml", 7, 10,),
+ ("listprepend.yaml", "listoverwrite.yaml", 0, 2, "overwrite1", "listoverwrite.yaml", 5, 10,),
+ ("listprepend.yaml", "listoverwrite.yaml", 1, 2, "overwrite2", "listoverwrite.yaml", 7, 10,),
# Test results of compositing literal list an explicit overwrite (=) and then with (>) or (<)
- (
- "listoverwrite.yaml",
- "listappend.yaml",
- 0,
- 4,
- "overwrite1",
- "listoverwrite.yaml",
- 5,
- 10,
- ),
- (
- "listoverwrite.yaml",
- "listappend.yaml",
- 1,
- 4,
- "overwrite2",
- "listoverwrite.yaml",
- 7,
- 10,
- ),
- (
- "listoverwrite.yaml",
- "listappend.yaml",
- 2,
- 4,
- "appended1",
- "listappend.yaml",
- 5,
- 10,
- ),
- (
- "listoverwrite.yaml",
- "listappend.yaml",
- 3,
- 4,
- "appended2",
- "listappend.yaml",
- 7,
- 10,
- ),
- (
- "listoverwrite.yaml",
- "listprepend.yaml",
- 0,
- 4,
- "prepended1",
- "listprepend.yaml",
- 5,
- 10,
- ),
- (
- "listoverwrite.yaml",
- "listprepend.yaml",
- 1,
- 4,
- "prepended2",
- "listprepend.yaml",
- 7,
- 10,
- ),
- (
- "listoverwrite.yaml",
- "listprepend.yaml",
- 2,
- 4,
- "overwrite1",
- "listoverwrite.yaml",
- 5,
- 10,
- ),
- (
- "listoverwrite.yaml",
- "listprepend.yaml",
- 3,
- 4,
- "overwrite2",
- "listoverwrite.yaml",
- 7,
- 10,
- ),
+ ("listoverwrite.yaml", "listappend.yaml", 0, 4, "overwrite1", "listoverwrite.yaml", 5, 10,),
+ ("listoverwrite.yaml", "listappend.yaml", 1, 4, "overwrite2", "listoverwrite.yaml", 7, 10,),
+ ("listoverwrite.yaml", "listappend.yaml", 2, 4, "appended1", "listappend.yaml", 5, 10,),
+ ("listoverwrite.yaml", "listappend.yaml", 3, 4, "appended2", "listappend.yaml", 7, 10,),
+ ("listoverwrite.yaml", "listprepend.yaml", 0, 4, "prepended1", "listprepend.yaml", 5, 10,),
+ ("listoverwrite.yaml", "listprepend.yaml", 1, 4, "prepended2", "listprepend.yaml", 7, 10,),
+ ("listoverwrite.yaml", "listprepend.yaml", 2, 4, "overwrite1", "listoverwrite.yaml", 5, 10,),
+ ("listoverwrite.yaml", "listprepend.yaml", 3, 4, "overwrite2", "listoverwrite.yaml", 7, 10,),
],
)
def test_list_composition_twice(
- datafiles,
- tmpdir,
- filename1,
- filename2,
- index,
- length,
- mood,
- prov_file,
- prov_line,
- prov_col,
+ datafiles, tmpdir, filename1, filename2, index, length, mood, prov_file, prov_line, prov_col,
):
file_base = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
file1 = os.path.join(datafiles.dirname, datafiles.basename, filename1)
@@ -707,9 +365,7 @@ def test_list_composition_twice(
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_convert_value_to_string(datafiles):
- conf_file = os.path.join(
- datafiles.dirname, datafiles.basename, "convert_value_to_str.yaml"
- )
+ conf_file = os.path.join(datafiles.dirname, datafiles.basename, "convert_value_to_str.yaml")
# Run file through yaml to convert it
test_dict = _yaml.load(conf_file)
@@ -733,9 +389,7 @@ def test_convert_value_to_string(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_value_doesnt_match_expected(datafiles):
- conf_file = os.path.join(
- datafiles.dirname, datafiles.basename, "convert_value_to_str.yaml"
- )
+ conf_file = os.path.join(datafiles.dirname, datafiles.basename, "convert_value_to_str.yaml")
# Run file through yaml to convert it
test_dict = _yaml.load(conf_file)
@@ -748,9 +402,7 @@ def test_value_doesnt_match_expected(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
@pytest.mark.parametrize("fromdisk", [(True), (False)])
def test_roundtrip_dump(datafiles, fromdisk):
- filename = os.path.join(
- datafiles.dirname, datafiles.basename, "roundtrip-test.yaml"
- )
+ filename = os.path.join(datafiles.dirname, datafiles.basename, "roundtrip-test.yaml")
with open(filename, "r") as fh:
rt_raw = fh.read()
if fromdisk:
@@ -787,9 +439,7 @@ def test_roundtrip_dump(datafiles, fromdisk):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
-@pytest.mark.parametrize(
- "case", [["a", "b", "c"], ["foo", 1], ["stuff", 0, "colour"], ["bird", 0, 1],]
-)
+@pytest.mark.parametrize("case", [["a", "b", "c"], ["foo", 1], ["stuff", 0, "colour"], ["bird", 0, 1],])
def test_node_find_target(datafiles, case):
filename = os.path.join(datafiles.dirname, datafiles.basename, "traversal.yaml")
# We set copy_tree in order to ensure that the nodes in `loaded`
diff --git a/tests/plugins/deprecationwarnings/deprecationwarnings.py b/tests/plugins/deprecationwarnings/deprecationwarnings.py
index 6f20eae8c..628faea68 100644
--- a/tests/plugins/deprecationwarnings/deprecationwarnings.py
+++ b/tests/plugins/deprecationwarnings/deprecationwarnings.py
@@ -11,9 +11,7 @@ from buildstream.testing import cli # pylint: disable=unused-import
DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
_DEPRECATION_MESSAGE = "Here is some detail."
-_DEPRECATION_WARNING = "Using deprecated plugin deprecated_plugin: {}".format(
- _DEPRECATION_MESSAGE
-)
+_DEPRECATION_WARNING = "Using deprecated plugin deprecated_plugin: {}".format(_DEPRECATION_MESSAGE)
@pytest.mark.datafiles(DATA_DIR)
@@ -29,11 +27,7 @@ def test_suppress_deprecation_warning(cli, datafiles):
project = str(datafiles)
cli.run(project=project, args=["show", "manual.bst"])
- element_overrides = (
- "elements:\n"
- " deprecated_plugin:\n"
- " suppress-deprecation-warnings : True\n"
- )
+ element_overrides = "elements:\n" " deprecated_plugin:\n" " suppress-deprecation-warnings : True\n"
project_conf = os.path.join(project, "project.conf")
with open(project_conf, "a") as f:
diff --git a/tests/remoteexecution/buildfail.py b/tests/remoteexecution/buildfail.py
index bd1c81891..8802a311c 100644
--- a/tests/remoteexecution/buildfail.py
+++ b/tests/remoteexecution/buildfail.py
@@ -23,9 +23,7 @@ import pytest
from buildstream._exceptions import ErrorDomain
from buildstream import _yaml
-from buildstream.testing import (
- cli_remote_execution as cli,
-) # pylint: disable=unused-import
+from buildstream.testing import cli_remote_execution as cli # pylint: disable=unused-import
pytestmark = pytest.mark.remoteexecution
@@ -54,10 +52,7 @@ def test_build_remote_failure(cli, datafiles):
result = cli.run(project=project, args=["build", "element.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "element.bst", "--directory", checkout_path],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "element.bst", "--directory", checkout_path],)
result.assert_success()
# check that the file created before the failure exists
diff --git a/tests/remoteexecution/buildtree.py b/tests/remoteexecution/buildtree.py
index 86efb29f1..7c763e1fb 100644
--- a/tests/remoteexecution/buildtree.py
+++ b/tests/remoteexecution/buildtree.py
@@ -21,9 +21,7 @@ import os
import shutil
import pytest
-from buildstream.testing import (
- cli_remote_execution as cli,
-) # pylint: disable=unused-import
+from buildstream.testing import cli_remote_execution as cli # pylint: disable=unused-import
from tests.testutils import create_artifact_share
@@ -44,16 +42,10 @@ def test_buildtree_remote(cli, tmpdir, datafiles):
with create_artifact_share(share_path) as share:
cli.configure(
- {
- "artifacts": {"url": share.repo, "push": True},
- "cache": {"pull-buildtrees": False},
- }
+ {"artifacts": {"url": share.repo, "push": True}, "cache": {"pull-buildtrees": False},}
)
- res = cli.run(
- project=project,
- args=["--cache-buildtrees", "always", "build", element_name],
- )
+ res = cli.run(project=project, args=["--cache-buildtrees", "always", "build", element_name],)
res.assert_success()
# remove local cache
@@ -61,45 +53,21 @@ def test_buildtree_remote(cli, tmpdir, datafiles):
shutil.rmtree(os.path.join(str(tmpdir), "cache", "artifacts"))
# pull without buildtree
- res = cli.run(
- project=project, args=["artifact", "pull", "--deps", "all", element_name]
- )
+ res = cli.run(project=project, args=["artifact", "pull", "--deps", "all", element_name])
res.assert_success()
# check shell doesn't work
- res = cli.run(
- project=project,
- args=["shell", "--build", element_name, "--", "cat", "test"],
- )
+ res = cli.run(project=project, args=["shell", "--build", element_name, "--", "cat", "test"],)
res.assert_shell_error()
# pull with buildtree
- res = cli.run(
- project=project,
- args=[
- "--pull-buildtrees",
- "artifact",
- "pull",
- "--deps",
- "all",
- element_name,
- ],
- )
+ res = cli.run(project=project, args=["--pull-buildtrees", "artifact", "pull", "--deps", "all", element_name,],)
res.assert_success()
# check it works this time
res = cli.run(
project=project,
- args=[
- "shell",
- "--build",
- element_name,
- "--use-buildtree",
- "always",
- "--",
- "cat",
- "test",
- ],
+ args=["shell", "--build", element_name, "--use-buildtree", "always", "--", "cat", "test",],
)
res.assert_success()
assert "Hi" in res.output
diff --git a/tests/remoteexecution/junction.py b/tests/remoteexecution/junction.py
index dd8d4d4e0..46bfaa8af 100644
--- a/tests/remoteexecution/junction.py
+++ b/tests/remoteexecution/junction.py
@@ -20,9 +20,7 @@
import os
import pytest
-from buildstream.testing import (
- cli_remote_execution as cli,
-) # pylint: disable=unused-import
+from buildstream.testing import cli_remote_execution as cli # pylint: disable=unused-import
from buildstream.testing import create_repo
from buildstream import _yaml
from tests.testutils import generate_junction
@@ -72,19 +70,12 @@ def test_junction_build_remote(cli, tmpdir, datafiles):
# Create a trackable element to depend on the cross junction element,
# this one has its ref resolved already
create_element(
- repo,
- "sub-target.bst",
- subproject_element_path,
- ["autotools/amhello.bst"],
- ref=ref,
+ repo, "sub-target.bst", subproject_element_path, ["autotools/amhello.bst"], ref=ref,
)
# Create a trackable element to depend on the cross junction element
create_element(
- repo,
- "target.bst",
- element_path,
- [{"junction": "junction.bst", "filename": "sub-target.bst"}],
+ repo, "target.bst", element_path, [{"junction": "junction.bst", "filename": "sub-target.bst"}],
)
# Create a repo to hold the subproject and generate a junction element for it
@@ -106,9 +97,7 @@ def test_junction_build_remote(cli, tmpdir, datafiles):
result.assert_success()
# track target to ensure we have refs
- result = cli.run(
- project=project, args=["source", "track", "--deps", "all", "composed.bst"]
- )
+ result = cli.run(project=project, args=["source", "track", "--deps", "all", "composed.bst"])
result.assert_success()
# build
diff --git a/tests/remoteexecution/partial.py b/tests/remoteexecution/partial.py
index 13b6ff853..9a9357b60 100644
--- a/tests/remoteexecution/partial.py
+++ b/tests/remoteexecution/partial.py
@@ -5,9 +5,7 @@ import os
import pytest
from buildstream._exceptions import ErrorDomain
-from buildstream.testing import (
- cli_remote_execution as cli,
-) # pylint: disable=unused-import
+from buildstream.testing import cli_remote_execution as cli # pylint: disable=unused-import
from buildstream.testing.integration import assert_contains
from tests.testutils.artifactshare import create_artifact_share
@@ -24,9 +22,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("pull_artifact_files", [True, False])
@pytest.mark.parametrize("build_all", [True, False])
-def test_build_dependency_partial_local_cas(
- cli, datafiles, pull_artifact_files, build_all
-):
+def test_build_dependency_partial_local_cas(cli, datafiles, pull_artifact_files, build_all):
project = str(datafiles)
element_name = "no-runtime-deps.bst"
builddep_element_name = "autotools/amhello.bst"
@@ -45,10 +41,7 @@ def test_build_dependency_partial_local_cas(
result.assert_success()
# Verify that artifact files are pulled when pull_artifact_files is set
- result = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout],)
if pull_artifact_files:
result.assert_success()
assert_contains(checkout, ["/test"])
@@ -57,14 +50,7 @@ def test_build_dependency_partial_local_cas(
# Verify build dependencies are pulled for ALL and BUILD
result = cli.run(
- project=project,
- args=[
- "artifact",
- "checkout",
- builddep_element_name,
- "--directory",
- builddep_checkout,
- ],
+ project=project, args=["artifact", "checkout", builddep_element_name, "--directory", builddep_checkout,],
)
if build_all and pull_artifact_files:
result.assert_success()
diff --git a/tests/remoteexecution/simple.py b/tests/remoteexecution/simple.py
index a0625038e..36371b1f3 100644
--- a/tests/remoteexecution/simple.py
+++ b/tests/remoteexecution/simple.py
@@ -4,9 +4,7 @@
import os
import pytest
-from buildstream.testing import (
- cli_remote_execution as cli,
-) # pylint: disable=unused-import
+from buildstream.testing import cli_remote_execution as cli # pylint: disable=unused-import
from buildstream.testing.integration import assert_contains
@@ -29,10 +27,7 @@ def test_remote_autotools_build(cli, datafiles):
result = cli.run(project=project, args=["build", element_name])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout],)
result.assert_success()
assert_contains(
diff --git a/tests/sandboxes/missing_dependencies.py b/tests/sandboxes/missing_dependencies.py
index 16754747e..b4967727f 100644
--- a/tests/sandboxes/missing_dependencies.py
+++ b/tests/sandboxes/missing_dependencies.py
@@ -12,9 +12,7 @@ from buildstream.testing import cli # pylint: disable=unused-import
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)), "missing-dependencies",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "missing-dependencies",)
def _symlink_host_tools_to_dir(host_tools, dir_):
@@ -44,9 +42,7 @@ def test_missing_bwrap_has_nice_error_message(cli, datafiles, tmp_path):
# Build without access to host tools, this should fail with a nice error
result = cli.run(
- project=project,
- args=["build", "element.bst"],
- env={"PATH": str(bin_dir), "BST_FORCE_SANDBOX": None},
+ project=project, args=["build", "element.bst"], env={"PATH": str(bin_dir), "BST_FORCE_SANDBOX": None},
)
result.assert_task_error(ErrorDomain.SANDBOX, "unavailable-local-sandbox")
assert "not found" in result.stderr
diff --git a/tests/sandboxes/remote-exec-config.py b/tests/sandboxes/remote-exec-config.py
index 889448954..623dcf1b5 100644
--- a/tests/sandboxes/remote-exec-config.py
+++ b/tests/sandboxes/remote-exec-config.py
@@ -9,9 +9,7 @@ from buildstream import _yaml
from buildstream._exceptions import ErrorDomain, LoadErrorReason
from buildstream.testing.runcli import cli # pylint: disable=unused-import
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)), "remote-exec-config"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "remote-exec-config")
# Tests that we get a useful error message when supplying invalid
# remote execution configurations.
@@ -38,9 +36,7 @@ def test_old_and_new_configs(cli, datafiles):
#
# This does not happen for a simple `bst show`.
result = cli.run(project=project, args=["artifact", "pull", "element.bst"])
- result.assert_main_error(
- ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA, "specify one"
- )
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA, "specify one")
# Assert that if either the client key or client cert is specified
@@ -48,8 +44,7 @@ def test_old_and_new_configs(cli, datafiles):
# instead of an unhandled exception.
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize(
- "config_key, config_value",
- [("client-cert", "client.crt"), ("client-key", "client.key")],
+ "config_key, config_value", [("client-cert", "client.crt"), ("client-key", "client.key")],
)
def test_missing_certs(cli, datafiles, config_key, config_value):
project = os.path.join(datafiles.dirname, datafiles.basename, "missing-certs")
@@ -58,10 +53,7 @@ def test_missing_certs(cli, datafiles, config_key, config_value):
"name": "test",
"remote-execution": {
"execution-service": {"url": "http://localhost:8088"},
- "storage-service": {
- "url": "http://charactron:11001",
- config_key: config_value,
- },
+ "storage-service": {"url": "http://charactron:11001", config_key: config_value,},
},
}
project_conf_file = os.path.join(project, "project.conf")
@@ -71,9 +63,7 @@ def test_missing_certs(cli, datafiles, config_key, config_value):
#
# This does not happen for a simple `bst show`.
result = cli.run(project=project, args=["show", "element.bst"])
- result.assert_main_error(
- ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA, "Your config is missing"
- )
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA, "Your config is missing")
# Assert that if incomplete information is supplied we get a sensible error message.
@@ -89,6 +79,4 @@ def test_empty_config(cli, datafiles):
#
# This does not happen for a simple `bst show`.
result = cli.run(project=project, args=["artifact", "pull", "element.bst"])
- result.assert_main_error(
- ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA, "specify one"
- )
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA, "specify one")
diff --git a/tests/sandboxes/selection.py b/tests/sandboxes/selection.py
index 70fbdac70..9275961ce 100644
--- a/tests/sandboxes/selection.py
+++ b/tests/sandboxes/selection.py
@@ -43,11 +43,7 @@ def test_force_sandbox(cli, datafiles):
_yaml.roundtrip_dump(element, element_path)
# Build without access to host tools, this will fail
- result = cli.run(
- project=project,
- args=["build", "element.bst"],
- env={"PATH": "", "BST_FORCE_SANDBOX": "bwrap"},
- )
+ result = cli.run(project=project, args=["build", "element.bst"], env={"PATH": "", "BST_FORCE_SANDBOX": "bwrap"},)
result.assert_main_error(ErrorDomain.PLATFORM, None)
assert "Bubblewrap not found" in result.stderr
# we have asked for a specific sandbox, but it is not available so
diff --git a/tests/sourcecache/cache.py b/tests/sourcecache/cache.py
index a4878e73d..bbc3d8329 100644
--- a/tests/sourcecache/cache.py
+++ b/tests/sourcecache/cache.py
@@ -108,9 +108,7 @@ def test_source_cache_key(cli, datafiles):
assert len(os.listdir(patch_protos)) == 1
# modify hello-patch file and check tracking updates refs
- with open(
- os.path.join(file_path, "dev-files", "usr", "include", "pony.h"), "a"
- ) as f:
+ with open(os.path.join(file_path, "dev-files", "usr", "include", "pony.h"), "a") as f:
f.write("\nappending nonsense")
res = cli.run(project=project_dir, args=["source", "track", element_name])
diff --git a/tests/sourcecache/config.py b/tests/sourcecache/config.py
index aaf46459e..ab1cf2f03 100644
--- a/tests/sourcecache/config.py
+++ b/tests/sourcecache/config.py
@@ -37,19 +37,14 @@ DATA_DIR = os.path.dirname(os.path.realpath(__file__))
# instead of an unhandled exception.
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize(
- "config_key, config_value",
- [("client-cert", "client.crt"), ("client-key", "client.key")],
+ "config_key, config_value", [("client-cert", "client.crt"), ("client-key", "client.key")],
)
def test_missing_certs(cli, datafiles, config_key, config_value):
project = os.path.join(datafiles.dirname, datafiles.basename, "missing-certs")
project_conf = {
"name": "test",
- "source-caches": {
- "url": "https://cache.example.com:12345",
- "push": "true",
- config_key: config_value,
- },
+ "source-caches": {"url": "https://cache.example.com:12345", "push": "true", config_key: config_value,},
}
project_conf_file = os.path.join(project, "project.conf")
_yaml.roundtrip_dump(project_conf, project_conf_file)
diff --git a/tests/sourcecache/fetch.py b/tests/sourcecache/fetch.py
index bc3f32e66..0c347ebbf 100644
--- a/tests/sourcecache/fetch.py
+++ b/tests/sourcecache/fetch.py
@@ -92,9 +92,7 @@ def test_source_fetch(cli, tmpdir, datafiles):
res = cli.run(project=project_dir, args=["build", element_name])
res.assert_success()
- assert (
- os.listdir(os.path.join(str(tmpdir), "cache", "sources", "git")) != []
- )
+ assert os.listdir(os.path.join(str(tmpdir), "cache", "sources", "git")) != []
# get root digest of source
sourcecache = context.sourcecache
@@ -115,9 +113,7 @@ def test_source_fetch(cli, tmpdir, datafiles):
# check that we have the source in the cas now and it's not fetched
assert element._source_cached()
- assert (
- os.listdir(os.path.join(str(tmpdir), "cache", "sources", "git")) == []
- )
+ assert os.listdir(os.path.join(str(tmpdir), "cache", "sources", "git")) == []
@pytest.mark.datafiles(DATA_DIR)
@@ -145,13 +141,9 @@ def test_fetch_fallback(cli, tmpdir, datafiles):
res.assert_success()
brief_key = source._get_brief_display_key()
assert (
- "Remote source service ({}) does not have source {} cached".format(
- share.repo, brief_key
- )
- ) in res.stderr
- assert (
- "SUCCESS Fetching from {}".format(repo.source_config(ref=ref)["url"])
+ "Remote source service ({}) does not have source {} cached".format(share.repo, brief_key)
) in res.stderr
+ assert ("SUCCESS Fetching from {}".format(repo.source_config(ref=ref)["url"])) in res.stderr
# Check that the source is both in the source dir and the local CAS
assert element._source_cached()
@@ -210,9 +202,7 @@ def test_source_pull_partial_fallback_fetch(cli, tmpdir, datafiles):
res = cli.run(project=project_dir, args=["build", element_name])
res.assert_success()
- assert (
- os.listdir(os.path.join(str(tmpdir), "cache", "sources", "git")) != []
- )
+ assert os.listdir(os.path.join(str(tmpdir), "cache", "sources", "git")) != []
# get root digest of source
sourcecache = context.sourcecache
@@ -221,9 +211,7 @@ def test_source_pull_partial_fallback_fetch(cli, tmpdir, datafiles):
move_local_cas_to_remote_source_share(str(cache_dir), share.directory)
# Remove the cas content, only keep the proto and such around
- shutil.rmtree(
- os.path.join(str(tmpdir), "sourceshare", "repo", "cas", "objects")
- )
+ shutil.rmtree(os.path.join(str(tmpdir), "sourceshare", "repo", "cas", "objects"))
# check the share doesn't have the object
assert not share.has_object(digest)
@@ -234,6 +222,4 @@ def test_source_pull_partial_fallback_fetch(cli, tmpdir, datafiles):
res = cli.run(project=project_dir, args=["source", "fetch", element_name])
res.assert_success()
- assert (
- "SUCCESS Fetching from {}".format(repo.source_config(ref=ref)["url"])
- ) in res.stderr
+ assert ("SUCCESS Fetching from {}".format(repo.source_config(ref=ref)["url"])) in res.stderr
diff --git a/tests/sourcecache/source-checkout.py b/tests/sourcecache/source-checkout.py
index 2d2e71565..f5096cefb 100644
--- a/tests/sourcecache/source-checkout.py
+++ b/tests/sourcecache/source-checkout.py
@@ -49,10 +49,7 @@ def test_source_checkout(tmpdir, datafiles, cli):
repo = create_element_size("target.bst", project_dir, element_path, [], 100000)
# check implicit fetching
- res = cli.run(
- project=project_dir,
- args=["source", "checkout", "--directory", target_dir, "target.bst"],
- )
+ res = cli.run(project=project_dir, args=["source", "checkout", "--directory", target_dir, "target.bst"],)
res.assert_success()
assert "Fetching from" in res.stderr
@@ -62,10 +59,7 @@ def test_source_checkout(tmpdir, datafiles, cli):
shutil.rmtree(target_dir)
shutil.rmtree(source_dir)
- res = cli.run(
- project=project_dir,
- args=["source", "checkout", "--directory", target_dir, "target.bst"],
- )
+ res = cli.run(project=project_dir, args=["source", "checkout", "--directory", target_dir, "target.bst"],)
res.assert_success()
assert "Fetching from" not in res.stderr
@@ -73,8 +67,5 @@ def test_source_checkout(tmpdir, datafiles, cli):
shutil.rmtree(target_dir)
shutil.rmtree(os.path.join(cache_dir, "cas"))
- res = cli.run(
- project=project_dir,
- args=["source", "checkout", "--directory", target_dir, "target.bst"],
- )
+ res = cli.run(project=project_dir, args=["source", "checkout", "--directory", target_dir, "target.bst"],)
res.assert_task_error(ErrorDomain.PLUGIN, None)
diff --git a/tests/sourcecache/workspace.py b/tests/sourcecache/workspace.py
index 3b6e265e9..42661edeb 100644
--- a/tests/sourcecache/workspace.py
+++ b/tests/sourcecache/workspace.py
@@ -55,10 +55,7 @@ def test_workspace_source_fetch(tmpdir, datafiles, cli):
shutil.rmtree(source_dir)
# Open a workspace and check that it fetches the original sources
- res = cli.run(
- project=project_dir,
- args=["workspace", "open", "target.bst", "--directory", workspace],
- )
+ res = cli.run(project=project_dir, args=["workspace", "open", "target.bst", "--directory", workspace],)
res.assert_success()
assert "Fetching from" in res.stderr
@@ -75,11 +72,7 @@ def test_workspace_open_no_source_push(tmpdir, datafiles, cli):
with create_artifact_share(share_dir) as share:
cli.configure(
- {
- "cachedir": cache_dir,
- "scheduler": {"pushers": 1},
- "source-caches": {"url": share.repo, "push": True,},
- }
+ {"cachedir": cache_dir, "scheduler": {"pushers": 1}, "source-caches": {"url": share.repo, "push": True,},}
)
# Fetch as in previous test and check it pushes the source
@@ -91,10 +84,7 @@ def test_workspace_open_no_source_push(tmpdir, datafiles, cli):
# clear the cas and open a workspace
shutil.rmtree(os.path.join(cache_dir, "cas"))
- res = cli.run(
- project=project_dir,
- args=["workspace", "open", "target.bst", "--directory", workspace],
- )
+ res = cli.run(project=project_dir, args=["workspace", "open", "target.bst", "--directory", workspace],)
res.assert_success()
# Check that this time it does not push the sources
diff --git a/tests/sources/bzr.py b/tests/sources/bzr.py
index 7df4d7471..694b30a7f 100644
--- a/tests/sources/bzr.py
+++ b/tests/sources/bzr.py
@@ -31,10 +31,7 @@ def test_fetch_checkout(cli, tmpdir, datafiles):
assert result.exit_code == 0
result = cli.run(project=project, args=["build", "target.bst"])
assert result.exit_code == 0
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
assert result.exit_code == 0
# Assert we checked out the file as it was committed
diff --git a/tests/sources/deb.py b/tests/sources/deb.py
index 5d1fdcceb..656a65052 100644
--- a/tests/sources/deb.py
+++ b/tests/sources/deb.py
@@ -19,9 +19,7 @@ deb_name = "a_deb.deb"
def generate_project(project_dir, tmpdir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.roundtrip_dump(
- {"name": "foo", "aliases": {"tmpdir": "file:///" + str(tmpdir)}}, project_file
- )
+ _yaml.roundtrip_dump({"name": "foo", "aliases": {"tmpdir": "file:///" + str(tmpdir)}}, project_file)
def _copy_deb(start_location, tmpdir):
@@ -102,10 +100,7 @@ def test_stage_default_basedir(cli, tmpdir, datafiles):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the content of the first directory is checked out (base-dir: '')
@@ -133,10 +128,7 @@ def test_stage_no_basedir(cli, tmpdir, datafiles):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the full content of the tarball is checked out (base-dir: '')
@@ -164,10 +156,7 @@ def test_stage_explicit_basedir(cli, tmpdir, datafiles):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the content of the first directory is checked out (base-dir: '')
diff --git a/tests/sources/git.py b/tests/sources/git.py
index 6a6f95364..fb8a30e3f 100644
--- a/tests/sources/git.py
+++ b/tests/sources/git.py
@@ -84,10 +84,7 @@ def test_submodule_fetch_checkout(cli, tmpdir, datafiles):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Assert we checked out both files at their expected location
@@ -124,10 +121,7 @@ def test_submodule_fetch_source_enable_explicit(cli, tmpdir, datafiles):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Assert we checked out both files at their expected location
@@ -164,10 +158,7 @@ def test_submodule_fetch_source_disable(cli, tmpdir, datafiles):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Assert we checked out both files at their expected location
@@ -204,10 +195,7 @@ def test_submodule_fetch_submodule_does_override(cli, tmpdir, datafiles):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Assert we checked out both files at their expected location
@@ -249,10 +237,7 @@ def test_submodule_fetch_submodule_individual_checkout(cli, tmpdir, datafiles):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Assert we checked out files at their expected location
@@ -281,9 +266,7 @@ def test_submodule_fetch_submodule_individual_checkout_explicit(cli, tmpdir, dat
# Add a submodule pointing to the one we created
repo.add_submodule("subdir", "file://" + subrepo.repo, checkout=False)
- ref = repo.add_submodule(
- "othersubdir", "file://" + other_subrepo.repo, checkout=True
- )
+ ref = repo.add_submodule("othersubdir", "file://" + other_subrepo.repo, checkout=True)
# Write out our test target
element = {
@@ -297,10 +280,7 @@ def test_submodule_fetch_submodule_individual_checkout_explicit(cli, tmpdir, dat
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Assert we checked out files at their expected location
@@ -335,10 +315,7 @@ def test_submodule_fetch_project_override(cli, tmpdir, datafiles):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Assert we checked out both files at their expected location
@@ -674,9 +651,7 @@ def test_track_invalid_submodule(cli, tmpdir, datafiles, fail):
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
@pytest.mark.parametrize("ref_format", ["sha1", "git-describe"])
-@pytest.mark.parametrize(
- "tag,extra_commit", [(False, False), (True, False), (True, True)]
-)
+@pytest.mark.parametrize("tag,extra_commit", [(False, False), (True, False), (True, True)])
def test_track_fetch(cli, tmpdir, datafiles, ref_format, tag, extra_commit):
project = str(datafiles)
@@ -781,9 +756,7 @@ def test_git_describe(cli, tmpdir, datafiles, ref_storage, tag_type):
result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
else:
- result = cli.run(
- project=project, args=["source", "track", "target.bst", "--deps", "all"]
- )
+ result = cli.run(project=project, args=["source", "track", "target.bst", "--deps", "all"])
result.assert_success()
if ref_storage == "inline":
@@ -805,31 +778,22 @@ def test_git_describe(cli, tmpdir, datafiles, ref_storage, tag_type):
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkout],)
result.assert_success()
if tag_type == "annotated":
options = []
else:
options = ["--tags"]
- describe = subprocess.check_output(
- ["git", "describe", *options], cwd=checkout, universal_newlines=True
- )
+ describe = subprocess.check_output(["git", "describe", *options], cwd=checkout, universal_newlines=True)
assert describe.startswith("tag2-2-")
describe_fp = subprocess.check_output(
- ["git", "describe", "--first-parent", *options],
- cwd=checkout,
- universal_newlines=True,
+ ["git", "describe", "--first-parent", *options], cwd=checkout, universal_newlines=True,
)
assert describe_fp.startswith("tag1-2-")
- tags = subprocess.check_output(
- ["git", "tag"], cwd=checkout, universal_newlines=True
- )
+ tags = subprocess.check_output(["git", "tag"], cwd=checkout, universal_newlines=True)
tags = set(tags.splitlines())
assert tags == set(["tag1", "tag2"])
@@ -901,9 +865,7 @@ def test_git_describe_head_is_tagged(cli, tmpdir, datafiles, ref_storage, tag_ty
result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
else:
- result = cli.run(
- project=project, args=["source", "track", "target.bst", "--deps", "all"]
- )
+ result = cli.run(project=project, args=["source", "track", "target.bst", "--deps", "all"])
result.assert_success()
if ref_storage == "inline":
@@ -926,30 +888,21 @@ def test_git_describe_head_is_tagged(cli, tmpdir, datafiles, ref_storage, tag_ty
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkout],)
result.assert_success()
if tag_type == "annotated":
options = []
else:
options = ["--tags"]
- describe = subprocess.check_output(
- ["git", "describe", *options], cwd=checkout, universal_newlines=True
- )
+ describe = subprocess.check_output(["git", "describe", *options], cwd=checkout, universal_newlines=True)
assert describe.startswith("tag")
- tags = subprocess.check_output(
- ["git", "tag"], cwd=checkout, universal_newlines=True
- )
+ tags = subprocess.check_output(["git", "tag"], cwd=checkout, universal_newlines=True)
tags = set(tags.splitlines())
assert tags == set(["tag"])
- rev_list = subprocess.check_output(
- ["git", "rev-list", "--all"], cwd=checkout, universal_newlines=True
- )
+ rev_list = subprocess.check_output(["git", "rev-list", "--all"], cwd=checkout, universal_newlines=True)
assert set(rev_list.splitlines()) == set([tagged_ref])
@@ -1013,29 +966,20 @@ def test_git_describe_relevant_history(cli, tmpdir, datafiles):
element_path = os.path.join(project, "target.bst")
_yaml.roundtrip_dump(element, element_path)
- result = cli.run(
- project=project, args=["source", "track", "target.bst", "--deps", "all"]
- )
+ result = cli.run(project=project, args=["source", "track", "target.bst", "--deps", "all"])
result.assert_success()
checkout = os.path.join(str(tmpdir), "checkout")
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkout],)
result.assert_success()
- describe = subprocess.check_output(
- ["git", "describe"], cwd=checkout, universal_newlines=True
- )
+ describe = subprocess.check_output(["git", "describe"], cwd=checkout, universal_newlines=True)
assert describe.startswith("tag1-2-")
- rev_list = subprocess.check_output(
- ["git", "rev-list", "--all"], cwd=checkout, universal_newlines=True
- )
+ rev_list = subprocess.check_output(["git", "rev-list", "--all"], cwd=checkout, universal_newlines=True)
assert set(rev_list.splitlines()) == set([head, tagged_ref, branch_boundary])
@@ -1102,9 +1046,7 @@ def test_overwrite_rogue_tag_multiple_remotes(cli, tmpdir, datafiles):
repodir, reponame = os.path.split(repo.repo)
project_config = _yaml.load(os.path.join(project, "project.conf"))
project_config["aliases"] = Node.from_dict({"repo": "http://example.com/"})
- project_config["mirrors"] = [
- {"name": "middle-earth", "aliases": {"repo": ["file://{}/".format(repodir)]}}
- ]
+ project_config["mirrors"] = [{"name": "middle-earth", "aliases": {"repo": ["file://{}/".format(repodir)]}}]
_yaml.roundtrip_dump(project_config, os.path.join(project, "project.conf"))
repo.add_annotated_tag("tag", "tag")
diff --git a/tests/sources/local.py b/tests/sources/local.py
index 08c508bfe..da68f1f75 100644
--- a/tests/sources/local.py
+++ b/tests/sources/local.py
@@ -37,9 +37,7 @@ def test_non_regular_file_or_directory(cli, datafiles):
elif os.path.isfile(localfile) and not os.path.islink(localfile):
result.assert_success()
else:
- result.assert_main_error(
- ErrorDomain.LOAD, LoadErrorReason.PROJ_PATH_INVALID_KIND
- )
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.PROJ_PATH_INVALID_KIND)
@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
@@ -75,10 +73,7 @@ def test_stage_file(cli, tmpdir, datafiles):
# Build, checkout
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the checkout contains the expected file
@@ -93,10 +88,7 @@ def test_stage_directory(cli, tmpdir, datafiles):
# Build, checkout
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the checkout contains the expected file and directory and other file
@@ -121,10 +113,7 @@ def test_stage_symlink(cli, tmpdir, datafiles):
# Build, checkout
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the checkout contains the expected file and directory and other file
@@ -154,25 +143,18 @@ def test_stage_directory_symlink(cli, tmpdir, datafiles):
# Build, checkout
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the checkout contains the expected directory and directory symlink
assert os.path.exists(os.path.join(checkoutdir, "subdir", "anotherfile.txt"))
- assert os.path.exists(
- os.path.join(checkoutdir, "symlink-to-subdir", "anotherfile.txt")
- )
+ assert os.path.exists(os.path.join(checkoutdir, "symlink-to-subdir", "anotherfile.txt"))
assert os.path.islink(os.path.join(checkoutdir, "symlink-to-subdir"))
@pytest.mark.integration
@pytest.mark.datafiles(os.path.join(DATA_DIR, "deterministic-umask"))
-@pytest.mark.skipif(
- not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
-)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
def test_deterministic_source_umask(cli, tmpdir, datafiles):
def create_test_file(*path, mode=0o644, content="content\n"):
path = os.path.join(*path)
diff --git a/tests/sources/patch.py b/tests/sources/patch.py
index da93684db..64d082797 100644
--- a/tests/sources/patch.py
+++ b/tests/sources/patch.py
@@ -33,9 +33,7 @@ def test_non_regular_file_patch(cli, datafiles):
if os.path.isfile(patch_path) and not os.path.islink(patch_path):
result.assert_success()
else:
- result.assert_main_error(
- ErrorDomain.LOAD, LoadErrorReason.PROJ_PATH_INVALID_KIND
- )
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.PROJ_PATH_INVALID_KIND)
@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
@@ -70,10 +68,7 @@ def test_stage_and_patch(cli, tmpdir, datafiles):
# Build, checkout
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Test the file.txt was patched and changed
@@ -109,10 +104,7 @@ def test_stage_separate_patch_dir(cli, tmpdir, datafiles):
# Track, fetch, build, checkout
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Test the file.txt was patched and changed
@@ -128,10 +120,7 @@ def test_stage_multiple_patches(cli, tmpdir, datafiles):
# Track, fetch, build, checkout
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Test the file.txt was patched and changed
@@ -147,10 +136,7 @@ def test_patch_strip_level(cli, tmpdir, datafiles):
# Track, fetch, build, checkout
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Test the file.txt was patched and changed
diff --git a/tests/sources/previous_source_access.py b/tests/sources/previous_source_access.py
index 3ff91b5fd..c42a9a6fb 100644
--- a/tests/sources/previous_source_access.py
+++ b/tests/sources/previous_source_access.py
@@ -7,9 +7,7 @@ import pytest
from buildstream import _yaml
from buildstream.testing import cli # pylint: disable=unused-import
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)), "previous_source_access"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "previous_source_access")
##################################################################
@@ -38,10 +36,7 @@ def test_custom_transform_source(cli, datafiles):
# Ensure we get correct output from foo_transform
cli.run(project=project, args=["build", "target.bst"])
destpath = os.path.join(cli.directory, "checkout")
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", destpath],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", destpath],)
result.assert_success()
# Assert that files from both sources exist, and that they have
# the same content
diff --git a/tests/sources/previous_source_access/plugins/sources/foo_transform.py b/tests/sources/previous_source_access/plugins/sources/foo_transform.py
index d59eaeca7..9e6ef3ad4 100644
--- a/tests/sources/previous_source_access/plugins/sources/foo_transform.py
+++ b/tests/sources/previous_source_access/plugins/sources/foo_transform.py
@@ -80,8 +80,7 @@ class FooTransformSource(Source):
def stage(self, directory):
# Simply stage the "filetransform" file
utils.safe_copy(
- os.path.join(self.mirror, "filetransform"),
- os.path.join(directory, "filetransform"),
+ os.path.join(self.mirror, "filetransform"), os.path.join(directory, "filetransform"),
)
diff --git a/tests/sources/remote.py b/tests/sources/remote.py
index 685f6bfba..a02601215 100644
--- a/tests/sources/remote.py
+++ b/tests/sources/remote.py
@@ -15,16 +15,12 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "remote",)
def generate_project(project_dir, tmpdir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.roundtrip_dump(
- {"name": "foo", "aliases": {"tmpdir": "file:///" + str(tmpdir)}}, project_file
- )
+ _yaml.roundtrip_dump({"name": "foo", "aliases": {"tmpdir": "file:///" + str(tmpdir)}}, project_file)
def generate_project_file_server(server, project_dir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.roundtrip_dump(
- {"name": "foo", "aliases": {"tmpdir": server.base_url()}}, project_file
- )
+ _yaml.roundtrip_dump({"name": "foo", "aliases": {"tmpdir": server.base_url()}}, project_file)
# Test that without ref, consistency is set appropriately.
@@ -74,10 +70,7 @@ def test_simple_file_build(cli, tmpdir, datafiles):
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Note that the url of the file in target.bst is actually /dir/file
# but this test confirms we take the basename
@@ -104,10 +97,7 @@ def test_simple_file_custom_name_build(cli, tmpdir, datafiles):
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
assert not os.path.exists(os.path.join(checkoutdir, "file"))
assert os.path.exists(os.path.join(checkoutdir, "custom-file"))
@@ -120,9 +110,7 @@ def test_unique_key(cli, tmpdir, datafiles):
"""
project = str(datafiles)
generate_project(project, tmpdir)
- states = cli.get_element_states(
- project, ["target.bst", "target-custom.bst", "target-custom-executable.bst"]
- )
+ states = cli.get_element_states(project, ["target.bst", "target-custom.bst", "target-custom-executable.bst"])
assert states["target.bst"] == "fetch needed"
assert states["target-custom.bst"] == "fetch needed"
assert states["target-custom-executable.bst"] == "fetch needed"
@@ -131,9 +119,7 @@ def test_unique_key(cli, tmpdir, datafiles):
cli.run(project=project, args=["source", "fetch", "target.bst"])
# We should download the file only once
- states = cli.get_element_states(
- project, ["target.bst", "target-custom.bst", "target-custom-executable.bst"]
- )
+ states = cli.get_element_states(project, ["target.bst", "target-custom.bst", "target-custom-executable.bst"])
assert states["target.bst"] == "buildable"
assert states["target-custom.bst"] == "buildable"
assert states["target-custom-executable.bst"] == "buildable"
@@ -153,21 +139,12 @@ def test_executable(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
checkoutdir = os.path.join(str(tmpdir), "checkout")
- assert (
- cli.get_element_state(project, "target-custom-executable.bst") == "fetch needed"
- )
+ assert cli.get_element_state(project, "target-custom-executable.bst") == "fetch needed"
# Try to fetch it
cli.run(project=project, args=["build", "target-custom-executable.bst"])
cli.run(
- project=project,
- args=[
- "artifact",
- "checkout",
- "target-custom-executable.bst",
- "--directory",
- checkoutdir,
- ],
+ project=project, args=["artifact", "checkout", "target-custom-executable.bst", "--directory", checkoutdir,],
)
mode = os.stat(os.path.join(checkoutdir, "some-custom-file")).st_mode
assert mode & stat.S_IEXEC
@@ -200,10 +177,7 @@ def test_use_netrc(cli, datafiles, server_type, tmpdir):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
checkout_file = os.path.join(checkoutdir, "file")
diff --git a/tests/sources/tar.py b/tests/sources/tar.py
index cdd2328fd..ab493f5df 100644
--- a/tests/sources/tar.py
+++ b/tests/sources/tar.py
@@ -42,9 +42,7 @@ def _assemble_tar_lz(workingdir, srcdir, dstfile):
def generate_project(project_dir, tmpdir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.roundtrip_dump(
- {"name": "foo", "aliases": {"tmpdir": "file:///" + str(tmpdir)}}, project_file
- )
+ _yaml.roundtrip_dump({"name": "foo", "aliases": {"tmpdir": "file:///" + str(tmpdir)}}, project_file)
def generate_project_file_server(base_url, project_dir):
@@ -124,10 +122,7 @@ def test_stage_default_basedir(cli, tmpdir, datafiles, srcdir):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the content of the first directory is checked out (base-dir: '*')
@@ -156,10 +151,7 @@ def test_stage_no_basedir(cli, tmpdir, datafiles, srcdir):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the full content of the tarball is checked out (base-dir: '')
@@ -188,10 +180,7 @@ def test_stage_explicit_basedir(cli, tmpdir, datafiles, srcdir):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the content of the first directory is checked out (base-dir: '*')
@@ -214,15 +203,10 @@ def test_stage_contains_links(cli, tmpdir, datafiles):
# Create a hardlink, we won't trust git to store that info for us
os.makedirs(
- os.path.join(str(datafiles), "content", "base-directory", "subdir2"),
- exist_ok=True,
- )
- file1 = os.path.join(
- str(datafiles), "content", "base-directory", "subdir1", "file.txt"
- )
- file2 = os.path.join(
- str(datafiles), "content", "base-directory", "subdir2", "file.txt"
+ os.path.join(str(datafiles), "content", "base-directory", "subdir2"), exist_ok=True,
)
+ file1 = os.path.join(str(datafiles), "content", "base-directory", "subdir1", "file.txt")
+ file2 = os.path.join(str(datafiles), "content", "base-directory", "subdir2", "file.txt")
os.link(file1, file2)
_assemble_tar(os.path.join(str(datafiles), "content"), "base-directory", src_tar)
@@ -234,10 +218,7 @@ def test_stage_contains_links(cli, tmpdir, datafiles):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the content of the first directory is checked out (base-dir: '*')
@@ -266,10 +247,7 @@ def test_stage_default_basedir_lzip(cli, tmpdir, datafiles, srcdir):
result.assert_success()
result = cli.run(project=project, args=["build", "target-lz.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target-lz.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target-lz.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the content of the first directory is checked out (base-dir: '*')
@@ -297,12 +275,7 @@ def test_read_only_dir(cli, tmpdir, datafiles, tar_name, base_dir):
{
"kind": "import",
"sources": [
- {
- "kind": "tar",
- "url": "tmpdir:/{}".format(tar_file),
- "ref": "foo",
- "base-dir": base_dir,
- }
+ {"kind": "tar", "url": "tmpdir:/{}".format(tar_file), "ref": "foo", "base-dir": base_dir,}
],
},
bst_path,
@@ -326,13 +299,9 @@ def test_read_only_dir(cli, tmpdir, datafiles, tar_name, base_dir):
env = {"TMP": tmpdir_str}
# Track, fetch, build, checkout
- result = cli.run(
- project=project, args=["source", "track", "target.bst"], env=env
- )
+ result = cli.run(project=project, args=["source", "track", "target.bst"], env=env)
result.assert_success()
- result = cli.run(
- project=project, args=["source", "fetch", "target.bst"], env=env
- )
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"], env=env)
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"], env=env)
result.assert_success()
@@ -382,10 +351,7 @@ def test_use_netrc(cli, datafiles, server_type, tmpdir):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
original_dir = os.path.join(str(datafiles), "content", "a")
@@ -413,9 +379,7 @@ def test_netrc_already_specified_user(cli, datafiles, server_type, tmpdir):
with create_file_server(server_type) as server:
server.add_user("otheruser", "12345", file_server_files)
parts = urllib.parse.urlsplit(server.base_url())
- base_url = urllib.parse.urlunsplit(
- [parts[0], "otheruser@{}".format(parts[1]), *parts[2:]]
- )
+ base_url = urllib.parse.urlunsplit([parts[0], "otheruser@{}".format(parts[1]), *parts[2:]])
generate_project_file_server(base_url, project)
src_tar = os.path.join(file_server_files, "a.tar.gz")
@@ -440,9 +404,7 @@ def test_homeless_environment(cli, tmpdir, datafiles):
_assemble_tar(os.path.join(str(datafiles), "content"), "a", src_tar)
# Use a track, make sure the plugin tries to find a ~/.netrc
- result = cli.run(
- project=project, args=["source", "track", "target.bst"], env={"HOME": None}
- )
+ result = cli.run(project=project, args=["source", "track", "target.bst"], env={"HOME": None})
result.assert_success()
@@ -472,9 +434,7 @@ def test_out_of_basedir_hardlinks(cli, tmpdir, datafiles):
# attributes set
with tarfile.open(src_tar, "r:gz") as tar:
assert any(
- member.islnk()
- and member.path == "contents/to_extract/a"
- and member.linkname == "contents/elsewhere/a"
+ member.islnk() and member.path == "contents/to_extract/a" and member.linkname == "contents/elsewhere/a"
for member in tar.getmembers()
)
@@ -485,10 +445,7 @@ def test_out_of_basedir_hardlinks(cli, tmpdir, datafiles):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
original_dir = os.path.join(str(datafiles), "contents", "to_extract")
diff --git a/tests/sources/zip.py b/tests/sources/zip.py
index d1b000167..0a5f6eed3 100644
--- a/tests/sources/zip.py
+++ b/tests/sources/zip.py
@@ -29,16 +29,12 @@ def _assemble_zip(workingdir, dstfile):
def generate_project(project_dir, tmpdir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.roundtrip_dump(
- {"name": "foo", "aliases": {"tmpdir": "file:///" + str(tmpdir)}}, project_file
- )
+ _yaml.roundtrip_dump({"name": "foo", "aliases": {"tmpdir": "file:///" + str(tmpdir)}}, project_file)
def generate_project_file_server(server, project_dir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.roundtrip_dump(
- {"name": "foo", "aliases": {"tmpdir": server.base_url()}}, project_file
- )
+ _yaml.roundtrip_dump({"name": "foo", "aliases": {"tmpdir": server.base_url()}}, project_file)
# Test that without ref, consistency is set appropriately.
@@ -112,10 +108,7 @@ def test_stage_default_basedir(cli, tmpdir, datafiles):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the content of the first directory is checked out (base-dir: '*')
@@ -143,10 +136,7 @@ def test_stage_no_basedir(cli, tmpdir, datafiles):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the full content of the tarball is checked out (base-dir: '')
@@ -174,10 +164,7 @@ def test_stage_explicit_basedir(cli, tmpdir, datafiles):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
# Check that the content of the first directory is checked out (base-dir: '*')
@@ -219,10 +206,7 @@ def test_use_netrc(cli, datafiles, server_type, tmpdir):
result.assert_success()
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],)
result.assert_success()
original_dir = os.path.join(str(datafiles), "content", "a")
diff --git a/tests/testutils/artifactshare.py b/tests/testutils/artifactshare.py
index 39cc7da10..2038bfedd 100644
--- a/tests/testutils/artifactshare.py
+++ b/tests/testutils/artifactshare.py
@@ -80,12 +80,7 @@ class ArtifactShare:
cleanup_on_sigterm()
server = stack.enter_context(
- create_server(
- self.repodir,
- quota=self.quota,
- enable_push=True,
- index_only=self.index_only,
- )
+ create_server(self.repodir, quota=self.quota, enable_push=True, index_only=self.index_only,)
)
port = server.add_insecure_port("localhost:0")
server.start()
@@ -133,9 +128,7 @@ class ArtifactShare:
reachable = set()
def reachable_dir(digest):
- self.cas._reachable_refs_dir(
- reachable, digest, update_mtime=False, check_exists=True
- )
+ self.cas._reachable_refs_dir(reachable, digest, update_mtime=False, check_exists=True)
try:
if str(artifact_proto.files):
@@ -219,13 +212,9 @@ statvfs_result = namedtuple("statvfs_result", "f_blocks f_bfree f_bsize f_bavail
# Assert that a given artifact is in the share
#
def assert_shared(cli, share, project, element_name, *, project_name="test"):
- if not share.get_artifact(
- cli.get_artifact_name(project, project_name, element_name)
- ):
+ if not share.get_artifact(cli.get_artifact_name(project, project_name, element_name)):
raise AssertionError(
- "Artifact share at {} does not contain the expected element {}".format(
- share.repo, element_name
- )
+ "Artifact share at {} does not contain the expected element {}".format(share.repo, element_name)
)
@@ -234,7 +223,5 @@ def assert_shared(cli, share, project, element_name, *, project_name="test"):
def assert_not_shared(cli, share, project, element_name, *, project_name="test"):
if share.get_artifact(cli.get_artifact_name(project, project_name, element_name)):
raise AssertionError(
- "Artifact share at {} unexpectedly contains the element {}".format(
- share.repo, element_name
- )
+ "Artifact share at {} unexpectedly contains the element {}".format(share.repo, element_name)
)
diff --git a/tests/testutils/context.py b/tests/testutils/context.py
index 5d8294fd2..821adef0a 100644
--- a/tests/testutils/context.py
+++ b/tests/testutils/context.py
@@ -55,9 +55,7 @@ class _DummyTask:
@contextmanager
-def _get_dummy_task(
- self, activity_name, *, element_name=None, full_name=None, silent_nested=False
-):
+def _get_dummy_task(self, activity_name, *, element_name=None, full_name=None, silent_nested=False):
yield _DummyTask("state", activity_name, full_name, 0)
diff --git a/tests/testutils/http_server.py b/tests/testutils/http_server.py
index f333f28b2..8591159f8 100644
--- a/tests/testutils/http_server.py
+++ b/tests/testutils/http_server.py
@@ -44,9 +44,7 @@ class RequestHandler(SimpleHTTPRequestHandler):
body = content.encode("UTF-8", "replace")
self.send_header("Content-Type", self.error_content_type)
self.send_header("Content-Length", str(len(body)))
- self.send_header(
- "WWW-Authenticate", 'Basic realm="{}"'.format(self.server.realm)
- )
+ self.send_header("WWW-Authenticate", 'Basic realm="{}"'.format(self.server.realm))
self.end_headers()
diff --git a/tests/testutils/patch.py b/tests/testutils/patch.py
index 6dec68ca9..85b38def8 100644
--- a/tests/testutils/patch.py
+++ b/tests/testutils/patch.py
@@ -5,9 +5,7 @@ def apply(file, patch):
try:
subprocess.check_output(["patch", file, patch])
except subprocess.CalledProcessError as e:
- message = "Patch failed with exit code {}\n Output:\n {}".format(
- e.returncode, e.output
- )
+ message = "Patch failed with exit code {}\n Output:\n {}".format(e.returncode, e.output)
print(message)
raise
@@ -16,8 +14,6 @@ def remove(file, patch):
try:
subprocess.check_output(["patch", "--reverse", file, patch])
except subprocess.CalledProcessError as e:
- message = "patch --reverse failed with exit code {}\n Output:\n {}".format(
- e.returncode, e.output
- )
+ message = "patch --reverse failed with exit code {}\n Output:\n {}".format(e.returncode, e.output)
print(message)
raise
diff --git a/tests/testutils/python_repo.py b/tests/testutils/python_repo.py
index 13e9f6209..7d9ae4e47 100644
--- a/tests/testutils/python_repo.py
+++ b/tests/testutils/python_repo.py
@@ -76,11 +76,7 @@ def generate_pip_package(tmpdir, pypi, name, version="0.1", dependencies=None):
setup_file = os.path.join(tmpdir, "setup.py")
pkgdirname = re.sub("[^0-9a-zA-Z]+", "", name)
with open(setup_file, "w") as f:
- f.write(
- SETUP_TEMPLATE.format(
- name=name, version=version, pkgdirname=pkgdirname, pkgdeps=dependencies
- )
- )
+ f.write(SETUP_TEMPLATE.format(name=name, version=version, pkgdirname=pkgdirname, pkgdeps=dependencies))
os.chmod(setup_file, 0o755)
package = os.path.join(tmpdir, pkgdirname)
@@ -128,9 +124,7 @@ def setup_pypi_repo(tmpdir):
def add_packages(packages, pypi_repo):
for package, dependencies in packages.items():
pkgdir = create_pkgdir(package)
- generate_pip_package(
- pkgdir, pypi_repo, package, dependencies=list(dependencies.keys())
- )
+ generate_pip_package(pkgdir, pypi_repo, package, dependencies=list(dependencies.keys()))
for dependency, dependency_dependencies in dependencies.items():
add_packages({dependency: dependency_dependencies}, pypi_repo)
diff --git a/tests/testutils/repo/bzr.py b/tests/testutils/repo/bzr.py
index 246a3eb35..f5d8653b6 100644
--- a/tests/testutils/repo/bzr.py
+++ b/tests/testutils/repo/bzr.py
@@ -29,9 +29,7 @@ class Bzr(Repo):
self.copy_directory(directory, branch_dir)
subprocess.call([self.bzr, "add", "."], env=self.env, cwd=branch_dir)
subprocess.call(
- [self.bzr, "commit", '--message="Initial commit"'],
- env=self.env,
- cwd=branch_dir,
+ [self.bzr, "commit", '--message="Initial commit"'], env=self.env, cwd=branch_dir,
)
return self.latest_commit()
@@ -45,13 +43,7 @@ class Bzr(Repo):
def latest_commit(self):
return subprocess.check_output(
- [
- self.bzr,
- "version-info",
- "--custom",
- "--template={revno}",
- os.path.join(self.repo, "trunk"),
- ],
+ [self.bzr, "version-info", "--custom", "--template={revno}", os.path.join(self.repo, "trunk"),],
env=self.env,
universal_newlines=True,
).strip()
diff --git a/tests/testutils/repo/git.py b/tests/testutils/repo/git.py
index 19ab91601..b9360e9cd 100644
--- a/tests/testutils/repo/git.py
+++ b/tests/testutils/repo/git.py
@@ -54,9 +54,7 @@ class Git(Repo):
def modify_file(self, new_file, path):
shutil.copy(new_file, os.path.join(self.repo, path))
- self._run_git(
- "commit", path, "-m", "Modified {}".format(os.path.basename(path))
- )
+ self._run_git("commit", path, "-m", "Modified {}".format(os.path.basename(path)))
return self.latest_commit()
def add_submodule(self, subdir, url=None, checkout=None):
@@ -92,9 +90,7 @@ class Git(Repo):
return config
def latest_commit(self):
- return self._run_git(
- "rev-parse", "HEAD", stdout=subprocess.PIPE, universal_newlines=True,
- ).stdout.strip()
+ return self._run_git("rev-parse", "HEAD", stdout=subprocess.PIPE, universal_newlines=True,).stdout.strip()
def branch(self, branch_name):
self._run_git("checkout", "-b", branch_name)
@@ -110,6 +106,4 @@ class Git(Repo):
return self.latest_commit()
def rev_parse(self, rev):
- return self._run_git(
- "rev-parse", rev, stdout=subprocess.PIPE, universal_newlines=True,
- ).stdout.strip()
+ return self._run_git("rev-parse", rev, stdout=subprocess.PIPE, universal_newlines=True,).stdout.strip()
diff --git a/tests/testutils/setuptools.py b/tests/testutils/setuptools.py
index 119979da6..0f7f30f91 100644
--- a/tests/testutils/setuptools.py
+++ b/tests/testutils/setuptools.py
@@ -10,9 +10,7 @@ class MockDist:
self.module_name = module_name
def get_resource_filename(self, *_args, **_kwargs):
- return os.path.join(
- self.datafiles.dirname, self.datafiles.basename, self.module_name
- )
+ return os.path.join(self.datafiles.dirname, self.datafiles.basename, self.module_name)
# A mock setuptools entry object.
diff --git a/tox.ini b/tox.ini
index a9e76bcf8..7eaff0d05 100644
--- a/tox.ini
+++ b/tox.ini
@@ -107,7 +107,7 @@ skip_install = True
deps =
black
commands =
- black {posargs: src tests}
+ black --line-length 119 {posargs: src tests}
#
# Running linters
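Note: the tox.ini hunk above is what keeps the formatter in agreement with the reformatted sources; the format environment now invokes black with an explicit 119-character line length instead of black's default of 88. As a minimal sketch of how the same width could also be pinned for direct invocations outside tox (this is an assumption for illustration only, not part of this commit; the project may not carry a [tool.black] section), black can read the setting from pyproject.toml:

    # Hypothetical pyproject.toml snippet -- not part of this commit.
    # It mirrors the --line-length 119 flag added to tox.ini above,
    # so that a bare "black src tests" produces the same layout as "tox -e format".
    [tool.black]
    line-length = 119

With such a section in place, contributors running black directly would get the same 119-column formatting that this commit applies across src/ and tests/.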