summaryrefslogtreecommitdiff
path: root/src/buildstream/plugins
diff options
context:
space:
mode:
Diffstat (limited to 'src/buildstream/plugins')
-rw-r--r--src/buildstream/plugins/elements/autotools.py3
-rw-r--r--src/buildstream/plugins/elements/compose.py37
-rw-r--r--src/buildstream/plugins/elements/filter.py59
-rw-r--r--src/buildstream/plugins/elements/import.py28
-rw-r--r--src/buildstream/plugins/elements/junction.py10
-rw-r--r--src/buildstream/plugins/elements/manual.py3
-rw-r--r--src/buildstream/plugins/elements/pip.py3
-rw-r--r--src/buildstream/plugins/elements/script.py12
-rw-r--r--src/buildstream/plugins/elements/stack.py4
-rw-r--r--src/buildstream/plugins/sources/_downloadablefilesource.py61
-rw-r--r--src/buildstream/plugins/sources/bzr.py109
-rw-r--r--src/buildstream/plugins/sources/deb.py6
-rw-r--r--src/buildstream/plugins/sources/local.py8
-rw-r--r--src/buildstream/plugins/sources/patch.py12
-rw-r--r--src/buildstream/plugins/sources/pip.py106
-rw-r--r--src/buildstream/plugins/sources/remote.py11
-rw-r--r--src/buildstream/plugins/sources/tar.py45
-rw-r--r--src/buildstream/plugins/sources/workspace.py12
-rw-r--r--src/buildstream/plugins/sources/zip.py14
19 files changed, 282 insertions(+), 261 deletions(-)
diff --git a/src/buildstream/plugins/elements/autotools.py b/src/buildstream/plugins/elements/autotools.py
index 7a05336b7..089c9bca0 100644
--- a/src/buildstream/plugins/elements/autotools.py
+++ b/src/buildstream/plugins/elements/autotools.py
@@ -66,8 +66,7 @@ class AutotoolsElement(BuildElement):
# Enable command batching across prepare() and assemble()
def configure_sandbox(self, sandbox):
super().configure_sandbox(sandbox)
- self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
- collect=self.get_variable('install-root'))
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root"))
# Plugin entry point
diff --git a/src/buildstream/plugins/elements/compose.py b/src/buildstream/plugins/elements/compose.py
index 511925731..063c5d44f 100644
--- a/src/buildstream/plugins/elements/compose.py
+++ b/src/buildstream/plugins/elements/compose.py
@@ -62,27 +62,23 @@ class ComposeElement(Element):
BST_RUN_COMMANDS = False
def configure(self, node):
- node.validate_keys([
- 'integrate', 'include', 'exclude', 'include-orphans'
- ])
+ node.validate_keys(["integrate", "include", "exclude", "include-orphans"])
# We name this variable 'integration' only to avoid
# collision with the Element.integrate() method.
- self.integration = node.get_bool('integrate')
- self.include = node.get_str_list('include')
- self.exclude = node.get_str_list('exclude')
- self.include_orphans = node.get_bool('include-orphans')
+ self.integration = node.get_bool("integrate")
+ self.include = node.get_str_list("include")
+ self.exclude = node.get_str_list("exclude")
+ self.include_orphans = node.get_bool("include-orphans")
def preflight(self):
pass
def get_unique_key(self):
- key = {'integrate': self.integration,
- 'include': sorted(self.include),
- 'orphans': self.include_orphans}
+ key = {"integrate": self.integration, "include": sorted(self.include), "orphans": self.include_orphans}
if self.exclude:
- key['exclude'] = sorted(self.exclude)
+ key["exclude"] = sorted(self.exclude)
return key
@@ -104,9 +100,9 @@ class ComposeElement(Element):
if require_split:
with self.timed_activity("Computing split", silent_nested=True):
for dep in self.dependencies(Scope.BUILD):
- files = dep.compute_manifest(include=self.include,
- exclude=self.exclude,
- orphans=self.include_orphans)
+ files = dep.compute_manifest(
+ include=self.include, exclude=self.exclude, orphans=self.include_orphans
+ )
manifest.update(files)
# Make a snapshot of all the files.
@@ -141,13 +137,16 @@ class ComposeElement(Element):
for path in basedir_contents:
if path not in snapshot:
added_files.add(path)
- self.info("Integration modified {}, added {} and removed {} files"
- .format(len(modified_files), len(added_files), len(removed_files)))
+ self.info(
+ "Integration modified {}, added {} and removed {} files".format(
+ len(modified_files), len(added_files), len(removed_files)
+ )
+ )
# The remainder of this is expensive, make an early exit if
# we're not being selective about what is to be included.
if not require_split:
- return '/'
+ return "/"
# Do we want to force include files which were modified by
# the integration commands, even if they were not added ?
@@ -159,7 +158,7 @@ class ComposeElement(Element):
# instead of into a subdir. The element assemble() method should
# support this in some way.
#
- installdir = vbasedir.descend('buildstream', 'install', create=True)
+ installdir = vbasedir.descend("buildstream", "install", create=True)
# We already saved the manifest for created files in the integration phase,
# now collect the rest of the manifest.
@@ -189,7 +188,7 @@ class ComposeElement(Element):
installdir.import_files(vbasedir, filter_callback=import_filter, can_link=True)
# And we're done
- return os.path.join(os.sep, 'buildstream', 'install')
+ return os.path.join(os.sep, "buildstream", "install")
# Plugin entry point
diff --git a/src/buildstream/plugins/elements/filter.py b/src/buildstream/plugins/elements/filter.py
index d808c9e5a..71ed1f6cb 100644
--- a/src/buildstream/plugins/elements/filter.py
+++ b/src/buildstream/plugins/elements/filter.py
@@ -167,17 +167,15 @@ class FilterElement(Element):
BST_RUN_COMMANDS = False
def configure(self, node):
- node.validate_keys([
- 'include', 'exclude', 'include-orphans', 'pass-integration'
- ])
+ node.validate_keys(["include", "exclude", "include-orphans", "pass-integration"])
- self.include_node = node.get_sequence('include')
- self.exclude_node = node.get_sequence('exclude')
+ self.include_node = node.get_sequence("include")
+ self.exclude_node = node.get_sequence("exclude")
self.include = self.include_node.as_str_list()
self.exclude = self.exclude_node.as_str_list()
- self.include_orphans = node.get_bool('include-orphans')
- self.pass_integration = node.get_bool('pass-integration', False)
+ self.include_orphans = node.get_bool("include-orphans")
+ self.pass_integration = node.get_bool("pass-integration", False)
def preflight(self):
# Exactly one build-depend is permitted
@@ -186,9 +184,13 @@ class FilterElement(Element):
detail = "Full list of build-depends:\n"
deps_list = " \n".join([x.name for x in build_deps])
detail += deps_list
- raise ElementError("{}: {} element must have exactly 1 build-dependency, actually have {}"
- .format(self, type(self).__name__, len(build_deps)),
- detail=detail, reason="filter-bdepend-wrong-count")
+ raise ElementError(
+ "{}: {} element must have exactly 1 build-dependency, actually have {}".format(
+ self, type(self).__name__, len(build_deps)
+ ),
+ detail=detail,
+ reason="filter-bdepend-wrong-count",
+ )
# That build-depend must not also be a runtime-depend
runtime_deps = list(self.dependencies(Scope.RUN, recurse=False))
@@ -196,23 +198,29 @@ class FilterElement(Element):
detail = "Full list of runtime depends:\n"
deps_list = " \n".join([x.name for x in runtime_deps])
detail += deps_list
- raise ElementError("{}: {} element's build dependency must not also be a runtime dependency"
- .format(self, type(self).__name__),
- detail=detail, reason="filter-bdepend-also-rdepend")
+ raise ElementError(
+ "{}: {} element's build dependency must not also be a runtime dependency".format(
+ self, type(self).__name__
+ ),
+ detail=detail,
+ reason="filter-bdepend-also-rdepend",
+ )
# If a parent does not produce an artifact, fail and inform user that the dependency
# must produce artifacts
if not build_deps[0].BST_ELEMENT_HAS_ARTIFACT:
detail = "{} does not produce an artifact, so there is nothing to filter".format(build_deps[0].name)
- raise ElementError("{}: {} element's build dependency must produce an artifact"
- .format(self, type(self).__name__),
- detail=detail, reason="filter-bdepend-no-artifact")
+ raise ElementError(
+ "{}: {} element's build dependency must produce an artifact".format(self, type(self).__name__),
+ detail=detail,
+ reason="filter-bdepend-no-artifact",
+ )
def get_unique_key(self):
key = {
- 'include': sorted(self.include),
- 'exclude': sorted(self.exclude),
- 'orphans': self.include_orphans,
+ "include": sorted(self.include),
+ "exclude": sorted(self.exclude),
+ "orphans": self.include_orphans,
}
return key
@@ -226,8 +234,8 @@ class FilterElement(Element):
with self.timed_activity("Staging artifact", silent_nested=True):
for dep in self.dependencies(Scope.BUILD, recurse=False):
# Check that all the included/excluded domains exist
- pub_data = dep.get_public_data('bst')
- split_rules = pub_data.get_mapping('split-rules', {})
+ pub_data = dep.get_public_data("bst")
+ split_rules = pub_data.get_mapping("split-rules", {})
unfound_includes = []
for domain in self.include:
if domain not in split_rules:
@@ -240,18 +248,17 @@ class FilterElement(Element):
detail = []
if unfound_includes:
detail.append("Unknown domains were used in {}".format(self.include_node.get_provenance()))
- detail.extend([' - {}'.format(domain) for domain in unfound_includes])
+ detail.extend([" - {}".format(domain) for domain in unfound_includes])
if unfound_excludes:
detail.append("Unknown domains were used in {}".format(self.exclude_node.get_provenance()))
- detail.extend([' - {}'.format(domain) for domain in unfound_excludes])
+ detail.extend([" - {}".format(domain) for domain in unfound_excludes])
if detail:
- detail = '\n'.join(detail)
+ detail = "\n".join(detail)
raise ElementError("Unknown domains declared.", detail=detail)
- dep.stage_artifact(sandbox, include=self.include,
- exclude=self.exclude, orphans=self.include_orphans)
+ dep.stage_artifact(sandbox, include=self.include, exclude=self.exclude, orphans=self.include_orphans)
return ""
def _get_source_element(self):
diff --git a/src/buildstream/plugins/elements/import.py b/src/buildstream/plugins/elements/import.py
index 404a0f4ee..2b68197a7 100644
--- a/src/buildstream/plugins/elements/import.py
+++ b/src/buildstream/plugins/elements/import.py
@@ -45,12 +45,10 @@ class ImportElement(Element):
BST_RUN_COMMANDS = False
def configure(self, node):
- node.validate_keys([
- 'source', 'target'
- ])
+ node.validate_keys(["source", "target"])
- self.source = self.node_subst_vars(node.get_scalar('source'))
- self.target = self.node_subst_vars(node.get_scalar('target'))
+ self.source = self.node_subst_vars(node.get_scalar("source"))
+ self.target = self.node_subst_vars(node.get_scalar("target"))
def preflight(self):
# Assert that we have at least one source to fetch.
@@ -60,10 +58,7 @@ class ImportElement(Element):
raise ElementError("{}: An import element must have at least one source.".format(self))
def get_unique_key(self):
- return {
- 'source': self.source,
- 'target': self.target
- }
+ return {"source": self.source, "target": self.target}
def configure_sandbox(self, sandbox):
pass
@@ -74,11 +69,11 @@ class ImportElement(Element):
def assemble(self, sandbox):
# Stage sources into the input directory
- self.stage_sources(sandbox, 'input')
+ self.stage_sources(sandbox, "input")
rootdir = sandbox.get_virtual_directory()
- inputdir = rootdir.descend('input')
- outputdir = rootdir.descend('output', create=True)
+ inputdir = rootdir.descend("input")
+ outputdir = rootdir.descend("output", create=True)
# The directory to grab
inputdir = inputdir.descend(*self.source.strip(os.sep).split(os.sep))
@@ -87,18 +82,17 @@ class ImportElement(Element):
outputdir = outputdir.descend(*self.target.strip(os.sep).split(os.sep), create=True)
if inputdir.is_empty():
- raise ElementError("{}: No files were found inside directory '{}'"
- .format(self, self.source))
+ raise ElementError("{}: No files were found inside directory '{}'".format(self, self.source))
# Move it over
outputdir.import_files(inputdir)
# And we're done
- return '/output'
+ return "/output"
def generate_script(self):
- build_root = self.get_variable('build-root')
- install_root = self.get_variable('install-root')
+ build_root = self.get_variable("build-root")
+ install_root = self.get_variable("install-root")
commands = []
# The directory to grab
diff --git a/src/buildstream/plugins/elements/junction.py b/src/buildstream/plugins/elements/junction.py
index aec32516b..42b9ef08e 100644
--- a/src/buildstream/plugins/elements/junction.py
+++ b/src/buildstream/plugins/elements/junction.py
@@ -187,13 +187,13 @@ class JunctionElement(Element):
BST_FORBID_RDEPENDS = True
def configure(self, node):
- self.path = node.get_str('path', default='')
- self.options = node.get_mapping('options', default={})
- self.target = node.get_str('target', default=None)
+ self.path = node.get_str("path", default="")
+ self.options = node.get_mapping("options", default={})
+ self.target = node.get_str("target", default=None)
self.target_element = None
self.target_junction = None
- self.cache_junction_elements = node.get_bool('cache-junction-elements', default=False)
- self.ignore_junction_remotes = node.get_bool('ignore-junction-remotes', default=False)
+ self.cache_junction_elements = node.get_bool("cache-junction-elements", default=False)
+ self.ignore_junction_remotes = node.get_bool("ignore-junction-remotes", default=False)
def preflight(self):
# "target" cannot be used in conjunction with:
diff --git a/src/buildstream/plugins/elements/manual.py b/src/buildstream/plugins/elements/manual.py
index bbda65312..97da41615 100644
--- a/src/buildstream/plugins/elements/manual.py
+++ b/src/buildstream/plugins/elements/manual.py
@@ -42,8 +42,7 @@ class ManualElement(BuildElement):
# Enable command batching across prepare() and assemble()
def configure_sandbox(self, sandbox):
super().configure_sandbox(sandbox)
- self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
- collect=self.get_variable('install-root'))
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root"))
# Plugin entry point
diff --git a/src/buildstream/plugins/elements/pip.py b/src/buildstream/plugins/elements/pip.py
index 4a9eefde1..93303748d 100644
--- a/src/buildstream/plugins/elements/pip.py
+++ b/src/buildstream/plugins/elements/pip.py
@@ -42,8 +42,7 @@ class PipElement(BuildElement):
# Enable command batching across prepare() and assemble()
def configure_sandbox(self, sandbox):
super().configure_sandbox(sandbox)
- self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
- collect=self.get_variable('install-root'))
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root"))
# Plugin entry point
diff --git a/src/buildstream/plugins/elements/script.py b/src/buildstream/plugins/elements/script.py
index f3f0a2f7a..abfb7b3b0 100644
--- a/src/buildstream/plugins/elements/script.py
+++ b/src/buildstream/plugins/elements/script.py
@@ -46,21 +46,19 @@ class ScriptElement(buildstream.ScriptElement):
BST_VIRTUAL_DIRECTORY = True
def configure(self, node):
- for n in node.get_sequence('layout', []):
- dst = self.node_subst_vars(n.get_scalar('destination'))
- elm = self.node_subst_vars(n.get_scalar('element', None))
+ for n in node.get_sequence("layout", []):
+ dst = self.node_subst_vars(n.get_scalar("destination"))
+ elm = self.node_subst_vars(n.get_scalar("element", None))
self.layout_add(elm, dst)
- node.validate_keys([
- 'commands', 'root-read-only', 'layout'
- ])
+ node.validate_keys(["commands", "root-read-only", "layout"])
cmds = self.node_subst_sequence_vars(node.get_sequence("commands"))
self.add_commands("commands", cmds)
self.set_work_dir()
self.set_install_root()
- self.set_root_read_only(node.get_bool('root-read-only', default=False))
+ self.set_root_read_only(node.get_bool("root-read-only", default=False))
# Plugin entry point
diff --git a/src/buildstream/plugins/elements/stack.py b/src/buildstream/plugins/elements/stack.py
index ae15af63a..f569199b6 100644
--- a/src/buildstream/plugins/elements/stack.py
+++ b/src/buildstream/plugins/elements/stack.py
@@ -64,10 +64,10 @@ class StackElement(Element):
# Just create a dummy empty artifact, its existence is a statement
# that all this stack's dependencies are built.
vrootdir = sandbox.get_virtual_directory()
- vrootdir.descend('output', create=True)
+ vrootdir.descend("output", create=True)
# And we're done
- return '/output'
+ return "/output"
# Plugin entry point
diff --git a/src/buildstream/plugins/sources/_downloadablefilesource.py b/src/buildstream/plugins/sources/_downloadablefilesource.py
index 1e759b94f..4e43ee3e3 100644
--- a/src/buildstream/plugins/sources/_downloadablefilesource.py
+++ b/src/buildstream/plugins/sources/_downloadablefilesource.py
@@ -12,7 +12,6 @@ from buildstream import utils
class _NetrcFTPOpener(urllib.request.FTPHandler):
-
def __init__(self, netrc_config):
self.netrc = netrc_config
@@ -28,11 +27,11 @@ class _NetrcFTPOpener(urllib.request.FTPHandler):
def _unsplit(self, host, port, user, passwd):
if port:
- host = '{}:{}'.format(host, port)
+ host = "{}:{}".format(host, port)
if user:
if passwd:
- user = '{}:{}'.format(user, passwd)
- host = '{}@{}'.format(user, host)
+ user = "{}:{}".format(user, passwd)
+ host = "{}@{}".format(user, host)
return host
@@ -50,7 +49,6 @@ class _NetrcFTPOpener(urllib.request.FTPHandler):
class _NetrcPasswordManager:
-
def __init__(self, netrc_config):
self.netrc = netrc_config
@@ -72,17 +70,16 @@ class _NetrcPasswordManager:
class DownloadableFileSource(Source):
# pylint: disable=attribute-defined-outside-init
- COMMON_CONFIG_KEYS = Source.COMMON_CONFIG_KEYS + ['url', 'ref', 'etag']
+ COMMON_CONFIG_KEYS = Source.COMMON_CONFIG_KEYS + ["url", "ref", "etag"]
__urlopener = None
__default_mirror_file = None
def configure(self, node):
- self.original_url = node.get_str('url')
- self.ref = node.get_str('ref', None)
+ self.original_url = node.get_str("url")
+ self.ref = node.get_str("ref", None)
self.url = self.translate_url(self.original_url)
- self._mirror_dir = os.path.join(self.get_mirror_directory(),
- utils.url_directory_name(self.original_url))
+ self._mirror_dir = os.path.join(self.get_mirror_directory(), utils.url_directory_name(self.original_url))
self._warn_deprecated_etag(node)
def preflight(self):
@@ -102,28 +99,29 @@ class DownloadableFileSource(Source):
return Consistency.RESOLVED
def load_ref(self, node):
- self.ref = node.get_str('ref', None)
+ self.ref = node.get_str("ref", None)
self._warn_deprecated_etag(node)
def get_ref(self):
return self.ref
def set_ref(self, ref, node):
- node['ref'] = self.ref = ref
+ node["ref"] = self.ref = ref
def track(self): # pylint: disable=arguments-differ
# there is no 'track' field in the source to determine what/whether
# or not to update refs, because tracking a ref is always a conscious
# decision by the user.
- with self.timed_activity("Tracking {}".format(self.url),
- silent_nested=True):
+ with self.timed_activity("Tracking {}".format(self.url), silent_nested=True):
new_ref = self._ensure_mirror()
if self.ref and self.ref != new_ref:
- detail = "When tracking, new ref differs from current ref:\n" \
- + " Tracked URL: {}\n".format(self.url) \
- + " Current ref: {}\n".format(self.ref) \
+ detail = (
+ "When tracking, new ref differs from current ref:\n"
+ + " Tracked URL: {}\n".format(self.url)
+ + " Current ref: {}\n".format(self.ref)
+ + " New ref: {}\n".format(new_ref)
+ )
self.warn("Potential man-in-the-middle attack!", detail=detail)
return new_ref
@@ -142,25 +140,26 @@ class DownloadableFileSource(Source):
with self.timed_activity("Fetching {}".format(self.url), silent_nested=True):
sha256 = self._ensure_mirror()
if sha256 != self.ref:
- raise SourceError("File downloaded from {} has sha256sum '{}', not '{}'!"
- .format(self.url, sha256, self.ref))
+ raise SourceError(
+ "File downloaded from {} has sha256sum '{}', not '{}'!".format(self.url, sha256, self.ref)
+ )
def _warn_deprecated_etag(self, node):
- etag = node.get_str('etag', None)
+ etag = node.get_str("etag", None)
if etag:
provenance = node.get_scalar(etag).get_provenance()
self.warn('{} "etag" is deprecated and ignored.'.format(provenance))
def _get_etag(self, ref):
- etagfilename = os.path.join(self._mirror_dir, '{}.etag'.format(ref))
+ etagfilename = os.path.join(self._mirror_dir, "{}.etag".format(ref))
if os.path.exists(etagfilename):
- with open(etagfilename, 'r') as etagfile:
+ with open(etagfilename, "r") as etagfile:
return etagfile.read()
return None
def _store_etag(self, ref, etag):
- etagfilename = os.path.join(self._mirror_dir, '{}.etag'.format(ref))
+ etagfilename = os.path.join(self._mirror_dir, "{}.etag".format(ref))
with utils.save_file_atomic(etagfilename) as etagfile:
etagfile.write(etag)
@@ -170,7 +169,7 @@ class DownloadableFileSource(Source):
with self.tempdir() as td:
default_name = os.path.basename(self.url)
request = urllib.request.Request(self.url)
- request.add_header('Accept', '*/*')
+ request.add_header("Accept", "*/*")
# We do not use etag in case what we have in cache is
# not matching ref in order to be able to recover from
@@ -180,18 +179,18 @@ class DownloadableFileSource(Source):
# Do not re-download the file if the ETag matches.
if etag and self.get_consistency() == Consistency.CACHED:
- request.add_header('If-None-Match', etag)
+ request.add_header("If-None-Match", etag)
opener = self.__get_urlopener()
with contextlib.closing(opener.open(request)) as response:
info = response.info()
- etag = info['ETag'] if 'ETag' in info else None
+ etag = info["ETag"] if "ETag" in info else None
filename = info.get_filename(default_name)
filename = os.path.basename(filename)
local_file = os.path.join(td, filename)
- with open(local_file, 'wb') as dest:
+ with open(local_file, "wb") as dest:
shutil.copyfileobj(response, dest)
# Make sure url-specific mirror dir exists.
@@ -214,14 +213,12 @@ class DownloadableFileSource(Source):
# Because we use etag only for matching ref, currently specified ref is what
# we would have downloaded.
return self.ref
- raise SourceError("{}: Error mirroring {}: {}"
- .format(self, self.url, e), temporary=True) from e
+ raise SourceError("{}: Error mirroring {}: {}".format(self, self.url, e), temporary=True) from e
except (urllib.error.URLError, urllib.error.ContentTooShortError, OSError, ValueError) as e:
# Note that urllib.request.Request in the try block may throw a
# ValueError for unknown url types, so we handle it here.
- raise SourceError("{}: Error mirroring {}: {}"
- .format(self, self.url, e), temporary=True) from e
+ raise SourceError("{}: Error mirroring {}: {}".format(self, self.url, e), temporary=True) from e
def _get_mirror_file(self, sha=None):
if sha is not None:
@@ -245,7 +242,7 @@ class DownloadableFileSource(Source):
#
DownloadableFileSource.__urlopener = urllib.request.build_opener()
except netrc.NetrcParseError as e:
- self.warn('{}: While reading .netrc: {}'.format(self, e))
+ self.warn("{}: While reading .netrc: {}".format(self, e))
return urllib.request.build_opener()
else:
netrc_pw_mgr = _NetrcPasswordManager(netrc_config)
diff --git a/src/buildstream/plugins/sources/bzr.py b/src/buildstream/plugins/sources/bzr.py
index 88dba7dc2..30ce55585 100644
--- a/src/buildstream/plugins/sources/bzr.py
+++ b/src/buildstream/plugins/sources/bzr.py
@@ -67,16 +67,16 @@ class BzrSource(Source):
# pylint: disable=attribute-defined-outside-init
def configure(self, node):
- node.validate_keys(['url', 'track', 'ref', *Source.COMMON_CONFIG_KEYS])
+ node.validate_keys(["url", "track", "ref", *Source.COMMON_CONFIG_KEYS])
- self.original_url = node.get_str('url')
- self.tracking = node.get_str('track')
- self.ref = node.get_str('ref', None)
+ self.original_url = node.get_str("url")
+ self.tracking = node.get_str("track")
+ self.ref = node.get_str("ref", None)
self.url = self.translate_url(self.original_url)
def preflight(self):
# Check if bzr is installed, get the binary at the same time.
- self.host_bzr = utils.get_host_tool('bzr')
+ self.host_bzr = utils.get_host_tool("bzr")
def get_unique_key(self):
return [self.original_url, self.tracking, self.ref]
@@ -93,39 +93,44 @@ class BzrSource(Source):
return Consistency.RESOLVED
def load_ref(self, node):
- self.ref = node.get_str('ref', None)
+ self.ref = node.get_str("ref", None)
def get_ref(self):
return self.ref
def set_ref(self, ref, node):
- node['ref'] = self.ref = ref
+ node["ref"] = self.ref = ref
def track(self): # pylint: disable=arguments-differ
- with self.timed_activity("Tracking {}".format(self.url),
- silent_nested=True), self._locked():
+ with self.timed_activity("Tracking {}".format(self.url), silent_nested=True), self._locked():
self._ensure_mirror(skip_ref_check=True)
- ret, out = self.check_output([self.host_bzr, "version-info",
- "--custom", "--template={revno}",
- self._get_branch_dir()],
- fail="Failed to read the revision number at '{}'"
- .format(self._get_branch_dir()))
+ ret, out = self.check_output(
+ [self.host_bzr, "version-info", "--custom", "--template={revno}", self._get_branch_dir()],
+ fail="Failed to read the revision number at '{}'".format(self._get_branch_dir()),
+ )
if ret != 0:
raise SourceError("{}: Failed to get ref for tracking {}".format(self, self.tracking))
return out
def fetch(self): # pylint: disable=arguments-differ
- with self.timed_activity("Fetching {}".format(self.url),
- silent_nested=True), self._locked():
+ with self.timed_activity("Fetching {}".format(self.url), silent_nested=True), self._locked():
self._ensure_mirror()
def stage(self, directory):
- self.call([self.host_bzr, "checkout", "--lightweight",
- "--revision=revno:{}".format(self.ref),
- self._get_branch_dir(), directory],
- fail="Failed to checkout revision {} from branch {} to {}"
- .format(self.ref, self._get_branch_dir(), directory))
+ self.call(
+ [
+ self.host_bzr,
+ "checkout",
+ "--lightweight",
+ "--revision=revno:{}".format(self.ref),
+ self._get_branch_dir(),
+ directory,
+ ],
+ fail="Failed to checkout revision {} from branch {} to {}".format(
+ self.ref, self._get_branch_dir(), directory
+ ),
+ )
# Remove .bzr dir
shutil.rmtree(os.path.join(directory, ".bzr"))
@@ -133,16 +138,24 @@ class BzrSource(Source):
url = os.path.join(self.url, self.tracking)
with self.timed_activity('Setting up workspace "{}"'.format(directory), silent_nested=True):
# Checkout from the cache
- self.call([self.host_bzr, "branch",
- "--use-existing-dir",
- "--revision=revno:{}".format(self.ref),
- self._get_branch_dir(), directory],
- fail="Failed to branch revision {} from branch {} to {}"
- .format(self.ref, self._get_branch_dir(), directory))
+ self.call(
+ [
+ self.host_bzr,
+ "branch",
+ "--use-existing-dir",
+ "--revision=revno:{}".format(self.ref),
+ self._get_branch_dir(),
+ directory,
+ ],
+ fail="Failed to branch revision {} from branch {} to {}".format(
+ self.ref, self._get_branch_dir(), directory
+ ),
+ )
# Switch the parent branch to the source's origin
- self.call([self.host_bzr, "switch",
- "--directory={}".format(directory), url],
- fail="Failed to switch workspace's parent branch to {}".format(url))
+ self.call(
+ [self.host_bzr, "switch", "--directory={}".format(directory), url],
+ fail="Failed to switch workspace's parent branch to {}".format(url),
+ )
# _locked()
#
@@ -151,13 +164,10 @@ class BzrSource(Source):
#
@contextmanager
def _locked(self):
- lockdir = os.path.join(self.get_mirror_directory(), 'locks')
- lockfile = os.path.join(
- lockdir,
- utils.url_directory_name(self.original_url) + '.lock'
- )
+ lockdir = os.path.join(self.get_mirror_directory(), "locks")
+ lockfile = os.path.join(lockdir, utils.url_directory_name(self.original_url) + ".lock")
os.makedirs(lockdir, exist_ok=True)
- with open(lockfile, 'w') as lock:
+ with open(lockfile, "w") as lock:
fcntl.flock(lock, fcntl.LOCK_EX)
try:
yield
@@ -169,41 +179,42 @@ class BzrSource(Source):
if not os.path.exists(self._get_branch_dir()):
return False
- return self.call([self.host_bzr, "revno",
- "--revision=revno:{}".format(self.ref),
- self._get_branch_dir()]) == 0
+ return self.call([self.host_bzr, "revno", "--revision=revno:{}".format(self.ref), self._get_branch_dir()]) == 0
def _get_branch_dir(self):
return os.path.join(self._get_mirror_dir(), self.tracking)
def _get_mirror_dir(self):
- return os.path.join(self.get_mirror_directory(),
- utils.url_directory_name(self.original_url))
+ return os.path.join(self.get_mirror_directory(), utils.url_directory_name(self.original_url))
def _ensure_mirror(self, skip_ref_check=False):
mirror_dir = self._get_mirror_dir()
bzr_metadata_dir = os.path.join(mirror_dir, ".bzr")
if not os.path.exists(bzr_metadata_dir):
- self.call([self.host_bzr, "init-repo", "--no-trees", mirror_dir],
- fail="Failed to initialize bzr repository")
+ self.call(
+ [self.host_bzr, "init-repo", "--no-trees", mirror_dir], fail="Failed to initialize bzr repository"
+ )
branch_dir = os.path.join(mirror_dir, self.tracking)
branch_url = self.url + "/" + self.tracking
if not os.path.exists(branch_dir):
# `bzr branch` the branch if it doesn't exist
# to get the upstream code
- self.call([self.host_bzr, "branch", branch_url, branch_dir],
- fail="Failed to branch from {} to {}".format(branch_url, branch_dir))
+ self.call(
+ [self.host_bzr, "branch", branch_url, branch_dir],
+ fail="Failed to branch from {} to {}".format(branch_url, branch_dir),
+ )
else:
# `bzr pull` the branch if it does exist
# to get any changes to the upstream code
- self.call([self.host_bzr, "pull", "--directory={}".format(branch_dir), branch_url],
- fail="Failed to pull new changes for {}".format(branch_dir))
+ self.call(
+ [self.host_bzr, "pull", "--directory={}".format(branch_dir), branch_url],
+ fail="Failed to pull new changes for {}".format(branch_dir),
+ )
if not skip_ref_check and not self._check_ref():
- raise SourceError("Failed to ensure ref '{}' was mirrored".format(self.ref),
- reason="ref-not-mirrored")
+ raise SourceError("Failed to ensure ref '{}' was mirrored".format(self.ref), reason="ref-not-mirrored")
def setup():
diff --git a/src/buildstream/plugins/sources/deb.py b/src/buildstream/plugins/sources/deb.py
index cc88cf53c..a7437b150 100644
--- a/src/buildstream/plugins/sources/deb.py
+++ b/src/buildstream/plugins/sources/deb.py
@@ -50,7 +50,7 @@ details on common configuration options for sources.
import tarfile
from contextlib import contextmanager
-import arpy # pylint: disable=import-error
+import arpy # pylint: disable=import-error
from .tar import TarSource
@@ -61,14 +61,14 @@ class DebSource(TarSource):
def configure(self, node):
super().configure(node)
- self.base_dir = node.get_str('base-dir', None)
+ self.base_dir = node.get_str("base-dir", None)
def preflight(self):
return
@contextmanager
def _get_tar(self):
- with open(self._get_mirror_file(), 'rb') as deb_file:
+ with open(self._get_mirror_file(), "rb") as deb_file:
arpy_archive = arpy.Archive(fileobj=deb_file)
arpy_archive.read_all_headers()
data_tar_arpy = [v for k, v in arpy_archive.archived_files.items() if b"data.tar" in k][0]
diff --git a/src/buildstream/plugins/sources/local.py b/src/buildstream/plugins/sources/local.py
index f40fd79c0..90d8a8f6f 100644
--- a/src/buildstream/plugins/sources/local.py
+++ b/src/buildstream/plugins/sources/local.py
@@ -54,8 +54,8 @@ class LocalSource(Source):
self.__unique_key = None
def configure(self, node):
- node.validate_keys(['path', *Source.COMMON_CONFIG_KEYS])
- self.path = self.node_get_project_path(node.get_scalar('path'))
+ node.validate_keys(["path", *Source.COMMON_CONFIG_KEYS])
+ self.path = self.node_get_project_path(node.get_scalar("path"))
self.fullpath = os.path.join(self.get_project_directory(), self.path)
def preflight(self):
@@ -89,8 +89,8 @@ class LocalSource(Source):
if result.overwritten or result.ignored:
raise SourceError(
- "Failed to stage source: files clash with existing directory",
- reason='ensure-stage-dir-fail')
+ "Failed to stage source: files clash with existing directory", reason="ensure-stage-dir-fail"
+ )
def _get_local_path(self):
return self.fullpath
diff --git a/src/buildstream/plugins/sources/patch.py b/src/buildstream/plugins/sources/patch.py
index 86811cb4d..082983023 100644
--- a/src/buildstream/plugins/sources/patch.py
+++ b/src/buildstream/plugins/sources/patch.py
@@ -56,8 +56,7 @@ class PatchSource(Source):
def configure(self, node):
node.validate_keys(["path", "strip-level", *Source.COMMON_CONFIG_KEYS])
- self.path = self.node_get_project_path(node.get_scalar('path'),
- check_is_file=True)
+ self.path = self.node_get_project_path(node.get_scalar("path"), check_is_file=True)
self.strip_level = node.get_int("strip-level", default=1)
self.fullpath = os.path.join(self.get_project_directory(), self.path)
@@ -89,12 +88,13 @@ class PatchSource(Source):
# Bail out with a comprehensive message if the target directory is empty
if not os.listdir(directory):
- raise SourceError("Nothing to patch in directory '{}'".format(directory),
- reason="patch-no-files")
+ raise SourceError("Nothing to patch in directory '{}'".format(directory), reason="patch-no-files")
strip_level_option = "-p{}".format(self.strip_level)
- self.call([self.host_patch, strip_level_option, "-i", self.fullpath, "-d", directory],
- fail="Failed to apply patch {}".format(self.path))
+ self.call(
+ [self.host_patch, strip_level_option, "-i", self.fullpath, "-d", directory],
+ fail="Failed to apply patch {}".format(self.path),
+ )
# Plugin entry point
diff --git a/src/buildstream/plugins/sources/pip.py b/src/buildstream/plugins/sources/pip.py
index 758ef665f..2c9773787 100644
--- a/src/buildstream/plugins/sources/pip.py
+++ b/src/buildstream/plugins/sources/pip.py
@@ -74,30 +74,28 @@ import re
from buildstream import Consistency, Source, SourceError, utils
-_OUTPUT_DIRNAME = '.bst_pip_downloads'
-_PYPI_INDEX_URL = 'https://pypi.org/simple/'
+_OUTPUT_DIRNAME = ".bst_pip_downloads"
+_PYPI_INDEX_URL = "https://pypi.org/simple/"
# Used only for finding pip command
_PYTHON_VERSIONS = [
- 'python', # when running in a venv, we might not have the exact version
- 'python2.7',
- 'python3.0',
- 'python3.1',
- 'python3.2',
- 'python3.3',
- 'python3.4',
- 'python3.5',
- 'python3.6',
- 'python3.7',
+ "python", # when running in a venv, we might not have the exact version
+ "python2.7",
+ "python3.0",
+ "python3.1",
+ "python3.2",
+ "python3.3",
+ "python3.4",
+ "python3.5",
+ "python3.6",
+ "python3.7",
]
# List of allowed extensions taken from
# https://docs.python.org/3/distutils/sourcedist.html.
# Names of source distribution archives must be of the form
# '%{package-name}-%{version}.%{extension}'.
-_SDIST_RE = re.compile(
- r'^([\w.-]+?)-((?:[\d.]+){2,})\.(?:tar|tar.bz2|tar.gz|tar.xz|tar.Z|zip)$',
- re.IGNORECASE)
+_SDIST_RE = re.compile(r"^([\w.-]+?)-((?:[\d.]+){2,})\.(?:tar|tar.bz2|tar.gz|tar.xz|tar.Z|zip)$", re.IGNORECASE)
class PipSource(Source):
@@ -109,16 +107,15 @@ class PipSource(Source):
BST_REQUIRES_PREVIOUS_SOURCES_TRACK = True
def configure(self, node):
- node.validate_keys(['url', 'packages', 'ref', 'requirements-files'] +
- Source.COMMON_CONFIG_KEYS)
- self.ref = node.get_str('ref', None)
- self.original_url = node.get_str('url', _PYPI_INDEX_URL)
+ node.validate_keys(["url", "packages", "ref", "requirements-files"] + Source.COMMON_CONFIG_KEYS)
+ self.ref = node.get_str("ref", None)
+ self.original_url = node.get_str("url", _PYPI_INDEX_URL)
self.index_url = self.translate_url(self.original_url)
- self.packages = node.get_str_list('packages', [])
- self.requirements_files = node.get_str_list('requirements-files', [])
+ self.packages = node.get_str_list("packages", [])
+ self.requirements_files = node.get_str_list("requirements-files", [])
if not (self.packages or self.requirements_files):
- raise SourceError("{}: Either 'packages' or 'requirements-files' must be specified". format(self))
+ raise SourceError("{}: Either 'packages' or 'requirements-files' must be specified".format(self))
def preflight(self):
# Try to find a pip version that spports download command
@@ -126,9 +123,9 @@ class PipSource(Source):
for python in reversed(_PYTHON_VERSIONS):
try:
host_python = utils.get_host_tool(python)
- rc = self.call([host_python, '-m', 'pip', 'download', '--help'])
+ rc = self.call([host_python, "-m", "pip", "download", "--help"])
if rc == 0:
- self.host_pip = [host_python, '-m', 'pip']
+ self.host_pip = [host_python, "-m", "pip"]
break
except utils.ProgramNotFoundError:
pass
@@ -150,10 +147,10 @@ class PipSource(Source):
return self.ref
def load_ref(self, node):
- self.ref = node.get_str('ref', None)
+ self.ref = node.get_str("ref", None)
def set_ref(self, ref, node):
- node['ref'] = self.ref = ref
+ node["ref"] = self.ref = ref
def track(self, previous_sources_dir): # pylint: disable=arguments-differ
# XXX pip does not offer any public API other than the CLI tool so it
@@ -163,32 +160,44 @@ class PipSource(Source):
# for details.
# As a result, we have to wastefully install the packages during track.
with self.tempdir() as tmpdir:
- install_args = self.host_pip + ['download',
- '--no-binary', ':all:',
- '--index-url', self.index_url,
- '--dest', tmpdir]
+ install_args = self.host_pip + [
+ "download",
+ "--no-binary",
+ ":all:",
+ "--index-url",
+ self.index_url,
+ "--dest",
+ tmpdir,
+ ]
for requirement_file in self.requirements_files:
fpath = os.path.join(previous_sources_dir, requirement_file)
- install_args += ['-r', fpath]
+ install_args += ["-r", fpath]
install_args += self.packages
self.call(install_args, fail="Failed to install python packages")
reqs = self._parse_sdist_names(tmpdir)
- return '\n'.join(["{}=={}".format(pkg, ver) for pkg, ver in reqs])
+ return "\n".join(["{}=={}".format(pkg, ver) for pkg, ver in reqs])
def fetch(self): # pylint: disable=arguments-differ
with self.tempdir() as tmpdir:
- packages = self.ref.strip().split('\n')
- package_dir = os.path.join(tmpdir, 'packages')
+ packages = self.ref.strip().split("\n")
+ package_dir = os.path.join(tmpdir, "packages")
os.makedirs(package_dir)
- self.call([*self.host_pip,
- 'download',
- '--no-binary', ':all:',
- '--index-url', self.index_url,
- '--dest', package_dir,
- *packages],
- fail="Failed to install python packages: {}".format(packages))
+ self.call(
+ [
+ *self.host_pip,
+ "download",
+ "--no-binary",
+ ":all:",
+ "--index-url",
+ self.index_url,
+ "--dest",
+ package_dir,
+ *packages,
+ ],
+ fail="Failed to install python packages: {}".format(packages),
+ )
# If the mirror directory already exists, assume that some other
# process has fetched the sources before us and ensure that we do
@@ -200,8 +209,11 @@ class PipSource(Source):
# before us.
pass
except OSError as e:
- raise SourceError("{}: Failed to move downloaded pip packages from '{}' to '{}': {}"
- .format(self, package_dir, self._mirror, e)) from e
+ raise SourceError(
+ "{}: Failed to move downloaded pip packages from '{}' to '{}': {}".format(
+ self, package_dir, self._mirror, e
+ )
+ ) from e
def stage(self, directory):
with self.timed_activity("Staging Python packages", silent_nested=True):
@@ -213,9 +225,11 @@ class PipSource(Source):
def _mirror(self):
if not self.ref:
return None
- return os.path.join(self.get_mirror_directory(),
- utils.url_directory_name(self.original_url),
- hashlib.sha256(self.ref.encode()).hexdigest())
+ return os.path.join(
+ self.get_mirror_directory(),
+ utils.url_directory_name(self.original_url),
+ hashlib.sha256(self.ref.encode()).hexdigest(),
+ )
# Parse names of downloaded source distributions
#
diff --git a/src/buildstream/plugins/sources/remote.py b/src/buildstream/plugins/sources/remote.py
index 68aa577fc..da1a1f964 100644
--- a/src/buildstream/plugins/sources/remote.py
+++ b/src/buildstream/plugins/sources/remote.py
@@ -62,13 +62,14 @@ class RemoteSource(DownloadableFileSource):
def configure(self, node):
super().configure(node)
- self.filename = node.get_str('filename', os.path.basename(self.url))
- self.executable = node.get_bool('executable', default=False)
+ self.filename = node.get_str("filename", os.path.basename(self.url))
+ self.executable = node.get_bool("executable", default=False)
if os.sep in self.filename:
- raise SourceError('{}: filename parameter cannot contain directories'.format(self),
- reason="filename-contains-directory")
- node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ['filename', 'executable'])
+ raise SourceError(
+ "{}: filename parameter cannot contain directories".format(self), reason="filename-contains-directory"
+ )
+ node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ["filename", "executable"])
def get_unique_key(self):
return super().get_unique_key() + [self.filename, self.executable]
diff --git a/src/buildstream/plugins/sources/tar.py b/src/buildstream/plugins/sources/tar.py
index 60d464457..658cc2735 100644
--- a/src/buildstream/plugins/sources/tar.py
+++ b/src/buildstream/plugins/sources/tar.py
@@ -73,6 +73,7 @@ class ReadableTarInfo(tarfile.TarInfo):
`mode` attribute in `TarInfo`, the class that encapsulates the internal meta-data of the tarball,
so that the owner-read bit is always set.
"""
+
@property
def mode(self):
# ensure file is readable by owner
@@ -89,13 +90,13 @@ class TarSource(DownloadableFileSource):
def configure(self, node):
super().configure(node)
- self.base_dir = node.get_str('base-dir', '*')
- node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ['base-dir'])
+ self.base_dir = node.get_str("base-dir", "*")
+ node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ["base-dir"])
def preflight(self):
self.host_lzip = None
- if self.url.endswith('.lz'):
- self.host_lzip = utils.get_host_tool('lzip')
+ if self.url.endswith(".lz"):
+ self.host_lzip = utils.get_host_tool("lzip")
def get_unique_key(self):
return super().get_unique_key() + [self.base_dir]
@@ -104,19 +105,17 @@ class TarSource(DownloadableFileSource):
def _run_lzip(self):
assert self.host_lzip
with TemporaryFile() as lzip_stdout:
- with open(self._get_mirror_file(), 'r') as lzip_file:
- self.call([self.host_lzip, '-d'],
- stdin=lzip_file,
- stdout=lzip_stdout)
+ with open(self._get_mirror_file(), "r") as lzip_file:
+ self.call([self.host_lzip, "-d"], stdin=lzip_file, stdout=lzip_stdout)
lzip_stdout.seek(0, 0)
yield lzip_stdout
@contextmanager
def _get_tar(self):
- if self.url.endswith('.lz'):
+ if self.url.endswith(".lz"):
with self._run_lzip() as lzip_dec:
- with tarfile.open(fileobj=lzip_dec, mode='r:', tarinfo=ReadableTarInfo) as tar:
+ with tarfile.open(fileobj=lzip_dec, mode="r:", tarinfo=ReadableTarInfo) as tar:
yield tar
else:
with tarfile.open(self._get_mirror_file(), tarinfo=ReadableTarInfo) as tar:
@@ -147,14 +146,18 @@ class TarSource(DownloadableFileSource):
def assert_safe(member):
final_path = os.path.abspath(os.path.join(target_dir, member.path))
if not final_path.startswith(target_dir):
- raise SourceError("{}: Tarfile attempts to extract outside the staging area: "
- "{} -> {}".format(self, member.path, final_path))
+ raise SourceError(
+ "{}: Tarfile attempts to extract outside the staging area: "
+ "{} -> {}".format(self, member.path, final_path)
+ )
if member.islnk():
linked_path = os.path.abspath(os.path.join(target_dir, member.linkname))
if not linked_path.startswith(target_dir):
- raise SourceError("{}: Tarfile attempts to hardlink outside the staging area: "
- "{} -> {}".format(self, member.path, final_path))
+ raise SourceError(
+ "{}: Tarfile attempts to hardlink outside the staging area: "
+ "{} -> {}".format(self, member.path, final_path)
+ )
# Don't need to worry about symlinks because they're just
# files here and won't be able to do much harm once we are
@@ -167,9 +170,9 @@ class TarSource(DownloadableFileSource):
for member in tar.getmembers():
# First, ensure that a member never starts with `./`
- if member.path.startswith('./'):
+ if member.path.startswith("./"):
member.path = member.path[2:]
- if member.islnk() and member.linkname.startswith('./'):
+ if member.islnk() and member.linkname.startswith("./"):
member.linkname = member.linkname[2:]
# Now extract only the paths which match the normalized path
@@ -202,16 +205,16 @@ class TarSource(DownloadableFileSource):
# Remove any possible leading './', offer more consistent behavior
# across tarballs encoded with or without a leading '.'
- member_name = member.name.lstrip('./')
+ member_name = member.name.lstrip("./")
if not member.isdir():
# Loop over the components of a path, for a path of a/b/c/d
# we will first visit 'a', then 'a/b' and then 'a/b/c', excluding
# the final component
- components = member_name.split('/')
+ components = member_name.split("/")
for i in range(len(components) - 1):
- dir_component = '/'.join([components[j] for j in range(i + 1)])
+ dir_component = "/".join([components[j] for j in range(i + 1)])
if dir_component not in visited:
visited.add(dir_component)
try:
@@ -219,7 +222,7 @@ class TarSource(DownloadableFileSource):
# exist in the archive
_ = tar.getmember(dir_component)
except KeyError:
- if dir_component != '.':
+ if dir_component != ".":
yield dir_component
continue
@@ -227,7 +230,7 @@ class TarSource(DownloadableFileSource):
# Avoid considering the '.' directory, if any is included in the archive
# this is to avoid the default 'base-dir: *' value behaving differently
# depending on whether the tarball was encoded with a leading '.' or not
- elif member_name == '.':
+ elif member_name == ".":
continue
yield member_name
diff --git a/src/buildstream/plugins/sources/workspace.py b/src/buildstream/plugins/sources/workspace.py
index f40f5fae8..a845fd440 100644
--- a/src/buildstream/plugins/sources/workspace.py
+++ b/src/buildstream/plugins/sources/workspace.py
@@ -59,9 +59,9 @@ class WorkspaceSource(Source):
return None
def configure(self, node: MappingNode) -> None:
- node.validate_keys(['path', 'ref', 'kind'])
- self.path = node.get_str('path')
- self.__digest = node.get_str('ref')
+ node.validate_keys(["path", "ref", "kind"])
+ self.path = node.get_str("path")
+ self.__digest = node.get_str("ref")
def preflight(self) -> None:
pass # pragma: nocover
@@ -79,7 +79,7 @@ class WorkspaceSource(Source):
#
# Raises AssertionError: existing workspaces should not be reinitialized
def init_workspace(self, directory: Directory) -> None:
- raise AssertionError('Attempting to re-open an existing workspace')
+ raise AssertionError("Attempting to re-open an existing workspace")
def get_consistency(self):
# always return cached state
@@ -95,8 +95,8 @@ class WorkspaceSource(Source):
if result.overwritten or result.ignored:
raise SourceError(
- "Failed to stage source: files clash with existing directory",
- reason='ensure-stage-dir-fail')
+ "Failed to stage source: files clash with existing directory", reason="ensure-stage-dir-fail"
+ )
def _get_local_path(self) -> str:
return self.path
diff --git a/src/buildstream/plugins/sources/zip.py b/src/buildstream/plugins/sources/zip.py
index 322be58d7..47933c8eb 100644
--- a/src/buildstream/plugins/sources/zip.py
+++ b/src/buildstream/plugins/sources/zip.py
@@ -72,8 +72,8 @@ class ZipSource(DownloadableFileSource):
def configure(self, node):
super().configure(node)
- self.base_dir = node.get_str('base-dir', '*')
- node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ['base-dir'])
+ self.base_dir = node.get_str("base-dir", "*")
+ node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ["base-dir"])
def get_unique_key(self):
return super().get_unique_key() + [self.base_dir]
@@ -139,14 +139,14 @@ class ZipSource(DownloadableFileSource):
# ZipInfo.is_dir() is only available in python >= 3.6, but all
# it does is check for a trailing '/' in the name
#
- if not member.filename.endswith('/'):
+ if not member.filename.endswith("/"):
# Loop over the components of a path, for a path of a/b/c/d
# we will first visit 'a', then 'a/b' and then 'a/b/c', excluding
# the final component
- components = member.filename.split('/')
+ components = member.filename.split("/")
for i in range(len(components) - 1):
- dir_component = '/'.join([components[j] for j in range(i + 1)])
+ dir_component = "/".join([components[j] for j in range(i + 1)])
if dir_component not in visited:
visited[dir_component] = True
try:
@@ -154,7 +154,7 @@ class ZipSource(DownloadableFileSource):
# exist in the archive
_ = archive.getinfo(dir_component)
except KeyError:
- if dir_component != '.':
+ if dir_component != ".":
yield dir_component
continue
@@ -162,7 +162,7 @@ class ZipSource(DownloadableFileSource):
# Avoid considering the '.' directory, if any is included in the archive
# this is to avoid the default 'base-dir: *' value behaving differently
# depending on whether the archive was encoded with a leading '.' or not
- elif member.filename == '.' or member.filename == './':
+ elif member.filename == "." or member.filename == "./":
continue
yield member.filename