Diffstat (limited to 'tests/frontend')
-rw-r--r--  tests/frontend/artifact_delete.py  22
-rw-r--r--  tests/frontend/artifact_list_contents.py  21
-rw-r--r--  tests/frontend/artifact_log.py  15
-rw-r--r--  tests/frontend/artifact_show.py  4
-rw-r--r--  tests/frontend/buildcheckout.py  144
-rw-r--r--  tests/frontend/completions.py  160
-rw-r--r--  tests/frontend/compose_splits.py  8
-rw-r--r--  tests/frontend/configurable_warnings.py  10
-rw-r--r--  tests/frontend/configuredwarning/plugins/corewarn.py  3
-rw-r--r--  tests/frontend/configuredwarning/plugins/warninga.py  4
-rw-r--r--  tests/frontend/configuredwarning/plugins/warningb.py  4
-rw-r--r--  tests/frontend/consistencyerror/plugins/consistencyerror.py  4
-rw-r--r--  tests/frontend/cross_junction_workspace.py  6
-rw-r--r--  tests/frontend/fetch.py  4
-rw-r--r--  tests/frontend/help.py  14
-rw-r--r--  tests/frontend/init.py  45
-rw-r--r--  tests/frontend/large_directory.py  8
-rw-r--r--  tests/frontend/logging.py  26
-rw-r--r--  tests/frontend/mirror.py  58
-rw-r--r--  tests/frontend/order.py  34
-rw-r--r--  tests/frontend/overlaps.py  26
-rw-r--r--  tests/frontend/progress.py  16
-rw-r--r--  tests/frontend/project/sources/fetch_source.py  17
-rw-r--r--  tests/frontend/pull.py  92
-rw-r--r--  tests/frontend/push.py  121
-rw-r--r--  tests/frontend/rebuild.py  8
-rw-r--r--  tests/frontend/show.py  156
-rw-r--r--  tests/frontend/source_checkout.py  110
-rw-r--r--  tests/frontend/track.py  60
-rw-r--r--  tests/frontend/workspace.py  379
30 files changed, 305 insertions(+), 1274 deletions(-)
diff --git a/tests/frontend/artifact_delete.py b/tests/frontend/artifact_delete.py
index 9389788b3..a93d99ef6 100644
--- a/tests/frontend/artifact_delete.py
+++ b/tests/frontend/artifact_delete.py
@@ -74,9 +74,7 @@ def test_artifact_delete_artifact(cli, tmpdir, datafiles):
result.assert_success()
# Check that the ARTIFACT is no longer in the cache
- assert not os.path.exists(
- os.path.join(local_cache, "cas", "refs", "heads", artifact)
- )
+ assert not os.path.exists(os.path.join(local_cache, "cas", "refs", "heads", artifact))
# Test the `bst artifact delete` command with multiple, different arguments.
@@ -190,9 +188,7 @@ def test_artifact_delete_elements_build_deps(cli, tmpdir, datafiles):
for state in bdep_states.values():
assert state == "cached"
- result = cli.run(
- project=project, args=["artifact", "delete", "--deps", "build", element]
- )
+ result = cli.run(project=project, args=["artifact", "delete", "--deps", "build", element])
result.assert_success()
# Assert that the build deps have been deleted and that the artifact remains cached
@@ -227,20 +223,14 @@ def test_artifact_delete_artifacts_build_deps(cli, tmpdir, datafiles):
bdep_refs = []
bdep_states = cli.get_element_states(project, [element], deps="build")
for bdep in bdep_states.keys():
- bdep_refs.append(
- os.path.join(
- "test", _get_normal_name(bdep), cli.get_element_key(project, bdep)
- )
- )
+ bdep_refs.append(os.path.join("test", _get_normal_name(bdep), cli.get_element_key(project, bdep)))
# Assert build dependencies are cached
for ref in bdep_refs:
assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", ref))
# Delete the artifact
- result = cli.run(
- project=project, args=["artifact", "delete", "--deps", "build", artifact]
- )
+ result = cli.run(project=project, args=["artifact", "delete", "--deps", "build", artifact])
result.assert_success()
# Check that the artifact's build deps are no longer in the cache
@@ -265,9 +255,7 @@ def test_artifact_delete_artifact_with_deps_all_fails(cli, tmpdir, datafiles):
artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)
# Try to delete the artifact with all of its dependencies
- result = cli.run(
- project=project, args=["artifact", "delete", "--deps", "all", artifact]
- )
+ result = cli.run(project=project, args=["artifact", "delete", "--deps", "all", artifact])
result.assert_main_error(ErrorDomain.STREAM, None)
assert "Error: '--deps all' is not supported for artifact refs" in result.stderr
diff --git a/tests/frontend/artifact_list_contents.py b/tests/frontend/artifact_list_contents.py
index ddd2d50a6..7e8bb6508 100644
--- a/tests/frontend/artifact_list_contents.py
+++ b/tests/frontend/artifact_list_contents.py
@@ -37,9 +37,7 @@ def test_artifact_list_exact_contents_element(cli, datafiles):
assert result.exit_code == 0
# List the contents via the element name
- result = cli.run(
- project=project, args=["artifact", "list-contents", "import-bin.bst"]
- )
+ result = cli.run(project=project, args=["artifact", "list-contents", "import-bin.bst"])
assert result.exit_code == 0
expected_output = "import-bin.bst:\n" "\tusr\n" "\tusr/bin\n" "\tusr/bin/hello\n\n"
assert expected_output in result.output
@@ -57,14 +55,10 @@ def test_artifact_list_exact_contents_ref(cli, datafiles):
assert result.exit_code == 0
# List the contents via the key
- result = cli.run(
- project=project, args=["artifact", "list-contents", "test/import-bin/" + key]
- )
+ result = cli.run(project=project, args=["artifact", "list-contents", "test/import-bin/" + key])
assert result.exit_code == 0
- expected_output = (
- "test/import-bin/" + key + ":\n" "\tusr\n" "\tusr/bin\n" "\tusr/bin/hello\n\n"
- )
+ expected_output = "test/import-bin/" + key + ":\n" "\tusr\n" "\tusr/bin\n" "\tusr/bin/hello\n\n"
assert expected_output in result.output
@@ -106,9 +100,7 @@ def test_artifact_list_exact_contents_element_long(cli, datafiles):
assert result.exit_code == 0
# List the contents via the element name
- result = cli.run(
- project=project, args=["artifact", "list-contents", "--long", "import-bin.bst"]
- )
+ result = cli.run(project=project, args=["artifact", "list-contents", "--long", "import-bin.bst"])
assert result.exit_code == 0
expected_output = (
"import-bin.bst:\n"
@@ -132,10 +124,7 @@ def test_artifact_list_exact_contents_ref_long(cli, datafiles):
assert result.exit_code == 0
# List the contents via the key
- result = cli.run(
- project=project,
- args=["artifact", "list-contents", "-l", "test/import-bin/" + key],
- )
+ result = cli.run(project=project, args=["artifact", "list-contents", "-l", "test/import-bin/" + key],)
assert result.exit_code == 0
expected_output = (
diff --git a/tests/frontend/artifact_log.py b/tests/frontend/artifact_log.py
index 44c35aa3d..806a3b437 100644
--- a/tests/frontend/artifact_log.py
+++ b/tests/frontend/artifact_log.py
@@ -36,15 +36,7 @@ def test_artifact_log(cli, datafiles):
result = cli.run(
project=project,
silent=True,
- args=[
- "--no-colors",
- "show",
- "--deps",
- "none",
- "--format",
- "%{full-key}",
- "target.bst",
- ],
+ args=["--no-colors", "show", "--deps", "none", "--format", "%{full-key}", "target.bst",],
)
key = result.output.strip()
@@ -89,10 +81,7 @@ def test_artifact_log_files(cli, datafiles):
assert not os.path.exists(import_bin)
# Run the command and ensure the file now exists
- result = cli.run(
- project=project,
- args=["artifact", "log", "--out", logfiles, "target.bst", "import-bin.bst"],
- )
+ result = cli.run(project=project, args=["artifact", "log", "--out", logfiles, "target.bst", "import-bin.bst"],)
assert result.exit_code == 0
assert os.path.exists(logfiles)
assert os.path.exists(target)
diff --git a/tests/frontend/artifact_show.py b/tests/frontend/artifact_show.py
index c47222e18..6f824c0e4 100644
--- a/tests/frontend/artifact_show.py
+++ b/tests/frontend/artifact_show.py
@@ -79,9 +79,7 @@ def test_artifact_show_element_missing_deps(cli, tmpdir, datafiles):
result = cli.run(project=project, args=["artifact", "delete", dependency])
result.assert_success()
- result = cli.run(
- project=project, args=["artifact", "show", "--deps", "all", element]
- )
+ result = cli.run(project=project, args=["artifact", "show", "--deps", "all", element])
result.assert_success()
assert "not cached {}".format(dependency) in result.output
assert "cached {}".format(element) in result.output
diff --git a/tests/frontend/buildcheckout.py b/tests/frontend/buildcheckout.py
index f3080269d..7772c48ef 100644
--- a/tests/frontend/buildcheckout.py
+++ b/tests/frontend/buildcheckout.py
@@ -33,12 +33,7 @@ def strict_args(args, strict):
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize(
"strict,hardlinks",
- [
- ("strict", "copies"),
- ("strict", "hardlinks"),
- ("non-strict", "copies"),
- ("non-strict", "hardlinks"),
- ],
+ [("strict", "copies"), ("strict", "hardlinks"), ("non-strict", "copies"), ("non-strict", "hardlinks"),],
)
def test_build_checkout(datafiles, cli, strict, hardlinks):
project = str(datafiles)
@@ -115,9 +110,7 @@ def test_build_invalid_suffix_dep(datafiles, cli, strict, hardlinks):
project = str(datafiles)
# target2.bst depends on an element called target.foo
- result = cli.run(
- project=project, args=strict_args(["build", "target2.bst"], strict)
- )
+ result = cli.run(project=project, args=strict_args(["build", "target2.bst"], strict))
result.assert_main_error(ErrorDomain.LOAD, "bad-element-suffix")
@@ -134,9 +127,7 @@ def test_build_invalid_filename_chars(datafiles, cli):
}
_yaml.roundtrip_dump(element, os.path.join(project, "elements", element_name))
- result = cli.run(
- project=project, args=strict_args(["build", element_name], "non-strict")
- )
+ result = cli.run(project=project, args=strict_args(["build", element_name], "non-strict"))
result.assert_main_error(ErrorDomain.LOAD, "bad-characters-in-name")
@@ -154,10 +145,7 @@ def test_build_invalid_filename_chars_dep(datafiles, cli):
}
_yaml.roundtrip_dump(element, os.path.join(project, "elements", element_name))
- result = cli.run(
- project=project,
- args=strict_args(["build", "invalid-chars-in-dep.bst"], "non-strict"),
- )
+ result = cli.run(project=project, args=strict_args(["build", "invalid-chars-in-dep.bst"], "non-strict"),)
result.assert_main_error(ErrorDomain.LOAD, "bad-characters-in-name")
@@ -179,16 +167,7 @@ def test_build_checkout_deps(datafiles, cli, deps):
# Now check it out
result = cli.run(
- project=project,
- args=[
- "artifact",
- "checkout",
- element_name,
- "--deps",
- deps,
- "--directory",
- checkout,
- ],
+ project=project, args=["artifact", "checkout", element_name, "--deps", deps, "--directory", checkout,],
)
result.assert_success()
@@ -220,10 +199,7 @@ def test_build_checkout_unbuilt(datafiles, cli):
checkout = os.path.join(cli.directory, "checkout")
# Check that checking out an unbuilt element fails nicely
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "target.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "target.bst", "--directory", checkout],)
result.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
@@ -246,10 +222,7 @@ def test_build_checkout_compression_no_tar(datafiles, cli):
]
result = cli.run(project=project, args=checkout_args)
- assert (
- "ERROR: --compression can only be provided if --tar is provided"
- in result.stderr
- )
+ assert "ERROR: --compression can only be provided if --tar is provided" in result.stderr
assert result.exit_code != 0
@@ -466,10 +439,7 @@ def test_build_checkout_invalid_ref(datafiles, cli):
]
result = cli.run(project=project, args=checkout_args)
- assert (
- "Error while staging dependencies into a sandbox: 'No artifacts to stage'"
- in result.stderr
- )
+ assert "Error while staging dependencies into a sandbox: 'No artifacts to stage'" in result.stderr
@pytest.mark.datafiles(DATA_DIR)
@@ -613,9 +583,7 @@ def test_build_checkout_tarball_links(datafiles, cli):
# of the symlink and the test therefore doesn't have the correct content
os.symlink(
os.path.join("..", "basicfile"),
- os.path.join(
- project, "files", "files-and-links", "basicfolder", "basicsymlink"
- ),
+ os.path.join(project, "files", "files-and-links", "basicfolder", "basicsymlink"),
)
result = cli.run(project=project, args=["build", "import-links.bst"])
@@ -632,10 +600,7 @@ def test_build_checkout_tarball_links(datafiles, cli):
tar = tarfile.open(name=checkout, mode="r:")
tar.extractall(extract)
- assert (
- open(os.path.join(extract, "basicfolder", "basicsymlink")).read()
- == "file contents\n"
- )
+ assert open(os.path.join(extract, "basicfolder", "basicsymlink")).read() == "file contents\n"
@pytest.mark.datafiles(DATA_DIR)
@@ -648,9 +613,7 @@ def test_build_checkout_links(datafiles, cli):
# of the symlink and the test therefore doesn't have the correct content
os.symlink(
os.path.join("..", "basicfile"),
- os.path.join(
- project, "files", "files-and-links", "basicfolder", "basicsymlink"
- ),
+ os.path.join(project, "files", "files-and-links", "basicfolder", "basicsymlink"),
)
result = cli.run(project=project, args=["build", "import-links.bst"])
@@ -671,10 +634,7 @@ def test_build_checkout_links(datafiles, cli):
result = cli.run(project=project, args=checkout_args)
result.assert_success()
- assert (
- open(os.path.join(checkout, "basicfolder", "basicsymlink")).read()
- == "file contents\n"
- )
+ assert open(os.path.join(checkout, "basicfolder", "basicsymlink")).read() == "file contents\n"
@pytest.mark.datafiles(DATA_DIR)
@@ -836,9 +796,7 @@ def test_unfetched_junction(cli, tmpdir, datafiles, ref_storage):
configure_project(project, {"ref-storage": ref_storage})
# Create a repo to hold the subproject and generate a junction element for it
- ref = generate_junction(
- tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline")
- )
+ ref = generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline"))
# Create a stack element to depend on a cross junction element
#
@@ -891,10 +849,7 @@ def test_build_checkout_junction(cli, tmpdir, datafiles):
assert cli.get_element_state(project, "junction-dep.bst") == "cached"
# Now check it out
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],)
result.assert_success()
# Assert the content of /etc/animal.conf
@@ -934,10 +889,7 @@ def test_build_checkout_junction_default_targets(cli, tmpdir, datafiles):
assert cli.get_element_state(project, "junction-dep.bst") == "cached"
# Now check it out
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],)
result.assert_success()
# Assert the content of /etc/animal.conf
@@ -970,10 +922,7 @@ def test_build_checkout_workspaced_junction(cli, tmpdir, datafiles):
# Now open a workspace on the junction
#
- result = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace, "junction.bst"],
- )
+ result = cli.run(project=project, args=["workspace", "open", "--directory", workspace, "junction.bst"],)
result.assert_success()
filename = os.path.join(workspace, "files", "etc-files", "etc", "animal.conf")
@@ -996,10 +945,7 @@ def test_build_checkout_workspaced_junction(cli, tmpdir, datafiles):
assert cli.get_element_state(project, "junction-dep.bst") == "cached"
# Now check it out
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],)
result.assert_success()
# Assert the workspace modified content of /etc/animal.conf
@@ -1023,14 +969,7 @@ def test_build_checkout_cross_junction(datafiles, cli, tmpdir):
result.assert_success()
result = cli.run(
- project=project,
- args=[
- "artifact",
- "checkout",
- "junction.bst:import-etc.bst",
- "--directory",
- checkout,
- ],
+ project=project, args=["artifact", "checkout", "junction.bst:import-etc.bst", "--directory", checkout,],
)
result.assert_success()
@@ -1063,10 +1002,7 @@ def test_build_junction_short_notation(cli, tmpdir, datafiles):
assert cli.get_element_state(project, "junction-dep.bst") == "cached"
# Now check it out
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],)
result.assert_success()
# Assert the content of /etc/animal.conf
@@ -1105,10 +1041,7 @@ def test_build_junction_short_notation_filename(cli, tmpdir, datafiles):
assert cli.get_element_state(project, "junction-dep.bst") == "cached"
# Now check it out
- result = cli.run(
- project=project,
- args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],)
result.assert_success()
# Assert the content of /etc/animal.conf
@@ -1133,9 +1066,7 @@ def test_build_junction_short_notation_with_junction(cli, tmpdir, datafiles):
# colon (:) as the separator
element = {
"kind": "stack",
- "depends": [
- {"filename": "junction.bst:import-etc.bst", "junction": "junction.bst",}
- ],
+ "depends": [{"filename": "junction.bst:import-etc.bst", "junction": "junction.bst",}],
}
_yaml.roundtrip_dump(element, element_path)
@@ -1202,30 +1133,17 @@ def test_partial_artifact_checkout_fetch(cli, datafiles, tmpdir):
# A push artifact cache means we have to pull to push to them, so
# delete some blobs from that CAS such that we have to fetch
- digest = utils.sha256sum(
- os.path.join(project, "files", "bin-files", "usr", "bin", "hello")
- )
+ digest = utils.sha256sum(os.path.join(project, "files", "bin-files", "usr", "bin", "hello"))
objpath = os.path.join(cli.directory, "cas", "objects", digest[:2], digest[2:])
os.unlink(objpath)
# Verify that the build-only dependency is not (complete) in the local cache
- result = cli.run(
- project=project,
- args=["artifact", "checkout", input_name, "--directory", checkout_dir],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", input_name, "--directory", checkout_dir],)
result.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
# Verify that the pull method fetches relevant artifacts in order to stage
result = cli.run(
- project=project,
- args=[
- "artifact",
- "checkout",
- "--pull",
- input_name,
- "--directory",
- checkout_dir,
- ],
+ project=project, args=["artifact", "checkout", "--pull", input_name, "--directory", checkout_dir,],
)
result.assert_success()
@@ -1244,17 +1162,7 @@ def test_partial_checkout_fail(tmpdir, datafiles, cli):
cli.configure({"artifacts": {"url": share.repo, "push": True}})
res = cli.run(
- project=project,
- args=[
- "artifact",
- "checkout",
- "--pull",
- build_elt,
- "--directory",
- checkout_dir,
- ],
+ project=project, args=["artifact", "checkout", "--pull", build_elt, "--directory", checkout_dir,],
)
res.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
- assert re.findall(
- r"Remote \((\S+)\) does not have artifact (\S+) cached", res.stderr
- )
+ assert re.findall(r"Remote \((\S+)\) does not have artifact (\S+) cached", res.stderr)
diff --git a/tests/frontend/completions.py b/tests/frontend/completions.py
index 075fd70f1..3603543c7 100644
--- a/tests/frontend/completions.py
+++ b/tests/frontend/completions.py
@@ -84,13 +84,7 @@ MIXED_ELEMENTS = PROJECT_ELEMENTS + INVALID_ELEMENTS
def assert_completion(cli, cmd, word_idx, expected, cwd=None):
result = cli.run(
- project=".",
- cwd=cwd,
- env={
- "_BST_COMPLETION": "complete",
- "COMP_WORDS": cmd,
- "COMP_CWORD": str(word_idx),
- },
+ project=".", cwd=cwd, env={"_BST_COMPLETION": "complete", "COMP_WORDS": cmd, "COMP_CWORD": str(word_idx),},
)
words = []
if result.output:
@@ -105,14 +99,7 @@ def assert_completion(cli, cmd, word_idx, expected, cwd=None):
def assert_completion_failed(cli, cmd, word_idx, expected, cwd=None):
- result = cli.run(
- cwd=cwd,
- env={
- "_BST_COMPLETION": "complete",
- "COMP_WORDS": cmd,
- "COMP_CWORD": str(word_idx),
- },
- )
+ result = cli.run(cwd=cwd, env={"_BST_COMPLETION": "complete", "COMP_WORDS": cmd, "COMP_CWORD": str(word_idx),},)
words = []
if result.output:
words = result.output.splitlines()
@@ -182,29 +169,14 @@ def test_option_choice(cli, cmd, word_idx, expected):
# Note that elements/ and files/ are partial completions and
# as such do not come with trailing whitespace
("bst --config ", 2, ["cache/", "elements/", "files/", "project.conf "], None),
- (
- "bst --log-file ",
- 2,
- ["cache/", "elements/", "files/", "project.conf "],
- None,
- ),
+ ("bst --log-file ", 2, ["cache/", "elements/", "files/", "project.conf "], None,),
("bst --config f", 2, ["files/"], None),
("bst --log-file f", 2, ["files/"], None),
("bst --config files", 2, ["files/bin-files/", "files/dev-files/"], None),
("bst --log-file files", 2, ["files/bin-files/", "files/dev-files/"], None),
("bst --config files/", 2, ["files/bin-files/", "files/dev-files/"], None),
- (
- "bst --log-file elements/",
- 2,
- [os.path.join("elements", e) + " " for e in PROJECT_ELEMENTS],
- None,
- ),
- (
- "bst --config ../",
- 2,
- ["../cache/", "../elements/", "../files/", "../project.conf "],
- "files",
- ),
+ ("bst --log-file elements/", 2, [os.path.join("elements", e) + " " for e in PROJECT_ELEMENTS], None,),
+ ("bst --config ../", 2, ["../cache/", "../elements/", "../files/", "../project.conf "], "files",),
(
"bst --config ../elements/",
2,
@@ -251,11 +223,7 @@ def test_option_directory(datafiles, cli, cmd, word_idx, expected, subdir):
"project",
"bst build com",
2,
- [
- "compose-all.bst ",
- "compose-include-bin.bst ",
- "compose-exclude-dev.bst ",
- ],
+ ["compose-all.bst ", "compose-include-bin.bst ", "compose-exclude-dev.bst ",],
None,
),
# When running from the files subdir
@@ -264,83 +232,37 @@ def test_option_directory(datafiles, cli, cmd, word_idx, expected, subdir):
"project",
"bst build com",
2,
- [
- "compose-all.bst ",
- "compose-include-bin.bst ",
- "compose-exclude-dev.bst ",
- ],
+ ["compose-all.bst ", "compose-include-bin.bst ", "compose-exclude-dev.bst ",],
"files",
),
# When passing the project directory
- (
- "project",
- "bst --directory ../ show ",
- 4,
- [e + " " for e in PROJECT_ELEMENTS],
- "files",
- ),
+ ("project", "bst --directory ../ show ", 4, [e + " " for e in PROJECT_ELEMENTS], "files",),
(
"project",
"bst --directory ../ build com",
4,
- [
- "compose-all.bst ",
- "compose-include-bin.bst ",
- "compose-exclude-dev.bst ",
- ],
+ ["compose-all.bst ", "compose-include-bin.bst ", "compose-exclude-dev.bst ",],
"files",
),
# Also try multi arguments together
- (
- "project",
- "bst --directory ../ artifact checkout t ",
- 5,
- ["target.bst "],
- "files",
- ),
- (
- "project",
- "bst --directory ../ artifact checkout --directory ",
- 6,
- ["bin-files/", "dev-files/"],
- "files",
- ),
+ ("project", "bst --directory ../ artifact checkout t ", 5, ["target.bst "], "files",),
+ ("project", "bst --directory ../ artifact checkout --directory ", 6, ["bin-files/", "dev-files/"], "files",),
# When running in the project directory
- (
- "no-element-path",
- "bst show ",
- 2,
- [e + " " for e in PROJECT_ELEMENTS] + ["files/"],
- None,
- ),
+ ("no-element-path", "bst show ", 2, [e + " " for e in PROJECT_ELEMENTS] + ["files/"], None,),
(
"no-element-path",
"bst build com",
2,
- [
- "compose-all.bst ",
- "compose-include-bin.bst ",
- "compose-exclude-dev.bst ",
- ],
+ ["compose-all.bst ", "compose-include-bin.bst ", "compose-exclude-dev.bst ",],
None,
),
# When running from the files subdir
- (
- "no-element-path",
- "bst show ",
- 2,
- [e + " " for e in PROJECT_ELEMENTS] + ["files/"],
- "files",
- ),
+ ("no-element-path", "bst show ", 2, [e + " " for e in PROJECT_ELEMENTS] + ["files/"], "files",),
(
"no-element-path",
"bst build com",
2,
- [
- "compose-all.bst ",
- "compose-include-bin.bst ",
- "compose-exclude-dev.bst ",
- ],
+ ["compose-all.bst ", "compose-include-bin.bst ", "compose-exclude-dev.bst ",],
"files",
),
# When passing the project directory
@@ -352,32 +274,16 @@ def test_option_directory(datafiles, cli, cmd, word_idx, expected, subdir):
"files",
),
("no-element-path", "bst --directory ../ show f", 4, ["files/"], "files"),
- (
- "no-element-path",
- "bst --directory ../ show files/",
- 4,
- ["files/bin-files/", "files/dev-files/"],
- "files",
- ),
+ ("no-element-path", "bst --directory ../ show files/", 4, ["files/bin-files/", "files/dev-files/"], "files",),
(
"no-element-path",
"bst --directory ../ build com",
4,
- [
- "compose-all.bst ",
- "compose-include-bin.bst ",
- "compose-exclude-dev.bst ",
- ],
+ ["compose-all.bst ", "compose-include-bin.bst ", "compose-exclude-dev.bst ",],
"files",
),
# Also try multi arguments together
- (
- "no-element-path",
- "bst --directory ../ artifact checkout t ",
- 5,
- ["target.bst "],
- "files",
- ),
+ ("no-element-path", "bst --directory ../ artifact checkout t ", 5, ["target.bst "], "files",),
(
"no-element-path",
"bst --directory ../ artifact checkout --directory ",
@@ -402,18 +308,10 @@ def test_argument_element(datafiles, cli, project, cmd, word_idx, expected, subd
"project,cmd,word_idx,expected,subdir",
[
# When element has invalid suffix
- (
- "project",
- "bst --directory ../ show ",
- 4,
- [e + " " for e in MIXED_ELEMENTS],
- "files",
- )
+ ("project", "bst --directory ../ show ", 4, [e + " " for e in MIXED_ELEMENTS], "files",)
],
)
-def test_argument_element_invalid(
- datafiles, cli, project, cmd, word_idx, expected, subdir
-):
+def test_argument_element_invalid(datafiles, cli, project, cmd, word_idx, expected, subdir):
cwd = os.path.join(str(datafiles), project)
if subdir:
cwd = os.path.join(cwd, subdir)
@@ -442,9 +340,7 @@ def test_argument_artifact(cli, datafiles):
project = str(datafiles)
# Build an import element with no dependencies (as there will only be ONE cache key)
- result = cli.run(
- project=project, args=["build", "import-bin.bst"]
- ) # Has no dependencies
+ result = cli.run(project=project, args=["build", "import-bin.bst"]) # Has no dependencies
result.assert_success()
# Get the key and the artifact ref ($project/$element_name/$key)
@@ -459,23 +355,15 @@ def test_argument_artifact(cli, datafiles):
result = cli.run(
project=project,
cwd=project,
- env={
- "_BST_COMPLETION": "complete",
- "COMP_WORDS": cmd,
- "COMP_CWORD": str(word_idx),
- },
+ env={"_BST_COMPLETION": "complete", "COMP_WORDS": cmd, "COMP_CWORD": str(word_idx),},
)
if result.output:
- words = (
- result.output.splitlines()
- ) # This leaves an extra space on each e.g. ['foo.bst ']
+ words = result.output.splitlines() # This leaves an extra space on each e.g. ['foo.bst ']
words = [word.strip() for word in words]
if i == 0:
- expected = PROJECT_ELEMENTS + [
- artifact
- ] # We should now be able to see the artifact
+ expected = PROJECT_ELEMENTS + [artifact] # We should now be able to see the artifact
elif i == 1:
expected = ["target.bst", artifact]
elif i == 2:
diff --git a/tests/frontend/compose_splits.py b/tests/frontend/compose_splits.py
index 3a308a9f5..d333b031e 100644
--- a/tests/frontend/compose_splits.py
+++ b/tests/frontend/compose_splits.py
@@ -9,9 +9,7 @@ from buildstream.testing.runcli import cli # pylint: disable=unused-import
DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
-@pytest.mark.parametrize(
- "target", [("compose-include-bin.bst"), ("compose-exclude-dev.bst")]
-)
+@pytest.mark.parametrize("target", [("compose-include-bin.bst"), ("compose-exclude-dev.bst")])
@pytest.mark.datafiles(DATA_DIR)
def test_compose_splits(datafiles, cli, target):
project = str(datafiles)
@@ -22,9 +20,7 @@ def test_compose_splits(datafiles, cli, target):
result.assert_success()
# Now check it out
- result = cli.run(
- project=project, args=["artifact", "checkout", target, "--directory", checkout]
- )
+ result = cli.run(project=project, args=["artifact", "checkout", target, "--directory", checkout])
result.assert_success()
# Check that the executable hello file is found in the checkout
diff --git a/tests/frontend/configurable_warnings.py b/tests/frontend/configurable_warnings.py
index f756aae2b..52cb03cec 100644
--- a/tests/frontend/configurable_warnings.py
+++ b/tests/frontend/configurable_warnings.py
@@ -19,11 +19,7 @@ def get_project(fatal_warnings):
"name": "test",
"element-path": "elements",
"plugins": [
- {
- "origin": "local",
- "path": "plugins",
- "elements": {"warninga": 0, "warningb": 0, "corewarn": 0,},
- }
+ {"origin": "local", "path": "plugins", "elements": {"warninga": 0, "warningb": 0, "corewarn": 0,},}
],
"fatal-warnings": fatal_warnings,
}
@@ -53,9 +49,7 @@ def build_project(datafiles, fatal_warnings):
("warningb.bst", [CoreWarnings.OVERLAPS], False, None),
],
)
-def test_fatal_warnings(
- cli, datafiles, element_name, fatal_warnings, expect_fatal, error_domain
-):
+def test_fatal_warnings(cli, datafiles, element_name, fatal_warnings, expect_fatal, error_domain):
if HAVE_SANDBOX == "buildbox" and error_domain != ErrorDomain.STREAM:
pytest.xfail()
project_path = build_project(datafiles, fatal_warnings)
diff --git a/tests/frontend/configuredwarning/plugins/corewarn.py b/tests/frontend/configuredwarning/plugins/corewarn.py
index 5e43115f7..7ca8daed9 100644
--- a/tests/frontend/configuredwarning/plugins/corewarn.py
+++ b/tests/frontend/configuredwarning/plugins/corewarn.py
@@ -20,8 +20,7 @@ class CoreWarn(Element):
def assemble(self, sandbox):
self.warn(
- "Testing: CoreWarning produced during assemble",
- warning_token=CoreWarnings.OVERLAPS,
+ "Testing: CoreWarning produced during assemble", warning_token=CoreWarnings.OVERLAPS,
)
diff --git a/tests/frontend/configuredwarning/plugins/warninga.py b/tests/frontend/configuredwarning/plugins/warninga.py
index dde90bb42..9fd8dc61b 100644
--- a/tests/frontend/configuredwarning/plugins/warninga.py
+++ b/tests/frontend/configuredwarning/plugins/warninga.py
@@ -20,9 +20,7 @@ class WarningA(Element):
pass
def assemble(self, sandbox):
- self.warn(
- "Testing: warning-a produced during assemble", warning_token=WARNING_A
- )
+ self.warn("Testing: warning-a produced during assemble", warning_token=WARNING_A)
def setup():
diff --git a/tests/frontend/configuredwarning/plugins/warningb.py b/tests/frontend/configuredwarning/plugins/warningb.py
index d9229f0d0..64d25ef39 100644
--- a/tests/frontend/configuredwarning/plugins/warningb.py
+++ b/tests/frontend/configuredwarning/plugins/warningb.py
@@ -20,9 +20,7 @@ class WarningB(Element):
pass
def assemble(self, sandbox):
- self.warn(
- "Testing: warning-b produced during assemble", warning_token=WARNING_B
- )
+ self.warn("Testing: warning-b produced during assemble", warning_token=WARNING_B)
def setup():
diff --git a/tests/frontend/consistencyerror/plugins/consistencyerror.py b/tests/frontend/consistencyerror/plugins/consistencyerror.py
index 656bd981c..125baf39c 100644
--- a/tests/frontend/consistencyerror/plugins/consistencyerror.py
+++ b/tests/frontend/consistencyerror/plugins/consistencyerror.py
@@ -14,9 +14,7 @@ class ConsistencyErrorSource(Source):
def get_consistency(self):
# Raise an error unconditionally
- raise SourceError(
- "Something went terribly wrong", reason="the-consistency-error"
- )
+ raise SourceError("Something went terribly wrong", reason="the-consistency-error")
def get_ref(self):
return None
diff --git a/tests/frontend/cross_junction_workspace.py b/tests/frontend/cross_junction_workspace.py
index 90e68d8ac..3ac3e8814 100644
--- a/tests/frontend/cross_junction_workspace.py
+++ b/tests/frontend/cross_junction_workspace.py
@@ -27,8 +27,7 @@ def prepare_junction_project(cli, tmpdir):
import_ref = import_repo.create(str(import_dir))
_yaml.roundtrip_dump(
- {"kind": "import", "sources": [import_repo.source_config(ref=import_ref)]},
- str(sub_project.join("data.bst")),
+ {"kind": "import", "sources": [import_repo.source_config(ref=import_ref)]}, str(sub_project.join("data.bst")),
)
sub_repo_dir = tmpdir.join("sub_repo")
@@ -37,8 +36,7 @@ def prepare_junction_project(cli, tmpdir):
sub_ref = sub_repo.create(str(sub_project))
_yaml.roundtrip_dump(
- {"kind": "junction", "sources": [sub_repo.source_config(ref=sub_ref)]},
- str(main_project.join("sub.bst")),
+ {"kind": "junction", "sources": [sub_repo.source_config(ref=sub_ref)]}, str(main_project.join("sub.bst")),
)
args = ["source", "fetch", "sub.bst"]
diff --git a/tests/frontend/fetch.py b/tests/frontend/fetch.py
index d34764d13..10a420ddd 100644
--- a/tests/frontend/fetch.py
+++ b/tests/frontend/fetch.py
@@ -85,9 +85,7 @@ def test_unfetched_junction(cli, tmpdir, datafiles, strict, ref_storage):
cli.configure({"projects": {"test": {"strict": strict}}})
# Create a repo to hold the subproject and generate a junction element for it
- ref = generate_junction(
- tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline")
- )
+ ref = generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline"))
# Create a stack element to depend on a cross junction element
#
diff --git a/tests/frontend/help.py b/tests/frontend/help.py
index 3bbae44f5..de3b0e678 100644
--- a/tests/frontend/help.py
+++ b/tests/frontend/help.py
@@ -9,8 +9,7 @@ def assert_help(cli_output):
expected_start = "Usage: "
if not cli_output.startswith(expected_start):
raise AssertionError(
- "Help output expected to begin with '{}',".format(expected_start)
- + " output was: {}".format(cli_output)
+ "Help output expected to begin with '{}',".format(expected_start) + " output was: {}".format(cli_output)
)
@@ -21,16 +20,7 @@ def test_help_main(cli):
@pytest.mark.parametrize(
- "command",
- [
- ("artifact"),
- ("build"),
- ("checkout"),
- ("shell"),
- ("show"),
- ("source"),
- ("workspace"),
- ],
+ "command", [("artifact"), ("build"), ("checkout"), ("shell"), ("show"), ("source"), ("workspace"),],
)
def test_help(cli, command):
result = cli.run(args=[command, "--help"])
diff --git a/tests/frontend/init.py b/tests/frontend/init.py
index 01686b7c6..aef9d148e 100644
--- a/tests/frontend/init.py
+++ b/tests/frontend/init.py
@@ -29,16 +29,7 @@ def test_all_options(cli, tmpdir):
project_path = os.path.join(project, "project.conf")
result = cli.run(
- args=[
- "init",
- "--project-name",
- "foo",
- "--format-version",
- "2",
- "--element-path",
- "ponies",
- project,
- ]
+ args=["init", "--project-name", "foo", "--format-version", "2", "--element-path", "ponies", project,]
)
result.assert_success()
@@ -96,9 +87,7 @@ def test_relative_path_directory_as_argument(cli, tmpdir):
def test_set_directory_and_directory_as_argument(cli, tmpdir):
- result = cli.run(
- args=["-C", "/foo/bar", "init", "--project-name", "foo", "/boo/far"]
- )
+ result = cli.run(args=["-C", "/foo/bar", "init", "--project-name", "foo", "/boo/far"])
result.assert_main_error(ErrorDomain.APP, "init-with-set-directory")
@@ -110,33 +99,13 @@ def test_bad_project_name(cli, tmpdir, project_name):
@pytest.mark.parametrize("format_version", [(str(-1)), (str(BST_FORMAT_VERSION + 1))])
def test_bad_format_version(cli, tmpdir, format_version):
- result = cli.run(
- args=[
- "init",
- "--project-name",
- "foo",
- "--format-version",
- format_version,
- str(tmpdir),
- ]
- )
+ result = cli.run(args=["init", "--project-name", "foo", "--format-version", format_version, str(tmpdir),])
result.assert_main_error(ErrorDomain.APP, "invalid-format-version")
-@pytest.mark.parametrize(
- "element_path", [("/absolute/path"), ("../outside/of/project")]
-)
+@pytest.mark.parametrize("element_path", [("/absolute/path"), ("../outside/of/project")])
def test_bad_element_path(cli, tmpdir, element_path):
- result = cli.run(
- args=[
- "init",
- "--project-name",
- "foo",
- "--element-path",
- element_path,
- str(tmpdir),
- ]
- )
+ result = cli.run(args=["init", "--project-name", "foo", "--element-path", element_path, str(tmpdir),])
result.assert_main_error(ErrorDomain.APP, "invalid-element-path")
@@ -154,9 +123,7 @@ def test_element_path_interactive(cli, tmp_path, monkeypatch, element_path):
def create(cls, *args, **kwargs):
return DummyInteractiveApp(*args, **kwargs)
- def _init_project_interactive(
- self, *args, **kwargs
- ): # pylint: disable=arguments-differ
+ def _init_project_interactive(self, *args, **kwargs): # pylint: disable=arguments-differ
return ("project_name", "0", element_path)
monkeypatch.setattr(App, "create", DummyInteractiveApp.create)
diff --git a/tests/frontend/large_directory.py b/tests/frontend/large_directory.py
index e01d5f3c6..ea29fd1ca 100644
--- a/tests/frontend/large_directory.py
+++ b/tests/frontend/large_directory.py
@@ -37,9 +37,7 @@ def limit_grpc_message_length(limit):
orig_insecure_channel = grpc.insecure_channel
def new_insecure_channel(target):
- return orig_insecure_channel(
- target, options=(("grpc.max_send_message_length", limit),)
- )
+ return orig_insecure_channel(target, options=(("grpc.max_send_message_length", limit),))
grpc.insecure_channel = new_insecure_channel
try:
@@ -71,9 +69,7 @@ def test_large_directory(cli, tmpdir, datafiles):
# Enforce 1 MB gRPC message limit
with limit_grpc_message_length(MAX_MESSAGE_LENGTH):
# Build and push
- result = cli.run(
- project=project, args=["build", "import-large-directory.bst"]
- )
+ result = cli.run(project=project, args=["build", "import-large-directory.bst"])
result.assert_success()
# Assert that we are now cached locally
diff --git a/tests/frontend/logging.py b/tests/frontend/logging.py
index d4f8d0d23..27ff88352 100644
--- a/tests/frontend/logging.py
+++ b/tests/frontend/logging.py
@@ -37,9 +37,7 @@ def test_default_logging(cli, tmpdir, datafiles):
result = cli.run(project=project, args=["source", "fetch", element_name])
result.assert_success()
- m = re.search(
- r"\[\d\d:\d\d:\d\d\]\[\s*\]\[.*\] SUCCESS Checking sources", result.stderr
- )
+ m = re.search(r"\[\d\d:\d\d:\d\d\]\[\s*\]\[.*\] SUCCESS Checking sources", result.stderr)
assert m is not None
@@ -51,8 +49,7 @@ def test_custom_logging(cli, tmpdir, datafiles):
element_name = "fetch-test-git.bst"
custom_log_format = (
- "%{elapsed},%{elapsed-us},%{wallclock},%{wallclock-us},"
- "%{key},%{element},%{action},%{message}"
+ "%{elapsed},%{elapsed-us},%{wallclock},%{wallclock-us}," "%{key},%{element},%{action},%{message}"
)
user_config = {"logging": {"message-format": custom_log_format}}
cli.configure(user_config)
@@ -72,8 +69,7 @@ def test_custom_logging(cli, tmpdir, datafiles):
result.assert_success()
m = re.search(
- r"\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6},\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6}\s*,.*"
- r",SUCCESS,Checking sources",
+ r"\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6},\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6}\s*,.*" r",SUCCESS,Checking sources",
result.stderr,
)
assert m is not None
@@ -89,9 +85,7 @@ def test_failed_build_listing(cli, datafiles):
element = {"kind": "script", "config": {"commands": ["false"]}}
_yaml.roundtrip_dump(element, os.path.join(project, element_path))
element_names.append(element_name)
- result = cli.run(
- project=project, args=["--on-error=continue", "build", *element_names]
- )
+ result = cli.run(project=project, args=["--on-error=continue", "build", *element_names])
result.assert_main_error(ErrorDomain.STREAM, None)
# Check that we re-print the failure summaries only in the "Failure Summary"
@@ -102,12 +96,8 @@ def test_failed_build_listing(cli, datafiles):
# testfail-0.bst:
# [00:00:00][44f1b8c3][ build:testfail-0.bst ] FAILURE Running 'commands'
#
- failure_heading_pos = re.search(
- r"^Failure Summary$", result.stderr, re.MULTILINE
- ).start()
- pipeline_heading_pos = re.search(
- r"^Pipeline Summary$", result.stderr, re.MULTILINE
- ).start()
+ failure_heading_pos = re.search(r"^Failure Summary$", result.stderr, re.MULTILINE).start()
+ pipeline_heading_pos = re.search(r"^Pipeline Summary$", result.stderr, re.MULTILINE).start()
failure_summary_range = range(failure_heading_pos, pipeline_heading_pos)
matches = tuple(re.finditer(r"^\s+testfail-.\.bst:$", result.stderr, re.MULTILINE))
for m in matches:
@@ -119,6 +109,4 @@ def test_failed_build_listing(cli, datafiles):
# with the name of the relevant element, e.g. 'testfail-1.bst'. Check that
# they have the name as expected.
pattern = r"\[..:..:..\] FAILURE testfail-.\.bst: Staged artifacts do not provide command 'sh'"
- assert (
- len(re.findall(pattern, result.stderr, re.MULTILINE)) == 6
- ) # each element should be matched twice.
+ assert len(re.findall(pattern, result.stderr, re.MULTILINE)) == 6 # each element should be matched twice.
diff --git a/tests/frontend/mirror.py b/tests/frontend/mirror.py
index dbd21e1e9..1146893cd 100644
--- a/tests/frontend/mirror.py
+++ b/tests/frontend/mirror.py
@@ -48,9 +48,7 @@ def generate_project():
{"name": "arrakis", "aliases": {"foo": ["OFO/"], "bar": ["RBA/"],},},
{"name": "oz", "aliases": {"foo": ["ooF/"], "bar": ["raB/"],}},
],
- "plugins": [
- {"origin": "local", "path": "sources", "sources": {"fetch_source": 0}}
- ],
+ "plugins": [{"origin": "local", "path": "sources", "sources": {"fetch_source": 0}}],
}
return project
@@ -75,11 +73,7 @@ def test_mirror_fetch_ref_storage(cli, tmpdir, datafiles, ref_storage, mirror):
element = {
"kind": "import",
- "sources": [
- upstream_repo.source_config(
- ref=upstream_ref if ref_storage == "inline" else None
- )
- ],
+ "sources": [upstream_repo.source_config(ref=upstream_ref if ref_storage == "inline" else None)],
}
element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
@@ -109,11 +103,7 @@ def test_mirror_fetch_ref_storage(cli, tmpdir, datafiles, ref_storage, mirror):
mirror_data = [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"]}}]
if mirror == "unrelated-mirror":
mirror_data.insert(
- 0,
- {
- "name": "narnia",
- "aliases": {"frob": ["http://www.example.com/repo"]},
- },
+ 0, {"name": "narnia", "aliases": {"frob": ["http://www.example.com/repo"]},},
)
project["mirrors"] = mirror_data
@@ -164,10 +154,7 @@ def test_mirror_fetch_default_cmdline(cli, tmpdir):
project = generate_project()
_yaml.roundtrip_dump(project, project_file)
- result = cli.run(
- project=project_dir,
- args=["--default-mirror", "arrakis", "source", "fetch", element_name],
- )
+ result = cli.run(project=project_dir, args=["--default-mirror", "arrakis", "source", "fetch", element_name],)
result.assert_success()
with open(output_file) as f:
contents = f.read()
@@ -179,9 +166,7 @@ def test_mirror_fetch_default_cmdline(cli, tmpdir):
me_str = "OOF/repo1"
me_pos = contents.find(me_str)
assert me_pos != -1, "'{}' wasn't found".format(me_str)
- assert arrakis_pos < me_pos, "'{}' wasn't found before '{}'".format(
- arrakis_str, me_str
- )
+ assert arrakis_pos < me_pos, "'{}' wasn't found before '{}'".format(arrakis_str, me_str)
@pytest.mark.datafiles(DATA_DIR)
@@ -237,10 +222,7 @@ def test_mirror_fetch_default_cmdline_overrides_config(cli, tmpdir):
userconfig = {"projects": {"test": {"default-mirror": "oz"}}}
cli.configure(userconfig)
- result = cli.run(
- project=project_dir,
- args=["--default-mirror", "arrakis", "source", "fetch", element_name],
- )
+ result = cli.run(project=project_dir, args=["--default-mirror", "arrakis", "source", "fetch", element_name],)
result.assert_success()
with open(output_file) as f:
contents = f.read()
@@ -252,9 +234,7 @@ def test_mirror_fetch_default_cmdline_overrides_config(cli, tmpdir):
me_str = "OOF/repo1"
me_pos = contents.find(me_str)
assert me_pos != -1, "'{}' wasn't found".format(me_str)
- assert arrakis_pos < me_pos, "'{}' wasn't found before '{}'".format(
- arrakis_str, me_str
- )
+ assert arrakis_pos < me_pos, "'{}' wasn't found before '{}'".format(arrakis_str, me_str)
@pytest.mark.datafiles(DATA_DIR)
@@ -317,9 +297,7 @@ def test_mirror_git_submodule_fetch(cli, tmpdir, datafiles):
"name": "test",
"element-path": "elements",
"aliases": {alias: "http://www.example.com/"},
- "mirrors": [
- {"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],},},
- ],
+ "mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],},},],
}
project_file = os.path.join(project_dir, "project.conf")
_yaml.roundtrip_dump(project, project_file)
@@ -382,9 +360,7 @@ def test_mirror_fallback_git_only_submodules(cli, tmpdir, datafiles):
element = {
"kind": "import",
- "sources": [
- main_repo.source_config_extra(ref=main_ref, checkout_submodules=True)
- ],
+ "sources": [main_repo.source_config_extra(ref=main_ref, checkout_submodules=True)],
}
element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
@@ -409,10 +385,7 @@ def test_mirror_fallback_git_only_submodules(cli, tmpdir, datafiles):
result.assert_success()
checkout = os.path.join(str(tmpdir), "checkout")
- result = cli.run(
- project=project_dir,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project_dir, args=["artifact", "checkout", element_name, "--directory", checkout],)
result.assert_success()
assert os.path.exists(os.path.join(checkout, "bin", "bin", "hello"))
@@ -471,11 +444,7 @@ def test_mirror_fallback_git_with_submodules(cli, tmpdir, datafiles):
element = {
"kind": "import",
- "sources": [
- upstream_main_repo.source_config_extra(
- ref=upstream_main_ref, checkout_submodules=True
- )
- ],
+ "sources": [upstream_main_repo.source_config_extra(ref=upstream_main_ref, checkout_submodules=True)],
}
element["sources"][0]["url"] = aliased_repo
element_name = "test.bst"
@@ -501,10 +470,7 @@ def test_mirror_fallback_git_with_submodules(cli, tmpdir, datafiles):
result.assert_success()
checkout = os.path.join(str(tmpdir), "checkout")
- result = cli.run(
- project=project_dir,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project_dir, args=["artifact", "checkout", element_name, "--directory", checkout],)
result.assert_success()
assert os.path.exists(os.path.join(checkout, "bin", "bin", "hello"))
diff --git a/tests/frontend/order.py b/tests/frontend/order.py
index a66064694..9032379ef 100644
--- a/tests/frontend/order.py
+++ b/tests/frontend/order.py
@@ -57,12 +57,7 @@ def create_element(project, name, dependencies):
# First simple test
(
"3.bst",
- {
- "0.bst": ["1.bst"],
- "1.bst": [],
- "2.bst": ["0.bst"],
- "3.bst": ["0.bst", "1.bst", "2.bst"],
- },
+ {"0.bst": ["1.bst"], "1.bst": [], "2.bst": ["0.bst"], "3.bst": ["0.bst", "1.bst", "2.bst"],},
["1.bst", "0.bst", "2.bst", "3.bst"],
),
# A more complicated test with build of build dependencies
@@ -74,22 +69,9 @@ def create_element(project, name, dependencies):
"timezones.bst": [],
"middleware.bst": [{"filename": "base.bst", "type": "build"}],
"app.bst": [{"filename": "middleware.bst", "type": "build"}],
- "target.bst": [
- "a.bst",
- "base.bst",
- "middleware.bst",
- "app.bst",
- "timezones.bst",
- ],
+ "target.bst": ["a.bst", "base.bst", "middleware.bst", "app.bst", "timezones.bst",],
},
- [
- "base.bst",
- "middleware.bst",
- "a.bst",
- "app.bst",
- "timezones.bst",
- "target.bst",
- ],
+ ["base.bst", "middleware.bst", "a.bst", "app.bst", "timezones.bst", "target.bst",],
),
],
)
@@ -109,18 +91,12 @@ def test_order(cli, datafiles, operation, target, template, expected):
# Run test and collect results
if operation == "show":
- result = cli.run(
- args=["show", "--deps", "plan", "--format", "%{name}", target],
- project=project,
- silent=True,
- )
+ result = cli.run(args=["show", "--deps", "plan", "--format", "%{name}", target], project=project, silent=True,)
result.assert_success()
results = result.output.splitlines()
else:
if operation == "fetch":
- result = cli.run(
- args=["source", "fetch", target], project=project, silent=True
- )
+ result = cli.run(args=["source", "fetch", target], project=project, silent=True)
else:
result = cli.run(args=[operation, target], project=project, silent=True)
result.assert_success()
diff --git a/tests/frontend/overlaps.py b/tests/frontend/overlaps.py
index 4f6f72af5..d3e0c9d60 100644
--- a/tests/frontend/overlaps.py
+++ b/tests/frontend/overlaps.py
@@ -13,9 +13,7 @@ from tests.testutils import generate_junction
DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "overlaps")
-def gen_project(
- project_dir, fail_on_overlap, use_fatal_warnings=True, project_name="test"
-):
+def gen_project(project_dir, fail_on_overlap, use_fatal_warnings=True, project_name="test"):
template = {"name": project_name}
if use_fatal_warnings:
template["fatal-warnings"] = [CoreWarnings.OVERLAPS] if fail_on_overlap else []
@@ -48,9 +46,7 @@ def test_overlaps_error(cli, datafiles, use_fatal_warnings):
def test_overlaps_whitelist(cli, datafiles):
project_dir = str(datafiles)
gen_project(project_dir, True)
- result = cli.run(
- project=project_dir, silent=True, args=["build", "collect-whitelisted.bst"]
- )
+ result = cli.run(project=project_dir, silent=True, args=["build", "collect-whitelisted.bst"])
result.assert_success()
@@ -58,9 +54,7 @@ def test_overlaps_whitelist(cli, datafiles):
def test_overlaps_whitelist_ignored(cli, datafiles):
project_dir = str(datafiles)
gen_project(project_dir, False)
- result = cli.run(
- project=project_dir, silent=True, args=["build", "collect-whitelisted.bst"]
- )
+ result = cli.run(project=project_dir, silent=True, args=["build", "collect-whitelisted.bst"])
result.assert_success()
@@ -71,11 +65,7 @@ def test_overlaps_whitelist_on_overlapper(cli, datafiles):
# it'll still fail because A doesn't permit overlaps.
project_dir = str(datafiles)
gen_project(project_dir, True)
- result = cli.run(
- project=project_dir,
- silent=True,
- args=["build", "collect-partially-whitelisted.bst"],
- )
+ result = cli.run(project=project_dir, silent=True, args=["build", "collect-partially-whitelisted.bst"],)
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.PLUGIN, CoreWarnings.OVERLAPS)
@@ -100,9 +90,7 @@ def test_overlap_subproject(cli, tmpdir, datafiles, project_policy, subproject_p
junction_path = os.path.join(project_dir, "sub-project.bst")
gen_project(project_dir, bool(project_policy == "fail"), project_name="test")
- gen_project(
- subproject_dir, bool(subproject_policy == "fail"), project_name="subtest"
- )
+ gen_project(subproject_dir, bool(subproject_policy == "fail"), project_name="subtest")
generate_junction(tmpdir, subproject_dir, junction_path)
# Here we have a dependency chain where the project element
@@ -111,9 +99,7 @@ def test_overlap_subproject(cli, tmpdir, datafiles, project_policy, subproject_p
# Test that overlap error vs warning policy for this overlap
# is always controlled by the project and not the subproject.
#
- result = cli.run(
- project=project_dir, silent=True, args=["build", "sub-collect.bst"]
- )
+ result = cli.run(project=project_dir, silent=True, args=["build", "sub-collect.bst"])
if project_policy == "fail":
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.PLUGIN, CoreWarnings.OVERLAPS)
diff --git a/tests/frontend/progress.py b/tests/frontend/progress.py
index 3ca81f543..86abe830c 100644
--- a/tests/frontend/progress.py
+++ b/tests/frontend/progress.py
@@ -43,9 +43,7 @@ def test_junction_tally(cli, tmpdir, datafiles):
}
_yaml.roundtrip_dump(element, element_path)
- result = cli.run(
- project=project, silent=True, args=["source", "fetch", "junction.bst"]
- )
+ result = cli.run(project=project, silent=True, args=["source", "fetch", "junction.bst"])
result.assert_success()
# Assert the correct progress tallies are in the logging
@@ -62,9 +60,7 @@ def test_nested_junction_tally(cli, tmpdir, datafiles):
sub1_path = os.path.join(project, "files", "sub-project")
sub2_path = os.path.join(project, "files", "sub2-project")
# A junction element which pulls sub1 into sub2
- sub1_element = os.path.join(
- project, "files", "sub2-project", "elements", "sub-junction.bst"
- )
+ sub1_element = os.path.join(project, "files", "sub2-project", "elements", "sub-junction.bst")
# A junction element which pulls sub2 into the main project
sub2_element = os.path.join(project, "elements", "junction.bst")
element_path = os.path.join(project, "elements", "junction-dep.bst")
@@ -80,9 +76,7 @@ def test_nested_junction_tally(cli, tmpdir, datafiles):
}
_yaml.roundtrip_dump(element, element_path)
- result = cli.run(
- project=project, silent=True, args=["source", "fetch", "junction.bst"]
- )
+ result = cli.run(project=project, silent=True, args=["source", "fetch", "junction.bst"])
result.assert_success()
# Assert the correct progress tallies are in the logging
@@ -116,9 +110,7 @@ def test_junction_dep_tally(cli, tmpdir, datafiles):
}
_yaml.roundtrip_dump(element, element_path)
- result = cli.run(
- project=project, silent=True, args=["source", "fetch", "junction-dep.bst"]
- )
+ result = cli.run(project=project, silent=True, args=["source", "fetch", "junction-dep.bst"])
# Since we aren't allowed to specify any dependencies on a
# junction, we should fail
diff --git a/tests/frontend/project/sources/fetch_source.py b/tests/frontend/project/sources/fetch_source.py
index cb3ab024e..51bfe1049 100644
--- a/tests/frontend/project/sources/fetch_source.py
+++ b/tests/frontend/project/sources/fetch_source.py
@@ -22,16 +22,10 @@ class FetchFetcher(SourceFetcher):
self.mark_download_url(url)
def fetch(self, alias_override=None):
- url = self.source.translate_url(
- self.original_url, alias_override=alias_override, primary=self.primary
- )
+ url = self.source.translate_url(self.original_url, alias_override=alias_override, primary=self.primary)
with open(self.source.output_file, "a") as f:
- success = (
- url in self.source.fetch_succeeds and self.source.fetch_succeeds[url]
- )
- message = "Fetch {} {} from {}\n".format(
- self.original_url, "succeeded" if success else "failed", url
- )
+ success = url in self.source.fetch_succeeds and self.source.fetch_succeeds[url]
+ message = "Fetch {} {} from {}\n".format(self.original_url, "succeeded" if success else "failed", url)
f.write(message)
if not success:
raise SourceError("Failed to fetch {}".format(url))
@@ -42,10 +36,7 @@ class FetchSource(Source):
def configure(self, node):
self.original_urls = node.get_str_list("urls")
self.output_file = node.get_str("output-text")
- self.fetch_succeeds = {
- key: value.as_bool()
- for key, value in node.get_mapping("fetch-succeeds", {}).items()
- }
+ self.fetch_succeeds = {key: value.as_bool() for key, value in node.get_mapping("fetch-succeeds", {}).items()}
# First URL is the primary one for this test
#
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index 970987d36..100a9a914 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -60,9 +60,7 @@ def test_push_pull_all(cli, tmpdir, datafiles):
assert not any(states[e] == "cached" for e in all_elements)
# Now try bst artifact pull
- result = cli.run(
- project=project, args=["artifact", "pull", "--deps", "all", "target.bst"]
- )
+ result = cli.run(project=project, args=["artifact", "pull", "--deps", "all", "target.bst"])
result.assert_success()
# And assert that it's again in the local cache, without having built
@@ -132,21 +130,12 @@ def test_push_pull_default_targets(cli, tmpdir, datafiles):
def test_pull_secondary_cache(cli, tmpdir, datafiles):
project = str(datafiles)
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare1")
- ) as share1, create_artifact_share(
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as share1, create_artifact_share(
os.path.join(str(tmpdir), "artifactshare2")
) as share2:
# Build the target and push it to share2 only.
- cli.configure(
- {
- "artifacts": [
- {"url": share1.repo, "push": False},
- {"url": share2.repo, "push": True},
- ]
- }
- )
+ cli.configure({"artifacts": [{"url": share1.repo, "push": False}, {"url": share2.repo, "push": True},]})
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
@@ -180,9 +169,7 @@ def test_pull_secondary_cache(cli, tmpdir, datafiles):
def test_push_pull_specific_remote(cli, tmpdir, datafiles):
project = str(datafiles)
- with create_artifact_share(
- os.path.join(str(tmpdir), "goodartifactshare")
- ) as good_share, create_artifact_share(
+ with create_artifact_share(os.path.join(str(tmpdir), "goodartifactshare")) as good_share, create_artifact_share(
os.path.join(str(tmpdir), "badartifactshare")
) as bad_share:
@@ -200,10 +187,7 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles):
)
# Now try `bst artifact push` to the good_share.
- result = cli.run(
- project=project,
- args=["artifact", "push", "target.bst", "--remote", good_share.repo],
- )
+ result = cli.run(project=project, args=["artifact", "push", "target.bst", "--remote", good_share.repo],)
result.assert_success()
# Assert that all the artifacts are in the share we pushed
@@ -219,10 +203,7 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles):
artifactdir = os.path.join(cli.directory, "artifacts")
shutil.rmtree(artifactdir)
- result = cli.run(
- project=project,
- args=["artifact", "pull", "target.bst", "--remote", good_share.repo],
- )
+ result = cli.run(project=project, args=["artifact", "pull", "target.bst", "--remote", good_share.repo],)
result.assert_success()
# And assert that it's again in the local cache, without having built
@@ -240,10 +221,7 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# First build the target element and push to the remote.
cli.configure(
- {
- "artifacts": {"url": share.repo, "push": True},
- "projects": {"test": {"strict": False}},
- }
+ {"artifacts": {"url": share.repo, "push": True}, "projects": {"test": {"strict": False}},}
)
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
@@ -272,9 +250,7 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
assert cli.get_element_state(project, element_name) != "cached"
# Add a file to force a change in the strict cache key of import-bin.bst
- with open(
- os.path.join(str(project), "files", "bin-files", "usr", "bin", "world"), "w"
- ) as f:
+ with open(os.path.join(str(project), "files", "bin-files", "usr", "bin", "world"), "w") as f:
f.write("world")
# Assert that the workspaced element requires a rebuild
@@ -283,9 +259,7 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
assert cli.get_element_state(project, "target.bst") == "waiting"
# Now try bst artifact pull
- result = cli.run(
- project=project, args=["artifact", "pull", "--deps", "all", "target.bst"]
- )
+ result = cli.run(project=project, args=["artifact", "pull", "--deps", "all", "target.bst"])
result.assert_success()
# And assert that the target is again in the local cache, without having built
@@ -313,14 +287,10 @@ def test_push_pull_cross_junction(cli, tmpdir, datafiles):
artifact_dir = os.path.join(project, "cache", "artifacts")
shutil.rmtree(artifact_dir)
- assert (
- cli.get_element_state(project, "junction.bst:import-etc.bst") == "buildable"
- )
+ assert cli.get_element_state(project, "junction.bst:import-etc.bst") == "buildable"
# Now try bst artifact pull
- result = cli.run(
- project=project, args=["artifact", "pull", "junction.bst:import-etc.bst"]
- )
+ result = cli.run(project=project, args=["artifact", "pull", "junction.bst:import-etc.bst"])
result.assert_success()
# And assert that it's again in the local cache, without having built
@@ -418,9 +388,7 @@ def test_pull_missing_local_blob(cli, tmpdir, datafiles):
# This is a placeholder to test partial CAS handling until we support
# partial artifact pulling (or blob-based CAS expiry).
#
- digest = utils.sha256sum(
- os.path.join(project, "files", "bin-files", "usr", "bin", "hello")
- )
+ digest = utils.sha256sum(os.path.join(project, "files", "bin-files", "usr", "bin", "hello"))
objpath = os.path.join(cli.directory, "cas", "objects", digest[:2], digest[2:])
os.unlink(objpath)
@@ -443,9 +411,7 @@ def test_pull_missing_notifies_user(caplog, cli, tmpdir, datafiles):
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- assert (
- not result.get_pulled_elements()
- ), "No elements should have been pulled since the cache was empty"
+ assert not result.get_pulled_elements(), "No elements should have been pulled since the cache was empty"
assert "INFO Remote ({}) does not have".format(share.repo) in result.stderr
assert "SKIPPED Pull" in result.stderr
@@ -456,19 +422,13 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
project = str(datafiles)
caplog.set_level(1)
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare1")
- ) as shareuser, create_artifact_share(
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as shareuser, create_artifact_share(
os.path.join(str(tmpdir), "artifactshare2")
- ) as shareproject, create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare3")
- ) as sharecli:
+ ) as shareproject, create_artifact_share(os.path.join(str(tmpdir), "artifactshare3")) as sharecli:
# Add shareproject repo url to project.conf
with open(os.path.join(project, "project.conf"), "a") as projconf:
- projconf.write(
- "artifacts:\n url: {}\n push: True".format(shareproject.repo)
- )
+ projconf.write("artifacts:\n url: {}\n push: True".format(shareproject.repo))
# Configure shareuser remote in user conf
cli.configure({"artifacts": {"url": shareuser.repo, "push": True}})
@@ -489,9 +449,7 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
# Now check that a build with cli set as sharecli results in nothing being pulled,
# as it doesn't have them cached and shareuser/shareproject should be ignored. This
# will however result in the artifacts being built and pushed to it
- result = cli.run(
- project=project, args=["build", "--remote", sharecli.repo, "target.bst"]
- )
+ result = cli.run(project=project, args=["build", "--remote", sharecli.repo, "target.bst"])
result.assert_success()
for element_name in all_elements:
assert element_name not in result.get_pulled_elements()
@@ -500,9 +458,7 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
# Now check that a clean build with cli set as sharecli results in artifacts only
# being pulled from it, as that was provided via the cli and is populated
- result = cli.run(
- project=project, args=["build", "--remote", sharecli.repo, "target.bst"]
- )
+ result = cli.run(project=project, args=["build", "--remote", sharecli.repo, "target.bst"])
result.assert_success()
for element_name in all_elements:
assert cli.get_element_state(project, element_name) == "cached"
@@ -616,9 +572,7 @@ def test_pull_artifact(cli, tmpdir, datafiles):
# Assert that the *artifact* is cached locally
cache_key = cli.get_element_key(project, element)
artifact_ref = os.path.join("test", os.path.splitext(element)[0], cache_key)
- assert os.path.exists(
- os.path.join(local_cache, "artifacts", "refs", artifact_ref)
- )
+ assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))
# Assert that the target is shared (note that assert_shared will use the artifact name)
assert_shared(cli, share, project, element)
@@ -627,15 +581,11 @@ def test_pull_artifact(cli, tmpdir, datafiles):
shutil.rmtree(os.path.join(local_cache, "artifacts"))
# Assert that nothing is cached locally anymore
- assert not os.path.exists(
- os.path.join(local_cache, "artifacts", "refs", artifact_ref)
- )
+ assert not os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))
# Now try bst artifact pull
result = cli.run(project=project, args=["artifact", "pull", artifact_ref])
result.assert_success()
# And assert that it's again in the local cache, without having built
- assert os.path.exists(
- os.path.join(local_cache, "artifacts", "refs", artifact_ref)
- )
+ assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))
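These ref assertions rely on the artifact ref layout <project>/<element name without .bst>/<cache key>. A small sketch of how such a ref is composed (the cache key below is a placeholder; real keys are content hashes):

import os

element = "target.bst"
cache_key = "0" * 64  # placeholder cache key
artifact_ref = os.path.join("test", os.path.splitext(element)[0], cache_key)
# Resolves under the local cache as artifacts/refs/test/target/000...0
print(artifact_ref)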
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 21a47838c..6e2e283cd 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -61,9 +61,7 @@ def test_push(cli, tmpdir, datafiles):
# Set up two artifact shares.
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as share1:
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare2")
- ) as share2:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare2")) as share2:
# Try pushing with no remotes configured. This should fail.
result = cli.run(project=project, args=["artifact", "push", "target.bst"])
@@ -78,14 +76,7 @@ def test_push(cli, tmpdir, datafiles):
result.assert_main_error(ErrorDomain.STREAM, None)
# Configure bst to push to one of the caches and run `bst artifact push`. This works.
- cli.configure(
- {
- "artifacts": [
- {"url": share1.repo, "push": False},
- {"url": share2.repo, "push": True},
- ]
- }
- )
+ cli.configure({"artifacts": [{"url": share1.repo, "push": False}, {"url": share2.repo, "push": True},]})
cli.run(project=project, args=["artifact", "push", "target.bst"])
assert_not_shared(cli, share1, project, "target.bst")
@@ -93,17 +84,8 @@ def test_push(cli, tmpdir, datafiles):
# Now try pushing to both
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare2")
- ) as share2:
- cli.configure(
- {
- "artifacts": [
- {"url": share1.repo, "push": True},
- {"url": share2.repo, "push": True},
- ]
- }
- )
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare2")) as share2:
+ cli.configure({"artifacts": [{"url": share1.repo, "push": True}, {"url": share2.repo, "push": True},]})
cli.run(project=project, args=["artifact", "push", "target.bst"])
assert_shared(cli, share1, project, "target.bst")
@@ -129,9 +111,7 @@ def test_push_artifact(cli, tmpdir, datafiles):
# Assert that the *artifact* is cached locally
cache_key = cli.get_element_key(project, element)
artifact_ref = os.path.join("test", os.path.splitext(element)[0], cache_key)
- assert os.path.exists(
- os.path.join(local_cache, "artifacts", "refs", artifact_ref)
- )
+ assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))
# Configure artifact share
cli.configure(
@@ -215,15 +195,7 @@ def test_push_fails_with_on_error_continue(cli, tmpdir, datafiles):
# Now try and push the target with its deps using --on-error continue
# and assert that push failed, but what could be pushed was pushed
result = cli.run(
- project=project,
- args=[
- "--on-error=continue",
- "artifact",
- "push",
- "--deps",
- "all",
- "target.bst",
- ],
+ project=project, args=["--on-error=continue", "artifact", "push", "--deps", "all", "target.bst",],
)
# The overall process should return as failed
@@ -279,9 +251,7 @@ def test_push_all(cli, tmpdir, datafiles):
)
# Now try bst artifact push all the deps
- result = cli.run(
- project=project, args=["artifact", "push", "target.bst", "--deps", "all"]
- )
+ result = cli.run(project=project, args=["artifact", "push", "target.bst", "--deps", "all"])
result.assert_success()
# And finally assert that all the artifacts are in the share
@@ -310,9 +280,7 @@ def test_push_artifacts_all_deps_fails(cli, tmpdir, datafiles):
# Assert that the *artifact* is cached locally
cache_key = cli.get_element_key(project, element)
artifact_ref = os.path.join("test", os.path.splitext(element)[0], cache_key)
- assert os.path.exists(
- os.path.join(local_cache, "artifacts", "refs", artifact_ref)
- )
+ assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))
# Configure artifact share
cli.configure(
@@ -331,9 +299,7 @@ def test_push_artifacts_all_deps_fails(cli, tmpdir, datafiles):
)
# Now try bst artifact push all the deps
- result = cli.run(
- project=project, args=["artifact", "push", "--deps", "all", artifact_ref]
- )
+ result = cli.run(project=project, args=["artifact", "push", "--deps", "all", artifact_ref])
result.assert_main_error(ErrorDomain.STREAM, None)
assert "Error: '--deps all' is not supported for artifact refs" in result.stderr
@@ -347,9 +313,7 @@ def test_push_after_pull(cli, tmpdir, datafiles):
project = str(datafiles)
# Set up two artifact shares.
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare1")
- ) as share1, create_artifact_share(
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as share1, create_artifact_share(
os.path.join(str(tmpdir), "artifactshare2")
) as share2:
@@ -381,14 +345,7 @@ def test_push_after_pull(cli, tmpdir, datafiles):
# Now we add share2 into the mix as a second push remote. This time,
# `bst build` should push to share2 after pulling from share1.
- cli.configure(
- {
- "artifacts": [
- {"url": share1.repo, "push": True},
- {"url": share2.repo, "push": True},
- ]
- }
- )
+ cli.configure({"artifacts": [{"url": share1.repo, "push": True}, {"url": share2.repo, "push": True},]})
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
assert result.get_pulled_elements() == ["target.bst"]
@@ -405,9 +362,7 @@ def test_artifact_expires(cli, datafiles, tmpdir):
# Create an artifact share (remote artifact cache) in the tmpdir/artifactshare
# Set a 22 MB quota
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)
- ) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)) as share:
# Configure bst to push to the cache
cli.configure(
@@ -459,9 +414,7 @@ def test_artifact_too_large(cli, datafiles, tmpdir):
# Create an artifact share (remote cache) in tmpdir/artifactshare
# Mock a file system with 5 MB total space
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare"), quota=int(5e6)
- ) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(5e6)) as share:
# Configure bst to push to the remote cache
cli.configure(
@@ -488,9 +441,7 @@ def test_artifact_too_large(cli, datafiles, tmpdir):
result.assert_main_error(ErrorDomain.STREAM, None)
# Ensure that the small artifact is still in the share
- states = cli.get_element_states(
- project, ["small_element.bst", "large_element.bst"]
- )
+ states = cli.get_element_states(project, ["small_element.bst", "large_element.bst"])
assert states["small_element.bst"] == "cached"
assert_shared(cli, share, project, "small_element.bst")
@@ -507,9 +458,7 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
# Create an artifact share (remote cache) in tmpdir/artifactshare
# Set a 22 MB quota
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)
- ) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)) as share:
# Configure bst to push to the cache
cli.configure(
@@ -541,10 +490,7 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
assert cli.get_element_state(project, "element1.bst") != "cached"
# Pull the element1 from the remote cache (this should update its mtime)
- result = cli.run(
- project=project,
- args=["artifact", "pull", "element1.bst", "--remote", share.repo],
- )
+ result = cli.run(project=project, args=["artifact", "pull", "element1.bst", "--remote", share.repo],)
result.assert_success()
# Ensure element1 is cached locally
@@ -583,16 +529,10 @@ def test_push_cross_junction(cli, tmpdir, datafiles):
cli.configure(
{"artifacts": {"url": share.repo, "push": True},}
)
- cli.run(
- project=project, args=["artifact", "push", "junction.bst:import-etc.bst"]
- )
+ cli.run(project=project, args=["artifact", "push", "junction.bst:import-etc.bst"])
cache_key = cli.get_element_key(project, "junction.bst:import-etc.bst")
- assert share.get_artifact(
- cli.get_artifact_name(
- project, "subtest", "import-etc.bst", cache_key=cache_key
- )
- )
+ assert share.get_artifact(cli.get_artifact_name(project, "subtest", "import-etc.bst", cache_key=cache_key))
@pytest.mark.datafiles(DATA_DIR)
@@ -611,9 +551,7 @@ def test_push_already_cached(caplog, cli, tmpdir, datafiles):
result = cli.run(project=project, args=["artifact", "push", "target.bst"])
result.assert_success()
- assert (
- not result.get_pushed_elements()
- ), "No elements should have been pushed since the cache was populated"
+ assert not result.get_pushed_elements(), "No elements should have been pushed since the cache was populated"
assert "INFO Remote ({}) already has ".format(share.repo) in result.stderr
assert "SKIPPED Push" in result.stderr
@@ -623,26 +561,18 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
project = str(datafiles)
caplog.set_level(1)
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare1")
- ) as shareuser, create_artifact_share(
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as shareuser, create_artifact_share(
os.path.join(str(tmpdir), "artifactshare2")
- ) as shareproject, create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare3")
- ) as sharecli:
+ ) as shareproject, create_artifact_share(os.path.join(str(tmpdir), "artifactshare3")) as sharecli:
# Add shareproject repo url to project.conf
with open(os.path.join(project, "project.conf"), "a") as projconf:
- projconf.write(
- "artifacts:\n url: {}\n push: True".format(shareproject.repo)
- )
+ projconf.write("artifacts:\n url: {}\n push: True".format(shareproject.repo))
# Configure shareuser remote in user conf
cli.configure({"artifacts": {"url": shareuser.repo, "push": True}})
- result = cli.run(
- project=project, args=["build", "--remote", sharecli.repo, "target.bst"]
- )
+ result = cli.run(project=project, args=["build", "--remote", sharecli.repo, "target.bst"])
# Artifacts should have only been pushed to sharecli, as that was provided via the cli
result.assert_success()
@@ -668,10 +598,7 @@ def test_push_no_strict(caplog, cli, tmpdir, datafiles, buildtrees):
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
cli.configure(
- {
- "artifacts": {"url": share.repo, "push": True},
- "projects": {"test": {"strict": False}},
- }
+ {"artifacts": {"url": share.repo, "push": True}, "projects": {"test": {"strict": False}},}
)
# First get us a build
diff --git a/tests/frontend/rebuild.py b/tests/frontend/rebuild.py
index d3e36e6f4..1aef8e423 100644
--- a/tests/frontend/rebuild.py
+++ b/tests/frontend/rebuild.py
@@ -25,15 +25,11 @@ def test_rebuild(datafiles, cli, strict):
result.assert_success()
# Modify base import
- with open(
- os.path.join(project, "files", "dev-files", "usr", "include", "new.h"), "w"
- ) as f:
+ with open(os.path.join(project, "files", "dev-files", "usr", "include", "new.h"), "w") as f:
f.write("#define NEW")
# Rebuild base import and build top-level rebuild-target.bst
# In non-strict mode, this does not rebuild intermediate target.bst,
# which means that a weakly cached target.bst will be staged as a dependency.
- result = cli.run(
- project=project, args=strict_args(["build", "rebuild-target.bst"], strict)
- )
+ result = cli.run(project=project, args=strict_args(["build", "rebuild-target.bst"], strict))
result.assert_success()
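The strict/non-strict split exercised here is driven by per-project user configuration; a minimal sketch of the non-strict toggle, matching the "projects: test: strict: False" shape used throughout these tests:

config = {
    "projects": {"test": {"strict": False}},  # accept weak cache keys for this project
}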
diff --git a/tests/frontend/show.py b/tests/frontend/show.py
index a54d625ea..a686dbd2d 100644
--- a/tests/frontend/show.py
+++ b/tests/frontend/show.py
@@ -29,24 +29,14 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)),)
)
def test_show(cli, datafiles, target, fmt, expected):
project = str(datafiles)
- result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", fmt, target],
- )
+ result = cli.run(project=project, silent=True, args=["show", "--deps", "none", "--format", fmt, target],)
result.assert_success()
if result.output.strip() != expected:
- raise AssertionError(
- "Expected output:\n{}\nInstead received output:\n{}".format(
- expected, result.output
- )
- )
+ raise AssertionError("Expected output:\n{}\nInstead received output:\n{}".format(expected, result.output))
-@pytest.mark.datafiles(
- os.path.join(os.path.dirname(os.path.realpath(__file__)), "invalid_element_path",)
-)
+@pytest.mark.datafiles(os.path.join(os.path.dirname(os.path.realpath(__file__)), "invalid_element_path",))
def test_show_invalid_element_path(cli, datafiles):
project = str(datafiles)
cli.run(project=project, silent=True, args=["show", "foo.bst"])
@@ -77,16 +67,8 @@ def test_show_fail(cli, datafiles):
@pytest.mark.parametrize(
"target,except_,expected",
[
- (
- "target.bst",
- "import-bin.bst",
- ["import-dev.bst", "compose-all.bst", "target.bst"],
- ),
- (
- "target.bst",
- "import-dev.bst",
- ["import-bin.bst", "compose-all.bst", "target.bst"],
- ),
+ ("target.bst", "import-bin.bst", ["import-dev.bst", "compose-all.bst", "target.bst"],),
+ ("target.bst", "import-dev.bst", ["import-bin.bst", "compose-all.bst", "target.bst"],),
("target.bst", "compose-all.bst", ["import-bin.bst", "target.bst"]),
("compose-all.bst", "import-bin.bst", ["import-dev.bst", "compose-all.bst"]),
],
@@ -96,27 +78,14 @@ def test_show_except_simple(cli, datafiles, target, except_, expected):
result = cli.run(
project=project,
silent=True,
- args=[
- "show",
- "--deps",
- "all",
- "--format",
- "%{name}",
- "--except",
- except_,
- target,
- ],
+ args=["show", "--deps", "all", "--format", "%{name}", "--except", except_, target,],
)
result.assert_success()
results = result.output.strip().splitlines()
if results != expected:
- raise AssertionError(
- "Expected elements:\n{}\nInstead received elements:\n{}".format(
- expected, results
- )
- )
+ raise AssertionError("Expected elements:\n{}\nInstead received elements:\n{}".format(expected, results))
# This test checks various constructions of a pipeline
@@ -200,22 +169,14 @@ def test_show_except_simple(cli, datafiles, target, except_, expected):
],
),
# Test one target and excepting two elements
- (
- ["build.bst"],
- ["unrelated-1.bst", "unrelated-2.bst"],
- ["first-level-1.bst", "build.bst",],
- ),
+ (["build.bst"], ["unrelated-1.bst", "unrelated-2.bst"], ["first-level-1.bst", "build.bst",],),
],
)
def test_show_except(cli, datafiles, targets, exceptions, expected):
basedir = str(datafiles)
results = cli.get_pipeline(basedir, targets, except_=exceptions, scope="all")
if results != expected:
- raise AssertionError(
- "Expected elements:\n{}\nInstead received elements:\n{}".format(
- expected, results
- )
- )
+ raise AssertionError("Expected elements:\n{}\nInstead received elements:\n{}".format(expected, results))
###############################################################
@@ -271,13 +232,9 @@ def test_target_is_dependency(cli, datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
-@pytest.mark.parametrize(
- "element_name", ["junction-dep.bst", "junction.bst:import-etc.bst"]
-)
+@pytest.mark.parametrize("element_name", ["junction-dep.bst", "junction.bst:import-etc.bst"])
@pytest.mark.parametrize("workspaced", [True, False], ids=["workspace", "no-workspace"])
-def test_unfetched_junction(
- cli, tmpdir, datafiles, ref_storage, element_name, workspaced
-):
+def test_unfetched_junction(cli, tmpdir, datafiles, ref_storage, element_name, workspaced):
project = str(datafiles)
subproject_path = os.path.join(project, "files", "sub-project")
junction_path = os.path.join(project, "elements", "junction.bst")
@@ -286,9 +243,7 @@ def test_unfetched_junction(
configure_project(project, {"ref-storage": ref_storage})
# Create a repo to hold the subproject and generate a junction element for it
- ref = generate_junction(
- tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline")
- )
+ ref = generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline"))
# Create a stack element to depend on a cross junction element
#
@@ -309,14 +264,7 @@ def test_unfetched_junction(
result = cli.run(
project=project,
silent=True,
- args=[
- "workspace",
- "open",
- "--no-checkout",
- "--directory",
- subproject_path,
- "junction.bst",
- ],
+ args=["workspace", "open", "--no-checkout", "--directory", subproject_path, "junction.bst",],
)
result.assert_success()
@@ -352,26 +300,15 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage, workspaced):
result = cli.run(
project=project,
silent=True,
- args=[
- "workspace",
- "open",
- "--no-checkout",
- "--directory",
- subproject_path,
- "junction.bst",
- ],
+ args=["workspace", "open", "--no-checkout", "--directory", subproject_path, "junction.bst",],
)
result.assert_success()
# Assert the correct error when trying to show the pipeline
- dep_result = cli.run(
- project=project, silent=True, args=["show", "junction-dep.bst"]
- )
+ dep_result = cli.run(project=project, silent=True, args=["show", "junction-dep.bst"])
# Assert the correct error when trying to show the pipeline
- etc_result = cli.run(
- project=project, silent=True, args=["show", "junction.bst:import-etc.bst"]
- )
+ etc_result = cli.run(project=project, silent=True, args=["show", "junction.bst:import-etc.bst"])
# If a workspace is open, no ref is needed
if workspaced:
@@ -384,18 +321,12 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage, workspaced):
provenance = ref_node.get_provenance()
assert str(provenance) in dep_result.stderr
- dep_result.assert_main_error(
- ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT
- )
- etc_result.assert_main_error(
- ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT
- )
+ dep_result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
+ etc_result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
-@pytest.mark.parametrize(
- "element_name", ["junction-dep.bst", "junction.bst:import-etc.bst"]
-)
+@pytest.mark.parametrize("element_name", ["junction-dep.bst", "junction.bst:import-etc.bst"])
@pytest.mark.parametrize("workspaced", [True, False], ids=["workspace", "no-workspace"])
def test_fetched_junction(cli, tmpdir, datafiles, element_name, workspaced):
project = str(datafiles)
@@ -415,9 +346,7 @@ def test_fetched_junction(cli, tmpdir, datafiles, element_name, workspaced):
}
_yaml.roundtrip_dump(element, element_path)
- result = cli.run(
- project=project, silent=True, args=["source", "fetch", "junction.bst"]
- )
+ result = cli.run(project=project, silent=True, args=["source", "fetch", "junction.bst"])
result.assert_success()
# Open a workspace if we're testing workspaced behavior
@@ -425,23 +354,12 @@ def test_fetched_junction(cli, tmpdir, datafiles, element_name, workspaced):
result = cli.run(
project=project,
silent=True,
- args=[
- "workspace",
- "open",
- "--no-checkout",
- "--directory",
- subproject_path,
- "junction.bst",
- ],
+ args=["workspace", "open", "--no-checkout", "--directory", subproject_path, "junction.bst",],
)
result.assert_success()
# Now show the pipeline and check the reported element states
- result = cli.run(
- project=project,
- silent=True,
- args=["show", "--format", "%{name}-%{state}", element_name],
- )
+ result = cli.run(project=project, silent=True, args=["show", "--format", "%{name}-%{state}", element_name],)
results = result.output.strip().splitlines()
assert "junction.bst:import-etc.bst-buildable" in results
@@ -464,9 +382,7 @@ def test_exceed_max_recursion_depth(cli, tmpdir, dependency_depth):
"""
os.mkdir(project_path)
- result = cli.run(
- silent=True, args=["init", "--project-name", project_name, project_path]
- )
+ result = cli.run(silent=True, args=["init", "--project-name", project_name, project_path])
result.assert_success()
sourcefiles_path = os.path.join(project_path, "files")
@@ -481,20 +397,14 @@ def test_exceed_max_recursion_depth(cli, tmpdir, dependency_depth):
}
if i == 0:
del element["depends"]
- _yaml.roundtrip_dump(
- element, os.path.join(element_path, "element{}.bst".format(str(i)))
- )
+ _yaml.roundtrip_dump(element, os.path.join(element_path, "element{}.bst".format(str(i))))
source = os.path.join(sourcefiles_path, "source{}".format(str(i)))
open(source, "x").close()
assert os.path.exists(source)
setup_test()
- result = cli.run(
- project=project_path,
- silent=True,
- args=["show", "element{}.bst".format(str(dependency_depth))],
- )
+ result = cli.run(project=project_path, silent=True, args=["show", "element{}.bst".format(str(dependency_depth))],)
recursion_limit = sys.getrecursionlimit()
if dependency_depth <= recursion_limit:
@@ -523,19 +433,13 @@ def test_format_deps(cli, datafiles, dep_kind, expected_deps):
project = str(datafiles)
target = "checkout-deps.bst"
result = cli.run(
- project=project,
- silent=True,
- args=["show", "--deps", "none", "--format", "%{name}: " + dep_kind, target],
+ project=project, silent=True, args=["show", "--deps", "none", "--format", "%{name}: " + dep_kind, target],
)
result.assert_success()
expected = "{name}: {deps}".format(name=target, deps=expected_deps)
if result.output.strip() != expected:
- raise AssertionError(
- "Expected output:\n{}\nInstead received output:\n{}".format(
- expected, result.output
- )
- )
+ raise AssertionError("Expected output:\n{}\nInstead received output:\n{}".format(expected, result.output))
# This tests the resolved value of the 'max-jobs' variable,
@@ -544,8 +448,7 @@ def test_format_deps(cli, datafiles, dep_kind, expected_deps):
#
@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
@pytest.mark.parametrize(
- "cli_value, config_value",
- [(None, None), (None, "16"), ("16", None), ("5", "16"), ("0", "16"), ("16", "0"),],
+ "cli_value, config_value", [(None, None), (None, "16"), ("16", None), ("5", "16"), ("0", "16"), ("16", "0"),],
)
def test_max_jobs(cli, datafiles, cli_value, config_value):
project = str(datafiles)
@@ -599,8 +502,7 @@ def test_max_jobs(cli, datafiles, cli_value, config_value):
#
@pytest.mark.datafiles(os.path.join(DATA_DIR, "strict-depends"))
@pytest.mark.parametrize(
- "target, expected_state",
- [("non-strict-depends.bst", "cached"), ("strict-depends.bst", "waiting"),],
+ "target, expected_state", [("non-strict-depends.bst", "cached"), ("strict-depends.bst", "waiting"),],
)
def test_strict_dependencies(cli, datafiles, target, expected_state):
project = str(datafiles)
diff --git a/tests/frontend/source_checkout.py b/tests/frontend/source_checkout.py
index f10c24c5d..ff897b1cf 100644
--- a/tests/frontend/source_checkout.py
+++ b/tests/frontend/source_checkout.py
@@ -44,10 +44,7 @@ def test_source_checkout(datafiles, cli, tmpdir_factory, with_workspace, guess_e
if with_workspace:
ws_cmd = ["-C", workspace]
- result = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace, target],
- )
+ result = cli.run(project=project, args=["workspace", "open", "--directory", workspace, target],)
result.assert_success()
else:
ws_cmd = []
@@ -64,9 +61,7 @@ def test_source_checkout(datafiles, cli, tmpdir_factory, with_workspace, guess_e
result = cli.run(project=project, args=args)
result.assert_success()
- assert os.path.exists(
- os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config")
- )
+ assert os.path.exists(os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config"))
@pytest.mark.datafiles(DATA_DIR)
@@ -80,23 +75,11 @@ def test_source_checkout_force(datafiles, cli, force_flag):
os.makedirs(os.path.join(checkout, "some-thing"))
result = cli.run(
- project=project,
- args=[
- "source",
- "checkout",
- force_flag,
- "--deps",
- "none",
- "--directory",
- checkout,
- target,
- ],
+ project=project, args=["source", "checkout", force_flag, "--deps", "none", "--directory", checkout, target,],
)
result.assert_success()
- assert os.path.exists(
- os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config")
- )
+ assert os.path.exists(os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config"))
@pytest.mark.datafiles(DATA_DIR)
@@ -105,17 +88,12 @@ def test_source_checkout_tar(datafiles, cli):
tar = os.path.join(cli.directory, "source-checkout.tar")
target = "checkout-deps.bst"
- result = cli.run(
- project=project,
- args=["source", "checkout", "--tar", tar, "--deps", "none", target],
- )
+ result = cli.run(project=project, args=["source", "checkout", "--tar", tar, "--deps", "none", target],)
result.assert_success()
assert os.path.exists(tar)
with tarfile.open(tar) as tf:
- expected_content = os.path.join(
- tar, "checkout-deps", "etc", "buildstream", "config"
- )
+ expected_content = os.path.join(tar, "checkout-deps", "etc", "buildstream", "config")
tar_members = [f.name for f in tf]
for member in tar_members:
assert member in expected_content
@@ -131,23 +109,11 @@ def test_source_checkout_compressed_tar(datafiles, cli, compression):
result = cli.run(
project=project,
- args=[
- "source",
- "checkout",
- "--tar",
- tar,
- "--compression",
- compression,
- "--deps",
- "none",
- target,
- ],
+ args=["source", "checkout", "--tar", tar, "--compression", compression, "--deps", "none", target,],
)
result.assert_success()
tar = tarfile.open(name=tar, mode="r:" + compression)
- assert (
- os.path.join("checkout-deps", "etc", "buildstream", "config") in tar.getnames()
- )
+ assert os.path.join("checkout-deps", "etc", "buildstream", "config") in tar.getnames()
@pytest.mark.datafiles(DATA_DIR)
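The read-back mode above is composed as "r:" + compression. A self-contained sketch of the same round trip using an in-memory tar (the compression value is assumed; the test parametrizes it):

import io
import tarfile

compression = "gz"  # assumption: one of the parametrized compression values
buf = io.BytesIO()
# Write a tiny compressed tar containing the path the test looks for.
with tarfile.open(fileobj=buf, mode="w:" + compression) as tf:
    info = tarfile.TarInfo("checkout-deps/etc/buildstream/config")
    data = b"example"
    info.size = len(data)
    tf.addfile(info, io.BytesIO(data))
buf.seek(0)
# Read it back with the composed mode string, as the test does.
with tarfile.open(fileobj=buf, mode="r:" + compression) as tf:
    assert "checkout-deps/etc/buildstream/config" in tf.getnames()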
@@ -157,33 +123,24 @@ def test_source_checkout_deps(datafiles, cli, deps):
checkout = os.path.join(cli.directory, "source-checkout")
target = "checkout-deps.bst"
- result = cli.run(
- project=project,
- args=["source", "checkout", "--directory", checkout, "--deps", deps, target],
- )
+ result = cli.run(project=project, args=["source", "checkout", "--directory", checkout, "--deps", deps, target],)
result.assert_success()
# Sources of the target
if deps == "build":
assert not os.path.exists(os.path.join(checkout, "checkout-deps"))
else:
- assert os.path.exists(
- os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config")
- )
+ assert os.path.exists(os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config"))
# Sources of the target's build dependencies
if deps in ("build", "all"):
- assert os.path.exists(
- os.path.join(checkout, "import-dev", "usr", "include", "pony.h")
- )
+ assert os.path.exists(os.path.join(checkout, "import-dev", "usr", "include", "pony.h"))
else:
assert not os.path.exists(os.path.join(checkout, "import-dev"))
# Sources of the target's runtime dependencies
if deps in ("run", "all"):
- assert os.path.exists(
- os.path.join(checkout, "import-bin", "usr", "bin", "hello")
- )
+ assert os.path.exists(os.path.join(checkout, "import-bin", "usr", "bin", "hello"))
else:
assert not os.path.exists(os.path.join(checkout, "import-bin"))
@@ -196,32 +153,18 @@ def test_source_checkout_except(datafiles, cli):
result = cli.run(
project=project,
- args=[
- "source",
- "checkout",
- "--directory",
- checkout,
- "--deps",
- "all",
- "--except",
- "import-bin.bst",
- target,
- ],
+ args=["source", "checkout", "--directory", checkout, "--deps", "all", "--except", "import-bin.bst", target,],
)
result.assert_success()
# Sources for the target should be present
- assert os.path.exists(
- os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config")
- )
+ assert os.path.exists(os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config"))
# Sources for import-bin.bst should not be present
assert not os.path.exists(os.path.join(checkout, "import-bin"))
# Sources for other dependencies should be present
- assert os.path.exists(
- os.path.join(checkout, "import-dev", "usr", "include", "pony.h")
- )
+ assert os.path.exists(os.path.join(checkout, "import-dev", "usr", "include", "pony.h"))
@pytest.mark.datafiles(DATA_DIR)
@@ -233,8 +176,7 @@ def test_source_checkout_fetch(datafiles, cli):
# Create an element with remote source
element = generate_remote_import_element(
- os.path.join(project, "files", "dev-files", "usr", "include", "pony.h"),
- "pony.h",
+ os.path.join(project, "files", "dev-files", "usr", "include", "pony.h"), "pony.h",
)
_yaml.roundtrip_dump(element, target_path)
@@ -244,9 +186,7 @@ def test_source_checkout_fetch(datafiles, cli):
args = ["source", "checkout"]
args += [target, checkout]
- result = cli.run(
- project=project, args=["source", "checkout", "--directory", checkout, target]
- )
+ result = cli.run(project=project, args=["source", "checkout", "--directory", checkout, target])
result.assert_success()
assert os.path.exists(os.path.join(checkout, "remote-import-dev", "pony.h"))
@@ -309,10 +249,7 @@ def test_source_checkout_options_tar_and_dir_conflict(cli, tmpdir, datafiles):
tar_file = os.path.join(str(tmpdir), "source-checkout.tar")
target = "checkout-deps.bst"
- result = cli.run(
- project=project,
- args=["source", "checkout", "--directory", checkout, "--tar", tar_file, target],
- )
+ result = cli.run(project=project, args=["source", "checkout", "--directory", checkout, "--tar", tar_file, target],)
assert result.exit_code != 0
assert "ERROR: options --directory and --tar conflict" in result.stderr
@@ -326,16 +263,7 @@ def test_source_checkout_compression_without_tar(cli, tmpdir, datafiles):
target = "checkout-deps.bst"
result = cli.run(
- project=project,
- args=[
- "source",
- "checkout",
- "--directory",
- checkout,
- "--compression",
- "xz",
- target,
- ],
+ project=project, args=["source", "checkout", "--directory", checkout, "--compression", "xz", target,],
)
assert result.exit_code != 0
diff --git a/tests/frontend/track.py b/tests/frontend/track.py
index 6d9e3bb3f..02a19787c 100644
--- a/tests/frontend/track.py
+++ b/tests/frontend/track.py
@@ -41,9 +41,7 @@ def test_track_single(cli, tmpdir, datafiles):
# Write out our test targets
generate_element(repo, os.path.join(element_path, element_dep_name))
- generate_element(
- repo, os.path.join(element_path, element_target_name), dep_name=element_dep_name
- )
+ generate_element(repo, os.path.join(element_path, element_target_name), dep_name=element_dep_name)
# Assert that tracking is needed for both elements
states = cli.get_element_states(project, [element_target_name])
@@ -53,15 +51,11 @@ def test_track_single(cli, tmpdir, datafiles):
}
# Now first try to track only one element
- result = cli.run(
- project=project, args=["source", "track", "--deps", "none", element_target_name]
- )
+ result = cli.run(project=project, args=["source", "track", "--deps", "none", element_target_name])
result.assert_success()
# And now fetch it
- result = cli.run(
- project=project, args=["source", "fetch", "--deps", "none", element_target_name]
- )
+ result = cli.run(project=project, args=["source", "fetch", "--deps", "none", element_target_name])
result.assert_success()
# Assert that the dependency is waiting and the target has still never been tracked
@@ -75,9 +69,7 @@ def test_track_single(cli, tmpdir, datafiles):
@pytest.mark.datafiles(os.path.join(TOP_DIR))
@pytest.mark.parametrize("ref_storage", [("inline"), ("project-refs")])
def test_track_optional(cli, tmpdir, datafiles, ref_storage):
- project = os.path.join(
- datafiles.dirname, datafiles.basename, "track-optional-" + ref_storage
- )
+ project = os.path.join(datafiles.dirname, datafiles.basename, "track-optional-" + ref_storage)
dev_files_path = os.path.join(project, "files")
element_path = os.path.join(project, "target.bst")
@@ -104,49 +96,23 @@ def test_track_optional(cli, tmpdir, datafiles, ref_storage):
#
# We want to track and persist the ref separately in this test
#
- result = cli.run(
- project=project,
- args=["--option", "test", "False", "source", "track", "target.bst"],
- )
+ result = cli.run(project=project, args=["--option", "test", "False", "source", "track", "target.bst"],)
result.assert_success()
- result = cli.run(
- project=project,
- args=["--option", "test", "True", "source", "track", "target.bst"],
- )
+ result = cli.run(project=project, args=["--option", "test", "True", "source", "track", "target.bst"],)
result.assert_success()
# Now fetch the key for both options
#
result = cli.run(
project=project,
- args=[
- "--option",
- "test",
- "False",
- "show",
- "--deps",
- "none",
- "--format",
- "%{key}",
- "target.bst",
- ],
+ args=["--option", "test", "False", "show", "--deps", "none", "--format", "%{key}", "target.bst",],
)
result.assert_success()
master_key = result.output
result = cli.run(
project=project,
- args=[
- "--option",
- "test",
- "True",
- "show",
- "--deps",
- "none",
- "--format",
- "%{key}",
- "target.bst",
- ],
+ args=["--option", "test", "True", "show", "--deps", "none", "--format", "%{key}", "target.bst",],
)
result.assert_success()
test_key = result.output
@@ -187,15 +153,7 @@ def test_track_cross_junction(cli, tmpdir, datafiles, cross_junction, ref_storag
#
def get_subproject_element_state():
result = cli.run(
- project=project,
- args=[
- "show",
- "--deps",
- "all",
- "--format",
- "%{name}|%{state}",
- "target.bst",
- ],
+ project=project, args=["show", "--deps", "all", "--format", "%{name}|%{state}", "target.bst",],
)
result.assert_success()
diff --git a/tests/frontend/workspace.py b/tests/frontend/workspace.py
index ba4e9577f..f469939d1 100644
--- a/tests/frontend/workspace.py
+++ b/tests/frontend/workspace.py
@@ -68,9 +68,7 @@ class WorkspaceCreator:
self.workspace_cmd = os.path.join(self.project_path, "workspace_cmd")
- def create_workspace_element(
- self, kind, suffix="", workspace_dir=None, element_attrs=None
- ):
+ def create_workspace_element(self, kind, suffix="", workspace_dir=None, element_attrs=None):
element_name = "workspace-test-{}{}.bst".format(kind, suffix)
element_path = os.path.join(self.project_path, "elements")
if not workspace_dir:
@@ -90,9 +88,7 @@ class WorkspaceCreator:
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
return element_name, element_path, workspace_dir
- def create_workspace_elements(
- self, kinds, suffixs=None, workspace_dir_usr=None, element_attrs=None
- ):
+ def create_workspace_elements(self, kinds, suffixs=None, workspace_dir_usr=None, element_attrs=None):
element_tuples = []
@@ -109,25 +105,16 @@ class WorkspaceCreator:
element_tuples.append((element_name, workspace_dir))
# Assert that a fetch is needed
- states = self.cli.get_element_states(
- self.project_path, [e for e, _ in element_tuples]
- )
+ states = self.cli.get_element_states(self.project_path, [e for e, _ in element_tuples])
assert not any(states[e] != "fetch needed" for e, _ in element_tuples)
return element_tuples
def open_workspaces(
- self,
- kinds,
- suffixs=None,
- workspace_dir=None,
- element_attrs=None,
- no_checkout=False,
+ self, kinds, suffixs=None, workspace_dir=None, element_attrs=None, no_checkout=False,
):
- element_tuples = self.create_workspace_elements(
- kinds, suffixs, workspace_dir, element_attrs
- )
+ element_tuples = self.create_workspace_elements(kinds, suffixs, workspace_dir, element_attrs)
os.makedirs(self.workspace_cmd, exist_ok=True)
# Now open the workspace; this should have the effect of automatically
@@ -140,20 +127,14 @@ class WorkspaceCreator:
_, workspace_dir = element_tuples[0]
args.extend(["--directory", workspace_dir])
- args.extend(
- [element_name for element_name, workspace_dir_suffix in element_tuples]
- )
- result = self.cli.run(
- cwd=self.workspace_cmd, project=self.project_path, args=args
- )
+ args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
+ result = self.cli.run(cwd=self.workspace_cmd, project=self.project_path, args=args)
result.assert_success()
if not no_checkout:
# Assert that we are now buildable because the source is now cached.
- states = self.cli.get_element_states(
- self.project_path, [e for e, _ in element_tuples]
- )
+ states = self.cli.get_element_states(self.project_path, [e for e, _ in element_tuples])
assert not any(states[e] != "buildable" for e, _ in element_tuples)
# Check that the executable hello file is found in each workspace
@@ -176,9 +157,7 @@ def open_workspace(
no_checkout=False,
):
workspace_object = WorkspaceCreator(cli, tmpdir, datafiles, project_path)
- workspaces = workspace_object.open_workspaces(
- (kind,), (suffix,), workspace_dir, element_attrs, no_checkout
- )
+ workspaces = workspace_object.open_workspaces((kind,), (suffix,), workspace_dir, element_attrs, no_checkout)
assert len(workspaces) == 1
element_name, workspace = workspaces[0]
return element_name, workspace_object.project_path, workspace
@@ -197,9 +176,7 @@ def test_open_bzr_customize(cli, tmpdir, datafiles):
source_config = element_config.get_sequence("sources").mapping_at(0)
output = subprocess.check_output(["bzr", "info"], cwd=workspace)
stripped_url = source_config.get_str("url").lstrip("file:///")
- expected_output_str = "checkout of branch: /{}/{}".format(
- stripped_url, source_config.get_str("track")
- )
+ expected_output_str = "checkout of branch: /{}/{}".format(stripped_url, source_config.get_str("track"))
assert expected_output_str in str(output)
@@ -221,9 +198,7 @@ def test_open_multi(cli, tmpdir, datafiles):
assert ".bzr" not in workspace_lsdir
-@pytest.mark.skipif(
- os.geteuid() == 0, reason="root may have CAP_DAC_OVERRIDE and ignore permissions"
-)
+@pytest.mark.skipif(os.geteuid() == 0, reason="root may have CAP_DAC_OVERRIDE and ignore permissions")
@pytest.mark.datafiles(DATA_DIR)
def test_open_multi_unwritable(cli, tmpdir, datafiles):
workspace_object = WorkspaceCreator(cli, tmpdir, datafiles)
@@ -240,9 +215,7 @@ def test_open_multi_unwritable(cli, tmpdir, datafiles):
cwdstat = os.stat(workspace_object.workspace_cmd)
try:
os.chmod(workspace_object.workspace_cmd, cwdstat.st_mode - stat.S_IWRITE)
- result = workspace_object.cli.run(
- project=workspace_object.project_path, args=args
- )
+ result = workspace_object.cli.run(project=workspace_object.project_path, args=args)
finally:
# Using this finally to make sure we always put things back how they should be.
os.chmod(workspace_object.workspace_cmd, cwdstat.st_mode)
@@ -250,12 +223,7 @@ def test_open_multi_unwritable(cli, tmpdir, datafiles):
result.assert_main_error(ErrorDomain.STREAM, None)
# Normally we avoid checking stderr in favour of using the machine readable result.assert_main_error
# But Tristan was very keen that the names of the elements left needing workspaces were present in the output
- assert (
- " ".join(
- [element_name for element_name, workspace_dir_suffix in element_tuples[1:]]
- )
- in result.stderr
- )
+ assert " ".join([element_name for element_name, workspace_dir_suffix in element_tuples[1:]]) in result.stderr
@pytest.mark.datafiles(DATA_DIR)
@@ -272,9 +240,7 @@ def test_open_multi_with_directory(cli, tmpdir, datafiles):
args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
result = workspace_object.cli.run(
- cwd=workspace_object.workspace_cmd,
- project=workspace_object.project_path,
- args=args,
+ cwd=workspace_object.workspace_cmd, project=workspace_object.project_path, args=args,
)
result.assert_main_error(ErrorDomain.STREAM, "directory-with-multiple-elements")
@@ -285,9 +251,7 @@ def test_open_defaultlocation(cli, tmpdir, datafiles):
workspace_object = WorkspaceCreator(cli, tmpdir, datafiles)
# pylint: disable=unbalanced-tuple-unpacking
- ((element_name, workspace_dir),) = workspace_object.create_workspace_elements(
- ["git"], ["git"]
- )
+ ((element_name, workspace_dir),) = workspace_object.create_workspace_elements(["git"], ["git"])
os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
# Now open the workspace; this should have the effect of automatically
@@ -304,10 +268,7 @@ def test_open_defaultlocation(cli, tmpdir, datafiles):
result.assert_success()
- assert (
- cli.get_element_state(workspace_object.project_path, element_name)
- == "buildable"
- )
+ assert cli.get_element_state(workspace_object.project_path, element_name) == "buildable"
# Check that the executable hello file is found in the workspace
# even though the cli.run function was not run with cwd = workspace_object.workspace_cmd
@@ -322,9 +283,7 @@ def test_open_defaultlocation_exists(cli, tmpdir, datafiles):
workspace_object = WorkspaceCreator(cli, tmpdir, datafiles)
# pylint: disable=unbalanced-tuple-unpacking
- ((element_name, workspace_dir),) = workspace_object.create_workspace_elements(
- ["git"], ["git"]
- )
+ ((element_name, workspace_dir),) = workspace_object.create_workspace_elements(["git"], ["git"])
os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
with open(workspace_dir, "w") as fl:
@@ -362,10 +321,7 @@ def test_open_force(cli, tmpdir, datafiles):
assert os.path.exists(workspace)
# Now open the workspace again with --force, this should happily succeed
- result = cli.run(
- project=project,
- args=["workspace", "open", "--force", "--directory", workspace, element_name],
- )
+ result = cli.run(project=project, args=["workspace", "open", "--force", "--directory", workspace, element_name],)
result.assert_success()
@@ -377,10 +333,7 @@ def test_open_force_open(cli, tmpdir, datafiles):
assert os.path.exists(workspace)
# Now open the workspace again with --force, this should happily succeed
- result = cli.run(
- project=project,
- args=["workspace", "open", "--force", "--directory", workspace, element_name],
- )
+ result = cli.run(project=project, args=["workspace", "open", "--force", "--directory", workspace, element_name],)
result.assert_success()
@@ -400,15 +353,7 @@ def test_open_force_open_no_checkout(cli, tmpdir, datafiles):
# Now open the workspace again with --force and --no-checkout
result = cli.run(
project=project,
- args=[
- "workspace",
- "open",
- "--force",
- "--no-checkout",
- "--directory",
- workspace,
- element_name,
- ],
+ args=["workspace", "open", "--force", "--no-checkout", "--directory", workspace, element_name,],
)
result.assert_success()
@@ -430,9 +375,7 @@ def test_open_force_different_workspace(cli, tmpdir, datafiles):
tmpdir = os.path.join(str(tmpdir), "-beta")
shutil.move(hello_path, hello1_path)
- element_name2, _, workspace2 = open_workspace(
- cli, tmpdir, datafiles, "git", "-beta"
- )
+ element_name2, _, workspace2 = open_workspace(cli, tmpdir, datafiles, "git", "-beta")
# Assert the workspace dir exists
assert os.path.exists(workspace2)
@@ -444,10 +387,7 @@ def test_open_force_different_workspace(cli, tmpdir, datafiles):
assert os.path.exists(os.path.join(workspace2, "usr", "bin", "hello"))
# Now open the workspace again with --force, this should happily succeed
- result = cli.run(
- project=project,
- args=["workspace", "open", "--force", "--directory", workspace, element_name2],
- )
+ result = cli.run(project=project, args=["workspace", "open", "--force", "--directory", workspace, element_name2],)
# Assert that the file in workspace 1 has been replaced
# With the file from workspace 2
@@ -462,9 +402,7 @@ def test_close(cli, tmpdir, datafiles):
element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git")
# Close the workspace
- result = cli.run(
- project=project, args=["workspace", "close", "--remove-dir", element_name]
- )
+ result = cli.run(project=project, args=["workspace", "close", "--remove-dir", element_name])
result.assert_success()
# Assert the workspace dir has been deleted
@@ -475,18 +413,14 @@ def test_close(cli, tmpdir, datafiles):
def test_close_external_after_move_project(cli, tmpdir, datafiles):
workspace_dir = os.path.join(str(tmpdir), "workspace")
project_path = os.path.join(str(tmpdir), "initial_project")
- element_name, _, _ = open_workspace(
- cli, tmpdir, datafiles, "git", "", workspace_dir, project_path
- )
+ element_name, _, _ = open_workspace(cli, tmpdir, datafiles, "git", "", workspace_dir, project_path)
assert os.path.exists(workspace_dir)
moved_dir = os.path.join(str(tmpdir), "external_project")
shutil.move(project_path, moved_dir)
assert os.path.exists(moved_dir)
# Close the workspace
- result = cli.run(
- project=moved_dir, args=["workspace", "close", "--remove-dir", element_name]
- )
+ result = cli.run(project=moved_dir, args=["workspace", "close", "--remove-dir", element_name])
result.assert_success()
# Assert the workspace dir has been deleted
@@ -498,21 +432,14 @@ def test_close_internal_after_move_project(cli, tmpdir, datafiles):
initial_dir = os.path.join(str(tmpdir), "initial_project")
initial_workspace = os.path.join(initial_dir, "workspace")
element_name, _, _ = open_workspace(
- cli,
- tmpdir,
- datafiles,
- "git",
- workspace_dir=initial_workspace,
- project_path=initial_dir,
+ cli, tmpdir, datafiles, "git", workspace_dir=initial_workspace, project_path=initial_dir,
)
moved_dir = os.path.join(str(tmpdir), "internal_project")
shutil.move(initial_dir, moved_dir)
assert os.path.exists(moved_dir)
# Close the workspace
- result = cli.run(
- project=moved_dir, args=["workspace", "close", "--remove-dir", element_name]
- )
+ result = cli.run(project=moved_dir, args=["workspace", "close", "--remove-dir", element_name])
result.assert_success()
# Assert the workspace dir has been deleted
@@ -538,9 +465,7 @@ def test_close_removed(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_close_nonexistant_element(cli, tmpdir, datafiles):
element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git")
- element_path = os.path.join(
- datafiles.dirname, datafiles.basename, "elements", element_name
- )
+ element_path = os.path.join(datafiles.dirname, datafiles.basename, "elements", element_name)
# First brutally remove the element.bst file, ensuring that
# the element does not exist anymore in the project where
@@ -548,9 +473,7 @@ def test_close_nonexistant_element(cli, tmpdir, datafiles):
os.remove(element_path)
# Close the workspace
- result = cli.run(
- project=project, args=["workspace", "close", "--remove-dir", element_name]
- )
+ result = cli.run(project=project, args=["workspace", "close", "--remove-dir", element_name])
result.assert_success()
# Assert the workspace dir has been deleted
@@ -561,17 +484,11 @@ def test_close_nonexistant_element(cli, tmpdir, datafiles):
def test_close_multiple(cli, tmpdir, datafiles):
tmpdir_alpha = os.path.join(str(tmpdir), "alpha")
tmpdir_beta = os.path.join(str(tmpdir), "beta")
- alpha, project, workspace_alpha = open_workspace(
- cli, tmpdir_alpha, datafiles, "git", suffix="-alpha"
- )
- beta, project, workspace_beta = open_workspace(
- cli, tmpdir_beta, datafiles, "git", suffix="-beta"
- )
+ alpha, project, workspace_alpha = open_workspace(cli, tmpdir_alpha, datafiles, "git", suffix="-alpha")
+ beta, project, workspace_beta = open_workspace(cli, tmpdir_beta, datafiles, "git", suffix="-beta")
# Close the workspaces
- result = cli.run(
- project=project, args=["workspace", "close", "--remove-dir", alpha, beta]
- )
+ result = cli.run(project=project, args=["workspace", "close", "--remove-dir", alpha, beta])
result.assert_success()
# Assert the workspace dirs have been deleted
@@ -583,17 +500,11 @@ def test_close_multiple(cli, tmpdir, datafiles):
def test_close_all(cli, tmpdir, datafiles):
tmpdir_alpha = os.path.join(str(tmpdir), "alpha")
tmpdir_beta = os.path.join(str(tmpdir), "beta")
- _, project, workspace_alpha = open_workspace(
- cli, tmpdir_alpha, datafiles, "git", suffix="-alpha"
- )
- _, project, workspace_beta = open_workspace(
- cli, tmpdir_beta, datafiles, "git", suffix="-beta"
- )
+ _, project, workspace_alpha = open_workspace(cli, tmpdir_alpha, datafiles, "git", suffix="-alpha")
+ _, project, workspace_beta = open_workspace(cli, tmpdir_beta, datafiles, "git", suffix="-beta")
# Close the workspaces
- result = cli.run(
- project=project, args=["workspace", "close", "--remove-dir", "--all"]
- )
+ result = cli.run(project=project, args=["workspace", "close", "--remove-dir", "--all"])
result.assert_success()
# Assert the workspace dirs have been deleted
@@ -657,9 +568,7 @@ def test_reset_soft(cli, tmpdir, datafiles):
assert os.path.exists(pony_path)
# Now soft-reset the open workspace, this should not revert the changes
- result = cli.run(
- project=project, args=["workspace", "reset", "--soft", element_name]
- )
+ result = cli.run(project=project, args=["workspace", "reset", "--soft", element_name])
result.assert_success()
# we removed this dir
assert not os.path.exists(os.path.join(workspace, "usr", "bin"))
@@ -677,12 +586,8 @@ def test_reset_multiple(cli, tmpdir, datafiles):
# Open the workspaces
tmpdir_alpha = os.path.join(str(tmpdir), "alpha")
tmpdir_beta = os.path.join(str(tmpdir), "beta")
- alpha, project, workspace_alpha = open_workspace(
- cli, tmpdir_alpha, datafiles, "git", suffix="-alpha"
- )
- beta, project, workspace_beta = open_workspace(
- cli, tmpdir_beta, datafiles, "git", suffix="-beta"
- )
+ alpha, project, workspace_alpha = open_workspace(cli, tmpdir_alpha, datafiles, "git", suffix="-alpha")
+ beta, project, workspace_beta = open_workspace(cli, tmpdir_beta, datafiles, "git", suffix="-beta")
# Modify workspaces
shutil.rmtree(os.path.join(workspace_alpha, "usr", "bin"))
@@ -703,12 +608,8 @@ def test_reset_all(cli, tmpdir, datafiles):
# Open the workspaces
tmpdir_alpha = os.path.join(str(tmpdir), "alpha")
tmpdir_beta = os.path.join(str(tmpdir), "beta")
- _, project, workspace_alpha = open_workspace(
- cli, tmpdir_alpha, datafiles, "git", suffix="-alpha"
- )
- _, project, workspace_beta = open_workspace(
- cli, tmpdir_beta, datafiles, "git", suffix="-beta"
- )
+ _, project, workspace_alpha = open_workspace(cli, tmpdir_alpha, datafiles, "git", suffix="-alpha")
+ _, project, workspace_beta = open_workspace(cli, tmpdir_beta, datafiles, "git", suffix="-beta")
# Modify workspaces
shutil.rmtree(os.path.join(workspace_alpha, "usr", "bin"))
@@ -749,13 +650,9 @@ def test_list(cli, tmpdir, datafiles):
[(False, False), (True, True), (True, False)],
ids=["project-no-guess", "workspace-guess", "workspace-no-guess"],
)
-def test_build(
- cli, tmpdir_factory, datafiles, kind, strict, from_workspace, guess_element
-):
+def test_build(cli, tmpdir_factory, datafiles, kind, strict, from_workspace, guess_element):
tmpdir = tmpdir_factory.mktemp("")
- element_name, project, workspace = open_workspace(
- cli, tmpdir, datafiles, kind, False
- )
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
checkout = os.path.join(str(tmpdir), "checkout")
args_dir = ["-C", workspace] if from_workspace else []
args_elm = [element_name] if not guess_element else []
@@ -786,10 +683,7 @@ def test_build(
assert key_1 == key_2
# Checkout the result
- result = cli.run(
- project=project,
- args=args_dir + ["artifact", "checkout", "--directory", checkout, *args_elm],
- )
+ result = cli.run(project=project, args=args_dir + ["artifact", "checkout", "--directory", checkout, *args_elm],)
result.assert_success()
# Check that the pony.conf from the modified workspace exists
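test_build is parametrized over whether the command runs from inside the workspace and whether the element name is spelled out or guessed. The two small lists assembled above compose into the final invocation; the same pattern with comments:

args_dir = ["-C", workspace] if from_workspace else []   # behave as if cwd were the workspace
args_elm = [element_name] if not guess_element else []   # omit the name so bst guesses it from cwd
result = cli.run(project=project, args=args_dir + ["artifact", "checkout", "--directory", checkout, *args_elm])
result.assert_success()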
@@ -896,10 +790,7 @@ def test_detect_modifications(cli, tmpdir, datafiles, modification, strict):
assert key_1 != key_3
# Checkout the result
- result = cli.run(
- project=project,
- args=["artifact", "checkout", element_name, "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout],)
result.assert_success()
# Check the result for the changes we made
@@ -927,10 +818,7 @@ def test_detect_modifications(cli, tmpdir, datafiles, modification, strict):
# Test loading a negative workspace version
{"format-version": -1},
# Test loading version 0 with two sources
- {
- "format-version": 0,
- "alpha.bst": {0: "/workspaces/bravo", 1: "/workspaces/charlie",},
- },
+ {"format-version": 0, "alpha.bst": {0: "/workspaces/bravo", 1: "/workspaces/charlie",},},
# Test loading a version with decimals
{"format-version": 0.5},
# Test loading a future version
@@ -959,13 +847,7 @@ def test_list_unsupported_workspace(cli, datafiles, workspace_cfg):
{"alpha.bst": "/workspaces/bravo"},
{
"format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {
- "alpha.bst": {
- "prepared": False,
- "path": "/workspaces/bravo",
- "running_files": {},
- }
- },
+ "workspaces": {"alpha.bst": {"prepared": False, "path": "/workspaces/bravo", "running_files": {},}},
},
),
# Test loading version 0 with only one source
@@ -973,30 +855,15 @@ def test_list_unsupported_workspace(cli, datafiles, workspace_cfg):
{"alpha.bst": {0: "/workspaces/bravo"}},
{
"format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {
- "alpha.bst": {
- "prepared": False,
- "path": "/workspaces/bravo",
- "running_files": {},
- }
- },
+ "workspaces": {"alpha.bst": {"prepared": False, "path": "/workspaces/bravo", "running_files": {},}},
},
),
# Test loading version 1
(
- {
- "format-version": 1,
- "workspaces": {"alpha.bst": {"path": "/workspaces/bravo"}},
- },
+ {"format-version": 1, "workspaces": {"alpha.bst": {"path": "/workspaces/bravo"}},},
{
"format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {
- "alpha.bst": {
- "prepared": False,
- "path": "/workspaces/bravo",
- "running_files": {},
- }
- },
+ "workspaces": {"alpha.bst": {"prepared": False, "path": "/workspaces/bravo", "running_files": {},}},
},
),
# Test loading version 2
@@ -1027,23 +894,11 @@ def test_list_unsupported_workspace(cli, datafiles, workspace_cfg):
(
{
"format-version": 3,
- "workspaces": {
- "alpha.bst": {
- "prepared": True,
- "path": "/workspaces/bravo",
- "running_files": {},
- }
- },
+ "workspaces": {"alpha.bst": {"prepared": True, "path": "/workspaces/bravo", "running_files": {},}},
},
{
"format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {
- "alpha.bst": {
- "prepared": True,
- "path": "/workspaces/bravo",
- "running_files": {},
- }
- },
+ "workspaces": {"alpha.bst": {"prepared": True, "path": "/workspaces/bravo", "running_files": {},}},
},
),
],
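The supported-config table above pairs each legacy on-disk workspace format with the structure it is expected to migrate to. An illustrative sketch of that normalisation for the version-0 cases (not BuildStream's actual migration code; the import path is an assumption):

from buildstream._workspaces import BST_WORKSPACE_FORMAT_VERSION  # assumed import path

def migrate_v0(config):
    # Version 0 had no "format-version" key: it mapped element names either
    # straight to a path, or to a {source-index: path} dict. Only
    # single-source entries are representable in the current format, which
    # is why the two-source case appears in the unsupported list above.
    workspaces = {}
    for element, value in config.items():
        if isinstance(value, dict):
            if len(value) > 1:
                raise ValueError("version 0 workspaces with multiple sources are unsupported")
            value = value[0]
        workspaces[element] = {"prepared": False, "path": value, "running_files": {}}
    return {"format-version": BST_WORKSPACE_FORMAT_VERSION, "workspaces": workspaces}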
@@ -1087,14 +942,9 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expecte
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# Make a change to the workspaces file
- result = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace, element_name],
- )
+ result = cli.run(project=project, args=["workspace", "open", "--directory", workspace, element_name],)
result.assert_success()
- result = cli.run(
- project=project, args=["workspace", "close", "--remove-dir", element_name]
- )
+ result = cli.run(project=project, args=["workspace", "close", "--remove-dir", element_name])
result.assert_success()
# Check that workspace config is converted correctly if necessary
@@ -1116,9 +966,7 @@ def test_inconsitent_pipeline_message(cli, tmpdir, datafiles):
@pytest.mark.parametrize("strict", [("strict"), ("non-strict")])
def test_cache_key_workspace_in_dependencies(cli, tmpdir, datafiles, strict):
checkout = os.path.join(str(tmpdir), "checkout")
- element_name, project, workspace = open_workspace(
- cli, os.path.join(str(tmpdir), "repo-a"), datafiles, "git"
- )
+ element_name, project, workspace = open_workspace(cli, os.path.join(str(tmpdir), "repo-a"), datafiles, "git")
element_path = os.path.join(project, "elements")
back_dep_element_name = "workspace-test-back-dep.bst"
@@ -1165,10 +1013,7 @@ def test_cache_key_workspace_in_dependencies(cli, tmpdir, datafiles, strict):
assert key_b1 == key_b2
# Checkout the result
- result = cli.run(
- project=project,
- args=["artifact", "checkout", back_dep_element_name, "--directory", checkout],
- )
+ result = cli.run(project=project, args=["artifact", "checkout", back_dep_element_name, "--directory", checkout],)
result.assert_success()
# Check that the pony.conf from the modified workspace exists
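test_cache_key_workspace_in_dependencies runs once per strictness mode, and in both modes the workspaced build must produce a stable key. A sketch of the check, with the project name "test" and the strict toggle borrowed from the sibling test_build_all further down (the string-to-bool conversion is an assumption):

strict_mode = strict == "strict"  # the parametrize values are the literal strings
cli.configure({"projects": {"test": {"strict": strict_mode}}})
key_b1 = cli.get_element_key(project, back_dep_element_name)
# ... rebuild without touching the workspace ...
key_b2 = cli.get_element_key(project, back_dep_element_name)
assert key_b1 == key_b2  # identical inputs must yield an identical cache key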
@@ -1185,9 +1030,7 @@ def test_multiple_failed_builds(cli, tmpdir, datafiles):
"kind": "manual",
"config": {"configure-commands": ["unknown_command_that_will_fail"]},
}
- element_name, project, _ = open_workspace(
- cli, tmpdir, datafiles, "git", element_attrs=element_config
- )
+ element_name, project, _ = open_workspace(cli, tmpdir, datafiles, "git", element_attrs=element_config)
for _ in range(2):
result = cli.run(project=project, args=["build", element_name])
@@ -1208,12 +1051,7 @@ def test_external_fetch(cli, datafiles, tmpdir_factory, subdir, guess_element):
create_element_size(depend_element, str(datafiles), "elements", [], 1024)
element_name, project, workspace = open_workspace(
- cli,
- tmpdir,
- datafiles,
- "git",
- no_checkout=True,
- element_attrs={"depends": [depend_element]},
+ cli, tmpdir, datafiles, "git", no_checkout=True, element_attrs={"depends": [depend_element]},
)
arg_elm = [element_name] if not guess_element else []
@@ -1227,9 +1065,7 @@ def test_external_fetch(cli, datafiles, tmpdir_factory, subdir, guess_element):
assert cli.get_element_state(str(datafiles), depend_element) == "fetch needed"
# Fetch the workspaced element
- result = cli.run(
- project=project, args=["-C", call_dir, "source", "fetch", *arg_elm]
- )
+ result = cli.run(project=project, args=["-C", call_dir, "source", "fetch", *arg_elm])
result.assert_success()
# Assert that the depended element has now been fetched
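The external-workspace tests run bst from a directory outside the project by passing -C. The fetch above reduces to this pattern, where call_dir is the workspace itself or a subdirectory of it depending on the subdir parameter:

result = cli.run(project=project, args=["-C", call_dir, "source", "fetch", *arg_elm])
result.assert_success()
# The dependency that read "fetch needed" before the call must have moved on
# (the exact post-fetch state string is not shown in this diff):
assert cli.get_element_state(str(datafiles), depend_element) != "fetch needed"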
@@ -1250,15 +1086,10 @@ def test_external_push_pull(cli, datafiles, tmpdir_factory, guess_element):
cli.configure({"artifacts": {"url": share.repo, "push": True}})
- result = cli.run(
- project=project, args=["-C", workspace, "artifact", "push", *arg_elm]
- )
+ result = cli.run(project=project, args=["-C", workspace, "artifact", "push", *arg_elm])
result.assert_success()
- result = cli.run(
- project=project,
- args=["-C", workspace, "artifact", "pull", "--deps", "all", *arg_elm],
- )
+ result = cli.run(project=project, args=["-C", workspace, "artifact", "pull", "--deps", "all", *arg_elm],)
result.assert_success()
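Push and pull from inside a workspace follow directly from the configuration line above; the essential sequence, annotated (share is assumed to come from a create_artifact_share fixture earlier in the test, outside this diff):

cli.configure({"artifacts": {"url": share.repo, "push": True}})  # point the client at the share
result = cli.run(project=project, args=["-C", workspace, "artifact", "push", *arg_elm])
result.assert_success()
# Pull the full dependency closure back from the share
result = cli.run(project=project, args=["-C", workspace, "artifact", "pull", "--deps", "all", *arg_elm])
result.assert_success()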
@@ -1280,9 +1111,7 @@ def test_external_track(cli, datafiles, tmpdir_factory, guess_element):
del element_contents.get_sequence("sources").mapping_at(0)["ref"]
_yaml.roundtrip_dump(element_contents, element_file)
- result = cli.run(
- project=project, args=["-C", workspace, "source", "track", *arg_elm]
- )
+ result = cli.run(project=project, args=["-C", workspace, "source", "track", *arg_elm])
result.assert_success()
# Element is not tracked now
@@ -1290,9 +1119,7 @@ def test_external_track(cli, datafiles, tmpdir_factory, guess_element):
assert "ref" not in element_contents.get_sequence("sources").mapping_at(0)
# close the workspace
- result = cli.run(
- project=project, args=["-C", workspace, "workspace", "close", *arg_elm]
- )
+ result = cli.run(project=project, args=["-C", workspace, "workspace", "close", *arg_elm])
result.assert_success()
# and retrack the element
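The tracking test above forces a retrack by deleting the pinned source ref in place. Spelled out with the load step (the loader call is an assumption; only the deletion and the dump appear in this diff):

element_contents = _yaml.load(element_file, shortname=element_name)  # assumed loader signature
del element_contents.get_sequence("sources").mapping_at(0)["ref"]    # drop the pinned ref
_yaml.roundtrip_dump(element_contents, element_file)                 # write the element back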
@@ -1311,12 +1138,8 @@ def test_external_open_other(cli, datafiles, tmpdir_factory):
tmpdir1 = tmpdir_factory.mktemp("")
tmpdir2 = tmpdir_factory.mktemp("")
# Making use of the assumption that it's the same project in both invocations of open_workspace
- _, project, alpha_workspace = open_workspace(
- cli, tmpdir1, datafiles, "git", suffix="-alpha"
- )
- beta_element, _, beta_workspace = open_workspace(
- cli, tmpdir2, datafiles, "git", suffix="-beta"
- )
+ _, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", suffix="-alpha")
+ beta_element, _, beta_workspace = open_workspace(cli, tmpdir2, datafiles, "git", suffix="-beta")
# Closing the other element first, because I'm too lazy to create an
# element without opening it
@@ -1325,16 +1148,7 @@ def test_external_open_other(cli, datafiles, tmpdir_factory):
result = cli.run(
project=project,
- args=[
- "-C",
- alpha_workspace,
- "workspace",
- "open",
- "--force",
- "--directory",
- beta_workspace,
- beta_element,
- ],
+ args=["-C", alpha_workspace, "workspace", "open", "--force", "--directory", beta_workspace, beta_element,],
)
result.assert_success()
@@ -1345,15 +1159,10 @@ def test_external_close_other(cli, datafiles, tmpdir_factory):
tmpdir1 = tmpdir_factory.mktemp("")
tmpdir2 = tmpdir_factory.mktemp("")
# Making use of the assumption that it's the same project in both invocations of open_workspace
- _, project, alpha_workspace = open_workspace(
- cli, tmpdir1, datafiles, "git", suffix="-alpha"
- )
+ _, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", suffix="-alpha")
beta_element, _, _ = open_workspace(cli, tmpdir2, datafiles, "git", suffix="-beta")
- result = cli.run(
- project=project,
- args=["-C", alpha_workspace, "workspace", "close", beta_element],
- )
+ result = cli.run(project=project, args=["-C", alpha_workspace, "workspace", "close", beta_element],)
result.assert_success()
assert "you can no longer run BuildStream" not in result.stderr
@@ -1365,15 +1174,11 @@ def test_external_close_self(cli, datafiles, tmpdir_factory, guess_element):
tmpdir1 = tmpdir_factory.mktemp("")
tmpdir2 = tmpdir_factory.mktemp("")
# Making use of the assumption that it's the same project in both invocations of open_workspace
- alpha_element, project, alpha_workspace = open_workspace(
- cli, tmpdir1, datafiles, "git", suffix="-alpha"
- )
+ alpha_element, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", suffix="-alpha")
_, _, _ = open_workspace(cli, tmpdir2, datafiles, "git", suffix="-beta")
arg_elm = [alpha_element] if not guess_element else []
- result = cli.run(
- project=project, args=["-C", alpha_workspace, "workspace", "close", *arg_elm]
- )
+ result = cli.run(project=project, args=["-C", alpha_workspace, "workspace", "close", *arg_elm])
result.assert_success()
assert "you can no longer run BuildStream" in result.stderr
@@ -1383,15 +1188,10 @@ def test_external_reset_other(cli, datafiles, tmpdir_factory):
tmpdir1 = tmpdir_factory.mktemp("")
tmpdir2 = tmpdir_factory.mktemp("")
# Making use of the assumption that it's the same project in both invocations of open_workspace
- _, project, alpha_workspace = open_workspace(
- cli, tmpdir1, datafiles, "git", suffix="-alpha"
- )
+ _, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", suffix="-alpha")
beta_element, _, _ = open_workspace(cli, tmpdir2, datafiles, "git", suffix="-beta")
- result = cli.run(
- project=project,
- args=["-C", alpha_workspace, "workspace", "reset", beta_element],
- )
+ result = cli.run(project=project, args=["-C", alpha_workspace, "workspace", "reset", beta_element],)
result.assert_success()
@@ -1402,9 +1202,7 @@ def test_external_reset_self(cli, datafiles, tmpdir, guess_element):
arg_elm = [element] if not guess_element else []
# Command succeeds
- result = cli.run(
- project=project, args=["-C", workspace, "workspace", "reset", *arg_elm]
- )
+ result = cli.run(project=project, args=["-C", workspace, "workspace", "reset", *arg_elm])
result.assert_success()
# Successive commands still work (i.e. .bstproject.yaml hasn't been deleted)
@@ -1430,19 +1228,13 @@ def test_multisource_workspace(cli, datafiles, tmpdir):
element_name = "multisource.bst"
element = {
"kind": "import",
- "sources": [
- {"kind": "local", "path": "files/bin-files"},
- {"kind": "local", "path": "files/dev-files"},
- ],
+ "sources": [{"kind": "local", "path": "files/bin-files"}, {"kind": "local", "path": "files/dev-files"},],
}
element_path = os.path.join(project, "elements", element_name)
_yaml.roundtrip_dump(element, element_path)
workspace_dir = os.path.join(str(tmpdir), "multisource")
- res = cli.run(
- project=project,
- args=["workspace", "open", "multisource.bst", "--directory", workspace_dir],
- )
+ res = cli.run(project=project, args=["workspace", "open", "multisource.bst", "--directory", workspace_dir],)
res.assert_success()
directories = os.listdir(os.path.join(workspace_dir, "usr"))
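The two local sources above stage on top of one another in a single workspace directory, so opening the workspace should yield a merged tree. A sketch of the assertion this hunk leads into, assuming the usual bin-files/dev-files fixture layout of usr/bin and usr/include:

directories = os.listdir(os.path.join(workspace_dir, "usr"))
assert "bin" in directories      # staged from files/bin-files
assert "include" in directories  # staged from files/dev-files (assumed fixture layout)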
@@ -1462,14 +1254,8 @@ TEST_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)))
@pytest.mark.parametrize(
["case", "non_workspaced_elements_state"],
[
- (
- "workspaced-build-dep",
- ["waiting", "waiting", "waiting", "waiting", "waiting"],
- ),
- (
- "workspaced-runtime-dep",
- ["buildable", "buildable", "waiting", "waiting", "waiting"],
- ),
+ ("workspaced-build-dep", ["waiting", "waiting", "waiting", "waiting", "waiting"],),
+ ("workspaced-runtime-dep", ["buildable", "buildable", "waiting", "waiting", "waiting"],),
],
)
@pytest.mark.parametrize("strict", [("strict"), ("non-strict")])
@@ -1492,10 +1278,7 @@ def test_build_all(cli, tmpdir, datafiles, case, strict, non_workspaced_elements
cli.configure({"projects": {"test": {"strict": strict_mode}}})
# First open the workspace
- result = cli.run(
- project=project,
- args=["workspace", "open", "--directory", workspace, "elem1.bst"],
- )
+ result = cli.run(project=project, args=["workspace", "open", "--directory", workspace, "elem1.bst"],)
result.assert_success()
# Ensure all elements are waiting build the first
@@ -1508,9 +1291,7 @@ def test_build_all(cli, tmpdir, datafiles, case, strict, non_workspaced_elements
result.assert_success()
# Assert that the target is built
- assert cli.get_element_states(project, all_elements) == {
- elem: "cached" for elem in all_elements
- }
+ assert cli.get_element_states(project, all_elements) == {elem: "cached" for elem in all_elements}
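The collapsed assertion above compares the whole state mapping in one expression; an equivalent spelled-out form:

states = cli.get_element_states(project, all_elements)
for elem in all_elements:
    assert states[elem] == "cached"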
@pytest.mark.datafiles(DATA_DIR)
@@ -1527,9 +1308,7 @@ def test_show_workspace_logs(cli, tmpdir, datafiles, strict):
cli.configure({"projects": {"test": {"strict": strict_mode}}})
# First open the workspace
- result = cli.run(
- project=project, args=["workspace", "open", "--directory", workspace, target]
- )
+ result = cli.run(project=project, args=["workspace", "open", "--directory", workspace, target])
result.assert_success()
# Build the element