diff options
-rw-r--r--  NEWS                              |  1 +
-rw-r--r--  src/buildstream/_frontend/cli.py  |  7 ++++++-
-rw-r--r--  tests/frontend/push.py            | 35 +++++++++++++++++----------
3 files changed, 30 insertions(+), 13 deletions(-)
@@ -9,6 +9,7 @@ CLI o `bst build --deps` now also accepts "build" as an input. o `bst source fetch --deps` now also accepts "build" and "run" as inputs. o `bst artifact pull --deps` now also accepts "build" and "run" as inputs. + o `bst artifact push --deps` now also accepts "build" and "run" as inputs. Format diff --git a/src/buildstream/_frontend/cli.py b/src/buildstream/_frontend/cli.py index 34e21fd22..11d6dfe98 100644 --- a/src/buildstream/_frontend/cli.py +++ b/src/buildstream/_frontend/cli.py @@ -1355,7 +1355,10 @@ def artifact_pull(app, artifacts, deps, remote): "-d", default=_PipelineSelection.NONE, show_default=True, - type=FastEnumType(_PipelineSelection, [_PipelineSelection.NONE, _PipelineSelection.ALL]), + type=FastEnumType( + _PipelineSelection, + [_PipelineSelection.BUILD, _PipelineSelection.NONE, _PipelineSelection.RUN, _PipelineSelection.ALL], + ), help="The dependencies to push", ) @click.option( @@ -1384,6 +1387,8 @@ def artifact_push(app, artifacts, deps, remote): \b none: No dependencies, just the element itself + run: Runtime dependencies, including the element itself + build: Build time dependencies, excluding the element itself all: All dependencies """ with app.initialized(session_name="Push"): diff --git a/tests/frontend/push.py b/tests/frontend/push.py index 970885784..26dd6cb4f 100644 --- a/tests/frontend/push.py +++ b/tests/frontend/push.py @@ -220,21 +220,31 @@ def test_push_fails_with_on_error_continue(cli, tmpdir, datafiles): assert error in result.stderr -# Tests that `bst artifact push --deps all` pushes all dependencies of the given element. +# Tests that `bst artifact push --deps DEPS` pushes selected dependencies of +# the given element. 
# @pytest.mark.datafiles(DATA_DIR) -def test_push_all(cli, tmpdir, datafiles): +@pytest.mark.parametrize( + "deps, expected_states", + [ + ("build", [False, True, False]), + ("none", [True, False, False]), + ("run", [True, False, True]), + ("all", [True, True, True]), + ], +) +def test_push_deps(cli, tmpdir, datafiles, deps, expected_states): project = str(datafiles) + target = "checkout-deps.bst" + build_dep = "import-dev.bst" + runtime_dep = "import-bin.bst" with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share: # First build it without the artifact cache configured - result = cli.run(project=project, args=["build", "target.bst"]) + result = cli.run(project=project, args=["build", target]) result.assert_success() - # Assert that we are now cached locally - assert cli.get_element_state(project, "target.bst") == "cached" - # Configure artifact share cli.configure( { @@ -252,14 +262,15 @@ def test_push_all(cli, tmpdir, datafiles): ) # Now try bst artifact push all the deps - result = cli.run(project=project, args=["artifact", "push", "target.bst", "--deps", "all"]) + result = cli.run(project=project, args=["artifact", "push", target, "--deps", deps]) result.assert_success() - # And finally assert that all the artifacts are in the share - assert_shared(cli, share, project, "target.bst") - assert_shared(cli, share, project, "import-bin.bst") - assert_shared(cli, share, project, "import-dev.bst") - assert_shared(cli, share, project, "compose-all.bst") + # And finally assert that the selected artifacts are in the share + states = [] + for element in (target, build_dep, runtime_dep): + is_cached = share.get_artifact(cli.get_artifact_name(project, "test", element)) is not None + states.append(is_cached) + assert states == expected_states # Tests that `bst artifact push --deps run $artifact_ref` fails |