Diffstat (limited to 'buildstream/testing')
-rw-r--r--  buildstream/testing/__init__.py | 98
-rw-r--r--  buildstream/testing/_sourcetests/__init__.py | 0
-rw-r--r--  buildstream/testing/_sourcetests/build_checkout.py | 83
-rw-r--r--  buildstream/testing/_sourcetests/fetch.py | 107
-rw-r--r--  buildstream/testing/_sourcetests/mirror.py | 427
-rw-r--r--  buildstream/testing/_sourcetests/project/elements/base.bst | 5
-rw-r--r--  buildstream/testing/_sourcetests/project/elements/base/base-alpine.bst | 17
-rw-r--r--  buildstream/testing/_sourcetests/project/elements/import-bin.bst | 4
-rw-r--r--  buildstream/testing/_sourcetests/project/elements/import-dev.bst | 4
-rw-r--r--  buildstream/testing/_sourcetests/project/elements/multiple_targets/dependency/horsey.bst | 3
-rw-r--r--  buildstream/testing/_sourcetests/project/elements/multiple_targets/dependency/pony.bst | 1
-rw-r--r--  buildstream/testing/_sourcetests/project/elements/multiple_targets/dependency/zebry.bst | 3
-rw-r--r--  buildstream/testing/_sourcetests/project/elements/multiple_targets/order/0.bst | 7
-rw-r--r--  buildstream/testing/_sourcetests/project/elements/multiple_targets/order/1.bst | 4
-rw-r--r--  buildstream/testing/_sourcetests/project/elements/multiple_targets/order/2.bst | 4
-rw-r--r--  buildstream/testing/_sourcetests/project/elements/multiple_targets/order/3.bst | 6
-rw-r--r--  buildstream/testing/_sourcetests/project/elements/multiple_targets/order/4.bst | 2
-rw-r--r--  buildstream/testing/_sourcetests/project/elements/multiple_targets/order/5.bst | 2
-rw-r--r--  buildstream/testing/_sourcetests/project/elements/multiple_targets/order/6.bst | 4
-rw-r--r--  buildstream/testing/_sourcetests/project/elements/multiple_targets/order/7.bst | 4
-rw-r--r--  buildstream/testing/_sourcetests/project/elements/multiple_targets/order/8.bst | 4
-rw-r--r--  buildstream/testing/_sourcetests/project/elements/multiple_targets/order/9.bst | 4
-rw-r--r--  buildstream/testing/_sourcetests/project/elements/multiple_targets/order/run.bst | 2
-rw-r--r--  buildstream/testing/_sourcetests/project/files/bar | 0
-rwxr-xr-x  buildstream/testing/_sourcetests/project/files/bin-files/usr/bin/hello | 3
-rw-r--r--  buildstream/testing/_sourcetests/project/files/dev-files/usr/include/pony.h | 12
-rw-r--r--  buildstream/testing/_sourcetests/project/files/etc-files/etc/buildstream/config | 1
-rw-r--r--  buildstream/testing/_sourcetests/project/files/foo | 0
-rw-r--r--  buildstream/testing/_sourcetests/project/files/source-bundle/llamas.txt | 1
-rw-r--r--  buildstream/testing/_sourcetests/project/files/sub-project/elements/import-etc.bst | 4
-rw-r--r--  buildstream/testing/_sourcetests/project/files/sub-project/files/etc-files/etc/animal.conf | 1
-rw-r--r--  buildstream/testing/_sourcetests/project/files/sub-project/project.conf | 4
-rw-r--r--  buildstream/testing/_sourcetests/project/project.conf | 27
-rw-r--r--  buildstream/testing/_sourcetests/source_determinism.py | 118
-rw-r--r--  buildstream/testing/_sourcetests/track.py | 420
-rw-r--r--  buildstream/testing/_sourcetests/track_cross_junction.py | 186
-rw-r--r--  buildstream/testing/_sourcetests/workspace.py | 161
-rw-r--r--  buildstream/testing/_utils/__init__.py | 10
-rw-r--r--  buildstream/testing/_utils/junction.py | 83
-rw-r--r--  buildstream/testing/_utils/site.py | 46
-rw-r--r--  buildstream/testing/integration.py | 51
-rw-r--r--  buildstream/testing/repo.py | 109
-rw-r--r--  buildstream/testing/runcli.py | 857
43 files changed, 2889 insertions, 0 deletions
diff --git a/buildstream/testing/__init__.py b/buildstream/testing/__init__.py
new file mode 100644
index 000000000..0dfc11f1c
--- /dev/null
+++ b/buildstream/testing/__init__.py
@@ -0,0 +1,98 @@
+#
+# Copyright (C) 2019 Codethink Limited
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+This package contains various utilities which make it easier to test plugins.
+"""
+
+import os
+from collections import OrderedDict
+from . import _sourcetests
+from .repo import Repo
+from .runcli import cli, cli_integration, cli_remote_execution
+
+# To make use of these test utilities it is necessary to have pytest
+# available. However, we don't want to have a hard dependency on
+# pytest.
+try:
+ import pytest
+except ImportError:
+ module_name = globals()['__name__']
+ msg = "Could not import pytest:\n" \
+ "To use the {} module, you must have pytest installed.".format(module_name)
+ raise ImportError(msg)
+
+
+ALL_REPO_KINDS = OrderedDict()
+
+
+def create_repo(kind, directory, subdir='repo'):
+ """Convenience method for creating a Repo
+
+ Args:
+ kind (str): The kind of repo to create (a source plugin basename). This
+ must have previously been registered using
+ `register_repo_kind`
+        directory (str): The path where the repo will keep a cache
+        subdir (str): The subdirectory of ``directory`` in which to create
+                      the repo (defaults to 'repo')
+
+ Returns:
+ (Repo): A new Repo object
+ """
+ try:
+ constructor = ALL_REPO_KINDS[kind]
+ except KeyError as e:
+ raise AssertionError("Unsupported repo kind {}".format(kind)) from e
+
+ return constructor(directory, subdir=subdir)
+
+
+def register_repo_kind(kind, cls):
+ """Register a new repo kind.
+
+ Registering a repo kind will allow the use of the `create_repo`
+ method for that kind and include that repo kind in ALL_REPO_KINDS
+
+    In addition, repo kinds registered prior to
+ `sourcetests_collection_hook` being called will be automatically
+ used to test the basic behaviour of their associated source
+ plugins using the tests in `testing._sourcetests`.
+
+ Args:
+ kind (str): The kind of repo to create (a source plugin basename)
+ cls (cls) : A class derived from Repo.
+
+ """
+ ALL_REPO_KINDS[kind] = cls
+
+
+def sourcetests_collection_hook(session):
+ """ Used to hook the templated source plugin tests into a pyest test suite.
+
+ This should be called via the `pytest_sessionstart
+ hook <https://docs.pytest.org/en/latest/reference.html#collection-hooks>`_.
+ The tests in the _sourcetests package will be collected as part of
+ whichever test package this hook is called from.
+
+ Args:
+ session (pytest.Session): The current pytest session
+ """
+ SOURCE_TESTS_PATH = os.path.dirname(_sourcetests.__file__)
+ # Add the location of the source tests to the session's
+ # python_files config. Without this, pytest may filter out these
+ # tests during collection.
+ session.config.addinivalue_line("python_files", os.path.join(SOURCE_TESTS_PATH, "*.py"))
+ session.config.args.append(SOURCE_TESTS_PATH)
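
The docstrings above describe the intended workflow for plugin authors: register a Repo subclass for each source kind, then hook the templated source tests into the plugin's own test suite. A minimal sketch of a plugin's conftest.py follows, assuming a hypothetical MyRepo subclass of Repo (not part of this diff):

# conftest.py (sketch): wire a hypothetical source plugin into these utilities.
# 'my_plugin_testutils' and 'MyRepo' are illustrative assumptions.

from buildstream.testing import register_repo_kind, sourcetests_collection_hook
from my_plugin_testutils import MyRepo  # hypothetical Repo subclass

# Make create_repo('mykind', ...) work and opt this kind into the
# templated tests in buildstream.testing._sourcetests.
register_repo_kind('mykind', MyRepo)


def pytest_sessionstart(session):
    # Collect the templated source tests as part of this test package.
    sourcetests_collection_hook(session)
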
diff --git a/buildstream/testing/_sourcetests/__init__.py b/buildstream/testing/_sourcetests/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/buildstream/testing/_sourcetests/__init__.py
diff --git a/buildstream/testing/_sourcetests/build_checkout.py b/buildstream/testing/_sourcetests/build_checkout.py
new file mode 100644
index 000000000..3619d2b7e
--- /dev/null
+++ b/buildstream/testing/_sourcetests/build_checkout.py
@@ -0,0 +1,83 @@
+#
+# Copyright (C) 2018 Codethink Limited
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+
+# Pylint doesn't play well with fixtures and dependency injection from pytest
+# pylint: disable=redefined-outer-name
+
+import os
+import pytest
+
+from buildstream.testing import create_repo, ALL_REPO_KINDS
+from buildstream.testing import cli # pylint: disable=unused-import
+from buildstream import _yaml
+
+# Project directory
+TOP_DIR = os.path.dirname(os.path.realpath(__file__))
+DATA_DIR = os.path.join(TOP_DIR, 'project')
+
+fetch_build_checkout_combos = \
+ [("strict", kind) for kind in ALL_REPO_KINDS] + \
+ [("non-strict", kind) for kind in ALL_REPO_KINDS]
+
+
+def strict_args(args, strict):
+ if strict != "strict":
+ return ['--no-strict', *args]
+ return args
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("strict,kind", fetch_build_checkout_combos)
+def test_fetch_build_checkout(cli, tmpdir, datafiles, strict, kind):
+ checkout = os.path.join(cli.directory, 'checkout')
+ project = str(datafiles)
+ dev_files_path = os.path.join(project, 'files', 'dev-files')
+ element_path = os.path.join(project, 'elements')
+ element_name = 'build-test-{}.bst'.format(kind)
+
+ # Create our repo object of the given source type with
+ # the dev files, and then collect the initial ref.
+ #
+ repo = create_repo(kind, str(tmpdir))
+ ref = repo.create(dev_files_path)
+
+ # Write out our test target
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ repo.source_config(ref=ref)
+ ]
+ }
+ _yaml.dump(element,
+ os.path.join(element_path,
+ element_name))
+
+ assert cli.get_element_state(project, element_name) == 'fetch needed'
+ result = cli.run(project=project, args=strict_args(['build', element_name], strict))
+ result.assert_success()
+ assert cli.get_element_state(project, element_name) == 'cached'
+
+ # Now check it out
+ result = cli.run(project=project, args=strict_args([
+ 'artifact', 'checkout', element_name, '--directory', checkout
+ ], strict))
+ result.assert_success()
+
+ # Check that the pony.h include from files/dev-files exists
+ filename = os.path.join(checkout, 'usr', 'include', 'pony.h')
+ assert os.path.exists(filename)
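
The test above exercises the Repo interface from buildstream/testing/repo.py (listed in the diffstat but not shown in this excerpt): create() populates the repository from a directory and returns a ref, source_config() returns the source dict embedded in generated elements, and the base class is expected to provide self.repo and copy() (as used by the mirror tests below). A minimal sketch of such a subclass, with illustrative tarball-based internals, might look like this:

# Sketch of a Repo subclass satisfying what these tests exercise.
# The tarball-based internals are assumptions for illustration only.

import hashlib
import os
import tarfile

from buildstream.testing import Repo


class TarRepo(Repo):

    def create(self, directory):
        # Pack 'directory' into a tarball under self.repo (created by the
        # base class) and return the ref the 'tar' source kind expects.
        self.tarball = os.path.join(self.repo, 'content.tar')
        with tarfile.open(self.tarball, 'w') as tar:
            tar.add(directory, arcname='.')
        with open(self.tarball, 'rb') as f:
            return hashlib.sha256(f.read()).hexdigest()

    def source_config(self, ref=None):
        # Return the source dict that generated elements will embed.
        config = {
            'kind': 'tar',
            'url': 'file://' + self.tarball,
        }
        if ref is not None:
            config['ref'] = ref
        return config
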
diff --git a/buildstream/testing/_sourcetests/fetch.py b/buildstream/testing/_sourcetests/fetch.py
new file mode 100644
index 000000000..aaf92a14d
--- /dev/null
+++ b/buildstream/testing/_sourcetests/fetch.py
@@ -0,0 +1,107 @@
+#
+# Copyright (C) 2018 Codethink Limited
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+
+# Pylint doesn't play well with fixtures and dependency injection from pytest
+# pylint: disable=redefined-outer-name
+
+import os
+import pytest
+
+from buildstream import _yaml
+from .._utils import generate_junction, configure_project
+from .. import create_repo, ALL_REPO_KINDS
+from .. import cli # pylint: disable=unused-import
+
+# Project directory
+TOP_DIR = os.path.dirname(os.path.realpath(__file__))
+DATA_DIR = os.path.join(TOP_DIR, 'project')
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_fetch(cli, tmpdir, datafiles, kind):
+ project = str(datafiles)
+ bin_files_path = os.path.join(project, 'files', 'bin-files')
+ element_path = os.path.join(project, 'elements')
+ element_name = 'fetch-test-{}.bst'.format(kind)
+
+ # Create our repo object of the given source type with
+ # the bin files, and then collect the initial ref.
+ #
+ repo = create_repo(kind, str(tmpdir))
+ ref = repo.create(bin_files_path)
+
+ # Write out our test target
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ repo.source_config(ref=ref)
+ ]
+ }
+ _yaml.dump(element,
+ os.path.join(element_path,
+ element_name))
+
+ # Assert that a fetch is needed
+ assert cli.get_element_state(project, element_name) == 'fetch needed'
+
+ # Now try to fetch it
+ result = cli.run(project=project, args=['source', 'fetch', element_name])
+ result.assert_success()
+
+ # Assert that we are now buildable because the source is
+ # now cached.
+ assert cli.get_element_state(project, element_name) == 'buildable'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_fetch_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
+ project = str(datafiles)
+ subproject_path = os.path.join(project, 'files', 'sub-project')
+ junction_path = os.path.join(project, 'elements', 'junction.bst')
+
+ import_etc_path = os.path.join(subproject_path, 'elements', 'import-etc-repo.bst')
+ etc_files_path = os.path.join(subproject_path, 'files', 'etc-files')
+
+ repo = create_repo(kind, str(tmpdir.join('import-etc')))
+ ref = repo.create(etc_files_path)
+
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ repo.source_config(ref=(ref if ref_storage == 'inline' else None))
+ ]
+ }
+ _yaml.dump(element, import_etc_path)
+
+ configure_project(project, {
+ 'ref-storage': ref_storage
+ })
+
+ generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == 'inline'))
+
+ if ref_storage == 'project.refs':
+ result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
+ result.assert_success()
+ result = cli.run(project=project, args=['source', 'track', 'junction.bst:import-etc.bst'])
+ result.assert_success()
+
+ result = cli.run(project=project, args=['source', 'fetch', 'junction.bst:import-etc.bst'])
+ result.assert_success()
diff --git a/buildstream/testing/_sourcetests/mirror.py b/buildstream/testing/_sourcetests/mirror.py
new file mode 100644
index 000000000..d682bb2ef
--- /dev/null
+++ b/buildstream/testing/_sourcetests/mirror.py
@@ -0,0 +1,427 @@
+#
+# Copyright (C) 2018 Codethink Limited
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+
+# Pylint doesn't play well with fixtures and dependency injection from pytest
+# pylint: disable=redefined-outer-name
+
+import os
+import pytest
+
+from buildstream import _yaml
+from buildstream._exceptions import ErrorDomain
+from .._utils import generate_junction
+from .. import create_repo, ALL_REPO_KINDS
+from .. import cli # pylint: disable=unused-import
+
+# Project directory
+TOP_DIR = os.path.dirname(os.path.realpath(__file__))
+DATA_DIR = os.path.join(TOP_DIR, 'project')
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_fetch(cli, tmpdir, datafiles, kind):
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
+ dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+ project_dir = os.path.join(str(tmpdir), 'project')
+ os.makedirs(project_dir)
+ element_dir = os.path.join(project_dir, 'elements')
+
+ # Create repo objects of the upstream and mirror
+ upstream_repo = create_repo(kind, upstream_repodir)
+ upstream_repo.create(bin_files_path)
+ mirror_repo = upstream_repo.copy(mirror_repodir)
+ upstream_ref = upstream_repo.create(dev_files_path)
+
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ upstream_repo.source_config(ref=upstream_ref)
+ ]
+ }
+ element_name = 'test.bst'
+ element_path = os.path.join(element_dir, element_name)
+ full_repo = element['sources'][0]['url']
+ upstream_map, repo_name = os.path.split(full_repo)
+ alias = 'foo-' + kind
+ aliased_repo = alias + ':' + repo_name
+ element['sources'][0]['url'] = aliased_repo
+ full_mirror = mirror_repo.source_config()['url']
+ mirror_map, _ = os.path.split(full_mirror)
+ os.makedirs(element_dir)
+ _yaml.dump(element, element_path)
+
+ project = {
+ 'name': 'test',
+ 'element-path': 'elements',
+ 'aliases': {
+ alias: upstream_map + "/"
+ },
+ 'mirrors': [
+ {
+ 'name': 'middle-earth',
+ 'aliases': {
+ alias: [mirror_map + "/"],
+ },
+ },
+ ]
+ }
+ project_file = os.path.join(project_dir, 'project.conf')
+ _yaml.dump(project, project_file)
+
+    # There is no obvious way of checking that the mirror has been fetched,
+    # but at least we can be sure the fetch succeeds.
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ result.assert_success()
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_fetch_upstream_absent(cli, tmpdir, datafiles, kind):
+ dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+ project_dir = os.path.join(str(tmpdir), 'project')
+ os.makedirs(project_dir)
+ element_dir = os.path.join(project_dir, 'elements')
+
+ # Create repo objects of the upstream and mirror
+ upstream_repo = create_repo(kind, upstream_repodir)
+ ref = upstream_repo.create(dev_files_path)
+ mirror_repo = upstream_repo.copy(mirror_repodir)
+
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ upstream_repo.source_config(ref=ref)
+ ]
+ }
+
+ element_name = 'test.bst'
+ element_path = os.path.join(element_dir, element_name)
+ full_repo = element['sources'][0]['url']
+ _, repo_name = os.path.split(full_repo)
+ alias = 'foo-' + kind
+ aliased_repo = alias + ':' + repo_name
+ element['sources'][0]['url'] = aliased_repo
+ full_mirror = mirror_repo.source_config()['url']
+ mirror_map, _ = os.path.split(full_mirror)
+ os.makedirs(element_dir)
+ _yaml.dump(element, element_path)
+
+ project = {
+ 'name': 'test',
+ 'element-path': 'elements',
+ 'aliases': {
+ alias: 'http://www.example.com/'
+ },
+ 'mirrors': [
+ {
+ 'name': 'middle-earth',
+ 'aliases': {
+ alias: [mirror_map + "/"],
+ },
+ },
+ ]
+ }
+ project_file = os.path.join(project_dir, 'project.conf')
+ _yaml.dump(project, project_file)
+
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ result.assert_success()
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_from_includes(cli, tmpdir, datafiles, kind):
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+ project_dir = os.path.join(str(tmpdir), 'project')
+ os.makedirs(project_dir)
+ element_dir = os.path.join(project_dir, 'elements')
+
+ # Create repo objects of the upstream and mirror
+ upstream_repo = create_repo(kind, upstream_repodir)
+ upstream_ref = upstream_repo.create(bin_files_path)
+ mirror_repo = upstream_repo.copy(mirror_repodir)
+
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ upstream_repo.source_config(ref=upstream_ref)
+ ]
+ }
+ element_name = 'test.bst'
+ element_path = os.path.join(element_dir, element_name)
+ full_repo = element['sources'][0]['url']
+ upstream_map, repo_name = os.path.split(full_repo)
+ alias = 'foo-' + kind
+ aliased_repo = alias + ':' + repo_name
+ element['sources'][0]['url'] = aliased_repo
+ full_mirror = mirror_repo.source_config()['url']
+ mirror_map, _ = os.path.split(full_mirror)
+ os.makedirs(element_dir)
+ _yaml.dump(element, element_path)
+
+ config_project_dir = str(tmpdir.join('config'))
+ os.makedirs(config_project_dir, exist_ok=True)
+ config_project = {
+ 'name': 'config'
+ }
+ _yaml.dump(config_project, os.path.join(config_project_dir, 'project.conf'))
+ extra_mirrors = {
+ 'mirrors': [
+ {
+ 'name': 'middle-earth',
+ 'aliases': {
+ alias: [mirror_map + "/"],
+ }
+ }
+ ]
+ }
+ _yaml.dump(extra_mirrors, os.path.join(config_project_dir, 'mirrors.yml'))
+ generate_junction(str(tmpdir.join('config_repo')),
+ config_project_dir,
+ os.path.join(element_dir, 'config.bst'))
+
+ project = {
+ 'name': 'test',
+ 'element-path': 'elements',
+ 'aliases': {
+ alias: upstream_map + "/"
+ },
+ '(@)': [
+ 'config.bst:mirrors.yml'
+ ]
+ }
+ project_file = os.path.join(project_dir, 'project.conf')
+ _yaml.dump(project, project_file)
+
+ # Now make the upstream unavailable.
+ os.rename(upstream_repo.repo, '{}.bak'.format(upstream_repo.repo))
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ result.assert_success()
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_junction_from_includes(cli, tmpdir, datafiles, kind):
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+ project_dir = os.path.join(str(tmpdir), 'project')
+ os.makedirs(project_dir)
+ element_dir = os.path.join(project_dir, 'elements')
+
+ # Create repo objects of the upstream and mirror
+ upstream_repo = create_repo(kind, upstream_repodir)
+ upstream_ref = upstream_repo.create(bin_files_path)
+ mirror_repo = upstream_repo.copy(mirror_repodir)
+
+ element = {
+ 'kind': 'junction',
+ 'sources': [
+ upstream_repo.source_config(ref=upstream_ref)
+ ]
+ }
+ element_name = 'test.bst'
+ element_path = os.path.join(element_dir, element_name)
+ full_repo = element['sources'][0]['url']
+ upstream_map, repo_name = os.path.split(full_repo)
+ alias = 'foo-' + kind
+ aliased_repo = alias + ':' + repo_name
+ element['sources'][0]['url'] = aliased_repo
+ full_mirror = mirror_repo.source_config()['url']
+ mirror_map, _ = os.path.split(full_mirror)
+ os.makedirs(element_dir)
+ _yaml.dump(element, element_path)
+
+ config_project_dir = str(tmpdir.join('config'))
+ os.makedirs(config_project_dir, exist_ok=True)
+ config_project = {
+ 'name': 'config'
+ }
+ _yaml.dump(config_project, os.path.join(config_project_dir, 'project.conf'))
+ extra_mirrors = {
+ 'mirrors': [
+ {
+ 'name': 'middle-earth',
+ 'aliases': {
+ alias: [mirror_map + "/"],
+ }
+ }
+ ]
+ }
+ _yaml.dump(extra_mirrors, os.path.join(config_project_dir, 'mirrors.yml'))
+ generate_junction(str(tmpdir.join('config_repo')),
+ config_project_dir,
+ os.path.join(element_dir, 'config.bst'))
+
+ project = {
+ 'name': 'test',
+ 'element-path': 'elements',
+ 'aliases': {
+ alias: upstream_map + "/"
+ },
+ '(@)': [
+ 'config.bst:mirrors.yml'
+ ]
+ }
+ project_file = os.path.join(project_dir, 'project.conf')
+ _yaml.dump(project, project_file)
+
+ # Now make the upstream unavailable.
+ os.rename(upstream_repo.repo, '{}.bak'.format(upstream_repo.repo))
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ result.assert_main_error(ErrorDomain.STREAM, None)
+ # Now make the upstream available again.
+ os.rename('{}.bak'.format(upstream_repo.repo), upstream_repo.repo)
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ result.assert_success()
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_track_upstream_present(cli, tmpdir, datafiles, kind):
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
+ dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+ project_dir = os.path.join(str(tmpdir), 'project')
+ os.makedirs(project_dir)
+ element_dir = os.path.join(project_dir, 'elements')
+
+ # Create repo objects of the upstream and mirror
+ upstream_repo = create_repo(kind, upstream_repodir)
+ upstream_repo.create(bin_files_path)
+ mirror_repo = upstream_repo.copy(mirror_repodir)
+ upstream_ref = upstream_repo.create(dev_files_path)
+
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ upstream_repo.source_config(ref=upstream_ref)
+ ]
+ }
+
+ element_name = 'test.bst'
+ element_path = os.path.join(element_dir, element_name)
+ full_repo = element['sources'][0]['url']
+ upstream_map, repo_name = os.path.split(full_repo)
+ alias = 'foo-' + kind
+ aliased_repo = alias + ':' + repo_name
+ element['sources'][0]['url'] = aliased_repo
+ full_mirror = mirror_repo.source_config()['url']
+ mirror_map, _ = os.path.split(full_mirror)
+ os.makedirs(element_dir)
+ _yaml.dump(element, element_path)
+
+ project = {
+ 'name': 'test',
+ 'element-path': 'elements',
+ 'aliases': {
+ alias: upstream_map + "/"
+ },
+ 'mirrors': [
+ {
+ 'name': 'middle-earth',
+ 'aliases': {
+ alias: [mirror_map + "/"],
+ },
+ },
+ ]
+ }
+ project_file = os.path.join(project_dir, 'project.conf')
+ _yaml.dump(project, project_file)
+
+ result = cli.run(project=project_dir, args=['source', 'track', element_name])
+ result.assert_success()
+
+ # Tracking tries upstream first. Check the ref is from upstream.
+ new_element = _yaml.load(element_path)
+ source = _yaml.node_get(new_element, dict, 'sources', [0])
+ if 'ref' in source:
+ assert _yaml.node_get(source, str, 'ref') == upstream_ref
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind):
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
+ dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+ project_dir = os.path.join(str(tmpdir), 'project')
+ os.makedirs(project_dir)
+ element_dir = os.path.join(project_dir, 'elements')
+
+ # Create repo objects of the upstream and mirror
+ upstream_repo = create_repo(kind, upstream_repodir)
+ upstream_ref = upstream_repo.create(bin_files_path)
+ mirror_repo = upstream_repo.copy(mirror_repodir)
+ mirror_ref = upstream_ref
+ upstream_ref = upstream_repo.create(dev_files_path)
+
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ upstream_repo.source_config(ref=upstream_ref)
+ ]
+ }
+
+ element_name = 'test.bst'
+ element_path = os.path.join(element_dir, element_name)
+ full_repo = element['sources'][0]['url']
+ _, repo_name = os.path.split(full_repo)
+ alias = 'foo-' + kind
+ aliased_repo = alias + ':' + repo_name
+ element['sources'][0]['url'] = aliased_repo
+ full_mirror = mirror_repo.source_config()['url']
+ mirror_map, _ = os.path.split(full_mirror)
+ os.makedirs(element_dir)
+ _yaml.dump(element, element_path)
+
+ project = {
+ 'name': 'test',
+ 'element-path': 'elements',
+ 'aliases': {
+ alias: 'http://www.example.com/'
+ },
+ 'mirrors': [
+ {
+ 'name': 'middle-earth',
+ 'aliases': {
+ alias: [mirror_map + "/"],
+ },
+ },
+ ]
+ }
+ project_file = os.path.join(project_dir, 'project.conf')
+ _yaml.dump(project, project_file)
+
+ result = cli.run(project=project_dir, args=['source', 'track', element_name])
+ result.assert_success()
+
+ # Check that tracking fell back to the mirror
+ new_element = _yaml.load(element_path)
+ source = _yaml.node_get(new_element, dict, 'sources', [0])
+ if 'ref' in source:
+ assert _yaml.node_get(source, str, 'ref') == mirror_ref
diff --git a/buildstream/testing/_sourcetests/project/elements/base.bst b/buildstream/testing/_sourcetests/project/elements/base.bst
new file mode 100644
index 000000000..428afa736
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/elements/base.bst
@@ -0,0 +1,5 @@
+# elements/base.bst
+
+kind: stack
+depends:
+ - base/base-alpine.bst
diff --git a/buildstream/testing/_sourcetests/project/elements/base/base-alpine.bst b/buildstream/testing/_sourcetests/project/elements/base/base-alpine.bst
new file mode 100644
index 000000000..c5833095d
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/elements/base/base-alpine.bst
@@ -0,0 +1,17 @@
+kind: import
+
+description: |
+ Alpine Linux base for tests
+
+ Generated using the `tests/integration-tests/base/generate-base.sh` script.
+
+sources:
+ - kind: tar
+ base-dir: ''
+ (?):
+ - arch == "x86-64":
+ ref: 3eb559250ba82b64a68d86d0636a6b127aa5f6d25d3601a79f79214dc9703639
+ url: "alpine:integration-tests-base.v1.x86_64.tar.xz"
+ - arch == "aarch64":
+ ref: 431fb5362032ede6f172e70a3258354a8fd71fcbdeb1edebc0e20968c792329a
+ url: "alpine:integration-tests-base.v1.aarch64.tar.xz"
diff --git a/buildstream/testing/_sourcetests/project/elements/import-bin.bst b/buildstream/testing/_sourcetests/project/elements/import-bin.bst
new file mode 100644
index 000000000..a847c0c23
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/elements/import-bin.bst
@@ -0,0 +1,4 @@
+kind: import
+sources:
+- kind: local
+ path: files/bin-files
diff --git a/buildstream/testing/_sourcetests/project/elements/import-dev.bst b/buildstream/testing/_sourcetests/project/elements/import-dev.bst
new file mode 100644
index 000000000..152a54667
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/elements/import-dev.bst
@@ -0,0 +1,4 @@
+kind: import
+sources:
+- kind: local
+ path: files/dev-files
diff --git a/buildstream/testing/_sourcetests/project/elements/multiple_targets/dependency/horsey.bst b/buildstream/testing/_sourcetests/project/elements/multiple_targets/dependency/horsey.bst
new file mode 100644
index 000000000..bd1ffae9c
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/elements/multiple_targets/dependency/horsey.bst
@@ -0,0 +1,3 @@
+kind: autotools
+depends:
+ - multiple_targets/dependency/pony.bst
diff --git a/buildstream/testing/_sourcetests/project/elements/multiple_targets/dependency/pony.bst b/buildstream/testing/_sourcetests/project/elements/multiple_targets/dependency/pony.bst
new file mode 100644
index 000000000..3c29b4ea1
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/elements/multiple_targets/dependency/pony.bst
@@ -0,0 +1 @@
+kind: autotools
diff --git a/buildstream/testing/_sourcetests/project/elements/multiple_targets/dependency/zebry.bst b/buildstream/testing/_sourcetests/project/elements/multiple_targets/dependency/zebry.bst
new file mode 100644
index 000000000..98447ab52
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/elements/multiple_targets/dependency/zebry.bst
@@ -0,0 +1,3 @@
+kind: autotools
+depends:
+ - multiple_targets/dependency/horsey.bst
diff --git a/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/0.bst b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/0.bst
new file mode 100644
index 000000000..a99be06a0
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/0.bst
@@ -0,0 +1,7 @@
+kind: autotools
+description: Root node
+depends:
+ - multiple_targets/order/2.bst
+ - multiple_targets/order/3.bst
+ - filename: multiple_targets/order/run.bst
+ type: runtime
diff --git a/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/1.bst b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/1.bst
new file mode 100644
index 000000000..82b507a62
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/1.bst
@@ -0,0 +1,4 @@
+kind: autotools
+description: Root node
+depends:
+ - multiple_targets/order/9.bst
diff --git a/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/2.bst b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/2.bst
new file mode 100644
index 000000000..ee1afae20
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/2.bst
@@ -0,0 +1,4 @@
+kind: autotools
+description: First dependency level
+depends:
+ - multiple_targets/order/3.bst
diff --git a/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/3.bst b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/3.bst
new file mode 100644
index 000000000..4c3a23dab
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/3.bst
@@ -0,0 +1,6 @@
+kind: autotools
+description: Second dependency level
+depends:
+ - multiple_targets/order/4.bst
+ - multiple_targets/order/5.bst
+ - multiple_targets/order/6.bst
diff --git a/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/4.bst b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/4.bst
new file mode 100644
index 000000000..b663a0b52
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/4.bst
@@ -0,0 +1,2 @@
+kind: autotools
+description: Third level dependency
diff --git a/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/5.bst b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/5.bst
new file mode 100644
index 000000000..b9efcf71b
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/5.bst
@@ -0,0 +1,2 @@
+kind: autotools
+description: Fifth level dependency
diff --git a/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/6.bst b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/6.bst
new file mode 100644
index 000000000..6c19d04e3
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/6.bst
@@ -0,0 +1,4 @@
+kind: autotools
+description: Fourth level dependency
+depends:
+ - multiple_targets/order/5.bst
diff --git a/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/7.bst b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/7.bst
new file mode 100644
index 000000000..6805b3e6d
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/7.bst
@@ -0,0 +1,4 @@
+kind: autotools
+description: Third level dependency
+depends:
+ - multiple_targets/order/6.bst
diff --git a/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/8.bst b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/8.bst
new file mode 100644
index 000000000..b8d8964a0
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/8.bst
@@ -0,0 +1,4 @@
+kind: autotools
+description: Second level dependency
+depends:
+ - multiple_targets/order/7.bst
diff --git a/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/9.bst b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/9.bst
new file mode 100644
index 000000000..cc13bf3f0
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/9.bst
@@ -0,0 +1,4 @@
+kind: autotools
+description: First level dependency
+depends:
+ - multiple_targets/order/8.bst
diff --git a/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/run.bst b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/run.bst
new file mode 100644
index 000000000..9b3d2446c
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/elements/multiple_targets/order/run.bst
@@ -0,0 +1,2 @@
+kind: autotools
+description: Not a root node, yet built at the same time as root nodes
diff --git a/buildstream/testing/_sourcetests/project/files/bar b/buildstream/testing/_sourcetests/project/files/bar
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/files/bar
diff --git a/buildstream/testing/_sourcetests/project/files/bin-files/usr/bin/hello b/buildstream/testing/_sourcetests/project/files/bin-files/usr/bin/hello
new file mode 100755
index 000000000..f534a4083
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/files/bin-files/usr/bin/hello
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+echo "Hello !"
diff --git a/buildstream/testing/_sourcetests/project/files/dev-files/usr/include/pony.h b/buildstream/testing/_sourcetests/project/files/dev-files/usr/include/pony.h
new file mode 100644
index 000000000..40bd0c2e7
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/files/dev-files/usr/include/pony.h
@@ -0,0 +1,12 @@
+#ifndef __PONY_H__
+#define __PONY_H__
+
+#define PONY_BEGIN "Once upon a time, there was a pony."
+#define PONY_END "And they lived happily ever after, the end."
+
+#define MAKE_PONY(story) \
+ PONY_BEGIN \
+ story \
+ PONY_END
+
+#endif /* __PONY_H__ */
diff --git a/buildstream/testing/_sourcetests/project/files/etc-files/etc/buildstream/config b/buildstream/testing/_sourcetests/project/files/etc-files/etc/buildstream/config
new file mode 100644
index 000000000..04204c7c9
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/files/etc-files/etc/buildstream/config
@@ -0,0 +1 @@
+config
diff --git a/buildstream/testing/_sourcetests/project/files/foo b/buildstream/testing/_sourcetests/project/files/foo
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/files/foo
diff --git a/buildstream/testing/_sourcetests/project/files/source-bundle/llamas.txt b/buildstream/testing/_sourcetests/project/files/source-bundle/llamas.txt
new file mode 100644
index 000000000..f98b24871
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/files/source-bundle/llamas.txt
@@ -0,0 +1 @@
+llamas
diff --git a/buildstream/testing/_sourcetests/project/files/sub-project/elements/import-etc.bst b/buildstream/testing/_sourcetests/project/files/sub-project/elements/import-etc.bst
new file mode 100644
index 000000000..f0171990e
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/files/sub-project/elements/import-etc.bst
@@ -0,0 +1,4 @@
+kind: import
+sources:
+- kind: local
+ path: files/etc-files
diff --git a/buildstream/testing/_sourcetests/project/files/sub-project/files/etc-files/etc/animal.conf b/buildstream/testing/_sourcetests/project/files/sub-project/files/etc-files/etc/animal.conf
new file mode 100644
index 000000000..db8c36cba
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/files/sub-project/files/etc-files/etc/animal.conf
@@ -0,0 +1 @@
+animal=Pony
diff --git a/buildstream/testing/_sourcetests/project/files/sub-project/project.conf b/buildstream/testing/_sourcetests/project/files/sub-project/project.conf
new file mode 100644
index 000000000..bbb8414a3
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/files/sub-project/project.conf
@@ -0,0 +1,4 @@
+# Project config for frontend build test
+name: subtest
+
+element-path: elements
diff --git a/buildstream/testing/_sourcetests/project/project.conf b/buildstream/testing/_sourcetests/project/project.conf
new file mode 100644
index 000000000..05b68bfeb
--- /dev/null
+++ b/buildstream/testing/_sourcetests/project/project.conf
@@ -0,0 +1,27 @@
+# Project config for frontend build test
+name: test
+element-path: elements
+aliases:
+ alpine: https://bst-integration-test-images.ams3.cdn.digitaloceanspaces.com/
+ project_dir: file://{project_dir}
+options:
+ linux:
+ type: bool
+ description: Whether to expect a linux platform
+ default: True
+ arch:
+ type: arch
+ description: Current architecture
+ values:
+ - x86-64
+ - aarch64
+split-rules:
+ test:
+ - |
+ /tests
+ - |
+ /tests/*
+
+fatal-warnings:
+- bad-element-suffix
+- bad-characters-in-name
diff --git a/buildstream/testing/_sourcetests/source_determinism.py b/buildstream/testing/_sourcetests/source_determinism.py
new file mode 100644
index 000000000..8597a7072
--- /dev/null
+++ b/buildstream/testing/_sourcetests/source_determinism.py
@@ -0,0 +1,118 @@
+#
+# Copyright (C) 2018 Codethink Limited
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+
+# Pylint doesn't play well with fixtures and dependency injection from pytest
+# pylint: disable=redefined-outer-name
+
+import os
+import pytest
+
+from buildstream import _yaml
+from .._utils.site import HAVE_SANDBOX
+from .. import create_repo, ALL_REPO_KINDS
+from .. import cli # pylint: disable=unused-import
+
+# Project directory
+TOP_DIR = os.path.dirname(os.path.realpath(__file__))
+DATA_DIR = os.path.join(TOP_DIR, 'project')
+
+
+def create_test_file(*path, mode=0o644, content='content\n'):
+ path = os.path.join(*path)
+ os.makedirs(os.path.dirname(path), exist_ok=True)
+ with open(path, 'w') as f:
+ f.write(content)
+ os.fchmod(f.fileno(), mode)
+
+
+def create_test_directory(*path, mode=0o644):
+ create_test_file(*path, '.keep', content='')
+ path = os.path.join(*path)
+ os.chmod(path, mode)
+
+
+@pytest.mark.integration
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", ['local', *ALL_REPO_KINDS])
+@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+def test_deterministic_source_umask(cli, tmpdir, datafiles, kind):
+ project = str(datafiles)
+ element_name = 'list.bst'
+ element_path = os.path.join(project, 'elements', element_name)
+ repodir = os.path.join(str(tmpdir), 'repo')
+ sourcedir = os.path.join(project, 'source')
+
+ create_test_file(sourcedir, 'a.txt', mode=0o700)
+ create_test_file(sourcedir, 'b.txt', mode=0o755)
+ create_test_file(sourcedir, 'c.txt', mode=0o600)
+ create_test_file(sourcedir, 'd.txt', mode=0o400)
+ create_test_file(sourcedir, 'e.txt', mode=0o644)
+ create_test_file(sourcedir, 'f.txt', mode=0o4755)
+ create_test_file(sourcedir, 'g.txt', mode=0o2755)
+ create_test_file(sourcedir, 'h.txt', mode=0o1755)
+ create_test_directory(sourcedir, 'dir-a', mode=0o0700)
+ create_test_directory(sourcedir, 'dir-c', mode=0o0755)
+ create_test_directory(sourcedir, 'dir-d', mode=0o4755)
+ create_test_directory(sourcedir, 'dir-e', mode=0o2755)
+ create_test_directory(sourcedir, 'dir-f', mode=0o1755)
+
+ if kind == 'local':
+ source = {'kind': 'local',
+ 'path': 'source'}
+ else:
+ repo = create_repo(kind, repodir)
+ ref = repo.create(sourcedir)
+ source = repo.source_config(ref=ref)
+ element = {
+ 'kind': 'manual',
+ 'depends': [
+ {
+ 'filename': 'base.bst',
+ 'type': 'build'
+ }
+ ],
+ 'sources': [
+ source
+ ],
+ 'config': {
+ 'install-commands': [
+ 'ls -l >"%{install-root}/ls-l"'
+ ]
+ }
+ }
+ _yaml.dump(element, element_path)
+
+ def get_value_for_umask(umask):
+ checkoutdir = os.path.join(str(tmpdir), 'checkout-{}'.format(umask))
+
+ old_umask = os.umask(umask)
+
+ try:
+ result = cli.run(project=project, args=['build', element_name])
+ result.assert_success()
+
+ result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkoutdir])
+ result.assert_success()
+
+ with open(os.path.join(checkoutdir, 'ls-l'), 'r') as f:
+ return f.read()
+ finally:
+ os.umask(old_umask)
+ cli.remove_artifact_from_cache(project, element_name)
+
+ assert get_value_for_umask(0o022) == get_value_for_umask(0o077)
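
The comparison above works because, without normalization, the process umask would leak into the staged sources: a newly created file gets its requested mode masked by the umask, so the 'ls -l' output captured in the sandbox would differ between umask 0o022 and 0o077 unless the source plugin stages content deterministically. A small illustration of that masking, using plain os calls independent of BuildStream:

# Illustration: the effective mode of a newly created file is
# (requested_mode & ~umask), which is why the test compares
# checkouts built under two different umasks.

import os
import stat
import tempfile


def created_mode(requested_mode, umask):
    old = os.umask(umask)
    try:
        workdir = tempfile.mkdtemp()
        path = os.path.join(workdir, 'probe')
        fd = os.open(path, os.O_CREAT | os.O_WRONLY, requested_mode)
        os.close(fd)
        return stat.S_IMODE(os.stat(path).st_mode)
    finally:
        os.umask(old)


assert created_mode(0o666, 0o022) == 0o644  # rw-r--r--
assert created_mode(0o666, 0o077) == 0o600  # rw-------
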
diff --git a/buildstream/testing/_sourcetests/track.py b/buildstream/testing/_sourcetests/track.py
new file mode 100644
index 000000000..668ea29e5
--- /dev/null
+++ b/buildstream/testing/_sourcetests/track.py
@@ -0,0 +1,420 @@
+#
+# Copyright (C) 2018 Codethink Limited
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+
+# Pylint doesn't play well with fixtures and dependency injection from pytest
+# pylint: disable=redefined-outer-name
+
+import os
+import pytest
+
+from buildstream import _yaml
+from buildstream._exceptions import ErrorDomain
+from .._utils import generate_junction, configure_project
+from .. import create_repo, ALL_REPO_KINDS
+from .. import cli # pylint: disable=unused-import
+
+# Project directory
+TOP_DIR = os.path.dirname(os.path.realpath(__file__))
+DATA_DIR = os.path.join(TOP_DIR, 'project')
+
+
+def generate_element(repo, element_path, dep_name=None):
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ repo.source_config()
+ ]
+ }
+ if dep_name:
+ element['depends'] = [dep_name]
+
+ _yaml.dump(element, element_path)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_track(cli, tmpdir, datafiles, ref_storage, kind):
+ project = str(datafiles)
+ dev_files_path = os.path.join(project, 'files', 'dev-files')
+ element_path = os.path.join(project, 'elements')
+ element_name = 'track-test-{}.bst'.format(kind)
+
+ configure_project(project, {
+ 'ref-storage': ref_storage
+ })
+
+ # Create our repo object of the given source type with
+ # the dev files, and then collect the initial ref.
+ #
+ repo = create_repo(kind, str(tmpdir))
+ repo.create(dev_files_path)
+
+ # Generate the element
+ generate_element(repo, os.path.join(element_path, element_name))
+
+ # Assert that a fetch is needed
+ assert cli.get_element_state(project, element_name) == 'no reference'
+
+ # Now first try to track it
+ result = cli.run(project=project, args=['source', 'track', element_name])
+ result.assert_success()
+
+ # And now fetch it: The Source has probably already cached the
+ # latest ref locally, but it is not required to have cached
+ # the associated content of the latest ref at track time, that
+ # is the job of fetch.
+ result = cli.run(project=project, args=['source', 'fetch', element_name])
+ result.assert_success()
+
+ # Assert that we are now buildable because the source is
+ # now cached.
+ assert cli.get_element_state(project, element_name) == 'buildable'
+
+ # Assert there was a project.refs created, depending on the configuration
+ if ref_storage == 'project.refs':
+ assert os.path.exists(os.path.join(project, 'project.refs'))
+ else:
+ assert not os.path.exists(os.path.join(project, 'project.refs'))
+
+
+# NOTE:
+#
+# This test checks that recursive tracking works by observing
+# element states after running a recursive tracking operation.
+#
+# However, this test is ALSO valuable as it stresses the source
+# plugins in a situation where many source plugins are operating
+# at once on the same backing repository.
+#
+# Do not change this test to use a separate 'Repo' per element,
+# as that would defeat the purpose of the stress test; if such a
+# change is needed, please refactor that aspect into another test.
+#
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("amount", [(1), (10)])
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_track_recurse(cli, tmpdir, datafiles, kind, amount):
+ project = str(datafiles)
+ dev_files_path = os.path.join(project, 'files', 'dev-files')
+ element_path = os.path.join(project, 'elements')
+
+ # Try to actually launch as many fetch jobs as possible at the same time
+ #
+ # This stresses the Source plugins and helps to ensure that
+ # they handle concurrent access to the store correctly.
+ cli.configure({
+ 'scheduler': {
+ 'fetchers': amount,
+ }
+ })
+
+ # Create our repo object of the given source type with
+ # the dev files, and then collect the initial ref.
+ #
+ repo = create_repo(kind, str(tmpdir))
+ repo.create(dev_files_path)
+
+ # Write out our test targets
+ element_names = []
+ last_element_name = None
+ for i in range(amount + 1):
+ element_name = 'track-test-{}-{}.bst'.format(kind, i + 1)
+ filename = os.path.join(element_path, element_name)
+
+ element_names.append(element_name)
+
+ generate_element(repo, filename, dep_name=last_element_name)
+ last_element_name = element_name
+
+ # Assert that a fetch is needed
+ states = cli.get_element_states(project, [last_element_name])
+ for element_name in element_names:
+ assert states[element_name] == 'no reference'
+
+ # Now first try to track it
+ result = cli.run(project=project, args=[
+ 'source', 'track', '--deps', 'all',
+ last_element_name])
+ result.assert_success()
+
+ # And now fetch it: The Source has probably already cached the
+ # latest ref locally, but it is not required to have cached
+ # the associated content of the latest ref at track time, that
+ # is the job of fetch.
+ result = cli.run(project=project, args=[
+ 'source', 'fetch', '--deps', 'all',
+ last_element_name])
+ result.assert_success()
+
+ # Assert that the base is buildable and the rest are waiting
+ states = cli.get_element_states(project, [last_element_name])
+ for element_name in element_names:
+ if element_name == element_names[0]:
+ assert states[element_name] == 'buildable'
+ else:
+ assert states[element_name] == 'waiting'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_track_recurse_except(cli, tmpdir, datafiles, kind):
+ project = str(datafiles)
+ dev_files_path = os.path.join(project, 'files', 'dev-files')
+ element_path = os.path.join(project, 'elements')
+ element_dep_name = 'track-test-dep-{}.bst'.format(kind)
+ element_target_name = 'track-test-target-{}.bst'.format(kind)
+
+ # Create our repo object of the given source type with
+ # the dev files, and then collect the initial ref.
+ #
+ repo = create_repo(kind, str(tmpdir))
+ repo.create(dev_files_path)
+
+ # Write out our test targets
+ generate_element(repo, os.path.join(element_path, element_dep_name))
+ generate_element(repo, os.path.join(element_path, element_target_name),
+ dep_name=element_dep_name)
+
+ # Assert that a fetch is needed
+ states = cli.get_element_states(project, [element_target_name])
+ assert states[element_dep_name] == 'no reference'
+ assert states[element_target_name] == 'no reference'
+
+ # Now first try to track it
+ result = cli.run(project=project, args=[
+ 'source', 'track', '--deps', 'all', '--except', element_dep_name,
+ element_target_name])
+ result.assert_success()
+
+ # And now fetch it: The Source has probably already cached the
+ # latest ref locally, but it is not required to have cached
+ # the associated content of the latest ref at track time, that
+ # is the job of fetch.
+ result = cli.run(project=project, args=[
+ 'source', 'fetch', '--deps', 'none',
+ element_target_name])
+ result.assert_success()
+
+ # Assert that the dependency is buildable and the target is waiting
+ states = cli.get_element_states(project, [element_target_name])
+ assert states[element_dep_name] == 'no reference'
+ assert states[element_target_name] == 'waiting'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
+ project = str(datafiles)
+ subproject_path = os.path.join(project, 'files', 'sub-project')
+ junction_path = os.path.join(project, 'elements', 'junction.bst')
+ etc_files = os.path.join(subproject_path, 'files', 'etc-files')
+ repo_element_path = os.path.join(subproject_path, 'elements',
+ 'import-etc-repo.bst')
+
+ configure_project(project, {
+ 'ref-storage': ref_storage
+ })
+
+ repo = create_repo(kind, str(tmpdir.join('element_repo')))
+ repo.create(etc_files)
+
+ generate_element(repo, repo_element_path)
+
+ generate_junction(str(tmpdir.join('junction_repo')),
+ subproject_path, junction_path, store_ref=False)
+
+ # Track the junction itself first.
+ result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
+ result.assert_success()
+
+ assert cli.get_element_state(project, 'junction.bst:import-etc-repo.bst') == 'no reference'
+
+ # Track the cross junction element. -J is not given, it is implied.
+ result = cli.run(project=project, args=['source', 'track', 'junction.bst:import-etc-repo.bst'])
+
+ if ref_storage == 'inline':
+        # Tracking cross junction elements is not allowed without project.refs.
+ result.assert_main_error(ErrorDomain.PIPELINE, 'untrackable-sources')
+ else:
+ result.assert_success()
+
+ assert cli.get_element_state(project, 'junction.bst:import-etc-repo.bst') == 'buildable'
+
+ assert os.path.exists(os.path.join(project, 'project.refs'))
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_track_include(cli, tmpdir, datafiles, ref_storage, kind):
+ project = str(datafiles)
+ dev_files_path = os.path.join(project, 'files', 'dev-files')
+ element_path = os.path.join(project, 'elements')
+ element_name = 'track-test-{}.bst'.format(kind)
+
+ configure_project(project, {
+ 'ref-storage': ref_storage
+ })
+
+ # Create our repo object of the given source type with
+ # the dev files, and then collect the initial ref.
+ #
+ repo = create_repo(kind, str(tmpdir))
+ ref = repo.create(dev_files_path)
+
+ # Generate the element
+ element = {
+ 'kind': 'import',
+ '(@)': ['elements/sources.yml']
+ }
+ sources = {
+ 'sources': [
+ repo.source_config()
+ ]
+ }
+
+ _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.dump(sources, os.path.join(element_path, 'sources.yml'))
+
+ # Assert that a fetch is needed
+ assert cli.get_element_state(project, element_name) == 'no reference'
+
+ # Now first try to track it
+ result = cli.run(project=project, args=['source', 'track', element_name])
+ result.assert_success()
+
+ # And now fetch it: The Source has probably already cached the
+ # latest ref locally, but it is not required to have cached
+ # the associated content of the latest ref at track time, that
+ # is the job of fetch.
+ result = cli.run(project=project, args=['source', 'fetch', element_name])
+ result.assert_success()
+
+ # Assert that we are now buildable because the source is
+ # now cached.
+ assert cli.get_element_state(project, element_name) == 'buildable'
+
+ # Assert there was a project.refs created, depending on the configuration
+ if ref_storage == 'project.refs':
+ assert os.path.exists(os.path.join(project, 'project.refs'))
+ else:
+ assert not os.path.exists(os.path.join(project, 'project.refs'))
+
+ new_sources = _yaml.load(os.path.join(element_path, 'sources.yml'))
+
+ # Get all of the sources
+ assert 'sources' in new_sources
+ sources_list = _yaml.node_get(new_sources, list, 'sources')
+ assert len(sources_list) == 1
+
+ # Get the first source from the sources list
+ new_source = _yaml.node_get(new_sources, dict, 'sources', indices=[0])
+ assert 'ref' in new_source
+ assert ref == _yaml.node_get(new_source, str, 'ref')
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_track_include_junction(cli, tmpdir, datafiles, ref_storage, kind):
+ project = str(datafiles)
+ dev_files_path = os.path.join(project, 'files', 'dev-files')
+ element_path = os.path.join(project, 'elements')
+ element_name = 'track-test-{}.bst'.format(kind)
+ subproject_path = os.path.join(project, 'files', 'sub-project')
+ sub_element_path = os.path.join(subproject_path, 'elements')
+ junction_path = os.path.join(element_path, 'junction.bst')
+
+ configure_project(project, {
+ 'ref-storage': ref_storage
+ })
+
+ # Create our repo object of the given source type with
+ # the dev files, and then collect the initial ref.
+ #
+ repo = create_repo(kind, str(tmpdir.join('element_repo')))
+ repo.create(dev_files_path)
+
+ # Generate the element
+ element = {
+ 'kind': 'import',
+ '(@)': ['junction.bst:elements/sources.yml']
+ }
+ sources = {
+ 'sources': [
+ repo.source_config()
+ ]
+ }
+
+ _yaml.dump(element, os.path.join(element_path, element_name))
+ _yaml.dump(sources, os.path.join(sub_element_path, 'sources.yml'))
+
+ generate_junction(str(tmpdir.join('junction_repo')),
+ subproject_path, junction_path, store_ref=True)
+
+ result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
+ result.assert_success()
+
+ # Assert that a fetch is needed
+ assert cli.get_element_state(project, element_name) == 'no reference'
+
+ # Now first try to track it
+ result = cli.run(project=project, args=['source', 'track', element_name])
+
+ # Assert there was a project.refs created, depending on the configuration
+ if ref_storage == 'inline':
+ # FIXME: We should expect an error. But only a warning is emitted
+ # result.assert_main_error(ErrorDomain.SOURCE, 'tracking-junction-fragment')
+
+ assert 'junction.bst:elements/sources.yml: Cannot track source in a fragment from a junction' in result.stderr
+ else:
+ assert os.path.exists(os.path.join(project, 'project.refs'))
+
+ # And now fetch it: The Source has probably already cached the
+ # latest ref locally, but it is not required to have cached
+ # the associated content of the latest ref at track time, that
+ # is the job of fetch.
+ result = cli.run(project=project, args=['source', 'fetch', element_name])
+ result.assert_success()
+
+ # Assert that we are now buildable because the source is
+ # now cached.
+ assert cli.get_element_state(project, element_name) == 'buildable'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_track_junction_included(cli, tmpdir, datafiles, ref_storage, kind):
+ project = str(datafiles)
+ element_path = os.path.join(project, 'elements')
+ subproject_path = os.path.join(project, 'files', 'sub-project')
+ junction_path = os.path.join(element_path, 'junction.bst')
+
+ configure_project(project, {
+ 'ref-storage': ref_storage,
+ '(@)': ['junction.bst:test.yml']
+ })
+
+ generate_junction(str(tmpdir.join('junction_repo')),
+ subproject_path, junction_path, store_ref=False)
+
+ result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
+ result.assert_success()
diff --git a/buildstream/testing/_sourcetests/track_cross_junction.py b/buildstream/testing/_sourcetests/track_cross_junction.py
new file mode 100644
index 000000000..ece3e0b8f
--- /dev/null
+++ b/buildstream/testing/_sourcetests/track_cross_junction.py
@@ -0,0 +1,186 @@
+#
+# Copyright (C) 2018 Codethink Limited
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+
+# Pylint doesn't play well with fixtures and dependency injection from pytest
+# pylint: disable=redefined-outer-name
+
+import os
+import pytest
+
+from buildstream import _yaml
+from .._utils import generate_junction
+from .. import create_repo, ALL_REPO_KINDS
+from .. import cli # pylint: disable=unused-import
+
+# Project directory
+TOP_DIR = os.path.dirname(os.path.realpath(__file__))
+DATA_DIR = os.path.join(TOP_DIR, 'project')
+
+
+def generate_element(repo, element_path, dep_name=None):
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ repo.source_config()
+ ]
+ }
+ if dep_name:
+ element['depends'] = [dep_name]
+
+ _yaml.dump(element, element_path)
+
+
+def generate_import_element(tmpdir, kind, project, name):
+ element_name = 'import-{}.bst'.format(name)
+ repo_element_path = os.path.join(project, 'elements', element_name)
+ files = str(tmpdir.join("imported_files_{}".format(name)))
+ os.makedirs(files)
+
+ with open(os.path.join(files, '{}.txt'.format(name)), 'w') as f:
+ f.write(name)
+
+ repo = create_repo(kind, str(tmpdir.join('element_{}_repo'.format(name))))
+ repo.create(files)
+
+ generate_element(repo, repo_element_path)
+
+ return element_name
+
+
+def generate_project(tmpdir, name, config=None):
+ if config is None:
+ config = {}
+
+ project_name = 'project-{}'.format(name)
+ subproject_path = os.path.join(str(tmpdir.join(project_name)))
+ os.makedirs(os.path.join(subproject_path, 'elements'))
+
+ project_conf = {
+ 'name': name,
+ 'element-path': 'elements'
+ }
+ project_conf.update(config)
+ _yaml.dump(project_conf, os.path.join(subproject_path, 'project.conf'))
+
+ return project_name, subproject_path
+
+
+def generate_simple_stack(project, name, dependencies):
+ element_name = '{}.bst'.format(name)
+ element_path = os.path.join(project, 'elements', element_name)
+ element = {
+ 'kind': 'stack',
+ 'depends': dependencies
+ }
+ _yaml.dump(element, element_path)
+
+ return element_name
+
+
+def generate_cross_element(project, subproject_name, import_name):
+ basename, _ = os.path.splitext(import_name)
+ return generate_simple_stack(project, 'import-{}-{}'.format(subproject_name, basename),
+ [{
+ 'junction': '{}.bst'.format(subproject_name),
+ 'filename': import_name
+ }])
+
+
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_cross_junction_multiple_projects(cli, tmpdir, kind):
+ tmpdir = tmpdir.join(kind)
+
+ # Generate 3 projects: main, a, b
+ _, project = generate_project(tmpdir, 'main', {'ref-storage': 'project.refs'})
+ project_a, project_a_path = generate_project(tmpdir, 'a')
+ project_b, project_b_path = generate_project(tmpdir, 'b')
+
+ # Generate an element with a trackable source for each project
+ element_a = generate_import_element(tmpdir, kind, project_a_path, 'a')
+ element_b = generate_import_element(tmpdir, kind, project_b_path, 'b')
+ element_c = generate_import_element(tmpdir, kind, project, 'c')
+
+ # Create some indirections to the elements with dependencies to test --deps
+ stack_a = generate_simple_stack(project_a_path, 'stack-a', [element_a])
+ stack_b = generate_simple_stack(project_b_path, 'stack-b', [element_b])
+
+ # Create junctions for projects a and b in main.
+ junction_a = '{}.bst'.format(project_a)
+ junction_a_path = os.path.join(project, 'elements', junction_a)
+ generate_junction(tmpdir.join('repo_a'), project_a_path, junction_a_path, store_ref=False)
+
+ junction_b = '{}.bst'.format(project_b)
+ junction_b_path = os.path.join(project, 'elements', junction_b)
+ generate_junction(tmpdir.join('repo_b'), project_b_path, junction_b_path, store_ref=False)
+
+ # Track the junctions.
+ result = cli.run(project=project, args=['source', 'track', junction_a, junction_b])
+ result.assert_success()
+
+ # Import elements from a and b in to main.
+ imported_a = generate_cross_element(project, project_a, stack_a)
+ imported_b = generate_cross_element(project, project_b, stack_b)
+
+ # Generate a top level stack depending on everything
+ all_bst = generate_simple_stack(project, 'all', [imported_a, imported_b, element_c])
+
+ # Track without following junctions. But explicitly also track the elements in project a.
+ result = cli.run(project=project, args=['source', 'track',
+ '--deps', 'all',
+ all_bst,
+ '{}:{}'.format(junction_a, stack_a)])
+ result.assert_success()
+
+ # Elements in project b should not be tracked. But elements in project a and main should.
+ expected = [element_c,
+ '{}:{}'.format(junction_a, element_a)]
+ assert set(result.get_tracked_elements()) == set(expected)
+
+
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_track_exceptions(cli, tmpdir, kind):
+ tmpdir = tmpdir.join(kind)
+
+ _, project = generate_project(tmpdir, 'main', {'ref-storage': 'project.refs'})
+ project_a, project_a_path = generate_project(tmpdir, 'a')
+
+ element_a = generate_import_element(tmpdir, kind, project_a_path, 'a')
+ element_b = generate_import_element(tmpdir, kind, project_a_path, 'b')
+
+ all_bst = generate_simple_stack(project_a_path, 'all', [element_a,
+ element_b])
+
+ junction_a = '{}.bst'.format(project_a)
+ junction_a_path = os.path.join(project, 'elements', junction_a)
+ generate_junction(tmpdir.join('repo_a'), project_a_path, junction_a_path, store_ref=False)
+
+ result = cli.run(project=project, args=['source', 'track', junction_a])
+ result.assert_success()
+
+ imported_b = generate_cross_element(project, project_a, element_b)
+ indirection = generate_simple_stack(project, 'indirection', [imported_b])
+
+ result = cli.run(project=project,
+ args=['source', 'track', '--deps', 'all',
+ '--except', indirection,
+ '{}:{}'.format(junction_a, all_bst), imported_b])
+ result.assert_success()
+
+ expected = ['{}:{}'.format(junction_a, element_a),
+ '{}:{}'.format(junction_a, element_b)]
+ assert set(result.get_tracked_elements()) == set(expected)
diff --git a/buildstream/testing/_sourcetests/workspace.py b/buildstream/testing/_sourcetests/workspace.py
new file mode 100644
index 000000000..5218f8f1e
--- /dev/null
+++ b/buildstream/testing/_sourcetests/workspace.py
@@ -0,0 +1,161 @@
+#
+# Copyright (C) 2018 Codethink Limited
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+
+# Pylint doesn't play well with fixtures and dependency injection from pytest
+# pylint: disable=redefined-outer-name
+
+import os
+import shutil
+import pytest
+
+from buildstream import _yaml
+from .. import create_repo, ALL_REPO_KINDS
+from .. import cli # pylint: disable=unused-import
+
+# Project directory
+TOP_DIR = os.path.dirname(os.path.realpath(__file__))
+DATA_DIR = os.path.join(TOP_DIR, 'project')
+
+
+class WorkspaceCreator():
+ def __init__(self, cli, tmpdir, datafiles, project_path=None):
+ self.cli = cli
+ self.tmpdir = tmpdir
+ self.datafiles = datafiles
+
+ if not project_path:
+ project_path = str(datafiles)
+ else:
+ shutil.copytree(str(datafiles), project_path)
+
+ self.project_path = project_path
+ self.bin_files_path = os.path.join(project_path, 'files', 'bin-files')
+
+ self.workspace_cmd = os.path.join(self.project_path, 'workspace_cmd')
+
+ def create_workspace_element(self, kind, track, suffix='', workspace_dir=None,
+ element_attrs=None):
+ element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
+ element_path = os.path.join(self.project_path, 'elements')
+ if not workspace_dir:
+ workspace_dir = os.path.join(self.workspace_cmd, element_name)
+ if workspace_dir[-4:] == '.bst':
+ workspace_dir = workspace_dir[:-4]
+
+ # Create our repo object of the given source type with
+ # the bin files, and then collect the initial ref.
+ repo = create_repo(kind, str(self.tmpdir))
+ ref = repo.create(self.bin_files_path)
+ if track:
+ ref = None
+
+ # Write out our test target
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ repo.source_config(ref=ref)
+ ]
+ }
+ if element_attrs:
+ element = {**element, **element_attrs}
+ _yaml.dump(element,
+ os.path.join(element_path,
+ element_name))
+ return element_name, element_path, workspace_dir
+
+ def create_workspace_elements(self, kinds, track, suffixs=None, workspace_dir_usr=None,
+ element_attrs=None):
+
+ element_tuples = []
+
+        if suffixs is None:
+            suffixs = ['', ] * len(kinds)
+        elif len(suffixs) != len(kinds):
+            raise ValueError("suffixs must be the same length as kinds")
+
+ for suffix, kind in zip(suffixs, kinds):
+ element_name, _, workspace_dir = \
+ self.create_workspace_element(kind, track, suffix, workspace_dir_usr,
+ element_attrs)
+ element_tuples.append((element_name, workspace_dir))
+
+ # Assert that there is no reference, a track & fetch is needed
+ states = self.cli.get_element_states(self.project_path, [
+ e for e, _ in element_tuples
+ ])
+ if track:
+            assert all(states[e] == 'no reference' for e, _ in element_tuples)
+        else:
+            assert all(states[e] == 'fetch needed' for e, _ in element_tuples)
+
+ return element_tuples
+
+ def open_workspaces(self, kinds, track, suffixs=None, workspace_dir=None,
+ element_attrs=None, no_checkout=False):
+
+ element_tuples = self.create_workspace_elements(kinds, track, suffixs, workspace_dir,
+ element_attrs)
+ os.makedirs(self.workspace_cmd, exist_ok=True)
+
+ # Now open the workspace, this should have the effect of automatically
+ # tracking & fetching the source from the repo.
+ args = ['workspace', 'open']
+ if track:
+ args.append('--track')
+ if no_checkout:
+ args.append('--no-checkout')
+ if workspace_dir is not None:
+ assert len(element_tuples) == 1, "test logic error"
+ _, workspace_dir = element_tuples[0]
+ args.extend(['--directory', workspace_dir])
+
+ args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
+ result = self.cli.run(cwd=self.workspace_cmd, project=self.project_path, args=args)
+
+ result.assert_success()
+
+ if not no_checkout:
+ # Assert that we are now buildable because the source is now cached.
+ states = self.cli.get_element_states(self.project_path, [
+ e for e, _ in element_tuples
+ ])
+            assert all(states[e] == 'buildable' for e, _ in element_tuples)
+
+ # Check that the executable hello file is found in each workspace
+ for _, workspace in element_tuples:
+ filename = os.path.join(workspace, 'usr', 'bin', 'hello')
+ assert os.path.exists(filename)
+
+ return element_tuples
+
+
+def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
+ project_path=None, element_attrs=None, no_checkout=False):
+ workspace_object = WorkspaceCreator(cli, tmpdir, datafiles, project_path)
+ workspaces = workspace_object.open_workspaces((kind, ), track, (suffix, ), workspace_dir,
+ element_attrs, no_checkout)
+ assert len(workspaces) == 1
+ element_name, workspace = workspaces[0]
+ return element_name, workspace_object.project_path, workspace
+
+
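+# A minimal usage sketch of the helper above (test_open below is the simplest
+# real caller); 'git' is assumed here to be among the registered ALL_REPO_KINDS:
+#
+#     element_name, project, workspace = open_workspace(
+#         cli, tmpdir, datafiles, 'git', track=False)
+#     assert os.path.exists(os.path.join(workspace, 'usr', 'bin', 'hello'))
+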
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", ALL_REPO_KINDS)
+def test_open(cli, tmpdir, datafiles, kind):
+ open_workspace(cli, tmpdir, datafiles, kind, False)
diff --git a/buildstream/testing/_utils/__init__.py b/buildstream/testing/_utils/__init__.py
new file mode 100644
index 000000000..b419d72b7
--- /dev/null
+++ b/buildstream/testing/_utils/__init__.py
@@ -0,0 +1,10 @@
+import os
+
+from buildstream import _yaml
+from .junction import generate_junction
+
+
+def configure_project(path, config):
+ config['name'] = 'test'
+ config['element-path'] = 'elements'
+ _yaml.dump(config, os.path.join(path, 'project.conf'))
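+
+
+# A minimal sketch of what this helper writes: for example,
+# configure_project(project, {'ref-storage': 'inline'}) produces a
+# project.conf roughly equivalent to:
+#
+#     name: test
+#     element-path: elements
+#     ref-storage: inline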
diff --git a/buildstream/testing/_utils/junction.py b/buildstream/testing/_utils/junction.py
new file mode 100644
index 000000000..ca059eb8b
--- /dev/null
+++ b/buildstream/testing/_utils/junction.py
@@ -0,0 +1,83 @@
+import subprocess
+import pytest
+
+from buildstream import _yaml
+from .. import Repo
+from .site import HAVE_GIT, GIT, GIT_ENV
+
+
+# generate_junction()
+#
+# Generates a junction element with a git repository
+#
+# Args:
+# tmpdir: The tmpdir fixture, for storing the generated git repo
+# subproject_path: The path for the subproject, to add to the git repo
+# junction_path: The location to store the generated junction element
+# store_ref: Whether to store the ref in the junction.bst file
+#
+# Returns:
+# (str): The ref
+#
+def generate_junction(tmpdir, subproject_path, junction_path, *, store_ref=True):
+ # Create a repo to hold the subproject and generate
+ # a junction element for it
+ #
+ repo = _SimpleGit(str(tmpdir))
+ source_ref = ref = repo.create(subproject_path)
+ if not store_ref:
+ source_ref = None
+
+ element = {
+ 'kind': 'junction',
+ 'sources': [
+ repo.source_config(ref=source_ref)
+ ]
+ }
+ _yaml.dump(element, junction_path)
+
+ return ref
+
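+# A minimal usage sketch, mirroring how the source tests above call this:
+#
+#     junction_path = os.path.join(project, 'elements', 'junction.bst')
+#     ref = generate_junction(str(tmpdir.join('junction_repo')),
+#                             subproject_path, junction_path, store_ref=True)
+#
+# The resulting 'junction.bst' can then be used as a prefix for cross-junction
+# element names of the form 'junction.bst:element.bst'.
+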
+
+# A barebones Git Repo class to use for generating junctions
+class _SimpleGit(Repo):
+ def __init__(self, directory, subdir='repo'):
+ if not HAVE_GIT:
+ pytest.skip('git is not available')
+ super().__init__(directory, subdir)
+
+ def create(self, directory):
+ self.copy_directory(directory, self.repo)
+ self._run_git('init', '.')
+ self._run_git('add', '.')
+ self._run_git('commit', '-m', 'Initial commit')
+ return self.latest_commit()
+
+ def latest_commit(self):
+ return self._run_git(
+ 'rev-parse', 'HEAD',
+ stdout=subprocess.PIPE,
+ universal_newlines=True,
+ ).stdout.strip()
+
+ def source_config(self, ref=None, checkout_submodules=None):
+ config = {
+ 'kind': 'git',
+ 'url': 'file://' + self.repo,
+ 'track': 'master'
+ }
+ if ref is not None:
+ config['ref'] = ref
+ if checkout_submodules is not None:
+ config['checkout-submodules'] = checkout_submodules
+
+ return config
+
+ def _run_git(self, *args, **kwargs):
+ argv = [GIT]
+ argv.extend(args)
+ if 'env' not in kwargs:
+ kwargs['env'] = dict(GIT_ENV, PWD=self.repo)
+ kwargs.setdefault('cwd', self.repo)
+ kwargs.setdefault('check', True)
+ return subprocess.run(argv, **kwargs)
diff --git a/buildstream/testing/_utils/site.py b/buildstream/testing/_utils/site.py
new file mode 100644
index 000000000..54c5b467b
--- /dev/null
+++ b/buildstream/testing/_utils/site.py
@@ -0,0 +1,46 @@
+# Some facts about the execution site, resolved once here
+# so we don't have to repeat this everywhere
+#
+import os
+import sys
+import platform
+
+from buildstream import _site, utils, ProgramNotFoundError
+
+
+try:
+ GIT = utils.get_host_tool('git')
+ HAVE_GIT = True
+ GIT_ENV = {
+ 'GIT_AUTHOR_DATE': '1320966000 +0200',
+ 'GIT_AUTHOR_NAME': 'tomjon',
+ 'GIT_AUTHOR_EMAIL': 'tom@jon.com',
+ 'GIT_COMMITTER_DATE': '1320966000 +0200',
+ 'GIT_COMMITTER_NAME': 'tomjon',
+ 'GIT_COMMITTER_EMAIL': 'tom@jon.com'
+ }
+except ProgramNotFoundError:
+ GIT = None
+ HAVE_GIT = False
+ GIT_ENV = dict()
+
+try:
+ utils.get_host_tool('bwrap')
+ HAVE_BWRAP = True
+ HAVE_BWRAP_JSON_STATUS = _site.get_bwrap_version() >= (0, 3, 2)
+except ProgramNotFoundError:
+ HAVE_BWRAP = False
+ HAVE_BWRAP_JSON_STATUS = False
+
+IS_LINUX = os.getenv('BST_FORCE_BACKEND', sys.platform).startswith('linux')
+IS_WSL = (IS_LINUX and 'Microsoft' in platform.uname().release)
+IS_WINDOWS = (os.name == 'nt')
+
+if not IS_LINUX:
+    HAVE_SANDBOX = True   # fall back to a chroot sandbox on unix
+elif IS_WSL:
+ HAVE_SANDBOX = False # Sandboxes are inoperable under WSL due to lack of FUSE
+elif IS_LINUX and HAVE_BWRAP:
+ HAVE_SANDBOX = True
+else:
+ HAVE_SANDBOX = False
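+
+# A minimal sketch of how these flags are typically consumed by test modules
+# (assuming the flags have been imported and pytest is available):
+#
+#     @pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+#     def test_build_something(cli, datafiles):
+#         ...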
diff --git a/buildstream/testing/integration.py b/buildstream/testing/integration.py
new file mode 100644
index 000000000..e29f480ea
--- /dev/null
+++ b/buildstream/testing/integration.py
@@ -0,0 +1,51 @@
+#
+# Copyright (C) 2017 Codethink Limited
+# Copyright (C) 2018 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+"""
+Integration - tools for inspecting the output of plugin integration tests
+=========================================================================
+
+This module contains utilities for inspecting the artifacts produced during
+integration tests.
+"""
+
+import os
+
+
+# Yield the paths of all files and subdirectories, relative to the given root directory
+def walk_dir(root):
+ for dirname, dirnames, filenames in os.walk(root):
+ # ensure consistent traversal order, needed for consistent
+ # handling of symlinks.
+ dirnames.sort()
+ filenames.sort()
+
+        # yield paths to all subdirectories first.
+ for subdirname in dirnames:
+ yield os.path.join(dirname, subdirname)[len(root):]
+
+        # yield paths to all files.
+ for filename in filenames:
+ yield os.path.join(dirname, filename)[len(root):]
+
+
+# Ensure that a directory contains the given filenames.
+def assert_contains(directory, expected):
+ missing = set(expected)
+ missing.difference_update(walk_dir(directory))
+ if missing:
+ raise AssertionError("Missing {} expected elements from list: {}"
+ .format(len(missing), missing))
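+
+
+# A minimal usage sketch, assuming 'checkout' is a directory containing
+# checked out build output from one of the test projects above:
+#
+#     assert_contains(checkout, ['/usr/bin/hello'])
+#
+# Note that walk_dir() yields paths relative to the root but with a leading
+# separator, so expected entries should be written accordingly.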
diff --git a/buildstream/testing/repo.py b/buildstream/testing/repo.py
new file mode 100644
index 000000000..c1538685d
--- /dev/null
+++ b/buildstream/testing/repo.py
@@ -0,0 +1,109 @@
+#
+# Copyright (C) 2016-2018 Codethink Limited
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Repo - Utility class for testing source plugins
+===============================================
+
+
+"""
+import os
+import shutil
+
+
+class Repo():
+ """Repo()
+
+ Abstract class providing scaffolding for generating data to be
+ used with various sources. Subclasses of Repo may be registered to
+ run through the suite of generic source plugin tests provided in
+ buildstream.testing.
+
+ Args:
+ directory (str): The base temp directory for the test
+ subdir (str): The subdir for the repo, in case there is more than one
+
+ """
+ def __init__(self, directory, subdir='repo'):
+
+ # The working directory for the repo object
+ #
+ self.directory = os.path.abspath(directory)
+
+ # The directory the actual repo will be stored in
+ self.repo = os.path.join(self.directory, subdir)
+
+ os.makedirs(self.repo, exist_ok=True)
+
+ def create(self, directory):
+ """Create a repository in self.directory and add the initial content
+
+ Args:
+ directory: A directory with content to commit
+
+ Returns:
+            (object): A new ref corresponding to this commit, which can
+ be passed as the ref in the Repo.source_config() API.
+ """
+        raise NotImplementedError("create method has not been implemented")
+
+ def source_config(self, ref=None):
+ """
+ Args:
+            ref (object): An optional abstract ref object, usually a string.
+
+ Returns:
+ (dict): A configuration which can be serialized as a
+ source when generating an element file on the fly
+
+ """
+        raise NotImplementedError("source_config method has not been implemented")
+
+ def copy_directory(self, src, dest):
+ """ Copies the content of src to the directory dest
+
+ Like shutil.copytree(), except dest is expected
+ to exist.
+
+ Args:
+ src (str): The source directory
+ dest (str): The destination directory
+ """
+ for filename in os.listdir(src):
+ src_path = os.path.join(src, filename)
+ dest_path = os.path.join(dest, filename)
+ if os.path.isdir(src_path):
+ shutil.copytree(src_path, dest_path)
+ else:
+ shutil.copy2(src_path, dest_path)
+
+ def copy(self, dest):
+ """Creates a copy of this repository in the specified destination.
+
+ Args:
+ dest (str): The destination directory
+
+ Returns:
+ (Repo): A Repo object for the new repository.
+ """
+ subdir = self.repo[len(self.directory):].lstrip(os.sep)
+ new_dir = os.path.join(dest, subdir)
+ os.makedirs(new_dir, exist_ok=True)
+ self.copy_directory(self.repo, new_dir)
+ repo_type = type(self)
+ new_repo = repo_type(dest, subdir)
+ return new_repo
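+
+
+# A minimal sketch of a Repo subclass for a hypothetical source kind; the
+# _SimpleGit class in buildstream/testing/_utils/junction.py follows the same
+# shape for the 'git' kind:
+#
+#     class MyRepo(Repo):
+#
+#         def create(self, directory):
+#             # Import the content of `directory` into the backing repository
+#             # and return an opaque ref identifying that snapshot
+#             ...
+#
+#         def source_config(self, ref=None):
+#             config = {'kind': 'mysource', 'url': 'file://' + self.repo}
+#             if ref is not None:
+#                 config['ref'] = ref
+#             return config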
diff --git a/buildstream/testing/runcli.py b/buildstream/testing/runcli.py
new file mode 100644
index 000000000..72bdce09e
--- /dev/null
+++ b/buildstream/testing/runcli.py
@@ -0,0 +1,857 @@
+#
+# Copyright (C) 2017 Codethink Limited
+# Copyright (C) 2018 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+"""
+runcli - Test fixtures used for running BuildStream commands
+============================================================
+
+:function:'cli' Use result = cli.run([arg1, arg2]) to run buildstream commands
+
+:function:'cli_integration' A variant of the main fixture that keeps persistent
+ artifact and source caches. It also does not use
+ the click test runner to avoid deadlock issues when
+ running `bst shell`, but unfortunately cannot produce
+ nice stacktraces.
+
+"""
+
+
+import os
+import re
+import sys
+import shutil
+import tempfile
+import itertools
+import traceback
+from contextlib import contextmanager, ExitStack
+from ruamel import yaml
+import pytest
+
+# XXX Using pytest private internals here
+#
+# We use pytest internals to capture the stdout/stderr during
+# a run of the buildstream CLI. We do this because click's
+# CliRunner convenience API (click.testing module) does not support
+# separation of stdout/stderr.
+#
+from _pytest.capture import MultiCapture, FDCapture, FDCaptureBinary
+
+# Import the main cli entrypoint
+from buildstream._frontend import cli as bst_cli
+from buildstream import _yaml
+from buildstream._cas import CASCache
+
+# Special private exception accessor, for test case purposes
+from buildstream._exceptions import BstError, get_last_exception, get_last_task_error
+
+
+# Wrapper for the click.testing result
+class Result():
+
+ def __init__(self,
+ exit_code=None,
+ exception=None,
+ exc_info=None,
+ output=None,
+ stderr=None):
+ self.exit_code = exit_code
+ self.exc = exception
+ self.exc_info = exc_info
+ self.output = output
+ self.stderr = stderr
+ self.unhandled_exception = False
+
+ # The last exception/error state is stored at exception
+ # creation time in BstError(), but this breaks down with
+ # recoverable errors where code blocks ignore some errors
+        # and fall back to alternative branches.
+ #
+ # For this reason, we just ignore the exception and errors
+ # in the case that the exit code reported is 0 (success).
+ #
+ if self.exit_code != 0:
+
+            # Check if buildstream failed to handle an
+            # exception; the toplevel CLI exit should
+            # always be a SystemExit exception.
+ #
+ if not isinstance(exception, SystemExit):
+ self.unhandled_exception = True
+
+ self.exception = get_last_exception()
+ self.task_error_domain, \
+ self.task_error_reason = get_last_task_error()
+ else:
+ self.exception = None
+ self.task_error_domain = None
+ self.task_error_reason = None
+
+ # assert_success()
+ #
+ # Asserts that the buildstream session completed successfully
+ #
+ # Args:
+ # fail_message (str): An optional message to override the automatic
+ # assertion error messages
+ # Raises:
+ # (AssertionError): If the session did not complete successfully
+ #
+ def assert_success(self, fail_message=''):
+ assert self.exit_code == 0, fail_message
+ assert self.exc is None, fail_message
+ assert self.exception is None, fail_message
+ assert self.unhandled_exception is False
+
+ # assert_main_error()
+ #
+ # Asserts that the buildstream session failed, and that
+ # the main process error report is as expected
+ #
+ # Args:
+ # error_domain (ErrorDomain): The domain of the error which occurred
+ # error_reason (any): The reason field of the error which occurred
+ # fail_message (str): An optional message to override the automatic
+ # assertion error messages
+    #    debug (bool): If true, prints information regarding the exit state of the Result
+ # Raises:
+ # (AssertionError): If any of the assertions fail
+ #
+ def assert_main_error(self,
+ error_domain,
+ error_reason,
+ fail_message='',
+ *, debug=False):
+ if debug:
+ print(
+ """
+ Exit code: {}
+ Exception: {}
+ Domain: {}
+ Reason: {}
+ """.format(
+ self.exit_code,
+ self.exception,
+ self.exception.domain,
+ self.exception.reason
+ ))
+ assert self.exit_code == -1, fail_message
+ assert self.exc is not None, fail_message
+ assert self.exception is not None, fail_message
+ assert isinstance(self.exception, BstError), fail_message
+ assert self.unhandled_exception is False
+
+ assert self.exception.domain == error_domain, fail_message
+ assert self.exception.reason == error_reason, fail_message
+
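+    # A minimal usage sketch, mirroring the source tests above:
+    #
+    #     result = cli.run(project=project, args=['source', 'track', element_name])
+    #     result.assert_main_error(ErrorDomain.PIPELINE, 'untrackable-sources')
+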
+ # assert_task_error()
+ #
+ # Asserts that the buildstream session failed, and that
+ # the child task error which caused buildstream to exit
+ # is as expected.
+ #
+ # Args:
+ # error_domain (ErrorDomain): The domain of the error which occurred
+ # error_reason (any): The reason field of the error which occurred
+ # fail_message (str): An optional message to override the automatic
+ # assertion error messages
+ # Raises:
+ # (AssertionError): If any of the assertions fail
+ #
+ def assert_task_error(self,
+ error_domain,
+ error_reason,
+ fail_message=''):
+
+ assert self.exit_code == -1, fail_message
+ assert self.exc is not None, fail_message
+ assert self.exception is not None, fail_message
+ assert isinstance(self.exception, BstError), fail_message
+ assert self.unhandled_exception is False
+
+ assert self.task_error_domain == error_domain, fail_message
+ assert self.task_error_reason == error_reason, fail_message
+
+ # assert_shell_error()
+ #
+ # Asserts that the buildstream created a shell and that the task in the
+ # shell failed.
+ #
+ # Args:
+ # fail_message (str): An optional message to override the automatic
+ # assertion error messages
+ # Raises:
+ # (AssertionError): If any of the assertions fail
+ #
+ def assert_shell_error(self, fail_message=''):
+ assert self.exit_code == 1, fail_message
+
+ # get_start_order()
+ #
+ # Gets the list of elements processed in a given queue, in the
+ # order of their first appearances in the session.
+ #
+ # Args:
+ # activity (str): The queue activity name (like 'fetch')
+ #
+ # Returns:
+ # (list): A list of element names in the order which they first appeared in the result
+ #
+ def get_start_order(self, activity):
+ results = re.findall(r'\[\s*{}:(\S+)\s*\]\s*START\s*.*\.log'.format(activity), self.stderr)
+ if results is None:
+ return []
+ return list(results)
+
+ # get_tracked_elements()
+ #
+ # Produces a list of element names on which tracking occurred
+ # during the session.
+ #
+ # This is done by parsing the buildstream stderr log
+ #
+ # Returns:
+ # (list): A list of element names
+ #
+ def get_tracked_elements(self):
+ tracked = re.findall(r'\[\s*track:(\S+)\s*]', self.stderr)
+ if tracked is None:
+ return []
+
+ return list(tracked)
+
+ def get_pushed_elements(self):
+ pushed = re.findall(r'\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed artifact', self.stderr)
+ if pushed is None:
+ return []
+
+ return list(pushed)
+
+ def get_pulled_elements(self):
+ pulled = re.findall(r'\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled artifact', self.stderr)
+ if pulled is None:
+ return []
+
+ return list(pulled)
+
+
+class Cli():
+
+ def __init__(self, directory, verbose=True, default_options=None):
+ self.directory = directory
+ self.config = None
+ self.verbose = verbose
+ self.artifact = TestArtifact()
+
+ if default_options is None:
+ default_options = []
+
+ self.default_options = default_options
+
+ # configure():
+ #
+ # Serializes a user configuration into a buildstream.conf
+ # to use for this test cli.
+ #
+ # Args:
+ # config (dict): The user configuration to use
+ #
+ def configure(self, config):
+ if self.config is None:
+ self.config = {}
+
+ for key, val in config.items():
+ self.config[key] = val
+
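+    # A minimal usage sketch; the keys shown mirror those used by the
+    # fixtures at the end of this module:
+    #
+    #     cli.configure({
+    #         'cachedir': str(tmpdir.join('cache')),
+    #         'sourcedir': str(tmpdir.join('sources')),
+    #     })
+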
+ # remove_artifact_from_cache():
+ #
+ # Remove given element artifact from artifact cache
+ #
+ # Args:
+ # project (str): The project path under test
+ # element_name (str): The name of the element artifact
+ # cache_dir (str): Specific cache dir to remove artifact from
+ #
+ def remove_artifact_from_cache(self, project, element_name,
+ *, cache_dir=None):
+ # Read configuration to figure out where artifacts are stored
+ if not cache_dir:
+ default = os.path.join(project, 'cache')
+
+ if self.config is not None:
+ cache_dir = self.config.get('cachedir', default)
+ else:
+ cache_dir = default
+
+ self.artifact.remove_artifact_from_cache(cache_dir, element_name)
+
+ # run():
+ #
+ # Runs buildstream with the given arguments, additionally
+ # also passes some global options to buildstream in order
+ # to stay contained in the testing environment.
+ #
+ # Args:
+ # configure (bool): Whether to pass a --config argument
+ # project (str): An optional path to a project
+ # silent (bool): Whether to pass --no-verbose
+    #    env (dict): Environment variables to temporarily set during the test
+    #    cwd (str): An optional directory to run the command from
+    #    options (list): Optional list of (option_name, value) project option tuples, passed as --option
+ # args (list): A list of arguments to pass buildstream
+ # binary_capture (bool): Whether to capture the stdout/stderr as binary
+ #
+ def run(self, configure=True, project=None, silent=False, env=None,
+ cwd=None, options=None, args=None, binary_capture=False):
+ if args is None:
+ args = []
+ if options is None:
+ options = []
+
+ # We may have been passed e.g. pathlib.Path or py.path
+ args = [str(x) for x in args]
+ project = str(project)
+
+ options = self.default_options + options
+
+ with ExitStack() as stack:
+ bst_args = ['--no-colors']
+
+ if silent:
+ bst_args += ['--no-verbose']
+
+ if configure:
+ config_file = stack.enter_context(
+ configured(self.directory, self.config)
+ )
+ bst_args += ['--config', config_file]
+
+ if project:
+ bst_args += ['--directory', project]
+
+ for option, value in options:
+ bst_args += ['--option', option, value]
+
+ bst_args += args
+
+ if cwd is not None:
+ stack.enter_context(chdir(cwd))
+
+ if env is not None:
+ stack.enter_context(environment(env))
+
+ # Ensure we have a working stdout - required to work
+ # around a bug that appears to cause AIX to close
+ # sys.__stdout__ after setup.py
+ try:
+ sys.__stdout__.fileno()
+ except ValueError:
+ sys.__stdout__ = open('/dev/stdout', 'w')
+
+ result = self._invoke(bst_cli, bst_args, binary_capture=binary_capture)
+
+ # Some informative stdout we can observe when anything fails
+ if self.verbose:
+ command = "bst " + " ".join(bst_args)
+ print("BuildStream exited with code {} for invocation:\n\t{}"
+ .format(result.exit_code, command))
+ if result.output:
+ print("Program output was:\n{}".format(result.output))
+ if result.stderr:
+ print("Program stderr was:\n{}".format(result.stderr))
+
+ if result.exc_info and result.exc_info[0] != SystemExit:
+ traceback.print_exception(*result.exc_info)
+
+ return result
+
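+    # A minimal usage sketch, as used throughout the source tests above:
+    #
+    #     result = cli.run(project=project, args=['source', 'fetch', element_name])
+    #     result.assert_success()
+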
+ def _invoke(self, cli_object, args=None, binary_capture=False):
+ exc_info = None
+ exception = None
+ exit_code = 0
+
+ # Temporarily redirect sys.stdin to /dev/null to ensure that
+ # Popen doesn't attempt to read pytest's dummy stdin.
+ old_stdin = sys.stdin
+ with open(os.devnull) as devnull:
+ sys.stdin = devnull
+ capture_kind = FDCaptureBinary if binary_capture else FDCapture
+ capture = MultiCapture(out=True, err=True, in_=False, Capture=capture_kind)
+ capture.start_capturing()
+
+ try:
+ cli_object.main(args=args or (), prog_name=cli_object.name)
+ except SystemExit as e:
+ if e.code != 0:
+ exception = e
+
+ exc_info = sys.exc_info()
+
+ exit_code = e.code
+ if not isinstance(exit_code, int):
+ sys.stdout.write('Program exit code was not an integer: ')
+ sys.stdout.write(str(exit_code))
+ sys.stdout.write('\n')
+ exit_code = 1
+ except Exception as e: # pylint: disable=broad-except
+ exception = e
+ exit_code = -1
+ exc_info = sys.exc_info()
+ finally:
+ sys.stdout.flush()
+
+ sys.stdin = old_stdin
+ out, err = capture.readouterr()
+ capture.stop_capturing()
+
+ return Result(exit_code=exit_code,
+ exception=exception,
+ exc_info=exc_info,
+ output=out,
+ stderr=err)
+
+ # Fetch an element state by name by
+ # invoking bst show on the project with the CLI
+ #
+ # If you need to get the states of multiple elements,
+ # then use get_element_states(s) instead.
+ #
+ def get_element_state(self, project, element_name):
+ result = self.run(project=project, silent=True, args=[
+ 'show',
+ '--deps', 'none',
+ '--format', '%{state}',
+ element_name
+ ])
+ result.assert_success()
+ return result.output.strip()
+
+ # Fetch the states of elements for a given target / deps
+ #
+ # Returns a dictionary with the element names as keys
+ #
+ def get_element_states(self, project, targets, deps='all'):
+ result = self.run(project=project, silent=True, args=[
+ 'show',
+ '--deps', deps,
+ '--format', '%{name}||%{state}',
+ *targets
+ ])
+ result.assert_success()
+ lines = result.output.splitlines()
+ states = {}
+ for line in lines:
+ split = line.split(sep='||')
+ states[split[0]] = split[1]
+ return states
+
+ # Fetch an element's cache key by invoking bst show
+ # on the project with the CLI
+ #
+ def get_element_key(self, project, element_name):
+ result = self.run(project=project, silent=True, args=[
+ 'show',
+ '--deps', 'none',
+ '--format', '%{full-key}',
+ element_name
+ ])
+ result.assert_success()
+ return result.output.strip()
+
+ # Get the decoded config of an element.
+ #
+ def get_element_config(self, project, element_name):
+ result = self.run(project=project, silent=True, args=[
+ 'show',
+ '--deps', 'none',
+ '--format', '%{config}',
+ element_name
+ ])
+
+ result.assert_success()
+ return yaml.safe_load(result.output)
+
+ # Fetch the elements that would be in the pipeline with the given
+ # arguments.
+ #
+ def get_pipeline(self, project, elements, except_=None, scope='plan'):
+ if except_ is None:
+ except_ = []
+
+ args = ['show', '--deps', scope, '--format', '%{name}']
+ args += list(itertools.chain.from_iterable(zip(itertools.repeat('--except'), except_)))
+
+ result = self.run(project=project, silent=True, args=args + elements)
+ result.assert_success()
+ return result.output.splitlines()
+
+
+class CliIntegration(Cli):
+
+ # run()
+ #
+ # This supports the same arguments as Cli.run() and additionally
+ # it supports the project_config keyword argument.
+ #
+ # This will first load the project.conf file from the specified
+ # project directory ('project' keyword argument) and perform substitutions
+ # of any {project_dir} specified in the existing project.conf.
+ #
+ # If the project_config parameter is specified, it is expected to
+ # be a dictionary of additional project configuration options, and
+ # will be composited on top of the already loaded project.conf
+ #
+ def run(self, *args, project_config=None, **kwargs):
+
+ # First load the project.conf and substitute {project_dir}
+ #
+ # Save the original project.conf, because we will run more than
+ # once in the same temp directory
+ #
+ project_directory = kwargs['project']
+ project_filename = os.path.join(project_directory, 'project.conf')
+ project_backup = os.path.join(project_directory, 'project.conf.backup')
+ project_load_filename = project_filename
+
+ if not os.path.exists(project_backup):
+ shutil.copy(project_filename, project_backup)
+ else:
+ project_load_filename = project_backup
+
+ with open(project_load_filename) as f:
+ config = f.read()
+ config = config.format(project_dir=project_directory)
+
+ if project_config is not None:
+
+ # If a custom project configuration dictionary was
+ # specified, composite it on top of the already
+ # substituted base project configuration
+ #
+ base_config = _yaml.load_data(config)
+
+ # In order to leverage _yaml.composite_dict(), both
+ # dictionaries need to be loaded via _yaml.load_data() first
+ #
+ with tempfile.TemporaryDirectory(dir=project_directory) as scratchdir:
+
+ temp_project = os.path.join(scratchdir, 'project.conf')
+ with open(temp_project, 'w') as f:
+ yaml.safe_dump(project_config, f)
+
+ project_config = _yaml.load(temp_project)
+
+ _yaml.composite_dict(base_config, project_config)
+
+ base_config = _yaml.node_sanitize(base_config)
+ _yaml.dump(base_config, project_filename)
+
+ else:
+
+ # Otherwise, just dump it as is
+ with open(project_filename, 'w') as f:
+ f.write(config)
+
+ return super().run(*args, **kwargs)
+
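+
+# A minimal usage sketch of the project_config keyword, assuming the
+# cli_integration fixture below and placeholder project/element names:
+#
+#     result = cli_integration.run(project=project,
+#                                  project_config={'ref-storage': 'project.refs'},
+#                                  args=['build', element_name])
+#     result.assert_success()
+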
+
+class CliRemote(CliIntegration):
+
+ # ensure_services():
+ #
+ # Make sure that required services are configured and that
+ # non-required ones are not.
+ #
+ # Args:
+ # actions (bool): Whether to use the 'action-cache' service
+ # artifacts (bool): Whether to use the 'artifact-cache' service
+ # execution (bool): Whether to use the 'execution' service
+ # sources (bool): Whether to use the 'source-cache' service
+ # storage (bool): Whether to use the 'storage' service
+ #
+ # Returns a list of configured services (by names).
+ #
+ def ensure_services(self, actions=True, execution=True, storage=True,
+ artifacts=False, sources=False):
+ # Build a list of configured services by name:
+ configured_services = []
+ if not self.config:
+ return configured_services
+
+ if 'remote-execution' in self.config:
+ rexec_config = self.config['remote-execution']
+
+ if 'action-cache-service' in rexec_config:
+ if actions:
+ configured_services.append('action-cache')
+ else:
+ rexec_config.pop('action-cache-service')
+
+ if 'execution-service' in rexec_config:
+ if execution:
+ configured_services.append('execution')
+ else:
+ rexec_config.pop('execution-service')
+
+ if 'storage-service' in rexec_config:
+ if storage:
+ configured_services.append('storage')
+ else:
+ rexec_config.pop('storage-service')
+
+ if 'artifacts' in self.config:
+ if artifacts:
+ configured_services.append('artifact-cache')
+ else:
+ self.config.pop('artifacts')
+
+ if 'source-caches' in self.config:
+ if sources:
+ configured_services.append('source-cache')
+ else:
+ self.config.pop('source-caches')
+
+ return configured_services
+
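+
+# A minimal usage sketch, assuming the cli_remote_execution fixture below with
+# an artifact cache service configured:
+#
+#     services = cli_remote_execution.ensure_services(artifacts=True)
+#     assert 'artifact-cache' in services
+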
+
+class TestArtifact():
+
+ # remove_artifact_from_cache():
+ #
+ # Remove given element artifact from artifact cache
+ #
+ # Args:
+ # cache_dir (str): Specific cache dir to remove artifact from
+ # element_name (str): The name of the element artifact
+ #
+ def remove_artifact_from_cache(self, cache_dir, element_name):
+
+ cache_dir = os.path.join(cache_dir, 'cas', 'refs', 'heads')
+
+ cache_dir = os.path.splitext(os.path.join(cache_dir, 'test', element_name))[0]
+ shutil.rmtree(cache_dir)
+
+ # is_cached():
+ #
+ # Check if given element has a cached artifact
+ #
+ # Args:
+ # cache_dir (str): Specific cache dir to check
+ # element (Element): The element object
+ # element_key (str): The element's cache key
+ #
+ # Returns:
+ # (bool): If the cache contains the element's artifact
+ #
+ def is_cached(self, cache_dir, element, element_key):
+
+ cas = CASCache(str(cache_dir))
+ artifact_ref = element.get_artifact_name(element_key)
+ return cas.contains(artifact_ref)
+
+ # get_digest():
+ #
+ # Get the digest for a given element's artifact
+ #
+ # Args:
+ # cache_dir (str): Specific cache dir to check
+ # element (Element): The element object
+ # element_key (str): The element's cache key
+ #
+ # Returns:
+ # (Digest): The digest stored in the ref
+ #
+ def get_digest(self, cache_dir, element, element_key):
+
+ cas = CASCache(str(cache_dir))
+ artifact_ref = element.get_artifact_name(element_key)
+ digest = cas.resolve_ref(artifact_ref)
+ return digest
+
+ # extract_buildtree():
+ #
+ # Context manager for extracting an elements artifact buildtree for
+ # inspection.
+ #
+ # Args:
+ # tmpdir (LocalPath): pytest fixture for the tests tmp dir
+ # digest (Digest): The element directory digest to extract
+ #
+ # Yields:
+ # (str): path to extracted buildtree directory, does not guarantee
+ # existence.
+ @contextmanager
+ def extract_buildtree(self, tmpdir, digest):
+ with self._extract_subdirectory(tmpdir, digest, 'buildtree') as extract:
+ yield extract
+
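+    # A minimal usage sketch; the directory passed in is used to locate the
+    # CAS, as in _extract_subdirectory() below:
+    #
+    #     digest = cli.artifact.get_digest(cache_dir, element, element_key)
+    #     with cli.artifact.extract_buildtree(cache_dir, digest) as buildtree:
+    #         # buildtree is a path which is not guaranteed to exist
+    #         ...
+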
+ # _extract_subdirectory():
+ #
+ # Context manager for extracting an element artifact for inspection,
+ # providing an expected path for a given subdirectory
+ #
+ # Args:
+ # tmpdir (LocalPath): pytest fixture for the tests tmp dir
+ # digest (Digest): The element directory digest to extract
+ # subdir (str): Subdirectory to path
+ #
+ # Yields:
+ # (str): path to extracted subdir directory, does not guarantee
+ # existence.
+ @contextmanager
+ def _extract_subdirectory(self, tmpdir, digest, subdir):
+ with tempfile.TemporaryDirectory() as extractdir:
+ try:
+ cas = CASCache(str(tmpdir))
+ cas.checkout(extractdir, digest)
+ yield os.path.join(extractdir, subdir)
+ except FileNotFoundError:
+ yield None
+
+
+# Main fixture
+#
+# Use result = cli.run([arg1, arg2]) to run buildstream commands
+#
+@pytest.fixture()
+def cli(tmpdir):
+ directory = os.path.join(str(tmpdir), 'cache')
+ os.makedirs(directory)
+ return Cli(directory)
+
+
+# A variant of the main fixture that keeps persistent artifact and
+# source caches.
+#
+# It also does not use the click test runner to avoid deadlock issues
+# when running `bst shell`, but unfortunately cannot produce nice
+# stacktraces.
+@pytest.fixture()
+def cli_integration(tmpdir, integration_cache):
+ directory = os.path.join(str(tmpdir), 'cache')
+ os.makedirs(directory)
+
+ if os.environ.get('BST_FORCE_BACKEND') == 'unix':
+ fixture = CliIntegration(directory, default_options=[('linux', 'False')])
+ else:
+ fixture = CliIntegration(directory)
+
+ # We want to cache sources for integration tests more permanently,
+ # to avoid downloading the huge base-sdk repeatedly
+ fixture.configure({
+ 'cachedir': integration_cache.cachedir,
+ 'sourcedir': integration_cache.sources,
+ })
+
+ yield fixture
+
+ # remove following folders if necessary
+ try:
+ shutil.rmtree(os.path.join(integration_cache.cachedir, 'build'))
+ except FileNotFoundError:
+ pass
+ try:
+ shutil.rmtree(os.path.join(integration_cache.cachedir, 'tmp'))
+ except FileNotFoundError:
+ pass
+
+
+# A variant of the main fixture that is configured for remote-execution.
+#
+# It also does not use the click test runner to avoid deadlock issues
+# when running `bst shell`, but unfortunately cannot produce nice
+# stacktraces.
+@pytest.fixture()
+def cli_remote_execution(tmpdir, remote_services):
+ directory = os.path.join(str(tmpdir), 'cache')
+ os.makedirs(directory)
+
+ fixture = CliRemote(directory)
+
+ if remote_services.artifact_service:
+ fixture.configure({'artifacts': [{
+ 'url': remote_services.artifact_service,
+ }]})
+
+ remote_execution = {}
+ if remote_services.action_service:
+ remote_execution['action-cache-service'] = {
+ 'url': remote_services.action_service,
+ }
+ if remote_services.exec_service:
+ remote_execution['execution-service'] = {
+ 'url': remote_services.exec_service,
+ }
+ if remote_services.storage_service:
+ remote_execution['storage-service'] = {
+ 'url': remote_services.storage_service,
+ }
+ if remote_execution:
+ fixture.configure({'remote-execution': remote_execution})
+
+ if remote_services.source_service:
+ fixture.configure({'source-caches': [{
+ 'url': remote_services.source_service,
+ }]})
+
+ return fixture
+
+
+@contextmanager
+def chdir(directory):
+    old_dir = os.getcwd()
+    os.chdir(directory)
+    try:
+        yield
+    finally:
+        # Restore the original working directory, even if the body raised
+        os.chdir(old_dir)
+
+
+@contextmanager
+def environment(env):
+
+ old_env = {}
+ for key, value in env.items():
+ old_env[key] = os.environ.get(key)
+ if value is None:
+ os.environ.pop(key, None)
+ else:
+ os.environ[key] = value
+
+    try:
+        yield
+    finally:
+        # Restore the original environment, even if the body raised
+        for key, value in old_env.items():
+            if value is None:
+                os.environ.pop(key, None)
+            else:
+                os.environ[key] = value
+
+
+@contextmanager
+def configured(directory, config=None):
+
+ # Ensure we've at least relocated the caches to a temp directory
+ if not config:
+ config = {}
+
+ if not config.get('sourcedir', False):
+ config['sourcedir'] = os.path.join(directory, 'sources')
+ if not config.get('cachedir', False):
+ config['cachedir'] = directory
+ if not config.get('logdir', False):
+ config['logdir'] = os.path.join(directory, 'logs')
+
+ # Dump it and yield the filename for test scripts to feed it
+    # to buildstream as an argument
+ filename = os.path.join(directory, "buildstream.conf")
+ _yaml.dump(config, filename)
+
+ yield filename