author     Jonathan Maw <jonathan.maw@codethink.co.uk>   2018-04-11 17:06:07 +0100
committer  Jonathan Maw <jonathan.maw@codethink.co.uk>   2018-07-27 12:24:56 +0000
commit     909120abc32fe3b132f71a13ea94b1f9f929c05c (patch)
tree       3f0e3ff1888c49529e2ea0c8686b03969babb339
parent     bd51a0b211995a3bf5eb62c51fbc76d652b8866f (diff)
download   buildstream-909120abc32fe3b132f71a13ea94b1f9f929c05c.tar.gz
tests: Add mirrored fetching and tracking tests
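
The new tests/frontend/mirror.py module exercises source mirroring from the
frontend: fetching through a mirror for every repo kind, fetching with several
mirrors defined, selecting the default mirror via the --default-mirror
command-line option and via the per-project 'default-mirror' user
configuration (with the command line taking precedence), and tracking both
when the upstream is reachable and when only the mirror is. A small
'fetch_source' test source plugin is added alongside; it records which aliased
URLs it was asked to fetch and from which mirror, so the tests can assert on
the order in which mirrors were tried.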
-rw-r--r--  tests/frontend/mirror.py                         | 402
-rw-r--r--  tests/frontend/project/sources/fetch_source.py   |  85
2 files changed, 487 insertions(+), 0 deletions(-)
diff --git a/tests/frontend/mirror.py b/tests/frontend/mirror.py
new file mode 100644
index 000000000..62c796ab8
--- /dev/null
+++ b/tests/frontend/mirror.py
@@ -0,0 +1,402 @@
+import os
+import pytest
+
+from tests.testutils import cli, create_repo, ALL_REPO_KINDS
+
+from buildstream import _yaml
+
+
+# Project directory
+TOP_DIR = os.path.dirname(os.path.realpath(__file__))
+DATA_DIR = os.path.join(TOP_DIR, 'project')
+
+
+def generate_element(output_file):
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ {
+ 'kind': 'fetch_source',
+ "output-text": output_file,
+ "urls": ["foo:repo1", "bar:repo2"],
+ "fetch-succeeds": {
+ "FOO/repo1": True,
+ "BAR/repo2": False,
+ "OOF/repo1": False,
+ "RAB/repo2": True,
+ "OFO/repo1": False,
+ "RBA/repo2": False,
+ "ooF/repo1": False,
+ "raB/repo2": False,
+ }
+ }
+ ]
+ }
+ return element
+
+
+def generate_project():
+ project = {
+ 'name': 'test',
+ 'element-path': 'elements',
+ 'aliases': {
+ 'foo': 'FOO/',
+ 'bar': 'BAR/',
+ },
+ 'mirrors': [
+ {
+ 'name': 'middle-earth',
+ 'aliases': {
+ 'foo': ['OOF/'],
+ 'bar': ['RAB/'],
+ },
+ },
+ {
+ 'name': 'arrakis',
+ 'aliases': {
+ 'foo': ['OFO/'],
+ 'bar': ['RBA/'],
+ },
+ },
+ {
+ 'name': 'oz',
+ 'aliases': {
+ 'foo': ['ooF/'],
+ 'bar': ['raB/'],
+ }
+ },
+ ],
+ 'plugins': [
+ {
+ 'origin': 'local',
+ 'path': 'sources',
+ 'sources': {
+ 'fetch_source': 0
+ }
+ }
+ ]
+ }
+ return project
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_fetch(cli, tmpdir, datafiles, kind):
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
+ dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+ project_dir = os.path.join(str(tmpdir), 'project')
+ os.makedirs(project_dir)
+ element_dir = os.path.join(project_dir, 'elements')
+
+ # Create repo objects of the upstream and mirror
+ upstream_repo = create_repo(kind, upstream_repodir)
+ upstream_ref = upstream_repo.create(bin_files_path)
+ mirror_repo = upstream_repo.copy(mirror_repodir)
+ mirror_ref = upstream_ref
+ upstream_ref = upstream_repo.create(dev_files_path)
+
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ upstream_repo.source_config(ref=upstream_ref)
+ ]
+ }
+ element_name = 'test.bst'
+ element_path = os.path.join(element_dir, element_name)
+ full_repo = element['sources'][0]['url']
+ upstream_map, repo_name = os.path.split(full_repo)
+ alias = 'foo-' + kind
+ aliased_repo = alias + ':' + repo_name
+ element['sources'][0]['url'] = aliased_repo
+ full_mirror = mirror_repo.source_config()['url']
+ mirror_map, _ = os.path.split(full_mirror)
+ os.makedirs(element_dir)
+ _yaml.dump(element, element_path)
+
+ project = {
+ 'name': 'test',
+ 'element-path': 'elements',
+ 'aliases': {
+ alias: upstream_map + "/"
+ },
+ 'mirrors': [
+ {
+ 'name': 'middle-earth',
+ 'aliases': {
+ alias: [mirror_map + "/"],
+ },
+ },
+ ]
+ }
+ project_file = os.path.join(project_dir, 'project.conf')
+ _yaml.dump(project, project_file)
+
+    # There is no obvious way to check that the mirror was actually used,
+    # but at least we can check that fetching succeeds.
+ result = cli.run(project=project_dir, args=['fetch', element_name])
+ result.assert_success()
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_mirror_fetch_multi(cli, tmpdir, datafiles):
+ output_file = os.path.join(str(tmpdir), "output.txt")
+ project_dir = str(tmpdir)
+ element_dir = os.path.join(project_dir, 'elements')
+ os.makedirs(element_dir, exist_ok=True)
+ element_name = "test.bst"
+ element_path = os.path.join(element_dir, element_name)
+ element = generate_element(output_file)
+ _yaml.dump(element, element_path)
+
+ project_file = os.path.join(project_dir, 'project.conf')
+ project = generate_project()
+ _yaml.dump(project, project_file)
+
+ result = cli.run(project=project_dir, args=['fetch', element_name])
+ result.assert_success()
+ with open(output_file) as f:
+ contents = f.read()
+ assert "Fetch foo:repo1 succeeded from FOO/repo1" in contents
+ assert "Fetch bar:repo2 succeeded from RAB/repo2" in contents
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_mirror_fetch_default_cmdline(cli, tmpdir, datafiles):
+ output_file = os.path.join(str(tmpdir), "output.txt")
+ project_dir = str(tmpdir)
+ element_dir = os.path.join(project_dir, 'elements')
+ os.makedirs(element_dir, exist_ok=True)
+ element_name = "test.bst"
+ element_path = os.path.join(element_dir, element_name)
+ element = generate_element(output_file)
+ _yaml.dump(element, element_path)
+
+ project_file = os.path.join(project_dir, 'project.conf')
+ project = generate_project()
+ _yaml.dump(project, project_file)
+
+ result = cli.run(project=project_dir, args=['--default-mirror', 'arrakis', 'fetch', element_name])
+ result.assert_success()
+ with open(output_file) as f:
+ contents = f.read()
+ print(contents)
+ # Success if fetching from arrakis' mirror happened before middle-earth's
+ arrakis_str = "OFO/repo1"
+ arrakis_pos = contents.find(arrakis_str)
+ assert arrakis_pos != -1, "'{}' wasn't found".format(arrakis_str)
+ me_str = "OOF/repo1"
+ me_pos = contents.find(me_str)
+ assert me_pos != -1, "'{}' wasn't found".format(me_str)
+ assert arrakis_pos < me_pos, "'{}' wasn't found before '{}'".format(arrakis_str, me_str)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_mirror_fetch_default_userconfig(cli, tmpdir, datafiles):
+ output_file = os.path.join(str(tmpdir), "output.txt")
+ project_dir = str(tmpdir)
+ element_dir = os.path.join(project_dir, 'elements')
+ os.makedirs(element_dir, exist_ok=True)
+ element_name = "test.bst"
+ element_path = os.path.join(element_dir, element_name)
+ element = generate_element(output_file)
+ _yaml.dump(element, element_path)
+
+ project_file = os.path.join(project_dir, 'project.conf')
+ project = generate_project()
+ _yaml.dump(project, project_file)
+
+ userconfig = {
+ 'projects': {
+ 'test': {
+ 'default-mirror': 'oz'
+ }
+ }
+ }
+ cli.configure(userconfig)
+
+ result = cli.run(project=project_dir, args=['fetch', element_name])
+ result.assert_success()
+ with open(output_file) as f:
+ contents = f.read()
+ print(contents)
+    # Success if fetching from oz's mirror happened before middle-earth's
+ oz_str = "ooF/repo1"
+ oz_pos = contents.find(oz_str)
+ assert oz_pos != -1, "'{}' wasn't found".format(oz_str)
+ me_str = "OOF/repo1"
+ me_pos = contents.find(me_str)
+ assert me_pos != -1, "'{}' wasn't found".format(me_str)
+ assert oz_pos < me_pos, "'{}' wasn't found before '{}'".format(oz_str, me_str)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_mirror_fetch_default_cmdline_overrides_config(cli, tmpdir, datafiles):
+ output_file = os.path.join(str(tmpdir), "output.txt")
+ project_dir = str(tmpdir)
+ element_dir = os.path.join(project_dir, 'elements')
+ os.makedirs(element_dir, exist_ok=True)
+ element_name = "test.bst"
+ element_path = os.path.join(element_dir, element_name)
+ element = generate_element(output_file)
+ _yaml.dump(element, element_path)
+
+ project_file = os.path.join(project_dir, 'project.conf')
+ project = generate_project()
+ _yaml.dump(project, project_file)
+
+ userconfig = {
+ 'projects': {
+ 'test': {
+ 'default-mirror': 'oz'
+ }
+ }
+ }
+ cli.configure(userconfig)
+
+ result = cli.run(project=project_dir, args=['--default-mirror', 'arrakis', 'fetch', element_name])
+ result.assert_success()
+ with open(output_file) as f:
+ contents = f.read()
+ print(contents)
+ # Success if fetching from arrakis' mirror happened before middle-earth's
+ arrakis_str = "OFO/repo1"
+ arrakis_pos = contents.find(arrakis_str)
+ assert arrakis_pos != -1, "'{}' wasn't found".format(arrakis_str)
+ me_str = "OOF/repo1"
+ me_pos = contents.find(me_str)
+ assert me_pos != -1, "'{}' wasn't found".format(me_str)
+ assert arrakis_pos < me_pos, "'{}' wasn't found before '{}'".format(arrakis_str, me_str)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_track_upstream_present(cli, tmpdir, datafiles, kind):
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
+ dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+ project_dir = os.path.join(str(tmpdir), 'project')
+ os.makedirs(project_dir)
+ element_dir = os.path.join(project_dir, 'elements')
+
+ # Create repo objects of the upstream and mirror
+ upstream_repo = create_repo(kind, upstream_repodir)
+ upstream_ref = upstream_repo.create(bin_files_path)
+ mirror_repo = upstream_repo.copy(mirror_repodir)
+ mirror_ref = upstream_ref
+ upstream_ref = upstream_repo.create(dev_files_path)
+
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ upstream_repo.source_config(ref=upstream_ref)
+ ]
+ }
+
+ element['sources'][0]
+ element_name = 'test.bst'
+ element_path = os.path.join(element_dir, element_name)
+ full_repo = element['sources'][0]['url']
+ upstream_map, repo_name = os.path.split(full_repo)
+ alias = 'foo-' + kind
+ aliased_repo = alias + ':' + repo_name
+ element['sources'][0]['url'] = aliased_repo
+ full_mirror = mirror_repo.source_config()['url']
+ mirror_map, _ = os.path.split(full_mirror)
+ os.makedirs(element_dir)
+ _yaml.dump(element, element_path)
+
+ project = {
+ 'name': 'test',
+ 'element-path': 'elements',
+ 'aliases': {
+ alias: upstream_map + "/"
+ },
+ 'mirrors': [
+ {
+ 'name': 'middle-earth',
+ 'aliases': {
+ alias: [mirror_map + "/"],
+ },
+ },
+ ]
+ }
+ project_file = os.path.join(project_dir, 'project.conf')
+ _yaml.dump(project, project_file)
+
+ result = cli.run(project=project_dir, args=['track', element_name])
+ result.assert_success()
+
+ # Tracking tries upstream first. Check the ref is from upstream.
+ new_element = _yaml.load(element_path)
+ source = new_element['sources'][0]
+ if 'ref' in source:
+ assert source['ref'] == upstream_ref
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind):
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
+ dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+ project_dir = os.path.join(str(tmpdir), 'project')
+ os.makedirs(project_dir)
+ element_dir = os.path.join(project_dir, 'elements')
+
+ # Create repo objects of the upstream and mirror
+ upstream_repo = create_repo(kind, upstream_repodir)
+ upstream_ref = upstream_repo.create(bin_files_path)
+ mirror_repo = upstream_repo.copy(mirror_repodir)
+ mirror_ref = upstream_ref
+ upstream_ref = upstream_repo.create(dev_files_path)
+
+ element = {
+ 'kind': 'import',
+ 'sources': [
+ upstream_repo.source_config(ref=upstream_ref)
+ ]
+ }
+
+ element['sources'][0]
+ element_name = 'test.bst'
+ element_path = os.path.join(element_dir, element_name)
+ full_repo = element['sources'][0]['url']
+ upstream_map, repo_name = os.path.split(full_repo)
+ alias = 'foo-' + kind
+ aliased_repo = alias + ':' + repo_name
+ element['sources'][0]['url'] = aliased_repo
+ full_mirror = mirror_repo.source_config()['url']
+ mirror_map, _ = os.path.split(full_mirror)
+ os.makedirs(element_dir)
+ _yaml.dump(element, element_path)
+
+ project = {
+ 'name': 'test',
+ 'element-path': 'elements',
+ 'aliases': {
+ alias: 'http://www.example.com/'
+ },
+ 'mirrors': [
+ {
+ 'name': 'middle-earth',
+ 'aliases': {
+ alias: [mirror_map + "/"],
+ },
+ },
+ ]
+ }
+ project_file = os.path.join(project_dir, 'project.conf')
+ _yaml.dump(project, project_file)
+
+ result = cli.run(project=project_dir, args=['track', element_name])
+ result.assert_success()
+
+ # Check that tracking fell back to the mirror
+ new_element = _yaml.load(element_path)
+ source = new_element['sources'][0]
+ if 'ref' in source:
+ assert source['ref'] == mirror_ref
diff --git a/tests/frontend/project/sources/fetch_source.py b/tests/frontend/project/sources/fetch_source.py
new file mode 100644
index 000000000..ebd3fe757
--- /dev/null
+++ b/tests/frontend/project/sources/fetch_source.py
@@ -0,0 +1,85 @@
+import os
+import sys
+
+from buildstream import Source, Consistency, SourceError, SourceFetcher
+
+# Expected config
+# sources:
+# - output-text: $FILE
+# urls:
+# - foo:bar
+# - baz:quux
+# fetch-succeeds:
+# Foo/bar: true
+# ooF/bar: false
+
+
+class FetchFetcher(SourceFetcher):
+ def __init__(self, source, url):
+ super().__init__()
+ self.source = source
+ self.original_url = url
+ self.mark_download_url(url)
+
+ def fetch(self, alias_override=None):
+ url = self.source.translate_url(self.original_url, alias_override=alias_override)
+ with open(self.source.output_file, "a") as f:
+ success = url in self.source.fetch_succeeds and self.source.fetch_succeeds[url]
+ message = "Fetch {} {} from {}\n".format(self.original_url,
+ "succeeded" if success else "failed",
+ url)
+ f.write(message)
+ if not success:
+ raise SourceError("Failed to fetch {}".format(url))
+
+
+class FetchSource(Source):
+ # Read config to know which URLs to fetch
+ def configure(self, node):
+ self.original_urls = self.node_get_member(node, list, 'urls')
+ self.fetchers = [FetchFetcher(self, url) for url in self.original_urls]
+ self.output_file = self.node_get_member(node, str, 'output-text')
+ self.fetch_succeeds = {}
+ if 'fetch-succeeds' in node:
+ self.fetch_succeeds = {x[0]: x[1] for x in self.node_items(node['fetch-succeeds'])}
+
+ def get_source_fetchers(self):
+ return self.fetchers
+
+ def preflight(self):
+ output_dir = os.path.dirname(self.output_file)
+ if not os.path.exists(output_dir):
+ raise SourceError("Directory '{}' does not exist".format(output_dir))
+
+ def fetch(self):
+ for fetcher in self.fetchers:
+ fetcher.fetch()
+
+ def get_unique_key(self):
+ return {"urls": self.original_urls, "output_file": self.output_file}
+
+ def get_consistency(self):
+ if not os.path.exists(self.output_file):
+ return Consistency.RESOLVED
+
+ with open(self.output_file, "r") as f:
+ contents = f.read()
+ for url in self.original_urls:
+ if url not in contents:
+ return Consistency.RESOLVED
+
+ return Consistency.CACHED
+
+    # We don't have a ref, we're a local file...
+ def load_ref(self, node):
+ pass
+
+ def get_ref(self):
+ return None # pragma: nocover
+
+ def set_ref(self, ref, node):
+ pass # pragma: nocover
+
+
+def setup():
+ return FetchSource
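
The new module plugs into the existing frontend test suite and needs no
special wiring. As a minimal sketch (not part of this commit), running just
these tests locally from a BuildStream checkout with the test dependencies
installed could look like the following; the "-k git" filter is a
hypothetical choice that restricts the parametrized fetch and track tests
to the git repo kind:

    import pytest

    # Run only the new mirror tests, verbosely, restricted to the git kind.
    pytest.main(["tests/frontend/mirror.py", "-v", "-k", "git"])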