Diffstat (limited to 'src/buildstream/testing/_sourcetests')
-rw-r--r-- | src/buildstream/testing/_sourcetests/build_checkout.py       |  36
-rw-r--r-- | src/buildstream/testing/_sourcetests/fetch.py                |  57
-rw-r--r-- | src/buildstream/testing/_sourcetests/mirror.py               | 318
-rw-r--r-- | src/buildstream/testing/_sourcetests/source_determinism.py   |  75
-rw-r--r-- | src/buildstream/testing/_sourcetests/track.py                | 245
-rw-r--r-- | src/buildstream/testing/_sourcetests/track_cross_junction.py | 134
-rw-r--r-- | src/buildstream/testing/_sourcetests/utils.py                |  15
-rw-r--r-- | src/buildstream/testing/_sourcetests/workspace.py            |  85
8 files changed, 391 insertions(+), 574 deletions(-)
diff --git a/src/buildstream/testing/_sourcetests/build_checkout.py b/src/buildstream/testing/_sourcetests/build_checkout.py index 4d4bcf0e2..782d99814 100644 --- a/src/buildstream/testing/_sourcetests/build_checkout.py +++ b/src/buildstream/testing/_sourcetests/build_checkout.py @@ -29,23 +29,23 @@ from .utils import kind # pylint: disable=unused-import # Project directory TOP_DIR = os.path.dirname(os.path.realpath(__file__)) -DATA_DIR = os.path.join(TOP_DIR, 'project') +DATA_DIR = os.path.join(TOP_DIR, "project") def strict_args(args, strict): if strict != "strict": - return ['--no-strict', *args] + return ["--no-strict", *args] return args @pytest.mark.datafiles(DATA_DIR) @pytest.mark.parametrize("strict", ["strict", "non-strict"]) def test_fetch_build_checkout(cli, tmpdir, datafiles, strict, kind): - checkout = os.path.join(cli.directory, 'checkout') + checkout = os.path.join(cli.directory, "checkout") project = str(datafiles) - dev_files_path = os.path.join(project, 'files', 'dev-files') - element_path = os.path.join(project, 'elements') - element_name = 'build-test-{}.bst'.format(kind) + dev_files_path = os.path.join(project, "files", "dev-files") + element_path = os.path.join(project, "elements") + element_name = "build-test-{}.bst".format(kind) # Create our repo object of the given source type with # the dev files, and then collect the initial ref. @@ -54,26 +54,20 @@ def test_fetch_build_checkout(cli, tmpdir, datafiles, strict, kind): ref = repo.create(dev_files_path) # Write out our test target - element = { - 'kind': 'import', - 'sources': [ - repo.source_config(ref=ref) - ] - } - _yaml.roundtrip_dump(element, - os.path.join(element_path, element_name)) + element = {"kind": "import", "sources": [repo.source_config(ref=ref)]} + _yaml.roundtrip_dump(element, os.path.join(element_path, element_name)) - assert cli.get_element_state(project, element_name) == 'fetch needed' - result = cli.run(project=project, args=strict_args(['build', element_name], strict)) + assert cli.get_element_state(project, element_name) == "fetch needed" + result = cli.run(project=project, args=strict_args(["build", element_name], strict)) result.assert_success() - assert cli.get_element_state(project, element_name) == 'cached' + assert cli.get_element_state(project, element_name) == "cached" # Now check it out - result = cli.run(project=project, args=strict_args([ - 'artifact', 'checkout', element_name, '--directory', checkout - ], strict)) + result = cli.run( + project=project, args=strict_args(["artifact", "checkout", element_name, "--directory", checkout], strict) + ) result.assert_success() # Check that the pony.h include from files/dev-files exists - filename = os.path.join(checkout, 'usr', 'include', 'pony.h') + filename = os.path.join(checkout, "usr", "include", "pony.h") assert os.path.exists(filename) diff --git a/src/buildstream/testing/_sourcetests/fetch.py b/src/buildstream/testing/_sourcetests/fetch.py index 897752297..05b43d793 100644 --- a/src/buildstream/testing/_sourcetests/fetch.py +++ b/src/buildstream/testing/_sourcetests/fetch.py @@ -32,15 +32,15 @@ from .utils import kind # pylint: disable=unused-import # Project directory TOP_DIR = os.path.dirname(os.path.realpath(__file__)) -DATA_DIR = os.path.join(TOP_DIR, 'project') +DATA_DIR = os.path.join(TOP_DIR, "project") @pytest.mark.datafiles(DATA_DIR) def test_fetch(cli, tmpdir, datafiles, kind): project = str(datafiles) - bin_files_path = os.path.join(project, 'files', 'bin-files') - element_path = os.path.join(project, 'elements') - 
element_name = 'fetch-test-{}.bst'.format(kind) + bin_files_path = os.path.join(project, "files", "bin-files") + element_path = os.path.join(project, "elements") + element_name = "fetch-test-{}.bst".format(kind) # Create our repo object of the given source type with # the bin files, and then collect the initial ref. @@ -49,59 +49,46 @@ def test_fetch(cli, tmpdir, datafiles, kind): ref = repo.create(bin_files_path) # Write out our test target - element = { - 'kind': 'import', - 'sources': [ - repo.source_config(ref=ref) - ] - } - _yaml.roundtrip_dump(element, - os.path.join(element_path, element_name)) + element = {"kind": "import", "sources": [repo.source_config(ref=ref)]} + _yaml.roundtrip_dump(element, os.path.join(element_path, element_name)) # Assert that a fetch is needed - assert cli.get_element_state(project, element_name) == 'fetch needed' + assert cli.get_element_state(project, element_name) == "fetch needed" # Now try to fetch it - result = cli.run(project=project, args=['source', 'fetch', element_name]) + result = cli.run(project=project, args=["source", "fetch", element_name]) result.assert_success() # Assert that we are now buildable because the source is # now cached. - assert cli.get_element_state(project, element_name) == 'buildable' + assert cli.get_element_state(project, element_name) == "buildable" @pytest.mark.datafiles(DATA_DIR) -@pytest.mark.parametrize("ref_storage", ['inline', 'project.refs']) +@pytest.mark.parametrize("ref_storage", ["inline", "project.refs"]) def test_fetch_cross_junction(cli, tmpdir, datafiles, ref_storage, kind): project = str(datafiles) - subproject_path = os.path.join(project, 'files', 'sub-project') - junction_path = os.path.join(project, 'elements', 'junction.bst') + subproject_path = os.path.join(project, "files", "sub-project") + junction_path = os.path.join(project, "elements", "junction.bst") - import_etc_path = os.path.join(subproject_path, 'elements', 'import-etc-repo.bst') - etc_files_path = os.path.join(subproject_path, 'files', 'etc-files') + import_etc_path = os.path.join(subproject_path, "elements", "import-etc-repo.bst") + etc_files_path = os.path.join(subproject_path, "files", "etc-files") - repo = create_repo(kind, str(tmpdir.join('import-etc'))) + repo = create_repo(kind, str(tmpdir.join("import-etc"))) ref = repo.create(etc_files_path) - element = { - 'kind': 'import', - 'sources': [ - repo.source_config(ref=(ref if ref_storage == 'inline' else None)) - ] - } + element = {"kind": "import", "sources": [repo.source_config(ref=(ref if ref_storage == "inline" else None))]} _yaml.roundtrip_dump(element, import_etc_path) - update_project_configuration(project, { - 'ref-storage': ref_storage - }) + update_project_configuration(project, {"ref-storage": ref_storage}) - generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == 'inline')) + generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline")) - if ref_storage == 'project.refs': - result = cli.run(project=project, args=['source', 'track', 'junction.bst']) + if ref_storage == "project.refs": + result = cli.run(project=project, args=["source", "track", "junction.bst"]) result.assert_success() - result = cli.run(project=project, args=['source', 'track', 'junction.bst:import-etc.bst']) + result = cli.run(project=project, args=["source", "track", "junction.bst:import-etc.bst"]) result.assert_success() - result = cli.run(project=project, args=['source', 'fetch', 'junction.bst:import-etc.bst']) + result = 
cli.run(project=project, args=["source", "fetch", "junction.bst:import-etc.bst"]) result.assert_success() diff --git a/src/buildstream/testing/_sourcetests/mirror.py b/src/buildstream/testing/_sourcetests/mirror.py index b6316045d..3ff3fb981 100644 --- a/src/buildstream/testing/_sourcetests/mirror.py +++ b/src/buildstream/testing/_sourcetests/mirror.py @@ -31,25 +31,25 @@ from .utils import kind # pylint: disable=unused-import # Project directory TOP_DIR = os.path.dirname(os.path.realpath(__file__)) -DATA_DIR = os.path.join(TOP_DIR, 'project') +DATA_DIR = os.path.join(TOP_DIR, "project") def _set_project_mirrors_and_aliases(project_path, mirrors, aliases): - project_conf_path = os.path.join(project_path, 'project.conf') + project_conf_path = os.path.join(project_path, "project.conf") project_conf = _yaml.roundtrip_load(project_conf_path) - project_conf['mirrors'] = mirrors - project_conf['aliases'].update(aliases) + project_conf["mirrors"] = mirrors + project_conf["aliases"].update(aliases) _yaml.roundtrip_dump(project_conf, project_conf_path) def _set_project_includes_and_aliases(project_path, includes, aliases): - project_conf_path = os.path.join(project_path, 'project.conf') + project_conf_path = os.path.join(project_path, "project.conf") project_conf = _yaml.roundtrip_load(project_conf_path) - project_conf['aliases'].update(aliases) - project_conf['(@)'] = includes + project_conf["aliases"].update(aliases) + project_conf["(@)"] = includes _yaml.roundtrip_dump(project_conf, project_conf_path) @@ -57,11 +57,11 @@ def _set_project_includes_and_aliases(project_path, includes, aliases): @pytest.mark.datafiles(DATA_DIR) def test_mirror_fetch(cli, tmpdir, datafiles, kind): project_dir = str(datafiles) - bin_files_path = os.path.join(project_dir, 'files', 'bin-files', 'usr') - dev_files_path = os.path.join(project_dir, 'files', 'dev-files', 'usr') - upstream_repodir = os.path.join(str(tmpdir), 'upstream') - mirror_repodir = os.path.join(str(tmpdir), 'mirror') - element_dir = os.path.join(project_dir, 'elements') + bin_files_path = os.path.join(project_dir, "files", "bin-files", "usr") + dev_files_path = os.path.join(project_dir, "files", "dev-files", "usr") + upstream_repodir = os.path.join(str(tmpdir), "upstream") + mirror_repodir = os.path.join(str(tmpdir), "mirror") + element_dir = os.path.join(project_dir, "elements") # Create repo objects of the upstream and mirror upstream_repo = create_repo(kind, upstream_repodir) @@ -69,228 +69,162 @@ def test_mirror_fetch(cli, tmpdir, datafiles, kind): mirror_repo = upstream_repo.copy(mirror_repodir) upstream_ref = upstream_repo.create(dev_files_path) - element = { - 'kind': 'import', - 'sources': [ - upstream_repo.source_config(ref=upstream_ref) - ] - } - element_name = 'test.bst' + element = {"kind": "import", "sources": [upstream_repo.source_config(ref=upstream_ref)]} + element_name = "test.bst" element_path = os.path.join(element_dir, element_name) - full_repo = element['sources'][0]['url'] + full_repo = element["sources"][0]["url"] upstream_map, repo_name = os.path.split(full_repo) - alias = 'foo-' + kind - aliased_repo = alias + ':' + repo_name - element['sources'][0]['url'] = aliased_repo - full_mirror = mirror_repo.source_config()['url'] + alias = "foo-" + kind + aliased_repo = alias + ":" + repo_name + element["sources"][0]["url"] = aliased_repo + full_mirror = mirror_repo.source_config()["url"] mirror_map, _ = os.path.split(full_mirror) _yaml.roundtrip_dump(element, element_path) _set_project_mirrors_and_aliases( project_dir, - [ - { - 
'name': 'middle-earth', - 'aliases': { - alias: [mirror_map + '/'], - }, - }, - ], - {alias: upstream_map + '/'}, + [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],},},], + {alias: upstream_map + "/"}, ) # No obvious ways of checking that the mirror has been fetched # But at least we can be sure it succeeds - result = cli.run(project=project_dir, args=['source', 'fetch', element_name]) + result = cli.run(project=project_dir, args=["source", "fetch", element_name]) result.assert_success() @pytest.mark.datafiles(DATA_DIR) def test_mirror_fetch_upstream_absent(cli, tmpdir, datafiles, kind): project_dir = str(datafiles) - dev_files_path = os.path.join(project_dir, 'files', 'dev-files', 'usr') - upstream_repodir = os.path.join(project_dir, 'upstream') - mirror_repodir = os.path.join(str(tmpdir), 'mirror') - element_dir = os.path.join(project_dir, 'elements') + dev_files_path = os.path.join(project_dir, "files", "dev-files", "usr") + upstream_repodir = os.path.join(project_dir, "upstream") + mirror_repodir = os.path.join(str(tmpdir), "mirror") + element_dir = os.path.join(project_dir, "elements") # Create repo objects of the upstream and mirror upstream_repo = create_repo(kind, upstream_repodir) ref = upstream_repo.create(dev_files_path) mirror_repo = upstream_repo.copy(mirror_repodir) - element = { - 'kind': 'import', - 'sources': [ - upstream_repo.source_config(ref=ref) - ] - } + element = {"kind": "import", "sources": [upstream_repo.source_config(ref=ref)]} - element_name = 'test.bst' + element_name = "test.bst" element_path = os.path.join(element_dir, element_name) - full_repo = element['sources'][0]['url'] + full_repo = element["sources"][0]["url"] _, repo_name = os.path.split(full_repo) - alias = 'foo-' + kind - aliased_repo = alias + ':' + repo_name - element['sources'][0]['url'] = aliased_repo - full_mirror = mirror_repo.source_config()['url'] + alias = "foo-" + kind + aliased_repo = alias + ":" + repo_name + element["sources"][0]["url"] = aliased_repo + full_mirror = mirror_repo.source_config()["url"] mirror_map, _ = os.path.split(full_mirror) _yaml.roundtrip_dump(element, element_path) _set_project_mirrors_and_aliases( project_dir, - [ - { - 'name': 'middle-earth', - 'aliases': { - alias: [mirror_map + "/"] - }, - }, - ], - {alias: 'http://www.example.com'}, + [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"]},},], + {alias: "http://www.example.com"}, ) - result = cli.run(project=project_dir, args=['source', 'fetch', element_name]) + result = cli.run(project=project_dir, args=["source", "fetch", element_name]) result.assert_success() @pytest.mark.datafiles(DATA_DIR) def test_mirror_from_includes(cli, tmpdir, datafiles, kind): project_dir = str(datafiles) - bin_files_path = os.path.join(project_dir, 'files', 'bin-files', 'usr') - upstream_repodir = os.path.join(str(tmpdir), 'upstream') - mirror_repodir = os.path.join(str(tmpdir), 'mirror') - element_dir = os.path.join(project_dir, 'elements') + bin_files_path = os.path.join(project_dir, "files", "bin-files", "usr") + upstream_repodir = os.path.join(str(tmpdir), "upstream") + mirror_repodir = os.path.join(str(tmpdir), "mirror") + element_dir = os.path.join(project_dir, "elements") # Create repo objects of the upstream and mirror upstream_repo = create_repo(kind, upstream_repodir) upstream_ref = upstream_repo.create(bin_files_path) mirror_repo = upstream_repo.copy(mirror_repodir) - element = { - 'kind': 'import', - 'sources': [ - upstream_repo.source_config(ref=upstream_ref) - ] - } - element_name = 
'test.bst' + element = {"kind": "import", "sources": [upstream_repo.source_config(ref=upstream_ref)]} + element_name = "test.bst" element_path = os.path.join(element_dir, element_name) - full_repo = element['sources'][0]['url'] + full_repo = element["sources"][0]["url"] upstream_map, repo_name = os.path.split(full_repo) - alias = 'foo-' + kind - aliased_repo = alias + ':' + repo_name - element['sources'][0]['url'] = aliased_repo - full_mirror = mirror_repo.source_config()['url'] + alias = "foo-" + kind + aliased_repo = alias + ":" + repo_name + element["sources"][0]["url"] = aliased_repo + full_mirror = mirror_repo.source_config()["url"] mirror_map, _ = os.path.split(full_mirror) _yaml.roundtrip_dump(element, element_path) - config_project_dir = str(tmpdir.join('config')) + config_project_dir = str(tmpdir.join("config")) os.makedirs(config_project_dir, exist_ok=True) - config_project = { - 'name': 'config' - } - _yaml.roundtrip_dump(config_project, os.path.join(config_project_dir, 'project.conf')) - extra_mirrors = { - 'mirrors': [ - { - 'name': 'middle-earth', - 'aliases': { - alias: [mirror_map + "/"], - } - } - ] - } - _yaml.roundtrip_dump(extra_mirrors, os.path.join(config_project_dir, 'mirrors.yml')) - generate_junction(str(tmpdir.join('config_repo')), - config_project_dir, - os.path.join(element_dir, 'config.bst')) + config_project = {"name": "config"} + _yaml.roundtrip_dump(config_project, os.path.join(config_project_dir, "project.conf")) + extra_mirrors = {"mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],}}]} + _yaml.roundtrip_dump(extra_mirrors, os.path.join(config_project_dir, "mirrors.yml")) + generate_junction(str(tmpdir.join("config_repo")), config_project_dir, os.path.join(element_dir, "config.bst")) _set_project_includes_and_aliases( - project_dir, - ['config.bst:mirrors.yml'], - {alias: upstream_map + '/'}, + project_dir, ["config.bst:mirrors.yml"], {alias: upstream_map + "/"}, ) # Now make the upstream unavailable. 
- os.rename(upstream_repo.repo, '{}.bak'.format(upstream_repo.repo)) - result = cli.run(project=project_dir, args=['source', 'fetch', element_name]) + os.rename(upstream_repo.repo, "{}.bak".format(upstream_repo.repo)) + result = cli.run(project=project_dir, args=["source", "fetch", element_name]) result.assert_success() @pytest.mark.datafiles(DATA_DIR) def test_mirror_junction_from_includes(cli, tmpdir, datafiles, kind): project_dir = str(datafiles) - bin_files_path = os.path.join(project_dir, 'files', 'bin-files', 'usr') - upstream_repodir = os.path.join(str(tmpdir), 'upstream') - mirror_repodir = os.path.join(str(tmpdir), 'mirror') - element_dir = os.path.join(project_dir, 'elements') + bin_files_path = os.path.join(project_dir, "files", "bin-files", "usr") + upstream_repodir = os.path.join(str(tmpdir), "upstream") + mirror_repodir = os.path.join(str(tmpdir), "mirror") + element_dir = os.path.join(project_dir, "elements") # Create repo objects of the upstream and mirror upstream_repo = create_repo(kind, upstream_repodir) upstream_ref = upstream_repo.create(bin_files_path) mirror_repo = upstream_repo.copy(mirror_repodir) - element = { - 'kind': 'junction', - 'sources': [ - upstream_repo.source_config(ref=upstream_ref) - ] - } - element_name = 'test.bst' + element = {"kind": "junction", "sources": [upstream_repo.source_config(ref=upstream_ref)]} + element_name = "test.bst" element_path = os.path.join(element_dir, element_name) - full_repo = element['sources'][0]['url'] + full_repo = element["sources"][0]["url"] upstream_map, repo_name = os.path.split(full_repo) - alias = 'foo-' + kind - aliased_repo = alias + ':' + repo_name - element['sources'][0]['url'] = aliased_repo - full_mirror = mirror_repo.source_config()['url'] + alias = "foo-" + kind + aliased_repo = alias + ":" + repo_name + element["sources"][0]["url"] = aliased_repo + full_mirror = mirror_repo.source_config()["url"] mirror_map, _ = os.path.split(full_mirror) _yaml.roundtrip_dump(element, element_path) - config_project_dir = str(tmpdir.join('config')) + config_project_dir = str(tmpdir.join("config")) os.makedirs(config_project_dir, exist_ok=True) - config_project = { - 'name': 'config' - } - _yaml.roundtrip_dump(config_project, os.path.join(config_project_dir, 'project.conf')) - extra_mirrors = { - 'mirrors': [ - { - 'name': 'middle-earth', - 'aliases': { - alias: [mirror_map + "/"], - } - } - ] - } - _yaml.roundtrip_dump(extra_mirrors, os.path.join(config_project_dir, 'mirrors.yml')) - generate_junction(str(tmpdir.join('config_repo')), - config_project_dir, - os.path.join(element_dir, 'config.bst')) + config_project = {"name": "config"} + _yaml.roundtrip_dump(config_project, os.path.join(config_project_dir, "project.conf")) + extra_mirrors = {"mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],}}]} + _yaml.roundtrip_dump(extra_mirrors, os.path.join(config_project_dir, "mirrors.yml")) + generate_junction(str(tmpdir.join("config_repo")), config_project_dir, os.path.join(element_dir, "config.bst")) - _set_project_includes_and_aliases( - project_dir, - ['config.bst:mirrors.yml'], - {alias: upstream_map + '/'} - ) + _set_project_includes_and_aliases(project_dir, ["config.bst:mirrors.yml"], {alias: upstream_map + "/"}) # Now make the upstream unavailable. 
- os.rename(upstream_repo.repo, '{}.bak'.format(upstream_repo.repo)) - result = cli.run(project=project_dir, args=['source', 'fetch', element_name]) + os.rename(upstream_repo.repo, "{}.bak".format(upstream_repo.repo)) + result = cli.run(project=project_dir, args=["source", "fetch", element_name]) result.assert_main_error(ErrorDomain.STREAM, None) # Now make the upstream available again. - os.rename('{}.bak'.format(upstream_repo.repo), upstream_repo.repo) - result = cli.run(project=project_dir, args=['source', 'fetch', element_name]) + os.rename("{}.bak".format(upstream_repo.repo), upstream_repo.repo) + result = cli.run(project=project_dir, args=["source", "fetch", element_name]) result.assert_success() @pytest.mark.datafiles(DATA_DIR) def test_mirror_track_upstream_present(cli, tmpdir, datafiles, kind): project_dir = str(datafiles) - bin_files_path = os.path.join(project_dir, 'files', 'bin-files', 'usr') - dev_files_path = os.path.join(project_dir, 'files', 'dev-files', 'usr') - upstream_repodir = os.path.join(str(tmpdir), 'upstream') - mirror_repodir = os.path.join(str(tmpdir), 'mirror') - element_dir = os.path.join(project_dir, 'elements') + bin_files_path = os.path.join(project_dir, "files", "bin-files", "usr") + dev_files_path = os.path.join(project_dir, "files", "dev-files", "usr") + upstream_repodir = os.path.join(str(tmpdir), "upstream") + mirror_repodir = os.path.join(str(tmpdir), "mirror") + element_dir = os.path.join(project_dir, "elements") # Create repo objects of the upstream and mirror upstream_repo = create_repo(kind, upstream_repodir) @@ -298,55 +232,43 @@ def test_mirror_track_upstream_present(cli, tmpdir, datafiles, kind): mirror_repo = upstream_repo.copy(mirror_repodir) upstream_ref = upstream_repo.create(dev_files_path) - element = { - 'kind': 'import', - 'sources': [ - upstream_repo.source_config(ref=upstream_ref) - ] - } + element = {"kind": "import", "sources": [upstream_repo.source_config(ref=upstream_ref)]} - element_name = 'test.bst' + element_name = "test.bst" element_path = os.path.join(element_dir, element_name) - full_repo = element['sources'][0]['url'] + full_repo = element["sources"][0]["url"] upstream_map, repo_name = os.path.split(full_repo) - alias = 'foo-' + kind - aliased_repo = alias + ':' + repo_name - element['sources'][0]['url'] = aliased_repo - full_mirror = mirror_repo.source_config()['url'] + alias = "foo-" + kind + aliased_repo = alias + ":" + repo_name + element["sources"][0]["url"] = aliased_repo + full_mirror = mirror_repo.source_config()["url"] mirror_map, _ = os.path.split(full_mirror) _yaml.roundtrip_dump(element, element_path) _set_project_mirrors_and_aliases( project_dir, - [ - { - 'name': 'middle-earth', - 'aliases': { - alias: [mirror_map + '/'], - }, - }, - ], - {alias: upstream_map + '/'}, + [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],},},], + {alias: upstream_map + "/"}, ) - result = cli.run(project=project_dir, args=['source', 'track', element_name]) + result = cli.run(project=project_dir, args=["source", "track", element_name]) result.assert_success() # Tracking tries upstream first. Check the ref is from upstream. 
new_element = _yaml.load(element_path) - source = new_element.get_sequence('sources').mapping_at(0) - if 'ref' in source: - assert source.get_str('ref') == upstream_ref + source = new_element.get_sequence("sources").mapping_at(0) + if "ref" in source: + assert source.get_str("ref") == upstream_ref @pytest.mark.datafiles(DATA_DIR) def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind): project_dir = str(datafiles) - bin_files_path = os.path.join(project_dir, 'files', 'bin-files', 'usr') - dev_files_path = os.path.join(project_dir, 'files', 'dev-files', 'usr') - upstream_repodir = os.path.join(str(tmpdir), 'upstream') - mirror_repodir = os.path.join(str(tmpdir), 'mirror') - element_dir = os.path.join(project_dir, 'elements') + bin_files_path = os.path.join(project_dir, "files", "bin-files", "usr") + dev_files_path = os.path.join(project_dir, "files", "dev-files", "usr") + upstream_repodir = os.path.join(str(tmpdir), "upstream") + mirror_repodir = os.path.join(str(tmpdir), "mirror") + element_dir = os.path.join(project_dir, "elements") # Create repo objects of the upstream and mirror upstream_repo = create_repo(kind, upstream_repodir) @@ -355,42 +277,30 @@ def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind): mirror_ref = upstream_ref upstream_ref = upstream_repo.create(dev_files_path) - element = { - 'kind': 'import', - 'sources': [ - upstream_repo.source_config(ref=upstream_ref) - ] - } + element = {"kind": "import", "sources": [upstream_repo.source_config(ref=upstream_ref)]} - element_name = 'test.bst' + element_name = "test.bst" element_path = os.path.join(element_dir, element_name) - full_repo = element['sources'][0]['url'] + full_repo = element["sources"][0]["url"] _, repo_name = os.path.split(full_repo) - alias = 'foo-' + kind - aliased_repo = alias + ':' + repo_name - element['sources'][0]['url'] = aliased_repo - full_mirror = mirror_repo.source_config()['url'] + alias = "foo-" + kind + aliased_repo = alias + ":" + repo_name + element["sources"][0]["url"] = aliased_repo + full_mirror = mirror_repo.source_config()["url"] mirror_map, _ = os.path.split(full_mirror) _yaml.roundtrip_dump(element, element_path) _set_project_mirrors_and_aliases( project_dir, - [ - { - 'name': 'middle-earth', - 'aliases': { - alias: [mirror_map + '/'], - }, - }, - ], - {alias: 'http://www.example.com'}, + [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],},},], + {alias: "http://www.example.com"}, ) - result = cli.run(project=project_dir, args=['source', 'track', element_name]) + result = cli.run(project=project_dir, args=["source", "track", element_name]) result.assert_success() # Check that tracking fell back to the mirror new_element = _yaml.load(element_path) - source = new_element.get_sequence('sources').mapping_at(0) - if 'ref' in source: - assert source.get_str('ref') == mirror_ref + source = new_element.get_sequence("sources").mapping_at(0) + if "ref" in source: + assert source.get_str("ref") == mirror_ref diff --git a/src/buildstream/testing/_sourcetests/source_determinism.py b/src/buildstream/testing/_sourcetests/source_determinism.py index fc0e4618c..27664e0c2 100644 --- a/src/buildstream/testing/_sourcetests/source_determinism.py +++ b/src/buildstream/testing/_sourcetests/source_determinism.py @@ -30,83 +30,72 @@ from .utils import kind # pylint: disable=unused-import # Project directory TOP_DIR = os.path.dirname(os.path.realpath(__file__)) -DATA_DIR = os.path.join(TOP_DIR, 'project') +DATA_DIR = os.path.join(TOP_DIR, "project") -def 
create_test_file(*path, mode=0o644, content='content\n'): +def create_test_file(*path, mode=0o644, content="content\n"): path = os.path.join(*path) os.makedirs(os.path.dirname(path), exist_ok=True) - with open(path, 'w') as f: + with open(path, "w") as f: f.write(content) os.fchmod(f.fileno(), mode) def create_test_directory(*path, mode=0o644): - create_test_file(*path, '.keep', content='') + create_test_file(*path, ".keep", content="") path = os.path.join(*path) os.chmod(path, mode) @pytest.mark.integration @pytest.mark.datafiles(DATA_DIR) -@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox') -@pytest.mark.skipif(HAVE_SANDBOX == 'buildbox', reason='Not working with BuildBox, Must Fix') +@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox") +@pytest.mark.skipif(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox, Must Fix") def test_deterministic_source_umask(cli, tmpdir, datafiles, kind): project = str(datafiles) - element_name = 'list.bst' - element_path = os.path.join(project, 'elements', element_name) - repodir = os.path.join(str(tmpdir), 'repo') - sourcedir = os.path.join(project, 'source') - - create_test_file(sourcedir, 'a.txt', mode=0o700) - create_test_file(sourcedir, 'b.txt', mode=0o755) - create_test_file(sourcedir, 'c.txt', mode=0o600) - create_test_file(sourcedir, 'd.txt', mode=0o400) - create_test_file(sourcedir, 'e.txt', mode=0o644) - create_test_file(sourcedir, 'f.txt', mode=0o4755) - create_test_file(sourcedir, 'g.txt', mode=0o2755) - create_test_file(sourcedir, 'h.txt', mode=0o1755) - create_test_directory(sourcedir, 'dir-a', mode=0o0700) - create_test_directory(sourcedir, 'dir-c', mode=0o0755) - create_test_directory(sourcedir, 'dir-d', mode=0o4755) - create_test_directory(sourcedir, 'dir-e', mode=0o2755) - create_test_directory(sourcedir, 'dir-f', mode=0o1755) + element_name = "list.bst" + element_path = os.path.join(project, "elements", element_name) + repodir = os.path.join(str(tmpdir), "repo") + sourcedir = os.path.join(project, "source") + + create_test_file(sourcedir, "a.txt", mode=0o700) + create_test_file(sourcedir, "b.txt", mode=0o755) + create_test_file(sourcedir, "c.txt", mode=0o600) + create_test_file(sourcedir, "d.txt", mode=0o400) + create_test_file(sourcedir, "e.txt", mode=0o644) + create_test_file(sourcedir, "f.txt", mode=0o4755) + create_test_file(sourcedir, "g.txt", mode=0o2755) + create_test_file(sourcedir, "h.txt", mode=0o1755) + create_test_directory(sourcedir, "dir-a", mode=0o0700) + create_test_directory(sourcedir, "dir-c", mode=0o0755) + create_test_directory(sourcedir, "dir-d", mode=0o4755) + create_test_directory(sourcedir, "dir-e", mode=0o2755) + create_test_directory(sourcedir, "dir-f", mode=0o1755) repo = create_repo(kind, repodir) ref = repo.create(sourcedir) source = repo.source_config(ref=ref) element = { - 'kind': 'manual', - 'depends': [ - { - 'filename': 'base.bst', - 'type': 'build' - } - ], - 'sources': [ - source - ], - 'config': { - 'install-commands': [ - 'ls -l >"%{install-root}/ls-l"' - ] - } + "kind": "manual", + "depends": [{"filename": "base.bst", "type": "build"}], + "sources": [source], + "config": {"install-commands": ['ls -l >"%{install-root}/ls-l"']}, } _yaml.roundtrip_dump(element, element_path) def get_value_for_umask(umask): - checkoutdir = os.path.join(str(tmpdir), 'checkout-{}'.format(umask)) + checkoutdir = os.path.join(str(tmpdir), "checkout-{}".format(umask)) old_umask = os.umask(umask) try: - result = cli.run(project=project, 
args=['build', element_name]) + result = cli.run(project=project, args=["build", element_name]) result.assert_success() - result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkoutdir]) + result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkoutdir]) result.assert_success() - with open(os.path.join(checkoutdir, 'ls-l'), 'r') as f: + with open(os.path.join(checkoutdir, "ls-l"), "r") as f: return f.read() finally: os.umask(old_umask) diff --git a/src/buildstream/testing/_sourcetests/track.py b/src/buildstream/testing/_sourcetests/track.py index 48856b351..623045cd9 100644 --- a/src/buildstream/testing/_sourcetests/track.py +++ b/src/buildstream/testing/_sourcetests/track.py @@ -33,33 +33,26 @@ from .utils import kind # pylint: disable=unused-import # Project directory TOP_DIR = os.path.dirname(os.path.realpath(__file__)) -DATA_DIR = os.path.join(TOP_DIR, 'project') +DATA_DIR = os.path.join(TOP_DIR, "project") def generate_element(repo, element_path, dep_name=None): - element = { - 'kind': 'import', - 'sources': [ - repo.source_config() - ] - } + element = {"kind": "import", "sources": [repo.source_config()]} if dep_name: - element['depends'] = [dep_name] + element["depends"] = [dep_name] _yaml.roundtrip_dump(element, element_path) @pytest.mark.datafiles(DATA_DIR) -@pytest.mark.parametrize("ref_storage", ['inline', 'project.refs']) +@pytest.mark.parametrize("ref_storage", ["inline", "project.refs"]) def test_track(cli, tmpdir, datafiles, ref_storage, kind): project = str(datafiles) - dev_files_path = os.path.join(project, 'files', 'dev-files') - element_path = os.path.join(project, 'elements') - element_name = 'track-test-{}.bst'.format(kind) + dev_files_path = os.path.join(project, "files", "dev-files") + element_path = os.path.join(project, "elements") + element_name = "track-test-{}.bst".format(kind) - update_project_configuration(project, { - 'ref-storage': ref_storage - }) + update_project_configuration(project, {"ref-storage": ref_storage}) # Create our repo object of the given source type with # the dev files, and then collect the initial ref. @@ -71,28 +64,28 @@ def test_track(cli, tmpdir, datafiles, ref_storage, kind): generate_element(repo, os.path.join(element_path, element_name)) # Assert that a fetch is needed - assert cli.get_element_state(project, element_name) == 'no reference' + assert cli.get_element_state(project, element_name) == "no reference" # Now first try to track it - result = cli.run(project=project, args=['source', 'track', element_name]) + result = cli.run(project=project, args=["source", "track", element_name]) result.assert_success() # And now fetch it: The Source has probably already cached the # latest ref locally, but it is not required to have cached # the associated content of the latest ref at track time, that # is the job of fetch. - result = cli.run(project=project, args=['source', 'fetch', element_name]) + result = cli.run(project=project, args=["source", "fetch", element_name]) result.assert_success() # Assert that we are now buildable because the source is # now cached. 
- assert cli.get_element_state(project, element_name) == 'buildable' + assert cli.get_element_state(project, element_name) == "buildable" # Assert there was a project.refs created, depending on the configuration - if ref_storage == 'project.refs': - assert os.path.exists(os.path.join(project, 'project.refs')) + if ref_storage == "project.refs": + assert os.path.exists(os.path.join(project, "project.refs")) else: - assert not os.path.exists(os.path.join(project, 'project.refs')) + assert not os.path.exists(os.path.join(project, "project.refs")) # NOTE: @@ -112,18 +105,14 @@ def test_track(cli, tmpdir, datafiles, ref_storage, kind): @pytest.mark.parametrize("amount", [1, 10]) def test_track_recurse(cli, tmpdir, datafiles, kind, amount): project = str(datafiles) - dev_files_path = os.path.join(project, 'files', 'dev-files') - element_path = os.path.join(project, 'elements') + dev_files_path = os.path.join(project, "files", "dev-files") + element_path = os.path.join(project, "elements") # Try to actually launch as many fetch jobs as possible at the same time # # This stresses the Source plugins and helps to ensure that # they handle concurrent access to the store correctly. - cli.configure({ - 'scheduler': { - 'fetchers': amount, - } - }) + cli.configure({"scheduler": {"fetchers": amount,}}) # Create our repo object of the given source type with # the dev files, and then collect the initial ref. @@ -135,7 +124,7 @@ def test_track_recurse(cli, tmpdir, datafiles, kind, amount): element_names = [] last_element_name = None for i in range(amount + 1): - element_name = 'track-test-{}-{}.bst'.format(kind, i + 1) + element_name = "track-test-{}-{}.bst".format(kind, i + 1) filename = os.path.join(element_path, element_name) element_names.append(element_name) @@ -146,39 +135,35 @@ def test_track_recurse(cli, tmpdir, datafiles, kind, amount): # Assert that a fetch is needed states = cli.get_element_states(project, [last_element_name]) for element_name in element_names: - assert states[element_name] == 'no reference' + assert states[element_name] == "no reference" # Now first try to track it - result = cli.run(project=project, args=[ - 'source', 'track', '--deps', 'all', - last_element_name]) + result = cli.run(project=project, args=["source", "track", "--deps", "all", last_element_name]) result.assert_success() # And now fetch it: The Source has probably already cached the # latest ref locally, but it is not required to have cached # the associated content of the latest ref at track time, that # is the job of fetch. 
- result = cli.run(project=project, args=[ - 'source', 'fetch', '--deps', 'all', - last_element_name]) + result = cli.run(project=project, args=["source", "fetch", "--deps", "all", last_element_name]) result.assert_success() # Assert that the base is buildable and the rest are waiting states = cli.get_element_states(project, [last_element_name]) for element_name in element_names: if element_name == element_names[0]: - assert states[element_name] == 'buildable' + assert states[element_name] == "buildable" else: - assert states[element_name] == 'waiting' + assert states[element_name] == "waiting" @pytest.mark.datafiles(DATA_DIR) def test_track_recurse_except(cli, tmpdir, datafiles, kind): project = str(datafiles) - dev_files_path = os.path.join(project, 'files', 'dev-files') - element_path = os.path.join(project, 'elements') - element_dep_name = 'track-test-dep-{}.bst'.format(kind) - element_target_name = 'track-test-target-{}.bst'.format(kind) + dev_files_path = os.path.join(project, "files", "dev-files") + element_path = os.path.join(project, "elements") + element_dep_name = "track-test-dep-{}.bst".format(kind) + element_target_name = "track-test-target-{}.bst".format(kind) # Create our repo object of the given source type with # the dev files, and then collect the initial ref. @@ -188,88 +173,79 @@ def test_track_recurse_except(cli, tmpdir, datafiles, kind): # Write out our test targets generate_element(repo, os.path.join(element_path, element_dep_name)) - generate_element(repo, os.path.join(element_path, element_target_name), - dep_name=element_dep_name) + generate_element(repo, os.path.join(element_path, element_target_name), dep_name=element_dep_name) # Assert that a fetch is needed states = cli.get_element_states(project, [element_target_name]) - assert states[element_dep_name] == 'no reference' - assert states[element_target_name] == 'no reference' + assert states[element_dep_name] == "no reference" + assert states[element_target_name] == "no reference" # Now first try to track it - result = cli.run(project=project, args=[ - 'source', 'track', '--deps', 'all', '--except', element_dep_name, - element_target_name]) + result = cli.run( + project=project, args=["source", "track", "--deps", "all", "--except", element_dep_name, element_target_name] + ) result.assert_success() # And now fetch it: The Source has probably already cached the # latest ref locally, but it is not required to have cached # the associated content of the latest ref at track time, that # is the job of fetch. 
- result = cli.run(project=project, args=[ - 'source', 'fetch', '--deps', 'none', - element_target_name]) + result = cli.run(project=project, args=["source", "fetch", "--deps", "none", element_target_name]) result.assert_success() # Assert that the dependency is buildable and the target is waiting states = cli.get_element_states(project, [element_target_name]) - assert states[element_dep_name] == 'no reference' - assert states[element_target_name] == 'waiting' + assert states[element_dep_name] == "no reference" + assert states[element_target_name] == "waiting" @pytest.mark.datafiles(DATA_DIR) -@pytest.mark.parametrize("ref_storage", ['inline', 'project.refs']) +@pytest.mark.parametrize("ref_storage", ["inline", "project.refs"]) def test_cross_junction(cli, tmpdir, datafiles, ref_storage, kind): project = str(datafiles) - subproject_path = os.path.join(project, 'files', 'sub-project') - junction_path = os.path.join(project, 'elements', 'junction.bst') - etc_files = os.path.join(subproject_path, 'files', 'etc-files') - repo_element_path = os.path.join(subproject_path, 'elements', - 'import-etc-repo.bst') + subproject_path = os.path.join(project, "files", "sub-project") + junction_path = os.path.join(project, "elements", "junction.bst") + etc_files = os.path.join(subproject_path, "files", "etc-files") + repo_element_path = os.path.join(subproject_path, "elements", "import-etc-repo.bst") - update_project_configuration(project, { - 'ref-storage': ref_storage - }) + update_project_configuration(project, {"ref-storage": ref_storage}) - repo = create_repo(kind, str(tmpdir.join('element_repo'))) + repo = create_repo(kind, str(tmpdir.join("element_repo"))) repo.create(etc_files) generate_element(repo, repo_element_path) - generate_junction(str(tmpdir.join('junction_repo')), - subproject_path, junction_path, store_ref=False) + generate_junction(str(tmpdir.join("junction_repo")), subproject_path, junction_path, store_ref=False) # Track the junction itself first. - result = cli.run(project=project, args=['source', 'track', 'junction.bst']) + result = cli.run(project=project, args=["source", "track", "junction.bst"]) result.assert_success() - assert cli.get_element_state(project, 'junction.bst:import-etc-repo.bst') == 'no reference' + assert cli.get_element_state(project, "junction.bst:import-etc-repo.bst") == "no reference" # Track the cross junction element. -J is not given, it is implied. - result = cli.run(project=project, args=['source', 'track', 'junction.bst:import-etc-repo.bst']) + result = cli.run(project=project, args=["source", "track", "junction.bst:import-etc-repo.bst"]) - if ref_storage == 'inline': + if ref_storage == "inline": # This is not allowed to track cross junction without project.refs. 
- result.assert_main_error(ErrorDomain.PIPELINE, 'untrackable-sources') + result.assert_main_error(ErrorDomain.PIPELINE, "untrackable-sources") else: result.assert_success() - assert cli.get_element_state(project, 'junction.bst:import-etc-repo.bst') == 'buildable' + assert cli.get_element_state(project, "junction.bst:import-etc-repo.bst") == "buildable" - assert os.path.exists(os.path.join(project, 'project.refs')) + assert os.path.exists(os.path.join(project, "project.refs")) @pytest.mark.datafiles(DATA_DIR) -@pytest.mark.parametrize("ref_storage", ['inline', 'project.refs']) +@pytest.mark.parametrize("ref_storage", ["inline", "project.refs"]) def test_track_include(cli, tmpdir, datafiles, ref_storage, kind): project = str(datafiles) - dev_files_path = os.path.join(project, 'files', 'dev-files') - element_path = os.path.join(project, 'elements') - element_name = 'track-test-{}.bst'.format(kind) + dev_files_path = os.path.join(project, "files", "dev-files") + element_path = os.path.join(project, "elements") + element_name = "track-test-{}.bst".format(kind) - update_project_configuration(project, { - 'ref-storage': ref_storage - }) + update_project_configuration(project, {"ref-storage": ref_storage}) # Create our repo object of the given source type with # the dev files, and then collect the initial ref. @@ -278,139 +254,118 @@ def test_track_include(cli, tmpdir, datafiles, ref_storage, kind): ref = repo.create(dev_files_path) # Generate the element - element = { - 'kind': 'import', - '(@)': ['elements/sources.yml'] - } - sources = { - 'sources': [ - repo.source_config() - ] - } + element = {"kind": "import", "(@)": ["elements/sources.yml"]} + sources = {"sources": [repo.source_config()]} _yaml.roundtrip_dump(element, os.path.join(element_path, element_name)) - _yaml.roundtrip_dump(sources, os.path.join(element_path, 'sources.yml')) + _yaml.roundtrip_dump(sources, os.path.join(element_path, "sources.yml")) # Assert that a fetch is needed - assert cli.get_element_state(project, element_name) == 'no reference' + assert cli.get_element_state(project, element_name) == "no reference" # Now first try to track it - result = cli.run(project=project, args=['source', 'track', element_name]) + result = cli.run(project=project, args=["source", "track", element_name]) result.assert_success() # And now fetch it: The Source has probably already cached the # latest ref locally, but it is not required to have cached # the associated content of the latest ref at track time, that # is the job of fetch. - result = cli.run(project=project, args=['source', 'fetch', element_name]) + result = cli.run(project=project, args=["source", "fetch", element_name]) result.assert_success() # Assert that we are now buildable because the source is # now cached. 
- assert cli.get_element_state(project, element_name) == 'buildable' + assert cli.get_element_state(project, element_name) == "buildable" # Assert there was a project.refs created, depending on the configuration - if ref_storage == 'project.refs': - assert os.path.exists(os.path.join(project, 'project.refs')) + if ref_storage == "project.refs": + assert os.path.exists(os.path.join(project, "project.refs")) else: - assert not os.path.exists(os.path.join(project, 'project.refs')) + assert not os.path.exists(os.path.join(project, "project.refs")) - new_sources = _yaml.load(os.path.join(element_path, 'sources.yml')) + new_sources = _yaml.load(os.path.join(element_path, "sources.yml")) # Get all of the sources - assert 'sources' in new_sources - sources_list = new_sources.get_sequence('sources') + assert "sources" in new_sources + sources_list = new_sources.get_sequence("sources") assert len(sources_list) == 1 # Get the first source from the sources list new_source = sources_list.mapping_at(0) - assert 'ref' in new_source - assert ref == new_source.get_str('ref') + assert "ref" in new_source + assert ref == new_source.get_str("ref") @pytest.mark.datafiles(DATA_DIR) -@pytest.mark.parametrize("ref_storage", ['inline', 'project.refs']) +@pytest.mark.parametrize("ref_storage", ["inline", "project.refs"]) def test_track_include_junction(cli, tmpdir, datafiles, ref_storage, kind): project = str(datafiles) - dev_files_path = os.path.join(project, 'files', 'dev-files') - element_path = os.path.join(project, 'elements') - element_name = 'track-test-{}.bst'.format(kind) - subproject_path = os.path.join(project, 'files', 'sub-project') - sub_element_path = os.path.join(subproject_path, 'elements') - junction_path = os.path.join(element_path, 'junction.bst') + dev_files_path = os.path.join(project, "files", "dev-files") + element_path = os.path.join(project, "elements") + element_name = "track-test-{}.bst".format(kind) + subproject_path = os.path.join(project, "files", "sub-project") + sub_element_path = os.path.join(subproject_path, "elements") + junction_path = os.path.join(element_path, "junction.bst") - update_project_configuration(project, { - 'ref-storage': ref_storage - }) + update_project_configuration(project, {"ref-storage": ref_storage}) # Create our repo object of the given source type with # the dev files, and then collect the initial ref. 
# - repo = create_repo(kind, str(tmpdir.join('element_repo'))) + repo = create_repo(kind, str(tmpdir.join("element_repo"))) repo.create(dev_files_path) # Generate the element - element = { - 'kind': 'import', - '(@)': ['junction.bst:elements/sources.yml'] - } - sources = { - 'sources': [ - repo.source_config() - ] - } + element = {"kind": "import", "(@)": ["junction.bst:elements/sources.yml"]} + sources = {"sources": [repo.source_config()]} _yaml.roundtrip_dump(element, os.path.join(element_path, element_name)) - _yaml.roundtrip_dump(sources, os.path.join(sub_element_path, 'sources.yml')) + _yaml.roundtrip_dump(sources, os.path.join(sub_element_path, "sources.yml")) - generate_junction(str(tmpdir.join('junction_repo')), - subproject_path, junction_path, store_ref=True) + generate_junction(str(tmpdir.join("junction_repo")), subproject_path, junction_path, store_ref=True) - result = cli.run(project=project, args=['source', 'track', 'junction.bst']) + result = cli.run(project=project, args=["source", "track", "junction.bst"]) result.assert_success() # Assert that a fetch is needed - assert cli.get_element_state(project, element_name) == 'no reference' + assert cli.get_element_state(project, element_name) == "no reference" # Now first try to track it - result = cli.run(project=project, args=['source', 'track', element_name]) + result = cli.run(project=project, args=["source", "track", element_name]) # Assert there was a project.refs created, depending on the configuration - if ref_storage == 'inline': + if ref_storage == "inline": # FIXME: We should expect an error. But only a warning is emitted # result.assert_main_error(ErrorDomain.SOURCE, 'tracking-junction-fragment') - assert 'junction.bst:elements/sources.yml: Cannot track source in a fragment from a junction' in result.stderr + assert "junction.bst:elements/sources.yml: Cannot track source in a fragment from a junction" in result.stderr else: - assert os.path.exists(os.path.join(project, 'project.refs')) + assert os.path.exists(os.path.join(project, "project.refs")) # And now fetch it: The Source has probably already cached the # latest ref locally, but it is not required to have cached # the associated content of the latest ref at track time, that # is the job of fetch. - result = cli.run(project=project, args=['source', 'fetch', element_name]) + result = cli.run(project=project, args=["source", "fetch", element_name]) result.assert_success() # Assert that we are now buildable because the source is # now cached. 
- assert cli.get_element_state(project, element_name) == 'buildable' + assert cli.get_element_state(project, element_name) == "buildable" @pytest.mark.datafiles(DATA_DIR) -@pytest.mark.parametrize("ref_storage", ['inline', 'project.refs']) +@pytest.mark.parametrize("ref_storage", ["inline", "project.refs"]) def test_track_junction_included(cli, tmpdir, datafiles, ref_storage, kind): project = str(datafiles) - element_path = os.path.join(project, 'elements') - subproject_path = os.path.join(project, 'files', 'sub-project') - junction_path = os.path.join(element_path, 'junction.bst') + element_path = os.path.join(project, "elements") + subproject_path = os.path.join(project, "files", "sub-project") + junction_path = os.path.join(element_path, "junction.bst") - update_project_configuration(project, { - 'ref-storage': ref_storage, - '(@)': ['junction.bst:test.yml'] - }) + update_project_configuration(project, {"ref-storage": ref_storage, "(@)": ["junction.bst:test.yml"]}) - generate_junction(str(tmpdir.join('junction_repo')), - subproject_path, junction_path, store_ref=False) + generate_junction(str(tmpdir.join("junction_repo")), subproject_path, junction_path, store_ref=False) - result = cli.run(project=project, args=['source', 'track', 'junction.bst']) + result = cli.run(project=project, args=["source", "track", "junction.bst"]) result.assert_success() diff --git a/src/buildstream/testing/_sourcetests/track_cross_junction.py b/src/buildstream/testing/_sourcetests/track_cross_junction.py index 550f57faf..2477b37ee 100644 --- a/src/buildstream/testing/_sourcetests/track_cross_junction.py +++ b/src/buildstream/testing/_sourcetests/track_cross_junction.py @@ -32,32 +32,27 @@ from .utils import add_plugins_conf # Project directory TOP_DIR = os.path.dirname(os.path.realpath(__file__)) -DATA_DIR = os.path.join(TOP_DIR, 'project') +DATA_DIR = os.path.join(TOP_DIR, "project") def generate_element(repo, element_path, dep_name=None): - element = { - 'kind': 'import', - 'sources': [ - repo.source_config() - ] - } + element = {"kind": "import", "sources": [repo.source_config()]} if dep_name: - element['depends'] = [dep_name] + element["depends"] = [dep_name] _yaml.roundtrip_dump(element, element_path) def generate_import_element(tmpdir, kind, project, name): - element_name = 'import-{}.bst'.format(name) - repo_element_path = os.path.join(project, 'elements', element_name) + element_name = "import-{}.bst".format(name) + repo_element_path = os.path.join(project, "elements", element_name) files = str(tmpdir.join("imported_files_{}".format(name))) os.makedirs(files) - with open(os.path.join(files, '{}.txt'.format(name)), 'w') as f: + with open(os.path.join(files, "{}.txt".format(name)), "w") as f: f.write(name) - repo = create_repo(kind, str(tmpdir.join('element_{}_repo'.format(name)))) + repo = create_repo(kind, str(tmpdir.join("element_{}_repo".format(name)))) repo.create(files) generate_element(repo, repo_element_path) @@ -69,28 +64,22 @@ def generate_project(tmpdir, name, kind, config=None): if config is None: config = {} - project_name = 'project-{}'.format(name) + project_name = "project-{}".format(name) subproject_path = os.path.join(str(tmpdir.join(project_name))) - os.makedirs(os.path.join(subproject_path, 'elements')) + os.makedirs(os.path.join(subproject_path, "elements")) - project_conf = { - 'name': name, - 'element-path': 'elements' - } + project_conf = {"name": name, "element-path": "elements"} project_conf.update(config) - _yaml.roundtrip_dump(project_conf, os.path.join(subproject_path, 
'project.conf')) + _yaml.roundtrip_dump(project_conf, os.path.join(subproject_path, "project.conf")) add_plugins_conf(subproject_path, kind) return project_name, subproject_path def generate_simple_stack(project, name, dependencies): - element_name = '{}.bst'.format(name) - element_path = os.path.join(project, 'elements', element_name) - element = { - 'kind': 'stack', - 'depends': dependencies - } + element_name = "{}.bst".format(name) + element_path = os.path.join(project, "elements", element_name) + element = {"kind": "stack", "depends": dependencies} _yaml.roundtrip_dump(element, element_path) return element_name @@ -98,11 +87,11 @@ def generate_simple_stack(project, name, dependencies): def generate_cross_element(project, subproject_name, import_name): basename, _ = os.path.splitext(import_name) - return generate_simple_stack(project, 'import-{}-{}'.format(subproject_name, basename), - [{ - 'junction': '{}.bst'.format(subproject_name), - 'filename': import_name - }]) + return generate_simple_stack( + project, + "import-{}-{}".format(subproject_name, basename), + [{"junction": "{}.bst".format(subproject_name), "filename": import_name}], + ) @pytest.mark.parametrize("kind", ALL_REPO_KINDS.keys()) @@ -110,30 +99,30 @@ def test_cross_junction_multiple_projects(cli, tmpdir, kind): tmpdir = tmpdir.join(kind) # Generate 3 projects: main, a, b - _, project = generate_project(tmpdir, 'main', kind, {'ref-storage': 'project.refs'}) - project_a, project_a_path = generate_project(tmpdir, 'a', kind) - project_b, project_b_path = generate_project(tmpdir, 'b', kind) + _, project = generate_project(tmpdir, "main", kind, {"ref-storage": "project.refs"}) + project_a, project_a_path = generate_project(tmpdir, "a", kind) + project_b, project_b_path = generate_project(tmpdir, "b", kind) # Generate an element with a trackable source for each project - element_a = generate_import_element(tmpdir, kind, project_a_path, 'a') - element_b = generate_import_element(tmpdir, kind, project_b_path, 'b') - element_c = generate_import_element(tmpdir, kind, project, 'c') + element_a = generate_import_element(tmpdir, kind, project_a_path, "a") + element_b = generate_import_element(tmpdir, kind, project_b_path, "b") + element_c = generate_import_element(tmpdir, kind, project, "c") # Create some indirections to the elements with dependencies to test --deps - stack_a = generate_simple_stack(project_a_path, 'stack-a', [element_a]) - stack_b = generate_simple_stack(project_b_path, 'stack-b', [element_b]) + stack_a = generate_simple_stack(project_a_path, "stack-a", [element_a]) + stack_b = generate_simple_stack(project_b_path, "stack-b", [element_b]) # Create junctions for projects a and b in main. - junction_a = '{}.bst'.format(project_a) - junction_a_path = os.path.join(project, 'elements', junction_a) - generate_junction(tmpdir.join('repo_a'), project_a_path, junction_a_path, store_ref=False) + junction_a = "{}.bst".format(project_a) + junction_a_path = os.path.join(project, "elements", junction_a) + generate_junction(tmpdir.join("repo_a"), project_a_path, junction_a_path, store_ref=False) - junction_b = '{}.bst'.format(project_b) - junction_b_path = os.path.join(project, 'elements', junction_b) - generate_junction(tmpdir.join('repo_b'), project_b_path, junction_b_path, store_ref=False) + junction_b = "{}.bst".format(project_b) + junction_b_path = os.path.join(project, "elements", junction_b) + generate_junction(tmpdir.join("repo_b"), project_b_path, junction_b_path, store_ref=False) # Track the junctions. 
- result = cli.run(project=project, args=['source', 'track', junction_a, junction_b]) + result = cli.run(project=project, args=["source", "track", junction_a, junction_b]) result.assert_success() # Import elements from a and b in to main. @@ -141,18 +130,16 @@ def test_cross_junction_multiple_projects(cli, tmpdir, kind): imported_b = generate_cross_element(project, project_b, stack_b) # Generate a top level stack depending on everything - all_bst = generate_simple_stack(project, 'all', [imported_a, imported_b, element_c]) + all_bst = generate_simple_stack(project, "all", [imported_a, imported_b, element_c]) # Track without following junctions. But explicitly also track the elements in project a. - result = cli.run(project=project, args=['source', 'track', - '--deps', 'all', - all_bst, - '{}:{}'.format(junction_a, stack_a)]) + result = cli.run( + project=project, args=["source", "track", "--deps", "all", all_bst, "{}:{}".format(junction_a, stack_a)] + ) result.assert_success() # Elements in project b should not be tracked. But elements in project a and main should. - expected = [element_c, - '{}:{}'.format(junction_a, element_a)] + expected = [element_c, "{}:{}".format(junction_a, element_a)] assert set(result.get_tracked_elements()) == set(expected) @@ -160,31 +147,38 @@ def test_cross_junction_multiple_projects(cli, tmpdir, kind): def test_track_exceptions(cli, tmpdir, kind): tmpdir = tmpdir.join(kind) - _, project = generate_project(tmpdir, 'main', kind, {'ref-storage': 'project.refs'}) - project_a, project_a_path = generate_project(tmpdir, 'a', kind) + _, project = generate_project(tmpdir, "main", kind, {"ref-storage": "project.refs"}) + project_a, project_a_path = generate_project(tmpdir, "a", kind) - element_a = generate_import_element(tmpdir, kind, project_a_path, 'a') - element_b = generate_import_element(tmpdir, kind, project_a_path, 'b') + element_a = generate_import_element(tmpdir, kind, project_a_path, "a") + element_b = generate_import_element(tmpdir, kind, project_a_path, "b") - all_bst = generate_simple_stack(project_a_path, 'all', [element_a, - element_b]) + all_bst = generate_simple_stack(project_a_path, "all", [element_a, element_b]) - junction_a = '{}.bst'.format(project_a) - junction_a_path = os.path.join(project, 'elements', junction_a) - generate_junction(tmpdir.join('repo_a'), project_a_path, junction_a_path, store_ref=False) + junction_a = "{}.bst".format(project_a) + junction_a_path = os.path.join(project, "elements", junction_a) + generate_junction(tmpdir.join("repo_a"), project_a_path, junction_a_path, store_ref=False) - result = cli.run(project=project, args=['source', 'track', junction_a]) + result = cli.run(project=project, args=["source", "track", junction_a]) result.assert_success() imported_b = generate_cross_element(project, project_a, element_b) - indirection = generate_simple_stack(project, 'indirection', [imported_b]) - - result = cli.run(project=project, - args=['source', 'track', '--deps', 'all', - '--except', indirection, - '{}:{}'.format(junction_a, all_bst), imported_b]) + indirection = generate_simple_stack(project, "indirection", [imported_b]) + + result = cli.run( + project=project, + args=[ + "source", + "track", + "--deps", + "all", + "--except", + indirection, + "{}:{}".format(junction_a, all_bst), + imported_b, + ], + ) result.assert_success() - expected = ['{}:{}'.format(junction_a, element_a), - '{}:{}'.format(junction_a, element_b)] + expected = ["{}:{}".format(junction_a, element_a), "{}:{}".format(junction_a, element_b)] assert 
set(result.get_tracked_elements()) == set(expected) diff --git a/src/buildstream/testing/_sourcetests/utils.py b/src/buildstream/testing/_sourcetests/utils.py index a0e65b4f4..116506807 100644 --- a/src/buildstream/testing/_sourcetests/utils.py +++ b/src/buildstream/testing/_sourcetests/utils.py @@ -27,9 +27,8 @@ import os try: import pytest except ImportError: - module_name = globals()['__name__'] - msg = "Could not import pytest:\n" \ - "To use the {} module, you must have pytest installed.".format(module_name) + module_name = globals()["__name__"] + msg = "Could not import pytest:\n" "To use the {} module, you must have pytest installed.".format(module_name) raise ImportError(msg) from buildstream import _yaml @@ -72,13 +71,7 @@ def add_plugins_conf(project, plugin_kind): if plugin_package is not None: project_conf["plugins"] = [ - { - "origin": "pip", - "package-name": plugin_package, - "sources": { - plugin_kind: 0, - }, - }, + {"origin": "pip", "package-name": plugin_package, "sources": {plugin_kind: 0,},}, ] _yaml.roundtrip_dump(project_conf, project_conf_file) @@ -96,7 +89,7 @@ def add_plugins_conf(project, plugin_kind): # updated_configuration (dict): configuration to merge into the existing one # def update_project_configuration(project_path, updated_configuration): - project_conf_path = os.path.join(project_path, 'project.conf') + project_conf_path = os.path.join(project_path, "project.conf") project_conf = _yaml.roundtrip_load(project_conf_path) project_conf.update(updated_configuration) diff --git a/src/buildstream/testing/_sourcetests/workspace.py b/src/buildstream/testing/_sourcetests/workspace.py index dd7977e76..34e2247ea 100644 --- a/src/buildstream/testing/_sourcetests/workspace.py +++ b/src/buildstream/testing/_sourcetests/workspace.py @@ -30,10 +30,10 @@ from .utils import kind # pylint: disable=unused-import # Project directory TOP_DIR = os.path.dirname(os.path.realpath(__file__)) -DATA_DIR = os.path.join(TOP_DIR, 'project') +DATA_DIR = os.path.join(TOP_DIR, "project") -class WorkspaceCreator(): +class WorkspaceCreator: def __init__(self, cli, tmpdir, datafiles, project_path=None): self.cli = cli self.tmpdir = tmpdir @@ -45,17 +45,16 @@ class WorkspaceCreator(): shutil.copytree(str(datafiles), project_path) self.project_path = project_path - self.bin_files_path = os.path.join(project_path, 'files', 'bin-files') + self.bin_files_path = os.path.join(project_path, "files", "bin-files") - self.workspace_cmd = os.path.join(self.project_path, 'workspace_cmd') + self.workspace_cmd = os.path.join(self.project_path, "workspace_cmd") - def create_workspace_element(self, kind, track, suffix='', workspace_dir=None, - element_attrs=None): - element_name = 'workspace-test-{}{}.bst'.format(kind, suffix) - element_path = os.path.join(self.project_path, 'elements') + def create_workspace_element(self, kind, track, suffix="", workspace_dir=None, element_attrs=None): + element_name = "workspace-test-{}{}.bst".format(kind, suffix) + element_path = os.path.join(self.project_path, "elements") if not workspace_dir: workspace_dir = os.path.join(self.workspace_cmd, element_name) - if workspace_dir[-4:] == '.bst': + if workspace_dir[-4:] == ".bst": workspace_dir = workspace_dir[:-4] # Create our repo object of the given source type with @@ -66,64 +65,53 @@ class WorkspaceCreator(): ref = None # Write out our test target - element = { - 'kind': 'import', - 'sources': [ - repo.source_config(ref=ref) - ] - } + element = {"kind": "import", "sources": [repo.source_config(ref=ref)]} if 
element_attrs: element = {**element, **element_attrs} - _yaml.roundtrip_dump(element, - os.path.join(element_path, element_name)) + _yaml.roundtrip_dump(element, os.path.join(element_path, element_name)) return element_name, element_path, workspace_dir - def create_workspace_elements(self, kinds, track, suffixs=None, workspace_dir_usr=None, - element_attrs=None): + def create_workspace_elements(self, kinds, track, suffixs=None, workspace_dir_usr=None, element_attrs=None): element_tuples = [] if suffixs is None: - suffixs = ['', ] * len(kinds) + suffixs = ["",] * len(kinds) else: if len(suffixs) != len(kinds): raise "terable error" for suffix, kind in zip(suffixs, kinds): - element_name, _, workspace_dir = \ - self.create_workspace_element(kind, track, suffix, workspace_dir_usr, - element_attrs) + element_name, _, workspace_dir = self.create_workspace_element( + kind, track, suffix, workspace_dir_usr, element_attrs + ) element_tuples.append((element_name, workspace_dir)) # Assert that there is no reference, a track & fetch is needed - states = self.cli.get_element_states(self.project_path, [ - e for e, _ in element_tuples - ]) + states = self.cli.get_element_states(self.project_path, [e for e, _ in element_tuples]) if track: - assert not any(states[e] != 'no reference' for e, _ in element_tuples) + assert not any(states[e] != "no reference" for e, _ in element_tuples) else: - assert not any(states[e] != 'fetch needed' for e, _ in element_tuples) + assert not any(states[e] != "fetch needed" for e, _ in element_tuples) return element_tuples - def open_workspaces(self, kinds, track, suffixs=None, workspace_dir=None, - element_attrs=None, no_checkout=False): + def open_workspaces(self, kinds, track, suffixs=None, workspace_dir=None, element_attrs=None, no_checkout=False): - element_tuples = self.create_workspace_elements(kinds, track, suffixs, workspace_dir, - element_attrs) + element_tuples = self.create_workspace_elements(kinds, track, suffixs, workspace_dir, element_attrs) os.makedirs(self.workspace_cmd, exist_ok=True) # Now open the workspace, this should have the effect of automatically # tracking & fetching the source from the repo. - args = ['workspace', 'open'] + args = ["workspace", "open"] if track: - args.append('--track') + args.append("--track") if no_checkout: - args.append('--no-checkout') + args.append("--no-checkout") if workspace_dir is not None: assert len(element_tuples) == 1, "test logic error" _, workspace_dir = element_tuples[0] - args.extend(['--directory', workspace_dir]) + args.extend(["--directory", workspace_dir]) args.extend([element_name for element_name, workspace_dir_suffix in element_tuples]) result = self.cli.run(cwd=self.workspace_cmd, project=self.project_path, args=args) @@ -132,24 +120,31 @@ class WorkspaceCreator(): if not no_checkout: # Assert that we are now buildable because the source is now cached. 
- states = self.cli.get_element_states(self.project_path, [ - e for e, _ in element_tuples - ]) - assert not any(states[e] != 'buildable' for e, _ in element_tuples) + states = self.cli.get_element_states(self.project_path, [e for e, _ in element_tuples]) + assert not any(states[e] != "buildable" for e, _ in element_tuples) # Check that the executable hello file is found in each workspace for _, workspace in element_tuples: - filename = os.path.join(workspace, 'usr', 'bin', 'hello') + filename = os.path.join(workspace, "usr", "bin", "hello") assert os.path.exists(filename) return element_tuples -def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None, - project_path=None, element_attrs=None, no_checkout=False): +def open_workspace( + cli, + tmpdir, + datafiles, + kind, + track, + suffix="", + workspace_dir=None, + project_path=None, + element_attrs=None, + no_checkout=False, +): workspace_object = WorkspaceCreator(cli, tmpdir, datafiles, project_path) - workspaces = workspace_object.open_workspaces((kind, ), track, (suffix, ), workspace_dir, - element_attrs, no_checkout) + workspaces = workspace_object.open_workspaces((kind,), track, (suffix,), workspace_dir, element_attrs, no_checkout) assert len(workspaces) == 1 element_name, workspace = workspaces[0] return element_name, workspace_object.project_path, workspace |
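
For context on how the files touched above are consumed: these _sourcetests modules are generic and parametrized over whatever repo kinds a source plugin's own test suite registers; they are not run on their own. The sketch below shows one plausible conftest.py for an external plugin. It is a minimal sketch only: the ExampleRepo class, the "example" kind name and the "example-plugin-package" package name are placeholders, and register_repo_kind() plus sourcetests_collection_hook() are assumed to be the entry points exported by buildstream.testing rather than anything defined in this diff.

# conftest.py for a hypothetical "example" source plugin (a sketch, not part of this commit).
# Assumes buildstream.testing exports Repo, register_repo_kind and sourcetests_collection_hook,
# and that the cli fixture is provided elsewhere in the plugin's conftest.
import os
import shutil

from buildstream.testing import Repo, register_repo_kind, sourcetests_collection_hook


class ExampleRepo(Repo):
    # A minimal Repo implementation: "creating" the repo just copies the
    # given files under self.repo and returns a dummy ref.
    def create(self, directory):
        for name in os.listdir(directory):
            src = os.path.join(directory, name)
            dest = os.path.join(self.repo, name)
            if os.path.isdir(src):
                shutil.copytree(src, dest)
            else:
                shutil.copy2(src, dest)
        return "0"

    # Emit the source configuration that the generic tests splice into
    # their generated import elements.
    def source_config(self, ref=None):
        config = {"kind": "example", "url": self.repo}
        if ref is not None:
            config["ref"] = ref
        return config


# Make the generic source tests run against this repo kind.
register_repo_kind("example", ExampleRepo, "example-plugin-package")


# Pull the _sourcetests modules into this plugin's pytest session.
def pytest_sessionstart(session):
    sourcetests_collection_hook(session)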

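As a usage note on the reworked workspace helpers: open_workspace() returns the element name, the project path and the workspace directory, so a downstream test built on top of it might look roughly like the following. The test body is illustrative and not part of this commit; it reuses the cli and kind fixtures that the modules above already rely on, and assumes a registered repo kind whose sources stage the project's bin-files.

# Illustrative only: a downstream test built on the open_workspace() helper above.
import os

import pytest

from buildstream.testing._sourcetests.workspace import DATA_DIR, open_workspace
from buildstream.testing._sourcetests.utils import kind  # pylint: disable=unused-import


@pytest.mark.datafiles(DATA_DIR)
def test_open_and_build(cli, tmpdir, datafiles, kind):
    # Open a tracked workspace for the given source kind ...
    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, track=True)

    # ... the staged bin-files should now be present in the workspace ...
    assert os.path.exists(os.path.join(workspace, "usr", "bin", "hello"))

    # ... and the element should build straight from that workspace.
    result = cli.run(project=project, args=["build", element_name])
    result.assert_success()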