summaryrefslogtreecommitdiff
path: root/tests
diff options
context:
space:
mode:
authorRaoul Hidalgo Charman <raoul.hidalgocharman@codethink.co.uk>2019-03-28 12:31:50 +0000
committerbst-marge-bot <marge-bot@buildstream.build>2019-05-15 10:18:26 +0000
commit30e89a6a3cdcea83e0d83e373e50b88c02df3667 (patch)
tree2ca584a89133bdc0efe2fa5d876173f48d7f396c /tests
parent7c1bb299c891b7fe8b92e1d54a38eca0b55840ef (diff)
downloadbuildstream-30e89a6a3cdcea83e0d83e373e50b88c02df3667.tar.gz
_artifact.py: Rework to use artifact proto
This will replace the previous use of a directory structure. Quite a lot is changed here, predominantly _artifact and _artifactcache modules. Part of #974
Diffstat (limited to 'tests')
-rw-r--r--tests/artifactcache/junctions.py2
-rw-r--r--tests/artifactcache/pull.py3
-rw-r--r--tests/artifactcache/push.py17
-rw-r--r--tests/frontend/artifact.py10
-rw-r--r--tests/frontend/pull.py46
-rw-r--r--tests/frontend/remote-caches.py5
-rw-r--r--tests/integration/artifact.py31
-rw-r--r--tests/integration/pullbuildtrees.py11
-rw-r--r--tests/integration/shellbuildtrees.py3
-rw-r--r--tests/sourcecache/fetch.py1
-rw-r--r--tests/testutils/artifactshare.py45
11 files changed, 122 insertions, 52 deletions
diff --git a/tests/artifactcache/junctions.py b/tests/artifactcache/junctions.py
index d2eceb842..e1bbb31fd 100644
--- a/tests/artifactcache/junctions.py
+++ b/tests/artifactcache/junctions.py
@@ -69,6 +69,8 @@ def test_push_pull(cli, tmpdir, datafiles):
#
cas = os.path.join(cli.directory, 'cas')
shutil.rmtree(cas)
+ artifact_dir = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifact_dir)
# Assert that nothing is cached locally anymore
state = cli.get_element_state(project, 'target.bst')
diff --git a/tests/artifactcache/pull.py b/tests/artifactcache/pull.py
index 4f34156d7..cdadf583a 100644
--- a/tests/artifactcache/pull.py
+++ b/tests/artifactcache/pull.py
@@ -257,7 +257,8 @@ def test_pull_tree(cli, tmpdir, datafiles):
utils._kill_process_tree(process.pid)
raise
- assert directory_hash and directory_size
+ # Directory size now zero with AaaP and stack element commit #1cbc5e63dc
+ assert directory_hash and not directory_size
directory_digest = remote_execution_pb2.Digest(hash=directory_hash,
size_bytes=directory_size)
diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index a099ad136..4f31148b7 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -4,7 +4,7 @@ import signal
import pytest
-from buildstream import _yaml, _signals, utils
+from buildstream import _yaml, _signals, utils, Scope
from buildstream._context import Context
from buildstream._project import Project
from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
@@ -85,7 +85,7 @@ def test_push(cli, tmpdir, datafiles):
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
process = multiprocessing.Process(target=_queue_wrapper,
args=(_test_push, queue, user_config_file, project_dir,
- 'target.bst', element_key))
+ 'target.bst'))
try:
# Keep SIGINT blocked in the child process
@@ -102,7 +102,7 @@ def test_push(cli, tmpdir, datafiles):
assert share.has_artifact(cli.get_artifact_name(project_dir, 'test', 'target.bst', cache_key=element_key))
-def _test_push(user_config_file, project_dir, element_name, element_key, queue):
+def _test_push(user_config_file, project_dir, element_name, queue):
# Fake minimal context
context = Context()
context.load(config=user_config_file)
@@ -118,13 +118,22 @@ def _test_push(user_config_file, project_dir, element_name, element_key, queue):
# Load the target element
element = project.load_elements([element_name])[0]
+ # Ensure the element's artifact member is initialised
+ # This is duplicated from Pipeline.resolve_elements()
+ # as this test does not use the cli frontend.
+ for e in element.dependencies(Scope.ALL):
+ # Preflight
+ e._preflight()
+ # Determine initial element state.
+ e._update_state()
+
# Manually setup the CAS remotes
artifactcache.setup_remotes(use_config=True)
artifactcache.initialize_remotes()
if artifactcache.has_push_remotes(plugin=element):
# Push the element's artifact
- if not artifactcache.push(element, [element_key]):
+ if not element._push():
queue.put("Push operation failed")
else:
queue.put(None)
diff --git a/tests/frontend/artifact.py b/tests/frontend/artifact.py
index 716c4b8a1..eb187a168 100644
--- a/tests/frontend/artifact.py
+++ b/tests/frontend/artifact.py
@@ -92,7 +92,7 @@ def test_artifact_delete_artifact(cli, tmpdir, datafiles):
element = 'target.bst'
# Configure a local cache
- local_cache = os.path.join(str(tmpdir), 'artifacts')
+ local_cache = os.path.join(str(tmpdir), 'cache')
cli.configure({'cachedir': local_cache})
# First build an element so that we can find its artifact
@@ -104,7 +104,7 @@ def test_artifact_delete_artifact(cli, tmpdir, datafiles):
artifact = os.path.join('test', os.path.splitext(element)[0], cache_key)
# Explicitly check that the ARTIFACT exists in the cache
- assert os.path.exists(os.path.join(local_cache, 'cas', 'refs', 'heads', artifact))
+ assert os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', artifact))
# Delete the artifact
result = cli.run(project=project, args=['artifact', 'delete', artifact])
@@ -122,7 +122,7 @@ def test_artifact_delete_element_and_artifact(cli, tmpdir, datafiles):
dep = 'compose-all.bst'
# Configure a local cache
- local_cache = os.path.join(str(tmpdir), 'artifacts')
+ local_cache = os.path.join(str(tmpdir), 'cache')
cli.configure({'cachedir': local_cache})
# First build an element so that we can find its artifact
@@ -138,14 +138,14 @@ def test_artifact_delete_element_and_artifact(cli, tmpdir, datafiles):
artifact = os.path.join('test', os.path.splitext(element)[0], cache_key)
# Explicitly check that the ARTIFACT exists in the cache
- assert os.path.exists(os.path.join(local_cache, 'cas', 'refs', 'heads', artifact))
+ assert os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', artifact))
# Delete the artifact
result = cli.run(project=project, args=['artifact', 'delete', artifact, dep])
result.assert_success()
# Check that the ARTIFACT is no longer in the cache
- assert not os.path.exists(os.path.join(local_cache, 'cas', 'refs', 'heads', artifact))
+ assert not os.path.exists(os.path.join(local_cache, 'artifacts', artifact))
# Check that the dependency ELEMENT is no longer cached
assert cli.get_element_state(project, dep) != 'cached'
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index bf25ab1af..dccbbfde3 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -66,8 +66,10 @@ def test_push_pull_all(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
- cas = os.path.join(cli.directory, 'cas')
- shutil.rmtree(cas)
+ casdir = os.path.join(cli.directory, 'cas')
+ shutil.rmtree(casdir)
+ artifactdir = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifactdir)
# Assert that nothing is cached locally anymore
states = cli.get_element_states(project, all_elements)
@@ -116,8 +118,10 @@ def test_push_pull_default_targets(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
- artifacts = os.path.join(cli.directory, 'cas')
- shutil.rmtree(artifacts)
+ casdir = os.path.join(cli.directory, 'cas')
+ shutil.rmtree(casdir)
+ artifactdir = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifactdir)
# Assert that nothing is cached locally anymore
states = cli.get_element_states(project, all_elements)
@@ -158,8 +162,10 @@ def test_pull_secondary_cache(cli, tmpdir, datafiles):
assert_shared(cli, share2, project, 'target.bst')
# Delete the user's local artifact cache.
- cas = os.path.join(cli.directory, 'cas')
- shutil.rmtree(cas)
+ casdir = os.path.join(cli.directory, 'cas')
+ shutil.rmtree(casdir)
+ artifactdir = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifactdir)
# Assert that the element is not cached anymore.
assert cli.get_element_state(project, 'target.bst') != 'cached'
@@ -212,8 +218,10 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the good_share.
#
- cas = os.path.join(cli.directory, 'cas')
- shutil.rmtree(cas)
+ casdir = os.path.join(cli.directory, 'cas')
+ shutil.rmtree(casdir)
+ artifactdir = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifactdir)
result = cli.run(project=project, args=['artifact', 'pull', 'target.bst', '--remote',
good_share.repo])
@@ -243,7 +251,7 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
result.assert_success()
assert cli.get_element_state(project, 'target.bst') == 'cached'
- # Assert that everything is now cached in the remote.
+ # Assert that everything is now cached in the reote.
all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
for element_name in all_elements:
assert_shared(cli, share, project, element_name)
@@ -251,8 +259,10 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
- cas = os.path.join(cli.directory, 'cas')
- shutil.rmtree(cas)
+ casdir = os.path.join(cli.directory, 'cas')
+ shutil.rmtree(casdir)
+ artifactdir = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifactdir)
# Assert that nothing is cached locally anymore
for element_name in all_elements:
@@ -301,8 +311,10 @@ def test_push_pull_track_non_strict(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
- cas = os.path.join(cli.directory, 'cas')
- shutil.rmtree(cas)
+ casdir = os.path.join(cli.directory, 'cas')
+ shutil.rmtree(casdir)
+ artifactdir = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifactdir)
# Assert that nothing is cached locally anymore
for element_name in all_elements:
@@ -339,6 +351,8 @@ def test_push_pull_cross_junction(cli, tmpdir, datafiles):
cache_dir = os.path.join(project, 'cache', 'cas')
shutil.rmtree(cache_dir)
+ artifact_dir = os.path.join(project, 'cache', 'artifacts')
+ shutil.rmtree(artifact_dir)
assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'buildable'
@@ -372,8 +386,10 @@ def test_pull_missing_blob(cli, tmpdir, datafiles):
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
- cas = os.path.join(cli.directory, 'cas')
- shutil.rmtree(cas)
+ casdir = os.path.join(cli.directory, 'cas')
+ shutil.rmtree(casdir)
+ artifactdir = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifactdir)
# Assert that nothing is cached locally anymore
for element_name in all_elements:
diff --git a/tests/frontend/remote-caches.py b/tests/frontend/remote-caches.py
index e3f10e6f7..089cf96e4 100644
--- a/tests/frontend/remote-caches.py
+++ b/tests/frontend/remote-caches.py
@@ -80,8 +80,9 @@ def test_source_artifact_caches(cli, tmpdir, datafiles):
# remove the artifact from the repo and check it pulls sources, builds
# and then pushes the artifacts
shutil.rmtree(os.path.join(cachedir, 'cas'))
- print(os.listdir(os.path.join(share.repodir, 'cas', 'refs', 'heads')))
- shutil.rmtree(os.path.join(share.repodir, 'cas', 'refs', 'heads', 'test'))
+ shutil.rmtree(os.path.join(cachedir, 'artifacts'))
+ print(os.listdir(os.path.join(share.repodir, 'artifacts', 'refs')))
+ shutil.rmtree(os.path.join(share.repodir, 'artifacts', 'refs', 'test'))
res = cli.run(project=project_dir, args=['build', 'repo.bst'])
res.assert_success()
diff --git a/tests/integration/artifact.py b/tests/integration/artifact.py
index cb9f070d5..b942c1690 100644
--- a/tests/integration/artifact.py
+++ b/tests/integration/artifact.py
@@ -48,6 +48,7 @@ DATA_DIR = os.path.join(
def test_cache_buildtrees(cli, tmpdir, datafiles):
project = str(datafiles)
element_name = 'autotools/amhello.bst'
+ cwd = str(tmpdir)
# Create artifact shares for pull & push testing
with create_artifact_share(os.path.join(str(tmpdir), 'share1')) as share1,\
@@ -68,20 +69,23 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
assert share1.has_artifact(cli.get_artifact_name(project, 'test', element_name))
# The buildtree dir should not exist, as we set the config to not cache buildtrees.
- cache_key = cli.get_element_key(project, element_name)
- elementdigest = share1.has_artifact(cli.get_artifact_name(project, 'test', element_name, cache_key=cache_key))
- with cli.artifact.extract_buildtree(tmpdir, elementdigest) as buildtreedir:
- assert not os.path.isdir(buildtreedir)
+
+ artifact_name = cli.get_artifact_name(project, 'test', element_name)
+ assert share1.has_artifact(artifact_name)
+ with cli.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
+ assert not buildtreedir
# Delete the local cached artifacts, and assert the when pulled with --pull-buildtrees
# that is was cached in share1 as expected without a buildtree dir
shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
assert cli.get_element_state(project, element_name) != 'cached'
result = cli.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
assert element_name in result.get_pulled_elements()
- with cli.artifact.extract_buildtree(tmpdir, elementdigest) as buildtreedir:
- assert not os.path.isdir(buildtreedir)
+ with cli.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
+ assert not buildtreedir
shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
# Assert that the default behaviour of pull to not include buildtrees on the artifact
# in share1 which was purposely cached with an empty one behaves as expected. As such the
@@ -89,9 +93,10 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
# leading to no buildtreedir being extracted
result = cli.run(project=project, args=['artifact', 'pull', element_name])
assert element_name in result.get_pulled_elements()
- with cli.artifact.extract_buildtree(tmpdir, elementdigest) as buildtreedir:
- assert not os.path.isdir(buildtreedir)
+ with cli.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
+ assert not buildtreedir
shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
# Repeat building the artifacts, this time with cache-buildtrees set to
# 'always' via the cli, as such the buildtree dir should not be empty
@@ -105,21 +110,22 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
assert share2.has_artifact(cli.get_artifact_name(project, 'test', element_name))
# Cache key will be the same however the digest hash will have changed as expected, so reconstruct paths
- elementdigest = share2.has_artifact(cli.get_artifact_name(project, 'test', element_name, cache_key=cache_key))
- with cli.artifact.extract_buildtree(tmpdir, elementdigest) as buildtreedir:
+ with cli.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert os.path.isdir(buildtreedir)
assert os.listdir(buildtreedir)
# Delete the local cached artifacts, and assert that when pulled with --pull-buildtrees
# that it was cached in share2 as expected with a populated buildtree dir
shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
assert cli.get_element_state(project, element_name) != 'cached'
result = cli.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
assert element_name in result.get_pulled_elements()
- with cli.artifact.extract_buildtree(tmpdir, elementdigest) as buildtreedir:
+ with cli.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert os.path.isdir(buildtreedir)
assert os.listdir(buildtreedir)
shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
# Clarify that the user config option for cache-buildtrees works as the cli
# main option does. Point to share3 which does not have the artifacts cached to force
@@ -132,7 +138,6 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
result = cli.run(project=project, args=['build', element_name])
assert result.exit_code == 0
assert cli.get_element_state(project, element_name) == 'cached'
- elementdigest = share3.has_artifact(cli.get_artifact_name(project, 'test', element_name))
- with cli.artifact.extract_buildtree(tmpdir, elementdigest) as buildtreedir:
+ with cli.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert os.path.isdir(buildtreedir)
assert os.listdir(buildtreedir)
diff --git a/tests/integration/pullbuildtrees.py b/tests/integration/pullbuildtrees.py
index dfef18e7f..f37023e49 100644
--- a/tests/integration/pullbuildtrees.py
+++ b/tests/integration/pullbuildtrees.py
@@ -40,6 +40,7 @@ def default_state(cli, tmpdir, share):
def test_pullbuildtrees(cli2, tmpdir, datafiles):
project = str(datafiles)
element_name = 'autotools/amhello.bst'
+ cwd = str(tmpdir)
# Create artifact shares for pull & push testing
with create_artifact_share(os.path.join(str(tmpdir), 'share1')) as share1,\
@@ -75,12 +76,12 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
# Also assert that the buildtree is added to the local CAS.
result = cli2.run(project=project, args=['artifact', 'pull', element_name])
assert element_name in result.get_pulled_elements()
- elementdigest = share1.has_artifact(cli2.get_artifact_name(project, 'test', element_name))
- with cli2.artifact.extract_buildtree(tmpdir, elementdigest) as buildtreedir:
+ artifact_name = cli2.get_artifact_name(project, 'test', element_name)
+ with cli2.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert not buildtreedir
result = cli2.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
assert element_name in result.get_pulled_elements()
- with cli2.artifact.extract_buildtree(tmpdir, elementdigest) as buildtreedir:
+ with cli2.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert os.path.isdir(buildtreedir)
default_state(cli2, tmpdir, share1)
@@ -139,7 +140,7 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
result = cli2.run(project=project, args=['--pull-buildtrees', 'artifact', 'push', element_name])
assert "Attempting to fetch missing artifact buildtrees" in result.stderr
assert element_name not in result.get_pulled_elements()
- with cli2.artifact.extract_buildtree(tmpdir, elementdigest) as buildtreedir:
+ with cli2.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert not buildtreedir
assert element_name not in result.get_pushed_elements()
assert not share3.has_artifact(cli2.get_artifact_name(project, 'test', element_name))
@@ -152,7 +153,7 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
result = cli2.run(project=project, args=['--pull-buildtrees', 'artifact', 'push', element_name])
assert "Attempting to fetch missing artifact buildtrees" in result.stderr
assert element_name in result.get_pulled_elements()
- with cli2.artifact.extract_buildtree(tmpdir, elementdigest) as buildtreedir:
+ with cli2.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert os.path.isdir(buildtreedir)
assert element_name in result.get_pushed_elements()
assert share3.has_artifact(cli2.get_artifact_name(project, 'test', element_name))
diff --git a/tests/integration/shellbuildtrees.py b/tests/integration/shellbuildtrees.py
index d03344992..aa41eba36 100644
--- a/tests/integration/shellbuildtrees.py
+++ b/tests/integration/shellbuildtrees.py
@@ -196,6 +196,7 @@ def test_buildtree_pulled(cli, tmpdir, datafiles):
# Discard the cache
shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'cas')))
+ shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'artifacts')))
assert cli.get_element_state(project, element_name) != 'cached'
# Pull from cache, ensuring cli options is set to pull the buildtree
@@ -229,6 +230,7 @@ def test_buildtree_options(cli, tmpdir, datafiles):
# Discard the cache
shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'cas')))
+ shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'artifacts')))
assert cli.get_element_state(project, element_name) != 'cached'
# Pull from cache, but do not include buildtrees.
@@ -269,6 +271,7 @@ def test_buildtree_options(cli, tmpdir, datafiles):
assert 'Attempting to fetch missing artifact buildtree' in res.stderr
assert 'Hi' in res.output
shutil.rmtree(os.path.join(os.path.join(str(tmpdir), 'cache', 'cas')))
+ shutil.rmtree(os.path.join(os.path.join(str(tmpdir), 'cache', 'artifacts')))
assert cli.get_element_state(project, element_name) != 'cached'
# Check it's not loading the shell at all with always set for the buildtree, when the
diff --git a/tests/sourcecache/fetch.py b/tests/sourcecache/fetch.py
index bc025cbd5..899e162aa 100644
--- a/tests/sourcecache/fetch.py
+++ b/tests/sourcecache/fetch.py
@@ -95,6 +95,7 @@ def test_source_fetch(cli, tmpdir, datafiles):
os.path.join(str(tmpdir), 'cache', 'cas'),
os.path.join(str(tmpdir), 'sourceshare', 'repo'))
shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'sources'))
+ shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'artifacts'))
digest = share.cas.resolve_ref(source._get_source_name())
assert share.has_object(digest)
diff --git a/tests/testutils/artifactshare.py b/tests/testutils/artifactshare.py
index fca01497a..bc69a87d8 100644
--- a/tests/testutils/artifactshare.py
+++ b/tests/testutils/artifactshare.py
@@ -10,6 +10,7 @@ from buildstream._cas import CASCache
from buildstream._cas.casserver import create_server
from buildstream._exceptions import CASError
from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
+from buildstream._protos.buildstream.v2 import artifact_pb2
# ArtifactShare()
@@ -42,6 +43,8 @@ class ArtifactShare():
#
self.repodir = os.path.join(self.directory, 'repo')
os.makedirs(self.repodir)
+ self.artifactdir = os.path.join(self.repodir, 'artifacts', 'refs')
+ os.makedirs(self.artifactdir)
self.cas = CASCache(self.repodir)
@@ -126,17 +129,45 @@ class ArtifactShare():
# Returns:
# (str): artifact digest if the artifact exists in the share, otherwise None.
def has_artifact(self, artifact_name):
+
+ artifact_proto = artifact_pb2.Artifact()
+ artifact_path = os.path.join(self.artifactdir, artifact_name)
+
+ try:
+ with open(artifact_path, 'rb') as f:
+ artifact_proto.ParseFromString(f.read())
+ except FileNotFoundError:
+ return None
+
+ reachable = set()
+
+ def reachable_dir(digest):
+ self.cas._reachable_refs_dir(
+ reachable, digest, update_mtime=False, check_exists=True)
+
try:
- tree = self.cas.resolve_ref(artifact_name)
- reachable = set()
- try:
- self.cas._reachable_refs_dir(reachable, tree, update_mtime=False, check_exists=True)
- except FileNotFoundError:
- return None
- return tree
+ if str(artifact_proto.files):
+ reachable_dir(artifact_proto.files)
+
+ if str(artifact_proto.buildtree):
+ reachable_dir(artifact_proto.buildtree)
+
+ if str(artifact_proto.public_data):
+ if not os.path.exists(self.cas.objpath(artifact_proto.public_data)):
+ return None
+
+ for log_file in artifact_proto.logs:
+ if not os.path.exists(self.cas.objpath(log_file.digest)):
+ return None
+
+ return artifact_proto.files
+
except CASError:
return None
+ except FileNotFoundError:
+ return None
+
# close():
#
# Remove the artifact share.