author     Jim MacArthur <jim.macarthur@codethink.co.uk>   2018-12-06 12:21:15 +0000
committer  Jürg Billeter <j@bitron.ch>                     2019-02-18 08:10:50 +0100
commit     f6466b8e4869aacc1c9bc541df646186a4f36a18 (patch)
tree       71a257bfa544125d0432640ebd627a5b2ffc3e4f
parent     d48c780e838557f6ce5ec5d310c9745f907856f9 (diff)
download   buildstream-jmac/cache_artifacts_with_vdir.tar.gz

element.py: Alter _cache_artifact to use CasBasedDirectory (branch jmac/cache_artifacts_with_vdir)

-rw-r--r--  buildstream/_artifactcache.py    7
-rw-r--r--  buildstream/element.py         194
2 files changed, 104 insertions, 97 deletions
diff --git a/buildstream/_artifactcache.py b/buildstream/_artifactcache.py
index bc0032bec..b72b20fda 100644
--- a/buildstream/_artifactcache.py
+++ b/buildstream/_artifactcache.py
@@ -588,13 +588,16 @@ class ArtifactCache():
#
# Args:
# element (Element): The Element commit an artifact for
- # content (str): The element's content directory
+ # content (Directory): The element's content directory
# keys (list): The cache keys to use
#
def commit(self, element, content, keys):
refs = [element.get_artifact_name(key) for key in keys]
- self.cas.commit(refs, content)
+ tree = content._get_digest()
+
+ for ref in refs:
+ self.cas.set_ref(ref, tree)
# diff():
#
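
For context: the reworked commit() no longer imports a filesystem path into the CAS; the content it receives is already a CAS-backed Directory, so only the refs need publishing. A minimal sketch of that idea, reusing only the calls visible in the hunk above (element.get_artifact_name(), content._get_digest(), cas.set_ref()); the function name and argument order here are illustrative, not BuildStream API:

    # Illustrative sketch, not BuildStream code: mirrors the new commit() body above.
    def publish_refs(cas, element, content, keys):
        # One tree digest, many refs: each cache key becomes a named
        # pointer at the same already-stored CAS tree, so no filesystem
        # import is needed at commit time.
        refs = [element.get_artifact_name(key) for key in keys]
        tree = content._get_digest()
        for ref in refs:
            cas.set_ref(ref, tree)
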
diff --git a/buildstream/element.py b/buildstream/element.py
index b3f4d5518..8fc491204 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -103,6 +103,7 @@ from .types import _KeyStrength, CoreWarnings
from .storage.directory import Directory
from .storage._filebaseddirectory import FileBasedDirectory
+from .storage._casbaseddirectory import CasBasedDirectory
from .storage.directory import VirtualDirectoryError
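
The second hunk below leans on the virtual directory API this import brings in: CasBasedDirectory(cas_cache=..., ref=None) starts an empty CAS-backed tree, descend(..., create=True) materialises subdirectories, import_files() pulls content in from another Directory or a plain host path, and _get_digest() yields the tree digest that commit() publishes. A small sketch of how those calls compose, using only methods that appear in the diff; the cas_cache handle is assumed to come from context.artifactcache.cas as in the hunk below, and _casbaseddirectory is an internal module:

    # Illustrative sketch based on the calls used in the hunk below.
    from buildstream.storage._casbaseddirectory import CasBasedDirectory

    def assemble_example(cas_cache, logs_on_disk):
        root = CasBasedDirectory(cas_cache=cas_cache, ref=None)  # empty CAS-backed tree
        logs = root.descend("logs", create=True)                 # create a subdirectory on demand
        logs.import_files(logs_on_disk)                          # import from a host path
        return root._get_digest()                                # digest that refs point at
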
@@ -1670,106 +1671,109 @@ class Element(Plugin):
cleanup_rootdir()
def _cache_artifact(self, rootdir, sandbox, collect):
- if collect is not None:
- try:
- sandbox_vroot = sandbox.get_virtual_directory()
- collectvdir = sandbox_vroot.descend(collect.lstrip(os.sep).split(os.sep))
- except VirtualDirectoryError:
- # No collect directory existed
- collectvdir = None
+ with self.timed_activity("Caching artifact"):
+ if collect is not None:
+ try:
+ sandbox_vroot = sandbox.get_virtual_directory()
+ collectvdir = sandbox_vroot.descend(collect.lstrip(os.sep).split(os.sep))
+ except VirtualDirectoryError:
+ # No collect directory existed
+ collectvdir = None
- context = self._get_context()
+ context = self._get_context()
- # Create artifact directory structure
- assembledir = os.path.join(rootdir, 'artifact')
- filesdir = os.path.join(assembledir, 'files')
- logsdir = os.path.join(assembledir, 'logs')
- metadir = os.path.join(assembledir, 'meta')
- buildtreedir = os.path.join(assembledir, 'buildtree')
- os.mkdir(assembledir)
- if collect is not None and collectvdir is not None:
- os.mkdir(filesdir)
- os.mkdir(logsdir)
- os.mkdir(metadir)
- os.mkdir(buildtreedir)
-
- # Hard link files from collect dir to files directory
- if collect is not None and collectvdir is not None:
- collectvdir.export_files(filesdir, can_link=True)
-
- cache_buildtrees = context.cache_buildtrees
- build_success = self.__build_result[0]
-
- # cache_buildtrees defaults to 'always', as such the
- # default behaviour is to attempt to cache them. If only
- # caching failed artifact buildtrees, then query the build
- # result. Element types without a build-root dir will be cached
- # with an empty buildtreedir regardless of this configuration.
-
- if cache_buildtrees == 'always' or (cache_buildtrees == 'failure' and not build_success):
- try:
+ assemblevdir = CasBasedDirectory(cas_cache=context.artifactcache.cas, ref=None)
+ logsvdir = assemblevdir.descend("logs", create=True)
+ metavdir = assemblevdir.descend("meta", create=True)
+ buildtreevdir = assemblevdir.descend("buildtree", create=True)
+
+ # Create artifact directory structure
+ assembledir = os.path.join(rootdir, 'artifact')
+ logsdir = os.path.join(assembledir, 'logs')
+ metadir = os.path.join(assembledir, 'meta')
+ os.mkdir(assembledir)
+ os.mkdir(logsdir)
+ os.mkdir(metadir)
+
+ if collect is not None and collectvdir is not None:
+ filesvdir = assemblevdir.descend("files", create=True)
+ filesvdir.import_files(collectvdir)
+
+ cache_buildtrees = context.cache_buildtrees
+ build_success = self.__build_result[0]
+
+ # cache_buildtrees defaults to 'always', as such the
+ # default behaviour is to attempt to cache them. If only
+ # caching failed artifact buildtrees, then query the build
+ # result. Element types without a build-root dir will be cached
+ # with an empty buildtreedir regardless of this configuration.
+
+ if cache_buildtrees == 'always' or (cache_buildtrees == 'failure' and not build_success):
sandbox_vroot = sandbox.get_virtual_directory()
- sandbox_build_dir = sandbox_vroot.descend(
- self.get_variable('build-root').lstrip(os.sep).split(os.sep))
- # Hard link files from build-root dir to buildtreedir directory
- sandbox_build_dir.export_files(buildtreedir)
- except VirtualDirectoryError:
- # Directory could not be found. Pre-virtual
- # directory behaviour was to continue silently
- # if the directory could not be found.
- pass
+ try:
+ sandbox_build_dir = sandbox_vroot.descend(
+ self.get_variable('build-root').lstrip(os.sep).split(os.sep))
+ buildtreevdir.import_files(sandbox_build_dir)
+ except VirtualDirectoryError:
+ # Directory could not be found. Pre-virtual
+ # directory behaviour was to continue silently
+ # if the directory could not be found.
+ pass
+
+ # Write some logs out to normal directories: logsdir and metadir
+ # Copy build log
+ log_filename = context.get_log_filename()
+ self._build_log_path = os.path.join(logsdir, 'build.log')
+ if log_filename:
+ shutil.copyfile(log_filename, self._build_log_path)
+
+ # Store public data
+ _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
+
+ # Store result
+ build_result_dict = {"success": self.__build_result[0], "description": self.__build_result[1]}
+ if self.__build_result[2] is not None:
+ build_result_dict["detail"] = self.__build_result[2]
+ _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
+
+ # ensure we have cache keys
+ self._assemble_done()
+
+ # Store keys.yaml
+ _yaml.dump(_yaml.node_sanitize({
+ 'strong': self._get_cache_key(),
+ 'weak': self._get_cache_key(_KeyStrength.WEAK),
+ }), os.path.join(metadir, 'keys.yaml'))
+
+ # Store dependencies.yaml
+ _yaml.dump(_yaml.node_sanitize({
+ e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
+ }), os.path.join(metadir, 'dependencies.yaml'))
+
+ # Store workspaced.yaml
+ _yaml.dump(_yaml.node_sanitize({
+ 'workspaced': bool(self._get_workspace())
+ }), os.path.join(metadir, 'workspaced.yaml'))
+
+ # Store workspaced-dependencies.yaml
+ _yaml.dump(_yaml.node_sanitize({
+ 'workspaced-dependencies': [
+ e.name for e in self.dependencies(Scope.BUILD)
+ if e._get_workspace()
+ ]
+ }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
- # Copy build log
- log_filename = context.get_log_filename()
- self._build_log_path = os.path.join(logsdir, 'build.log')
- if log_filename:
- shutil.copyfile(log_filename, self._build_log_path)
-
- # Store public data
- _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
-
- # Store result
- build_result_dict = {"success": self.__build_result[0], "description": self.__build_result[1]}
- if self.__build_result[2] is not None:
- build_result_dict["detail"] = self.__build_result[2]
- _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
-
- # ensure we have cache keys
- self._assemble_done()
-
- # Store keys.yaml
- _yaml.dump(_yaml.node_sanitize({
- 'strong': self._get_cache_key(),
- 'weak': self._get_cache_key(_KeyStrength.WEAK),
- }), os.path.join(metadir, 'keys.yaml'))
-
- # Store dependencies.yaml
- _yaml.dump(_yaml.node_sanitize({
- e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
- }), os.path.join(metadir, 'dependencies.yaml'))
-
- # Store workspaced.yaml
- _yaml.dump(_yaml.node_sanitize({
- 'workspaced': bool(self._get_workspace())
- }), os.path.join(metadir, 'workspaced.yaml'))
-
- # Store workspaced-dependencies.yaml
- _yaml.dump(_yaml.node_sanitize({
- 'workspaced-dependencies': [
- e.name for e in self.dependencies(Scope.BUILD)
- if e._get_workspace()
- ]
- }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
+ metavdir.import_files(metadir)
+ logsvdir.import_files(logsdir)
- with self.timed_activity("Caching artifact"):
- artifact_size = utils._get_dir_size(assembledir)
- self.__artifacts.commit(self, assembledir, self.__get_cache_keys_for_commit())
-
- if collect is not None and collectvdir is None:
- raise ElementError(
- "Directory '{}' was not found inside the sandbox, "
- "unable to collect artifact contents"
- .format(collect))
+ artifact_size = assemblevdir.get_size()
+ self.__artifacts.commit(self, assemblevdir, self.__get_cache_keys_for_commit())
+
+ if collect is not None and collectvdir is None:
+ raise ElementError(
+ "Directory '{}' was not found inside the sandbox, "
+ "unable to collect artifact contents"
+ .format(collect))
return artifact_size
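
For a quick sanity check of what ends up in the artifact, the assembled tree can be exported back to a plain directory and walked; export_files() is used in the diff above, while the temporary-directory handling and helper name here are assumed for illustration:

    # Illustrative sketch: dump the layout of an assembled artifact tree.
    import os
    import tempfile

    def dump_artifact_layout(assemblevdir):
        # Export the CAS-backed tree to disk and print its relative paths,
        # which should include files/..., logs/build.log, meta/*.yaml and,
        # when build trees are cached, buildtree/...
        with tempfile.TemporaryDirectory() as tmpdir:
            assemblevdir.export_files(tmpdir)
            for dirpath, _, filenames in os.walk(tmpdir):
                for name in filenames:
                    print(os.path.relpath(os.path.join(dirpath, name), tmpdir))
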