author    Jürg Billeter <j@bitron.ch>  2019-02-18 11:26:20 +0100
committer Jürg Billeter <j@bitron.ch>  2019-02-18 11:29:25 +0100
commit    3b881efc9ea5ca440d08e3bd901bec5ebdfc5581 (patch)
tree      b4ed4996ed9155ba82b2b90b63bd69d2d1a4e5fb /buildstream
parent    050249bb90b4a4f8a25842535ec0526d407b5804 (diff)
element.py: Expand scope of timed_activity in _cache_artifact()
ArtifactCache.commit() is no longer the only expensive part of caching an artifact, and this will be even more the case with the upcoming change to use CASBasedDirectory.
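For context, the wider scope matters because timed_activity() is used as a context manager and only work executed inside its with block is attributed to the activity. A minimal, self-contained sketch of that pattern follows; it is an illustrative stand-in, not the real Plugin.timed_activity() implementation, and the sleeps merely mimic the staging and commit work.

    # Illustrative stand-in for timed_activity(); only work inside the `with`
    # block is counted, which is why the patch moves the whole artifact
    # staging inside it instead of timing ArtifactCache.commit() alone.
    import contextlib
    import time

    @contextlib.contextmanager
    def timed_activity(name):
        start = time.monotonic()
        try:
            yield
        finally:
            print("{}: elapsed {:.3f}s".format(name, time.monotonic() - start))

    with timed_activity("Caching artifact"):
        time.sleep(0.1)   # stand-in for staging files, logs and metadata
        time.sleep(0.1)   # stand-in for the ArtifactCache.commit() call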
Diffstat (limited to 'buildstream')
-rw-r--r--  buildstream/element.py  188
1 file changed, 94 insertions, 94 deletions
diff --git a/buildstream/element.py b/buildstream/element.py
index b3f4d5518..bb98ce6b3 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -1670,106 +1670,106 @@ class Element(Plugin):
cleanup_rootdir()
def _cache_artifact(self, rootdir, sandbox, collect):
- if collect is not None:
- try:
- sandbox_vroot = sandbox.get_virtual_directory()
- collectvdir = sandbox_vroot.descend(collect.lstrip(os.sep).split(os.sep))
- except VirtualDirectoryError:
- # No collect directory existed
- collectvdir = None
-
- context = self._get_context()
+ with self.timed_activity("Caching artifact"):
+ if collect is not None:
+ try:
+ sandbox_vroot = sandbox.get_virtual_directory()
+ collectvdir = sandbox_vroot.descend(collect.lstrip(os.sep).split(os.sep))
+ except VirtualDirectoryError:
+ # No collect directory existed
+ collectvdir = None
- # Create artifact directory structure
- assembledir = os.path.join(rootdir, 'artifact')
- filesdir = os.path.join(assembledir, 'files')
- logsdir = os.path.join(assembledir, 'logs')
- metadir = os.path.join(assembledir, 'meta')
- buildtreedir = os.path.join(assembledir, 'buildtree')
- os.mkdir(assembledir)
- if collect is not None and collectvdir is not None:
- os.mkdir(filesdir)
- os.mkdir(logsdir)
- os.mkdir(metadir)
- os.mkdir(buildtreedir)
-
- # Hard link files from collect dir to files directory
- if collect is not None and collectvdir is not None:
- collectvdir.export_files(filesdir, can_link=True)
-
- cache_buildtrees = context.cache_buildtrees
- build_success = self.__build_result[0]
-
- # cache_buildtrees defaults to 'always', as such the
- # default behaviour is to attempt to cache them. If only
- # caching failed artifact buildtrees, then query the build
- # result. Element types without a build-root dir will be cached
- # with an empty buildtreedir regardless of this configuration.
-
- if cache_buildtrees == 'always' or (cache_buildtrees == 'failure' and not build_success):
- try:
- sandbox_vroot = sandbox.get_virtual_directory()
- sandbox_build_dir = sandbox_vroot.descend(
- self.get_variable('build-root').lstrip(os.sep).split(os.sep))
- # Hard link files from build-root dir to buildtreedir directory
- sandbox_build_dir.export_files(buildtreedir)
- except VirtualDirectoryError:
- # Directory could not be found. Pre-virtual
- # directory behaviour was to continue silently
- # if the directory could not be found.
- pass
+ context = self._get_context()
- # Copy build log
- log_filename = context.get_log_filename()
- self._build_log_path = os.path.join(logsdir, 'build.log')
- if log_filename:
- shutil.copyfile(log_filename, self._build_log_path)
-
- # Store public data
- _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
-
- # Store result
- build_result_dict = {"success": self.__build_result[0], "description": self.__build_result[1]}
- if self.__build_result[2] is not None:
- build_result_dict["detail"] = self.__build_result[2]
- _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
-
- # ensure we have cache keys
- self._assemble_done()
-
- # Store keys.yaml
- _yaml.dump(_yaml.node_sanitize({
- 'strong': self._get_cache_key(),
- 'weak': self._get_cache_key(_KeyStrength.WEAK),
- }), os.path.join(metadir, 'keys.yaml'))
-
- # Store dependencies.yaml
- _yaml.dump(_yaml.node_sanitize({
- e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
- }), os.path.join(metadir, 'dependencies.yaml'))
-
- # Store workspaced.yaml
- _yaml.dump(_yaml.node_sanitize({
- 'workspaced': bool(self._get_workspace())
- }), os.path.join(metadir, 'workspaced.yaml'))
-
- # Store workspaced-dependencies.yaml
- _yaml.dump(_yaml.node_sanitize({
- 'workspaced-dependencies': [
- e.name for e in self.dependencies(Scope.BUILD)
- if e._get_workspace()
- ]
- }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
+ # Create artifact directory structure
+ assembledir = os.path.join(rootdir, 'artifact')
+ filesdir = os.path.join(assembledir, 'files')
+ logsdir = os.path.join(assembledir, 'logs')
+ metadir = os.path.join(assembledir, 'meta')
+ buildtreedir = os.path.join(assembledir, 'buildtree')
+ os.mkdir(assembledir)
+ if collect is not None and collectvdir is not None:
+ os.mkdir(filesdir)
+ os.mkdir(logsdir)
+ os.mkdir(metadir)
+ os.mkdir(buildtreedir)
+
+ # Hard link files from collect dir to files directory
+ if collect is not None and collectvdir is not None:
+ collectvdir.export_files(filesdir, can_link=True)
+
+ cache_buildtrees = context.cache_buildtrees
+ build_success = self.__build_result[0]
+
+ # cache_buildtrees defaults to 'always', as such the
+ # default behaviour is to attempt to cache them. If only
+ # caching failed artifact buildtrees, then query the build
+ # result. Element types without a build-root dir will be cached
+ # with an empty buildtreedir regardless of this configuration.
+
+ if cache_buildtrees == 'always' or (cache_buildtrees == 'failure' and not build_success):
+ try:
+ sandbox_vroot = sandbox.get_virtual_directory()
+ sandbox_build_dir = sandbox_vroot.descend(
+ self.get_variable('build-root').lstrip(os.sep).split(os.sep))
+ # Hard link files from build-root dir to buildtreedir directory
+ sandbox_build_dir.export_files(buildtreedir)
+ except VirtualDirectoryError:
+ # Directory could not be found. Pre-virtual
+ # directory behaviour was to continue silently
+ # if the directory could not be found.
+ pass
+
+ # Copy build log
+ log_filename = context.get_log_filename()
+ self._build_log_path = os.path.join(logsdir, 'build.log')
+ if log_filename:
+ shutil.copyfile(log_filename, self._build_log_path)
+
+ # Store public data
+ _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
+
+ # Store result
+ build_result_dict = {"success": self.__build_result[0], "description": self.__build_result[1]}
+ if self.__build_result[2] is not None:
+ build_result_dict["detail"] = self.__build_result[2]
+ _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
+
+ # ensure we have cache keys
+ self._assemble_done()
+
+ # Store keys.yaml
+ _yaml.dump(_yaml.node_sanitize({
+ 'strong': self._get_cache_key(),
+ 'weak': self._get_cache_key(_KeyStrength.WEAK),
+ }), os.path.join(metadir, 'keys.yaml'))
+
+ # Store dependencies.yaml
+ _yaml.dump(_yaml.node_sanitize({
+ e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
+ }), os.path.join(metadir, 'dependencies.yaml'))
+
+ # Store workspaced.yaml
+ _yaml.dump(_yaml.node_sanitize({
+ 'workspaced': bool(self._get_workspace())
+ }), os.path.join(metadir, 'workspaced.yaml'))
+
+ # Store workspaced-dependencies.yaml
+ _yaml.dump(_yaml.node_sanitize({
+ 'workspaced-dependencies': [
+ e.name for e in self.dependencies(Scope.BUILD)
+ if e._get_workspace()
+ ]
+ }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
- with self.timed_activity("Caching artifact"):
artifact_size = utils._get_dir_size(assembledir)
self.__artifacts.commit(self, assembledir, self.__get_cache_keys_for_commit())
- if collect is not None and collectvdir is None:
- raise ElementError(
- "Directory '{}' was not found inside the sandbox, "
- "unable to collect artifact contents"
- .format(collect))
+ if collect is not None and collectvdir is None:
+ raise ElementError(
+ "Directory '{}' was not found inside the sandbox, "
+ "unable to collect artifact contents"
+ .format(collect))
return artifact_size