summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJürg Billeter <j@bitron.ch>2020-01-23 15:56:18 +0100
committerJürg Billeter <j@bitron.ch>2020-02-11 21:08:59 +0100
commit83c210ef715014db459a2996ddef447bc1969257 (patch)
tree0e154331a90f4010a46d28a9175ff72524281016
parent3669dcd8ed57ccb0849858302c1d9887b05afb54 (diff)
downloadbuildstream-83c210ef715014db459a2996ddef447bc1969257.tar.gz
element.py: Store sources vdir when caching buildtrees
This will be used for incremental (workspace) builds. Always store sources when already caching buildtrees. The overhead is expected to be negligible as the buildtree is normally a superset of the sources.
-rw-r--r--src/buildstream/_artifact.py8
-rw-r--r--src/buildstream/element.py8
2 files changed, 14 insertions, 2 deletions
diff --git a/src/buildstream/_artifact.py b/src/buildstream/_artifact.py
index 8e8def29b..c405f1e15 100644
--- a/src/buildstream/_artifact.py
+++ b/src/buildstream/_artifact.py
@@ -135,13 +135,14 @@ class Artifact:
# Args:
# sandbox_build_dir (Directory): Virtual Directory object for the sandbox build-root
# collectvdir (Directory): Virtual Directoy object from within the sandbox for collection
+ # sourcesvdir (Directory): Virtual Directory object for the staged sources
# buildresult (tuple): bool, short desc and detailed desc of result
# publicdata (dict): dict of public data to commit to artifact metadata
#
# Returns:
# (int): The size of the newly cached artifact
#
- def cache(self, sandbox_build_dir, collectvdir, buildresult, publicdata):
+ def cache(self, sandbox_build_dir, collectvdir, sourcesvdir, buildresult, publicdata):
context = self._context
element = self._element
@@ -204,6 +205,11 @@ class Artifact:
artifact.buildtree.CopyFrom(buildtreevdir._get_digest())
size += buildtreevdir.get_size()
+ # Store sources
+ if sourcesvdir:
+ artifact.sources.CopyFrom(sourcesvdir._get_digest())
+ size += sourcesvdir.get_size()
+
os.makedirs(os.path.dirname(os.path.join(self._artifactdir, element.get_artifact_name())), exist_ok=True)
keys = utils._deduplicate([self._cache_key, self._weak_cache_key])
for key in keys:
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index 41030168d..a25528ee7 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -254,6 +254,7 @@ class Element(Plugin):
self.__cached_remotely = None # Whether the element is cached remotely
# List of Sources
self.__sources = [] # type: List[Source]
+ self.__sources_vdir = None # Directory with staged sources
self.__weak_cache_key = None # Our cached weak cache key
self.__strict_cache_key = None # Our cached cache key for strict builds
self.__artifacts = context.artifactcache # Artifact cache
@@ -1393,6 +1394,8 @@ class Element(Plugin):
reason="import-source-files-fail",
)
+ self.__sources_vdir = import_dir
+
# Set update_mtime to ensure deterministic mtime of sources at build time
with utils._deterministic_umask():
vdirectory.import_files(import_dir, update_mtime=BST_ARBITRARY_TIMESTAMP)
@@ -1615,6 +1618,7 @@ class Element(Plugin):
sandbox_vroot = sandbox.get_virtual_directory()
collectvdir = None
sandbox_build_dir = None
+ sourcesvdir = None
cache_buildtrees = context.cache_buildtrees
build_success = buildresult[0]
@@ -1639,6 +1643,8 @@ class Element(Plugin):
# if the directory could not be found.
pass
+ sourcesvdir = self.__sources_vdir
+
if collect is not None:
try:
collectvdir = sandbox_vroot.descend(*collect.lstrip(os.sep).split(os.sep))
@@ -1650,7 +1656,7 @@ class Element(Plugin):
self.__update_cache_key_non_strict()
with self.timed_activity("Caching artifact"):
- artifact_size = self.__artifact.cache(sandbox_build_dir, collectvdir, buildresult, publicdata)
+ artifact_size = self.__artifact.cache(sandbox_build_dir, collectvdir, sourcesvdir, buildresult, publicdata)
if collect is not None and collectvdir is None:
raise ElementError(