author     Jürg Billeter <j@bitron.ch>  2020-02-17 10:37:34 +0000
committer  Jürg Billeter <j@bitron.ch>  2020-02-17 10:37:34 +0000
commit     f126f0afcd4df803d4a13a0f54b26afd1b173dd9 (patch)
tree       214760a519a8899d55c241e5376d91a76344aa59
parent     16b98a8c24dd8c8aa5a43cba8bf3aa09d98251cd (diff)
parent     21529450d3d39482fcaa05f4c51acbb1eb17371b (diff)
download   buildstream-f126f0afcd4df803d4a13a0f54b26afd1b173dd9.tar.gz
Merge branch 'juerg/incremental-workspace-build' into 'master'

Reimplement support for incremental workspace builds

See merge request BuildStream/buildstream!1816
-rw-r--r--  src/buildstream/_artifact.py  61
-rw-r--r--  src/buildstream/_artifactcache.py  27
-rw-r--r--  src/buildstream/_cas/cascache.py  68
-rw-r--r--  src/buildstream/_loader/loader.py  2
-rw-r--r--  src/buildstream/_protos/buildstream/v2/artifact.proto  5
-rw-r--r--  src/buildstream/_protos/buildstream/v2/artifact_pb2.py  32
-rw-r--r--  src/buildstream/_stream.py  3
-rw-r--r--  src/buildstream/_workspaces.py  106
-rw-r--r--  src/buildstream/buildelement.py  32
-rw-r--r--  src/buildstream/element.py  180
-rw-r--r--  src/buildstream/plugins/sources/workspace.py  8
-rw-r--r--  src/buildstream/sandbox/_sandboxreapi.py  11
-rw-r--r--  src/buildstream/sandbox/sandbox.py  45
-rw-r--r--  src/buildstream/storage/_casbaseddirectory.py  14
-rw-r--r--  src/buildstream/storage/_filebaseddirectory.py  7
-rw-r--r--  src/buildstream/storage/directory.py  6
-rw-r--r--  src/buildstream/utils.py  12
-rw-r--r--  tests/artifactcache/junctions/parent/.bst/workspaces.yml  0
-rw-r--r--  tests/cachekey/project/.bst/workspaces.yml  0
-rw-r--r--  tests/frontend/workspace.py  59
-rw-r--r--  tests/integration/project/files/workspace-incremental/Makefile  7
-rw-r--r--  tests/integration/project/files/workspace-incremental/source  1
-rw-r--r--  tests/integration/project/files/workspace-partial/Makefile  10
-rw-r--r--  tests/integration/project/files/workspace-partial/source1  1
-rw-r--r--  tests/integration/project/files/workspace-partial/source2  1
-rw-r--r--  tests/integration/workspace.py  157
-rw-r--r--  tests/remoteexecution/workspace.py  30
27 files changed, 443 insertions(+), 442 deletions(-)
diff --git a/src/buildstream/_artifact.py b/src/buildstream/_artifact.py
index ae1b395b3..a9cd56c2a 100644
--- a/src/buildstream/_artifact.py
+++ b/src/buildstream/_artifact.py
@@ -90,6 +90,18 @@ class Artifact:
return CasBasedDirectory(self._cas, digest=buildtree_digest)
+ # get_sources():
+ #
+ # Get a virtual directory for the artifact sources
+ #
+ # Returns:
+ # (Directory): The virtual directory object
+ #
+ def get_sources(self):
+ sources_digest = self._get_field_digest("sources")
+
+ return CasBasedDirectory(self._cas, digest=sources_digest)
+
# get_logs():
#
# Get the paths of the artifact's logs
@@ -121,16 +133,16 @@ class Artifact:
# Create the artifact and commit to cache
#
# Args:
- # rootdir (str): An absolute path to the temp rootdir for artifact construct
# sandbox_build_dir (Directory): Virtual Directory object for the sandbox build-root
# collectvdir (Directory): Virtual Directory object from within the sandbox for collection
+ # sourcesvdir (Directory): Virtual Directory object for the staged sources
# buildresult (tuple): bool, short desc and detailed desc of result
# publicdata (dict): dict of public data to commit to artifact metadata
#
# Returns:
# (int): The size of the newly cached artifact
#
- def cache(self, rootdir, sandbox_build_dir, collectvdir, buildresult, publicdata):
+ def cache(self, sandbox_build_dir, collectvdir, sourcesvdir, buildresult, publicdata):
context = self._context
element = self._element
@@ -153,11 +165,12 @@ class Artifact:
artifact.weak_key = self._weak_cache_key
artifact.was_workspaced = bool(element._get_workspace())
+ properties = ["MTime"] if artifact.was_workspaced else []
# Store files
if collectvdir:
filesvdir = CasBasedDirectory(cas_cache=self._cas)
- filesvdir.import_files(collectvdir)
+ filesvdir.import_files(collectvdir, properties=properties)
artifact.files.CopyFrom(filesvdir._get_digest())
size += filesvdir.get_size()
@@ -189,10 +202,15 @@ class Artifact:
# Store build tree
if sandbox_build_dir:
buildtreevdir = CasBasedDirectory(cas_cache=self._cas)
- buildtreevdir.import_files(sandbox_build_dir)
+ buildtreevdir.import_files(sandbox_build_dir, properties=properties)
artifact.buildtree.CopyFrom(buildtreevdir._get_digest())
size += buildtreevdir.get_size()
+ # Store sources
+ if sourcesvdir:
+ artifact.sources.CopyFrom(sourcesvdir._get_digest())
+ size += sourcesvdir.get_size()
+
os.makedirs(os.path.dirname(os.path.join(self._artifactdir, element.get_artifact_name())), exist_ok=True)
keys = utils._deduplicate([self._cache_key, self._weak_cache_key])
for key in keys:
@@ -234,6 +252,22 @@ class Artifact:
artifact = self._get_proto()
return bool(str(artifact.buildtree))
+ # cached_sources()
+ #
+ # Check if artifact is cached with sources.
+ #
+ # Returns:
+ # (bool): True if artifact is cached with sources, False if sources
+ # are not available.
+ #
+ def cached_sources(self):
+
+ sources_digest = self._get_field_digest("sources")
+ if sources_digest:
+ return self._cas.contains_directory(sources_digest, with_files=True)
+ else:
+ return False
+
# load_public_data():
#
# Loads the public data from the cached artifact
@@ -289,25 +323,6 @@ class Artifact:
return self._metadata_keys
- # get_metadata_dependencies():
- #
- # Retrieve the hash of dependency keys from the given artifact.
- #
- # Returns:
- # (dict): A dictionary of element names and their keys
- #
- def get_metadata_dependencies(self):
-
- if self._metadata_dependencies is not None:
- return self._metadata_dependencies
-
- # Extract proto
- artifact = self._get_proto()
-
- self._metadata_dependencies = {dep.element_name: dep.cache_key for dep in artifact.build_deps}
-
- return self._metadata_dependencies
-
# get_metadata_workspaced():
#
# Retrieve the hash of dependency from the given artifact.
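The new cached_sources()/get_sources() pair is what later code uses to decide whether the previous build's sources are available for an incremental build (see __get_last_build_artifact() in element.py further down). A minimal sketch of the intended call pattern, assuming an already constructed Artifact instance; the helper name is illustrative and not part of this merge request:

    def sources_directory_or_none(artifact):
        # Only read the staged-sources directory if the sources were
        # actually cached alongside the artifact (the field is optional).
        if artifact.cached_sources():
            return artifact.get_sources()
        return None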
diff --git a/src/buildstream/_artifactcache.py b/src/buildstream/_artifactcache.py
index 69a65833c..f1648e947 100644
--- a/src/buildstream/_artifactcache.py
+++ b/src/buildstream/_artifactcache.py
@@ -26,7 +26,6 @@ from ._exceptions import ArtifactError, CASError, CacheError, CASRemoteError, Re
from ._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc, artifact_pb2, artifact_pb2_grpc
from ._remote import BaseRemote
-from ._artifact import Artifact
from . import utils
@@ -205,32 +204,6 @@ class ArtifactCache(BaseCache):
except CacheError as e:
raise ArtifactError("{}".format(e)) from e
- # diff():
- #
- # Return a list of files that have been added or modified between
- # the artifacts described by key_a and key_b. This expects the
- # provided keys to be strong cache keys
- #
- # Args:
- # element (Element): The element whose artifacts to compare
- # key_a (str): The first artifact strong key
- # key_b (str): The second artifact strong key
- #
- def diff(self, element, key_a, key_b):
- context = self.context
- artifact_a = Artifact(element, context, strong_key=key_a)
- artifact_b = Artifact(element, context, strong_key=key_b)
- digest_a = artifact_a._get_proto().files
- digest_b = artifact_b._get_proto().files
-
- added = []
- removed = []
- modified = []
-
- self.cas.diff_trees(digest_a, digest_b, added=added, removed=removed, modified=modified)
-
- return modified, removed, added
-
# push():
#
# Push committed artifact to remote repository.
diff --git a/src/buildstream/_cas/cascache.py b/src/buildstream/_cas/cascache.py
index 083c8e8dc..c733bacac 100644
--- a/src/buildstream/_cas/cascache.py
+++ b/src/buildstream/_cas/cascache.py
@@ -475,74 +475,6 @@ class CASCache:
if dirnode.name not in excluded_subdirs:
yield from self.required_blobs_for_directory(dirnode.digest)
- def diff_trees(self, tree_a, tree_b, *, added, removed, modified, path=""):
- dir_a = remote_execution_pb2.Directory()
- dir_b = remote_execution_pb2.Directory()
-
- if tree_a:
- with open(self.objpath(tree_a), "rb") as f:
- dir_a.ParseFromString(f.read())
- if tree_b:
- with open(self.objpath(tree_b), "rb") as f:
- dir_b.ParseFromString(f.read())
-
- a = 0
- b = 0
- while a < len(dir_a.files) or b < len(dir_b.files):
- if b < len(dir_b.files) and (a >= len(dir_a.files) or dir_a.files[a].name > dir_b.files[b].name):
- added.append(os.path.join(path, dir_b.files[b].name))
- b += 1
- elif a < len(dir_a.files) and (b >= len(dir_b.files) or dir_b.files[b].name > dir_a.files[a].name):
- removed.append(os.path.join(path, dir_a.files[a].name))
- a += 1
- else:
- # File exists in both directories
- if dir_a.files[a].digest.hash != dir_b.files[b].digest.hash:
- modified.append(os.path.join(path, dir_a.files[a].name))
- a += 1
- b += 1
-
- a = 0
- b = 0
- while a < len(dir_a.directories) or b < len(dir_b.directories):
- if b < len(dir_b.directories) and (
- a >= len(dir_a.directories) or dir_a.directories[a].name > dir_b.directories[b].name
- ):
- self.diff_trees(
- None,
- dir_b.directories[b].digest,
- added=added,
- removed=removed,
- modified=modified,
- path=os.path.join(path, dir_b.directories[b].name),
- )
- b += 1
- elif a < len(dir_a.directories) and (
- b >= len(dir_b.directories) or dir_b.directories[b].name > dir_a.directories[a].name
- ):
- self.diff_trees(
- dir_a.directories[a].digest,
- None,
- added=added,
- removed=removed,
- modified=modified,
- path=os.path.join(path, dir_a.directories[a].name),
- )
- a += 1
- else:
- # Subdirectory exists in both directories
- if dir_a.directories[a].digest.hash != dir_b.directories[b].digest.hash:
- self.diff_trees(
- dir_a.directories[a].digest,
- dir_b.directories[b].digest,
- added=added,
- removed=removed,
- modified=modified,
- path=os.path.join(path, dir_a.directories[a].name),
- )
- a += 1
- b += 1
-
################################################
# Local Private Methods #
################################################
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index 3200920b9..35de1b97d 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -456,7 +456,7 @@ class Loader:
if workspace:
workspace_node = {"kind": "workspace"}
workspace_node["path"] = workspace.get_absolute_path()
- workspace_node["last_successful"] = str(workspace.to_dict().get("last_successful", ""))
+ workspace_node["last_build"] = str(workspace.to_dict().get("last_build", ""))
node[Symbol.SOURCES] = [workspace_node]
skip_workspace = False
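With this change the loader records the last attempted build rather than the last successful one when it synthesizes the workspace source node for a workspaced element. A sketch of the node it now generates, with hypothetical path and key values (the key is an empty string when the workspace has never been built):

    workspace_node = {
        "kind": "workspace",
        "path": "/home/user/workspaces/hello",  # hypothetical absolute workspace path
        "last_build": "4a6f...",                # hypothetical cache key of the last attempted build
    }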
diff --git a/src/buildstream/_protos/buildstream/v2/artifact.proto b/src/buildstream/_protos/buildstream/v2/artifact.proto
index ead792e15..87f66cc95 100644
--- a/src/buildstream/_protos/buildstream/v2/artifact.proto
+++ b/src/buildstream/_protos/buildstream/v2/artifact.proto
@@ -75,6 +75,9 @@ message Artifact {
// digest of a directory
build.bazel.remote.execution.v2.Digest buildtree = 12; // optional
+
+ // digest of a directory
+ build.bazel.remote.execution.v2.Digest sources = 13; // optional
}
message GetArtifactRequest {
@@ -86,4 +89,4 @@ message UpdateArtifactRequest {
string instance_name = 1;
string cache_key = 2;
Artifact artifact = 3;
-}
\ No newline at end of file
+}
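The new optional sources field (number 13) stores the CAS digest of the staged sources next to the existing files and buildtree digests. A minimal sketch of setting and checking it through the generated bindings, assuming BuildStream's generated protos are importable; the digest value is a placeholder:

    from buildstream._protos.buildstream.v2 import artifact_pb2
    from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2

    artifact = artifact_pb2.Artifact()
    # Placeholder digest; in practice this comes from CasBasedDirectory._get_digest()
    artifact.sources.CopyFrom(remote_execution_pb2.Digest(hash="0" * 64, size_bytes=0))
    assert artifact.HasField("sources")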
diff --git a/src/buildstream/_protos/buildstream/v2/artifact_pb2.py b/src/buildstream/_protos/buildstream/v2/artifact_pb2.py
index 6ea9c4e10..45327f9e8 100644
--- a/src/buildstream/_protos/buildstream/v2/artifact_pb2.py
+++ b/src/buildstream/_protos/buildstream/v2/artifact_pb2.py
@@ -22,7 +22,7 @@ DESCRIPTOR = _descriptor.FileDescriptor(
package='buildstream.v2',
syntax='proto3',
serialized_options=None,
- serialized_pb=_b('\n\x1d\x62uildstream/v2/artifact.proto\x12\x0e\x62uildstream.v2\x1a\x36\x62uild/bazel/remote/execution/v2/remote_execution.proto\x1a\x1cgoogle/api/annotations.proto\"\xf4\x04\n\x08\x41rtifact\x12\x0f\n\x07version\x18\x01 \x01(\x05\x12\x15\n\rbuild_success\x18\x02 \x01(\x08\x12\x13\n\x0b\x62uild_error\x18\x03 \x01(\t\x12\x1b\n\x13\x62uild_error_details\x18\x04 \x01(\t\x12\x12\n\nstrong_key\x18\x05 \x01(\t\x12\x10\n\x08weak_key\x18\x06 \x01(\t\x12\x16\n\x0ewas_workspaced\x18\x07 \x01(\x08\x12\x36\n\x05\x66iles\x18\x08 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x37\n\nbuild_deps\x18\t \x03(\x0b\x32#.buildstream.v2.Artifact.Dependency\x12<\n\x0bpublic_data\x18\n \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12.\n\x04logs\x18\x0b \x03(\x0b\x32 .buildstream.v2.Artifact.LogFile\x12:\n\tbuildtree\x18\x0c \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x1a\x63\n\nDependency\x12\x14\n\x0cproject_name\x18\x01 \x01(\t\x12\x14\n\x0c\x65lement_name\x18\x02 \x01(\t\x12\x11\n\tcache_key\x18\x03 \x01(\t\x12\x16\n\x0ewas_workspaced\x18\x04 \x01(\x08\x1aP\n\x07LogFile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\">\n\x12GetArtifactRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x11\n\tcache_key\x18\x02 \x01(\t\"m\n\x15UpdateArtifactRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x11\n\tcache_key\x18\x02 \x01(\t\x12*\n\x08\x61rtifact\x18\x03 \x01(\x0b\x32\x18.buildstream.v2.Artifact2\xb5\x01\n\x0f\x41rtifactService\x12M\n\x0bGetArtifact\x12\".buildstream.v2.GetArtifactRequest\x1a\x18.buildstream.v2.Artifact\"\x00\x12S\n\x0eUpdateArtifact\x12%.buildstream.v2.UpdateArtifactRequest\x1a\x18.buildstream.v2.Artifact\"\x00\x62\x06proto3')
+ serialized_pb=_b('\n\x1d\x62uildstream/v2/artifact.proto\x12\x0e\x62uildstream.v2\x1a\x36\x62uild/bazel/remote/execution/v2/remote_execution.proto\x1a\x1cgoogle/api/annotations.proto\"\xae\x05\n\x08\x41rtifact\x12\x0f\n\x07version\x18\x01 \x01(\x05\x12\x15\n\rbuild_success\x18\x02 \x01(\x08\x12\x13\n\x0b\x62uild_error\x18\x03 \x01(\t\x12\x1b\n\x13\x62uild_error_details\x18\x04 \x01(\t\x12\x12\n\nstrong_key\x18\x05 \x01(\t\x12\x10\n\x08weak_key\x18\x06 \x01(\t\x12\x16\n\x0ewas_workspaced\x18\x07 \x01(\x08\x12\x36\n\x05\x66iles\x18\x08 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x37\n\nbuild_deps\x18\t \x03(\x0b\x32#.buildstream.v2.Artifact.Dependency\x12<\n\x0bpublic_data\x18\n \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12.\n\x04logs\x18\x0b \x03(\x0b\x32 .buildstream.v2.Artifact.LogFile\x12:\n\tbuildtree\x18\x0c \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x38\n\x07sources\x18\r \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x1a\x63\n\nDependency\x12\x14\n\x0cproject_name\x18\x01 \x01(\t\x12\x14\n\x0c\x65lement_name\x18\x02 \x01(\t\x12\x11\n\tcache_key\x18\x03 \x01(\t\x12\x16\n\x0ewas_workspaced\x18\x04 \x01(\x08\x1aP\n\x07LogFile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\">\n\x12GetArtifactRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x11\n\tcache_key\x18\x02 \x01(\t\"m\n\x15UpdateArtifactRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x11\n\tcache_key\x18\x02 \x01(\t\x12*\n\x08\x61rtifact\x18\x03 \x01(\x0b\x32\x18.buildstream.v2.Artifact2\xb5\x01\n\x0f\x41rtifactService\x12M\n\x0bGetArtifact\x12\".buildstream.v2.GetArtifactRequest\x1a\x18.buildstream.v2.Artifact\"\x00\x12S\n\x0eUpdateArtifact\x12%.buildstream.v2.UpdateArtifactRequest\x1a\x18.buildstream.v2.Artifact\"\x00\x62\x06proto3')
,
dependencies=[build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
@@ -76,8 +76,8 @@ _ARTIFACT_DEPENDENCY = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=583,
- serialized_end=682,
+ serialized_start=641,
+ serialized_end=740,
)
_ARTIFACT_LOGFILE = _descriptor.Descriptor(
@@ -113,8 +113,8 @@ _ARTIFACT_LOGFILE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=684,
- serialized_end=764,
+ serialized_start=742,
+ serialized_end=822,
)
_ARTIFACT = _descriptor.Descriptor(
@@ -208,6 +208,13 @@ _ARTIFACT = _descriptor.Descriptor(
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='sources', full_name='buildstream.v2.Artifact.sources', index=12,
+ number=13, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -221,7 +228,7 @@ _ARTIFACT = _descriptor.Descriptor(
oneofs=[
],
serialized_start=136,
- serialized_end=764,
+ serialized_end=822,
)
@@ -258,8 +265,8 @@ _GETARTIFACTREQUEST = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=766,
- serialized_end=828,
+ serialized_start=824,
+ serialized_end=886,
)
@@ -303,8 +310,8 @@ _UPDATEARTIFACTREQUEST = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=830,
- serialized_end=939,
+ serialized_start=888,
+ serialized_end=997,
)
_ARTIFACT_DEPENDENCY.containing_type = _ARTIFACT
@@ -315,6 +322,7 @@ _ARTIFACT.fields_by_name['build_deps'].message_type = _ARTIFACT_DEPENDENCY
_ARTIFACT.fields_by_name['public_data'].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
_ARTIFACT.fields_by_name['logs'].message_type = _ARTIFACT_LOGFILE
_ARTIFACT.fields_by_name['buildtree'].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
+_ARTIFACT.fields_by_name['sources'].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
_UPDATEARTIFACTREQUEST.fields_by_name['artifact'].message_type = _ARTIFACT
DESCRIPTOR.message_types_by_name['Artifact'] = _ARTIFACT
DESCRIPTOR.message_types_by_name['GetArtifactRequest'] = _GETARTIFACTREQUEST
@@ -366,8 +374,8 @@ _ARTIFACTSERVICE = _descriptor.ServiceDescriptor(
file=DESCRIPTOR,
index=0,
serialized_options=None,
- serialized_start=942,
- serialized_end=1123,
+ serialized_start=1000,
+ serialized_end=1181,
methods=[
_descriptor.MethodDescriptor(
name='GetArtifact',
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 18297c2e4..92b9f5113 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -930,8 +930,7 @@ class Stream:
workspace_path = workspace.get_absolute_path()
if soft:
- workspace.prepared = False
- workspace.last_successful = None
+ workspace.last_build = None
self._message(
MessageType.INFO, "Reset workspace state for {} at: {}".format(element.name, workspace_path)
)
diff --git a/src/buildstream/_workspaces.py b/src/buildstream/_workspaces.py
index 5c3b4af8f..a54a17ff1 100644
--- a/src/buildstream/_workspaces.py
+++ b/src/buildstream/_workspaces.py
@@ -21,12 +21,11 @@ import os
from . import utils
from . import _yaml
-from .node import MappingNode, ScalarNode
from ._exceptions import LoadError
from .exceptions import LoadErrorReason
-BST_WORKSPACE_FORMAT_VERSION = 3
+BST_WORKSPACE_FORMAT_VERSION = 4
BST_WORKSPACE_PROJECT_FORMAT_VERSION = 1
WORKSPACE_PROJECT_FILE = ".bstproject.yaml"
@@ -233,24 +232,19 @@ class WorkspaceProjectCache:
# An object to contain various helper functions and data required for
# workspaces.
#
-# last_successful, path and running_files are intended to be public
+# last_build and path are intended to be public
# properties, but may be best accessed using this class's helper
# methods.
#
# Args:
# toplevel_project (Project): Top project. Will be used for resolving relative workspace paths.
# path (str): The path that should host this workspace
-# last_successful (str): The key of the last successful build of this workspace
-# running_files (dict): A dict mapping dependency elements to files
-# changed between failed builds. Should be
-# made obsolete with failed build artifacts.
+# last_build (str): The key of the last attempted build of this workspace
#
class Workspace:
- def __init__(self, toplevel_project, *, last_successful=None, path=None, prepared=False, running_files=None):
- self.prepared = prepared
- self.last_successful = last_successful
+ def __init__(self, toplevel_project, *, last_build=None, path=None):
+ self.last_build = last_build
self._path = path
- self.running_files = running_files if running_files is not None else {}
self._toplevel_project = toplevel_project
self._key = None
@@ -263,9 +257,9 @@ class Workspace:
# (dict) A dict representation of the workspace
#
def to_dict(self):
- ret = {"prepared": self.prepared, "path": self._path, "running_files": self.running_files}
- if self.last_successful is not None:
- ret["last_successful"] = self.last_successful
+ ret = {"path": self._path}
+ if self.last_build is not None:
+ ret["last_build"] = self.last_build
return ret
# from_dict():
@@ -300,45 +294,6 @@ class Workspace:
def differs(self, other):
return self.to_dict() != other.to_dict()
- # stage()
- #
- # Stage the workspace to the given directory.
- #
- # Args:
- # directory (str) - The directory into which to stage this workspace
- #
- def stage(self, directory):
- fullpath = self.get_absolute_path()
- if os.path.isdir(fullpath):
- utils.copy_files(fullpath, directory)
- else:
- destfile = os.path.join(directory, os.path.basename(self.get_absolute_path()))
- utils.safe_copy(fullpath, destfile)
-
- # add_running_files()
- #
- # Append a list of files to the running_files for the given
- # dependency. Duplicate files will be ignored.
- #
- # Args:
- # dep_name (str) - The dependency name whose files to append to
- # files (str) - A list of files to append
- #
- def add_running_files(self, dep_name, files):
- if dep_name in self.running_files:
- # ruamel.py cannot serialize sets in python3.4
- to_add = set(files) - set(self.running_files[dep_name])
- self.running_files[dep_name].extend(to_add)
- else:
- self.running_files[dep_name] = list(files)
-
- # clear_running_files()
- #
- # Clear all running files associated with this workspace.
- #
- def clear_running_files(self):
- self.running_files = {}
-
# get_absolute_path():
#
# Returns: The absolute path of the element's workspace.
@@ -526,38 +481,17 @@ class Workspaces:
"Format version is not an integer in workspace configuration", LoadErrorReason.INVALID_DATA
)
- if version == 0:
- # Pre-versioning format can be of two forms
- for element, config in workspaces.items():
- config_type = type(config)
-
- if config_type is ScalarNode:
- pass
-
- elif config_type is MappingNode:
- sources = list(config.values())
- if len(sources) > 1:
- detail = (
- "There are multiple workspaces open for '{}'.\n"
- + "This is not supported anymore.\n"
- + "Please remove this element from '{}'."
- )
- raise LoadError(detail.format(element, self._get_filename()), LoadErrorReason.INVALID_DATA)
-
- workspaces[element] = sources[0]
-
- else:
- raise LoadError("Workspace config is in unexpected format.", LoadErrorReason.INVALID_DATA)
-
- res = {
- element: Workspace(self._toplevel_project, path=config.as_str())
- for element, config in workspaces.items()
- }
+ if version < 4:
+ # bst 1.x workspaces do not separate source and build files.
+ raise LoadError(
+ "Workspace configuration format version {} not supported."
+ "Please recreate this workspace.".format(version),
+ LoadErrorReason.INVALID_DATA,
+ )
- elif 1 <= version <= BST_WORKSPACE_FORMAT_VERSION:
+ if 4 <= version <= BST_WORKSPACE_FORMAT_VERSION:
workspaces = workspaces.get_mapping("workspaces", default={})
res = {element: self._load_workspace(node) for element, node in workspaces.items()}
-
else:
raise LoadError(
"Workspace configuration format version {} not supported."
@@ -580,15 +514,9 @@ class Workspaces:
# (Workspace): A newly instantiated Workspace
#
def _load_workspace(self, node):
- running_files = node.get_mapping("running_files", default=None)
- if running_files:
- running_files = running_files.strip_node_info()
-
dictionary = {
- "prepared": node.get_bool("prepared", default=False),
"path": node.get_str("path"),
- "last_successful": node.get_str("last_successful", default=None),
- "running_files": running_files,
+ "last_build": node.get_str("last_build", default=None),
}
return Workspace.from_dict(self._toplevel_project, dictionary)
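Workspace configuration format version 4 keeps only path and the new last_build key; the prepared flag and running_files map are gone, and any pre-4 file is now rejected with a request to recreate the workspace. A sketch of what a version-4 .bst/workspaces.yml corresponds to, expressed as the dictionary form used by the frontend tests below (element name, path and key are hypothetical):

    workspace_cfg = {
        "format-version": 4,
        "workspaces": {
            "alpha.bst": {
                "path": "/workspaces/bravo",  # workspace directory
                "last_build": "some_key",     # omitted until a build has been attempted
            },
        },
    }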
diff --git a/src/buildstream/buildelement.py b/src/buildstream/buildelement.py
index f04d3b0dc..aeafbcdda 100644
--- a/src/buildstream/buildelement.py
+++ b/src/buildstream/buildelement.py
@@ -262,10 +262,34 @@ class BuildElement(Element):
def prepare(self, sandbox):
commands = self.__commands["configure-commands"]
- if commands:
- with sandbox.batch(SandboxFlags.ROOT_READ_ONLY, label="Running configure-commands"):
- for cmd in commands:
- self.__run_command(sandbox, cmd)
+ if not commands:
+ # No configure commands, nothing to do.
+ return
+
+ # We need to ensure that the prepare() method is only called
+ # once in workspaces, because the changes will persist across
+ # incremental builds - not desirable, for example, in the case
+ # of autotools' `./configure`.
+ marker_filename = ".bst-prepared"
+
+ if self._get_workspace():
+ # We use an empty file as a marker whether prepare() has already
+ # been called in a previous build.
+
+ vdir = sandbox.get_virtual_directory()
+ buildroot = self.get_variable("build-root")
+ buildroot_vdir = vdir.descend(*buildroot.lstrip(os.sep).split(os.sep))
+
+ if buildroot_vdir._exists(marker_filename, follow_symlinks=True):
+ # Already prepared
+ return
+
+ with sandbox.batch(SandboxFlags.ROOT_READ_ONLY, label="Running configure-commands"):
+ for cmd in commands:
+ self.__run_command(sandbox, cmd)
+
+ if self._get_workspace():
+ sandbox._create_empty_file(marker_filename)
def generate_script(self):
script = ""
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index cb4dc5450..071d085b8 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -75,7 +75,6 @@ Class Reference
import os
import re
import stat
-import time
import copy
from collections import OrderedDict
import contextlib
@@ -255,6 +254,7 @@ class Element(Plugin):
self.__cached_remotely = None # Whether the element is cached remotely
# List of Sources
self.__sources = [] # type: List[Source]
+ self.__sources_vdir = None # Directory with staged sources
self.__weak_cache_key = None # Our cached weak cache key
self.__strict_cache_key = None # Our cached cache key for strict builds
self.__artifacts = context.artifactcache # Artifact cache
@@ -626,8 +626,7 @@ class Element(Plugin):
path: str = None,
include: Optional[List[str]] = None,
exclude: Optional[List[str]] = None,
- orphans: bool = True,
- update_mtimes: Optional[List[str]] = None
+ orphans: bool = True
) -> FileListResult:
"""Stage this element's output artifact in the sandbox
@@ -642,7 +641,6 @@ class Element(Plugin):
include: An optional list of domains to include files from
exclude: An optional list of domains to exclude files from
orphans: Whether to include files not spoken for by split domains
- update_mtimes: An optional list of files whose mtimes to set to the current time.
Raises:
(:class:`.ElementError`): If the element has not yet produced an artifact.
@@ -672,9 +670,6 @@ class Element(Plugin):
)
raise ElementError("No artifacts to stage", detail=detail, reason="uncached-checkout-attempt")
- if update_mtimes is None:
- update_mtimes = []
-
# Time to use the artifact, check once more that it's there
self.__assert_cached()
@@ -690,28 +685,10 @@ class Element(Plugin):
split_filter = self.__split_filter_func(include, exclude, orphans)
- # We must not hardlink files whose mtimes we want to update
- if update_mtimes:
-
- def link_filter(path):
- return (split_filter is None or split_filter(path)) and path not in update_mtimes
-
- def copy_filter(path):
- return (split_filter is None or split_filter(path)) and path in update_mtimes
-
- else:
- link_filter = split_filter
-
result = vstagedir.import_files(
- files_vdir, filter_callback=link_filter, report_written=True, can_link=True
+ files_vdir, filter_callback=split_filter, report_written=True, can_link=True
)
- if update_mtimes:
- copy_result = vstagedir.import_files(
- files_vdir, filter_callback=copy_filter, report_written=True, update_mtime=time.time()
- )
- result = result.combine(copy_result)
-
return result
def stage_dependency_artifacts(
@@ -747,59 +724,9 @@ class Element(Plugin):
ignored = {}
overlaps = OrderedDict() # type: OrderedDict[str, List[str]]
files_written = {} # type: Dict[str, List[str]]
- old_dep_keys = None
- workspace = self._get_workspace()
- context = self._get_context()
-
- if self.__can_build_incrementally() and workspace.last_successful:
-
- # Try to perform an incremental build if the last successful
- # build is still in the artifact cache
- #
- if self.__artifacts.contains(self, workspace.last_successful):
- last_successful = Artifact(self, context, strong_key=workspace.last_successful)
- # Get a dict of dependency strong keys
- old_dep_keys = last_successful.get_metadata_dependencies()
- else:
- # Last successful build is no longer in the artifact cache,
- # so let's reset it and perform a full build now.
- workspace.prepared = False
- workspace.last_successful = None
-
- self.info("Resetting workspace state, last successful build is no longer in the cache")
-
- # In case we are staging in the main process
- if utils._is_main_process():
- context.get_workspaces().save_config()
for dep in self.dependencies(scope):
- # If we are workspaced, and we therefore perform an
- # incremental build, we must ensure that we update the mtimes
- # of any files created by our dependencies since the last
- # successful build.
- to_update = None
- if workspace and old_dep_keys:
- dep.__assert_cached()
-
- if dep.name in old_dep_keys:
- key_new = dep._get_cache_key()
- key_old = old_dep_keys[dep.name]
-
- # We only need to worry about modified and added
- # files, since removed files will be picked up by
- # build systems anyway.
- to_update, _, added = self.__artifacts.diff(dep, key_old, key_new)
- workspace.add_running_files(dep.name, to_update + added)
- to_update.extend(workspace.running_files[dep.name])
-
- # In case we are running `bst shell`, this happens in the
- # main process and we need to update the workspace config
- if utils._is_main_process():
- context.get_workspaces().save_config()
-
- result = dep.stage_artifact(
- sandbox, path=path, include=include, exclude=exclude, orphans=orphans, update_mtimes=to_update
- )
+ result = dep.stage_artifact(sandbox, path=path, include=include, exclude=exclude, orphans=orphans)
if result.overwritten:
for overwrite in result.overwritten:
# Completely new overwrite
@@ -1399,11 +1326,6 @@ class Element(Plugin):
#
def _stage_sources_in_sandbox(self, sandbox, directory):
- # Only artifact caches that implement diff() are allowed to
- # perform incremental builds.
- if self.__can_build_incrementally():
- sandbox.mark_directory(directory)
-
# Stage all sources that need to be copied
sandbox_vroot = sandbox.get_virtual_directory()
host_vdirectory = sandbox_vroot.descend(*directory.lstrip(os.sep).split(os.sep), create=True)
@@ -1467,6 +1389,16 @@ class Element(Plugin):
reason="import-source-files-fail",
)
+ self.__sources_vdir = import_dir
+
+ # incremental builds should merge the source into the last artifact before staging
+ last_build_artifact = self.__get_last_build_artifact()
+ if last_build_artifact:
+ self.info("Incremental build")
+ last_sources = last_build_artifact.get_sources()
+ import_dir = last_build_artifact.get_buildtree()
+ import_dir._apply_changes(last_sources, self.__sources_vdir)
+
# Set update_mtime to ensure deterministic mtime of sources at build time
with utils._deterministic_umask():
vdirectory.import_files(import_dir, update_mtime=BST_ARBITRARY_TIMESTAMP)
@@ -1577,7 +1509,7 @@ class Element(Plugin):
self.__update_cache_key_non_strict()
self._update_ready_for_runtime_and_cached()
- if self._get_workspace() and self._cached_success():
+ if self._get_workspace() and self._cached():
assert utils._is_main_process(), "Attempted to save workspace configuration from child process"
#
# Note that this block can only happen in the
@@ -1589,8 +1521,7 @@ class Element(Plugin):
#
key = self._get_cache_key()
workspace = self._get_workspace()
- workspace.last_successful = key
- workspace.clear_running_files()
+ workspace.last_build = key
self._get_context().get_workspaces().save_config()
# _assemble():
@@ -1676,13 +1607,13 @@ class Element(Plugin):
e.sandbox = True
self.__set_build_result(success=False, description=str(e), detail=e.detail)
- self._cache_artifact(rootdir, sandbox, e.collect)
+ self._cache_artifact(sandbox, e.collect)
raise
else:
- return self._cache_artifact(rootdir, sandbox, collect)
+ return self._cache_artifact(sandbox, collect)
- def _cache_artifact(self, rootdir, sandbox, collect):
+ def _cache_artifact(self, sandbox, collect):
context = self._get_context()
buildresult = self.__build_result
@@ -1690,6 +1621,7 @@ class Element(Plugin):
sandbox_vroot = sandbox.get_virtual_directory()
collectvdir = None
sandbox_build_dir = None
+ sourcesvdir = None
cache_buildtrees = context.cache_buildtrees
build_success = buildresult[0]
@@ -1701,7 +1633,7 @@ class Element(Plugin):
# with an empty buildtreedir regardless of this configuration.
if cache_buildtrees == _CacheBuildTrees.ALWAYS or (
- cache_buildtrees == _CacheBuildTrees.AUTO and not build_success
+ cache_buildtrees == _CacheBuildTrees.AUTO and (not build_success or self._get_workspace())
):
try:
sandbox_build_dir = sandbox_vroot.descend(
@@ -1714,6 +1646,8 @@ class Element(Plugin):
# if the directory could not be found.
pass
+ sourcesvdir = self.__sources_vdir
+
if collect is not None:
try:
collectvdir = sandbox_vroot.descend(*collect.lstrip(os.sep).split(os.sep))
@@ -1725,7 +1659,7 @@ class Element(Plugin):
self.__update_cache_key_non_strict()
with self.timed_activity("Caching artifact"):
- artifact_size = self.__artifact.cache(rootdir, sandbox_build_dir, collectvdir, buildresult, publicdata)
+ artifact_size = self.__artifact.cache(sandbox_build_dir, collectvdir, sourcesvdir, buildresult, publicdata)
if collect is not None and collectvdir is None:
raise ElementError(
@@ -2368,16 +2302,58 @@ class Element(Plugin):
self.__is_resolved = True
self.__update_cache_keys()
- # __can_build_incrementally()
+ # __get_dependency_refs()
+ #
+ # Retrieve the artifact refs of the element's dependencies
+ #
+ # Args:
+ # scope (Scope): The scope of dependencies
+ #
+ # Returns:
+ # (list [str]): A list of refs of all dependencies in staging order.
+ #
+ def __get_dependency_refs(self, scope):
+ return [
+ os.path.join(dep.project_name, _get_normal_name(dep.name), dep._get_cache_key())
+ for dep in self.dependencies(scope)
+ ]
+
+ # __get_last_build_artifact()
#
- # Check if the element can be built incrementally, this
- # is used to decide how to stage things
+ # Return the Artifact of the previous build of this element,
+ # if incremental build is available.
#
# Returns:
- # (bool): Whether this element can be built incrementally
+ # (Artifact): The Artifact of the previous build or None
#
- def __can_build_incrementally(self):
- return bool(self._get_workspace())
+ def __get_last_build_artifact(self):
+ workspace = self._get_workspace()
+ if not workspace:
+ # Currently incremental builds are only supported for workspaces
+ return None
+
+ if not workspace.last_build:
+ return None
+
+ artifact = Artifact(self, self._get_context(), strong_key=workspace.last_build)
+
+ if not artifact.cached():
+ return None
+
+ if not artifact.cached_buildtree():
+ return None
+
+ if not artifact.cached_sources():
+ return None
+
+ # Don't perform an incremental build if there has been a change in
+ # build dependencies.
+ old_dep_refs = artifact.get_dependency_refs(Scope.BUILD)
+ new_dep_refs = self.__get_dependency_refs(Scope.BUILD)
+ if old_dep_refs != new_dep_refs:
+ return None
+
+ return artifact
# __configure_sandbox():
#
@@ -2393,11 +2369,6 @@ class Element(Plugin):
# Internal method for calling public abstract prepare() method.
#
def __prepare(self, sandbox):
- # FIXME:
- # We need to ensure that the prepare() method is only called
- # once in workspaces, because the changes will persist across
- # incremental builds - not desirable, for example, in the case
- # of autotools' `./configure`.
self.prepare(sandbox)
# __preflight():
@@ -2519,6 +2490,11 @@ class Element(Plugin):
project = self._get_project()
platform = context.platform
+ if self._get_workspace():
+ output_node_properties = ["MTime"]
+ else:
+ output_node_properties = None
+
if directory is not None and allow_remote and self.__use_remote_execution():
if not self.BST_VIRTUAL_DIRECTORY:
@@ -2545,6 +2521,7 @@ class Element(Plugin):
bare_directory=bare_directory,
allow_real_directory=False,
output_files_required=output_files_required,
+ output_node_properties=output_node_properties,
)
yield sandbox
@@ -2560,6 +2537,7 @@ class Element(Plugin):
config=config,
bare_directory=bare_directory,
allow_real_directory=not self.BST_VIRTUAL_DIRECTORY,
+ output_node_properties=output_node_properties,
)
yield sandbox
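The heart of the reimplementation is in _stage_sources_in_sandbox(): when a usable previous build is found, the freshly staged workspace sources are merged onto that build's cached buildtree via import_dir._apply_changes(last_sources, self.__sources_vdir), so unchanged outputs keep their cached mtimes and tools like make only rebuild what the user touched. A dictionary-based sketch of that overlay idea, purely illustrative and independent of the Directory API (the real _apply_changes() operates on virtual directories and also carries file metadata):

    def apply_changes(buildtree, old_sources, new_sources):
        # Directories modelled as {path: content} dicts, illustrative only.
        # Files added or modified since the last build replace the buildtree copy.
        for path, content in new_sources.items():
            if old_sources.get(path) != content:
                buildtree[path] = content
        # Files removed from the sources disappear from the buildtree too.
        for path in old_sources:
            if path not in new_sources:
                buildtree.pop(path, None)
        return buildtree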
diff --git a/src/buildstream/plugins/sources/workspace.py b/src/buildstream/plugins/sources/workspace.py
index f1ad2eead..13e2bb37d 100644
--- a/src/buildstream/plugins/sources/workspace.py
+++ b/src/buildstream/plugins/sources/workspace.py
@@ -55,16 +55,16 @@ class WorkspaceSource(Source):
self.__unique_key = None
# the digest of the Directory following the import of the workspace
self.__digest = None
- # the cache key of the last successful workspace
- self.__last_successful = None
+ # the cache key of the last workspace build
+ self.__last_build = None
def track(self) -> SourceRef: # pylint: disable=arguments-differ
return None
def configure(self, node: MappingNode) -> None:
- node.validate_keys(["path", "last_successful", "kind"])
+ node.validate_keys(["path", "last_build", "kind"])
self.path = node.get_str("path")
- self.__last_successful = node.get_str("last_successful")
+ self.__last_build = node.get_str("last_build")
def preflight(self) -> None:
pass # pragma: nocover
diff --git a/src/buildstream/sandbox/_sandboxreapi.py b/src/buildstream/sandbox/_sandboxreapi.py
index 132257b9c..ee7fc72ae 100644
--- a/src/buildstream/sandbox/_sandboxreapi.py
+++ b/src/buildstream/sandbox/_sandboxreapi.py
@@ -29,6 +29,11 @@ from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
# the Remote Execution API.
#
class SandboxREAPI(Sandbox):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ self._output_node_properties = kwargs.get("output_node_properties")
+
def _use_cas_based_directory(self):
# Always use CasBasedDirectory for REAPI
return True
@@ -78,6 +83,8 @@ class SandboxREAPI(Sandbox):
command_proto = self._create_command(command, cwd, env, read_write_directories)
command_digest = cascache.add_object(buffer=command_proto.SerializeToString())
action = remote_execution_pb2.Action(command_digest=command_digest, input_root_digest=input_root_digest)
+ if self._output_node_properties:
+ action.output_node_properties.extend(self._output_node_properties)
action_result = self._execute_action(action, flags) # pylint: disable=assignment-from-no-return
@@ -221,5 +228,5 @@ class _SandboxREAPIBatch(_SandboxBatch):
quoted_label = shlex.quote("'{}'".format(label))
self.script += " || (echo Command {} failed with exitcode $? >&2 ; exit 1)\n".format(quoted_label)
- def execute_call(self, call):
- raise SandboxError("SandboxRemote does not support callbacks in command batches")
+ def create_empty_file(self, name):
+ self.script += "touch -- {}\n".format(shlex.quote(name))
diff --git a/src/buildstream/sandbox/sandbox.py b/src/buildstream/sandbox/sandbox.py
index e91e890bb..b82e2da59 100644
--- a/src/buildstream/sandbox/sandbox.py
+++ b/src/buildstream/sandbox/sandbox.py
@@ -580,6 +580,30 @@ class Sandbox:
return False
+ # _create_empty_file()
+ #
+ # Creates an empty file in the current working directory.
+ #
+ # If this is called outside a batch context, the file is created
+ # immediately.
+ #
+ # If this is called in a batch context, creating the file is deferred.
+ #
+ # Args:
+ # name (str): The name of the file to be created
+ #
+ def _create_empty_file(self, name):
+ if self.__batch:
+ batch_file = _SandboxBatchFile(name)
+
+ current_group = self.__batch.current_group
+ current_group.append(batch_file)
+ else:
+ vdir = self.get_virtual_directory()
+ cwd = self._get_work_directory()
+ cwd_vdir = vdir.descend(*cwd.lstrip(os.sep).split(os.sep), create=True)
+ cwd_vdir._create_empty_file(name)
+
# _get_element_name()
#
# Get the plugin's element full name
@@ -655,8 +679,11 @@ class _SandboxBatch:
"Command failed with exitcode {}".format(exitcode), detail=label, collect=self.collect
)
- def execute_call(self, call):
- call.callback()
+ def create_empty_file(self, name):
+ vdir = self.sandbox.get_virtual_directory()
+ cwd = self.sandbox._get_work_directory()
+ cwd_vdir = vdir.descend(*cwd.lstrip(os.sep).split(os.sep), create=True)
+ cwd_vdir._create_empty_file(name)
# _SandboxBatchItem()
@@ -708,18 +735,18 @@ class _SandboxBatchGroup(_SandboxBatchItem):
item.execute(batch)
def combined_label(self):
- return "\n".join(item.combined_label() for item in self.children)
+ return "\n".join(filter(None, (item.combined_label() for item in self.children)))
-# _SandboxBatchCall()
+# _SandboxBatchFile()
#
-# A call item in a command batch.
+# A file creation item in a command batch.
#
-class _SandboxBatchCall(_SandboxBatchItem):
- def __init__(self, callback):
+class _SandboxBatchFile(_SandboxBatchItem):
+ def __init__(self, name):
super().__init__()
- self.callback = callback
+ self.name = name
def execute(self, batch):
- batch.execute_call(self)
+ batch.create_empty_file(self.name)
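_create_empty_file() replaces the old callback-based batch item: outside a batch the file is written straight into the sandbox's virtual working directory, while inside a batch it is recorded and later emitted as a touch command (see create_empty_file() in _sandboxreapi.py above). BuildElement.prepare() relies on it to drop the .bst-prepared marker once configure-commands have run. A self-contained sketch of the defer-while-batching behaviour, using a stand-in class rather than the real Sandbox API:

    class SketchSandbox:
        # Stand-in for Sandbox: immediate file creation outside a batch,
        # deferred (scripted as `touch`) while a batch is open.
        def __init__(self):
            self.batch_script = None  # a list while a batch is open, else None
            self.files = set()

        def _create_empty_file(self, name):
            if self.batch_script is not None:
                self.batch_script.append("touch -- {}".format(name))
            else:
                self.files.add(name)


    sandbox = SketchSandbox()
    sandbox.batch_script = []          # simulate an open batch
    sandbox._create_empty_file(".bst-prepared")
    assert sandbox.batch_script == ["touch -- .bst-prepared"]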
diff --git a/src/buildstream/storage/_casbaseddirectory.py b/src/buildstream/storage/_casbaseddirectory.py
index e86dd100c..3ab11a6ed 100644
--- a/src/buildstream/storage/_casbaseddirectory.py
+++ b/src/buildstream/storage/_casbaseddirectory.py
@@ -65,6 +65,7 @@ class IndexEntry:
def get_directory(self, parent):
if not self.buildstream_object:
+ assert self.type == _FileType.DIRECTORY
self.buildstream_object = CasBasedDirectory(
parent.cas_cache, digest=self.digest, parent=parent, filename=self.name
)
@@ -212,6 +213,14 @@ class CasBasedDirectory(Directory):
self.__invalidate_digest()
+ def _create_empty_file(self, name):
+ digest = self.cas_cache.add_object(buffer=b"")
+
+ entry = IndexEntry(name, _FileType.REGULAR_FILE, digest=digest)
+ self.index[name] = entry
+
+ self.__invalidate_digest()
+
def _add_entry(self, entry: IndexEntry):
self.index[entry.name] = entry.clone()
self.__invalidate_digest()
@@ -513,11 +522,6 @@ class CasBasedDirectory(Directory):
result.files_written.append(external_pathspec)
return result
- def set_deterministic_mtime(self):
- """ Sets a static modification time for all regular files in this directory.
- Since we don't store any modification time, we don't need to do anything.
- """
-
def set_deterministic_user(self):
""" Sets all files in this directory to the current user's euid/egid.
We also don't store user data, so this can be ignored.
diff --git a/src/buildstream/storage/_filebaseddirectory.py b/src/buildstream/storage/_filebaseddirectory.py
index 7b745f777..07efa4598 100644
--- a/src/buildstream/storage/_filebaseddirectory.py
+++ b/src/buildstream/storage/_filebaseddirectory.py
@@ -141,9 +141,6 @@ class FileBasedDirectory(Directory):
def _mark_changed(self):
pass
- def set_deterministic_mtime(self):
- _set_deterministic_mtime(self.external_directory)
-
def set_deterministic_user(self):
_set_deterministic_user(self.external_directory)
@@ -346,3 +343,7 @@ class FileBasedDirectory(Directory):
# and incorrectly thinks they are broken; the new casbaseddirectory does not have this bug.
return os.path.lexists(os.path.join(self.external_directory, *path))
raise ImplError("_exists can only follow symlinks in filebaseddirectory")
+
+ def _create_empty_file(self, name):
+ with open(os.path.join(self.external_directory, name), "w"):
+ pass
diff --git a/src/buildstream/storage/directory.py b/src/buildstream/storage/directory.py
index f0aab7c10..55cc717f2 100644
--- a/src/buildstream/storage/directory.py
+++ b/src/buildstream/storage/directory.py
@@ -149,12 +149,6 @@ class Directory:
"""
raise NotImplementedError()
- def set_deterministic_mtime(self):
- """ Sets a static modification time for all regular files in this directory.
- The magic number for timestamps is 2011-11-11 11:11:11.
- """
- raise NotImplementedError()
-
def set_deterministic_user(self):
""" Sets all files in this directory to the current user's euid/egid.
"""
diff --git a/src/buildstream/utils.py b/src/buildstream/utils.py
index 2b6c2761b..b588566df 100644
--- a/src/buildstream/utils.py
+++ b/src/buildstream/utils.py
@@ -122,18 +122,6 @@ class FileListResult:
self.files_written = []
"""List of files that were written."""
- def combine(self, other: "FileListResult") -> "FileListResult":
- """Create a new FileListResult that contains the results of both.
- """
- ret = FileListResult()
-
- ret.overwritten = self.overwritten + other.overwritten
- ret.ignored = self.ignored + other.ignored
- ret.failed_attributes = self.failed_attributes + other.failed_attributes
- ret.files_written = self.files_written + other.files_written
-
- return ret
-
def _make_timestamp(timepoint: float) -> str:
"""Obtain the ISO 8601 timestamp represented by the time given in seconds.
diff --git a/tests/artifactcache/junctions/parent/.bst/workspaces.yml b/tests/artifactcache/junctions/parent/.bst/workspaces.yml
deleted file mode 100644
index e69de29bb..000000000
--- a/tests/artifactcache/junctions/parent/.bst/workspaces.yml
+++ /dev/null
diff --git a/tests/cachekey/project/.bst/workspaces.yml b/tests/cachekey/project/.bst/workspaces.yml
deleted file mode 100644
index e69de29bb..000000000
--- a/tests/cachekey/project/.bst/workspaces.yml
+++ /dev/null
diff --git a/tests/frontend/workspace.py b/tests/frontend/workspace.py
index e05b6bd1f..f6bfb4362 100644
--- a/tests/frontend/workspace.py
+++ b/tests/frontend/workspace.py
@@ -821,6 +821,8 @@ def test_detect_modifications(cli, tmpdir, datafiles, modification, strict):
{"format-version": 0, "alpha.bst": {0: "/workspaces/bravo", 1: "/workspaces/charlie",}},
# Test loading a version with decimals
{"format-version": 0.5},
+ # Test loading an unsupported old version
+ {"format-version": 3},
# Test loading a future version
{"format-version": BST_WORKSPACE_FORMAT_VERSION + 1},
],
@@ -842,63 +844,12 @@ def test_list_unsupported_workspace(cli, datafiles, workspace_cfg):
@pytest.mark.parametrize(
"workspace_cfg,expected",
[
- # Test loading version 0 without a dict
+ # Test loading version 4
(
- {"alpha.bst": "/workspaces/bravo"},
+ {"format-version": 4, "workspaces": {"alpha.bst": {"path": "/workspaces/bravo"}},},
{
"format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {"alpha.bst": {"prepared": False, "path": "/workspaces/bravo", "running_files": {}}},
- },
- ),
- # Test loading version 0 with only one source
- (
- {"alpha.bst": {0: "/workspaces/bravo"}},
- {
- "format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {"alpha.bst": {"prepared": False, "path": "/workspaces/bravo", "running_files": {}}},
- },
- ),
- # Test loading version 1
- (
- {"format-version": 1, "workspaces": {"alpha.bst": {"path": "/workspaces/bravo"}}},
- {
- "format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {"alpha.bst": {"prepared": False, "path": "/workspaces/bravo", "running_files": {}}},
- },
- ),
- # Test loading version 2
- (
- {
- "format-version": 2,
- "workspaces": {
- "alpha.bst": {
- "path": "/workspaces/bravo",
- "last_successful": "some_key",
- "running_files": {"beta.bst": ["some_file"]},
- }
- },
- },
- {
- "format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {
- "alpha.bst": {
- "prepared": False,
- "path": "/workspaces/bravo",
- "last_successful": "some_key",
- "running_files": {"beta.bst": ["some_file"]},
- }
- },
- },
- ),
- # Test loading version 3
- (
- {
- "format-version": 3,
- "workspaces": {"alpha.bst": {"prepared": True, "path": "/workspaces/bravo", "running_files": {}}},
- },
- {
- "format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {"alpha.bst": {"prepared": True, "path": "/workspaces/bravo", "running_files": {}}},
+ "workspaces": {"alpha.bst": {"path": "/workspaces/bravo"}},
},
),
],
diff --git a/tests/integration/project/files/workspace-incremental/Makefile b/tests/integration/project/files/workspace-incremental/Makefile
new file mode 100644
index 000000000..18333fd46
--- /dev/null
+++ b/tests/integration/project/files/workspace-incremental/Makefile
@@ -0,0 +1,7 @@
+all: random copy
+
+random:
+ dd if=/dev/urandom count=8 | sha256sum > random
+
+copy: source
+ cp source copy
diff --git a/tests/integration/project/files/workspace-incremental/source b/tests/integration/project/files/workspace-incremental/source
new file mode 100644
index 000000000..56a6051ca
--- /dev/null
+++ b/tests/integration/project/files/workspace-incremental/source
@@ -0,0 +1 @@
+1
\ No newline at end of file
diff --git a/tests/integration/project/files/workspace-partial/Makefile b/tests/integration/project/files/workspace-partial/Makefile
new file mode 100644
index 000000000..1b3683b0d
--- /dev/null
+++ b/tests/integration/project/files/workspace-partial/Makefile
@@ -0,0 +1,10 @@
+all: copy1 copy2
+
+random:
+ dd if=/dev/urandom count=8 | sha256sum > random
+
+copy1: source1
+ cp source1 copy1
+
+copy2: source2
+ cp source2 copy2
diff --git a/tests/integration/project/files/workspace-partial/source1 b/tests/integration/project/files/workspace-partial/source1
new file mode 100644
index 000000000..56a6051ca
--- /dev/null
+++ b/tests/integration/project/files/workspace-partial/source1
@@ -0,0 +1 @@
+1
\ No newline at end of file
diff --git a/tests/integration/project/files/workspace-partial/source2 b/tests/integration/project/files/workspace-partial/source2
new file mode 100644
index 000000000..56a6051ca
--- /dev/null
+++ b/tests/integration/project/files/workspace-partial/source2
@@ -0,0 +1 @@
+1
\ No newline at end of file
diff --git a/tests/integration/workspace.py b/tests/integration/workspace.py
index 7e84b690b..a2ea4841a 100644
--- a/tests/integration/workspace.py
+++ b/tests/integration/workspace.py
@@ -8,6 +8,9 @@ from buildstream import _yaml
from buildstream.testing import cli_integration as cli # pylint: disable=unused-import
from buildstream.testing._utils.site import HAVE_SANDBOX
from buildstream.exceptions import ErrorDomain
+from buildstream.utils import BST_ARBITRARY_TIMESTAMP
+
+from tests.testutils import wait_for_cache_granularity
pytestmark = pytest.mark.integration
@@ -62,7 +65,6 @@ def test_workspace_mount_on_read_only_directory(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
-@pytest.mark.xfail(reason="Incremental builds are currently incompatible with workspace source plugin.")
def test_workspace_commanddir(cli, datafiles):
project = str(datafiles)
workspace = os.path.join(cli.directory, "workspace")
@@ -74,8 +76,16 @@ def test_workspace_commanddir(cli, datafiles):
res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- assert os.path.exists(os.path.join(cli.directory, "workspace"))
- assert os.path.exists(os.path.join(cli.directory, "workspace", "build"))
+ # Check that the object file was created in the command-subdir `build`
+ # using the cached buildtree.
+ res = cli.run(
+ project=project,
+ args=["shell", "--build", element_name, "--use-buildtree", "always", "--", "find", "..", "-mindepth", "1",],
+ )
+ res.assert_success()
+
+ files = res.output.splitlines()
+ assert "../build/hello.o" in files
@pytest.mark.datafiles(DATA_DIR)
@@ -274,7 +284,7 @@ def test_incremental_configure_commands_run_only_once(cli, datafiles):
# Then we build, and check whether the configure step succeeded
res = cli.run(project=project, args=["--cache-buildtrees", "always", "build", element_name])
res.assert_success()
- # check that the workspace was not configured
+ # check that the workspace was not configured outside the sandbox
assert not os.path.exists(os.path.join(workspace, "prepared"))
# the configure should have been run in the sandbox, so check the buildtree
@@ -288,6 +298,10 @@ def test_incremental_configure_commands_run_only_once(cli, datafiles):
assert "./prepared" in files
assert not "./prepared-again" in files
+ # Add file to workspace to trigger an (incremental) build
+ with open(os.path.join(workspace, "newfile"), "w"):
+ pass
+
# When we build again, the configure commands should not be
# called, and we should therefore exit cleanly (the configure
# commands are set to always fail after the first run)
@@ -365,3 +379,138 @@ def test_workspace_failed_logs(cli, datafiles):
fail_str = "FAILURE {}: Running build-commands".format(element_name)
batch_fail_str = "FAILURE {}: Running commands".format(element_name)
assert fail_str in log or batch_fail_str in log
+
+
+def get_buildtree_file_contents(cli, project, element_name, filename):
+ res = cli.run(
+ project=project, args=["shell", "--build", element_name, "--use-buildtree", "always", "--", "cat", filename,],
+ )
+ res.assert_success()
+ return res.output
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
+def test_incremental(cli, datafiles):
+ project = str(datafiles)
+ workspace = os.path.join(cli.directory, "workspace")
+ element_path = os.path.join(project, "elements")
+ element_name = "workspace/incremental.bst"
+
+ element = {
+ "kind": "manual",
+ "depends": [{"filename": "base.bst", "type": "build"}],
+ "sources": [{"kind": "local", "path": "files/workspace-incremental"}],
+ "config": {"build-commands": ["make"]},
+ }
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
+
+ # We open a workspace on the above element
+ res = cli.run(project=project, args=["workspace", "open", "--directory", workspace, element_name])
+ res.assert_success()
+
+ # Initial (non-incremental) build of the workspace
+ res = cli.run(project=project, args=["build", element_name])
+ res.assert_success()
+
+ # Save the random hash
+ random_hash = get_buildtree_file_contents(cli, project, element_name, "random")
+
+ # Verify the expected output file of the initial build
+ assert get_buildtree_file_contents(cli, project, element_name, "copy") == "1"
+
+ wait_for_cache_granularity()
+
+ # Replace source file contents with '2'
+ with open(os.path.join(workspace, "source"), "w") as f:
+ f.write("2")
+
+ # Perform incremental build of the workspace
+ res = cli.run(project=project, args=["build", element_name])
+ res.assert_success()
+
+ # Verify that this was an incremental build by comparing the random hash
+ assert get_buildtree_file_contents(cli, project, element_name, "random") == random_hash
+
+ # Verify that the output file matches the new source file
+ assert get_buildtree_file_contents(cli, project, element_name, "copy") == "2"
+
+ wait_for_cache_granularity()
+
+ # Replace source file contents with '3', however, set an old mtime such
+ # that `make` will not pick up the change
+ with open(os.path.join(workspace, "source"), "w") as f:
+ f.write("3")
+ os.utime(os.path.join(workspace, "source"), (BST_ARBITRARY_TIMESTAMP, BST_ARBITRARY_TIMESTAMP))
+
+ # Perform incremental build of the workspace
+ res = cli.run(project=project, args=["build", element_name])
+ res.assert_success()
+
+ # Verify that this was an incremental build by comparing the random hash
+ assert get_buildtree_file_contents(cli, project, element_name, "random") == random_hash
+
+ # Verify that the output file still matches the previous content '2'
+ assert get_buildtree_file_contents(cli, project, element_name, "copy") == "2"
+
+
+# Test incremental build after partial build / build failure
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
+def test_incremental_partial(cli, datafiles):
+ project = str(datafiles)
+ workspace = os.path.join(cli.directory, "workspace")
+ element_path = os.path.join(project, "elements")
+ element_name = "workspace/incremental.bst"
+
+ element = {
+ "kind": "manual",
+ "depends": [{"filename": "base.bst", "type": "build"}],
+ "sources": [{"kind": "local", "path": "files/workspace-partial"}],
+ "config": {"build-commands": ["make random", "make copy1", "make copy2"]},
+ }
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
+
+ # We open a workspace on the above element
+ res = cli.run(project=project, args=["workspace", "open", "--directory", workspace, element_name])
+ res.assert_success()
+
+ # Initial (non-incremental) build of the workspace
+ res = cli.run(project=project, args=["build", element_name])
+ res.assert_success()
+
+ # Save the random hash
+ random_hash = get_buildtree_file_contents(cli, project, element_name, "random")
+
+ # Verify the expected output files of the initial build
+ assert get_buildtree_file_contents(cli, project, element_name, "copy1") == "1"
+ assert get_buildtree_file_contents(cli, project, element_name, "copy2") == "1"
+
+ wait_for_cache_granularity()
+
+ # Delete source1 and replace source2 file contents with '2'
+ os.unlink(os.path.join(workspace, "source1"))
+ with open(os.path.join(workspace, "source2"), "w") as f:
+ f.write("2")
+
+ # Perform incremental build of the workspace
+ # This should fail because of the missing source1 file.
+ res = cli.run(project=project, args=["build", element_name])
+ res.assert_main_error(ErrorDomain.STREAM, None)
+
+ wait_for_cache_granularity()
+
+ # Recreate source1 file
+ with open(os.path.join(workspace, "source1"), "w") as f:
+ f.write("2")
+
+ # Perform incremental build of the workspace
+ res = cli.run(project=project, args=["build", element_name])
+ res.assert_success()
+
+ # Verify that this was an incremental build by comparing the random hash
+ assert get_buildtree_file_contents(cli, project, element_name, "random") == random_hash
+
+ # Verify that both files got rebuilt
+ assert get_buildtree_file_contents(cli, project, element_name, "copy1") == "2"
+ assert get_buildtree_file_contents(cli, project, element_name, "copy2") == "2"
diff --git a/tests/remoteexecution/workspace.py b/tests/remoteexecution/workspace.py
index 627ae312f..b525cefcd 100644
--- a/tests/remoteexecution/workspace.py
+++ b/tests/remoteexecution/workspace.py
@@ -134,6 +134,9 @@ def check_buildtree(
buildtree = {}
output = result.output.splitlines()
+ typ_inptime = None
+ typ_gentime = None
+
for line in output:
assert "::" in line
fname, mtime = line.split("::")
@@ -142,9 +145,6 @@ def check_buildtree(
mtime = int(mtime)
buildtree[fname] = mtime
- typ_inptime = None
- typ_gentime = None
-
if incremental:
# directory timestamps are not meaningful
if fname in DIRS:
@@ -184,15 +184,10 @@ def get_timemark(cli, project, element_name, marker):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.xfail(reason="incremental workspace builds are not yet supported")
@pytest.mark.parametrize(
"modification", [pytest.param("content"), pytest.param("time"),],
)
-@pytest.mark.parametrize(
- "buildtype", [pytest.param("non-incremental"), pytest.param("incremental"),],
-)
-def test_workspace_build(cli, tmpdir, datafiles, modification, buildtype):
- incremental = buildtype == "incremental"
+def test_workspace_build(cli, tmpdir, datafiles, modification):
project = str(datafiles)
checkout = os.path.join(cli.directory, "checkout")
workspace = os.path.join(cli.directory, "workspace")
@@ -269,7 +264,7 @@ def test_workspace_build(cli, tmpdir, datafiles, modification, buildtype):
if modification == "time":
# touch a file in the workspace and save the mtime
os.utime(main_path)
- touched_time = os.stat(main_path).st_mtime
+ touched_time = int(os.stat(main_path).st_mtime)
elif modification == "content":
# change a source file (there's a race here but it's not serious)
@@ -278,7 +273,7 @@ def test_workspace_build(cli, tmpdir, datafiles, modification, buildtype):
with open(main_path, "w") as fdata:
for line in data:
fdata.write(re.sub(r"Hello", "Goodbye", line))
- touched_time = os.stat(main_path).st_mtime
+ touched_time = int(os.stat(main_path).st_mtime)
# refresh input times
ws_times = get_mtimes(workspace)
@@ -287,14 +282,19 @@ def test_workspace_build(cli, tmpdir, datafiles, modification, buildtype):
result = cli.run(project=project, args=build)
result.assert_success()
- rebuild_times = check_buildtree(cli, project, element_name, input_files, generated_files, incremental=incremental)
+ rebuild_times = check_buildtree(cli, project, element_name, input_files, generated_files, incremental=True)
rebuild_timemark = get_timemark(cli, project, element_name, (os.sep + BLDMARK))
assert rebuild_timemark > build_timemark
# check the times of the changed files
- if incremental:
- assert rebuild_times[os.sep + MAIN] == touched_time
- del rebuild_times[os.sep + MAIN]
+ assert rebuild_times[os.sep + MAIN] == touched_time
+ del rebuild_times[os.sep + MAIN]
+ del rebuild_times[os.sep + MAINO]
+ del rebuild_times[os.sep + SRC + os.sep + "hello"]
+ del rebuild_times[os.sep + DEPS + os.sep + "main.Po"]
+ del rebuild_times[os.sep + BLDMARK]
+
+ # check the times of the unmodified files
assert all([rebuild_times[fname] == build_times[fname] for fname in rebuild_times]), "{}\n{}".format(
rebuild_times, build_times
)