author    Jürg Billeter <j@bitron.ch>    2020-02-11 10:07:17 +0100
committer Jürg Billeter <j@bitron.ch>    2020-02-11 15:34:45 +0100
commit    3cde9d285198375025d1a4c7fab835ac9bd1581d (patch)
tree      3c1070da9426cded9b85111566ce4afed2db0289
parent    761b01b9324b91c01e9a568f448773439cba710e (diff)
_sandboxreapi.py: Optimize output_directories (branch: juerg/sandbox-reapi-capture)
Do not capture read-only directories. Fixes #1223.
-rw-r--r--  src/buildstream/sandbox/_sandboxreapi.py  |  65
1 file changed, 33 insertions(+), 32 deletions(-)
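
The core of the change is which directories get requested as REAPI outputs: only the read-write
marked directories, unless the whole root is writable. Below is a minimal sketch of that
relative-path computation using only the standard library; the directory names are hypothetical
examples, and the real code feeds the result into a remote_execution_pb2.Command.

    import os

    def compute_output_directories(working_directory, read_write_directories, root_read_only):
        # When the root is writable, the whole input tree must still be
        # captured, matching the behaviour before this change.
        if not root_read_only:
            read_write_directories = [os.path.sep]
        # REAPI Command.output_directories takes paths relative to the
        # command's working directory, so convert each absolute sandbox path.
        return [os.path.relpath(d, start=working_directory) for d in read_write_directories]

    # Hypothetical sandbox layout: a writable install prefix plus the build directory itself
    print(compute_output_directories("/buildstream-build",
                                     ["/buildstream-install", "/buildstream-build"],
                                     root_read_only=True))
    # -> ['../buildstream-install', '.']
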
diff --git a/src/buildstream/sandbox/_sandboxreapi.py b/src/buildstream/sandbox/_sandboxreapi.py
index f1cd477d6..132257b9c 100644
--- a/src/buildstream/sandbox/_sandboxreapi.py
+++ b/src/buildstream/sandbox/_sandboxreapi.py
@@ -17,7 +17,7 @@
import os
import shlex
-from .sandbox import Sandbox, SandboxCommandError, _SandboxBatch
+from .sandbox import Sandbox, SandboxFlags, SandboxCommandError, _SandboxBatch
from .. import utils
from .._exceptions import ImplError, SandboxError
from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
@@ -59,6 +59,7 @@ class SandboxREAPI(Sandbox):
# Create directories for all marked directories. This emulates
# some of the behaviour of other sandboxes, which create these
# to use as mount points.
+ read_write_directories = []
mount_sources = self._get_mount_sources()
for mark in self._get_marked_directories():
directory = mark["directory"]
@@ -66,10 +67,15 @@ class SandboxREAPI(Sandbox):
continue
# Create each marked directory
vdir.descend(*directory.split(os.path.sep), create=True)
+ read_write_directories.append(directory)
+
+ if not flags & SandboxFlags.ROOT_READ_ONLY:
+ # The whole sandbox is writable
+ read_write_directories = [os.path.sep]
# Generate Action proto
input_root_digest = vdir._get_digest()
- command_proto = self._create_command(command, cwd, env)
+ command_proto = self._create_command(command, cwd, env, read_write_directories)
command_digest = cascache.add_object(buffer=command_proto.SerializeToString())
action = remote_execution_pb2.Action(command_digest=command_digest, input_root_digest=input_root_digest)
@@ -77,7 +83,7 @@ class SandboxREAPI(Sandbox):
# Get output of build
self._process_job_output(
- action_result.output_directories, action_result.output_files, failure=action_result.exit_code != 0
+ cwd, action_result.output_directories, action_result.output_files, failure=action_result.exit_code != 0
)
if stdout:
@@ -91,62 +97,57 @@ class SandboxREAPI(Sandbox):
# the remote execution system has worked correctly but the command failed.
return action_result.exit_code
- def _create_command(self, command, working_directory, environment):
+ def _create_command(self, command, working_directory, environment, read_write_directories):
# Creates a command proto
environment_variables = [
remote_execution_pb2.Command.EnvironmentVariable(name=k, value=v) for (k, v) in environment.items()
]
- # Request the whole directory tree as output
- output_directory = os.path.relpath(os.path.sep, start=working_directory)
+ # Request read-write directories as output
+ output_directories = [os.path.relpath(dir, start=working_directory) for dir in read_write_directories]
return remote_execution_pb2.Command(
arguments=command,
working_directory=working_directory[1:],
environment_variables=environment_variables,
output_files=[],
- output_directories=[output_directory],
+ output_directories=output_directories,
platform=None,
)
- def _process_job_output(self, output_directories, output_files, *, failure):
+ def _process_job_output(self, working_directory, output_directories, output_files, *, failure):
# Reads the remote execution server response to an execution request.
#
# output_directories is an array of OutputDirectory objects.
# output_files is an array of OutputFile objects.
#
- # We only specify one output_directory, so it's an error
- # for there to be any output files or more than one directory at the moment.
- #
if output_files:
raise SandboxError("Output files were returned when we didn't request any.")
- if not output_directories:
- error_text = "No output directory was returned from the build server."
- raise SandboxError(error_text)
- if len(output_directories) > 1:
- error_text = "More than one output directory was returned from the build server: {}."
- raise SandboxError(error_text.format(output_directories))
-
- tree_digest = output_directories[0].tree_digest
- if tree_digest is None or not tree_digest.hash:
- raise SandboxError("Output directory structure had no digest attached.")
context = self._get_context()
cascache = context.get_cascache()
+ vdir = self.get_virtual_directory()
- # Get digest of root directory from tree digest
- tree = remote_execution_pb2.Tree()
- with open(cascache.objpath(tree_digest), "rb") as f:
- tree.ParseFromString(f.read())
- root_directory = tree.root.SerializeToString()
- dir_digest = utils._message_digest(root_directory)
+ for output_directory in output_directories:
+ tree_digest = output_directory.tree_digest
+ if tree_digest is None or not tree_digest.hash:
+ raise SandboxError("Output directory structure had no digest attached.")
- # At the moment, we will get the whole directory back in the first directory argument and we need
- # to replace the sandbox's virtual directory with that. Creating a new virtual directory object
- # from another hash will be interesting, though...
+ # Get digest of output directory from tree digest
+ tree = remote_execution_pb2.Tree()
+ with open(cascache.objpath(tree_digest), "rb") as f:
+ tree.ParseFromString(f.read())
+ root_directory = tree.root.SerializeToString()
+ dir_digest = utils._message_digest(root_directory)
- vdir = self.get_virtual_directory()
- vdir._reset(digest=dir_digest)
+ # Create a normalized absolute path (inside the input tree)
+ path = os.path.normpath(os.path.join(working_directory, output_directory.path))
+
+ # Get virtual directory at the path of the output directory
+ vsubdir = vdir.descend(*path.split(os.path.sep), create=True)
+
+ # Replace contents with returned output
+ vsubdir._reset(digest=dir_digest)
def _create_batch(self, main_group, flags, *, collect=None):
return _SandboxREAPIBatch(self, main_group, flags, collect=collect)
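
On the capture side, each OutputDirectory returned by the execution service now has to be mapped
back to an absolute path inside the sandbox before its contents replace the corresponding virtual
subdirectory. A minimal sketch of that path handling, again with hypothetical values matching the
example above:

    import os

    def sandbox_path_for_output(working_directory, output_path):
        # OutputDirectory.path is relative to the command's working directory;
        # join and normalize it to get the absolute path inside the input tree,
        # which is then used to descend into the sandbox's virtual directory.
        return os.path.normpath(os.path.join(working_directory, output_path))

    print(sandbox_path_for_output("/buildstream-build", "../buildstream-install"))
    # -> '/buildstream-install'
    print(sandbox_path_for_output("/buildstream-build", "."))
    # -> '/buildstream-build'
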