summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJürg Billeter <j@bitron.ch>2020-02-05 17:13:07 +0000
committerJürg Billeter <j@bitron.ch>2020-02-05 17:13:07 +0000
commit067ea76296a84620d2147dabdf1f3d0699c810d0 (patch)
treebc3f8ccd63c13c6190faa6d79690c361bc391791
parent546b3c1a46b6850ad182ebefcae9e70237a1186b (diff)
parent447d56f7cc2fd88fb58b980c7c239e68e56512bb (diff)
downloadbuildstream-067ea76296a84620d2147dabdf1f3d0699c810d0.tar.gz
Merge branch 'traveltissues/1216' into 'master'
mtime support Closes #1216 See merge request BuildStream/buildstream!1761
-rw-r--r--requirements/requirements.in1
-rw-r--r--requirements/requirements.txt1
-rw-r--r--src/buildstream/_cas/cascache.py21
-rw-r--r--src/buildstream/_cas/casserver.py4
-rw-r--r--src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution.proto463
-rw-r--r--src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution_pb2.py613
-rw-r--r--src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution_pb2_grpc.py60
-rw-r--r--src/buildstream/_protos/build/buildgrid/local_cas.proto20
-rw-r--r--src/buildstream/_protos/build/buildgrid/local_cas_pb2.py249
-rw-r--r--src/buildstream/_protos/build/buildgrid/local_cas_pb2_grpc.py6
-rw-r--r--src/buildstream/element.py10
-rw-r--r--src/buildstream/plugins/sources/workspace.py2
-rw-r--r--src/buildstream/storage/_casbaseddirectory.py42
-rw-r--r--src/buildstream/storage/_filebaseddirectory.py58
-rw-r--r--src/buildstream/storage/directory.py16
-rw-r--r--src/buildstream/testing/_sourcetests/source_determinism.py12
-rw-r--r--src/buildstream/utils.py77
-rw-r--r--tests/integration/source-determinism.py9
-rw-r--r--tests/internals/storage_vdir_import.py19
-rw-r--r--tests/internals/utils_move_atomic.py23
-rw-r--r--tests/remoteexecution/workspace.py180
21 files changed, 1367 insertions, 519 deletions
diff --git a/requirements/requirements.in b/requirements/requirements.in
index 50bb523da..ca38d710e 100644
--- a/requirements/requirements.in
+++ b/requirements/requirements.in
@@ -9,3 +9,4 @@ ruamel.yaml.clib >= 0.1.2
setuptools
pyroaring
ujson
+python-dateutil >= 2.7.0
diff --git a/requirements/requirements.txt b/requirements/requirements.txt
index 962090823..ab7a3a1f7 100644
--- a/requirements/requirements.txt
+++ b/requirements/requirements.txt
@@ -8,6 +8,7 @@ ruamel.yaml==0.16.5
setuptools==39.0.1
pyroaring==0.2.9
ujson==1.35
+python-dateutil==2.8.1
## The following requirements were added by pip freeze:
MarkupSafe==1.1.1
ruamel.yaml.clib==0.2.0
diff --git a/src/buildstream/_cas/cascache.py b/src/buildstream/_cas/cascache.py
index 9c0b46d1e..bb2abc6c8 100644
--- a/src/buildstream/_cas/cascache.py
+++ b/src/buildstream/_cas/cascache.py
@@ -26,6 +26,7 @@ import ctypes
import multiprocessing
import signal
import time
+from typing import Optional, List
import grpc
@@ -34,7 +35,7 @@ from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
from .._protos.build.buildgrid import local_cas_pb2
from .. import _signals, utils
-from ..types import FastEnum
+from ..types import FastEnum, SourceRef
from .._exceptions import CASCacheError
from .casdprocessmanager import CASDProcessManager
@@ -216,10 +217,18 @@ class CASCache:
for filenode in directory.files:
# regular file, create hardlink
fullpath = os.path.join(dest, filenode.name)
- if can_link:
+ # generally, if the node holds properties we will fallback
+ # to copying instead of hardlinking
+ if can_link and not filenode.node_properties:
utils.safe_link(self.objpath(filenode.digest), fullpath)
else:
utils.safe_copy(self.objpath(filenode.digest), fullpath)
+ if filenode.node_properties:
+ # see https://github.com/bazelbuild/remote-apis/blob/master/build/bazel/remote/execution/v2/nodeproperties.md
+ # for supported node property specifications
+ for prop in filenode.node_properties:
+ if prop.name == "MTime" and prop.value:
+ utils._set_file_mtime(fullpath, utils._parse_timestamp(prop.value))
if filenode.is_executable:
os.chmod(
@@ -339,15 +348,21 @@ class CASCache:
#
# Args:
# path (str): Path to directory to import
+ # properties Optional[List[str]]: List of properties to request
#
# Returns:
# (Digest): The digest of the imported directory
#
- def import_directory(self, path):
+ def import_directory(self, path: str, properties: Optional[List[str]] = None) -> SourceRef:
local_cas = self.get_local_cas()
request = local_cas_pb2.CaptureTreeRequest()
request.path.append(path)
+
+ if properties:
+ for _property in properties:
+ request.node_properties.append(_property)
+
response = local_cas.CaptureTree(request)
if len(response.responses) != 1:
diff --git a/src/buildstream/_cas/casserver.py b/src/buildstream/_cas/casserver.py
index 882e7e6d4..dd822d53b 100644
--- a/src/buildstream/_cas/casserver.py
+++ b/src/buildstream/_cas/casserver.py
@@ -268,10 +268,10 @@ class _CapabilitiesServicer(remote_execution_pb2_grpc.CapabilitiesServicer):
response = remote_execution_pb2.ServerCapabilities()
cache_capabilities = response.cache_capabilities
- cache_capabilities.digest_function.append(remote_execution_pb2.SHA256)
+ cache_capabilities.digest_function.append(remote_execution_pb2.DigestFunction.SHA256)
cache_capabilities.action_cache_update_capabilities.update_enabled = False
cache_capabilities.max_batch_total_size_bytes = _MAX_PAYLOAD_BYTES
- cache_capabilities.symlink_absolute_path_strategy = remote_execution_pb2.CacheCapabilities.ALLOWED
+ cache_capabilities.symlink_absolute_path_strategy = remote_execution_pb2.SymlinkAbsolutePathStrategy.ALLOWED
response.deprecated_api_version.major = 2
response.low_api_version.major = 2
diff --git a/src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution.proto b/src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution.proto
index 7edbce3bc..efbf513f9 100644
--- a/src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution.proto
+++ b/src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution.proto
@@ -81,6 +81,7 @@ service Execution {
// action will be reported in the `status` field of the `ExecuteResponse`. The
// server MUST NOT set the `error` field of the `Operation` proto.
// The possible errors include:
+ //
// * `INVALID_ARGUMENT`: One or more arguments are invalid.
// * `FAILED_PRECONDITION`: One or more errors occurred in setting up the
// action requested, such as a missing input or command or no worker being
@@ -93,6 +94,9 @@ service Execution {
// * `INTERNAL`: An internal error occurred in the execution engine or the
// worker.
// * `DEADLINE_EXCEEDED`: The execution timed out.
+ // * `CANCELLED`: The operation was cancelled by the client. This status is
+ // only possible if the server implements the Operations API CancelOperation
+ // method, and it was called for the current execution.
//
// In the case of a missing input or command, the server SHOULD additionally
// send a [PreconditionFailure][google.rpc.PreconditionFailure] error detail
@@ -124,10 +128,7 @@ service Execution {
//
// The lifetime of entries in the action cache is implementation-specific, but
// the server SHOULD assume that more recently used entries are more likely to
-// be used again. Additionally, action cache implementations SHOULD ensure that
-// any blobs referenced in the
-// [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage]
-// are still valid when returning a result.
+// be used again.
//
// As with other services in the Remote Execution API, any call may return an
// error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
@@ -136,7 +137,15 @@ service Execution {
service ActionCache {
// Retrieve a cached execution result.
//
+ // Implementations SHOULD ensure that any blobs referenced from the
+ // [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage]
+ // are available at the time of returning the
+ // [ActionResult][build.bazel.remote.execution.v2.ActionResult] and will be
+ // for some period of time afterwards. The TTLs of the referenced blobs SHOULD be increased
+ // if necessary and applicable.
+ //
// Errors:
+ //
// * `NOT_FOUND`: The requested `ActionResult` is not in the cache.
rpc GetActionResult(GetActionResultRequest) returns (ActionResult) {
option (google.api.http) = { get: "/v2/{instance_name=**}/actionResults/{action_digest.hash}/{action_digest.size_bytes}" };
@@ -144,11 +153,6 @@ service ActionCache {
// Upload a new execution result.
//
- // This method is intended for servers which implement the distributed cache
- // independently of the
- // [Execution][build.bazel.remote.execution.v2.Execution] API. As a
- // result, it is OPTIONAL for servers to implement.
- //
// In order to allow the server to perform access control based on the type of
// action, and to assist with client debugging, the client MUST first upload
// the [Action][build.bazel.remote.execution.v2.Execution] that produced the
@@ -157,7 +161,10 @@ service ActionCache {
// `ContentAddressableStorage`.
//
// Errors:
- // * `NOT_IMPLEMENTED`: This method is not supported by the server.
+ //
+ // * `INVALID_ARGUMENT`: One or more arguments are invalid.
+ // * `FAILED_PRECONDITION`: One or more errors occurred in updating the
+ // action result, such as a missing command or action.
// * `RESOURCE_EXHAUSTED`: There is insufficient storage space to add the
// entry to the cache.
rpc UpdateActionResult(UpdateActionResultRequest) returns (ActionResult) {
@@ -181,8 +188,8 @@ service ActionCache {
// hierarchy, which must also each be uploaded on their own.
//
// For small file uploads the client should group them together and call
-// [BatchUpdateBlobs][build.bazel.remote.execution.v2.ContentAddressableStorage.BatchUpdateBlobs]
-// on chunks of no more than 10 MiB. For large uploads, the client must use the
+// [BatchUpdateBlobs][build.bazel.remote.execution.v2.ContentAddressableStorage.BatchUpdateBlobs].
+// For large uploads, the client must use the
// [Write method][google.bytestream.ByteStream.Write] of the ByteStream API. The
// `resource_name` is `{instance_name}/uploads/{uuid}/blobs/{hash}/{size}`,
// where `instance_name` is as described in the next paragraph, `uuid` is a
@@ -204,6 +211,9 @@ service ActionCache {
// by the server. For servers which do not support multiple instances, then the
// `instance_name` is the empty path and the leading slash is omitted, so that
// the `resource_name` becomes `uploads/{uuid}/blobs/{hash}/{size}`.
+// To simplify parsing, a path segment cannot equal any of the following
+// keywords: `blobs`, `uploads`, `actions`, `actionResults`, `operations` and
+// `capabilities`.
//
// When attempting an upload, if another client has already completed the upload
// (which may occur in the middle of a single upload if another client uploads
@@ -255,10 +265,12 @@ service ContentAddressableStorage {
// independently.
//
// Errors:
+ //
// * `INVALID_ARGUMENT`: The client attempted to upload more than the
// server supported limit.
//
// Individual requests may return the following errors, additionally:
+ //
// * `RESOURCE_EXHAUSTED`: There is insufficient disk quota to store the blob.
// * `INVALID_ARGUMENT`: The
// [Digest][build.bazel.remote.execution.v2.Digest] does not match the
@@ -281,6 +293,7 @@ service ContentAddressableStorage {
// independently.
//
// Errors:
+ //
// * `INVALID_ARGUMENT`: The client attempted to read more than the
// server supported limit.
//
@@ -310,6 +323,8 @@ service ContentAddressableStorage {
// If part of the tree is missing from the CAS, the server will return the
// portion present and omit the rest.
//
+ // Errors:
+ //
// * `NOT_FOUND`: The requested tree root is not present in the CAS.
rpc GetTree(GetTreeRequest) returns (stream GetTreeResponse) {
option (google.api.http) = { get: "/v2/{instance_name=**}/blobs/{root_digest.hash}/{root_digest.size_bytes}:getTree" };
@@ -323,7 +338,14 @@ service ContentAddressableStorage {
// The query may include a particular `instance_name`, in which case the values
// returned will pertain to that instance.
service Capabilities {
- // GetCapabilities returns the server capabilities configuration.
+ // GetCapabilities returns the server capabilities configuration of the
+ // remote endpoint.
+ // Only the capabilities of the services supported by the endpoint will
+ // be returned:
+ // * Execution + CAS + Action Cache endpoints should return both
+ // CacheCapabilities and ExecutionCapabilities.
+ // * Execution only endpoints should return ExecutionCapabilities.
+ // * CAS + Action Cache only endpoints should return CacheCapabilities.
rpc GetCapabilities(GetCapabilitiesRequest) returns (ServerCapabilities) {
option (google.api.http) = {
get: "/v2/{instance_name=**}/capabilities"
@@ -387,8 +409,19 @@ message Action {
// immediately, rather than whenever the cache entry gets evicted.
google.protobuf.Duration timeout = 6;
- // If true, then the `Action`'s result cannot be cached.
+ // If true, then the `Action`'s result cannot be cached, and in-flight
+ // requests for the same `Action` may not be merged.
bool do_not_cache = 7;
+
+ // List of required supported [NodeProperty][build.bazel.remote.execution.v2.NodeProperty]
+ // keys. In order to ensure that equivalent `Action`s always hash to the same
+ // value, the supported node properties MUST be lexicographically sorted by name.
+ // Sorting of strings is done by code point, equivalently, by the UTF-8 bytes.
+ //
+ // The interpretation of these properties is server-dependent. If a property is
+ // not recognized by the server, the server will return an `INVALID_ARGUMENT`
+ // error.
+ repeated string output_node_properties = 8;
}
// A `Command` is the actual command executed by a worker running an
@@ -418,7 +451,8 @@ message Command {
// provide its own default environment variables; these defaults can be
// overridden using this field. Additional variables can also be specified.
//
- // In order to ensure that equivalent `Command`s always hash to the same
+ // In order to ensure that equivalent
+ // [Command][build.bazel.remote.execution.v2.Command]s always hash to the same
// value, the environment variables MUST be lexicographically sorted by name.
// Sorting of strings is done by code point, equivalently, by the UTF-8 bytes.
repeated EnvironmentVariable environment_variables = 2;
@@ -426,7 +460,8 @@ message Command {
// A list of the output files that the client expects to retrieve from the
// action. Only the listed files, as well as directories listed in
// `output_directories`, will be returned to the client as output.
- // Other files that may be created during command execution are discarded.
+ // Other files or directories that may be created during command execution
+ // are discarded.
//
// The paths are relative to the working directory of the action execution.
// The paths are specified using a single forward slash (`/`) as a path
@@ -438,16 +473,22 @@ message Command {
// MUST be sorted lexicographically by code point (or, equivalently, by UTF-8
// bytes).
//
- // An output file cannot be duplicated, be a parent of another output file, be
- // a child of a listed output directory, or have the same path as any of the
- // listed output directories.
+ // An output file cannot be duplicated, be a parent of another output file, or
+ // have the same path as any of the listed output directories.
+ //
+ // Directories leading up to the output files are created by the worker prior
+ // to execution, even if they are not explicitly part of the input root.
+ //
+ // DEPRECATED since v2.1: Use `output_paths` instead.
repeated string output_files = 3;
// A list of the output directories that the client expects to retrieve from
- // the action. Only the contents of the indicated directories (recursively
- // including the contents of their subdirectories) will be
- // returned, as well as files listed in `output_files`. Other files that may
- // be created during command execution are discarded.
+ // the action. Only the listed directories will be returned (an entire
+ // directory structure will be returned as a
+ // [Tree][build.bazel.remote.execution.v2.Tree] message digest, see
+ // [OutputDirectory][build.bazel.remote.execution.v2.OutputDirectory]), as
+ // well as files listed in `output_files`. Other files or directories that
+ // may be created during command execution are discarded.
//
// The paths are relative to the working directory of the action execution.
// The paths are specified using a single forward slash (`/`) as a path
@@ -461,15 +502,52 @@ message Command {
// MUST be sorted lexicographically by code point (or, equivalently, by UTF-8
// bytes).
//
- // An output directory cannot be duplicated, be a parent of another output
- // directory, be a parent of a listed output file, or have the same path as
- // any of the listed output files.
+ // An output directory cannot be duplicated or have the same path as any of
+ // the listed output files. An output directory is allowed to be a parent of
+ // another output directory.
+ //
+ // Directories leading up to the output directories (but not the output
+ // directories themselves) are created by the worker prior to execution, even
+ // if they are not explicitly part of the input root.
+ //
+ // DEPRECATED since 2.1: Use `output_paths` instead.
repeated string output_directories = 4;
+ // A list of the output paths that the client expects to retrieve from the
+ // action. Only the listed paths will be returned to the client as output.
+ // The type of the output (file or directory) is not specified, and will be
+ // determined by the server after action execution. If the resulting path is
+ // a file, it will be returned in an
+ // [OutputFile][build.bazel.remote.execution.v2.OutputFile]) typed field.
+ // If the path is a directory, the entire directory structure will be returned
+ // as a [Tree][build.bazel.remote.execution.v2.Tree] message digest, see
+ // [OutputDirectory][build.bazel.remote.execution.v2.OutputDirectory])
+ // Other files or directories that may be created during command execution
+ // are discarded.
+ //
+ // The paths are relative to the working directory of the action execution.
+ // The paths are specified using a single forward slash (`/`) as a path
+ // separator, even if the execution platform natively uses a different
+ // separator. The path MUST NOT include a trailing slash, nor a leading slash,
+ // being a relative path.
+ //
+ // In order to ensure consistent hashing of the same Action, the output paths
+ // MUST be deduplicated and sorted lexicographically by code point (or,
+ // equivalently, by UTF-8 bytes).
+ //
+ // Directories leading up to the output paths are created by the worker prior
+ // to execution, even if they are not explicitly part of the input root.
+ //
+ // New in v2.1: this field supersedes the DEPRECATED `output_files` and
+ // `output_directories` fields. If `output_paths` is used, `output_files` and
+ // `output_directories` will be ignored!
+ repeated string output_paths = 7;
+
// The platform requirements for the execution environment. The server MAY
// choose to execute the action on any worker satisfying the requirements, so
// the client SHOULD ensure that running the action on any such worker will
// have the same result.
+ // A detailed lexicon for this can be found in the accompanying platform.md.
Platform platform = 5;
// The working directory, relative to the input root, for the command to run
@@ -527,12 +605,21 @@ message Platform {
// In order to ensure that two equivalent directory trees hash to the same
// value, the following restrictions MUST be obeyed when constructing a
// a `Directory`:
-// - Every child in the directory must have a path of exactly one segment.
-// Multiple levels of directory hierarchy may not be collapsed.
-// - Each child in the directory must have a unique path segment (file name).
-// - The files, directories and symlinks in the directory must each be sorted
-// in lexicographical order by path. The path strings must be sorted by code
-// point, equivalently, by UTF-8 bytes.
+//
+// * Every child in the directory must have a path of exactly one segment.
+// Multiple levels of directory hierarchy may not be collapsed.
+// * Each child in the directory must have a unique path segment (file name).
+// Note that while the API itself is case-sensitive, the environment where
+// the Action is executed may or may not be case-sensitive. That is, it is
+// legal to call the API with a Directory that has both "Foo" and "foo" as
+// children, but the Action may be rejected by the remote system upon
+// execution.
+// * The files, directories and symlinks in the directory must each be sorted
+// in lexicographical order by path. The path strings must be sorted by code
+// point, equivalently, by UTF-8 bytes.
+// * The [NodeProperties][build.bazel.remote.execution.v2.NodeProperty] of files,
+// directories, and symlinks must be sorted in lexicographical order by
+// property name.
//
// A `Directory` that obeys the restrictions is said to be in canonical form.
//
@@ -549,7 +636,13 @@ message Platform {
// digest: {
// hash: "4a73bc9d03...",
// size: 65534
-// }
+// },
+// node_properties: [
+// {
+// "name": "MTime",
+// "value": "2017-01-15T01:30:15.01Z"
+// }
+// ]
// }
// ],
// directories: [
@@ -586,6 +679,22 @@ message Directory {
// The symlinks in the directory.
repeated SymlinkNode symlinks = 3;
+
+ // The node properties of the Directory.
+ repeated NodeProperty node_properties = 4;
+}
+
+// A single property for [FileNodes][build.bazel.remote.execution.v2.FileNode],
+// [DirectoryNodes][build.bazel.remote.execution.v2.DirectoryNode], and
+// [SymlinkNodes][build.bazel.remote.execution.v2.SymlinkNode]. The server is
+// responsible for specifying the property `name`s that it accepts. If
+// permitted by the server, the same `name` may occur multiple times.
+message NodeProperty {
+ // The property name.
+ string name = 1;
+
+ // The property value.
+ string value = 2;
}
// A `FileNode` represents a single file and associated metadata.
@@ -600,6 +709,9 @@ message FileNode {
// True if file is executable, false otherwise.
bool is_executable = 4;
+
+ // The node properties of the FileNode.
+ repeated NodeProperty node_properties = 5;
}
// A `DirectoryNode` represents a child of a
@@ -628,11 +740,13 @@ message SymlinkNode {
// API. The canonical form forbids the substrings `/./` and `//` in the target
// path. `..` components are allowed anywhere in the target path.
string target = 2;
+
+ // The node properties of the SymlinkNode.
+ repeated NodeProperty node_properties = 3;
}
// A content digest. A digest for a given blob consists of the size of the blob
-// and its hash. The hash algorithm to use is defined by the server, but servers
-// SHOULD use SHA-256.
+// and its hash. The hash algorithm to use is defined by the server.
//
// The size is considered to be an integral part of the digest and cannot be
// separated. That is, even if the `hash` field is correctly specified but
@@ -652,11 +766,12 @@ message SymlinkNode {
// When a `Digest` is used to refer to a proto message, it always refers to the
// message in binary encoded form. To ensure consistent hashing, clients and
// servers MUST ensure that they serialize messages according to the following
-// rules, even if there are alternate valid encodings for the same message.
-// - Fields are serialized in tag order.
-// - There are no unknown fields.
-// - There are no duplicate fields.
-// - Fields are serialized according to the default semantics for their type.
+// rules, even if there are alternate valid encodings for the same message:
+//
+// * Fields are serialized in tag order.
+// * There are no unknown fields.
+// * There are no duplicate fields.
+// * Fields are serialized according to the default semantics for their type.
//
// Most protocol buffer implementations will always follow these rules when
// serializing, but care should be taken to avoid shortcuts. For instance,
@@ -709,19 +824,58 @@ message ActionResult {
reserved 1; // Reserved for use as the resource name.
// The output files of the action. For each output file requested in the
- // `output_files` field of the Action, if the corresponding file existed after
- // the action completed, a single entry will be present in the output list.
+ // `output_files` or `output_paths` field of the Action, if the corresponding
+ // file existed after the action completed, a single entry will be present
+ // either in this field, or the `output_file_symlinks` field if the file was
+ // a symbolic link to another file (`output_symlinks` field after v2.1).
//
- // If the action does not produce the requested output, or produces a
- // directory where a regular file is expected or vice versa, then that output
+ // If an output listed in `output_files` was found, but was a directory rather
+ // than a regular file, the server will return a FAILED_PRECONDITION.
+ // If the action does not produce the requested output, then that output
// will be omitted from the list. The server is free to arrange the output
// list as desired; clients MUST NOT assume that the output list is sorted.
repeated OutputFile output_files = 2;
+ // The output files of the action that are symbolic links to other files. Those
+ // may be links to other output files, or input files, or even absolute paths
+ // outside of the working directory, if the server supports
+ // [SymlinkAbsolutePathStrategy.ALLOWED][build.bazel.remote.execution.v2.CacheCapabilities.SymlinkAbsolutePathStrategy].
+ // For each output file requested in the `output_files` or `output_paths`
+ // field of the Action, if the corresponding file existed after
+ // the action completed, a single entry will be present either in this field,
+ // or in the `output_files` field, if the file was not a symbolic link.
+ //
+ // If an output symbolic link of the same name as listed in `output_files` of
+ // the Command was found, but its target type was not a regular file, the
+ // server will return a FAILED_PRECONDITION.
+ // If the action does not produce the requested output, then that output
+ // will be omitted from the list. The server is free to arrange the output
+ // list as desired; clients MUST NOT assume that the output list is sorted.
+ //
+ // DEPRECATED as of v2.1. Servers that wish to be compatible with v2.0 API
+ // should still populate this field in addition to `output_symlinks`.
+ repeated OutputSymlink output_file_symlinks = 10;
+
+ // New in v2.1: this field will only be populated if the command
+ // `output_paths` field was used, and not the pre v2.1 `output_files` or
+ // `output_directories` fields.
+ // The output paths of the action that are symbolic links to other paths. Those
+ // may be links to other outputs, or inputs, or even absolute paths
+ // outside of the working directory, if the server supports
+ // [SymlinkAbsolutePathStrategy.ALLOWED][build.bazel.remote.execution.v2.CacheCapabilities.SymlinkAbsolutePathStrategy].
+ // A single entry for each output requested in `output_paths`
+ // field of the Action, if the corresponding path existed after
+ // the action completed and was a symbolic link.
+ //
+ // If the action does not produce a requested output, then that output
+ // will be omitted from the list. The server is free to arrange the output
+ // list as desired; clients MUST NOT assume that the output list is sorted.
+ repeated OutputSymlink output_symlinks = 12;
+
// The output directories of the action. For each output directory requested
- // in the `output_directories` field of the Action, if the corresponding
- // directory existed after the action completed, a single entry will be
- // present in the output list, which will contain the digest of a
+ // in the `output_directories` or `output_paths` field of the Action, if the
+ // corresponding directory existed after the action completed, a single entry
+ // will be present in the output list, which will contain the digest of a
// [Tree][build.bazel.remote.execution.v2.Tree] message containing the
// directory tree, and the path equal exactly to the corresponding Action
// output_directories member.
@@ -777,37 +931,56 @@ message ActionResult {
// }
// }
// ```
+ // If an output of the same name as listed in `output_files` of
+ // the Command was found in `output_directories`, but was not a directory, the
+ // server will return a FAILED_PRECONDITION.
repeated OutputDirectory output_directories = 3;
+ // The output directories of the action that are symbolic links to other
+ // directories. Those may be links to other output directories, or input
+ // directories, or even absolute paths outside of the working directory,
+ // if the server supports
+ // [SymlinkAbsolutePathStrategy.ALLOWED][build.bazel.remote.execution.v2.CacheCapabilities.SymlinkAbsolutePathStrategy].
+ // For each output directory requested in the `output_directories` field of
+ // the Action, if the directory existed after the action completed, a
+ // single entry will be present either in this field, or in the
+ // `output_directories` field, if the directory was not a symbolic link.
+ //
+ // If an output of the same name was found, but was a symbolic link to a file
+ // instead of a directory, the server will return a FAILED_PRECONDITION.
+ // If the action does not produce the requested output, then that output
+ // will be omitted from the list. The server is free to arrange the output
+ // list as desired; clients MUST NOT assume that the output list is sorted.
+ //
+ // DEPRECATED as of v2.1. Servers that wish to be compatible with v2.0 API
+ // should still populate this field in addition to `output_symlinks`.
+ repeated OutputSymlink output_directory_symlinks = 11;
+
// The exit code of the command.
int32 exit_code = 4;
- // The standard output buffer of the action. The server will determine, based
- // on the size of the buffer, whether to return it in raw form or to return
- // a digest in `stdout_digest` that points to the buffer. If neither is set,
- // then the buffer is empty. The client SHOULD NOT assume it will get one of
- // the raw buffer or a digest on any given request and should be prepared to
- // handle either.
+ // The standard output buffer of the action. The server SHOULD NOT inline
+ // stdout unless requested by the client in the
+ // [GetActionResultRequest][build.bazel.remote.execution.v2.GetActionResultRequest]
+ // message. The server MAY omit inlining, even if requested, and MUST do so if inlining
+ // would cause the response to exceed message size limits.
bytes stdout_raw = 5;
// The digest for a blob containing the standard output of the action, which
// can be retrieved from the
// [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage].
- // See `stdout_raw` for when this will be set.
Digest stdout_digest = 6;
- // The standard error buffer of the action. The server will determine, based
- // on the size of the buffer, whether to return it in raw form or to return
- // a digest in `stderr_digest` that points to the buffer. If neither is set,
- // then the buffer is empty. The client SHOULD NOT assume it will get one of
- // the raw buffer or a digest on any given request and should be prepared to
- // handle either.
+ // The standard error buffer of the action. The server SHOULD NOT inline
+ // stderr unless requested by the client in the
+ // [GetActionResultRequest][build.bazel.remote.execution.v2.GetActionResultRequest]
+ // message. The server MAY omit inlining, even if requested, and MUST do so if inlining
+ // would cause the response to exceed message size limits.
bytes stderr_raw = 7;
// The digest for a blob containing the standard error of the action, which
// can be retrieved from the
// [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage].
- // See `stderr_raw` for when this will be set.
Digest stderr_digest = 8;
// The details of the execution that originally produced this result.
@@ -818,10 +991,8 @@ message ActionResult {
// [FileNode][build.bazel.remote.execution.v2.FileNode], but it is used as an
// output in an `ActionResult`. It allows a full file path rather than
// only a name.
-//
-// `OutputFile` is binary-compatible with `FileNode`.
message OutputFile {
- // The full path of the file relative to the input root, including the
+ // The full path of the file relative to the working directory, including the
// filename. The path separator is a forward slash `/`. Since this is a
// relative path, it MUST NOT begin with a leading forward slash.
string path = 1;
@@ -833,6 +1004,16 @@ message OutputFile {
// True if file is executable, false otherwise.
bool is_executable = 4;
+
+ // The contents of the file if inlining was requested. The server SHOULD NOT inline
+ // file contents unless requested by the client in the
+ // [GetActionResultRequest][build.bazel.remote.execution.v2.GetActionResultRequest]
+ // message. The server MAY omit inlining, even if requested, and MUST do so if inlining
+ // would cause the response to exceed message size limits.
+ bytes contents = 5;
+
+ // The supported node properties of the OutputFile, if requested by the Action.
+ repeated NodeProperty node_properties = 6;
}
// A `Tree` contains all the
@@ -866,6 +1047,30 @@ message OutputDirectory {
Digest tree_digest = 3;
}
+// An `OutputSymlink` is similar to a
+// [Symlink][build.bazel.remote.execution.v2.SymlinkNode], but it is used as an
+// output in an `ActionResult`.
+//
+// `OutputSymlink` is binary-compatible with `SymlinkNode`.
+message OutputSymlink {
+ // The full path of the symlink relative to the working directory, including the
+ // filename. The path separator is a forward slash `/`. Since this is a
+ // relative path, it MUST NOT begin with a leading forward slash.
+ string path = 1;
+
+ // The target path of the symlink. The path separator is a forward slash `/`.
+ // The target path can be relative to the parent directory of the symlink or
+ // it can be an absolute path starting with `/`. Support for absolute paths
+ // can be checked using the [Capabilities][build.bazel.remote.execution.v2.Capabilities]
+ // API. The canonical form forbids the substrings `/./` and `//` in the target
+ // path. `..` components are allowed anywhere in the target path.
+ string target = 2;
+
+ // The supported node properties of the OutputSymlink, if requested by the
+ // Action.
+ repeated NodeProperty node_properties = 3;
+}
+
// An `ExecutionPolicy` can be used to control the scheduling of the action.
message ExecutionPolicy {
// The priority (relative importance) of this action. Generally, a lower value
@@ -904,9 +1109,19 @@ message ExecuteRequest {
// omitted.
string instance_name = 1;
- // If true, the action will be executed anew even if its result was already
- // present in the cache. If false, the result may be served from the
- // [ActionCache][build.bazel.remote.execution.v2.ActionCache].
+ // If true, the action will be executed even if its result is already
+ // present in the [ActionCache][build.bazel.remote.execution.v2.ActionCache].
+ // The execution is still allowed to be merged with other in-flight executions
+ // of the same action, however - semantically, the service MUST only guarantee
+ // that the results of an execution with this field set were not visible
+ // before the corresponding execution request was sent.
+ // Note that actions from execution requests setting this field set are still
+ // eligible to be entered into the action cache upon completion, and services
+ // SHOULD overwrite any existing entries that may exist. This allows
+ // skip_cache_lookup requests to be used as a mechanism for replacing action
+ // cache entries that reference outputs no longer available or that are
+ // poisoned in any way.
+ // If false, the result may be served from the action cache.
bool skip_cache_lookup = 3;
reserved 2, 4, 5; // Used for removed fields in an earlier version of the API.
@@ -970,16 +1185,16 @@ message ExecuteResponse {
// phase. The keys SHOULD be human readable so that a client can display them
// to a user.
map<string, LogFile> server_logs = 4;
+
+ // Freeform informational message with details on the execution of the action
+ // that may be displayed to the user upon failure or when requested explicitly.
+ string message = 5;
}
-// Metadata about an ongoing
-// [execution][build.bazel.remote.execution.v2.Execution.Execute], which
-// will be contained in the [metadata
-// field][google.longrunning.Operation.response] of the
-// [Operation][google.longrunning.Operation].
-message ExecuteOperationMetadata {
- // The current stage of execution.
- enum Stage {
+// The current stage of action execution.
+message ExecutionStage {
+ enum Value {
+ // Invalid value.
UNKNOWN = 0;
// Checking the result against the cache.
@@ -994,8 +1209,16 @@ message ExecuteOperationMetadata {
// Finished execution.
COMPLETED = 4;
}
+}
- Stage stage = 1;
+// Metadata about an ongoing
+// [execution][build.bazel.remote.execution.v2.Execution.Execute], which
+// will be contained in the [metadata
+// field][google.longrunning.Operation.response] of the
+// [Operation][google.longrunning.Operation].
+message ExecuteOperationMetadata {
+ // The current stage of execution.
+ ExecutionStage.Value stage = 1;
// The digest of the [Action][build.bazel.remote.execution.v2.Action]
// being executed.
@@ -1015,7 +1238,7 @@ message ExecuteOperationMetadata {
// A request message for
// [WaitExecution][build.bazel.remote.execution.v2.Execution.WaitExecution].
message WaitExecutionRequest {
- // The name of the [Operation][google.longrunning.operations.v1.Operation]
+ // The name of the [Operation][google.longrunning.Operation]
// returned by [Execute][build.bazel.remote.execution.v2.Execution.Execute].
string name = 1;
}
@@ -1033,6 +1256,19 @@ message GetActionResultRequest {
// The digest of the [Action][build.bazel.remote.execution.v2.Action]
// whose result is requested.
Digest action_digest = 2;
+
+ // A hint to the server to request inlining stdout in the
+ // [ActionResult][build.bazel.remote.execution.v2.ActionResult] message.
+ bool inline_stdout = 3;
+
+ // A hint to the server to request inlining stderr in the
+ // [ActionResult][build.bazel.remote.execution.v2.ActionResult] message.
+ bool inline_stderr = 4;
+
+ // A hint to the server to inline the contents of the listed output files.
+ // Each path needs to exactly match one path in `output_files` in the
+ // [Command][build.bazel.remote.execution.v2.Command] message.
+ repeated string inline_output_files = 5;
}
// A request message for
@@ -1136,7 +1372,7 @@ message BatchReadBlobsRequest {
// A response message for
// [ContentAddressableStorage.BatchReadBlobs][build.bazel.remote.execution.v2.ContentAddressableStorage.BatchReadBlobs].
message BatchReadBlobsResponse {
- // A response corresponding to a single blob that the client tried to upload.
+ // A response corresponding to a single blob that the client tried to download.
message Response {
// The digest to which this response corresponds.
Digest digest = 1;
@@ -1176,7 +1412,8 @@ message GetTreeRequest {
// A page token, which must be a value received in a previous
// [GetTreeResponse][build.bazel.remote.execution.v2.GetTreeResponse].
- // If present, the server will use it to return the following page of results.
+ // If present, the server will use that token as an offset, returning only
+ // that page and the ones that succeed it.
string page_token = 4;
}
@@ -1194,7 +1431,7 @@ message GetTreeResponse {
}
// A request message for
-// [Capabilities.GetCapabilities][google.devtools.remoteexecution.v2.Capabilities.GetCapabilities].
+// [Capabilities.GetCapabilities][build.bazel.remote.execution.v2.Capabilities.GetCapabilities].
message GetCapabilitiesRequest {
// The instance of the execution system to operate against. A server may
// support multiple instances of the execution system (with their own workers,
@@ -1205,7 +1442,7 @@ message GetCapabilitiesRequest {
}
// A response message for
-// [Capabilities.GetCapabilities][google.devtools.remoteexecution.v2.Capabilities.GetCapabilities].
+// [Capabilities.GetCapabilities][build.bazel.remote.execution.v2.Capabilities.GetCapabilities].
message ServerCapabilities {
// Capabilities of the remote cache system.
CacheCapabilities cache_capabilities = 1;
@@ -1225,11 +1462,30 @@ message ServerCapabilities {
// The digest function used for converting values into keys for CAS and Action
// Cache.
-enum DigestFunction {
- UNKNOWN = 0;
- SHA256 = 1;
- SHA1 = 2;
- MD5 = 3;
+message DigestFunction {
+ enum Value {
+ // It is an error for the server to return this value.
+ UNKNOWN = 0;
+
+ // The SHA-256 digest function.
+ SHA256 = 1;
+
+ // The SHA-1 digest function.
+ SHA1 = 2;
+
+ // The MD5 digest function.
+ MD5 = 3;
+
+ // The Microsoft "VSO-Hash" paged SHA256 digest function.
+ // See https://github.com/microsoft/BuildXL/blob/master/Documentation/Specs/PagedHash.md .
+ VSO = 4;
+
+ // The SHA-384 digest function.
+ SHA384 = 5;
+
+ // The SHA-512 digest function.
+ SHA512 = 6;
+ }
}
// Describes the server/instance capabilities for updating the action cache.
@@ -1249,25 +1505,29 @@ message PriorityCapabilities {
repeated PriorityRange priorities = 1;
}
-// Capabilities of the remote cache system.
-message CacheCapabilities {
- // Describes how the server treats absolute symlink targets.
- enum SymlinkAbsolutePathStrategy {
+// Describes how the server treats absolute symlink targets.
+message SymlinkAbsolutePathStrategy {
+ enum Value {
+ // Invalid value.
UNKNOWN = 0;
- // Server will return an INVALID_ARGUMENT on input symlinks with absolute targets.
+ // Server will return an `INVALID_ARGUMENT` on input symlinks with absolute
+ // targets.
// If an action tries to create an output symlink with an absolute target, a
- // FAILED_PRECONDITION will be returned.
+ // `FAILED_PRECONDITION` will be returned.
DISALLOWED = 1;
// Server will allow symlink targets to escape the input root tree, possibly
// resulting in non-hermetic builds.
ALLOWED = 2;
}
+}
+// Capabilities of the remote cache system.
+message CacheCapabilities {
// All the digest functions supported by the remote cache.
// Remote cache may support multiple digest functions simultaneously.
- repeated DigestFunction digest_function = 1;
+ repeated DigestFunction.Value digest_function = 1;
// Capabilities for updating the action cache.
ActionCacheUpdateCapabilities action_cache_update_capabilities = 2;
@@ -1282,19 +1542,22 @@ message CacheCapabilities {
int64 max_batch_total_size_bytes = 4;
// Whether absolute symlink targets are supported.
- SymlinkAbsolutePathStrategy symlink_absolute_path_strategy = 5;
+ SymlinkAbsolutePathStrategy.Value symlink_absolute_path_strategy = 5;
}
// Capabilities of the remote execution system.
message ExecutionCapabilities {
// Remote execution may only support a single digest function.
- DigestFunction digest_function = 1;
+ DigestFunction.Value digest_function = 1;
// Whether remote execution is enabled for the particular server/instance.
bool exec_enabled = 2;
// Supported execution priority range.
PriorityCapabilities execution_priority_capabilities = 3;
+
+ // Supported node properties.
+ repeated string supported_node_properties = 4;
}
// Details for the tool used to call the API.
@@ -1310,8 +1573,14 @@ message ToolDetails {
// external context of the request. The server may use this for logging or other
// purposes. To use it, the client attaches the header to the call using the
// canonical proto serialization:
-// name: build.bazel.remote.execution.v2.requestmetadata-bin
-// contents: the base64 encoded binary RequestMetadata message.
+//
+// * name: `build.bazel.remote.execution.v2.requestmetadata-bin`
+// * contents: the base64 encoded binary `RequestMetadata` message.
+// Note: the gRPC library serializes binary headers encoded in base 64 by
+// default (https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md#requests).
+// Therefore, if the gRPC library is used to pass/retrieve this
+// metadata, the user may ignore the base64 encoding and assume it is simply
+// serialized as a binary message.
message RequestMetadata {
// The details for the tool invoking the requests.
ToolDetails tool_details = 1;
diff --git a/src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution_pb2.py b/src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution_pb2.py
index ac42b7b0c..97ec30c2a 100644
--- a/src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution_pb2.py
+++ b/src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution_pb2.py
@@ -4,7 +4,6 @@
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
@@ -27,13 +26,15 @@ DESCRIPTOR = _descriptor.FileDescriptor(
package='build.bazel.remote.execution.v2',
syntax='proto3',
serialized_options=_b('\n\037build.bazel.remote.execution.v2B\024RemoteExecutionProtoP\001Z\017remoteexecution\242\002\003REX\252\002\037Build.Bazel.Remote.Execution.V2'),
- serialized_pb=_b('\n6build/bazel/remote/execution/v2/remote_execution.proto\x12\x1f\x62uild.bazel.remote.execution.v2\x1a\x1f\x62uild/bazel/semver/semver.proto\x1a\x1cgoogle/api/annotations.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\"\xd5\x01\n\x06\x41\x63tion\x12?\n\x0e\x63ommand_digest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x42\n\x11input_root_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12*\n\x07timeout\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x14\n\x0c\x64o_not_cache\x18\x07 \x01(\x08J\x04\x08\x03\x10\x06\"\xb7\x02\n\x07\x43ommand\x12\x11\n\targuments\x18\x01 \x03(\t\x12[\n\x15\x65nvironment_variables\x18\x02 \x03(\x0b\x32<.build.bazel.remote.execution.v2.Command.EnvironmentVariable\x12\x14\n\x0coutput_files\x18\x03 \x03(\t\x12\x1a\n\x12output_directories\x18\x04 \x03(\t\x12;\n\x08platform\x18\x05 \x01(\x0b\x32).build.bazel.remote.execution.v2.Platform\x12\x19\n\x11working_directory\x18\x06 \x01(\t\x1a\x32\n\x13\x45nvironmentVariable\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"{\n\x08Platform\x12\x46\n\nproperties\x18\x01 \x03(\x0b\x32\x32.build.bazel.remote.execution.v2.Platform.Property\x1a\'\n\x08Property\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"\xca\x01\n\tDirectory\x12\x38\n\x05\x66iles\x18\x01 \x03(\x0b\x32).build.bazel.remote.execution.v2.FileNode\x12\x43\n\x0b\x64irectories\x18\x02 \x03(\x0b\x32..build.bazel.remote.execution.v2.DirectoryNode\x12>\n\x08symlinks\x18\x03 \x03(\x0b\x32,.build.bazel.remote.execution.v2.SymlinkNode\"n\n\x08\x46ileNode\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x15\n\ris_executable\x18\x04 \x01(\x08J\x04\x08\x03\x10\x04\"V\n\rDirectoryNode\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 
\x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"+\n\x0bSymlinkNode\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06target\x18\x02 \x01(\t\"*\n\x06\x44igest\x12\x0c\n\x04hash\x18\x01 \x01(\t\x12\x12\n\nsize_bytes\x18\x02 \x01(\x03\"\xec\x04\n\x16\x45xecutedActionMetadata\x12\x0e\n\x06worker\x18\x01 \x01(\t\x12\x34\n\x10queued_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16worker_start_timestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12>\n\x1aworker_completed_timestamp\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12?\n\x1binput_fetch_start_timestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x43\n\x1finput_fetch_completed_timestamp\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12=\n\x19\x65xecution_start_timestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x41\n\x1d\x65xecution_completed_timestamp\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x41\n\x1doutput_upload_start_timestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x45\n!output_upload_completed_timestamp\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xb5\x03\n\x0c\x41\x63tionResult\x12\x41\n\x0coutput_files\x18\x02 \x03(\x0b\x32+.build.bazel.remote.execution.v2.OutputFile\x12L\n\x12output_directories\x18\x03 \x03(\x0b\x32\x30.build.bazel.remote.execution.v2.OutputDirectory\x12\x11\n\texit_code\x18\x04 \x01(\x05\x12\x12\n\nstdout_raw\x18\x05 \x01(\x0c\x12>\n\rstdout_digest\x18\x06 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x12\n\nstderr_raw\x18\x07 \x01(\x0c\x12>\n\rstderr_digest\x18\x08 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12S\n\x12\x65xecution_metadata\x18\t \x01(\x0b\x32\x37.build.bazel.remote.execution.v2.ExecutedActionMetadataJ\x04\x08\x01\x10\x02\"p\n\nOutputFile\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x15\n\ris_executable\x18\x04 
\x01(\x08J\x04\x08\x03\x10\x04\"~\n\x04Tree\x12\x38\n\x04root\x18\x01 \x01(\x0b\x32*.build.bazel.remote.execution.v2.Directory\x12<\n\x08\x63hildren\x18\x02 \x03(\x0b\x32*.build.bazel.remote.execution.v2.Directory\"c\n\x0fOutputDirectory\x12\x0c\n\x04path\x18\x01 \x01(\t\x12<\n\x0btree_digest\x18\x03 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.DigestJ\x04\x08\x02\x10\x03\"#\n\x0f\x45xecutionPolicy\x12\x10\n\x08priority\x18\x01 \x01(\x05\"&\n\x12ResultsCachePolicy\x12\x10\n\x08priority\x18\x01 \x01(\x05\"\xb3\x02\n\x0e\x45xecuteRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x19\n\x11skip_cache_lookup\x18\x03 \x01(\x08\x12>\n\raction_digest\x18\x06 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12J\n\x10\x65xecution_policy\x18\x07 \x01(\x0b\x32\x30.build.bazel.remote.execution.v2.ExecutionPolicy\x12Q\n\x14results_cache_policy\x18\x08 \x01(\x0b\x32\x33.build.bazel.remote.execution.v2.ResultsCachePolicyJ\x04\x08\x02\x10\x03J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06\"Z\n\x07LogFile\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x16\n\x0ehuman_readable\x18\x02 \x01(\x08\"\xbf\x02\n\x0f\x45xecuteResponse\x12=\n\x06result\x18\x01 \x01(\x0b\x32-.build.bazel.remote.execution.v2.ActionResult\x12\x15\n\rcached_result\x18\x02 \x01(\x08\x12\"\n\x06status\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12U\n\x0bserver_logs\x18\x04 \x03(\x0b\x32@.build.bazel.remote.execution.v2.ExecuteResponse.ServerLogsEntry\x1a[\n\x0fServerLogsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x37\n\x05value\x18\x02 \x01(\x0b\x32(.build.bazel.remote.execution.v2.LogFile:\x02\x38\x01\"\xb3\x02\n\x18\x45xecuteOperationMetadata\x12N\n\x05stage\x18\x01 \x01(\x0e\x32?.build.bazel.remote.execution.v2.ExecuteOperationMetadata.Stage\x12>\n\raction_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x1a\n\x12stdout_stream_name\x18\x03 \x01(\t\x12\x1a\n\x12stderr_stream_name\x18\x04 
\x01(\t\"O\n\x05Stage\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0f\n\x0b\x43\x41\x43HE_CHECK\x10\x01\x12\n\n\x06QUEUED\x10\x02\x12\r\n\tEXECUTING\x10\x03\x12\r\n\tCOMPLETED\x10\x04\"$\n\x14WaitExecutionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"o\n\x16GetActionResultRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12>\n\raction_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\x8b\x02\n\x19UpdateActionResultRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12>\n\raction_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x44\n\raction_result\x18\x03 \x01(\x0b\x32-.build.bazel.remote.execution.v2.ActionResult\x12Q\n\x14results_cache_policy\x18\x04 \x01(\x0b\x32\x33.build.bazel.remote.execution.v2.ResultsCachePolicy\"o\n\x17\x46indMissingBlobsRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12=\n\x0c\x62lob_digests\x18\x02 \x03(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"a\n\x18\x46indMissingBlobsResponse\x12\x45\n\x14missing_blob_digests\x18\x02 \x03(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\xd6\x01\n\x17\x42\x61tchUpdateBlobsRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12R\n\x08requests\x18\x02 \x03(\x0b\x32@.build.bazel.remote.execution.v2.BatchUpdateBlobsRequest.Request\x1aP\n\x07Request\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\"\xda\x01\n\x18\x42\x61tchUpdateBlobsResponse\x12U\n\tresponses\x18\x01 \x03(\x0b\x32\x42.build.bazel.remote.execution.v2.BatchUpdateBlobsResponse.Response\x1ag\n\x08Response\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\"\n\x06status\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status\"h\n\x15\x42\x61tchReadBlobsRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x38\n\x07\x64igests\x18\x02 \x03(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\xe4\x01\n\x16\x42\x61tchReadBlobsResponse\x12S\n\tresponses\x18\x01 
\x03(\x0b\x32@.build.bazel.remote.execution.v2.BatchReadBlobsResponse.Response\x1au\n\x08Response\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12\"\n\x06status\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\"\x8c\x01\n\x0eGetTreeRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12<\n\x0broot_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t\"k\n\x0fGetTreeResponse\x12?\n\x0b\x64irectories\x18\x01 \x03(\x0b\x32*.build.bazel.remote.execution.v2.Directory\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"/\n\x16GetCapabilitiesRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\"\xe3\x02\n\x12ServerCapabilities\x12N\n\x12\x63\x61\x63he_capabilities\x18\x01 \x01(\x0b\x32\x32.build.bazel.remote.execution.v2.CacheCapabilities\x12V\n\x16\x65xecution_capabilities\x18\x02 \x01(\x0b\x32\x36.build.bazel.remote.execution.v2.ExecutionCapabilities\x12:\n\x16\x64\x65precated_api_version\x18\x03 \x01(\x0b\x32\x1a.build.bazel.semver.SemVer\x12\x33\n\x0flow_api_version\x18\x04 \x01(\x0b\x32\x1a.build.bazel.semver.SemVer\x12\x34\n\x10high_api_version\x18\x05 \x01(\x0b\x32\x1a.build.bazel.semver.SemVer\"7\n\x1d\x41\x63tionCacheUpdateCapabilities\x12\x16\n\x0eupdate_enabled\x18\x01 \x01(\x08\"\xac\x01\n\x14PriorityCapabilities\x12W\n\npriorities\x18\x01 \x03(\x0b\x32\x43.build.bazel.remote.execution.v2.PriorityCapabilities.PriorityRange\x1a;\n\rPriorityRange\x12\x14\n\x0cmin_priority\x18\x01 \x01(\x05\x12\x14\n\x0cmax_priority\x18\x02 \x01(\x05\"\x88\x04\n\x11\x43\x61\x63heCapabilities\x12H\n\x0f\x64igest_function\x18\x01 \x03(\x0e\x32/.build.bazel.remote.execution.v2.DigestFunction\x12h\n action_cache_update_capabilities\x18\x02 \x01(\x0b\x32>.build.bazel.remote.execution.v2.ActionCacheUpdateCapabilities\x12Z\n\x1b\x63\x61\x63he_priority_capabilities\x18\x03 
\x01(\x0b\x32\x35.build.bazel.remote.execution.v2.PriorityCapabilities\x12\"\n\x1amax_batch_total_size_bytes\x18\x04 \x01(\x03\x12v\n\x1esymlink_absolute_path_strategy\x18\x05 \x01(\x0e\x32N.build.bazel.remote.execution.v2.CacheCapabilities.SymlinkAbsolutePathStrategy\"G\n\x1bSymlinkAbsolutePathStrategy\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0e\n\nDISALLOWED\x10\x01\x12\x0b\n\x07\x41LLOWED\x10\x02\"\xd7\x01\n\x15\x45xecutionCapabilities\x12H\n\x0f\x64igest_function\x18\x01 \x01(\x0e\x32/.build.bazel.remote.execution.v2.DigestFunction\x12\x14\n\x0c\x65xec_enabled\x18\x02 \x01(\x08\x12^\n\x1f\x65xecution_priority_capabilities\x18\x03 \x01(\x0b\x32\x35.build.bazel.remote.execution.v2.PriorityCapabilities\"6\n\x0bToolDetails\x12\x11\n\ttool_name\x18\x01 \x01(\t\x12\x14\n\x0ctool_version\x18\x02 \x01(\t\"\xa7\x01\n\x0fRequestMetadata\x12\x42\n\x0ctool_details\x18\x01 \x01(\x0b\x32,.build.bazel.remote.execution.v2.ToolDetails\x12\x11\n\taction_id\x18\x02 \x01(\t\x12\x1a\n\x12tool_invocation_id\x18\x03 \x01(\t\x12!\n\x19\x63orrelated_invocations_id\x18\x04 
\x01(\t*<\n\x0e\x44igestFunction\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06SHA256\x10\x01\x12\x08\n\x04SHA1\x10\x02\x12\x07\n\x03MD5\x10\x03\x32\xb9\x02\n\tExecution\x12\x8e\x01\n\x07\x45xecute\x12/.build.bazel.remote.execution.v2.ExecuteRequest\x1a\x1d.google.longrunning.Operation\"1\x82\xd3\xe4\x93\x02+\"&/v2/{instance_name=**}/actions:execute:\x01*0\x01\x12\x9a\x01\n\rWaitExecution\x12\x35.build.bazel.remote.execution.v2.WaitExecutionRequest\x1a\x1d.google.longrunning.Operation\"1\x82\xd3\xe4\x93\x02+\"&/v2/{name=operations/**}:waitExecution:\x01*0\x01\x32\xd6\x03\n\x0b\x41\x63tionCache\x12\xd7\x01\n\x0fGetActionResult\x12\x37.build.bazel.remote.execution.v2.GetActionResultRequest\x1a-.build.bazel.remote.execution.v2.ActionResult\"\\\x82\xd3\xe4\x93\x02V\x12T/v2/{instance_name=**}/actionResults/{action_digest.hash}/{action_digest.size_bytes}\x12\xec\x01\n\x12UpdateActionResult\x12:.build.bazel.remote.execution.v2.UpdateActionResultRequest\x1a-.build.bazel.remote.execution.v2.ActionResult\"k\x82\xd3\xe4\x93\x02\x65\x1aT/v2/{instance_name=**}/actionResults/{action_digest.hash}/{action_digest.size_bytes}:\raction_result2\x9b\x06\n\x19\x43ontentAddressableStorage\x12\xbc\x01\n\x10\x46indMissingBlobs\x12\x38.build.bazel.remote.execution.v2.FindMissingBlobsRequest\x1a\x39.build.bazel.remote.execution.v2.FindMissingBlobsResponse\"3\x82\xd3\xe4\x93\x02-\"(/v2/{instance_name=**}/blobs:findMissing:\x01*\x12\xbc\x01\n\x10\x42\x61tchUpdateBlobs\x12\x38.build.bazel.remote.execution.v2.BatchUpdateBlobsRequest\x1a\x39.build.bazel.remote.execution.v2.BatchUpdateBlobsResponse\"3\x82\xd3\xe4\x93\x02-\"(/v2/{instance_name=**}/blobs:batchUpdate:\x01*\x12\xb4\x01\n\x0e\x42\x61tchReadBlobs\x12\x36.build.bazel.remote.execution.v2.BatchReadBlobsRequest\x1a\x37.build.bazel.remote.execution.v2.BatchReadBlobsResponse\"1\x82\xd3\xe4\x93\x02+\"&/v2/{instance_name=**}/blobs:batchRead:\x01*\x12\xc8\x01\n\x07GetTree\x12/.build.bazel.remote.execution.v2.GetTreeRequest\x1a\x30.build.bazel.remote.ex
ecution.v2.GetTreeResponse\"X\x82\xd3\xe4\x93\x02R\x12P/v2/{instance_name=**}/blobs/{root_digest.hash}/{root_digest.size_bytes}:getTree0\x01\x32\xbd\x01\n\x0c\x43\x61pabilities\x12\xac\x01\n\x0fGetCapabilities\x12\x37.build.bazel.remote.execution.v2.GetCapabilitiesRequest\x1a\x33.build.bazel.remote.execution.v2.ServerCapabilities\"+\x82\xd3\xe4\x93\x02%\x12#/v2/{instance_name=**}/capabilitiesBr\n\x1f\x62uild.bazel.remote.execution.v2B\x14RemoteExecutionProtoP\x01Z\x0fremoteexecution\xa2\x02\x03REX\xaa\x02\x1f\x42uild.Bazel.Remote.Execution.V2b\x06proto3')
+ serialized_pb=_b('\n6build/bazel/remote/execution/v2/remote_execution.proto\x12\x1f\x62uild.bazel.remote.execution.v2\x1a\x1f\x62uild/bazel/semver/semver.proto\x1a\x1cgoogle/api/annotations.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\"\xf5\x01\n\x06\x41\x63tion\x12?\n\x0e\x63ommand_digest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x42\n\x11input_root_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12*\n\x07timeout\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x14\n\x0c\x64o_not_cache\x18\x07 \x01(\x08\x12\x1e\n\x16output_node_properties\x18\x08 \x03(\tJ\x04\x08\x03\x10\x06\"\xcd\x02\n\x07\x43ommand\x12\x11\n\targuments\x18\x01 \x03(\t\x12[\n\x15\x65nvironment_variables\x18\x02 \x03(\x0b\x32<.build.bazel.remote.execution.v2.Command.EnvironmentVariable\x12\x14\n\x0coutput_files\x18\x03 \x03(\t\x12\x1a\n\x12output_directories\x18\x04 \x03(\t\x12\x14\n\x0coutput_paths\x18\x07 \x03(\t\x12;\n\x08platform\x18\x05 \x01(\x0b\x32).build.bazel.remote.execution.v2.Platform\x12\x19\n\x11working_directory\x18\x06 \x01(\t\x1a\x32\n\x13\x45nvironmentVariable\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"{\n\x08Platform\x12\x46\n\nproperties\x18\x01 \x03(\x0b\x32\x32.build.bazel.remote.execution.v2.Platform.Property\x1a\'\n\x08Property\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"\x92\x02\n\tDirectory\x12\x38\n\x05\x66iles\x18\x01 \x03(\x0b\x32).build.bazel.remote.execution.v2.FileNode\x12\x43\n\x0b\x64irectories\x18\x02 \x03(\x0b\x32..build.bazel.remote.execution.v2.DirectoryNode\x12>\n\x08symlinks\x18\x03 \x03(\x0b\x32,.build.bazel.remote.execution.v2.SymlinkNode\x12\x46\n\x0fnode_properties\x18\x04 \x03(\x0b\x32-.build.bazel.remote.execution.v2.NodeProperty\"+\n\x0cNodeProperty\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t\"\xb6\x01\n\x08\x46ileNode\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x15\n\ris_executable\x18\x04 \x01(\x08\x12\x46\n\x0fnode_properties\x18\x05 \x03(\x0b\x32-.build.bazel.remote.execution.v2.NodePropertyJ\x04\x08\x03\x10\x04\"V\n\rDirectoryNode\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"s\n\x0bSymlinkNode\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06target\x18\x02 \x01(\t\x12\x46\n\x0fnode_properties\x18\x03 \x03(\x0b\x32-.build.bazel.remote.execution.v2.NodeProperty\"*\n\x06\x44igest\x12\x0c\n\x04hash\x18\x01 \x01(\t\x12\x12\n\nsize_bytes\x18\x02 \x01(\x03\"\xec\x04\n\x16\x45xecutedActionMetadata\x12\x0e\n\x06worker\x18\x01 \x01(\t\x12\x34\n\x10queued_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16worker_start_timestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12>\n\x1aworker_completed_timestamp\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12?\n\x1binput_fetch_start_timestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x43\n\x1finput_fetch_completed_timestamp\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12=\n\x19\x65xecution_start_timestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x41\n\x1d\x65xecution_completed_timestamp\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x41\n\x1doutput_upload_start_timestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x45\n!output_upload_completed_timestamp\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x9f\x05\n\x0c\x41\x63tionResult\x12\x41\n\x0coutput_files\x18\x02 \x03(\x0b\x32+.build.bazel.remote.execution.v2.OutputFile\x12L\n\x14output_file_symlinks\x18\n \x03(\x0b\x32..build.bazel.remote.execution.v2.OutputSymlink\x12G\n\x0foutput_symlinks\x18\x0c \x03(\x0b\x32..build.bazel.remote.execution.v2.OutputSymlink\x12L\n\x12output_directories\x18\x03 
\x03(\x0b\x32\x30.build.bazel.remote.execution.v2.OutputDirectory\x12Q\n\x19output_directory_symlinks\x18\x0b \x03(\x0b\x32..build.bazel.remote.execution.v2.OutputSymlink\x12\x11\n\texit_code\x18\x04 \x01(\x05\x12\x12\n\nstdout_raw\x18\x05 \x01(\x0c\x12>\n\rstdout_digest\x18\x06 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x12\n\nstderr_raw\x18\x07 \x01(\x0c\x12>\n\rstderr_digest\x18\x08 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12S\n\x12\x65xecution_metadata\x18\t \x01(\x0b\x32\x37.build.bazel.remote.execution.v2.ExecutedActionMetadataJ\x04\x08\x01\x10\x02\"\xca\x01\n\nOutputFile\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x15\n\ris_executable\x18\x04 \x01(\x08\x12\x10\n\x08\x63ontents\x18\x05 \x01(\x0c\x12\x46\n\x0fnode_properties\x18\x06 \x03(\x0b\x32-.build.bazel.remote.execution.v2.NodePropertyJ\x04\x08\x03\x10\x04\"~\n\x04Tree\x12\x38\n\x04root\x18\x01 \x01(\x0b\x32*.build.bazel.remote.execution.v2.Directory\x12<\n\x08\x63hildren\x18\x02 \x03(\x0b\x32*.build.bazel.remote.execution.v2.Directory\"c\n\x0fOutputDirectory\x12\x0c\n\x04path\x18\x01 \x01(\t\x12<\n\x0btree_digest\x18\x03 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.DigestJ\x04\x08\x02\x10\x03\"u\n\rOutputSymlink\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x0e\n\x06target\x18\x02 \x01(\t\x12\x46\n\x0fnode_properties\x18\x03 \x03(\x0b\x32-.build.bazel.remote.execution.v2.NodeProperty\"#\n\x0f\x45xecutionPolicy\x12\x10\n\x08priority\x18\x01 \x01(\x05\"&\n\x12ResultsCachePolicy\x12\x10\n\x08priority\x18\x01 \x01(\x05\"\xb3\x02\n\x0e\x45xecuteRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x19\n\x11skip_cache_lookup\x18\x03 \x01(\x08\x12>\n\raction_digest\x18\x06 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12J\n\x10\x65xecution_policy\x18\x07 \x01(\x0b\x32\x30.build.bazel.remote.execution.v2.ExecutionPolicy\x12Q\n\x14results_cache_policy\x18\x08 
\x01(\x0b\x32\x33.build.bazel.remote.execution.v2.ResultsCachePolicyJ\x04\x08\x02\x10\x03J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06\"Z\n\x07LogFile\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x16\n\x0ehuman_readable\x18\x02 \x01(\x08\"\xd0\x02\n\x0f\x45xecuteResponse\x12=\n\x06result\x18\x01 \x01(\x0b\x32-.build.bazel.remote.execution.v2.ActionResult\x12\x15\n\rcached_result\x18\x02 \x01(\x08\x12\"\n\x06status\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12U\n\x0bserver_logs\x18\x04 \x03(\x0b\x32@.build.bazel.remote.execution.v2.ExecuteResponse.ServerLogsEntry\x12\x0f\n\x07message\x18\x05 \x01(\t\x1a[\n\x0fServerLogsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x37\n\x05value\x18\x02 \x01(\x0b\x32(.build.bazel.remote.execution.v2.LogFile:\x02\x38\x01\"a\n\x0e\x45xecutionStage\"O\n\x05Value\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0f\n\x0b\x43\x41\x43HE_CHECK\x10\x01\x12\n\n\x06QUEUED\x10\x02\x12\r\n\tEXECUTING\x10\x03\x12\r\n\tCOMPLETED\x10\x04\"\xd8\x01\n\x18\x45xecuteOperationMetadata\x12\x44\n\x05stage\x18\x01 \x01(\x0e\x32\x35.build.bazel.remote.execution.v2.ExecutionStage.Value\x12>\n\raction_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x1a\n\x12stdout_stream_name\x18\x03 \x01(\t\x12\x1a\n\x12stderr_stream_name\x18\x04 \x01(\t\"$\n\x14WaitExecutionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\xba\x01\n\x16GetActionResultRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12>\n\raction_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x15\n\rinline_stdout\x18\x03 \x01(\x08\x12\x15\n\rinline_stderr\x18\x04 \x01(\x08\x12\x1b\n\x13inline_output_files\x18\x05 \x03(\t\"\x8b\x02\n\x19UpdateActionResultRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12>\n\raction_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x44\n\raction_result\x18\x03 \x01(\x0b\x32-.build.bazel.remote.execution.v2.ActionResult\x12Q\n\x14results_cache_policy\x18\x04 
\x01(\x0b\x32\x33.build.bazel.remote.execution.v2.ResultsCachePolicy\"o\n\x17\x46indMissingBlobsRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12=\n\x0c\x62lob_digests\x18\x02 \x03(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"a\n\x18\x46indMissingBlobsResponse\x12\x45\n\x14missing_blob_digests\x18\x02 \x03(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\xd6\x01\n\x17\x42\x61tchUpdateBlobsRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12R\n\x08requests\x18\x02 \x03(\x0b\x32@.build.bazel.remote.execution.v2.BatchUpdateBlobsRequest.Request\x1aP\n\x07Request\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\"\xda\x01\n\x18\x42\x61tchUpdateBlobsResponse\x12U\n\tresponses\x18\x01 \x03(\x0b\x32\x42.build.bazel.remote.execution.v2.BatchUpdateBlobsResponse.Response\x1ag\n\x08Response\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\"\n\x06status\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status\"h\n\x15\x42\x61tchReadBlobsRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x38\n\x07\x64igests\x18\x02 \x03(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\xe4\x01\n\x16\x42\x61tchReadBlobsResponse\x12S\n\tresponses\x18\x01 \x03(\x0b\x32@.build.bazel.remote.execution.v2.BatchReadBlobsResponse.Response\x1au\n\x08Response\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12\"\n\x06status\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\"\x8c\x01\n\x0eGetTreeRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12<\n\x0broot_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t\"k\n\x0fGetTreeResponse\x12?\n\x0b\x64irectories\x18\x01 \x03(\x0b\x32*.build.bazel.remote.execution.v2.Directory\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t\"/\n\x16GetCapabilitiesRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\"\xe3\x02\n\x12ServerCapabilities\x12N\n\x12\x63\x61\x63he_capabilities\x18\x01 \x01(\x0b\x32\x32.build.bazel.remote.execution.v2.CacheCapabilities\x12V\n\x16\x65xecution_capabilities\x18\x02 \x01(\x0b\x32\x36.build.bazel.remote.execution.v2.ExecutionCapabilities\x12:\n\x16\x64\x65precated_api_version\x18\x03 \x01(\x0b\x32\x1a.build.bazel.semver.SemVer\x12\x33\n\x0flow_api_version\x18\x04 \x01(\x0b\x32\x1a.build.bazel.semver.SemVer\x12\x34\n\x10high_api_version\x18\x05 \x01(\x0b\x32\x1a.build.bazel.semver.SemVer\"f\n\x0e\x44igestFunction\"T\n\x05Value\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06SHA256\x10\x01\x12\x08\n\x04SHA1\x10\x02\x12\x07\n\x03MD5\x10\x03\x12\x07\n\x03VSO\x10\x04\x12\n\n\x06SHA384\x10\x05\x12\n\n\x06SHA512\x10\x06\"7\n\x1d\x41\x63tionCacheUpdateCapabilities\x12\x16\n\x0eupdate_enabled\x18\x01 \x01(\x08\"\xac\x01\n\x14PriorityCapabilities\x12W\n\npriorities\x18\x01 \x03(\x0b\x32\x43.build.bazel.remote.execution.v2.PriorityCapabilities.PriorityRange\x1a;\n\rPriorityRange\x12\x14\n\x0cmin_priority\x18\x01 \x01(\x05\x12\x14\n\x0cmax_priority\x18\x02 \x01(\x05\"P\n\x1bSymlinkAbsolutePathStrategy\"1\n\x05Value\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0e\n\nDISALLOWED\x10\x01\x12\x0b\n\x07\x41LLOWED\x10\x02\"\xb9\x03\n\x11\x43\x61\x63heCapabilities\x12N\n\x0f\x64igest_function\x18\x01 \x03(\x0e\x32\x35.build.bazel.remote.execution.v2.DigestFunction.Value\x12h\n action_cache_update_capabilities\x18\x02 \x01(\x0b\x32>.build.bazel.remote.execution.v2.ActionCacheUpdateCapabilities\x12Z\n\x1b\x63\x61\x63he_priority_capabilities\x18\x03 \x01(\x0b\x32\x35.build.bazel.remote.execution.v2.PriorityCapabilities\x12\"\n\x1amax_batch_total_size_bytes\x18\x04 \x01(\x03\x12j\n\x1esymlink_absolute_path_strategy\x18\x05 \x01(\x0e\x32\x42.build.bazel.remote.execution.v2.SymlinkAbsolutePathStrategy.Value\"\x80\x02\n\x15\x45xecutionCapabilities\x12N\n\x0f\x64igest_function\x18\x01 
\x01(\x0e\x32\x35.build.bazel.remote.execution.v2.DigestFunction.Value\x12\x14\n\x0c\x65xec_enabled\x18\x02 \x01(\x08\x12^\n\x1f\x65xecution_priority_capabilities\x18\x03 \x01(\x0b\x32\x35.build.bazel.remote.execution.v2.PriorityCapabilities\x12!\n\x19supported_node_properties\x18\x04 \x03(\t\"6\n\x0bToolDetails\x12\x11\n\ttool_name\x18\x01 \x01(\t\x12\x14\n\x0ctool_version\x18\x02 \x01(\t\"\xa7\x01\n\x0fRequestMetadata\x12\x42\n\x0ctool_details\x18\x01 \x01(\x0b\x32,.build.bazel.remote.execution.v2.ToolDetails\x12\x11\n\taction_id\x18\x02 \x01(\t\x12\x1a\n\x12tool_invocation_id\x18\x03 \x01(\t\x12!\n\x19\x63orrelated_invocations_id\x18\x04 \x01(\t2\xb9\x02\n\tExecution\x12\x8e\x01\n\x07\x45xecute\x12/.build.bazel.remote.execution.v2.ExecuteRequest\x1a\x1d.google.longrunning.Operation\"1\x82\xd3\xe4\x93\x02+\"&/v2/{instance_name=**}/actions:execute:\x01*0\x01\x12\x9a\x01\n\rWaitExecution\x12\x35.build.bazel.remote.execution.v2.WaitExecutionRequest\x1a\x1d.google.longrunning.Operation\"1\x82\xd3\xe4\x93\x02+\"&/v2/{name=operations/**}:waitExecution:\x01*0\x01\x32\xd6\x03\n\x0b\x41\x63tionCache\x12\xd7\x01\n\x0fGetActionResult\x12\x37.build.bazel.remote.execution.v2.GetActionResultRequest\x1a-.build.bazel.remote.execution.v2.ActionResult\"\\\x82\xd3\xe4\x93\x02V\x12T/v2/{instance_name=**}/actionResults/{action_digest.hash}/{action_digest.size_bytes}\x12\xec\x01\n\x12UpdateActionResult\x12:.build.bazel.remote.execution.v2.UpdateActionResultRequest\x1a-.build.bazel.remote.execution.v2.ActionResult\"k\x82\xd3\xe4\x93\x02\x65\x1aT/v2/{instance_name=**}/actionResults/{action_digest.hash}/{action_digest.size_bytes}:\raction_result2\x9b\x06\n\x19\x43ontentAddressableStorage\x12\xbc\x01\n\x10\x46indMissingBlobs\x12\x38.build.bazel.remote.execution.v2.FindMissingBlobsRequest\x1a\x39.build.bazel.remote.execution.v2.FindMissingBlobsResponse\"3\x82\xd3\xe4\x93\x02-\"(/v2/{instance_name=**}/blobs:findMissing:\x01*\x12\xbc\x01\n\x10\x42\x61tchUpdateBlobs\x12\x38.build.bazel.remote.
execution.v2.BatchUpdateBlobsRequest\x1a\x39.build.bazel.remote.execution.v2.BatchUpdateBlobsResponse\"3\x82\xd3\xe4\x93\x02-\"(/v2/{instance_name=**}/blobs:batchUpdate:\x01*\x12\xb4\x01\n\x0e\x42\x61tchReadBlobs\x12\x36.build.bazel.remote.execution.v2.BatchReadBlobsRequest\x1a\x37.build.bazel.remote.execution.v2.BatchReadBlobsResponse\"1\x82\xd3\xe4\x93\x02+\"&/v2/{instance_name=**}/blobs:batchRead:\x01*\x12\xc8\x01\n\x07GetTree\x12/.build.bazel.remote.execution.v2.GetTreeRequest\x1a\x30.build.bazel.remote.execution.v2.GetTreeResponse\"X\x82\xd3\xe4\x93\x02R\x12P/v2/{instance_name=**}/blobs/{root_digest.hash}/{root_digest.size_bytes}:getTree0\x01\x32\xbd\x01\n\x0c\x43\x61pabilities\x12\xac\x01\n\x0fGetCapabilities\x12\x37.build.bazel.remote.execution.v2.GetCapabilitiesRequest\x1a\x33.build.bazel.remote.execution.v2.ServerCapabilities\"+\x82\xd3\xe4\x93\x02%\x12#/v2/{instance_name=**}/capabilitiesBr\n\x1f\x62uild.bazel.remote.execution.v2B\x14RemoteExecutionProtoP\x01Z\x0fremoteexecution\xa2\x02\x03REX\xaa\x02\x1f\x42uild.Bazel.Remote.Execution.V2b\x06proto3')
,
dependencies=[build_dot_bazel_dot_semver_dot_semver__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_longrunning_dot_operations__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,])
-_DIGESTFUNCTION = _descriptor.EnumDescriptor(
- name='DigestFunction',
- full_name='build.bazel.remote.execution.v2.DigestFunction',
+
+
+_EXECUTIONSTAGE_VALUE = _descriptor.EnumDescriptor(
+ name='Value',
+ full_name='build.bazel.remote.execution.v2.ExecutionStage.Value',
filename=None,
file=DESCRIPTOR,
values=[
@@ -42,35 +43,32 @@ _DIGESTFUNCTION = _descriptor.EnumDescriptor(
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
- name='SHA256', index=1, number=1,
+ name='CACHE_CHECK', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
- name='SHA1', index=2, number=2,
+ name='QUEUED', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
- name='MD5', index=3, number=3,
+ name='EXECUTING', index=3, number=3,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='COMPLETED', index=4, number=4,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
- serialized_start=7213,
- serialized_end=7273,
+ serialized_start=4432,
+ serialized_end=4511,
)
-_sym_db.RegisterEnumDescriptor(_DIGESTFUNCTION)
-
-DigestFunction = enum_type_wrapper.EnumTypeWrapper(_DIGESTFUNCTION)
-UNKNOWN = 0
-SHA256 = 1
-SHA1 = 2
-MD5 = 3
-
+_sym_db.RegisterEnumDescriptor(_EXECUTIONSTAGE_VALUE)
-_EXECUTEOPERATIONMETADATA_STAGE = _descriptor.EnumDescriptor(
- name='Stage',
- full_name='build.bazel.remote.execution.v2.ExecuteOperationMetadata.Stage',
+_DIGESTFUNCTION_VALUE = _descriptor.EnumDescriptor(
+ name='Value',
+ full_name='build.bazel.remote.execution.v2.DigestFunction.Value',
filename=None,
file=DESCRIPTOR,
values=[
@@ -79,32 +77,40 @@ _EXECUTEOPERATIONMETADATA_STAGE = _descriptor.EnumDescriptor(
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
- name='CACHE_CHECK', index=1, number=1,
+ name='SHA256', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
- name='QUEUED', index=2, number=2,
+ name='SHA1', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
- name='EXECUTING', index=3, number=3,
+ name='MD5', index=3, number=3,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
- name='COMPLETED', index=4, number=4,
+ name='VSO', index=4, number=4,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='SHA384', index=5, number=5,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='SHA512', index=6, number=6,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
- serialized_start=3866,
- serialized_end=3945,
+ serialized_start=6893,
+ serialized_end=6977,
)
-_sym_db.RegisterEnumDescriptor(_EXECUTEOPERATIONMETADATA_STAGE)
+_sym_db.RegisterEnumDescriptor(_DIGESTFUNCTION_VALUE)
-_CACHECAPABILITIES_SYMLINKABSOLUTEPATHSTRATEGY = _descriptor.EnumDescriptor(
- name='SymlinkAbsolutePathStrategy',
- full_name='build.bazel.remote.execution.v2.CacheCapabilities.SymlinkAbsolutePathStrategy',
+_SYMLINKABSOLUTEPATHSTRATEGY_VALUE = _descriptor.EnumDescriptor(
+ name='Value',
+ full_name='build.bazel.remote.execution.v2.SymlinkAbsolutePathStrategy.Value',
filename=None,
file=DESCRIPTOR,
values=[
@@ -123,10 +129,10 @@ _CACHECAPABILITIES_SYMLINKABSOLUTEPATHSTRATEGY = _descriptor.EnumDescriptor(
],
containing_type=None,
serialized_options=None,
- serialized_start=6696,
- serialized_end=6767,
+ serialized_start=7242,
+ serialized_end=7291,
)
-_sym_db.RegisterEnumDescriptor(_CACHECAPABILITIES_SYMLINKABSOLUTEPATHSTRATEGY)
+_sym_db.RegisterEnumDescriptor(_SYMLINKABSOLUTEPATHSTRATEGY_VALUE)
_ACTION = _descriptor.Descriptor(
@@ -164,6 +170,13 @@ _ACTION = _descriptor.Descriptor(
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='output_node_properties', full_name='build.bazel.remote.execution.v2.Action.output_node_properties', index=4,
+ number=8, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -177,7 +190,7 @@ _ACTION = _descriptor.Descriptor(
oneofs=[
],
serialized_start=282,
- serialized_end=495,
+ serialized_end=527,
)
@@ -214,8 +227,8 @@ _COMMAND_ENVIRONMENTVARIABLE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=759,
- serialized_end=809,
+ serialized_start=813,
+ serialized_end=863,
)
_COMMAND = _descriptor.Descriptor(
@@ -254,14 +267,21 @@ _COMMAND = _descriptor.Descriptor(
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='platform', full_name='build.bazel.remote.execution.v2.Command.platform', index=4,
+ name='output_paths', full_name='build.bazel.remote.execution.v2.Command.output_paths', index=4,
+ number=7, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='platform', full_name='build.bazel.remote.execution.v2.Command.platform', index=5,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='working_directory', full_name='build.bazel.remote.execution.v2.Command.working_directory', index=5,
+ name='working_directory', full_name='build.bazel.remote.execution.v2.Command.working_directory', index=6,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
@@ -279,8 +299,8 @@ _COMMAND = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=498,
- serialized_end=809,
+ serialized_start=530,
+ serialized_end=863,
)
@@ -317,8 +337,8 @@ _PLATFORM_PROPERTY = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=895,
- serialized_end=934,
+ serialized_start=949,
+ serialized_end=988,
)
_PLATFORM = _descriptor.Descriptor(
@@ -347,8 +367,8 @@ _PLATFORM = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=811,
- serialized_end=934,
+ serialized_start=865,
+ serialized_end=988,
)
@@ -380,6 +400,13 @@ _DIRECTORY = _descriptor.Descriptor(
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='node_properties', full_name='build.bazel.remote.execution.v2.Directory.node_properties', index=3,
+ number=4, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -392,8 +419,46 @@ _DIRECTORY = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=937,
- serialized_end=1139,
+ serialized_start=991,
+ serialized_end=1265,
+)
+
+
+_NODEPROPERTY = _descriptor.Descriptor(
+ name='NodeProperty',
+ full_name='build.bazel.remote.execution.v2.NodeProperty',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='build.bazel.remote.execution.v2.NodeProperty.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='build.bazel.remote.execution.v2.NodeProperty.value', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1267,
+ serialized_end=1310,
)
@@ -425,6 +490,13 @@ _FILENODE = _descriptor.Descriptor(
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='node_properties', full_name='build.bazel.remote.execution.v2.FileNode.node_properties', index=3,
+ number=5, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -437,8 +509,8 @@ _FILENODE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=1141,
- serialized_end=1251,
+ serialized_start=1313,
+ serialized_end=1495,
)
@@ -475,8 +547,8 @@ _DIRECTORYNODE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=1253,
- serialized_end=1339,
+ serialized_start=1497,
+ serialized_end=1583,
)
@@ -501,6 +573,13 @@ _SYMLINKNODE = _descriptor.Descriptor(
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='node_properties', full_name='build.bazel.remote.execution.v2.SymlinkNode.node_properties', index=2,
+ number=3, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -513,8 +592,8 @@ _SYMLINKNODE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=1341,
- serialized_end=1384,
+ serialized_start=1585,
+ serialized_end=1700,
)
@@ -551,8 +630,8 @@ _DIGEST = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=1386,
- serialized_end=1428,
+ serialized_start=1702,
+ serialized_end=1744,
)
@@ -645,8 +724,8 @@ _EXECUTEDACTIONMETADATA = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=1431,
- serialized_end=2051,
+ serialized_start=1747,
+ serialized_end=2367,
)
@@ -665,49 +744,70 @@ _ACTIONRESULT = _descriptor.Descriptor(
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='output_directories', full_name='build.bazel.remote.execution.v2.ActionResult.output_directories', index=1,
+ name='output_file_symlinks', full_name='build.bazel.remote.execution.v2.ActionResult.output_file_symlinks', index=1,
+ number=10, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='output_symlinks', full_name='build.bazel.remote.execution.v2.ActionResult.output_symlinks', index=2,
+ number=12, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='output_directories', full_name='build.bazel.remote.execution.v2.ActionResult.output_directories', index=3,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='exit_code', full_name='build.bazel.remote.execution.v2.ActionResult.exit_code', index=2,
+ name='output_directory_symlinks', full_name='build.bazel.remote.execution.v2.ActionResult.output_directory_symlinks', index=4,
+ number=11, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='exit_code', full_name='build.bazel.remote.execution.v2.ActionResult.exit_code', index=5,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='stdout_raw', full_name='build.bazel.remote.execution.v2.ActionResult.stdout_raw', index=3,
+ name='stdout_raw', full_name='build.bazel.remote.execution.v2.ActionResult.stdout_raw', index=6,
number=5, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='stdout_digest', full_name='build.bazel.remote.execution.v2.ActionResult.stdout_digest', index=4,
+ name='stdout_digest', full_name='build.bazel.remote.execution.v2.ActionResult.stdout_digest', index=7,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='stderr_raw', full_name='build.bazel.remote.execution.v2.ActionResult.stderr_raw', index=5,
+ name='stderr_raw', full_name='build.bazel.remote.execution.v2.ActionResult.stderr_raw', index=8,
number=7, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='stderr_digest', full_name='build.bazel.remote.execution.v2.ActionResult.stderr_digest', index=6,
+ name='stderr_digest', full_name='build.bazel.remote.execution.v2.ActionResult.stderr_digest', index=9,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='execution_metadata', full_name='build.bazel.remote.execution.v2.ActionResult.execution_metadata', index=7,
+ name='execution_metadata', full_name='build.bazel.remote.execution.v2.ActionResult.execution_metadata', index=10,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
@@ -725,8 +825,8 @@ _ACTIONRESULT = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=2054,
- serialized_end=2491,
+ serialized_start=2370,
+ serialized_end=3041,
)
@@ -758,6 +858,20 @@ _OUTPUTFILE = _descriptor.Descriptor(
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='contents', full_name='build.bazel.remote.execution.v2.OutputFile.contents', index=3,
+ number=5, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b(""),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='node_properties', full_name='build.bazel.remote.execution.v2.OutputFile.node_properties', index=4,
+ number=6, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -770,8 +884,8 @@ _OUTPUTFILE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=2493,
- serialized_end=2605,
+ serialized_start=3044,
+ serialized_end=3246,
)
@@ -808,8 +922,8 @@ _TREE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=2607,
- serialized_end=2733,
+ serialized_start=3248,
+ serialized_end=3374,
)
@@ -846,8 +960,53 @@ _OUTPUTDIRECTORY = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=2735,
- serialized_end=2834,
+ serialized_start=3376,
+ serialized_end=3475,
+)
+
+
+_OUTPUTSYMLINK = _descriptor.Descriptor(
+ name='OutputSymlink',
+ full_name='build.bazel.remote.execution.v2.OutputSymlink',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='path', full_name='build.bazel.remote.execution.v2.OutputSymlink.path', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='target', full_name='build.bazel.remote.execution.v2.OutputSymlink.target', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='node_properties', full_name='build.bazel.remote.execution.v2.OutputSymlink.node_properties', index=2,
+ number=3, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=3477,
+ serialized_end=3594,
)
@@ -877,8 +1036,8 @@ _EXECUTIONPOLICY = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=2836,
- serialized_end=2871,
+ serialized_start=3596,
+ serialized_end=3631,
)
@@ -908,8 +1067,8 @@ _RESULTSCACHEPOLICY = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=2873,
- serialized_end=2911,
+ serialized_start=3633,
+ serialized_end=3671,
)
@@ -967,8 +1126,8 @@ _EXECUTEREQUEST = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=2914,
- serialized_end=3221,
+ serialized_start=3674,
+ serialized_end=3981,
)
@@ -1005,8 +1164,8 @@ _LOGFILE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=3223,
- serialized_end=3313,
+ serialized_start=3983,
+ serialized_end=4073,
)
@@ -1043,8 +1202,8 @@ _EXECUTERESPONSE_SERVERLOGSENTRY = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=3544,
- serialized_end=3635,
+ serialized_start=4321,
+ serialized_end=4412,
)
_EXECUTERESPONSE = _descriptor.Descriptor(
@@ -1082,6 +1241,13 @@ _EXECUTERESPONSE = _descriptor.Descriptor(
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='message', full_name='build.bazel.remote.execution.v2.ExecuteResponse.message', index=4,
+ number=5, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -1094,8 +1260,33 @@ _EXECUTERESPONSE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=3316,
- serialized_end=3635,
+ serialized_start=4076,
+ serialized_end=4412,
+)
+
+
+_EXECUTIONSTAGE = _descriptor.Descriptor(
+ name='ExecutionStage',
+ full_name='build.bazel.remote.execution.v2.ExecutionStage',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _EXECUTIONSTAGE_VALUE,
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=4414,
+ serialized_end=4511,
)
@@ -1139,7 +1330,6 @@ _EXECUTEOPERATIONMETADATA = _descriptor.Descriptor(
],
nested_types=[],
enum_types=[
- _EXECUTEOPERATIONMETADATA_STAGE,
],
serialized_options=None,
is_extendable=False,
@@ -1147,8 +1337,8 @@ _EXECUTEOPERATIONMETADATA = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=3638,
- serialized_end=3945,
+ serialized_start=4514,
+ serialized_end=4730,
)
@@ -1178,8 +1368,8 @@ _WAITEXECUTIONREQUEST = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=3947,
- serialized_end=3983,
+ serialized_start=4732,
+ serialized_end=4768,
)
@@ -1204,6 +1394,27 @@ _GETACTIONRESULTREQUEST = _descriptor.Descriptor(
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='inline_stdout', full_name='build.bazel.remote.execution.v2.GetActionResultRequest.inline_stdout', index=2,
+ number=3, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='inline_stderr', full_name='build.bazel.remote.execution.v2.GetActionResultRequest.inline_stderr', index=3,
+ number=4, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='inline_output_files', full_name='build.bazel.remote.execution.v2.GetActionResultRequest.inline_output_files', index=4,
+ number=5, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -1216,8 +1427,8 @@ _GETACTIONRESULTREQUEST = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=3985,
- serialized_end=4096,
+ serialized_start=4771,
+ serialized_end=4957,
)
@@ -1268,8 +1479,8 @@ _UPDATEACTIONRESULTREQUEST = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=4099,
- serialized_end=4366,
+ serialized_start=4960,
+ serialized_end=5227,
)
@@ -1306,8 +1517,8 @@ _FINDMISSINGBLOBSREQUEST = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=4368,
- serialized_end=4479,
+ serialized_start=5229,
+ serialized_end=5340,
)
@@ -1337,8 +1548,8 @@ _FINDMISSINGBLOBSRESPONSE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=4481,
- serialized_end=4578,
+ serialized_start=5342,
+ serialized_end=5439,
)
@@ -1375,8 +1586,8 @@ _BATCHUPDATEBLOBSREQUEST_REQUEST = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=4715,
- serialized_end=4795,
+ serialized_start=5576,
+ serialized_end=5656,
)
_BATCHUPDATEBLOBSREQUEST = _descriptor.Descriptor(
@@ -1412,8 +1623,8 @@ _BATCHUPDATEBLOBSREQUEST = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=4581,
- serialized_end=4795,
+ serialized_start=5442,
+ serialized_end=5656,
)
@@ -1450,8 +1661,8 @@ _BATCHUPDATEBLOBSRESPONSE_RESPONSE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=4913,
- serialized_end=5016,
+ serialized_start=5774,
+ serialized_end=5877,
)
_BATCHUPDATEBLOBSRESPONSE = _descriptor.Descriptor(
@@ -1480,8 +1691,8 @@ _BATCHUPDATEBLOBSRESPONSE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=4798,
- serialized_end=5016,
+ serialized_start=5659,
+ serialized_end=5877,
)
@@ -1518,8 +1729,8 @@ _BATCHREADBLOBSREQUEST = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=5018,
- serialized_end=5122,
+ serialized_start=5879,
+ serialized_end=5983,
)
@@ -1563,8 +1774,8 @@ _BATCHREADBLOBSRESPONSE_RESPONSE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=5236,
- serialized_end=5353,
+ serialized_start=6097,
+ serialized_end=6214,
)
_BATCHREADBLOBSRESPONSE = _descriptor.Descriptor(
@@ -1593,8 +1804,8 @@ _BATCHREADBLOBSRESPONSE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=5125,
- serialized_end=5353,
+ serialized_start=5986,
+ serialized_end=6214,
)
@@ -1645,8 +1856,8 @@ _GETTREEREQUEST = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=5356,
- serialized_end=5496,
+ serialized_start=6217,
+ serialized_end=6357,
)
@@ -1683,8 +1894,8 @@ _GETTREERESPONSE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=5498,
- serialized_end=5605,
+ serialized_start=6359,
+ serialized_end=6466,
)
@@ -1714,8 +1925,8 @@ _GETCAPABILITIESREQUEST = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=5607,
- serialized_end=5654,
+ serialized_start=6468,
+ serialized_end=6515,
)
@@ -1773,8 +1984,33 @@ _SERVERCAPABILITIES = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=5657,
- serialized_end=6012,
+ serialized_start=6518,
+ serialized_end=6873,
+)
+
+
+_DIGESTFUNCTION = _descriptor.Descriptor(
+ name='DigestFunction',
+ full_name='build.bazel.remote.execution.v2.DigestFunction',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _DIGESTFUNCTION_VALUE,
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=6875,
+ serialized_end=6977,
)
@@ -1804,8 +2040,8 @@ _ACTIONCACHEUPDATECAPABILITIES = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=6014,
- serialized_end=6069,
+ serialized_start=6979,
+ serialized_end=7034,
)
@@ -1842,8 +2078,8 @@ _PRIORITYCAPABILITIES_PRIORITYRANGE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=6185,
- serialized_end=6244,
+ serialized_start=7150,
+ serialized_end=7209,
)
_PRIORITYCAPABILITIES = _descriptor.Descriptor(
@@ -1872,8 +2108,33 @@ _PRIORITYCAPABILITIES = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=6072,
- serialized_end=6244,
+ serialized_start=7037,
+ serialized_end=7209,
+)
+
+
+_SYMLINKABSOLUTEPATHSTRATEGY = _descriptor.Descriptor(
+ name='SymlinkAbsolutePathStrategy',
+ full_name='build.bazel.remote.execution.v2.SymlinkAbsolutePathStrategy',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _SYMLINKABSOLUTEPATHSTRATEGY_VALUE,
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=7211,
+ serialized_end=7291,
)
@@ -1924,7 +2185,6 @@ _CACHECAPABILITIES = _descriptor.Descriptor(
],
nested_types=[],
enum_types=[
- _CACHECAPABILITIES_SYMLINKABSOLUTEPATHSTRATEGY,
],
serialized_options=None,
is_extendable=False,
@@ -1932,8 +2192,8 @@ _CACHECAPABILITIES = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=6247,
- serialized_end=6767,
+ serialized_start=7294,
+ serialized_end=7735,
)
@@ -1965,6 +2225,13 @@ _EXECUTIONCAPABILITIES = _descriptor.Descriptor(
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='supported_node_properties', full_name='build.bazel.remote.execution.v2.ExecutionCapabilities.supported_node_properties', index=3,
+ number=4, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -1977,8 +2244,8 @@ _EXECUTIONCAPABILITIES = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=6770,
- serialized_end=6985,
+ serialized_start=7738,
+ serialized_end=7994,
)
@@ -2015,8 +2282,8 @@ _TOOLDETAILS = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=6987,
- serialized_end=7041,
+ serialized_start=7996,
+ serialized_end=8050,
)
@@ -2067,8 +2334,8 @@ _REQUESTMETADATA = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=7044,
- serialized_end=7211,
+ serialized_start=8053,
+ serialized_end=8220,
)
_ACTION.fields_by_name['command_digest'].message_type = _DIGEST
@@ -2082,8 +2349,11 @@ _PLATFORM.fields_by_name['properties'].message_type = _PLATFORM_PROPERTY
_DIRECTORY.fields_by_name['files'].message_type = _FILENODE
_DIRECTORY.fields_by_name['directories'].message_type = _DIRECTORYNODE
_DIRECTORY.fields_by_name['symlinks'].message_type = _SYMLINKNODE
+_DIRECTORY.fields_by_name['node_properties'].message_type = _NODEPROPERTY
_FILENODE.fields_by_name['digest'].message_type = _DIGEST
+_FILENODE.fields_by_name['node_properties'].message_type = _NODEPROPERTY
_DIRECTORYNODE.fields_by_name['digest'].message_type = _DIGEST
+_SYMLINKNODE.fields_by_name['node_properties'].message_type = _NODEPROPERTY
_EXECUTEDACTIONMETADATA.fields_by_name['queued_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_EXECUTEDACTIONMETADATA.fields_by_name['worker_start_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_EXECUTEDACTIONMETADATA.fields_by_name['worker_completed_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
@@ -2094,14 +2364,19 @@ _EXECUTEDACTIONMETADATA.fields_by_name['execution_completed_timestamp'].message_
_EXECUTEDACTIONMETADATA.fields_by_name['output_upload_start_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_EXECUTEDACTIONMETADATA.fields_by_name['output_upload_completed_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_ACTIONRESULT.fields_by_name['output_files'].message_type = _OUTPUTFILE
+_ACTIONRESULT.fields_by_name['output_file_symlinks'].message_type = _OUTPUTSYMLINK
+_ACTIONRESULT.fields_by_name['output_symlinks'].message_type = _OUTPUTSYMLINK
_ACTIONRESULT.fields_by_name['output_directories'].message_type = _OUTPUTDIRECTORY
+_ACTIONRESULT.fields_by_name['output_directory_symlinks'].message_type = _OUTPUTSYMLINK
_ACTIONRESULT.fields_by_name['stdout_digest'].message_type = _DIGEST
_ACTIONRESULT.fields_by_name['stderr_digest'].message_type = _DIGEST
_ACTIONRESULT.fields_by_name['execution_metadata'].message_type = _EXECUTEDACTIONMETADATA
_OUTPUTFILE.fields_by_name['digest'].message_type = _DIGEST
+_OUTPUTFILE.fields_by_name['node_properties'].message_type = _NODEPROPERTY
_TREE.fields_by_name['root'].message_type = _DIRECTORY
_TREE.fields_by_name['children'].message_type = _DIRECTORY
_OUTPUTDIRECTORY.fields_by_name['tree_digest'].message_type = _DIGEST
+_OUTPUTSYMLINK.fields_by_name['node_properties'].message_type = _NODEPROPERTY
_EXECUTEREQUEST.fields_by_name['action_digest'].message_type = _DIGEST
_EXECUTEREQUEST.fields_by_name['execution_policy'].message_type = _EXECUTIONPOLICY
_EXECUTEREQUEST.fields_by_name['results_cache_policy'].message_type = _RESULTSCACHEPOLICY
@@ -2111,9 +2386,9 @@ _EXECUTERESPONSE_SERVERLOGSENTRY.containing_type = _EXECUTERESPONSE
_EXECUTERESPONSE.fields_by_name['result'].message_type = _ACTIONRESULT
_EXECUTERESPONSE.fields_by_name['status'].message_type = google_dot_rpc_dot_status__pb2._STATUS
_EXECUTERESPONSE.fields_by_name['server_logs'].message_type = _EXECUTERESPONSE_SERVERLOGSENTRY
-_EXECUTEOPERATIONMETADATA.fields_by_name['stage'].enum_type = _EXECUTEOPERATIONMETADATA_STAGE
+_EXECUTIONSTAGE_VALUE.containing_type = _EXECUTIONSTAGE
+_EXECUTEOPERATIONMETADATA.fields_by_name['stage'].enum_type = _EXECUTIONSTAGE_VALUE
_EXECUTEOPERATIONMETADATA.fields_by_name['action_digest'].message_type = _DIGEST
-_EXECUTEOPERATIONMETADATA_STAGE.containing_type = _EXECUTEOPERATIONMETADATA
_GETACTIONRESULTREQUEST.fields_by_name['action_digest'].message_type = _DIGEST
_UPDATEACTIONRESULTREQUEST.fields_by_name['action_digest'].message_type = _DIGEST
_UPDATEACTIONRESULTREQUEST.fields_by_name['action_result'].message_type = _ACTIONRESULT
@@ -2139,20 +2414,22 @@ _SERVERCAPABILITIES.fields_by_name['execution_capabilities'].message_type = _EXE
_SERVERCAPABILITIES.fields_by_name['deprecated_api_version'].message_type = build_dot_bazel_dot_semver_dot_semver__pb2._SEMVER
_SERVERCAPABILITIES.fields_by_name['low_api_version'].message_type = build_dot_bazel_dot_semver_dot_semver__pb2._SEMVER
_SERVERCAPABILITIES.fields_by_name['high_api_version'].message_type = build_dot_bazel_dot_semver_dot_semver__pb2._SEMVER
+_DIGESTFUNCTION_VALUE.containing_type = _DIGESTFUNCTION
_PRIORITYCAPABILITIES_PRIORITYRANGE.containing_type = _PRIORITYCAPABILITIES
_PRIORITYCAPABILITIES.fields_by_name['priorities'].message_type = _PRIORITYCAPABILITIES_PRIORITYRANGE
-_CACHECAPABILITIES.fields_by_name['digest_function'].enum_type = _DIGESTFUNCTION
+_SYMLINKABSOLUTEPATHSTRATEGY_VALUE.containing_type = _SYMLINKABSOLUTEPATHSTRATEGY
+_CACHECAPABILITIES.fields_by_name['digest_function'].enum_type = _DIGESTFUNCTION_VALUE
_CACHECAPABILITIES.fields_by_name['action_cache_update_capabilities'].message_type = _ACTIONCACHEUPDATECAPABILITIES
_CACHECAPABILITIES.fields_by_name['cache_priority_capabilities'].message_type = _PRIORITYCAPABILITIES
-_CACHECAPABILITIES.fields_by_name['symlink_absolute_path_strategy'].enum_type = _CACHECAPABILITIES_SYMLINKABSOLUTEPATHSTRATEGY
-_CACHECAPABILITIES_SYMLINKABSOLUTEPATHSTRATEGY.containing_type = _CACHECAPABILITIES
-_EXECUTIONCAPABILITIES.fields_by_name['digest_function'].enum_type = _DIGESTFUNCTION
+_CACHECAPABILITIES.fields_by_name['symlink_absolute_path_strategy'].enum_type = _SYMLINKABSOLUTEPATHSTRATEGY_VALUE
+_EXECUTIONCAPABILITIES.fields_by_name['digest_function'].enum_type = _DIGESTFUNCTION_VALUE
_EXECUTIONCAPABILITIES.fields_by_name['execution_priority_capabilities'].message_type = _PRIORITYCAPABILITIES
_REQUESTMETADATA.fields_by_name['tool_details'].message_type = _TOOLDETAILS
DESCRIPTOR.message_types_by_name['Action'] = _ACTION
DESCRIPTOR.message_types_by_name['Command'] = _COMMAND
DESCRIPTOR.message_types_by_name['Platform'] = _PLATFORM
DESCRIPTOR.message_types_by_name['Directory'] = _DIRECTORY
+DESCRIPTOR.message_types_by_name['NodeProperty'] = _NODEPROPERTY
DESCRIPTOR.message_types_by_name['FileNode'] = _FILENODE
DESCRIPTOR.message_types_by_name['DirectoryNode'] = _DIRECTORYNODE
DESCRIPTOR.message_types_by_name['SymlinkNode'] = _SYMLINKNODE
@@ -2162,11 +2439,13 @@ DESCRIPTOR.message_types_by_name['ActionResult'] = _ACTIONRESULT
DESCRIPTOR.message_types_by_name['OutputFile'] = _OUTPUTFILE
DESCRIPTOR.message_types_by_name['Tree'] = _TREE
DESCRIPTOR.message_types_by_name['OutputDirectory'] = _OUTPUTDIRECTORY
+DESCRIPTOR.message_types_by_name['OutputSymlink'] = _OUTPUTSYMLINK
DESCRIPTOR.message_types_by_name['ExecutionPolicy'] = _EXECUTIONPOLICY
DESCRIPTOR.message_types_by_name['ResultsCachePolicy'] = _RESULTSCACHEPOLICY
DESCRIPTOR.message_types_by_name['ExecuteRequest'] = _EXECUTEREQUEST
DESCRIPTOR.message_types_by_name['LogFile'] = _LOGFILE
DESCRIPTOR.message_types_by_name['ExecuteResponse'] = _EXECUTERESPONSE
+DESCRIPTOR.message_types_by_name['ExecutionStage'] = _EXECUTIONSTAGE
DESCRIPTOR.message_types_by_name['ExecuteOperationMetadata'] = _EXECUTEOPERATIONMETADATA
DESCRIPTOR.message_types_by_name['WaitExecutionRequest'] = _WAITEXECUTIONREQUEST
DESCRIPTOR.message_types_by_name['GetActionResultRequest'] = _GETACTIONRESULTREQUEST
@@ -2181,13 +2460,14 @@ DESCRIPTOR.message_types_by_name['GetTreeRequest'] = _GETTREEREQUEST
DESCRIPTOR.message_types_by_name['GetTreeResponse'] = _GETTREERESPONSE
DESCRIPTOR.message_types_by_name['GetCapabilitiesRequest'] = _GETCAPABILITIESREQUEST
DESCRIPTOR.message_types_by_name['ServerCapabilities'] = _SERVERCAPABILITIES
+DESCRIPTOR.message_types_by_name['DigestFunction'] = _DIGESTFUNCTION
DESCRIPTOR.message_types_by_name['ActionCacheUpdateCapabilities'] = _ACTIONCACHEUPDATECAPABILITIES
DESCRIPTOR.message_types_by_name['PriorityCapabilities'] = _PRIORITYCAPABILITIES
+DESCRIPTOR.message_types_by_name['SymlinkAbsolutePathStrategy'] = _SYMLINKABSOLUTEPATHSTRATEGY
DESCRIPTOR.message_types_by_name['CacheCapabilities'] = _CACHECAPABILITIES
DESCRIPTOR.message_types_by_name['ExecutionCapabilities'] = _EXECUTIONCAPABILITIES
DESCRIPTOR.message_types_by_name['ToolDetails'] = _TOOLDETAILS
DESCRIPTOR.message_types_by_name['RequestMetadata'] = _REQUESTMETADATA
-DESCRIPTOR.enum_types_by_name['DigestFunction'] = _DIGESTFUNCTION
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Action = _reflection.GeneratedProtocolMessageType('Action', (_message.Message,), {
@@ -2234,6 +2514,13 @@ Directory = _reflection.GeneratedProtocolMessageType('Directory', (_message.Mess
})
_sym_db.RegisterMessage(Directory)
+NodeProperty = _reflection.GeneratedProtocolMessageType('NodeProperty', (_message.Message,), {
+ 'DESCRIPTOR' : _NODEPROPERTY,
+ '__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
+ # @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.NodeProperty)
+ })
+_sym_db.RegisterMessage(NodeProperty)
+
FileNode = _reflection.GeneratedProtocolMessageType('FileNode', (_message.Message,), {
'DESCRIPTOR' : _FILENODE,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
@@ -2297,6 +2584,13 @@ OutputDirectory = _reflection.GeneratedProtocolMessageType('OutputDirectory', (_
})
_sym_db.RegisterMessage(OutputDirectory)
+OutputSymlink = _reflection.GeneratedProtocolMessageType('OutputSymlink', (_message.Message,), {
+ 'DESCRIPTOR' : _OUTPUTSYMLINK,
+ '__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
+ # @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.OutputSymlink)
+ })
+_sym_db.RegisterMessage(OutputSymlink)
+
ExecutionPolicy = _reflection.GeneratedProtocolMessageType('ExecutionPolicy', (_message.Message,), {
'DESCRIPTOR' : _EXECUTIONPOLICY,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
@@ -2340,6 +2634,13 @@ ExecuteResponse = _reflection.GeneratedProtocolMessageType('ExecuteResponse', (_
_sym_db.RegisterMessage(ExecuteResponse)
_sym_db.RegisterMessage(ExecuteResponse.ServerLogsEntry)
+ExecutionStage = _reflection.GeneratedProtocolMessageType('ExecutionStage', (_message.Message,), {
+ 'DESCRIPTOR' : _EXECUTIONSTAGE,
+ '__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
+ # @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.ExecutionStage)
+ })
+_sym_db.RegisterMessage(ExecutionStage)
+
ExecuteOperationMetadata = _reflection.GeneratedProtocolMessageType('ExecuteOperationMetadata', (_message.Message,), {
'DESCRIPTOR' : _EXECUTEOPERATIONMETADATA,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
@@ -2462,6 +2763,13 @@ ServerCapabilities = _reflection.GeneratedProtocolMessageType('ServerCapabilitie
})
_sym_db.RegisterMessage(ServerCapabilities)
+DigestFunction = _reflection.GeneratedProtocolMessageType('DigestFunction', (_message.Message,), {
+ 'DESCRIPTOR' : _DIGESTFUNCTION,
+ '__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
+ # @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.DigestFunction)
+ })
+_sym_db.RegisterMessage(DigestFunction)
+
ActionCacheUpdateCapabilities = _reflection.GeneratedProtocolMessageType('ActionCacheUpdateCapabilities', (_message.Message,), {
'DESCRIPTOR' : _ACTIONCACHEUPDATECAPABILITIES,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
@@ -2484,6 +2792,13 @@ PriorityCapabilities = _reflection.GeneratedProtocolMessageType('PriorityCapabil
_sym_db.RegisterMessage(PriorityCapabilities)
_sym_db.RegisterMessage(PriorityCapabilities.PriorityRange)
+SymlinkAbsolutePathStrategy = _reflection.GeneratedProtocolMessageType('SymlinkAbsolutePathStrategy', (_message.Message,), {
+ 'DESCRIPTOR' : _SYMLINKABSOLUTEPATHSTRATEGY,
+ '__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
+ # @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.SymlinkAbsolutePathStrategy)
+ })
+_sym_db.RegisterMessage(SymlinkAbsolutePathStrategy)
+
CacheCapabilities = _reflection.GeneratedProtocolMessageType('CacheCapabilities', (_message.Message,), {
'DESCRIPTOR' : _CACHECAPABILITIES,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
@@ -2522,8 +2837,8 @@ _EXECUTION = _descriptor.ServiceDescriptor(
file=DESCRIPTOR,
index=0,
serialized_options=None,
- serialized_start=7276,
- serialized_end=7589,
+ serialized_start=8223,
+ serialized_end=8536,
methods=[
_descriptor.MethodDescriptor(
name='Execute',
@@ -2555,8 +2870,8 @@ _ACTIONCACHE = _descriptor.ServiceDescriptor(
file=DESCRIPTOR,
index=1,
serialized_options=None,
- serialized_start=7592,
- serialized_end=8062,
+ serialized_start=8539,
+ serialized_end=9009,
methods=[
_descriptor.MethodDescriptor(
name='GetActionResult',
@@ -2588,8 +2903,8 @@ _CONTENTADDRESSABLESTORAGE = _descriptor.ServiceDescriptor(
file=DESCRIPTOR,
index=2,
serialized_options=None,
- serialized_start=8065,
- serialized_end=8860,
+ serialized_start=9012,
+ serialized_end=9807,
methods=[
_descriptor.MethodDescriptor(
name='FindMissingBlobs',
@@ -2639,8 +2954,8 @@ _CAPABILITIES = _descriptor.ServiceDescriptor(
file=DESCRIPTOR,
index=3,
serialized_options=None,
- serialized_start=8863,
- serialized_end=9052,
+ serialized_start=9810,
+ serialized_end=9999,
methods=[
_descriptor.MethodDescriptor(
name='GetCapabilities',
diff --git a/src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution_pb2_grpc.py b/src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution_pb2_grpc.py
index 3769a680d..5a30549ba 100644
--- a/src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution_pb2_grpc.py
+++ b/src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution_pb2_grpc.py
@@ -87,6 +87,7 @@ class ExecutionServicer(object):
action will be reported in the `status` field of the `ExecuteResponse`. The
server MUST NOT set the `error` field of the `Operation` proto.
The possible errors include:
+
* `INVALID_ARGUMENT`: One or more arguments are invalid.
* `FAILED_PRECONDITION`: One or more errors occurred in setting up the
action requested, such as a missing input or command or no worker being
@@ -99,6 +100,9 @@ class ExecutionServicer(object):
* `INTERNAL`: An internal error occurred in the execution engine or the
worker.
* `DEADLINE_EXCEEDED`: The execution timed out.
+ * `CANCELLED`: The operation was cancelled by the client. This status is
+ only possible if the server implements the Operations API CancelOperation
+ method, and it was called for the current execution.
In the case of a missing input or command, the server SHOULD additionally
send a [PreconditionFailure][google.rpc.PreconditionFailure] error detail
@@ -152,10 +156,7 @@ class ActionCacheStub(object):
The lifetime of entries in the action cache is implementation-specific, but
the server SHOULD assume that more recently used entries are more likely to
- be used again. Additionally, action cache implementations SHOULD ensure that
- any blobs referenced in the
- [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage]
- are still valid when returning a result.
+ be used again.
As with other services in the Remote Execution API, any call may return an
error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
@@ -192,10 +193,7 @@ class ActionCacheServicer(object):
The lifetime of entries in the action cache is implementation-specific, but
the server SHOULD assume that more recently used entries are more likely to
- be used again. Additionally, action cache implementations SHOULD ensure that
- any blobs referenced in the
- [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage]
- are still valid when returning a result.
+ be used again.
As with other services in the Remote Execution API, any call may return an
error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
@@ -206,7 +204,15 @@ class ActionCacheServicer(object):
def GetActionResult(self, request, context):
"""Retrieve a cached execution result.
+ Implementations SHOULD ensure that any blobs referenced from the
+ [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage]
+ are available at the time of returning the
+ [ActionResult][build.bazel.remote.execution.v2.ActionResult] and will be
+ for some period of time afterwards. The TTLs of the referenced blobs SHOULD be increased
+ if necessary and applicable.
+
Errors:
+
* `NOT_FOUND`: The requested `ActionResult` is not in the cache.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
@@ -216,11 +222,6 @@ class ActionCacheServicer(object):
def UpdateActionResult(self, request, context):
"""Upload a new execution result.
- This method is intended for servers which implement the distributed cache
- independently of the
- [Execution][build.bazel.remote.execution.v2.Execution] API. As a
- result, it is OPTIONAL for servers to implement.
-
In order to allow the server to perform access control based on the type of
action, and to assist with client debugging, the client MUST first upload
the [Action][build.bazel.remote.execution.v2.Execution] that produced the
@@ -229,7 +230,10 @@ class ActionCacheServicer(object):
`ContentAddressableStorage`.
Errors:
- * `NOT_IMPLEMENTED`: This method is not supported by the server.
+
+ * `INVALID_ARGUMENT`: One or more arguments are invalid.
+ * `FAILED_PRECONDITION`: One or more errors occurred in updating the
+ action result, such as a missing command or action.
* `RESOURCE_EXHAUSTED`: There is insufficient storage space to add the
entry to the cache.
"""
@@ -273,8 +277,8 @@ class ContentAddressableStorageStub(object):
hierarchy, which must also each be uploaded on their own.
For small file uploads the client should group them together and call
- [BatchUpdateBlobs][build.bazel.remote.execution.v2.ContentAddressableStorage.BatchUpdateBlobs]
- on chunks of no more than 10 MiB. For large uploads, the client must use the
+ [BatchUpdateBlobs][build.bazel.remote.execution.v2.ContentAddressableStorage.BatchUpdateBlobs].
+ For large uploads, the client must use the
[Write method][google.bytestream.ByteStream.Write] of the ByteStream API. The
`resource_name` is `{instance_name}/uploads/{uuid}/blobs/{hash}/{size}`,
where `instance_name` is as described in the next paragraph, `uuid` is a
@@ -296,6 +300,9 @@ class ContentAddressableStorageStub(object):
by the server. For servers which do not support multiple instances, then the
`instance_name` is the empty path and the leading slash is omitted, so that
the `resource_name` becomes `uploads/{uuid}/blobs/{hash}/{size}`.
+ To simplify parsing, a path segment cannot equal any of the following
+ keywords: `blobs`, `uploads`, `actions`, `actionResults`, `operations` and
+ `capabilities`.
When attempting an upload, if another client has already completed the upload
(which may occur in the middle of a single upload if another client uploads
@@ -369,8 +376,8 @@ class ContentAddressableStorageServicer(object):
hierarchy, which must also each be uploaded on their own.
For small file uploads the client should group them together and call
- [BatchUpdateBlobs][build.bazel.remote.execution.v2.ContentAddressableStorage.BatchUpdateBlobs]
- on chunks of no more than 10 MiB. For large uploads, the client must use the
+ [BatchUpdateBlobs][build.bazel.remote.execution.v2.ContentAddressableStorage.BatchUpdateBlobs].
+ For large uploads, the client must use the
[Write method][google.bytestream.ByteStream.Write] of the ByteStream API. The
`resource_name` is `{instance_name}/uploads/{uuid}/blobs/{hash}/{size}`,
where `instance_name` is as described in the next paragraph, `uuid` is a
@@ -392,6 +399,9 @@ class ContentAddressableStorageServicer(object):
by the server. For servers which do not support multiple instances, then the
`instance_name` is the empty path and the leading slash is omitted, so that
the `resource_name` becomes `uploads/{uuid}/blobs/{hash}/{size}`.
+ To simplify parsing, a path segment cannot equal any of the following
+ keywords: `blobs`, `uploads`, `actions`, `actionResults`, `operations` and
+ `capabilities`.
When attempting an upload, if another client has already completed the upload
(which may occur in the middle of a single upload if another client uploads
@@ -447,10 +457,12 @@ class ContentAddressableStorageServicer(object):
independently.
Errors:
+
* `INVALID_ARGUMENT`: The client attempted to upload more than the
server supported limit.
Individual requests may return the following errors, additionally:
+
* `RESOURCE_EXHAUSTED`: There is insufficient disk quota to store the blob.
* `INVALID_ARGUMENT`: The
[Digest][build.bazel.remote.execution.v2.Digest] does not match the
@@ -475,6 +487,7 @@ class ContentAddressableStorageServicer(object):
independently.
Errors:
+
* `INVALID_ARGUMENT`: The client attempted to read more than the
server supported limit.
@@ -506,6 +519,8 @@ class ContentAddressableStorageServicer(object):
If part of the tree is missing from the CAS, the server will return the
portion present and omit the rest.
+ Errors:
+
* `NOT_FOUND`: The requested tree root is not present in the CAS.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
@@ -573,7 +588,14 @@ class CapabilitiesServicer(object):
"""
def GetCapabilities(self, request, context):
- """GetCapabilities returns the server capabilities configuration.
+ """GetCapabilities returns the server capabilities configuration of the
+ remote endpoint.
+ Only the capabilities of the services supported by the endpoint will
+ be returned:
+ * Execution + CAS + Action Cache endpoints should return both
+ CacheCapabilities and ExecutionCapabilities.
+ * Execution only endpoints should return ExecutionCapabilities.
+ * CAS + Action Cache only endpoints should return CacheCapabilities.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
diff --git a/src/buildstream/_protos/build/buildgrid/local_cas.proto b/src/buildstream/_protos/build/buildgrid/local_cas.proto
index f2955f971..722ac70d0 100644
--- a/src/buildstream/_protos/build/buildgrid/local_cas.proto
+++ b/src/buildstream/_protos/build/buildgrid/local_cas.proto
@@ -60,10 +60,10 @@ service LocalContentAddressableStorage {
// in the local cache. Optionally, this will also fetch all blobs referenced
// by the `Directory` objects, equivalent to `FetchMissingBlobs`.
//
- // If part of the tree is missing from the CAS, the server will return the
- // portion present and omit the rest.
+ // If no remote CAS is available, this will check presence of the entire
+ // directory tree (and optionally also file blobs) in the local cache.
//
- // * `NOT_FOUND`: The requested tree root is not present in the CAS.
+ // * `NOT_FOUND`: The requested tree is not present in the CAS or incomplete.
rpc FetchTree(FetchTreeRequest) returns (FetchTreeResponse) {}
// Upload the entire directory tree from the local cache to a remote CAS.
@@ -139,7 +139,7 @@ message FetchMissingBlobsRequest {
// A response message for
// [LocalContentAddressableStorage.FetchMissingBlobs][build.buildgrid.v2.LocalContentAddressableStorage.FetchMissingBlobs].
message FetchMissingBlobsResponse {
- // A response corresponding to a single blob that the client tried to upload.
+ // A response corresponding to a single blob that the client tried to download.
message Response {
// The digest to which this response corresponds.
build.bazel.remote.execution.v2.Digest digest = 1;
@@ -281,6 +281,9 @@ message CaptureTreeRequest {
// This is a hint whether the blobs shall be uploaded to the remote CAS
// without first storing them in the local cache.
bool bypass_local_cache = 3;
+
+ // The properties of path(s) in the local filesystem to capture.
+ repeated string node_properties = 4;
}
// A response message for
@@ -320,6 +323,9 @@ message CaptureFilesRequest {
// This is a hint whether the blobs shall be uploaded to the remote CAS
// without first storing them in the local cache.
bool bypass_local_cache = 3;
+
+ // The properties of path(s) in the local filesystem to capture.
+ repeated string node_properties = 4;
}
// A response message for
@@ -335,6 +341,12 @@ message CaptureFilesResponse {
// The result of attempting to capture and upload the file.
google.rpc.Status status = 3;
+
+ // True if the captured file was executable, false otherwise.
+ bool is_executable = 4;
+
+ // The node properties of the captured file.
+ repeated build.bazel.remote.execution.v2.NodeProperty node_properties = 5;
}
// The responses to the requests.
diff --git a/src/buildstream/_protos/build/buildgrid/local_cas_pb2.py b/src/buildstream/_protos/build/buildgrid/local_cas_pb2.py
index 06df1385c..6be36622b 100644
--- a/src/buildstream/_protos/build/buildgrid/local_cas_pb2.py
+++ b/src/buildstream/_protos/build/buildgrid/local_cas_pb2.py
@@ -23,7 +23,7 @@ DESCRIPTOR = _descriptor.FileDescriptor(
package='build.buildgrid',
syntax='proto3',
serialized_options=None,
- serialized_pb=_b('\n\x1f\x62uild/buildgrid/local_cas.proto\x12\x0f\x62uild.buildgrid\x1a\x36\x62uild/bazel/remote/execution/v2/remote_execution.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/rpc/status.proto\"p\n\x18\x46\x65tchMissingBlobsRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12=\n\x0c\x62lob_digests\x18\x02 \x03(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\xcc\x01\n\x19\x46\x65tchMissingBlobsResponse\x12\x46\n\tresponses\x18\x01 \x03(\x0b\x32\x33.build.buildgrid.FetchMissingBlobsResponse.Response\x1ag\n\x08Response\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\"\n\x06status\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status\"q\n\x19UploadMissingBlobsRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12=\n\x0c\x62lob_digests\x18\x02 \x03(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\xce\x01\n\x1aUploadMissingBlobsResponse\x12G\n\tresponses\x18\x01 \x03(\x0b\x32\x34.build.buildgrid.UploadMissingBlobsResponse.Response\x1ag\n\x08Response\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\"\n\x06status\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status\"\x81\x01\n\x10\x46\x65tchTreeRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12<\n\x0broot_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x18\n\x10\x66\x65tch_file_blobs\x18\x03 \x01(\x08\"\x13\n\x11\x46\x65tchTreeResponse\"h\n\x11UploadTreeRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12<\n\x0broot_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\x14\n\x12UploadTreeResponse\"u\n\x10StageTreeRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12<\n\x0broot_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x0c\n\x04path\x18\x03 \x01(\t\"!\n\x11StageTreeResponse\x12\x0c\n\x04path\x18\x01 \x01(\t\"U\n\x12\x43\x61ptureTreeRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x03(\t\x12\x1a\n\x12\x62ypass_local_cache\x18\x03 
\x01(\x08\"\xd3\x01\n\x13\x43\x61ptureTreeResponse\x12@\n\tresponses\x18\x01 \x03(\x0b\x32-.build.buildgrid.CaptureTreeResponse.Response\x1az\n\x08Response\x12\x0c\n\x04path\x18\x01 \x01(\t\x12<\n\x0btree_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\"\n\x06status\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\"V\n\x13\x43\x61ptureFilesRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x03(\t\x12\x1a\n\x12\x62ypass_local_cache\x18\x03 \x01(\x08\"\xd0\x01\n\x14\x43\x61ptureFilesResponse\x12\x41\n\tresponses\x18\x01 \x03(\x0b\x32..build.buildgrid.CaptureFilesResponse.Response\x1au\n\x08Response\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\"\n\x06status\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\"\x83\x01\n\x1fGetInstanceNameForRemoteRequest\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x15\n\rinstance_name\x18\x02 \x01(\t\x12\x13\n\x0bserver_cert\x18\x03 \x01(\x0c\x12\x12\n\nclient_key\x18\x04 \x01(\x0c\x12\x13\n\x0b\x63lient_cert\x18\x05 \x01(\x0c\"9\n GetInstanceNameForRemoteResponse\x12\x15\n\rinstance_name\x18\x01 \x01(\t\"\x1a\n\x18GetLocalDiskUsageRequest\"D\n\x19GetLocalDiskUsageResponse\x12\x12\n\nsize_bytes\x18\x01 \x01(\x03\x12\x13\n\x0bquota_bytes\x18\x02 
\x01(\x03\x32\xb5\x07\n\x1eLocalContentAddressableStorage\x12l\n\x11\x46\x65tchMissingBlobs\x12).build.buildgrid.FetchMissingBlobsRequest\x1a*.build.buildgrid.FetchMissingBlobsResponse\"\x00\x12o\n\x12UploadMissingBlobs\x12*.build.buildgrid.UploadMissingBlobsRequest\x1a+.build.buildgrid.UploadMissingBlobsResponse\"\x00\x12T\n\tFetchTree\x12!.build.buildgrid.FetchTreeRequest\x1a\".build.buildgrid.FetchTreeResponse\"\x00\x12W\n\nUploadTree\x12\".build.buildgrid.UploadTreeRequest\x1a#.build.buildgrid.UploadTreeResponse\"\x00\x12X\n\tStageTree\x12!.build.buildgrid.StageTreeRequest\x1a\".build.buildgrid.StageTreeResponse\"\x00(\x01\x30\x01\x12Z\n\x0b\x43\x61ptureTree\x12#.build.buildgrid.CaptureTreeRequest\x1a$.build.buildgrid.CaptureTreeResponse\"\x00\x12]\n\x0c\x43\x61ptureFiles\x12$.build.buildgrid.CaptureFilesRequest\x1a%.build.buildgrid.CaptureFilesResponse\"\x00\x12\x81\x01\n\x18GetInstanceNameForRemote\x12\x30.build.buildgrid.GetInstanceNameForRemoteRequest\x1a\x31.build.buildgrid.GetInstanceNameForRemoteResponse\"\x00\x12l\n\x11GetLocalDiskUsage\x12).build.buildgrid.GetLocalDiskUsageRequest\x1a*.build.buildgrid.GetLocalDiskUsageResponse\"\x00\x62\x06proto3')
+ serialized_pb=_b('\n\x1f\x62uild/buildgrid/local_cas.proto\x12\x0f\x62uild.buildgrid\x1a\x36\x62uild/bazel/remote/execution/v2/remote_execution.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/rpc/status.proto\"p\n\x18\x46\x65tchMissingBlobsRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12=\n\x0c\x62lob_digests\x18\x02 \x03(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\xcc\x01\n\x19\x46\x65tchMissingBlobsResponse\x12\x46\n\tresponses\x18\x01 \x03(\x0b\x32\x33.build.buildgrid.FetchMissingBlobsResponse.Response\x1ag\n\x08Response\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\"\n\x06status\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status\"q\n\x19UploadMissingBlobsRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12=\n\x0c\x62lob_digests\x18\x02 \x03(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\xce\x01\n\x1aUploadMissingBlobsResponse\x12G\n\tresponses\x18\x01 \x03(\x0b\x32\x34.build.buildgrid.UploadMissingBlobsResponse.Response\x1ag\n\x08Response\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\"\n\x06status\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status\"\x81\x01\n\x10\x46\x65tchTreeRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12<\n\x0broot_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x18\n\x10\x66\x65tch_file_blobs\x18\x03 \x01(\x08\"\x13\n\x11\x46\x65tchTreeResponse\"h\n\x11UploadTreeRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12<\n\x0broot_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\x14\n\x12UploadTreeResponse\"u\n\x10StageTreeRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12<\n\x0broot_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x0c\n\x04path\x18\x03 \x01(\t\"!\n\x11StageTreeResponse\x12\x0c\n\x04path\x18\x01 \x01(\t\"n\n\x12\x43\x61ptureTreeRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x03(\t\x12\x1a\n\x12\x62ypass_local_cache\x18\x03 
\x01(\x08\x12\x17\n\x0fnode_properties\x18\x04 \x03(\t\"\xd3\x01\n\x13\x43\x61ptureTreeResponse\x12@\n\tresponses\x18\x01 \x03(\x0b\x32-.build.buildgrid.CaptureTreeResponse.Response\x1az\n\x08Response\x12\x0c\n\x04path\x18\x01 \x01(\t\x12<\n\x0btree_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\"\n\x06status\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\"o\n\x13\x43\x61ptureFilesRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x03(\t\x12\x1a\n\x12\x62ypass_local_cache\x18\x03 \x01(\x08\x12\x17\n\x0fnode_properties\x18\x04 \x03(\t\"\xb0\x02\n\x14\x43\x61ptureFilesResponse\x12\x41\n\tresponses\x18\x01 \x03(\x0b\x32..build.buildgrid.CaptureFilesResponse.Response\x1a\xd4\x01\n\x08Response\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\"\n\x06status\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12\x15\n\ris_executable\x18\x04 \x01(\x08\x12\x46\n\x0fnode_properties\x18\x05 \x03(\x0b\x32-.build.bazel.remote.execution.v2.NodeProperty\"\x83\x01\n\x1fGetInstanceNameForRemoteRequest\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x15\n\rinstance_name\x18\x02 \x01(\t\x12\x13\n\x0bserver_cert\x18\x03 \x01(\x0c\x12\x12\n\nclient_key\x18\x04 \x01(\x0c\x12\x13\n\x0b\x63lient_cert\x18\x05 \x01(\x0c\"9\n GetInstanceNameForRemoteResponse\x12\x15\n\rinstance_name\x18\x01 \x01(\t\"\x1a\n\x18GetLocalDiskUsageRequest\"D\n\x19GetLocalDiskUsageResponse\x12\x12\n\nsize_bytes\x18\x01 \x01(\x03\x12\x13\n\x0bquota_bytes\x18\x02 
\x01(\x03\x32\xb5\x07\n\x1eLocalContentAddressableStorage\x12l\n\x11\x46\x65tchMissingBlobs\x12).build.buildgrid.FetchMissingBlobsRequest\x1a*.build.buildgrid.FetchMissingBlobsResponse\"\x00\x12o\n\x12UploadMissingBlobs\x12*.build.buildgrid.UploadMissingBlobsRequest\x1a+.build.buildgrid.UploadMissingBlobsResponse\"\x00\x12T\n\tFetchTree\x12!.build.buildgrid.FetchTreeRequest\x1a\".build.buildgrid.FetchTreeResponse\"\x00\x12W\n\nUploadTree\x12\".build.buildgrid.UploadTreeRequest\x1a#.build.buildgrid.UploadTreeResponse\"\x00\x12X\n\tStageTree\x12!.build.buildgrid.StageTreeRequest\x1a\".build.buildgrid.StageTreeResponse\"\x00(\x01\x30\x01\x12Z\n\x0b\x43\x61ptureTree\x12#.build.buildgrid.CaptureTreeRequest\x1a$.build.buildgrid.CaptureTreeResponse\"\x00\x12]\n\x0c\x43\x61ptureFiles\x12$.build.buildgrid.CaptureFilesRequest\x1a%.build.buildgrid.CaptureFilesResponse\"\x00\x12\x81\x01\n\x18GetInstanceNameForRemote\x12\x30.build.buildgrid.GetInstanceNameForRemoteRequest\x1a\x31.build.buildgrid.GetInstanceNameForRemoteResponse\"\x00\x12l\n\x11GetLocalDiskUsage\x12).build.buildgrid.GetLocalDiskUsageRequest\x1a*.build.buildgrid.GetLocalDiskUsageResponse\"\x00\x62\x06proto3')
,
dependencies=[build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,])
@@ -477,6 +477,13 @@ _CAPTURETREEREQUEST = _descriptor.Descriptor(
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='node_properties', full_name='build.buildgrid.CaptureTreeRequest.node_properties', index=3,
+ number=4, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -490,7 +497,7 @@ _CAPTURETREEREQUEST = _descriptor.Descriptor(
oneofs=[
],
serialized_start=1243,
- serialized_end=1328,
+ serialized_end=1353,
)
@@ -534,8 +541,8 @@ _CAPTURETREERESPONSE_RESPONSE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=1420,
- serialized_end=1542,
+ serialized_start=1445,
+ serialized_end=1567,
)
_CAPTURETREERESPONSE = _descriptor.Descriptor(
@@ -564,8 +571,8 @@ _CAPTURETREERESPONSE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=1331,
- serialized_end=1542,
+ serialized_start=1356,
+ serialized_end=1567,
)
@@ -597,6 +604,13 @@ _CAPTUREFILESREQUEST = _descriptor.Descriptor(
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='node_properties', full_name='build.buildgrid.CaptureFilesRequest.node_properties', index=3,
+ number=4, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -609,8 +623,8 @@ _CAPTUREFILESREQUEST = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=1544,
- serialized_end=1630,
+ serialized_start=1569,
+ serialized_end=1680,
)
@@ -642,6 +656,20 @@ _CAPTUREFILESRESPONSE_RESPONSE = _descriptor.Descriptor(
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='is_executable', full_name='build.buildgrid.CaptureFilesResponse.Response.is_executable', index=3,
+ number=4, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='node_properties', full_name='build.buildgrid.CaptureFilesResponse.Response.node_properties', index=4,
+ number=5, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -654,8 +682,8 @@ _CAPTUREFILESRESPONSE_RESPONSE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=1724,
- serialized_end=1841,
+ serialized_start=1775,
+ serialized_end=1987,
)
_CAPTUREFILESRESPONSE = _descriptor.Descriptor(
@@ -684,8 +712,8 @@ _CAPTUREFILESRESPONSE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=1633,
- serialized_end=1841,
+ serialized_start=1683,
+ serialized_end=1987,
)
@@ -743,8 +771,8 @@ _GETINSTANCENAMEFORREMOTEREQUEST = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=1844,
- serialized_end=1975,
+ serialized_start=1990,
+ serialized_end=2121,
)
@@ -774,8 +802,8 @@ _GETINSTANCENAMEFORREMOTERESPONSE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=1977,
- serialized_end=2034,
+ serialized_start=2123,
+ serialized_end=2180,
)
@@ -798,8 +826,8 @@ _GETLOCALDISKUSAGEREQUEST = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=2036,
- serialized_end=2062,
+ serialized_start=2182,
+ serialized_end=2208,
)
@@ -836,8 +864,8 @@ _GETLOCALDISKUSAGERESPONSE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
- serialized_start=2064,
- serialized_end=2132,
+ serialized_start=2210,
+ serialized_end=2278,
)
_FETCHMISSINGBLOBSREQUEST.fields_by_name['blob_digests'].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
@@ -859,6 +887,7 @@ _CAPTURETREERESPONSE_RESPONSE.containing_type = _CAPTURETREERESPONSE
_CAPTURETREERESPONSE.fields_by_name['responses'].message_type = _CAPTURETREERESPONSE_RESPONSE
_CAPTUREFILESRESPONSE_RESPONSE.fields_by_name['digest'].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
_CAPTUREFILESRESPONSE_RESPONSE.fields_by_name['status'].message_type = google_dot_rpc_dot_status__pb2._STATUS
+_CAPTUREFILESRESPONSE_RESPONSE.fields_by_name['node_properties'].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._NODEPROPERTY
_CAPTUREFILESRESPONSE_RESPONSE.containing_type = _CAPTUREFILESRESPONSE
_CAPTUREFILESRESPONSE.fields_by_name['responses'].message_type = _CAPTUREFILESRESPONSE_RESPONSE
DESCRIPTOR.message_types_by_name['FetchMissingBlobsRequest'] = _FETCHMISSINGBLOBSREQUEST
@@ -881,162 +910,162 @@ DESCRIPTOR.message_types_by_name['GetLocalDiskUsageRequest'] = _GETLOCALDISKUSAG
DESCRIPTOR.message_types_by_name['GetLocalDiskUsageResponse'] = _GETLOCALDISKUSAGERESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-FetchMissingBlobsRequest = _reflection.GeneratedProtocolMessageType('FetchMissingBlobsRequest', (_message.Message,), dict(
- DESCRIPTOR = _FETCHMISSINGBLOBSREQUEST,
- __module__ = 'build.buildgrid.local_cas_pb2'
+FetchMissingBlobsRequest = _reflection.GeneratedProtocolMessageType('FetchMissingBlobsRequest', (_message.Message,), {
+ 'DESCRIPTOR' : _FETCHMISSINGBLOBSREQUEST,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.FetchMissingBlobsRequest)
- ))
+ })
_sym_db.RegisterMessage(FetchMissingBlobsRequest)
-FetchMissingBlobsResponse = _reflection.GeneratedProtocolMessageType('FetchMissingBlobsResponse', (_message.Message,), dict(
+FetchMissingBlobsResponse = _reflection.GeneratedProtocolMessageType('FetchMissingBlobsResponse', (_message.Message,), {
- Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), dict(
- DESCRIPTOR = _FETCHMISSINGBLOBSRESPONSE_RESPONSE,
- __module__ = 'build.buildgrid.local_cas_pb2'
+ 'Response' : _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
+ 'DESCRIPTOR' : _FETCHMISSINGBLOBSRESPONSE_RESPONSE,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.FetchMissingBlobsResponse.Response)
- ))
+ })
,
- DESCRIPTOR = _FETCHMISSINGBLOBSRESPONSE,
- __module__ = 'build.buildgrid.local_cas_pb2'
+ 'DESCRIPTOR' : _FETCHMISSINGBLOBSRESPONSE,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.FetchMissingBlobsResponse)
- ))
+ })
_sym_db.RegisterMessage(FetchMissingBlobsResponse)
_sym_db.RegisterMessage(FetchMissingBlobsResponse.Response)
-UploadMissingBlobsRequest = _reflection.GeneratedProtocolMessageType('UploadMissingBlobsRequest', (_message.Message,), dict(
- DESCRIPTOR = _UPLOADMISSINGBLOBSREQUEST,
- __module__ = 'build.buildgrid.local_cas_pb2'
+UploadMissingBlobsRequest = _reflection.GeneratedProtocolMessageType('UploadMissingBlobsRequest', (_message.Message,), {
+ 'DESCRIPTOR' : _UPLOADMISSINGBLOBSREQUEST,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.UploadMissingBlobsRequest)
- ))
+ })
_sym_db.RegisterMessage(UploadMissingBlobsRequest)
-UploadMissingBlobsResponse = _reflection.GeneratedProtocolMessageType('UploadMissingBlobsResponse', (_message.Message,), dict(
+UploadMissingBlobsResponse = _reflection.GeneratedProtocolMessageType('UploadMissingBlobsResponse', (_message.Message,), {
- Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), dict(
- DESCRIPTOR = _UPLOADMISSINGBLOBSRESPONSE_RESPONSE,
- __module__ = 'build.buildgrid.local_cas_pb2'
+ 'Response' : _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
+ 'DESCRIPTOR' : _UPLOADMISSINGBLOBSRESPONSE_RESPONSE,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.UploadMissingBlobsResponse.Response)
- ))
+ })
,
- DESCRIPTOR = _UPLOADMISSINGBLOBSRESPONSE,
- __module__ = 'build.buildgrid.local_cas_pb2'
+ 'DESCRIPTOR' : _UPLOADMISSINGBLOBSRESPONSE,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.UploadMissingBlobsResponse)
- ))
+ })
_sym_db.RegisterMessage(UploadMissingBlobsResponse)
_sym_db.RegisterMessage(UploadMissingBlobsResponse.Response)
-FetchTreeRequest = _reflection.GeneratedProtocolMessageType('FetchTreeRequest', (_message.Message,), dict(
- DESCRIPTOR = _FETCHTREEREQUEST,
- __module__ = 'build.buildgrid.local_cas_pb2'
+FetchTreeRequest = _reflection.GeneratedProtocolMessageType('FetchTreeRequest', (_message.Message,), {
+ 'DESCRIPTOR' : _FETCHTREEREQUEST,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.FetchTreeRequest)
- ))
+ })
_sym_db.RegisterMessage(FetchTreeRequest)
-FetchTreeResponse = _reflection.GeneratedProtocolMessageType('FetchTreeResponse', (_message.Message,), dict(
- DESCRIPTOR = _FETCHTREERESPONSE,
- __module__ = 'build.buildgrid.local_cas_pb2'
+FetchTreeResponse = _reflection.GeneratedProtocolMessageType('FetchTreeResponse', (_message.Message,), {
+ 'DESCRIPTOR' : _FETCHTREERESPONSE,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.FetchTreeResponse)
- ))
+ })
_sym_db.RegisterMessage(FetchTreeResponse)
-UploadTreeRequest = _reflection.GeneratedProtocolMessageType('UploadTreeRequest', (_message.Message,), dict(
- DESCRIPTOR = _UPLOADTREEREQUEST,
- __module__ = 'build.buildgrid.local_cas_pb2'
+UploadTreeRequest = _reflection.GeneratedProtocolMessageType('UploadTreeRequest', (_message.Message,), {
+ 'DESCRIPTOR' : _UPLOADTREEREQUEST,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.UploadTreeRequest)
- ))
+ })
_sym_db.RegisterMessage(UploadTreeRequest)
-UploadTreeResponse = _reflection.GeneratedProtocolMessageType('UploadTreeResponse', (_message.Message,), dict(
- DESCRIPTOR = _UPLOADTREERESPONSE,
- __module__ = 'build.buildgrid.local_cas_pb2'
+UploadTreeResponse = _reflection.GeneratedProtocolMessageType('UploadTreeResponse', (_message.Message,), {
+ 'DESCRIPTOR' : _UPLOADTREERESPONSE,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.UploadTreeResponse)
- ))
+ })
_sym_db.RegisterMessage(UploadTreeResponse)
-StageTreeRequest = _reflection.GeneratedProtocolMessageType('StageTreeRequest', (_message.Message,), dict(
- DESCRIPTOR = _STAGETREEREQUEST,
- __module__ = 'build.buildgrid.local_cas_pb2'
+StageTreeRequest = _reflection.GeneratedProtocolMessageType('StageTreeRequest', (_message.Message,), {
+ 'DESCRIPTOR' : _STAGETREEREQUEST,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.StageTreeRequest)
- ))
+ })
_sym_db.RegisterMessage(StageTreeRequest)
-StageTreeResponse = _reflection.GeneratedProtocolMessageType('StageTreeResponse', (_message.Message,), dict(
- DESCRIPTOR = _STAGETREERESPONSE,
- __module__ = 'build.buildgrid.local_cas_pb2'
+StageTreeResponse = _reflection.GeneratedProtocolMessageType('StageTreeResponse', (_message.Message,), {
+ 'DESCRIPTOR' : _STAGETREERESPONSE,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.StageTreeResponse)
- ))
+ })
_sym_db.RegisterMessage(StageTreeResponse)
-CaptureTreeRequest = _reflection.GeneratedProtocolMessageType('CaptureTreeRequest', (_message.Message,), dict(
- DESCRIPTOR = _CAPTURETREEREQUEST,
- __module__ = 'build.buildgrid.local_cas_pb2'
+CaptureTreeRequest = _reflection.GeneratedProtocolMessageType('CaptureTreeRequest', (_message.Message,), {
+ 'DESCRIPTOR' : _CAPTURETREEREQUEST,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.CaptureTreeRequest)
- ))
+ })
_sym_db.RegisterMessage(CaptureTreeRequest)
-CaptureTreeResponse = _reflection.GeneratedProtocolMessageType('CaptureTreeResponse', (_message.Message,), dict(
+CaptureTreeResponse = _reflection.GeneratedProtocolMessageType('CaptureTreeResponse', (_message.Message,), {
- Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), dict(
- DESCRIPTOR = _CAPTURETREERESPONSE_RESPONSE,
- __module__ = 'build.buildgrid.local_cas_pb2'
+ 'Response' : _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
+ 'DESCRIPTOR' : _CAPTURETREERESPONSE_RESPONSE,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.CaptureTreeResponse.Response)
- ))
+ })
,
- DESCRIPTOR = _CAPTURETREERESPONSE,
- __module__ = 'build.buildgrid.local_cas_pb2'
+ 'DESCRIPTOR' : _CAPTURETREERESPONSE,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.CaptureTreeResponse)
- ))
+ })
_sym_db.RegisterMessage(CaptureTreeResponse)
_sym_db.RegisterMessage(CaptureTreeResponse.Response)
-CaptureFilesRequest = _reflection.GeneratedProtocolMessageType('CaptureFilesRequest', (_message.Message,), dict(
- DESCRIPTOR = _CAPTUREFILESREQUEST,
- __module__ = 'build.buildgrid.local_cas_pb2'
+CaptureFilesRequest = _reflection.GeneratedProtocolMessageType('CaptureFilesRequest', (_message.Message,), {
+ 'DESCRIPTOR' : _CAPTUREFILESREQUEST,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.CaptureFilesRequest)
- ))
+ })
_sym_db.RegisterMessage(CaptureFilesRequest)
-CaptureFilesResponse = _reflection.GeneratedProtocolMessageType('CaptureFilesResponse', (_message.Message,), dict(
+CaptureFilesResponse = _reflection.GeneratedProtocolMessageType('CaptureFilesResponse', (_message.Message,), {
- Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), dict(
- DESCRIPTOR = _CAPTUREFILESRESPONSE_RESPONSE,
- __module__ = 'build.buildgrid.local_cas_pb2'
+ 'Response' : _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
+ 'DESCRIPTOR' : _CAPTUREFILESRESPONSE_RESPONSE,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.CaptureFilesResponse.Response)
- ))
+ })
,
- DESCRIPTOR = _CAPTUREFILESRESPONSE,
- __module__ = 'build.buildgrid.local_cas_pb2'
+ 'DESCRIPTOR' : _CAPTUREFILESRESPONSE,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.CaptureFilesResponse)
- ))
+ })
_sym_db.RegisterMessage(CaptureFilesResponse)
_sym_db.RegisterMessage(CaptureFilesResponse.Response)
-GetInstanceNameForRemoteRequest = _reflection.GeneratedProtocolMessageType('GetInstanceNameForRemoteRequest', (_message.Message,), dict(
- DESCRIPTOR = _GETINSTANCENAMEFORREMOTEREQUEST,
- __module__ = 'build.buildgrid.local_cas_pb2'
+GetInstanceNameForRemoteRequest = _reflection.GeneratedProtocolMessageType('GetInstanceNameForRemoteRequest', (_message.Message,), {
+ 'DESCRIPTOR' : _GETINSTANCENAMEFORREMOTEREQUEST,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.GetInstanceNameForRemoteRequest)
- ))
+ })
_sym_db.RegisterMessage(GetInstanceNameForRemoteRequest)
-GetInstanceNameForRemoteResponse = _reflection.GeneratedProtocolMessageType('GetInstanceNameForRemoteResponse', (_message.Message,), dict(
- DESCRIPTOR = _GETINSTANCENAMEFORREMOTERESPONSE,
- __module__ = 'build.buildgrid.local_cas_pb2'
+GetInstanceNameForRemoteResponse = _reflection.GeneratedProtocolMessageType('GetInstanceNameForRemoteResponse', (_message.Message,), {
+ 'DESCRIPTOR' : _GETINSTANCENAMEFORREMOTERESPONSE,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.GetInstanceNameForRemoteResponse)
- ))
+ })
_sym_db.RegisterMessage(GetInstanceNameForRemoteResponse)
-GetLocalDiskUsageRequest = _reflection.GeneratedProtocolMessageType('GetLocalDiskUsageRequest', (_message.Message,), dict(
- DESCRIPTOR = _GETLOCALDISKUSAGEREQUEST,
- __module__ = 'build.buildgrid.local_cas_pb2'
+GetLocalDiskUsageRequest = _reflection.GeneratedProtocolMessageType('GetLocalDiskUsageRequest', (_message.Message,), {
+ 'DESCRIPTOR' : _GETLOCALDISKUSAGEREQUEST,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.GetLocalDiskUsageRequest)
- ))
+ })
_sym_db.RegisterMessage(GetLocalDiskUsageRequest)
-GetLocalDiskUsageResponse = _reflection.GeneratedProtocolMessageType('GetLocalDiskUsageResponse', (_message.Message,), dict(
- DESCRIPTOR = _GETLOCALDISKUSAGERESPONSE,
- __module__ = 'build.buildgrid.local_cas_pb2'
+GetLocalDiskUsageResponse = _reflection.GeneratedProtocolMessageType('GetLocalDiskUsageResponse', (_message.Message,), {
+ 'DESCRIPTOR' : _GETLOCALDISKUSAGERESPONSE,
+ '__module__' : 'build.buildgrid.local_cas_pb2'
# @@protoc_insertion_point(class_scope:build.buildgrid.GetLocalDiskUsageResponse)
- ))
+ })
_sym_db.RegisterMessage(GetLocalDiskUsageResponse)
@@ -1047,8 +1076,8 @@ _LOCALCONTENTADDRESSABLESTORAGE = _descriptor.ServiceDescriptor(
file=DESCRIPTOR,
index=0,
serialized_options=None,
- serialized_start=2135,
- serialized_end=3084,
+ serialized_start=2281,
+ serialized_end=3230,
methods=[
_descriptor.MethodDescriptor(
name='FetchMissingBlobs',
diff --git a/src/buildstream/_protos/build/buildgrid/local_cas_pb2_grpc.py b/src/buildstream/_protos/build/buildgrid/local_cas_pb2_grpc.py
index 8dfaec7f3..68af4826c 100644
--- a/src/buildstream/_protos/build/buildgrid/local_cas_pb2_grpc.py
+++ b/src/buildstream/_protos/build/buildgrid/local_cas_pb2_grpc.py
@@ -113,10 +113,10 @@ class LocalContentAddressableStorageServicer(object):
in the local cache. Optionally, this will also fetch all blobs referenced
by the `Directory` objects, equivalent to `FetchMissingBlobs`.
- If part of the tree is missing from the CAS, the server will return the
- portion present and omit the rest.
+ If no remote CAS is available, this will check presence of the entire
+ directory tree (and optionally also file blobs) in the local cache.
- * `NOT_FOUND`: The requested tree root is not present in the CAS.
+ * `NOT_FOUND`: The requested tree is not present in the CAS or incomplete.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index fe7d36649..cb4dc5450 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -75,6 +75,7 @@ Class Reference
import os
import re
import stat
+import time
import copy
from collections import OrderedDict
import contextlib
@@ -91,7 +92,7 @@ from ._variables import Variables
from ._versions import BST_CORE_ARTIFACT_VERSION
from ._exceptions import BstError, LoadError, ImplError, SourceCacheError
from .exceptions import ErrorDomain, LoadErrorReason
-from .utils import FileListResult
+from .utils import FileListResult, BST_ARBITRARY_TIMESTAMP
from . import utils
from . import _cachekey
from . import _site
@@ -707,7 +708,7 @@ class Element(Plugin):
if update_mtimes:
copy_result = vstagedir.import_files(
- files_vdir, filter_callback=copy_filter, report_written=True, update_mtime=True
+ files_vdir, filter_callback=copy_filter, report_written=True, update_mtime=time.time()
)
result = result.combine(copy_result)
@@ -1466,11 +1467,10 @@ class Element(Plugin):
reason="import-source-files-fail",
)
+ # Set update_mtime to ensure deterministic mtime of sources at build time
with utils._deterministic_umask():
- vdirectory.import_files(import_dir)
+ vdirectory.import_files(import_dir, update_mtime=BST_ARBITRARY_TIMESTAMP)
- # Ensure deterministic mtime of sources at build time
- vdirectory.set_deterministic_mtime()
# Ensure deterministic owners of sources at build time
vdirectory.set_deterministic_user()
diff --git a/src/buildstream/plugins/sources/workspace.py b/src/buildstream/plugins/sources/workspace.py
index ce62f3aff..f1ad2eead 100644
--- a/src/buildstream/plugins/sources/workspace.py
+++ b/src/buildstream/plugins/sources/workspace.py
@@ -96,7 +96,7 @@ class WorkspaceSource(Source):
def stage(self, directory: Directory) -> None:
assert isinstance(directory, Directory)
with self.timed_activity("Staging local files"):
- result = directory.import_files(self.path)
+ result = directory.import_files(self.path, properties=["MTime"])
if result.overwritten or result.ignored:
raise SourceError(
diff --git a/src/buildstream/storage/_casbaseddirectory.py b/src/buildstream/storage/_casbaseddirectory.py
index 46984041e..624d071dd 100644
--- a/src/buildstream/storage/_casbaseddirectory.py
+++ b/src/buildstream/storage/_casbaseddirectory.py
@@ -29,13 +29,14 @@ See also: :ref:`sandboxing`.
import os
import stat
+import copy
import tarfile as tarfilelib
from io import StringIO
from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
from .directory import Directory, VirtualDirectoryError, _FileType
from ._filebaseddirectory import FileBasedDirectory
-from ..utils import FileListResult, BST_ARBITRARY_TIMESTAMP
+from ..utils import FileListResult, BST_ARBITRARY_TIMESTAMP, _get_file_mtimestamp
class IndexEntry:
@@ -50,7 +51,8 @@ class IndexEntry:
target=None,
is_executable=False,
buildstream_object=None,
- modified=False
+ modified=False,
+ node_properties=None
):
self.name = name
self.type = entrytype
@@ -59,6 +61,7 @@ class IndexEntry:
self.is_executable = is_executable
self.buildstream_object = buildstream_object
self.modified = modified
+ self.node_properties = copy.deepcopy(node_properties)
def get_directory(self, parent):
if not self.buildstream_object:
@@ -126,7 +129,11 @@ class CasBasedDirectory(Directory):
self.index[entry.name] = IndexEntry(entry.name, _FileType.DIRECTORY, digest=entry.digest)
for entry in pb2_directory.files:
self.index[entry.name] = IndexEntry(
- entry.name, _FileType.REGULAR_FILE, digest=entry.digest, is_executable=entry.is_executable
+ entry.name,
+ _FileType.REGULAR_FILE,
+ digest=entry.digest,
+ is_executable=entry.is_executable,
+ node_properties=list(entry.node_properties),
)
for entry in pb2_directory.symlinks:
self.index[entry.name] = IndexEntry(entry.name, _FileType.SYMLINK, target=entry.target)
@@ -150,11 +157,20 @@ class CasBasedDirectory(Directory):
return newdir
- def _add_file(self, basename, filename, modified=False, can_link=False):
+ def _add_file(self, basename, filename, modified=False, can_link=False, properties=None):
entry = IndexEntry(filename, _FileType.REGULAR_FILE, modified=modified or filename in self.index)
path = os.path.join(basename, filename)
entry.digest = self.cas_cache.add_object(path=path, link_directly=can_link)
entry.is_executable = os.access(path, os.X_OK)
+ properties = properties or []
+ # see https://github.com/bazelbuild/remote-apis/blob/master/build/bazel/remote/execution/v2/nodeproperties.md
+ # for supported node property specifications
+ entry.node_properties = []
+ if "MTime" in properties:
+ node_property = remote_execution_pb2.NodeProperty()
+ node_property.name = "MTime"
+ node_property.value = _get_file_mtimestamp(path)
+ entry.node_properties.append(node_property)
self.index[filename] = entry
self.__invalidate_digest()
@@ -333,6 +349,7 @@ class CasBasedDirectory(Directory):
digest=entry.digest,
is_executable=entry.is_executable,
modified=True,
+ node_properties=entry.node_properties,
)
self.__invalidate_digest()
else:
@@ -341,7 +358,14 @@ class CasBasedDirectory(Directory):
result.files_written.append(relative_pathname)
def import_files(
- self, external_pathspec, *, filter_callback=None, report_written=True, update_mtime=False, can_link=False
+ self,
+ external_pathspec,
+ *,
+ filter_callback=None,
+ report_written=True,
+ update_mtime=None,
+ can_link=False,
+ properties=None
):
""" See superclass Directory for arguments """
@@ -356,7 +380,7 @@ class CasBasedDirectory(Directory):
# content into this CasBasedDirectory using CAS-to-CAS import
# to write the report, handle possible conflicts (if the target
# directory is not empty) and apply the optional filter.
- digest = self.cas_cache.import_directory(external_pathspec)
+ digest = self.cas_cache.import_directory(external_pathspec, properties=properties)
external_pathspec = CasBasedDirectory(self.cas_cache, digest=digest)
assert isinstance(external_pathspec, CasBasedDirectory)
@@ -368,13 +392,14 @@ class CasBasedDirectory(Directory):
return result
- def import_single_file(self, external_pathspec):
+ def import_single_file(self, external_pathspec, properties=None):
result = FileListResult()
if self._check_replacement(os.path.basename(external_pathspec), os.path.dirname(external_pathspec), result):
self._add_file(
os.path.dirname(external_pathspec),
os.path.basename(external_pathspec),
modified=os.path.basename(external_pathspec) in result.overwritten,
+ properties=properties,
)
result.files_written.append(external_pathspec)
return result
@@ -639,6 +664,9 @@ class CasBasedDirectory(Directory):
filenode.name = name
filenode.digest.CopyFrom(entry.digest)
filenode.is_executable = entry.is_executable
+ if entry.node_properties:
+ node_properties = sorted(entry.node_properties, key=lambda prop: prop.name)
+ filenode.node_properties.extend(node_properties)
elif entry.type == _FileType.SYMLINK:
symlinknode = pb2_directory.symlinks.add()
symlinknode.name = name
diff --git a/src/buildstream/storage/_filebaseddirectory.py b/src/buildstream/storage/_filebaseddirectory.py
index 222b47979..7b745f777 100644
--- a/src/buildstream/storage/_filebaseddirectory.py
+++ b/src/buildstream/storage/_filebaseddirectory.py
@@ -30,7 +30,6 @@ See also: :ref:`sandboxing`.
import os
import shutil
import stat
-import time
from .directory import Directory, VirtualDirectoryError, _FileType
from .. import utils
@@ -79,20 +78,29 @@ class FileBasedDirectory(Directory):
return current_dir
def import_files(
- self, external_pathspec, *, filter_callback=None, report_written=True, update_mtime=False, can_link=False
+ self,
+ external_pathspec,
+ *,
+ filter_callback=None,
+ report_written=True,
+ update_mtime=None,
+ can_link=False,
+ properties=None
):
""" See superclass Directory for arguments """
from ._casbaseddirectory import CasBasedDirectory # pylint: disable=cyclic-import
if isinstance(external_pathspec, CasBasedDirectory):
- if can_link and not update_mtime:
+ if can_link:
actionfunc = utils.safe_link
else:
actionfunc = utils.safe_copy
import_result = FileListResult()
- self._import_files_from_cas(external_pathspec, actionfunc, filter_callback, result=import_result)
+ self._import_files_from_cas(
+ external_pathspec, actionfunc, filter_callback, update_mtime=update_mtime, result=import_result,
+ )
else:
if isinstance(external_pathspec, Directory):
source_directory = external_pathspec.external_directory
@@ -115,15 +123,13 @@ class FileBasedDirectory(Directory):
ignore_missing=False,
report_written=report_written,
)
+ if update_mtime:
+ for f in import_result.files_written:
+ os.utime(os.path.join(self.external_directory, f), times=(update_mtime, update_mtime))
- if update_mtime:
- cur_time = time.time()
-
- for f in import_result.files_written:
- os.utime(os.path.join(self.external_directory, f), times=(cur_time, cur_time))
return import_result
- def import_single_file(self, external_pathspec):
+ def import_single_file(self, external_pathspec, properties=None):
dstpath = os.path.join(self.external_directory, os.path.basename(external_pathspec))
result = FileListResult()
if os.path.exists(dstpath):
@@ -241,7 +247,9 @@ class FileBasedDirectory(Directory):
else:
return _FileType.SPECIAL_FILE
- def _import_files_from_cas(self, source_directory, actionfunc, filter_callback, *, path_prefix="", result):
+ def _import_files_from_cas(
+ self, source_directory, actionfunc, filter_callback, *, path_prefix="", update_mtime=None, result
+ ):
""" Import files from a CAS-based directory. """
for name, entry in source_directory.index.items():
@@ -266,7 +274,12 @@ class FileBasedDirectory(Directory):
)
dest_subdir._import_files_from_cas(
- src_subdir, actionfunc, filter_callback, path_prefix=relative_pathname, result=result
+ src_subdir,
+ actionfunc,
+ filter_callback,
+ path_prefix=relative_pathname,
+ result=result,
+ update_mtime=update_mtime,
)
if filter_callback and not filter_callback(relative_pathname):
@@ -289,7 +302,25 @@ class FileBasedDirectory(Directory):
if entry.type == _FileType.REGULAR_FILE:
src_path = source_directory.cas_cache.objpath(entry.digest)
- actionfunc(src_path, dest_path, result=result)
+
+ # fallback to copying if we require mtime support on this file
+ if update_mtime or entry.node_properties:
+ utils.safe_copy(src_path, dest_path, result=result)
+ mtime = update_mtime
+ # mtime property will override specified mtime
+ # see https://github.com/bazelbuild/remote-apis/blob/master/build/bazel/remote/execution/v2/nodeproperties.md
+ # for supported node property specifications
+ if entry.node_properties:
+ for prop in entry.node_properties:
+ if prop.name == "MTime" and prop.value:
+ mtime = utils._parse_timestamp(prop.value)
+ else:
+ raise ImplError("{} is not a supported node property.".format(prop.name))
+ if mtime:
+ utils._set_file_mtime(dest_path, mtime)
+ else:
+ actionfunc(src_path, dest_path, result=result)
+
if entry.is_executable:
os.chmod(
dest_path,
@@ -301,6 +332,7 @@ class FileBasedDirectory(Directory):
| stat.S_IROTH
| stat.S_IXOTH,
)
+
else:
assert entry.type == _FileType.SYMLINK
os.symlink(entry.target, dest_path)
diff --git a/src/buildstream/storage/directory.py b/src/buildstream/storage/directory.py
index 2d3dfd4da..f0aab7c10 100644
--- a/src/buildstream/storage/directory.py
+++ b/src/buildstream/storage/directory.py
@@ -32,7 +32,7 @@ See also: :ref:`sandboxing`.
"""
-from typing import Callable, Optional, Union
+from typing import Callable, Optional, Union, List
from .._exceptions import BstError
from ..exceptions import ErrorDomain
@@ -82,8 +82,9 @@ class Directory:
*,
filter_callback: Optional[Callable[[str], bool]] = None,
report_written: bool = True,
- update_mtime: bool = False,
- can_link: bool = False
+ update_mtime: Optional[float] = None,
+ can_link: bool = False,
+ properties: Optional[List[str]] = None
) -> FileListResult:
"""Imports some or all files from external_path into this directory.
@@ -98,12 +99,13 @@ class Directory:
written. Defaults to true. If false, only a list of
overwritten files is returned.
update_mtime: Update the access and modification time
- of each file copied to the current time.
+ of each file copied to the time specified in seconds.
can_link: Whether it's OK to create a hard link to the
original content, meaning the stored copy will change when the
original files change. Setting this doesn't guarantee hard
- links will be made. can_link will never be used if
- update_mtime is set.
+ links will be made.
+ properties: Optional list of strings representing file properties
+ to capture when importing.
Yields:
A report of files imported and overwritten.
@@ -112,7 +114,7 @@ class Directory:
raise NotImplementedError()
- def import_single_file(self, external_pathspec):
+ def import_single_file(self, external_pathspec, properties=None):
"""Imports a single file from an external path"""
raise NotImplementedError()
diff --git a/src/buildstream/testing/_sourcetests/source_determinism.py b/src/buildstream/testing/_sourcetests/source_determinism.py
index ed00c71ea..b834f3223 100644
--- a/src/buildstream/testing/_sourcetests/source_determinism.py
+++ b/src/buildstream/testing/_sourcetests/source_determinism.py
@@ -50,11 +50,10 @@ def create_test_directory(*path, mode=0o644):
@pytest.mark.integration
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
-@pytest.mark.skipif(
- HAVE_SANDBOX == "buildbox-run" and CASD_SEPARATE_USER,
- reason="Flaky due to timestamps: https://gitlab.com/BuildStream/buildstream/issues/1218",
-)
def test_deterministic_source_umask(cli, tmpdir, datafiles, kind):
+ if CASD_SEPARATE_USER and kind == "ostree":
+ pytest.xfail("The ostree plugin ignores the umask")
+
project = str(datafiles)
element_name = "list.bst"
element_path = os.path.join(project, "elements", element_name)
@@ -92,6 +91,7 @@ def test_deterministic_source_umask(cli, tmpdir, datafiles, kind):
old_umask = os.umask(umask)
try:
+ test_values = []
result = cli.run(project=project, args=["build", element_name])
result.assert_success()
@@ -99,7 +99,9 @@ def test_deterministic_source_umask(cli, tmpdir, datafiles, kind):
result.assert_success()
with open(os.path.join(checkoutdir, "ls-l"), "r") as f:
- return f.read()
+ for line in f.readlines():
+ test_values.append(line.split()[0] + " " + line.split()[-1])
+ return test_values
finally:
os.umask(old_umask)
cli.remove_artifact_from_cache(project, element_name)
diff --git a/src/buildstream/utils.py b/src/buildstream/utils.py
index 88314b263..9593f3e75 100644
--- a/src/buildstream/utils.py
+++ b/src/buildstream/utils.py
@@ -33,10 +33,12 @@ from stat import S_ISDIR
import subprocess
import tempfile
import time
+import datetime
import itertools
from contextlib import contextmanager
from pathlib import Path
from typing import Callable, IO, Iterable, Iterator, Optional, Tuple, Union
+from dateutil import parser as dateutil_parser
import psutil
@@ -133,6 +135,81 @@ class FileListResult:
return ret
+def _make_timestamp(timepoint: float) -> str:
+ """Obtain the ISO 8601 timestamp represented by the time given in seconds.
+
+ Args:
+ timepoint (float): the time since the epoch in seconds
+
+ Returns:
+ (str): the timestamp specified by https://www.ietf.org/rfc/rfc3339.txt
+ with a UTC timezone code 'Z'.
+
+ """
+ assert isinstance(timepoint, float), "Time to render as timestamp must be a float: {}".format(str(timepoint))
+ try:
+ return datetime.datetime.utcfromtimestamp(timepoint).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
+ except (OverflowError, TypeError):
+ raise UtilError("Failed to make UTC timestamp from {}".format(timepoint))
+
+
+def _get_file_mtimestamp(fullpath: str) -> str:
+ """Obtain the ISO 8601 timestamp represented by the mtime of the
+ file at the given path."""
+ assert isinstance(fullpath, str), "Path to file must be a string: {}".format(str(fullpath))
+ try:
+ mtime = os.path.getmtime(fullpath)
+ except OSError:
+ raise UtilError("Failed to get mtime of file at {}".format(fullpath))
+ return _make_timestamp(mtime)
+
+
+def _parse_timestamp(timestamp: str) -> float:
+ """Parse an ISO 8601 timestamp as specified in
+ https://www.ietf.org/rfc/rfc3339.txt. Only timestamps with the UTC code
+ 'Z' or an offset are valid. For example: '2019-12-12T10:23:01.54Z' or
+ '2019-12-12T10:23:01.54+00:00'.
+
+ Args:
+ timestamp (str): the timestamp
+
+ Returns:
+ (float): The time in seconds since epoch represented by the
+ timestamp.
+
+ Raises:
+ UtilError: if extraction of seconds fails
+ """
+ assert isinstance(timestamp, str), "Timestamp to parse must be a string: {}".format(str(timestamp))
+ try:
+ errmsg = "Failed to parse given timestamp: " + timestamp
+ parsed_time = dateutil_parser.isoparse(timestamp)
+ if parsed_time.tzinfo:
+ return parsed_time.timestamp()
+ raise UtilError(errmsg)
+ except (ValueError, OverflowError, TypeError):
+ raise UtilError(errmsg)
+
+
+def _set_file_mtime(fullpath: str, seconds: Union[int, float]) -> None:
+ """Set the access and modification times of the file at the given path
+ to the given time. The time of the file will be set with nanosecond
+ resolution if supported.
+
+ Args:
+ fullpath (str): the string representing the path to the file
+ timestamp (int, float): the time in seconds since the UNIX epoch
+ """
+ assert isinstance(fullpath, str), "Path to file must be a string: {}".format(str(fullpath))
+ assert isinstance(seconds, (int, float)), "Mtime to set must be a float or integer: {}".format(str(seconds))
+ set_mtime = seconds * 10 ** 9
+ try:
+ os.utime(fullpath, times=None, ns=(int(set_mtime), int(set_mtime)))
+ except OSError:
+ errmsg = "Failed to set the times of the file at {} to {}".format(fullpath, str(seconds))
+ raise UtilError(errmsg)
+
+
def list_relative_paths(directory: str) -> Iterator[str]:
"""A generator for walking directory relative paths
diff --git a/tests/integration/source-determinism.py b/tests/integration/source-determinism.py
index 355588133..a69e55a23 100644
--- a/tests/integration/source-determinism.py
+++ b/tests/integration/source-determinism.py
@@ -29,10 +29,6 @@ def create_test_directory(*path, mode=0o644):
@pytest.mark.integration
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.skipif(not HAVE_SANDBOX, reason="Only available with a functioning sandbox")
-@pytest.mark.skipif(
- HAVE_SANDBOX == "buildbox-run" and CASD_SEPARATE_USER,
- reason="Flaky due to timestamps: https://gitlab.com/BuildStream/buildstream/issues/1218",
-)
def test_deterministic_source_local(cli, tmpdir, datafiles):
"""Only user rights should be considered for local source.
"""
@@ -62,6 +58,7 @@ def test_deterministic_source_local(cli, tmpdir, datafiles):
create_test_directory(sourcedir, "dir-c", mode=0o2755 & mask)
create_test_directory(sourcedir, "dir-d", mode=0o1755 & mask)
try:
+ test_values = []
result = cli.run(project=project, args=["build", element_name])
result.assert_success()
@@ -69,7 +66,9 @@ def test_deterministic_source_local(cli, tmpdir, datafiles):
result.assert_success()
with open(os.path.join(checkoutdir, "ls-l"), "r") as f:
- return f.read()
+ for line in f.readlines():
+ test_values.append(line.split()[0] + " " + line.split()[-1])
+ return test_values
finally:
cli.remove_artifact_from_cache(project, element_name)
diff --git a/tests/internals/storage_vdir_import.py b/tests/internals/storage_vdir_import.py
index 6b70f92b5..fe3012712 100644
--- a/tests/internals/storage_vdir_import.py
+++ b/tests/internals/storage_vdir_import.py
@@ -23,6 +23,7 @@ from buildstream.storage._casbaseddirectory import CasBasedDirectory
from buildstream.storage._filebaseddirectory import FileBasedDirectory
from buildstream._cas import CASCache
from buildstream.storage.directory import VirtualDirectoryError
+from buildstream.utils import _set_file_mtime, _parse_timestamp
# These are comparitive tests that check that FileBasedDirectory and
@@ -48,6 +49,8 @@ root_filesets = [
empty_hash_ref = sha256().hexdigest()
RANDOM_SEED = 69105
NUM_RANDOM_TESTS = 4
+TIMESTAMP = "2019-12-16T08:49:04.012Z"
+MTIME = 1576486144.0120000
def generate_import_roots(rootno, directory):
@@ -63,8 +66,11 @@ def generate_import_root(rootdir, filelist):
if typesymbol == "F":
(dirnames, filename) = os.path.split(path)
os.makedirs(os.path.join(rootdir, dirnames), exist_ok=True)
- with open(os.path.join(rootdir, dirnames, filename), "wt") as f:
+ fullpath = os.path.join(rootdir, dirnames, filename)
+ with open(fullpath, "wt") as f:
f.write(content)
+        # set file mtime to an arbitrary known timestamp
+ _set_file_mtime(fullpath, _parse_timestamp(TIMESTAMP))
elif typesymbol == "D":
os.makedirs(os.path.join(rootdir, path), exist_ok=True)
elif typesymbol == "S":
@@ -98,6 +104,7 @@ def generate_random_root(rootno, directory):
elif thing == "file":
with open(target, "wt") as f:
f.write("This is node {}\n".format(i))
+ _set_file_mtime(target, _parse_timestamp(TIMESTAMP))
elif thing == "link":
symlink_type = random.choice(["absolute", "relative", "broken"])
if symlink_type == "broken" or not things:
@@ -124,7 +131,7 @@ def file_contents_are(path, contents):
def create_new_casdir(root_number, cas_cache, tmpdir):
d = CasBasedDirectory(cas_cache)
- d.import_files(os.path.join(tmpdir, "content", "root{}".format(root_number)))
+ d.import_files(os.path.join(tmpdir, "content", "root{}".format(root_number)), properties=["MTime"])
digest = d._get_digest()
assert digest.hash != empty_hash_ref
return d
@@ -192,7 +199,7 @@ def _import_test(tmpdir, original, overlay, generator_function, verify_contents=
assert duplicate_cas._get_digest().hash == d._get_digest().hash
d2 = create_new_casdir(overlay, cas_cache, tmpdir)
- d.import_files(d2)
+ d.import_files(d2, properties=["MTime"])
export_dir = os.path.join(tmpdir, "output-{}-{}".format(original, overlay))
roundtrip_dir = os.path.join(tmpdir, "roundtrip-{}-{}".format(original, overlay))
d2.export_files(roundtrip_dir)
@@ -211,6 +218,10 @@ def _import_test(tmpdir, original, overlay, generator_function, verify_contents=
path
)
assert file_contents_are(realpath, content)
+ roundtrip = os.path.join(roundtrip_dir, path)
+ assert os.path.getmtime(roundtrip) == MTIME
+ assert os.path.getmtime(realpath) == MTIME
+
elif typename == "S":
if os.path.isdir(realpath) and directory_not_empty(realpath):
# The symlink should not have overwritten the directory in this case.
@@ -227,7 +238,7 @@ def _import_test(tmpdir, original, overlay, generator_function, verify_contents=
# Now do the same thing with filebaseddirectories and check the contents match
- duplicate_cas.import_files(roundtrip_dir)
+ duplicate_cas.import_files(roundtrip_dir, properties=["MTime"])
assert duplicate_cas._get_digest().hash == d._get_digest().hash
finally:
diff --git a/tests/internals/utils_move_atomic.py b/tests/internals/utils_move_atomic.py
index cda020809..dd417cb66 100644
--- a/tests/internals/utils_move_atomic.py
+++ b/tests/internals/utils_move_atomic.py
@@ -3,7 +3,13 @@
import pytest
-from buildstream.utils import move_atomic, DirectoryExistsError
+from buildstream.utils import (
+ move_atomic,
+ DirectoryExistsError,
+ _get_file_mtimestamp,
+ _set_file_mtime,
+ _parse_timestamp,
+)
@pytest.fixture
@@ -89,3 +95,18 @@ def test_move_to_existing_non_empty_dir(src, tmp_path):
with pytest.raises(DirectoryExistsError):
move_atomic(src, dst)
+
+
+def test_move_to_empty_dir_set_mtime(src, tmp_path):
+ dst = tmp_path.joinpath("dst")
+ move_atomic(src, dst)
+ assert dst.joinpath("test").exists()
+ _dst = str(dst)
+ # set the mtime via stamp
+ timestamp1 = "2020-01-08T11:05:50.832123Z"
+ _set_file_mtime(_dst, _parse_timestamp(timestamp1))
+ assert timestamp1 == _get_file_mtimestamp(_dst)
+ # reset the mtime using an offset stamp
+ timestamp2 = "2010-02-12T12:05:50.832123+01:00"
+ _set_file_mtime(_dst, _parse_timestamp(timestamp2))
+ assert _get_file_mtimestamp(_dst) == "2010-02-12T11:05:50.832123Z"
diff --git a/tests/remoteexecution/workspace.py b/tests/remoteexecution/workspace.py
index 83480b42e..3bf35a738 100644
--- a/tests/remoteexecution/workspace.py
+++ b/tests/remoteexecution/workspace.py
@@ -2,6 +2,7 @@
# pylint: disable=redefined-outer-name
import os
+import re
import shutil
import pytest
@@ -11,10 +12,15 @@ from buildstream.testing.integration import assert_contains
pytestmark = pytest.mark.remoteexecution
+# subdirectories of the buildtree
+SRC = "src"
+DEPS = os.path.join(SRC, ".deps")
+AUTO = "autom4te.cache"
+DIRS = [os.sep + SRC, os.sep + DEPS, os.sep + AUTO]
+
DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
-MKFILEAM = os.path.join("src", "Makefile.am")
-MKFILE = os.path.join("src", "Makefile")
-MAIN = os.path.join("src", "main.o")
+MAIN = os.path.join(SRC, "main.c")
+MAINO = os.path.join(SRC, "main.o")
CFGMARK = "config-time"
BLDMARK = "build-time"
@@ -29,9 +35,9 @@ def files():
"depcomp",
"configure.ac",
"compile",
- "src",
- os.path.join("src", "main.c"),
- MKFILEAM,
+ SRC,
+ MAIN,
+ os.path.join(SRC, "Makefile.am"),
"Makefile.am",
]
input_files = [os.sep + fname for fname in _input_files]
@@ -39,26 +45,26 @@ def files():
_generated_files = [
"Makefile",
"Makefile.in",
- "autom4te.cache",
- os.path.join("autom4te.cache", "traces.1"),
- os.path.join("autom4te.cache", "traces.0"),
- os.path.join("autom4te.cache", "requests"),
- os.path.join("autom4te.cache", "output.0"),
- os.path.join("autom4te.cache", "output.1"),
+ AUTO,
+ os.path.join(AUTO, "traces.1"),
+ os.path.join(AUTO, "traces.0"),
+ os.path.join(AUTO, "requests"),
+ os.path.join(AUTO, "output.0"),
+ os.path.join(AUTO, "output.1"),
"config.h",
"config.h.in",
"config.log",
"config.status",
"configure",
"configure.lineno",
- os.path.join("src", "hello"),
- os.path.join("src", ".deps"),
- os.path.join("src", ".deps", "main.Po"),
- MKFILE,
- MAIN,
+ os.path.join(SRC, "hello"),
+ DEPS,
+ os.path.join(DEPS, "main.Po"),
+ os.path.join(SRC, "Makefile"),
+ MAINO,
CFGMARK,
BLDMARK,
- os.path.join("src", "Makefile.in"),
+ os.path.join(SRC, "Makefile.in"),
"stamp-h1",
]
generated_files = [os.sep + fname for fname in _generated_files]
@@ -79,12 +85,9 @@ def files():
def _get_mtimes(root):
assert os.path.exists(root)
- for dirname, dirnames, filenames in os.walk(root):
- dirnames.sort()
+ # timestamps on subdirs are not currently semantically meaningful
+ for dirname, _, filenames in os.walk(root):
filenames.sort()
- for subdirname in dirnames:
- fname = os.path.join(dirname, subdirname)
- yield fname[len(root) :], os.stat(fname).st_mtime
for filename in filenames:
fname = os.path.join(dirname, filename)
yield fname[len(root) :], os.stat(fname).st_mtime
@@ -129,8 +132,6 @@ def check_buildtree(
result.assert_success()
buildtree = {}
- inp_times = []
- gen_times = []
output = result.output.splitlines()
for line in output:
@@ -141,28 +142,34 @@ def check_buildtree(
mtime = int(mtime)
buildtree[fname] = mtime
+ typ_inptime = None
+ typ_gentime = None
+
if incremental:
+ # directory timestamps are not meaningful
+ if fname in DIRS:
+ continue
if fname in input_files:
- inp_times.append(mtime)
- else:
- gen_times.append(mtime)
+ if fname != os.sep + MAIN and not typ_inptime:
+ typ_inptime = mtime
+ if fname in generated_files:
+ if fname != os.sep + MAINO and not typ_gentime:
+ typ_gentime = mtime
# all expected files should have been found
for filename in input_files + generated_files:
assert filename in buildtree
if incremental:
- # at least inputs should be older than generated files
- assert not any([inp_time > gen_time for inp_time in inp_times for gen_time in gen_times])
-
- makefile = os.sep + "Makefile"
- makefile_am = os.sep + "Makefile.am"
- mainc = os.sep + os.path.join("src", "main.c")
- maino = os.sep + os.path.join("src", "hello")
- testfiles = [makefile, makefile_am, mainc, maino]
- if all([testfile in buildtree for testfile in testfiles]):
- assert buildtree[makefile] < buildtree[makefile_am]
- assert buildtree[mainc] < buildtree[maino]
+ # the source file was changed so should be more recent than other input files
+ # it should be older than the main object.
+ # The main object should be more recent than generated files.
+ assert buildtree[os.sep + MAIN] > typ_inptime
+ assert buildtree[os.sep + MAINO] > buildtree[os.sep + MAIN]
+ assert buildtree[os.sep + MAINO] > typ_gentime
+
+ for fname in DIRS:
+ del buildtree[fname]
return buildtree
@@ -178,12 +185,7 @@ def get_timemark(cli, project, element_name, marker):
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize(
- "modification",
- [
- pytest.param("none"),
- pytest.param("content"),
- pytest.param("time", marks=pytest.mark.xfail(reason="mtimes are set to a magic value and not stored in CAS")),
- ],
+ "modification", [pytest.param("content"), pytest.param("time"),],
)
@pytest.mark.parametrize(
"buildtype",
@@ -195,10 +197,7 @@ def get_timemark(cli, project, element_name, marker):
],
)
def test_workspace_build(cli, tmpdir, datafiles, modification, buildtype):
- incremental = False
- if buildtype == "incremental":
- incremental = True
-
+ incremental = buildtype == "incremental"
project = str(datafiles)
checkout = os.path.join(cli.directory, "checkout")
workspace = os.path.join(cli.directory, "workspace")
@@ -234,14 +233,17 @@ def test_workspace_build(cli, tmpdir, datafiles, modification, buildtype):
# build the element and cache the buildtree
result = cli.run(project=project, args=build)
result.assert_success()
+ assert cli.get_element_state(project, element_name) == "cached"
+ build_key = cli.get_element_key(project, element_name)
# check that the local workspace is unchanged
assert_contains(workspace, input_files, strict=True)
assert ws_times == get_mtimes(workspace)
# check modified workspace dir was cached and save the time
- # build was run
- build_mtimes = check_buildtree(cli, project, element_name, input_files, generated_files, incremental=incremental)
+ # build was run. Incremental build conditions do not apply since the workspace
+ # was initially opened using magic timestamps.
+ build_times = check_buildtree(cli, project, element_name, input_files, generated_files, incremental=False)
build_timemark = get_timemark(cli, project, element_name, (os.sep + BLDMARK))
# check that the artifacts are available
@@ -253,45 +255,55 @@ def test_workspace_build(cli, tmpdir, datafiles, modification, buildtype):
# rebuild the element
result = cli.run(project=project, args=build)
result.assert_success()
- # this should all be cached
- # so the buildmark time should be the same
- rebuild_mtimes = check_buildtree(cli, project, element_name, input_files, generated_files, incremental=incremental)
+ assert cli.get_element_state(project, element_name) == "cached"
+ rebuild_key = cli.get_element_key(project, element_name)
+ assert rebuild_key == build_key
+ rebuild_times = check_buildtree(cli, project, element_name, input_files, generated_files, incremental=False)
rebuild_timemark = get_timemark(cli, project, element_name, (os.sep + BLDMARK))
+ # buildmark time should be the same
assert build_timemark == rebuild_timemark
- assert build_mtimes == rebuild_mtimes
+ assert all([rebuild_times[fname] == build_times[fname] for fname in rebuild_times]), "{}\n{}".format(
+ rebuild_times, build_times
+ )
# modify the open workspace and rebuild
- if modification != "none":
- assert os.path.exists(newfile_path)
-
- if modification == "time":
- # touch a file in the workspace and save the mtime
- os.utime(newfile_path)
-
- elif modification == "content":
- # change a source file
- with open(newfile_path, "w") as fdata:
- fdata.write("anotherstring")
-
- # refresh input times
- ws_times = get_mtimes(workspace)
+ main_path = os.path.join(workspace, MAIN)
+ assert os.path.exists(main_path)
+
+ if modification == "time":
+ # touch a file in the workspace and save the mtime
+ os.utime(main_path)
+ touched_time = os.stat(main_path).st_mtime
+
+ elif modification == "content":
+ # change a source file (there's a race here but it's not serious)
+ with open(main_path, "r") as fdata:
+ data = fdata.readlines()
+ with open(main_path, "w") as fdata:
+ for line in data:
+ fdata.write(re.sub(r"Hello", "Goodbye", line))
+ touched_time = os.stat(main_path).st_mtime
+
+ # refresh input times
+ ws_times = get_mtimes(workspace)
- # rebuild the element
- result = cli.run(project=project, args=build)
- result.assert_success()
+ # rebuild the element
+ result = cli.run(project=project, args=build)
+ result.assert_success()
- rebuild_mtimes = check_buildtree(
- cli, project, element_name, input_files, generated_files, incremental=incremental
- )
- rebuild_timemark = get_timemark(cli, project, element_name, (os.sep + BLDMARK))
- assert build_timemark != rebuild_timemark
+ rebuild_times = check_buildtree(cli, project, element_name, input_files, generated_files, incremental=incremental)
+ rebuild_timemark = get_timemark(cli, project, element_name, (os.sep + BLDMARK))
+ assert rebuild_timemark > build_timemark
- # check the times of the changed files
- if incremental:
- touched_time = os.stat(newfile_path).st_mtime
- assert rebuild_mtimes[newfile] == touched_time
+ # check the times of the changed files
+ if incremental:
+ assert rebuild_times[os.sep + MAIN] == touched_time
+ del rebuild_times[os.sep + MAIN]
+ assert all([rebuild_times[fname] == build_times[fname] for fname in rebuild_times]), "{}\n{}".format(
+ rebuild_times, build_times
+ )
- # Check workspace is unchanged
- assert_contains(workspace, input_files, strict=True)
- assert ws_times == get_mtimes(workspace)
+ # Check workspace is unchanged
+ assert_contains(workspace, input_files, strict=True)
+ assert ws_times == get_mtimes(workspace)