author     Jürg Billeter <j@bitron.ch>                  2020-06-24 10:41:00 +0200
committer  bst-marge-bot <marge-bot@buildstream.build>  2020-08-13 09:24:43 +0000
commit     c61a04431cd04ee1ef64fb15297271300fb00041
tree       1ff7bb29bf6f3439784c541f33b68fcc8100d18a
parent     ccdac3517f7a241aa9d3b5637dea6640a63667fd
download   buildstream-c61a04431cd04ee1ef64fb15297271300fb00041.tar.gz
_protos: Update generated files
Use grpcio-tools 1.28.1, matching grpcio from requirements.txt
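
For reference, stubs like the ones below are regenerated by running protoc via grpcio-tools. A minimal sketch, assuming the .proto sources and the generated output both live under src/buildstream/_protos (an assumption; BuildStream's actual regeneration tooling may differ):

    # Hedged regeneration sketch: the include path, output path and the single
    # .proto file named here are illustrative assumptions, not from this commit.
    from grpc_tools import protoc  # shipped with the grpcio-tools package

    protoc.main([
        "grpc_tools.protoc",
        "-Isrc",             # so generated imports resolve as buildstream._protos.*
        "--python_out=src",
        "--grpc_python_out=src",
        "src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution.proto",
    ])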
-rw-r--r--  src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution_pb2_grpc.py | 1374
-rw-r--r--  src/buildstream/_protos/build/buildgrid/local_cas_pb2_grpc.py | 629
-rw-r--r--  src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py | 139
-rw-r--r--  src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py | 281
-rw-r--r--  src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py | 141
-rw-r--r--  src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py | 334
-rw-r--r--  src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py | 310
7 files changed, 1957 insertions(+), 1251 deletions(-)
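
The docstrings reproduced in the diff below describe the Remote Execution API semantics: Execute/WaitExecution stream google.longrunning.Operation messages, the action cache maps Action digests to ActionResults, and the CAS stores blobs by digest. As a hedged illustration of how a regenerated stub is consumed — the endpoint, digest values and instance name are placeholders, not taken from this commit:

    import grpc

    from buildstream._protos.build.bazel.remote.execution.v2 import (
        remote_execution_pb2,
        remote_execution_pb2_grpc,
    )

    # Assumed endpoint; any REAPI v2 compatible server would do.
    channel = grpc.insecure_channel("localhost:50051")
    stub = remote_execution_pb2_grpc.ActionCacheStub(channel)

    request = remote_execution_pb2.GetActionResultRequest(
        instance_name="",  # empty instance name, see the CAS docstring below
        action_digest=remote_execution_pb2.Digest(
            hash="0" * 64,   # placeholder SHA-256 hash
            size_bytes=142,  # placeholder size
        ),
    )
    try:
        result = stub.GetActionResult(request)
        print("cached exit code:", result.exit_code)
    except grpc.RpcError as err:
        # Per the GetActionResult docstring, NOT_FOUND just means a cache miss.
        if err.code() != grpc.StatusCode.NOT_FOUND:
            raise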
diff --git a/src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution_pb2_grpc.py b/src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution_pb2_grpc.py
index 33ca1c6aa..19e3d337a 100644
--- a/src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution_pb2_grpc.py
+++ b/src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution_pb2_grpc.py
@@ -6,621 +6,887 @@ from buildstream._protos.google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2
class ExecutionStub(object):
- """The Remote Execution API is used to execute an
- [Action][build.bazel.remote.execution.v2.Action] on the remote
- workers.
-
- As with other services in the Remote Execution API, any call may return an
- error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
- information about when the client should retry the request; clients SHOULD
- respect the information provided.
- """
+ """The Remote Execution API is used to execute an
+ [Action][build.bazel.remote.execution.v2.Action] on the remote
+ workers.
+
+ As with other services in the Remote Execution API, any call may return an
+ error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
+ information about when the client should retry the request; clients SHOULD
+ respect the information provided.
+ """
- def __init__(self, channel):
- """Constructor.
+ def __init__(self, channel):
+ """Constructor.
- Args:
- channel: A grpc.Channel.
- """
- self.Execute = channel.unary_stream(
- '/build.bazel.remote.execution.v2.Execution/Execute',
- request_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ExecuteRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
- self.WaitExecution = channel.unary_stream(
- '/build.bazel.remote.execution.v2.Execution/WaitExecution',
- request_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.WaitExecutionRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.Execute = channel.unary_stream(
+ '/build.bazel.remote.execution.v2.Execution/Execute',
+ request_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ExecuteRequest.SerializeToString,
+ response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ )
+ self.WaitExecution = channel.unary_stream(
+ '/build.bazel.remote.execution.v2.Execution/WaitExecution',
+ request_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.WaitExecutionRequest.SerializeToString,
+ response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ )
class ExecutionServicer(object):
- """The Remote Execution API is used to execute an
- [Action][build.bazel.remote.execution.v2.Action] on the remote
- workers.
-
- As with other services in the Remote Execution API, any call may return an
- error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
- information about when the client should retry the request; clients SHOULD
- respect the information provided.
- """
-
- def Execute(self, request, context):
- """Execute an action remotely.
-
- In order to execute an action, the client must first upload all of the
- inputs, the
- [Command][build.bazel.remote.execution.v2.Command] to run, and the
- [Action][build.bazel.remote.execution.v2.Action] into the
- [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage].
- It then calls `Execute` with an `action_digest` referring to them. The
- server will run the action and eventually return the result.
-
- The input `Action`'s fields MUST meet the various canonicalization
- requirements specified in the documentation for their types so that it has
- the same digest as other logically equivalent `Action`s. The server MAY
- enforce the requirements and return errors if a non-canonical input is
- received. It MAY also proceed without verifying some or all of the
- requirements, such as for performance reasons. If the server does not
- verify the requirement, then it will treat the `Action` as distinct from
- another logically equivalent action if they hash differently.
-
- Returns a stream of
- [google.longrunning.Operation][google.longrunning.Operation] messages
- describing the resulting execution, with eventual `response`
- [ExecuteResponse][build.bazel.remote.execution.v2.ExecuteResponse]. The
- `metadata` on the operation is of type
- [ExecuteOperationMetadata][build.bazel.remote.execution.v2.ExecuteOperationMetadata].
-
- If the client remains connected after the first response is returned after
- the server, then updates are streamed as if the client had called
- [WaitExecution][build.bazel.remote.execution.v2.Execution.WaitExecution]
- until the execution completes or the request reaches an error. The
- operation can also be queried using [Operations
- API][google.longrunning.Operations.GetOperation].
-
- The server NEED NOT implement other methods or functionality of the
- Operations API.
-
- Errors discovered during creation of the `Operation` will be reported
- as gRPC Status errors, while errors that occurred while running the
- action will be reported in the `status` field of the `ExecuteResponse`. The
- server MUST NOT set the `error` field of the `Operation` proto.
- The possible errors include:
-
- * `INVALID_ARGUMENT`: One or more arguments are invalid.
- * `FAILED_PRECONDITION`: One or more errors occurred in setting up the
- action requested, such as a missing input or command or no worker being
- available. The client may be able to fix the errors and retry.
- * `RESOURCE_EXHAUSTED`: There is insufficient quota of some resource to run
- the action.
- * `UNAVAILABLE`: Due to a transient condition, such as all workers being
- occupied (and the server does not support a queue), the action could not
- be started. The client should retry.
- * `INTERNAL`: An internal error occurred in the execution engine or the
- worker.
- * `DEADLINE_EXCEEDED`: The execution timed out.
- * `CANCELLED`: The operation was cancelled by the client. This status is
- only possible if the server implements the Operations API CancelOperation
- method, and it was called for the current execution.
-
- In the case of a missing input or command, the server SHOULD additionally
- send a [PreconditionFailure][google.rpc.PreconditionFailure] error detail
- where, for each requested blob not present in the CAS, there is a
- `Violation` with a `type` of `MISSING` and a `subject` of
- `"blobs/{hash}/{size}"` indicating the digest of the missing blob.
+ """The Remote Execution API is used to execute an
+ [Action][build.bazel.remote.execution.v2.Action] on the remote
+ workers.
+
+ As with other services in the Remote Execution API, any call may return an
+ error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
+ information about when the client should retry the request; clients SHOULD
+ respect the information provided.
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
-
- def WaitExecution(self, request, context):
- """Wait for an execution operation to complete. When the client initially
- makes the request, the server immediately responds with the current status
- of the execution. The server will leave the request stream open until the
- operation completes, and then respond with the completed operation. The
- server MAY choose to stream additional updates as execution progresses,
- such as to provide an update as to the state of the execution.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+
+ def Execute(self, request, context):
+ """Execute an action remotely.
+
+ In order to execute an action, the client must first upload all of the
+ inputs, the
+ [Command][build.bazel.remote.execution.v2.Command] to run, and the
+ [Action][build.bazel.remote.execution.v2.Action] into the
+ [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage].
+ It then calls `Execute` with an `action_digest` referring to them. The
+ server will run the action and eventually return the result.
+
+ The input `Action`'s fields MUST meet the various canonicalization
+ requirements specified in the documentation for their types so that it has
+ the same digest as other logically equivalent `Action`s. The server MAY
+ enforce the requirements and return errors if a non-canonical input is
+ received. It MAY also proceed without verifying some or all of the
+ requirements, such as for performance reasons. If the server does not
+ verify the requirement, then it will treat the `Action` as distinct from
+ another logically equivalent action if they hash differently.
+
+ Returns a stream of
+ [google.longrunning.Operation][google.longrunning.Operation] messages
+ describing the resulting execution, with eventual `response`
+ [ExecuteResponse][build.bazel.remote.execution.v2.ExecuteResponse]. The
+ `metadata` on the operation is of type
+ [ExecuteOperationMetadata][build.bazel.remote.execution.v2.ExecuteOperationMetadata].
+
+ If the client remains connected after the first response is returned after
+ the server, then updates are streamed as if the client had called
+ [WaitExecution][build.bazel.remote.execution.v2.Execution.WaitExecution]
+ until the execution completes or the request reaches an error. The
+ operation can also be queried using [Operations
+ API][google.longrunning.Operations.GetOperation].
+
+ The server NEED NOT implement other methods or functionality of the
+ Operations API.
+
+ Errors discovered during creation of the `Operation` will be reported
+ as gRPC Status errors, while errors that occurred while running the
+ action will be reported in the `status` field of the `ExecuteResponse`. The
+ server MUST NOT set the `error` field of the `Operation` proto.
+ The possible errors include:
+
+ * `INVALID_ARGUMENT`: One or more arguments are invalid.
+ * `FAILED_PRECONDITION`: One or more errors occurred in setting up the
+ action requested, such as a missing input or command or no worker being
+ available. The client may be able to fix the errors and retry.
+ * `RESOURCE_EXHAUSTED`: There is insufficient quota of some resource to run
+ the action.
+ * `UNAVAILABLE`: Due to a transient condition, such as all workers being
+ occupied (and the server does not support a queue), the action could not
+ be started. The client should retry.
+ * `INTERNAL`: An internal error occurred in the execution engine or the
+ worker.
+ * `DEADLINE_EXCEEDED`: The execution timed out.
+ * `CANCELLED`: The operation was cancelled by the client. This status is
+ only possible if the server implements the Operations API CancelOperation
+ method, and it was called for the current execution.
+
+ In the case of a missing input or command, the server SHOULD additionally
+ send a [PreconditionFailure][google.rpc.PreconditionFailure] error detail
+ where, for each requested blob not present in the CAS, there is a
+ `Violation` with a `type` of `MISSING` and a `subject` of
+ `"blobs/{hash}/{size}"` indicating the digest of the missing blob.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def WaitExecution(self, request, context):
+ """Wait for an execution operation to complete. When the client initially
+ makes the request, the server immediately responds with the current status
+ of the execution. The server will leave the request stream open until the
+ operation completes, and then respond with the completed operation. The
+ server MAY choose to stream additional updates as execution progresses,
+ such as to provide an update as to the state of the execution.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
def add_ExecutionServicer_to_server(servicer, server):
- rpc_method_handlers = {
- 'Execute': grpc.unary_stream_rpc_method_handler(
- servicer.Execute,
- request_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ExecuteRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- 'WaitExecution': grpc.unary_stream_rpc_method_handler(
- servicer.WaitExecution,
- request_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.WaitExecutionRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- 'build.bazel.remote.execution.v2.Execution', rpc_method_handlers)
- server.add_generic_rpc_handlers((generic_handler,))
+ rpc_method_handlers = {
+ 'Execute': grpc.unary_stream_rpc_method_handler(
+ servicer.Execute,
+ request_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ExecuteRequest.FromString,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+ ),
+ 'WaitExecution': grpc.unary_stream_rpc_method_handler(
+ servicer.WaitExecution,
+ request_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.WaitExecutionRequest.FromString,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ 'build.bazel.remote.execution.v2.Execution', rpc_method_handlers)
+ server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class Execution(object):
+ """The Remote Execution API is used to execute an
+ [Action][build.bazel.remote.execution.v2.Action] on the remote
+ workers.
+
+ As with other services in the Remote Execution API, any call may return an
+ error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
+ information about when the client should retry the request; clients SHOULD
+ respect the information provided.
+ """
+
+ @staticmethod
+ def Execute(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_stream(request, target, '/build.bazel.remote.execution.v2.Execution/Execute',
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ExecuteRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def WaitExecution(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_stream(request, target, '/build.bazel.remote.execution.v2.Execution/WaitExecution',
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.WaitExecutionRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
class ActionCacheStub(object):
- """The action cache API is used to query whether a given action has already been
- performed and, if so, retrieve its result. Unlike the
- [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage],
- which addresses blobs by their own content, the action cache addresses the
- [ActionResult][build.bazel.remote.execution.v2.ActionResult] by a
- digest of the encoded [Action][build.bazel.remote.execution.v2.Action]
- which produced them.
-
- The lifetime of entries in the action cache is implementation-specific, but
- the server SHOULD assume that more recently used entries are more likely to
- be used again.
-
- As with other services in the Remote Execution API, any call may return an
- error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
- information about when the client should retry the request; clients SHOULD
- respect the information provided.
- """
-
- def __init__(self, channel):
- """Constructor.
-
- Args:
- channel: A grpc.Channel.
+ """The action cache API is used to query whether a given action has already been
+ performed and, if so, retrieve its result. Unlike the
+ [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage],
+ which addresses blobs by their own content, the action cache addresses the
+ [ActionResult][build.bazel.remote.execution.v2.ActionResult] by a
+ digest of the encoded [Action][build.bazel.remote.execution.v2.Action]
+ which produced them.
+
+ The lifetime of entries in the action cache is implementation-specific, but
+ the server SHOULD assume that more recently used entries are more likely to
+ be used again.
+
+ As with other services in the Remote Execution API, any call may return an
+ error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
+ information about when the client should retry the request; clients SHOULD
+ respect the information provided.
"""
- self.GetActionResult = channel.unary_unary(
- '/build.bazel.remote.execution.v2.ActionCache/GetActionResult',
- request_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetActionResultRequest.SerializeToString,
- response_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ActionResult.FromString,
- )
- self.UpdateActionResult = channel.unary_unary(
- '/build.bazel.remote.execution.v2.ActionCache/UpdateActionResult',
- request_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.UpdateActionResultRequest.SerializeToString,
- response_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ActionResult.FromString,
- )
+
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.GetActionResult = channel.unary_unary(
+ '/build.bazel.remote.execution.v2.ActionCache/GetActionResult',
+ request_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetActionResultRequest.SerializeToString,
+ response_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ActionResult.FromString,
+ )
+ self.UpdateActionResult = channel.unary_unary(
+ '/build.bazel.remote.execution.v2.ActionCache/UpdateActionResult',
+ request_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.UpdateActionResultRequest.SerializeToString,
+ response_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ActionResult.FromString,
+ )
class ActionCacheServicer(object):
- """The action cache API is used to query whether a given action has already been
- performed and, if so, retrieve its result. Unlike the
- [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage],
- which addresses blobs by their own content, the action cache addresses the
- [ActionResult][build.bazel.remote.execution.v2.ActionResult] by a
- digest of the encoded [Action][build.bazel.remote.execution.v2.Action]
- which produced them.
-
- The lifetime of entries in the action cache is implementation-specific, but
- the server SHOULD assume that more recently used entries are more likely to
- be used again.
-
- As with other services in the Remote Execution API, any call may return an
- error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
- information about when the client should retry the request; clients SHOULD
- respect the information provided.
- """
-
- def GetActionResult(self, request, context):
- """Retrieve a cached execution result.
-
- Implementations SHOULD ensure that any blobs referenced from the
- [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage]
- are available at the time of returning the
- [ActionResult][build.bazel.remote.execution.v2.ActionResult] and will be
- for some period of time afterwards. The TTLs of the referenced blobs SHOULD be increased
- if necessary and applicable.
-
- Errors:
-
- * `NOT_FOUND`: The requested `ActionResult` is not in the cache.
+ """The action cache API is used to query whether a given action has already been
+ performed and, if so, retrieve its result. Unlike the
+ [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage],
+ which addresses blobs by their own content, the action cache addresses the
+ [ActionResult][build.bazel.remote.execution.v2.ActionResult] by a
+ digest of the encoded [Action][build.bazel.remote.execution.v2.Action]
+ which produced them.
+
+ The lifetime of entries in the action cache is implementation-specific, but
+ the server SHOULD assume that more recently used entries are more likely to
+ be used again.
+
+ As with other services in the Remote Execution API, any call may return an
+ error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
+ information about when the client should retry the request; clients SHOULD
+ respect the information provided.
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
-
- def UpdateActionResult(self, request, context):
- """Upload a new execution result.
-
- In order to allow the server to perform access control based on the type of
- action, and to assist with client debugging, the client MUST first upload
- the [Action][build.bazel.remote.execution.v2.Execution] that produced the
- result, along with its
- [Command][build.bazel.remote.execution.v2.Command], into the
- `ContentAddressableStorage`.
-
- Errors:
-
- * `INVALID_ARGUMENT`: One or more arguments are invalid.
- * `FAILED_PRECONDITION`: One or more errors occurred in updating the
- action result, such as a missing command or action.
- * `RESOURCE_EXHAUSTED`: There is insufficient storage space to add the
- entry to the cache.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+
+ def GetActionResult(self, request, context):
+ """Retrieve a cached execution result.
+
+ Implementations SHOULD ensure that any blobs referenced from the
+ [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage]
+ are available at the time of returning the
+ [ActionResult][build.bazel.remote.execution.v2.ActionResult] and will be
+ for some period of time afterwards. The TTLs of the referenced blobs SHOULD be increased
+ if necessary and applicable.
+
+ Errors:
+
+ * `NOT_FOUND`: The requested `ActionResult` is not in the cache.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def UpdateActionResult(self, request, context):
+ """Upload a new execution result.
+
+ In order to allow the server to perform access control based on the type of
+ action, and to assist with client debugging, the client MUST first upload
+ the [Action][build.bazel.remote.execution.v2.Execution] that produced the
+ result, along with its
+ [Command][build.bazel.remote.execution.v2.Command], into the
+ `ContentAddressableStorage`.
+
+ Errors:
+
+ * `INVALID_ARGUMENT`: One or more arguments are invalid.
+ * `FAILED_PRECONDITION`: One or more errors occurred in updating the
+ action result, such as a missing command or action.
+ * `RESOURCE_EXHAUSTED`: There is insufficient storage space to add the
+ entry to the cache.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
def add_ActionCacheServicer_to_server(servicer, server):
- rpc_method_handlers = {
- 'GetActionResult': grpc.unary_unary_rpc_method_handler(
- servicer.GetActionResult,
- request_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetActionResultRequest.FromString,
- response_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ActionResult.SerializeToString,
- ),
- 'UpdateActionResult': grpc.unary_unary_rpc_method_handler(
- servicer.UpdateActionResult,
- request_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.UpdateActionResultRequest.FromString,
- response_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ActionResult.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- 'build.bazel.remote.execution.v2.ActionCache', rpc_method_handlers)
- server.add_generic_rpc_handlers((generic_handler,))
+ rpc_method_handlers = {
+ 'GetActionResult': grpc.unary_unary_rpc_method_handler(
+ servicer.GetActionResult,
+ request_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetActionResultRequest.FromString,
+ response_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ActionResult.SerializeToString,
+ ),
+ 'UpdateActionResult': grpc.unary_unary_rpc_method_handler(
+ servicer.UpdateActionResult,
+ request_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.UpdateActionResultRequest.FromString,
+ response_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ActionResult.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ 'build.bazel.remote.execution.v2.ActionCache', rpc_method_handlers)
+ server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class ActionCache(object):
+ """The action cache API is used to query whether a given action has already been
+ performed and, if so, retrieve its result. Unlike the
+ [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage],
+ which addresses blobs by their own content, the action cache addresses the
+ [ActionResult][build.bazel.remote.execution.v2.ActionResult] by a
+ digest of the encoded [Action][build.bazel.remote.execution.v2.Action]
+ which produced them.
+
+ The lifetime of entries in the action cache is implementation-specific, but
+ the server SHOULD assume that more recently used entries are more likely to
+ be used again.
+
+ As with other services in the Remote Execution API, any call may return an
+ error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
+ information about when the client should retry the request; clients SHOULD
+ respect the information provided.
+ """
+
+ @staticmethod
+ def GetActionResult(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/build.bazel.remote.execution.v2.ActionCache/GetActionResult',
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetActionResultRequest.SerializeToString,
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ActionResult.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def UpdateActionResult(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/build.bazel.remote.execution.v2.ActionCache/UpdateActionResult',
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.UpdateActionResultRequest.SerializeToString,
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ActionResult.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
class ContentAddressableStorageStub(object):
- """The CAS (content-addressable storage) is used to store the inputs to and
- outputs from the execution service. Each piece of content is addressed by the
- digest of its binary data.
-
- Most of the binary data stored in the CAS is opaque to the execution engine,
- and is only used as a communication medium. In order to build an
- [Action][build.bazel.remote.execution.v2.Action],
- however, the client will need to also upload the
- [Command][build.bazel.remote.execution.v2.Command] and input root
- [Directory][build.bazel.remote.execution.v2.Directory] for the Action.
- The Command and Directory messages must be marshalled to wire format and then
- uploaded under the hash as with any other piece of content. In practice, the
- input root directory is likely to refer to other Directories in its
- hierarchy, which must also each be uploaded on their own.
-
- For small file uploads the client should group them together and call
- [BatchUpdateBlobs][build.bazel.remote.execution.v2.ContentAddressableStorage.BatchUpdateBlobs].
- For large uploads, the client must use the
- [Write method][google.bytestream.ByteStream.Write] of the ByteStream API. The
- `resource_name` is `{instance_name}/uploads/{uuid}/blobs/{hash}/{size}`,
- where `instance_name` is as described in the next paragraph, `uuid` is a
- version 4 UUID generated by the client, and `hash` and `size` are the
- [Digest][build.bazel.remote.execution.v2.Digest] of the blob. The
- `uuid` is used only to avoid collisions when multiple clients try to upload
- the same file (or the same client tries to upload the file multiple times at
- once on different threads), so the client MAY reuse the `uuid` for uploading
- different blobs. The `resource_name` may optionally have a trailing filename
- (or other metadata) for a client to use if it is storing URLs, as in
- `{instance}/uploads/{uuid}/blobs/{hash}/{size}/foo/bar/baz.cc`. Anything
- after the `size` is ignored.
-
- A single server MAY support multiple instances of the execution system, each
- with their own workers, storage, cache, etc. The exact relationship between
- instances is up to the server. If the server does, then the `instance_name`
- is an identifier, possibly containing multiple path segments, used to
- distinguish between the various instances on the server, in a manner defined
- by the server. For servers which do not support multiple instances, then the
- `instance_name` is the empty path and the leading slash is omitted, so that
- the `resource_name` becomes `uploads/{uuid}/blobs/{hash}/{size}`.
- To simplify parsing, a path segment cannot equal any of the following
- keywords: `blobs`, `uploads`, `actions`, `actionResults`, `operations` and
- `capabilities`.
-
- When attempting an upload, if another client has already completed the upload
- (which may occur in the middle of a single upload if another client uploads
- the same blob concurrently), the request will terminate immediately with
- a response whose `committed_size` is the full size of the uploaded file
- (regardless of how much data was transmitted by the client). If the client
- completes the upload but the
- [Digest][build.bazel.remote.execution.v2.Digest] does not match, an
- `INVALID_ARGUMENT` error will be returned. In either case, the client should
- not attempt to retry the upload.
-
- For downloading blobs, the client must use the
- [Read method][google.bytestream.ByteStream.Read] of the ByteStream API, with
- a `resource_name` of `"{instance_name}/blobs/{hash}/{size}"`, where
- `instance_name` is the instance name (see above), and `hash` and `size` are
- the [Digest][build.bazel.remote.execution.v2.Digest] of the blob.
-
- The lifetime of entries in the CAS is implementation specific, but it SHOULD
- be long enough to allow for newly-added and recently looked-up entries to be
- used in subsequent calls (e.g. to
- [Execute][build.bazel.remote.execution.v2.Execution.Execute]).
-
- Servers MUST behave as though empty blobs are always available, even if they
- have not been uploaded. Clients MAY optimize away the uploading or
- downloading of empty blobs.
-
- As with other services in the Remote Execution API, any call may return an
- error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
- information about when the client should retry the request; clients SHOULD
- respect the information provided.
- """
-
- def __init__(self, channel):
- """Constructor.
-
- Args:
- channel: A grpc.Channel.
+ """The CAS (content-addressable storage) is used to store the inputs to and
+ outputs from the execution service. Each piece of content is addressed by the
+ digest of its binary data.
+
+ Most of the binary data stored in the CAS is opaque to the execution engine,
+ and is only used as a communication medium. In order to build an
+ [Action][build.bazel.remote.execution.v2.Action],
+ however, the client will need to also upload the
+ [Command][build.bazel.remote.execution.v2.Command] and input root
+ [Directory][build.bazel.remote.execution.v2.Directory] for the Action.
+ The Command and Directory messages must be marshalled to wire format and then
+ uploaded under the hash as with any other piece of content. In practice, the
+ input root directory is likely to refer to other Directories in its
+ hierarchy, which must also each be uploaded on their own.
+
+ For small file uploads the client should group them together and call
+ [BatchUpdateBlobs][build.bazel.remote.execution.v2.ContentAddressableStorage.BatchUpdateBlobs].
+ For large uploads, the client must use the
+ [Write method][google.bytestream.ByteStream.Write] of the ByteStream API. The
+ `resource_name` is `{instance_name}/uploads/{uuid}/blobs/{hash}/{size}`,
+ where `instance_name` is as described in the next paragraph, `uuid` is a
+ version 4 UUID generated by the client, and `hash` and `size` are the
+ [Digest][build.bazel.remote.execution.v2.Digest] of the blob. The
+ `uuid` is used only to avoid collisions when multiple clients try to upload
+ the same file (or the same client tries to upload the file multiple times at
+ once on different threads), so the client MAY reuse the `uuid` for uploading
+ different blobs. The `resource_name` may optionally have a trailing filename
+ (or other metadata) for a client to use if it is storing URLs, as in
+ `{instance}/uploads/{uuid}/blobs/{hash}/{size}/foo/bar/baz.cc`. Anything
+ after the `size` is ignored.
+
+ A single server MAY support multiple instances of the execution system, each
+ with their own workers, storage, cache, etc. The exact relationship between
+ instances is up to the server. If the server does, then the `instance_name`
+ is an identifier, possibly containing multiple path segments, used to
+ distinguish between the various instances on the server, in a manner defined
+ by the server. For servers which do not support multiple instances, then the
+ `instance_name` is the empty path and the leading slash is omitted, so that
+ the `resource_name` becomes `uploads/{uuid}/blobs/{hash}/{size}`.
+ To simplify parsing, a path segment cannot equal any of the following
+ keywords: `blobs`, `uploads`, `actions`, `actionResults`, `operations` and
+ `capabilities`.
+
+ When attempting an upload, if another client has already completed the upload
+ (which may occur in the middle of a single upload if another client uploads
+ the same blob concurrently), the request will terminate immediately with
+ a response whose `committed_size` is the full size of the uploaded file
+ (regardless of how much data was transmitted by the client). If the client
+ completes the upload but the
+ [Digest][build.bazel.remote.execution.v2.Digest] does not match, an
+ `INVALID_ARGUMENT` error will be returned. In either case, the client should
+ not attempt to retry the upload.
+
+ For downloading blobs, the client must use the
+ [Read method][google.bytestream.ByteStream.Read] of the ByteStream API, with
+ a `resource_name` of `"{instance_name}/blobs/{hash}/{size}"`, where
+ `instance_name` is the instance name (see above), and `hash` and `size` are
+ the [Digest][build.bazel.remote.execution.v2.Digest] of the blob.
+
+ The lifetime of entries in the CAS is implementation specific, but it SHOULD
+ be long enough to allow for newly-added and recently looked-up entries to be
+ used in subsequent calls (e.g. to
+ [Execute][build.bazel.remote.execution.v2.Execution.Execute]).
+
+ Servers MUST behave as though empty blobs are always available, even if they
+ have not been uploaded. Clients MAY optimize away the uploading or
+ downloading of empty blobs.
+
+ As with other services in the Remote Execution API, any call may return an
+ error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
+ information about when the client should retry the request; clients SHOULD
+ respect the information provided.
"""
- self.FindMissingBlobs = channel.unary_unary(
- '/build.bazel.remote.execution.v2.ContentAddressableStorage/FindMissingBlobs',
- request_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.FindMissingBlobsRequest.SerializeToString,
- response_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.FindMissingBlobsResponse.FromString,
- )
- self.BatchUpdateBlobs = channel.unary_unary(
- '/build.bazel.remote.execution.v2.ContentAddressableStorage/BatchUpdateBlobs',
- request_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchUpdateBlobsRequest.SerializeToString,
- response_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchUpdateBlobsResponse.FromString,
- )
- self.BatchReadBlobs = channel.unary_unary(
- '/build.bazel.remote.execution.v2.ContentAddressableStorage/BatchReadBlobs',
- request_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchReadBlobsRequest.SerializeToString,
- response_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchReadBlobsResponse.FromString,
- )
- self.GetTree = channel.unary_stream(
- '/build.bazel.remote.execution.v2.ContentAddressableStorage/GetTree',
- request_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetTreeRequest.SerializeToString,
- response_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetTreeResponse.FromString,
- )
+
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.FindMissingBlobs = channel.unary_unary(
+ '/build.bazel.remote.execution.v2.ContentAddressableStorage/FindMissingBlobs',
+ request_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.FindMissingBlobsRequest.SerializeToString,
+ response_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.FindMissingBlobsResponse.FromString,
+ )
+ self.BatchUpdateBlobs = channel.unary_unary(
+ '/build.bazel.remote.execution.v2.ContentAddressableStorage/BatchUpdateBlobs',
+ request_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchUpdateBlobsRequest.SerializeToString,
+ response_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchUpdateBlobsResponse.FromString,
+ )
+ self.BatchReadBlobs = channel.unary_unary(
+ '/build.bazel.remote.execution.v2.ContentAddressableStorage/BatchReadBlobs',
+ request_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchReadBlobsRequest.SerializeToString,
+ response_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchReadBlobsResponse.FromString,
+ )
+ self.GetTree = channel.unary_stream(
+ '/build.bazel.remote.execution.v2.ContentAddressableStorage/GetTree',
+ request_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetTreeRequest.SerializeToString,
+ response_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetTreeResponse.FromString,
+ )
class ContentAddressableStorageServicer(object):
- """The CAS (content-addressable storage) is used to store the inputs to and
- outputs from the execution service. Each piece of content is addressed by the
- digest of its binary data.
-
- Most of the binary data stored in the CAS is opaque to the execution engine,
- and is only used as a communication medium. In order to build an
- [Action][build.bazel.remote.execution.v2.Action],
- however, the client will need to also upload the
- [Command][build.bazel.remote.execution.v2.Command] and input root
- [Directory][build.bazel.remote.execution.v2.Directory] for the Action.
- The Command and Directory messages must be marshalled to wire format and then
- uploaded under the hash as with any other piece of content. In practice, the
- input root directory is likely to refer to other Directories in its
- hierarchy, which must also each be uploaded on their own.
-
- For small file uploads the client should group them together and call
- [BatchUpdateBlobs][build.bazel.remote.execution.v2.ContentAddressableStorage.BatchUpdateBlobs].
- For large uploads, the client must use the
- [Write method][google.bytestream.ByteStream.Write] of the ByteStream API. The
- `resource_name` is `{instance_name}/uploads/{uuid}/blobs/{hash}/{size}`,
- where `instance_name` is as described in the next paragraph, `uuid` is a
- version 4 UUID generated by the client, and `hash` and `size` are the
- [Digest][build.bazel.remote.execution.v2.Digest] of the blob. The
- `uuid` is used only to avoid collisions when multiple clients try to upload
- the same file (or the same client tries to upload the file multiple times at
- once on different threads), so the client MAY reuse the `uuid` for uploading
- different blobs. The `resource_name` may optionally have a trailing filename
- (or other metadata) for a client to use if it is storing URLs, as in
- `{instance}/uploads/{uuid}/blobs/{hash}/{size}/foo/bar/baz.cc`. Anything
- after the `size` is ignored.
-
- A single server MAY support multiple instances of the execution system, each
- with their own workers, storage, cache, etc. The exact relationship between
- instances is up to the server. If the server does, then the `instance_name`
- is an identifier, possibly containing multiple path segments, used to
- distinguish between the various instances on the server, in a manner defined
- by the server. For servers which do not support multiple instances, then the
- `instance_name` is the empty path and the leading slash is omitted, so that
- the `resource_name` becomes `uploads/{uuid}/blobs/{hash}/{size}`.
- To simplify parsing, a path segment cannot equal any of the following
- keywords: `blobs`, `uploads`, `actions`, `actionResults`, `operations` and
- `capabilities`.
-
- When attempting an upload, if another client has already completed the upload
- (which may occur in the middle of a single upload if another client uploads
- the same blob concurrently), the request will terminate immediately with
- a response whose `committed_size` is the full size of the uploaded file
- (regardless of how much data was transmitted by the client). If the client
- completes the upload but the
- [Digest][build.bazel.remote.execution.v2.Digest] does not match, an
- `INVALID_ARGUMENT` error will be returned. In either case, the client should
- not attempt to retry the upload.
-
- For downloading blobs, the client must use the
- [Read method][google.bytestream.ByteStream.Read] of the ByteStream API, with
- a `resource_name` of `"{instance_name}/blobs/{hash}/{size}"`, where
- `instance_name` is the instance name (see above), and `hash` and `size` are
- the [Digest][build.bazel.remote.execution.v2.Digest] of the blob.
-
- The lifetime of entries in the CAS is implementation specific, but it SHOULD
- be long enough to allow for newly-added and recently looked-up entries to be
- used in subsequent calls (e.g. to
- [Execute][build.bazel.remote.execution.v2.Execution.Execute]).
-
- Servers MUST behave as though empty blobs are always available, even if they
- have not been uploaded. Clients MAY optimize away the uploading or
- downloading of empty blobs.
-
- As with other services in the Remote Execution API, any call may return an
- error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
- information about when the client should retry the request; clients SHOULD
- respect the information provided.
- """
-
- def FindMissingBlobs(self, request, context):
- """Determine if blobs are present in the CAS.
-
- Clients can use this API before uploading blobs to determine which ones are
- already present in the CAS and do not need to be uploaded again.
-
- Servers SHOULD increase the TTLs of the referenced blobs if necessary and
- applicable.
-
- There are no method-specific errors.
+ """The CAS (content-addressable storage) is used to store the inputs to and
+ outputs from the execution service. Each piece of content is addressed by the
+ digest of its binary data.
+
+ Most of the binary data stored in the CAS is opaque to the execution engine,
+ and is only used as a communication medium. In order to build an
+ [Action][build.bazel.remote.execution.v2.Action],
+ however, the client will need to also upload the
+ [Command][build.bazel.remote.execution.v2.Command] and input root
+ [Directory][build.bazel.remote.execution.v2.Directory] for the Action.
+ The Command and Directory messages must be marshalled to wire format and then
+ uploaded under the hash as with any other piece of content. In practice, the
+ input root directory is likely to refer to other Directories in its
+ hierarchy, which must also each be uploaded on their own.
+
+ For small file uploads the client should group them together and call
+ [BatchUpdateBlobs][build.bazel.remote.execution.v2.ContentAddressableStorage.BatchUpdateBlobs].
+ For large uploads, the client must use the
+ [Write method][google.bytestream.ByteStream.Write] of the ByteStream API. The
+ `resource_name` is `{instance_name}/uploads/{uuid}/blobs/{hash}/{size}`,
+ where `instance_name` is as described in the next paragraph, `uuid` is a
+ version 4 UUID generated by the client, and `hash` and `size` are the
+ [Digest][build.bazel.remote.execution.v2.Digest] of the blob. The
+ `uuid` is used only to avoid collisions when multiple clients try to upload
+ the same file (or the same client tries to upload the file multiple times at
+ once on different threads), so the client MAY reuse the `uuid` for uploading
+ different blobs. The `resource_name` may optionally have a trailing filename
+ (or other metadata) for a client to use if it is storing URLs, as in
+ `{instance}/uploads/{uuid}/blobs/{hash}/{size}/foo/bar/baz.cc`. Anything
+ after the `size` is ignored.
+
+ A single server MAY support multiple instances of the execution system, each
+ with their own workers, storage, cache, etc. The exact relationship between
+ instances is up to the server. If the server does, then the `instance_name`
+ is an identifier, possibly containing multiple path segments, used to
+ distinguish between the various instances on the server, in a manner defined
+ by the server. For servers which do not support multiple instances, then the
+ `instance_name` is the empty path and the leading slash is omitted, so that
+ the `resource_name` becomes `uploads/{uuid}/blobs/{hash}/{size}`.
+ To simplify parsing, a path segment cannot equal any of the following
+ keywords: `blobs`, `uploads`, `actions`, `actionResults`, `operations` and
+ `capabilities`.
+
+ When attempting an upload, if another client has already completed the upload
+ (which may occur in the middle of a single upload if another client uploads
+ the same blob concurrently), the request will terminate immediately with
+ a response whose `committed_size` is the full size of the uploaded file
+ (regardless of how much data was transmitted by the client). If the client
+ completes the upload but the
+ [Digest][build.bazel.remote.execution.v2.Digest] does not match, an
+ `INVALID_ARGUMENT` error will be returned. In either case, the client should
+ not attempt to retry the upload.
+
+ For downloading blobs, the client must use the
+ [Read method][google.bytestream.ByteStream.Read] of the ByteStream API, with
+ a `resource_name` of `"{instance_name}/blobs/{hash}/{size}"`, where
+ `instance_name` is the instance name (see above), and `hash` and `size` are
+ the [Digest][build.bazel.remote.execution.v2.Digest] of the blob.
+
+ The lifetime of entries in the CAS is implementation specific, but it SHOULD
+ be long enough to allow for newly-added and recently looked-up entries to be
+ used in subsequent calls (e.g. to
+ [Execute][build.bazel.remote.execution.v2.Execution.Execute]).
+
+ Servers MUST behave as though empty blobs are always available, even if they
+ have not been uploaded. Clients MAY optimize away the uploading or
+ downloading of empty blobs.
+
+ As with other services in the Remote Execution API, any call may return an
+ error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
+ information about when the client should retry the request; clients SHOULD
+ respect the information provided.
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
- def BatchUpdateBlobs(self, request, context):
- """Upload many blobs at once.
+ def FindMissingBlobs(self, request, context):
+ """Determine if blobs are present in the CAS.
- The server may enforce a limit of the combined total size of blobs
- to be uploaded using this API. This limit may be obtained using the
- [Capabilities][build.bazel.remote.execution.v2.Capabilities] API.
- Requests exceeding the limit should either be split into smaller
- chunks or uploaded using the
- [ByteStream API][google.bytestream.ByteStream], as appropriate.
+ Clients can use this API before uploading blobs to determine which ones are
+ already present in the CAS and do not need to be uploaded again.
- This request is equivalent to calling a Bytestream `Write` request
- on each individual blob, in parallel. The requests may succeed or fail
- independently.
+ Servers SHOULD increase the TTLs of the referenced blobs if necessary and
+ applicable.
- Errors:
+ There are no method-specific errors.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
- * `INVALID_ARGUMENT`: The client attempted to upload more than the
- server supported limit.
+ def BatchUpdateBlobs(self, request, context):
+ """Upload many blobs at once.
- Individual requests may return the following errors, additionally:
+ The server may enforce a limit of the combined total size of blobs
+ to be uploaded using this API. This limit may be obtained using the
+ [Capabilities][build.bazel.remote.execution.v2.Capabilities] API.
+ Requests exceeding the limit should either be split into smaller
+ chunks or uploaded using the
+ [ByteStream API][google.bytestream.ByteStream], as appropriate.
- * `RESOURCE_EXHAUSTED`: There is insufficient disk quota to store the blob.
- * `INVALID_ARGUMENT`: The
- [Digest][build.bazel.remote.execution.v2.Digest] does not match the
- provided data.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ This request is equivalent to calling a Bytestream `Write` request
+ on each individual blob, in parallel. The requests may succeed or fail
+ independently.
- def BatchReadBlobs(self, request, context):
- """Download many blobs at once.
+ Errors:
- The server may enforce a limit of the combined total size of blobs
- to be downloaded using this API. This limit may be obtained using the
- [Capabilities][build.bazel.remote.execution.v2.Capabilities] API.
- Requests exceeding the limit should either be split into smaller
- chunks or downloaded using the
- [ByteStream API][google.bytestream.ByteStream], as appropriate.
+ * `INVALID_ARGUMENT`: The client attempted to upload more than the
+ server supported limit.
- This request is equivalent to calling a Bytestream `Read` request
- on each individual blob, in parallel. The requests may succeed or fail
- independently.
+ Individual requests may return the following errors, additionally:
- Errors:
+ * `RESOURCE_EXHAUSTED`: There is insufficient disk quota to store the blob.
+ * `INVALID_ARGUMENT`: The
+ [Digest][build.bazel.remote.execution.v2.Digest] does not match the
+ provided data.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
- * `INVALID_ARGUMENT`: The client attempted to read more than the
- server supported limit.
+ def BatchReadBlobs(self, request, context):
+ """Download many blobs at once.
- Every error on individual read will be returned in the corresponding digest
- status.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ The server may enforce a limit of the combined total size of blobs
+ to be downloaded using this API. This limit may be obtained using the
+ [Capabilities][build.bazel.remote.execution.v2.Capabilities] API.
+ Requests exceeding the limit should either be split into smaller
+ chunks or downloaded using the
+ [ByteStream API][google.bytestream.ByteStream], as appropriate.
- def GetTree(self, request, context):
- """Fetch the entire directory tree rooted at a node.
+ This request is equivalent to calling a Bytestream `Read` request
+ on each individual blob, in parallel. The requests may succeed or fail
+ independently.
- This request must be targeted at a
- [Directory][build.bazel.remote.execution.v2.Directory] stored in the
- [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage]
- (CAS). The server will enumerate the `Directory` tree recursively and
- return every node descended from the root.
+ Errors:
- The GetTreeRequest.page_token parameter can be used to skip ahead in
- the stream (e.g. when retrying a partially completed and aborted request),
- by setting it to a value taken from GetTreeResponse.next_page_token of the
- last successfully processed GetTreeResponse).
+ * `INVALID_ARGUMENT`: The client attempted to read more than the
+ server supported limit.
- The exact traversal order is unspecified and, unless retrieving subsequent
- pages from an earlier request, is not guaranteed to be stable across
- multiple invocations of `GetTree`.
+ Every error on individual read will be returned in the corresponding digest
+ status.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
- If part of the tree is missing from the CAS, the server will return the
- portion present and omit the rest.
+ def GetTree(self, request, context):
+ """Fetch the entire directory tree rooted at a node.
- Errors:
+ This request must be targeted at a
+ [Directory][build.bazel.remote.execution.v2.Directory] stored in the
+ [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage]
+ (CAS). The server will enumerate the `Directory` tree recursively and
+ return every node descended from the root.
- * `NOT_FOUND`: The requested tree root is not present in the CAS.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ The GetTreeRequest.page_token parameter can be used to skip ahead in
+ the stream (e.g. when retrying a partially completed and aborted request),
+ by setting it to a value taken from GetTreeResponse.next_page_token of the
+ last successfully processed GetTreeResponse.
+
+ The exact traversal order is unspecified and, unless retrieving subsequent
+ pages from an earlier request, is not guaranteed to be stable across
+ multiple invocations of `GetTree`.
+
+ If part of the tree is missing from the CAS, the server will return the
+ portion present and omit the rest.
+
+ Errors:
+
+ * `NOT_FOUND`: The requested tree root is not present in the CAS.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
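
The regenerated stubs keep the same call surface, so the per-blob semantics described in the BatchReadBlobs docstring above can be exercised with an ordinary channel-based client. A minimal sketch; the module, stub, and message names come from the generated code in this diff, while the endpoint and digest values are purely illustrative:

    import grpc
    from buildstream._protos.build.bazel.remote.execution.v2 import (
        remote_execution_pb2,
        remote_execution_pb2_grpc,
    )

    channel = grpc.insecure_channel('localhost:50051')  # hypothetical endpoint
    stub = remote_execution_pb2_grpc.ContentAddressableStorageStub(channel)

    digest = remote_execution_pb2.Digest(
        hash='0' * 64,  # placeholder SHA-256 hex digest
        size_bytes=0,
    )
    request = remote_execution_pb2.BatchReadBlobsRequest(
        instance_name='',  # empty for single-instance servers
        digests=[digest],
    )
    response = stub.BatchReadBlobs(request)
    for item in response.responses:
        # Reads succeed or fail independently; each blob carries its own status.
        print(item.digest.hash, len(item.data), item.status.code)
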
def add_ContentAddressableStorageServicer_to_server(servicer, server):
- rpc_method_handlers = {
- 'FindMissingBlobs': grpc.unary_unary_rpc_method_handler(
- servicer.FindMissingBlobs,
- request_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.FindMissingBlobsRequest.FromString,
- response_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.FindMissingBlobsResponse.SerializeToString,
- ),
- 'BatchUpdateBlobs': grpc.unary_unary_rpc_method_handler(
- servicer.BatchUpdateBlobs,
- request_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchUpdateBlobsRequest.FromString,
- response_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchUpdateBlobsResponse.SerializeToString,
- ),
- 'BatchReadBlobs': grpc.unary_unary_rpc_method_handler(
- servicer.BatchReadBlobs,
- request_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchReadBlobsRequest.FromString,
- response_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchReadBlobsResponse.SerializeToString,
- ),
- 'GetTree': grpc.unary_stream_rpc_method_handler(
- servicer.GetTree,
- request_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetTreeRequest.FromString,
- response_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetTreeResponse.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- 'build.bazel.remote.execution.v2.ContentAddressableStorage', rpc_method_handlers)
- server.add_generic_rpc_handlers((generic_handler,))
+ rpc_method_handlers = {
+ 'FindMissingBlobs': grpc.unary_unary_rpc_method_handler(
+ servicer.FindMissingBlobs,
+ request_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.FindMissingBlobsRequest.FromString,
+ response_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.FindMissingBlobsResponse.SerializeToString,
+ ),
+ 'BatchUpdateBlobs': grpc.unary_unary_rpc_method_handler(
+ servicer.BatchUpdateBlobs,
+ request_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchUpdateBlobsRequest.FromString,
+ response_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchUpdateBlobsResponse.SerializeToString,
+ ),
+ 'BatchReadBlobs': grpc.unary_unary_rpc_method_handler(
+ servicer.BatchReadBlobs,
+ request_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchReadBlobsRequest.FromString,
+ response_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchReadBlobsResponse.SerializeToString,
+ ),
+ 'GetTree': grpc.unary_stream_rpc_method_handler(
+ servicer.GetTree,
+ request_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetTreeRequest.FromString,
+ response_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetTreeResponse.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ 'build.bazel.remote.execution.v2.ContentAddressableStorage', rpc_method_handlers)
+ server.add_generic_rpc_handlers((generic_handler,))
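
The registration helper is unchanged apart from the reformatting, and wiring it up still follows the stock grpcio server pattern. A minimal sketch, assuming a hypothetical servicer subclass and port; methods left unimplemented inherit the UNIMPLEMENTED behaviour from the generated base class:

    from concurrent import futures

    import grpc
    from buildstream._protos.build.bazel.remote.execution.v2 import (
        remote_execution_pb2_grpc,
    )

    class MyCASServicer(remote_execution_pb2_grpc.ContentAddressableStorageServicer):
        # Override FindMissingBlobs, BatchUpdateBlobs, etc. as needed; anything
        # not overridden keeps the inherited UNIMPLEMENTED behaviour.
        pass

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    remote_execution_pb2_grpc.add_ContentAddressableStorageServicer_to_server(
        MyCASServicer(), server)
    server.add_insecure_port('[::]:50051')  # hypothetical port
    server.start()
    server.wait_for_termination()
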
+
+
+ # This class is part of an EXPERIMENTAL API.
+class ContentAddressableStorage(object):
+ """The CAS (content-addressable storage) is used to store the inputs to and
+ outputs from the execution service. Each piece of content is addressed by the
+ digest of its binary data.
+
+ Most of the binary data stored in the CAS is opaque to the execution engine,
+ and is only used as a communication medium. In order to build an
+ [Action][build.bazel.remote.execution.v2.Action],
+ however, the client will need to also upload the
+ [Command][build.bazel.remote.execution.v2.Command] and input root
+ [Directory][build.bazel.remote.execution.v2.Directory] for the Action.
+ The Command and Directory messages must be marshalled to wire format and then
+ uploaded under the hash as with any other piece of content. In practice, the
+ input root directory is likely to refer to other Directories in its
+ hierarchy, which must also each be uploaded on their own.
+
+ For small file uploads the client should group them together and call
+ [BatchUpdateBlobs][build.bazel.remote.execution.v2.ContentAddressableStorage.BatchUpdateBlobs].
+ For large uploads, the client must use the
+ [Write method][google.bytestream.ByteStream.Write] of the ByteStream API. The
+ `resource_name` is `{instance_name}/uploads/{uuid}/blobs/{hash}/{size}`,
+ where `instance_name` is as described in the next paragraph, `uuid` is a
+ version 4 UUID generated by the client, and `hash` and `size` are the
+ [Digest][build.bazel.remote.execution.v2.Digest] of the blob. The
+ `uuid` is used only to avoid collisions when multiple clients try to upload
+ the same file (or the same client tries to upload the file multiple times at
+ once on different threads), so the client MAY reuse the `uuid` for uploading
+ different blobs. The `resource_name` may optionally have a trailing filename
+ (or other metadata) for a client to use if it is storing URLs, as in
+ `{instance}/uploads/{uuid}/blobs/{hash}/{size}/foo/bar/baz.cc`. Anything
+ after the `size` is ignored.
+
+ A single server MAY support multiple instances of the execution system, each
+ with their own workers, storage, cache, etc. The exact relationship between
+ instances is up to the server. If the server does support multiple
+ instances, the `instance_name` is an identifier, possibly containing
+ multiple path segments, used to distinguish between the various instances
+ on the server, in a manner defined by the server. For servers which do not
+ support multiple instances, the `instance_name` is the empty path and the
+ leading slash is omitted, so that
+ the `resource_name` becomes `uploads/{uuid}/blobs/{hash}/{size}`.
+ To simplify parsing, a path segment cannot equal any of the following
+ keywords: `blobs`, `uploads`, `actions`, `actionResults`, `operations` and
+ `capabilities`.
+
+ When attempting an upload, if another client has already completed the upload
+ (which may occur in the middle of a single upload if another client uploads
+ the same blob concurrently), the request will terminate immediately with
+ a response whose `committed_size` is the full size of the uploaded file
+ (regardless of how much data was transmitted by the client). If the client
+ completes the upload but the
+ [Digest][build.bazel.remote.execution.v2.Digest] does not match, an
+ `INVALID_ARGUMENT` error will be returned. In either case, the client should
+ not attempt to retry the upload.
+
+ For downloading blobs, the client must use the
+ [Read method][google.bytestream.ByteStream.Read] of the ByteStream API, with
+ a `resource_name` of `"{instance_name}/blobs/{hash}/{size}"`, where
+ `instance_name` is the instance name (see above), and `hash` and `size` are
+ the [Digest][build.bazel.remote.execution.v2.Digest] of the blob.
+
+ The lifetime of entries in the CAS is implementation specific, but it SHOULD
+ be long enough to allow for newly-added and recently looked-up entries to be
+ used in subsequent calls (e.g. to
+ [Execute][build.bazel.remote.execution.v2.Execution.Execute]).
+
+ Servers MUST behave as though empty blobs are always available, even if they
+ have not been uploaded. Clients MAY optimize away the uploading or
+ downloading of empty blobs.
+
+ As with other services in the Remote Execution API, any call may return an
+ error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
+ information about when the client should retry the request; clients SHOULD
+ respect the information provided.
+ """
+
+ @staticmethod
+ def FindMissingBlobs(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/build.bazel.remote.execution.v2.ContentAddressableStorage/FindMissingBlobs',
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.FindMissingBlobsRequest.SerializeToString,
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.FindMissingBlobsResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def BatchUpdateBlobs(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/build.bazel.remote.execution.v2.ContentAddressableStorage/BatchUpdateBlobs',
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchUpdateBlobsRequest.SerializeToString,
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchUpdateBlobsResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def BatchReadBlobs(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/build.bazel.remote.execution.v2.ContentAddressableStorage/BatchReadBlobs',
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchReadBlobsRequest.SerializeToString,
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.BatchReadBlobsResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def GetTree(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_stream(request, target, '/build.bazel.remote.execution.v2.ContentAddressableStorage/GetTree',
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetTreeRequest.SerializeToString,
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetTreeResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
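
The docstring above fixes the ByteStream resource-name layout, so the strings can be assembled mechanically. A minimal sketch with hypothetical helper names; the path layout is taken verbatim from the docstring, including the rule that an empty `instance_name` drops the leading segment and slash:

    import uuid

    def upload_resource_name(instance_name, digest):
        # {instance_name}/uploads/{uuid}/blobs/{hash}/{size} for ByteStream Write;
        # filtering empty segments drops the leading slash for the empty instance.
        segments = [instance_name, 'uploads', str(uuid.uuid4()),
                    'blobs', digest.hash, str(digest.size_bytes)]
        return '/'.join(s for s in segments if s)

    def download_resource_name(instance_name, digest):
        # {instance_name}/blobs/{hash}/{size} for ByteStream Read.
        segments = [instance_name, 'blobs', digest.hash, str(digest.size_bytes)]
        return '/'.join(s for s in segments if s)
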
class CapabilitiesStub(object):
- """The Capabilities service may be used by remote execution clients to query
- various server properties, in order to self-configure or return meaningful
- error messages.
+ """The Capabilities service may be used by remote execution clients to query
+ various server properties, in order to self-configure or return meaningful
+ error messages.
- The query may include a particular `instance_name`, in which case the values
- returned will pertain to that instance.
- """
+ The query may include a particular `instance_name`, in which case the values
+ returned will pertain to that instance.
+ """
- def __init__(self, channel):
- """Constructor.
+ def __init__(self, channel):
+ """Constructor.
- Args:
- channel: A grpc.Channel.
- """
- self.GetCapabilities = channel.unary_unary(
- '/build.bazel.remote.execution.v2.Capabilities/GetCapabilities',
- request_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetCapabilitiesRequest.SerializeToString,
- response_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ServerCapabilities.FromString,
- )
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.GetCapabilities = channel.unary_unary(
+ '/build.bazel.remote.execution.v2.Capabilities/GetCapabilities',
+ request_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetCapabilitiesRequest.SerializeToString,
+ response_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ServerCapabilities.FromString,
+ )
class CapabilitiesServicer(object):
- """The Capabilities service may be used by remote execution clients to query
- various server properties, in order to self-configure or return meaningful
- error messages.
-
- The query may include a particular `instance_name`, in which case the values
- returned will pertain to that instance.
- """
-
- def GetCapabilities(self, request, context):
- """GetCapabilities returns the server capabilities configuration of the
- remote endpoint.
- Only the capabilities of the services supported by the endpoint will
- be returned:
- * Execution + CAS + Action Cache endpoints should return both
- CacheCapabilities and ExecutionCapabilities.
- * Execution only endpoints should return ExecutionCapabilities.
- * CAS + Action Cache only endpoints should return CacheCapabilities.
+ """The Capabilities service may be used by remote execution clients to query
+ various server properties, in order to self-configure or return meaningful
+ error messages.
+
+ The query may include a particular `instance_name`, in which case the values
+ returned will pertain to that instance.
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+
+ def GetCapabilities(self, request, context):
+ """GetCapabilities returns the server capabilities configuration of the
+ remote endpoint.
+ Only the capabilities of the services supported by the endpoint will
+ be returned:
+ * Execution + CAS + Action Cache endpoints should return both
+ CacheCapabilities and ExecutionCapabilities.
+ * Execution only endpoints should return ExecutionCapabilities.
+ * CAS + Action Cache only endpoints should return CacheCapabilities.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
def add_CapabilitiesServicer_to_server(servicer, server):
- rpc_method_handlers = {
- 'GetCapabilities': grpc.unary_unary_rpc_method_handler(
- servicer.GetCapabilities,
- request_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetCapabilitiesRequest.FromString,
- response_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ServerCapabilities.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- 'build.bazel.remote.execution.v2.Capabilities', rpc_method_handlers)
- server.add_generic_rpc_handlers((generic_handler,))
+ rpc_method_handlers = {
+ 'GetCapabilities': grpc.unary_unary_rpc_method_handler(
+ servicer.GetCapabilities,
+ request_deserializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetCapabilitiesRequest.FromString,
+ response_serializer=build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ServerCapabilities.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ 'build.bazel.remote.execution.v2.Capabilities', rpc_method_handlers)
+ server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class Capabilities(object):
+ """The Capabilities service may be used by remote execution clients to query
+ various server properties, in order to self-configure or return meaningful
+ error messages.
+
+ The query may include a particular `instance_name`, in which case the values
+ returned will pertain to that instance.
+ """
+
+ @staticmethod
+ def GetCapabilities(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/build.bazel.remote.execution.v2.Capabilities/GetCapabilities',
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.GetCapabilitiesRequest.SerializeToString,
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.ServerCapabilities.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
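
Per the GetCapabilities docstring, a combined endpoint reports both cache and execution capabilities, which lets a client discover the batch size limit referenced by the CAS methods earlier in this diff. A minimal sketch, assuming a hypothetical endpoint; the field path follows the REAPI ServerCapabilities message, where 0 conventionally means no limit is advertised:

    import grpc
    from buildstream._protos.build.bazel.remote.execution.v2 import (
        remote_execution_pb2,
        remote_execution_pb2_grpc,
    )

    channel = grpc.insecure_channel('localhost:50051')  # hypothetical endpoint
    stub = remote_execution_pb2_grpc.CapabilitiesStub(channel)
    caps = stub.GetCapabilities(
        remote_execution_pb2.GetCapabilitiesRequest(instance_name=''))
    print(caps.cache_capabilities.max_batch_total_size_bytes)
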
diff --git a/src/buildstream/_protos/build/buildgrid/local_cas_pb2_grpc.py b/src/buildstream/_protos/build/buildgrid/local_cas_pb2_grpc.py
index 68af4826c..8bbd902dd 100644
--- a/src/buildstream/_protos/build/buildgrid/local_cas_pb2_grpc.py
+++ b/src/buildstream/_protos/build/buildgrid/local_cas_pb2_grpc.py
@@ -5,251 +5,398 @@ from buildstream._protos.build.buildgrid import local_cas_pb2 as build_dot_build
class LocalContentAddressableStorageStub(object):
- # missing associated documentation comment in .proto file
- pass
-
- def __init__(self, channel):
- """Constructor.
-
- Args:
- channel: A grpc.Channel.
- """
- self.FetchMissingBlobs = channel.unary_unary(
- '/build.buildgrid.LocalContentAddressableStorage/FetchMissingBlobs',
- request_serializer=build_dot_buildgrid_dot_local__cas__pb2.FetchMissingBlobsRequest.SerializeToString,
- response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.FetchMissingBlobsResponse.FromString,
- )
- self.UploadMissingBlobs = channel.unary_unary(
- '/build.buildgrid.LocalContentAddressableStorage/UploadMissingBlobs',
- request_serializer=build_dot_buildgrid_dot_local__cas__pb2.UploadMissingBlobsRequest.SerializeToString,
- response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.UploadMissingBlobsResponse.FromString,
- )
- self.FetchTree = channel.unary_unary(
- '/build.buildgrid.LocalContentAddressableStorage/FetchTree',
- request_serializer=build_dot_buildgrid_dot_local__cas__pb2.FetchTreeRequest.SerializeToString,
- response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.FetchTreeResponse.FromString,
- )
- self.UploadTree = channel.unary_unary(
- '/build.buildgrid.LocalContentAddressableStorage/UploadTree',
- request_serializer=build_dot_buildgrid_dot_local__cas__pb2.UploadTreeRequest.SerializeToString,
- response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.UploadTreeResponse.FromString,
- )
- self.StageTree = channel.stream_stream(
- '/build.buildgrid.LocalContentAddressableStorage/StageTree',
- request_serializer=build_dot_buildgrid_dot_local__cas__pb2.StageTreeRequest.SerializeToString,
- response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.StageTreeResponse.FromString,
- )
- self.CaptureTree = channel.unary_unary(
- '/build.buildgrid.LocalContentAddressableStorage/CaptureTree',
- request_serializer=build_dot_buildgrid_dot_local__cas__pb2.CaptureTreeRequest.SerializeToString,
- response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.CaptureTreeResponse.FromString,
- )
- self.CaptureFiles = channel.unary_unary(
- '/build.buildgrid.LocalContentAddressableStorage/CaptureFiles',
- request_serializer=build_dot_buildgrid_dot_local__cas__pb2.CaptureFilesRequest.SerializeToString,
- response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.CaptureFilesResponse.FromString,
- )
- self.GetInstanceNameForRemote = channel.unary_unary(
- '/build.buildgrid.LocalContentAddressableStorage/GetInstanceNameForRemote',
- request_serializer=build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForRemoteRequest.SerializeToString,
- response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForRemoteResponse.FromString,
- )
- self.GetLocalDiskUsage = channel.unary_unary(
- '/build.buildgrid.LocalContentAddressableStorage/GetLocalDiskUsage',
- request_serializer=build_dot_buildgrid_dot_local__cas__pb2.GetLocalDiskUsageRequest.SerializeToString,
- response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.GetLocalDiskUsageResponse.FromString,
- )
+ """Missing associated documentation comment in .proto file"""
+
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.FetchMissingBlobs = channel.unary_unary(
+ '/build.buildgrid.LocalContentAddressableStorage/FetchMissingBlobs',
+ request_serializer=build_dot_buildgrid_dot_local__cas__pb2.FetchMissingBlobsRequest.SerializeToString,
+ response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.FetchMissingBlobsResponse.FromString,
+ )
+ self.UploadMissingBlobs = channel.unary_unary(
+ '/build.buildgrid.LocalContentAddressableStorage/UploadMissingBlobs',
+ request_serializer=build_dot_buildgrid_dot_local__cas__pb2.UploadMissingBlobsRequest.SerializeToString,
+ response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.UploadMissingBlobsResponse.FromString,
+ )
+ self.FetchTree = channel.unary_unary(
+ '/build.buildgrid.LocalContentAddressableStorage/FetchTree',
+ request_serializer=build_dot_buildgrid_dot_local__cas__pb2.FetchTreeRequest.SerializeToString,
+ response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.FetchTreeResponse.FromString,
+ )
+ self.UploadTree = channel.unary_unary(
+ '/build.buildgrid.LocalContentAddressableStorage/UploadTree',
+ request_serializer=build_dot_buildgrid_dot_local__cas__pb2.UploadTreeRequest.SerializeToString,
+ response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.UploadTreeResponse.FromString,
+ )
+ self.StageTree = channel.stream_stream(
+ '/build.buildgrid.LocalContentAddressableStorage/StageTree',
+ request_serializer=build_dot_buildgrid_dot_local__cas__pb2.StageTreeRequest.SerializeToString,
+ response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.StageTreeResponse.FromString,
+ )
+ self.CaptureTree = channel.unary_unary(
+ '/build.buildgrid.LocalContentAddressableStorage/CaptureTree',
+ request_serializer=build_dot_buildgrid_dot_local__cas__pb2.CaptureTreeRequest.SerializeToString,
+ response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.CaptureTreeResponse.FromString,
+ )
+ self.CaptureFiles = channel.unary_unary(
+ '/build.buildgrid.LocalContentAddressableStorage/CaptureFiles',
+ request_serializer=build_dot_buildgrid_dot_local__cas__pb2.CaptureFilesRequest.SerializeToString,
+ response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.CaptureFilesResponse.FromString,
+ )
+ self.GetInstanceNameForRemote = channel.unary_unary(
+ '/build.buildgrid.LocalContentAddressableStorage/GetInstanceNameForRemote',
+ request_serializer=build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForRemoteRequest.SerializeToString,
+ response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForRemoteResponse.FromString,
+ )
+ self.GetLocalDiskUsage = channel.unary_unary(
+ '/build.buildgrid.LocalContentAddressableStorage/GetLocalDiskUsage',
+ request_serializer=build_dot_buildgrid_dot_local__cas__pb2.GetLocalDiskUsageRequest.SerializeToString,
+ response_deserializer=build_dot_buildgrid_dot_local__cas__pb2.GetLocalDiskUsageResponse.FromString,
+ )
class LocalContentAddressableStorageServicer(object):
- # missing associated documentation comment in .proto file
- pass
+ """Missing associated documentation comment in .proto file"""
- def FetchMissingBlobs(self, request, context):
- """Fetch blobs from a remote CAS to the local cache.
-
- This request is equivalent to ByteStream `Read` or `BatchReadBlobs`
- requests, storing the downloaded blobs in the local cache.
-
- Requested blobs that failed to be downloaded will be listed in the
- response.
-
- Errors:
- * `INVALID_ARGUMENT`: The client attempted to download more than the
- server supported limit.
-
- Individual requests may return the following error, additionally:
- * `NOT_FOUND`: The requested blob is not present in the remote CAS.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
-
- def UploadMissingBlobs(self, request, context):
- """Upload blobs from the local cache to a remote CAS.
-
- This request is equivalent to `FindMissingBlobs` followed by
- ByteStream `Write` or `BatchUpdateBlobs` requests.
-
- Blobs that failed to be uploaded will be listed in the response.
-
- Errors:
- * `INVALID_ARGUMENT`: The client attempted to upload more than the
- server supported limit.
-
- Individual requests may return the following errors, additionally:
- * `NOT_FOUND`: The requested blob is not present in the local cache.
- * `RESOURCE_EXHAUSTED`: There is insufficient disk quota to store the blob.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
-
- def FetchTree(self, request, context):
- """Fetch the entire directory tree rooted at a node from a remote CAS to the
- local cache.
-
- This request is equivalent to `GetTree`, storing the `Directory` objects
- in the local cache. Optionally, this will also fetch all blobs referenced
- by the `Directory` objects, equivalent to `FetchMissingBlobs`.
-
- If no remote CAS is available, this will check presence of the entire
- directory tree (and optionally also file blobs) in the local cache.
-
- * `NOT_FOUND`: The requested tree is not present in the CAS or incomplete.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
-
- def UploadTree(self, request, context):
- """Upload the entire directory tree from the local cache to a remote CAS.
-
- This request is equivalent to `UploadMissingBlobs` for all blobs
- referenced by the specified tree (recursively).
-
- Errors:
- * `NOT_FOUND`: The requested tree root is not present in the local cache.
- * `RESOURCE_EXHAUSTED`: There is insufficient disk quota to store the tree.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
-
- def StageTree(self, request_iterator, context):
- """Stage a directory tree in the local filesystem.
-
- This makes the specified directory tree temporarily available for local
- filesystem access. It is implementation-defined whether this uses a
- userspace filesystem such as FUSE, hardlinking or a full copy.
-
- Missing blobs are fetched, if a CAS remote is configured.
-
- Staging starts when the server receives the initial request; the staged
- tree is ready for use once the server sends its initial (non-error)
- response.
-
- The server will clean up the staged directory when it either
- receives an additional request (with all fields unset) or when the
- stream is closed. The server will send an additional response after
- cleanup is complete.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
-
- def CaptureTree(self, request, context):
- """Capture a directory tree from the local filesystem.
-
- This imports the specified path from the local filesystem into CAS.
-
- If a CAS remote is configured, the blobs are uploaded.
- The `bypass_local_cache` parameter is a hint to indicate whether the blobs
- shall be uploaded without first storing them in the local cache.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
-
- def CaptureFiles(self, request, context):
- """Capture files from the local filesystem.
-
- This imports the specified paths from the local filesystem into CAS.
-
- If a CAS remote is configured, the blobs are uploaded.
- The `bypass_local_cache` parameter is a hint to indicate whether the blobs
- shall be uploaded without first storing them in the local cache.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
-
- def GetInstanceNameForRemote(self, request, context):
- """Configure remote CAS endpoint.
-
- This returns a string that can be used as instance_name to access the
- specified endpoint in further requests.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
-
- def GetLocalDiskUsage(self, request, context):
- """Query total space used by the local cache.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ def FetchMissingBlobs(self, request, context):
+ """Fetch blobs from a remote CAS to the local cache.
+
+ This request is equivalent to ByteStream `Read` or `BatchReadBlobs`
+ requests, storing the downloaded blobs in the local cache.
+
+ Requested blobs that failed to be downloaded will be listed in the
+ response.
+
+ Errors:
+ * `INVALID_ARGUMENT`: The client attempted to download more than the
+ server supported limit.
+
+ Individual requests may return the following error, additionally:
+ * `NOT_FOUND`: The requested blob is not present in the remote CAS.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def UploadMissingBlobs(self, request, context):
+ """Upload blobs from the local cache to a remote CAS.
+
+ This request is equivalent to `FindMissingBlobs` followed by
+ ByteStream `Write` or `BatchUpdateBlobs` requests.
+
+ Blobs that failed to be uploaded will be listed in the response.
+
+ Errors:
+ * `INVALID_ARGUMENT`: The client attempted to upload more than the
+ server supported limit.
+
+ Individual requests may return the following errors, additionally:
+ * `NOT_FOUND`: The requested blob is not present in the local cache.
+ * `RESOURCE_EXHAUSTED`: There is insufficient disk quota to store the blob.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def FetchTree(self, request, context):
+ """Fetch the entire directory tree rooted at a node from a remote CAS to the
+ local cache.
+
+ This request is equivalent to `GetTree`, storing the `Directory` objects
+ in the local cache. Optionally, this will also fetch all blobs referenced
+ by the `Directory` objects, equivalent to `FetchMissingBlobs`.
+
+ If no remote CAS is available, this will check presence of the entire
+ directory tree (and optionally also file blobs) in the local cache.
+
+ * `NOT_FOUND`: The requested tree is not present in the CAS or incomplete.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def UploadTree(self, request, context):
+ """Upload the entire directory tree from the local cache to a remote CAS.
+
+ This request is equivalent to `UploadMissingBlobs` for all blobs
+ referenced by the specified tree (recursively).
+
+ Errors:
+ * `NOT_FOUND`: The requested tree root is not present in the local cache.
+ * `RESOURCE_EXHAUSTED`: There is insufficient disk quota to store the tree.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def StageTree(self, request_iterator, context):
+ """Stage a directory tree in the local filesystem.
+
+ This makes the specified directory tree temporarily available for local
+ filesystem access. It is implementation-defined whether this uses a
+ userspace filesystem such as FUSE, hardlinking or a full copy.
+
+ Missing blobs are fetched, if a CAS remote is configured.
+
+ Staging starts when the server receives the initial request; the staged
+ tree is ready for use once the server sends its initial (non-error)
+ response.
+
+ The server will clean up the staged directory when it either
+ receives an additional request (with all fields unset) or when the
+ stream is closed. The server will send an additional response after
+ cleanup is complete.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def CaptureTree(self, request, context):
+ """Capture a directory tree from the local filesystem.
+
+ This imports the specified path from the local filesystem into CAS.
+
+ If a CAS remote is configured, the blobs are uploaded.
+ The `bypass_local_cache` parameter is a hint to indicate whether the blobs
+ shall be uploaded without first storing them in the local cache.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def CaptureFiles(self, request, context):
+ """Capture files from the local filesystem.
+
+ This imports the specified paths from the local filesystem into CAS.
+
+ If a CAS remote is configured, the blobs are uploaded.
+ The `bypass_local_cache` parameter is a hint to indicate whether the blobs
+ shall be uploaded without first storing them in the local cache.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def GetInstanceNameForRemote(self, request, context):
+ """Configure remote CAS endpoint.
+
+ This returns a string that can be used as instance_name to access the
+ specified endpoint in further requests.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def GetLocalDiskUsage(self, request, context):
+ """Query total space used by the local cache.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
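
The StageTree docstring above describes a two-message protocol over a single bidirectional stream: staging is usable after the first response, and a follow-up request with all fields unset triggers cleanup. A minimal client sketch; the endpoint, digest value, and the `root_digest` field name are assumptions (the latter from local_cas.proto), and only the protocol itself is taken from the docstring:

    import queue

    import grpc
    from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
    from buildstream._protos.build.buildgrid import local_cas_pb2, local_cas_pb2_grpc

    channel = grpc.insecure_channel('localhost:50052')  # hypothetical endpoint
    stub = local_cas_pb2_grpc.LocalContentAddressableStorageStub(channel)

    digest = remote_execution_pb2.Digest(hash='0' * 64, size_bytes=0)  # placeholder
    requests = queue.SimpleQueue()
    # `root_digest` is an assumed field name from local_cas.proto.
    requests.put(local_cas_pb2.StageTreeRequest(root_digest=digest))

    def request_iter():
        while True:
            req = requests.get()
            if req is None:
                return  # ending the iterator closes the request stream
            yield req

    responses = stub.StageTree(request_iter())
    next(responses)  # the staged tree is ready once this first response arrives
    # ... access the staged directory on the local filesystem here ...
    requests.put(local_cas_pb2.StageTreeRequest())  # all fields unset: clean up
    next(responses)  # the server confirms after cleanup is complete
    requests.put(None)
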
def add_LocalContentAddressableStorageServicer_to_server(servicer, server):
- rpc_method_handlers = {
- 'FetchMissingBlobs': grpc.unary_unary_rpc_method_handler(
- servicer.FetchMissingBlobs,
- request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.FetchMissingBlobsRequest.FromString,
- response_serializer=build_dot_buildgrid_dot_local__cas__pb2.FetchMissingBlobsResponse.SerializeToString,
- ),
- 'UploadMissingBlobs': grpc.unary_unary_rpc_method_handler(
- servicer.UploadMissingBlobs,
- request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.UploadMissingBlobsRequest.FromString,
- response_serializer=build_dot_buildgrid_dot_local__cas__pb2.UploadMissingBlobsResponse.SerializeToString,
- ),
- 'FetchTree': grpc.unary_unary_rpc_method_handler(
- servicer.FetchTree,
- request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.FetchTreeRequest.FromString,
- response_serializer=build_dot_buildgrid_dot_local__cas__pb2.FetchTreeResponse.SerializeToString,
- ),
- 'UploadTree': grpc.unary_unary_rpc_method_handler(
- servicer.UploadTree,
- request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.UploadTreeRequest.FromString,
- response_serializer=build_dot_buildgrid_dot_local__cas__pb2.UploadTreeResponse.SerializeToString,
- ),
- 'StageTree': grpc.stream_stream_rpc_method_handler(
- servicer.StageTree,
- request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.StageTreeRequest.FromString,
- response_serializer=build_dot_buildgrid_dot_local__cas__pb2.StageTreeResponse.SerializeToString,
- ),
- 'CaptureTree': grpc.unary_unary_rpc_method_handler(
- servicer.CaptureTree,
- request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.CaptureTreeRequest.FromString,
- response_serializer=build_dot_buildgrid_dot_local__cas__pb2.CaptureTreeResponse.SerializeToString,
- ),
- 'CaptureFiles': grpc.unary_unary_rpc_method_handler(
- servicer.CaptureFiles,
- request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.CaptureFilesRequest.FromString,
- response_serializer=build_dot_buildgrid_dot_local__cas__pb2.CaptureFilesResponse.SerializeToString,
- ),
- 'GetInstanceNameForRemote': grpc.unary_unary_rpc_method_handler(
- servicer.GetInstanceNameForRemote,
- request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForRemoteRequest.FromString,
- response_serializer=build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForRemoteResponse.SerializeToString,
- ),
- 'GetLocalDiskUsage': grpc.unary_unary_rpc_method_handler(
- servicer.GetLocalDiskUsage,
- request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.GetLocalDiskUsageRequest.FromString,
- response_serializer=build_dot_buildgrid_dot_local__cas__pb2.GetLocalDiskUsageResponse.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- 'build.buildgrid.LocalContentAddressableStorage', rpc_method_handlers)
- server.add_generic_rpc_handlers((generic_handler,))
+ rpc_method_handlers = {
+ 'FetchMissingBlobs': grpc.unary_unary_rpc_method_handler(
+ servicer.FetchMissingBlobs,
+ request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.FetchMissingBlobsRequest.FromString,
+ response_serializer=build_dot_buildgrid_dot_local__cas__pb2.FetchMissingBlobsResponse.SerializeToString,
+ ),
+ 'UploadMissingBlobs': grpc.unary_unary_rpc_method_handler(
+ servicer.UploadMissingBlobs,
+ request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.UploadMissingBlobsRequest.FromString,
+ response_serializer=build_dot_buildgrid_dot_local__cas__pb2.UploadMissingBlobsResponse.SerializeToString,
+ ),
+ 'FetchTree': grpc.unary_unary_rpc_method_handler(
+ servicer.FetchTree,
+ request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.FetchTreeRequest.FromString,
+ response_serializer=build_dot_buildgrid_dot_local__cas__pb2.FetchTreeResponse.SerializeToString,
+ ),
+ 'UploadTree': grpc.unary_unary_rpc_method_handler(
+ servicer.UploadTree,
+ request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.UploadTreeRequest.FromString,
+ response_serializer=build_dot_buildgrid_dot_local__cas__pb2.UploadTreeResponse.SerializeToString,
+ ),
+ 'StageTree': grpc.stream_stream_rpc_method_handler(
+ servicer.StageTree,
+ request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.StageTreeRequest.FromString,
+ response_serializer=build_dot_buildgrid_dot_local__cas__pb2.StageTreeResponse.SerializeToString,
+ ),
+ 'CaptureTree': grpc.unary_unary_rpc_method_handler(
+ servicer.CaptureTree,
+ request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.CaptureTreeRequest.FromString,
+ response_serializer=build_dot_buildgrid_dot_local__cas__pb2.CaptureTreeResponse.SerializeToString,
+ ),
+ 'CaptureFiles': grpc.unary_unary_rpc_method_handler(
+ servicer.CaptureFiles,
+ request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.CaptureFilesRequest.FromString,
+ response_serializer=build_dot_buildgrid_dot_local__cas__pb2.CaptureFilesResponse.SerializeToString,
+ ),
+ 'GetInstanceNameForRemote': grpc.unary_unary_rpc_method_handler(
+ servicer.GetInstanceNameForRemote,
+ request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForRemoteRequest.FromString,
+ response_serializer=build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForRemoteResponse.SerializeToString,
+ ),
+ 'GetLocalDiskUsage': grpc.unary_unary_rpc_method_handler(
+ servicer.GetLocalDiskUsage,
+ request_deserializer=build_dot_buildgrid_dot_local__cas__pb2.GetLocalDiskUsageRequest.FromString,
+ response_serializer=build_dot_buildgrid_dot_local__cas__pb2.GetLocalDiskUsageResponse.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ 'build.buildgrid.LocalContentAddressableStorage', rpc_method_handlers)
+ server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class LocalContentAddressableStorage(object):
+ """Missing associated documentation comment in .proto file"""
+
+ @staticmethod
+ def FetchMissingBlobs(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/build.buildgrid.LocalContentAddressableStorage/FetchMissingBlobs',
+ build_dot_buildgrid_dot_local__cas__pb2.FetchMissingBlobsRequest.SerializeToString,
+ build_dot_buildgrid_dot_local__cas__pb2.FetchMissingBlobsResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def UploadMissingBlobs(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/build.buildgrid.LocalContentAddressableStorage/UploadMissingBlobs',
+ build_dot_buildgrid_dot_local__cas__pb2.UploadMissingBlobsRequest.SerializeToString,
+ build_dot_buildgrid_dot_local__cas__pb2.UploadMissingBlobsResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def FetchTree(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/build.buildgrid.LocalContentAddressableStorage/FetchTree',
+ build_dot_buildgrid_dot_local__cas__pb2.FetchTreeRequest.SerializeToString,
+ build_dot_buildgrid_dot_local__cas__pb2.FetchTreeResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def UploadTree(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/build.buildgrid.LocalContentAddressableStorage/UploadTree',
+ build_dot_buildgrid_dot_local__cas__pb2.UploadTreeRequest.SerializeToString,
+ build_dot_buildgrid_dot_local__cas__pb2.UploadTreeResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def StageTree(request_iterator,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.stream_stream(request_iterator, target, '/build.buildgrid.LocalContentAddressableStorage/StageTree',
+ build_dot_buildgrid_dot_local__cas__pb2.StageTreeRequest.SerializeToString,
+ build_dot_buildgrid_dot_local__cas__pb2.StageTreeResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def CaptureTree(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/build.buildgrid.LocalContentAddressableStorage/CaptureTree',
+ build_dot_buildgrid_dot_local__cas__pb2.CaptureTreeRequest.SerializeToString,
+ build_dot_buildgrid_dot_local__cas__pb2.CaptureTreeResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def CaptureFiles(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/build.buildgrid.LocalContentAddressableStorage/CaptureFiles',
+ build_dot_buildgrid_dot_local__cas__pb2.CaptureFilesRequest.SerializeToString,
+ build_dot_buildgrid_dot_local__cas__pb2.CaptureFilesResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def GetInstanceNameForRemote(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/build.buildgrid.LocalContentAddressableStorage/GetInstanceNameForRemote',
+ build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForRemoteRequest.SerializeToString,
+ build_dot_buildgrid_dot_local__cas__pb2.GetInstanceNameForRemoteResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def GetLocalDiskUsage(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/build.buildgrid.LocalContentAddressableStorage/GetLocalDiskUsage',
+ build_dot_buildgrid_dot_local__cas__pb2.GetLocalDiskUsageRequest.SerializeToString,
+ build_dot_buildgrid_dot_local__cas__pb2.GetLocalDiskUsageResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
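
Querying the local cache size needs only a stub and a default-constructed request. A minimal sketch, assuming a hypothetical endpoint; the response field name is an assumption from local_cas.proto:

    import grpc
    from buildstream._protos.build.buildgrid import local_cas_pb2, local_cas_pb2_grpc

    channel = grpc.insecure_channel('localhost:50052')  # hypothetical endpoint
    stub = local_cas_pb2_grpc.LocalContentAddressableStorageStub(channel)
    usage = stub.GetLocalDiskUsage(local_cas_pb2.GetLocalDiskUsageRequest())
    print(usage.size_bytes)  # `size_bytes` is an assumed response field name
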
diff --git a/src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py b/src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py
index db0cd6435..305a8d53b 100644
--- a/src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py
+++ b/src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py
@@ -5,65 +5,100 @@ from buildstream._protos.buildstream.v2 import artifact_pb2 as buildstream_dot_v
class ArtifactServiceStub(object):
- # missing associated documentation comment in .proto file
- pass
-
- def __init__(self, channel):
- """Constructor.
-
- Args:
- channel: A grpc.Channel.
- """
- self.GetArtifact = channel.unary_unary(
- '/buildstream.v2.ArtifactService/GetArtifact',
- request_serializer=buildstream_dot_v2_dot_artifact__pb2.GetArtifactRequest.SerializeToString,
- response_deserializer=buildstream_dot_v2_dot_artifact__pb2.Artifact.FromString,
- )
- self.UpdateArtifact = channel.unary_unary(
- '/buildstream.v2.ArtifactService/UpdateArtifact',
- request_serializer=buildstream_dot_v2_dot_artifact__pb2.UpdateArtifactRequest.SerializeToString,
- response_deserializer=buildstream_dot_v2_dot_artifact__pb2.Artifact.FromString,
- )
+ """Missing associated documentation comment in .proto file"""
+
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.GetArtifact = channel.unary_unary(
+ '/buildstream.v2.ArtifactService/GetArtifact',
+ request_serializer=buildstream_dot_v2_dot_artifact__pb2.GetArtifactRequest.SerializeToString,
+ response_deserializer=buildstream_dot_v2_dot_artifact__pb2.Artifact.FromString,
+ )
+ self.UpdateArtifact = channel.unary_unary(
+ '/buildstream.v2.ArtifactService/UpdateArtifact',
+ request_serializer=buildstream_dot_v2_dot_artifact__pb2.UpdateArtifactRequest.SerializeToString,
+ response_deserializer=buildstream_dot_v2_dot_artifact__pb2.Artifact.FromString,
+ )
class ArtifactServiceServicer(object):
- # missing associated documentation comment in .proto file
- pass
+ """Missing associated documentation comment in .proto file"""
- def GetArtifact(self, request, context):
- """Retrieves an Artifact message
+ def GetArtifact(self, request, context):
+ """Retrieves an Artifact message
- Errors:
- * `NOT_FOUND`: Artifact not found on server
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ Errors:
+ * `NOT_FOUND`: Artifact not found on server
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
- def UpdateArtifact(self, request, context):
- """Sets an Artifact message
+ def UpdateArtifact(self, request, context):
+ """Sets an Artifact message
- Errors:
- * `FAILED_PRECONDITION`: Files specified in upload aren't present in CAS
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ Errors:
+ * `FAILED_PRECONDITION`: Files specified in upload aren't present in CAS
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
def add_ArtifactServiceServicer_to_server(servicer, server):
- rpc_method_handlers = {
- 'GetArtifact': grpc.unary_unary_rpc_method_handler(
- servicer.GetArtifact,
- request_deserializer=buildstream_dot_v2_dot_artifact__pb2.GetArtifactRequest.FromString,
- response_serializer=buildstream_dot_v2_dot_artifact__pb2.Artifact.SerializeToString,
- ),
- 'UpdateArtifact': grpc.unary_unary_rpc_method_handler(
- servicer.UpdateArtifact,
- request_deserializer=buildstream_dot_v2_dot_artifact__pb2.UpdateArtifactRequest.FromString,
- response_serializer=buildstream_dot_v2_dot_artifact__pb2.Artifact.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- 'buildstream.v2.ArtifactService', rpc_method_handlers)
- server.add_generic_rpc_handlers((generic_handler,))
+ rpc_method_handlers = {
+ 'GetArtifact': grpc.unary_unary_rpc_method_handler(
+ servicer.GetArtifact,
+ request_deserializer=buildstream_dot_v2_dot_artifact__pb2.GetArtifactRequest.FromString,
+ response_serializer=buildstream_dot_v2_dot_artifact__pb2.Artifact.SerializeToString,
+ ),
+ 'UpdateArtifact': grpc.unary_unary_rpc_method_handler(
+ servicer.UpdateArtifact,
+ request_deserializer=buildstream_dot_v2_dot_artifact__pb2.UpdateArtifactRequest.FromString,
+ response_serializer=buildstream_dot_v2_dot_artifact__pb2.Artifact.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ 'buildstream.v2.ArtifactService', rpc_method_handlers)
+ server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class ArtifactService(object):
+ """Missing associated documentation comment in .proto file"""
+
+ @staticmethod
+ def GetArtifact(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/buildstream.v2.ArtifactService/GetArtifact',
+ buildstream_dot_v2_dot_artifact__pb2.GetArtifactRequest.SerializeToString,
+ buildstream_dot_v2_dot_artifact__pb2.Artifact.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def UpdateArtifact(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/buildstream.v2.ArtifactService/UpdateArtifact',
+ buildstream_dot_v2_dot_artifact__pb2.UpdateArtifactRequest.SerializeToString,
+ buildstream_dot_v2_dot_artifact__pb2.Artifact.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
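
The GetArtifact docstring maps a missing artifact onto `NOT_FOUND`, which a client can translate into a cache miss rather than a failure. A minimal sketch, assuming a hypothetical endpoint; the request field names (`instance_name`, `cache_key`) are assumptions from artifact.proto:

    import grpc
    from buildstream._protos.buildstream.v2 import artifact_pb2, artifact_pb2_grpc

    channel = grpc.insecure_channel('localhost:50051')  # hypothetical endpoint
    stub = artifact_pb2_grpc.ArtifactServiceStub(channel)
    try:
        artifact = stub.GetArtifact(artifact_pb2.GetArtifactRequest(
            instance_name='', cache_key='example-cache-key'))  # assumed fields
    except grpc.RpcError as err:
        if err.code() == grpc.StatusCode.NOT_FOUND:
            artifact = None  # artifact not on the server, per the docstring
        else:
            raise
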
diff --git a/src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py b/src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py
index 52d22c593..e60fd7f65 100644
--- a/src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py
+++ b/src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py
@@ -5,127 +5,196 @@ from buildstream._protos.buildstream.v2 import buildstream_pb2 as buildstream_do
class ReferenceStorageStub(object):
- # missing associated documentation comment in .proto file
- pass
-
- def __init__(self, channel):
- """Constructor.
-
- Args:
- channel: A grpc.Channel.
- """
- self.GetReference = channel.unary_unary(
- '/buildstream.v2.ReferenceStorage/GetReference',
- request_serializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceRequest.SerializeToString,
- response_deserializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceResponse.FromString,
- )
- self.UpdateReference = channel.unary_unary(
- '/buildstream.v2.ReferenceStorage/UpdateReference',
- request_serializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceRequest.SerializeToString,
- response_deserializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceResponse.FromString,
- )
- self.Status = channel.unary_unary(
- '/buildstream.v2.ReferenceStorage/Status',
- request_serializer=buildstream_dot_v2_dot_buildstream__pb2.StatusRequest.SerializeToString,
- response_deserializer=buildstream_dot_v2_dot_buildstream__pb2.StatusResponse.FromString,
- )
+ """Missing associated documentation comment in .proto file"""
+
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.GetReference = channel.unary_unary(
+ '/buildstream.v2.ReferenceStorage/GetReference',
+ request_serializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceRequest.SerializeToString,
+ response_deserializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceResponse.FromString,
+ )
+ self.UpdateReference = channel.unary_unary(
+ '/buildstream.v2.ReferenceStorage/UpdateReference',
+ request_serializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceRequest.SerializeToString,
+ response_deserializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceResponse.FromString,
+ )
+ self.Status = channel.unary_unary(
+ '/buildstream.v2.ReferenceStorage/Status',
+ request_serializer=buildstream_dot_v2_dot_buildstream__pb2.StatusRequest.SerializeToString,
+ response_deserializer=buildstream_dot_v2_dot_buildstream__pb2.StatusResponse.FromString,
+ )
class ReferenceStorageServicer(object):
- # missing associated documentation comment in .proto file
- pass
-
- def GetReference(self, request, context):
- """Retrieve a CAS [Directory][build.bazel.remote.execution.v2.Directory]
- digest by name.
-
- Errors:
- * `NOT_FOUND`: The requested reference is not in the cache.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
-
- def UpdateReference(self, request, context):
- """Associate a name with a CAS [Directory][build.bazel.remote.execution.v2.Directory]
- digest.
-
- Errors:
- * `RESOURCE_EXHAUSTED`: There is insufficient storage space to add the
- entry to the cache.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
-
- def Status(self, request, context):
- # missing associated documentation comment in .proto file
- pass
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ """Missing associated documentation comment in .proto file"""
+
+ def GetReference(self, request, context):
+ """Retrieve a CAS [Directory][build.bazel.remote.execution.v2.Directory]
+ digest by name.
+
+ Errors:
+ * `NOT_FOUND`: The requested reference is not in the cache.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def UpdateReference(self, request, context):
+ """Associate a name with a CAS [Directory][build.bazel.remote.execution.v2.Directory]
+ digest.
+
+ Errors:
+ * `RESOURCE_EXHAUSTED`: There is insufficient storage space to add the
+ entry to the cache.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def Status(self, request, context):
+ """Missing associated documentation comment in .proto file"""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
def add_ReferenceStorageServicer_to_server(servicer, server):
- rpc_method_handlers = {
- 'GetReference': grpc.unary_unary_rpc_method_handler(
- servicer.GetReference,
- request_deserializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceRequest.FromString,
- response_serializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceResponse.SerializeToString,
- ),
- 'UpdateReference': grpc.unary_unary_rpc_method_handler(
- servicer.UpdateReference,
- request_deserializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceRequest.FromString,
- response_serializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceResponse.SerializeToString,
- ),
- 'Status': grpc.unary_unary_rpc_method_handler(
- servicer.Status,
- request_deserializer=buildstream_dot_v2_dot_buildstream__pb2.StatusRequest.FromString,
- response_serializer=buildstream_dot_v2_dot_buildstream__pb2.StatusResponse.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- 'buildstream.v2.ReferenceStorage', rpc_method_handlers)
- server.add_generic_rpc_handlers((generic_handler,))
+ rpc_method_handlers = {
+ 'GetReference': grpc.unary_unary_rpc_method_handler(
+ servicer.GetReference,
+ request_deserializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceRequest.FromString,
+ response_serializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceResponse.SerializeToString,
+ ),
+ 'UpdateReference': grpc.unary_unary_rpc_method_handler(
+ servicer.UpdateReference,
+ request_deserializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceRequest.FromString,
+ response_serializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceResponse.SerializeToString,
+ ),
+ 'Status': grpc.unary_unary_rpc_method_handler(
+ servicer.Status,
+ request_deserializer=buildstream_dot_v2_dot_buildstream__pb2.StatusRequest.FromString,
+ response_serializer=buildstream_dot_v2_dot_buildstream__pb2.StatusResponse.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ 'buildstream.v2.ReferenceStorage', rpc_method_handlers)
+ server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class ReferenceStorage(object):
+ """Missing associated documentation comment in .proto file"""
+
+ @staticmethod
+ def GetReference(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/buildstream.v2.ReferenceStorage/GetReference',
+ buildstream_dot_v2_dot_buildstream__pb2.GetReferenceRequest.SerializeToString,
+ buildstream_dot_v2_dot_buildstream__pb2.GetReferenceResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def UpdateReference(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/buildstream.v2.ReferenceStorage/UpdateReference',
+ buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceRequest.SerializeToString,
+ buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def Status(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/buildstream.v2.ReferenceStorage/Status',
+ buildstream_dot_v2_dot_buildstream__pb2.StatusRequest.SerializeToString,
+ buildstream_dot_v2_dot_buildstream__pb2.StatusResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
class CapabilitiesStub(object):
- # missing associated documentation comment in .proto file
- pass
+ """Missing associated documentation comment in .proto file"""
- def __init__(self, channel):
- """Constructor.
+ def __init__(self, channel):
+ """Constructor.
- Args:
- channel: A grpc.Channel.
- """
- self.GetCapabilities = channel.unary_unary(
- '/buildstream.v2.Capabilities/GetCapabilities',
- request_serializer=buildstream_dot_v2_dot_buildstream__pb2.GetCapabilitiesRequest.SerializeToString,
- response_deserializer=buildstream_dot_v2_dot_buildstream__pb2.ServerCapabilities.FromString,
- )
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.GetCapabilities = channel.unary_unary(
+ '/buildstream.v2.Capabilities/GetCapabilities',
+ request_serializer=buildstream_dot_v2_dot_buildstream__pb2.GetCapabilitiesRequest.SerializeToString,
+ response_deserializer=buildstream_dot_v2_dot_buildstream__pb2.ServerCapabilities.FromString,
+ )
class CapabilitiesServicer(object):
- # missing associated documentation comment in .proto file
- pass
+ """Missing associated documentation comment in .proto file"""
- def GetCapabilities(self, request, context):
- """GetCapabilities mirrors
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ def GetCapabilities(self, request, context):
+ """GetCapabilities mirrors
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
def add_CapabilitiesServicer_to_server(servicer, server):
- rpc_method_handlers = {
- 'GetCapabilities': grpc.unary_unary_rpc_method_handler(
- servicer.GetCapabilities,
- request_deserializer=buildstream_dot_v2_dot_buildstream__pb2.GetCapabilitiesRequest.FromString,
- response_serializer=buildstream_dot_v2_dot_buildstream__pb2.ServerCapabilities.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- 'buildstream.v2.Capabilities', rpc_method_handlers)
- server.add_generic_rpc_handlers((generic_handler,))
+ rpc_method_handlers = {
+ 'GetCapabilities': grpc.unary_unary_rpc_method_handler(
+ servicer.GetCapabilities,
+ request_deserializer=buildstream_dot_v2_dot_buildstream__pb2.GetCapabilitiesRequest.FromString,
+ response_serializer=buildstream_dot_v2_dot_buildstream__pb2.ServerCapabilities.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ 'buildstream.v2.Capabilities', rpc_method_handlers)
+ server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class Capabilities(object):
+ """Missing associated documentation comment in .proto file"""
+
+ @staticmethod
+ def GetCapabilities(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/buildstream.v2.Capabilities/GetCapabilities',
+ buildstream_dot_v2_dot_buildstream__pb2.GetCapabilitiesRequest.SerializeToString,
+ buildstream_dot_v2_dot_buildstream__pb2.ServerCapabilities.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
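
Editor's note: the regenerated file above now also exports a module-level
ReferenceStorage class (flagged EXPERIMENTAL) whose static methods forward to
grpc.experimental.unary_unary, alongside the classic ReferenceStorageStub. A
minimal sketch of calling GetReference both ways follows; the endpoint and the
request field `key` are illustrative assumptions, not taken from this diff.

import grpc

from buildstream._protos.buildstream.v2 import buildstream_pb2
from buildstream._protos.buildstream.v2 import buildstream_pb2_grpc

# `key` is an assumed field name; see buildstream.proto for the real schema.
request = buildstream_pb2.GetReferenceRequest(key="my-artifact-ref")

# Classic stub API: create an explicit channel and reuse it across calls.
with grpc.insecure_channel("localhost:50051") as channel:
    stub = buildstream_pb2_grpc.ReferenceStorageStub(channel)
    response = stub.GetReference(request, timeout=5.0)

# New experimental API: pass a target string; grpc caches channels internally.
response = buildstream_pb2_grpc.ReferenceStorage.GetReference(
    request,
    "localhost:50051",
    channel_credentials=grpc.local_channel_credentials(),
    timeout=5.0,
)

Both calls return a GetReferenceResponse; the static form trades explicit
channel management for brevity, which is why the generator marks it
EXPERIMENTAL.
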
diff --git a/src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py b/src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py
index ecf734afb..8b1b37b0b 100644
--- a/src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py
+++ b/src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py
@@ -5,66 +5,101 @@ from buildstream._protos.buildstream.v2 import source_pb2 as buildstream_dot_v2_
class SourceServiceStub(object):
- # missing associated documentation comment in .proto file
- pass
-
- def __init__(self, channel):
- """Constructor.
-
- Args:
- channel: A grpc.Channel.
- """
- self.GetSource = channel.unary_unary(
- '/buildstream.v2.SourceService/GetSource',
- request_serializer=buildstream_dot_v2_dot_source__pb2.GetSourceRequest.SerializeToString,
- response_deserializer=buildstream_dot_v2_dot_source__pb2.Source.FromString,
- )
- self.UpdateSource = channel.unary_unary(
- '/buildstream.v2.SourceService/UpdateSource',
- request_serializer=buildstream_dot_v2_dot_source__pb2.UpdateSourceRequest.SerializeToString,
- response_deserializer=buildstream_dot_v2_dot_source__pb2.Source.FromString,
- )
+ """Missing associated documentation comment in .proto file"""
+
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.GetSource = channel.unary_unary(
+ '/buildstream.v2.SourceService/GetSource',
+ request_serializer=buildstream_dot_v2_dot_source__pb2.GetSourceRequest.SerializeToString,
+ response_deserializer=buildstream_dot_v2_dot_source__pb2.Source.FromString,
+ )
+ self.UpdateSource = channel.unary_unary(
+ '/buildstream.v2.SourceService/UpdateSource',
+ request_serializer=buildstream_dot_v2_dot_source__pb2.UpdateSourceRequest.SerializeToString,
+ response_deserializer=buildstream_dot_v2_dot_source__pb2.Source.FromString,
+ )
class SourceServiceServicer(object):
- # missing associated documentation comment in .proto file
- pass
+ """Missing associated documentation comment in .proto file"""
- def GetSource(self, request, context):
- """Retrieve a source message given a reference name from the service
+ def GetSource(self, request, context):
+ """Retrieve a source message given a reference name from the service
- Errors:
- * `NOT_FOUND`: The requested reference is not in the cache.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ Errors:
+ * `NOT_FOUND`: The requested reference is not in the cache.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
- def UpdateSource(self, request, context):
- """Sets a source message on the service
+ def UpdateSource(self, request, context):
+ """Sets a source message on the service
- Errors:
- * `RESOURCE_EXHAUSTED`: There is insufficient storage space to add the
- entry to the cache.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ Errors:
+ * `RESOURCE_EXHAUSTED`: There is insufficient storage space to add the
+ entry to the cache.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
def add_SourceServiceServicer_to_server(servicer, server):
- rpc_method_handlers = {
- 'GetSource': grpc.unary_unary_rpc_method_handler(
- servicer.GetSource,
- request_deserializer=buildstream_dot_v2_dot_source__pb2.GetSourceRequest.FromString,
- response_serializer=buildstream_dot_v2_dot_source__pb2.Source.SerializeToString,
- ),
- 'UpdateSource': grpc.unary_unary_rpc_method_handler(
- servicer.UpdateSource,
- request_deserializer=buildstream_dot_v2_dot_source__pb2.UpdateSourceRequest.FromString,
- response_serializer=buildstream_dot_v2_dot_source__pb2.Source.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- 'buildstream.v2.SourceService', rpc_method_handlers)
- server.add_generic_rpc_handlers((generic_handler,))
+ rpc_method_handlers = {
+ 'GetSource': grpc.unary_unary_rpc_method_handler(
+ servicer.GetSource,
+ request_deserializer=buildstream_dot_v2_dot_source__pb2.GetSourceRequest.FromString,
+ response_serializer=buildstream_dot_v2_dot_source__pb2.Source.SerializeToString,
+ ),
+ 'UpdateSource': grpc.unary_unary_rpc_method_handler(
+ servicer.UpdateSource,
+ request_deserializer=buildstream_dot_v2_dot_source__pb2.UpdateSourceRequest.FromString,
+ response_serializer=buildstream_dot_v2_dot_source__pb2.Source.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ 'buildstream.v2.SourceService', rpc_method_handlers)
+ server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class SourceService(object):
+ """Missing associated documentation comment in .proto file"""
+
+ @staticmethod
+ def GetSource(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/buildstream.v2.SourceService/GetSource',
+ buildstream_dot_v2_dot_source__pb2.GetSourceRequest.SerializeToString,
+ buildstream_dot_v2_dot_source__pb2.Source.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def UpdateSource(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/buildstream.v2.SourceService/UpdateSource',
+ buildstream_dot_v2_dot_source__pb2.UpdateSourceRequest.SerializeToString,
+ buildstream_dot_v2_dot_source__pb2.Source.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
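
Editor's note: the servicer side is unchanged by the regeneration; you still
subclass the generated servicer and register it with
add_SourceServiceServicer_to_server. A minimal sketch, assuming an in-memory
store and the `cache_key`/`source` request fields (assumed names; the real
schema lives in source.proto, which this diff does not show).

from concurrent import futures

import grpc

from buildstream._protos.buildstream.v2 import source_pb2_grpc


class InMemorySourceService(source_pb2_grpc.SourceServiceServicer):
    """Toy SourceService backed by a dict, keyed on the assumed cache_key."""

    def __init__(self):
        self._sources = {}

    def GetSource(self, request, context):
        try:
            return self._sources[request.cache_key]  # assumed field name
        except KeyError:
            context.abort(grpc.StatusCode.NOT_FOUND, "reference not in cache")

    def UpdateSource(self, request, context):
        # Both RPCs return a Source message, per the serializers above.
        self._sources[request.cache_key] = request.source  # assumed field names
        return request.source


server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
source_pb2_grpc.add_SourceServiceServicer_to_server(InMemorySourceService(), server)
server.add_insecure_port("localhost:50051")
server.start()
server.wait_for_termination()
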
diff --git a/src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py b/src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py
index ef993e040..98859f38e 100644
--- a/src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py
+++ b/src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py
@@ -5,156 +5,232 @@ from buildstream._protos.google.bytestream import bytestream_pb2 as google_dot_b
class ByteStreamStub(object):
- """#### Introduction
+ """#### Introduction
- The Byte Stream API enables a client to read and write a stream of bytes to
- and from a resource. Resources have names, and these names are supplied in
- the API calls below to identify the resource that is being read from or
- written to.
+ The Byte Stream API enables a client to read and write a stream of bytes to
+ and from a resource. Resources have names, and these names are supplied in
+ the API calls below to identify the resource that is being read from or
+ written to.
- All implementations of the Byte Stream API export the interface defined here:
+ All implementations of the Byte Stream API export the interface defined here:
- * `Read()`: Reads the contents of a resource.
+ * `Read()`: Reads the contents of a resource.
- * `Write()`: Writes the contents of a resource. The client can call `Write()`
- multiple times with the same resource and can check the status of the write
- by calling `QueryWriteStatus()`.
+ * `Write()`: Writes the contents of a resource. The client can call `Write()`
+ multiple times with the same resource and can check the status of the write
+ by calling `QueryWriteStatus()`.
- #### Service parameters and metadata
+ #### Service parameters and metadata
- The ByteStream API provides no direct way to access/modify any metadata
- associated with the resource.
+ The ByteStream API provides no direct way to access/modify any metadata
+ associated with the resource.
- #### Errors
+ #### Errors
- The errors returned by the service are in the Google canonical error space.
- """
-
- def __init__(self, channel):
- """Constructor.
-
- Args:
- channel: A grpc.Channel.
+ The errors returned by the service are in the Google canonical error space.
"""
- self.Read = channel.unary_stream(
- '/google.bytestream.ByteStream/Read',
- request_serializer=google_dot_bytestream_dot_bytestream__pb2.ReadRequest.SerializeToString,
- response_deserializer=google_dot_bytestream_dot_bytestream__pb2.ReadResponse.FromString,
- )
- self.Write = channel.stream_unary(
- '/google.bytestream.ByteStream/Write',
- request_serializer=google_dot_bytestream_dot_bytestream__pb2.WriteRequest.SerializeToString,
- response_deserializer=google_dot_bytestream_dot_bytestream__pb2.WriteResponse.FromString,
- )
- self.QueryWriteStatus = channel.unary_unary(
- '/google.bytestream.ByteStream/QueryWriteStatus',
- request_serializer=google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusRequest.SerializeToString,
- response_deserializer=google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusResponse.FromString,
- )
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.Read = channel.unary_stream(
+ '/google.bytestream.ByteStream/Read',
+ request_serializer=google_dot_bytestream_dot_bytestream__pb2.ReadRequest.SerializeToString,
+ response_deserializer=google_dot_bytestream_dot_bytestream__pb2.ReadResponse.FromString,
+ )
+ self.Write = channel.stream_unary(
+ '/google.bytestream.ByteStream/Write',
+ request_serializer=google_dot_bytestream_dot_bytestream__pb2.WriteRequest.SerializeToString,
+ response_deserializer=google_dot_bytestream_dot_bytestream__pb2.WriteResponse.FromString,
+ )
+ self.QueryWriteStatus = channel.unary_unary(
+ '/google.bytestream.ByteStream/QueryWriteStatus',
+ request_serializer=google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusRequest.SerializeToString,
+ response_deserializer=google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusResponse.FromString,
+ )
-class ByteStreamServicer(object):
- """#### Introduction
- The Byte Stream API enables a client to read and write a stream of bytes to
- and from a resource. Resources have names, and these names are supplied in
- the API calls below to identify the resource that is being read from or
- written to.
+class ByteStreamServicer(object):
+ """#### Introduction
- All implementations of the Byte Stream API export the interface defined here:
+ The Byte Stream API enables a client to read and write a stream of bytes to
+ and from a resource. Resources have names, and these names are supplied in
+ the API calls below to identify the resource that is being read from or
+ written to.
- * `Read()`: Reads the contents of a resource.
+ All implementations of the Byte Stream API export the interface defined here:
- * `Write()`: Writes the contents of a resource. The client can call `Write()`
- multiple times with the same resource and can check the status of the write
- by calling `QueryWriteStatus()`.
+ * `Read()`: Reads the contents of a resource.
- #### Service parameters and metadata
+ * `Write()`: Writes the contents of a resource. The client can call `Write()`
+ multiple times with the same resource and can check the status of the write
+ by calling `QueryWriteStatus()`.
- The ByteStream API provides no direct way to access/modify any metadata
- associated with the resource.
+ #### Service parameters and metadata
- #### Errors
+ The ByteStream API provides no direct way to access/modify any metadata
+ associated with the resource.
- The errors returned by the service are in the Google canonical error space.
- """
+ #### Errors
- def Read(self, request, context):
- """`Read()` is used to retrieve the contents of a resource as a sequence
- of bytes. The bytes are returned in a sequence of responses, and the
- responses are delivered as the results of a server-side streaming RPC.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
-
- def Write(self, request_iterator, context):
- """`Write()` is used to send the contents of a resource as a sequence of
- bytes. The bytes are sent in a sequence of request protos of a client-side
- streaming RPC.
-
- A `Write()` action is resumable. If there is an error or the connection is
- broken during the `Write()`, the client should check the status of the
- `Write()` by calling `QueryWriteStatus()` and continue writing from the
- returned `committed_size`. This may be less than the amount of data the
- client previously sent.
-
- Calling `Write()` on a resource name that was previously written and
- finalized could cause an error, depending on whether the underlying service
- allows over-writing of previously written resources.
-
- When the client closes the request channel, the service will respond with
- a `WriteResponse`. The service will not view the resource as `complete`
- until the client has sent a `WriteRequest` with `finish_write` set to
- `true`. Sending any requests on a stream after sending a request with
- `finish_write` set to `true` will cause an error. The client **should**
- check the `WriteResponse` it receives to determine how much data the
- service was able to commit and whether the service views the resource as
- `complete` or not.
+ The errors returned by the service are in the Google canonical error space.
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
-
- def QueryWriteStatus(self, request, context):
- """`QueryWriteStatus()` is used to find the `committed_size` for a resource
- that is being written, which can then be used as the `write_offset` for
- the next `Write()` call.
-
- If the resource does not exist (i.e., the resource has been deleted, or the
- first `Write()` has not yet reached the service), this method returns the
- error `NOT_FOUND`.
-
- The client **may** call `QueryWriteStatus()` at any time to determine how
- much data has been processed for this resource. This is useful if the
- client is buffering data and needs to know which data can be safely
- evicted. For any sequence of `QueryWriteStatus()` calls for a given
- resource name, the sequence of returned `committed_size` values will be
- non-decreasing.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+
+ def Read(self, request, context):
+ """`Read()` is used to retrieve the contents of a resource as a sequence
+ of bytes. The bytes are returned in a sequence of responses, and the
+ responses are delivered as the results of a server-side streaming RPC.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def Write(self, request_iterator, context):
+ """`Write()` is used to send the contents of a resource as a sequence of
+ bytes. The bytes are sent in a sequence of request protos of a client-side
+ streaming RPC.
+
+ A `Write()` action is resumable. If there is an error or the connection is
+ broken during the `Write()`, the client should check the status of the
+ `Write()` by calling `QueryWriteStatus()` and continue writing from the
+ returned `committed_size`. This may be less than the amount of data the
+ client previously sent.
+
+ Calling `Write()` on a resource name that was previously written and
+ finalized could cause an error, depending on whether the underlying service
+ allows over-writing of previously written resources.
+
+ When the client closes the request channel, the service will respond with
+ a `WriteResponse`. The service will not view the resource as `complete`
+ until the client has sent a `WriteRequest` with `finish_write` set to
+ `true`. Sending any requests on a stream after sending a request with
+ `finish_write` set to `true` will cause an error. The client **should**
+ check the `WriteResponse` it receives to determine how much data the
+ service was able to commit and whether the service views the resource as
+ `complete` or not.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def QueryWriteStatus(self, request, context):
+ """`QueryWriteStatus()` is used to find the `committed_size` for a resource
+ that is being written, which can then be used as the `write_offset` for
+ the next `Write()` call.
+
+ If the resource does not exist (i.e., the resource has been deleted, or the
+ first `Write()` has not yet reached the service), this method returns the
+ error `NOT_FOUND`.
+
+ The client **may** call `QueryWriteStatus()` at any time to determine how
+ much data has been processed for this resource. This is useful if the
+ client is buffering data and needs to know which data can be safely
+ evicted. For any sequence of `QueryWriteStatus()` calls for a given
+ resource name, the sequence of returned `committed_size` values will be
+ non-decreasing.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
def add_ByteStreamServicer_to_server(servicer, server):
- rpc_method_handlers = {
- 'Read': grpc.unary_stream_rpc_method_handler(
- servicer.Read,
- request_deserializer=google_dot_bytestream_dot_bytestream__pb2.ReadRequest.FromString,
- response_serializer=google_dot_bytestream_dot_bytestream__pb2.ReadResponse.SerializeToString,
- ),
- 'Write': grpc.stream_unary_rpc_method_handler(
- servicer.Write,
- request_deserializer=google_dot_bytestream_dot_bytestream__pb2.WriteRequest.FromString,
- response_serializer=google_dot_bytestream_dot_bytestream__pb2.WriteResponse.SerializeToString,
- ),
- 'QueryWriteStatus': grpc.unary_unary_rpc_method_handler(
- servicer.QueryWriteStatus,
- request_deserializer=google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusRequest.FromString,
- response_serializer=google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusResponse.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- 'google.bytestream.ByteStream', rpc_method_handlers)
- server.add_generic_rpc_handlers((generic_handler,))
+ rpc_method_handlers = {
+ 'Read': grpc.unary_stream_rpc_method_handler(
+ servicer.Read,
+ request_deserializer=google_dot_bytestream_dot_bytestream__pb2.ReadRequest.FromString,
+ response_serializer=google_dot_bytestream_dot_bytestream__pb2.ReadResponse.SerializeToString,
+ ),
+ 'Write': grpc.stream_unary_rpc_method_handler(
+ servicer.Write,
+ request_deserializer=google_dot_bytestream_dot_bytestream__pb2.WriteRequest.FromString,
+ response_serializer=google_dot_bytestream_dot_bytestream__pb2.WriteResponse.SerializeToString,
+ ),
+ 'QueryWriteStatus': grpc.unary_unary_rpc_method_handler(
+ servicer.QueryWriteStatus,
+ request_deserializer=google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusRequest.FromString,
+ response_serializer=google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusResponse.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ 'google.bytestream.ByteStream', rpc_method_handlers)
+ server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class ByteStream(object):
+ """#### Introduction
+
+ The Byte Stream API enables a client to read and write a stream of bytes to
+ and from a resource. Resources have names, and these names are supplied in
+ the API calls below to identify the resource that is being read from or
+ written to.
+
+ All implementations of the Byte Stream API export the interface defined here:
+
+ * `Read()`: Reads the contents of a resource.
+
+ * `Write()`: Writes the contents of a resource. The client can call `Write()`
+ multiple times with the same resource and can check the status of the write
+ by calling `QueryWriteStatus()`.
+
+ #### Service parameters and metadata
+
+ The ByteStream API provides no direct way to access/modify any metadata
+ associated with the resource.
+
+ #### Errors
+
+ The errors returned by the service are in the Google canonical error space.
+ """
+
+ @staticmethod
+ def Read(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_stream(request, target, '/google.bytestream.ByteStream/Read',
+ google_dot_bytestream_dot_bytestream__pb2.ReadRequest.SerializeToString,
+ google_dot_bytestream_dot_bytestream__pb2.ReadResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def Write(request_iterator,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.stream_unary(request_iterator, target, '/google.bytestream.ByteStream/Write',
+ google_dot_bytestream_dot_bytestream__pb2.WriteRequest.SerializeToString,
+ google_dot_bytestream_dot_bytestream__pb2.WriteResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def QueryWriteStatus(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/google.bytestream.ByteStream/QueryWriteStatus',
+ google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusRequest.SerializeToString,
+ google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
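
Editor's note: the Write() and QueryWriteStatus() docstrings above spell out a
resumable upload protocol. Here is a sketch of a client following it, using
the standard bytestream.proto fields (resource_name, write_offset, data,
finish_write, committed_size, complete); the chunk size and retry bound are
arbitrary, and the resource naming scheme is left to the server.

import grpc

from buildstream._protos.google.bytestream import bytestream_pb2
from buildstream._protos.google.bytestream import bytestream_pb2_grpc

CHUNK = 64 * 1024  # arbitrary chunk size


def _write_requests(resource_name, data, offset):
    # Stream chunks starting at `offset`; the last request sets finish_write.
    if not data:
        yield bytestream_pb2.WriteRequest(
            resource_name=resource_name, finish_write=True)
        return
    for start in range(offset, len(data), CHUNK):
        yield bytestream_pb2.WriteRequest(
            resource_name=resource_name,
            write_offset=start,
            data=data[start:start + CHUNK],
            finish_write=start + CHUNK >= len(data),
        )


def resumable_write(channel, resource_name, data, attempts=5):
    stub = bytestream_pb2_grpc.ByteStreamStub(channel)
    offset = 0
    for _ in range(attempts):
        try:
            response = stub.Write(_write_requests(resource_name, data, offset))
            return response.committed_size
        except grpc.RpcError:
            # Ask the server how much it committed, then resume from there.
            status = stub.QueryWriteStatus(
                bytestream_pb2.QueryWriteStatusRequest(resource_name=resource_name))
            if status.complete:
                return status.committed_size
            offset = status.committed_size
    raise RuntimeError("write of %s did not complete" % resource_name)
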
diff --git a/src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py b/src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py
index 8f89862e7..11a47e0d3 100644
--- a/src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py
+++ b/src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py
@@ -6,127 +6,205 @@ from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
class OperationsStub(object):
- """Manages long-running operations with an API service.
-
- When an API method normally takes a long time to complete, it can be designed
- to return [Operation][google.longrunning.Operation] to the client, and the client can use this
- interface to receive the real response asynchronously by polling the
- operation resource, or pass the operation resource to another API (such as
- Google Cloud Pub/Sub API) to receive the response. Any API service that
- returns long-running operations should implement the `Operations` interface
- so developers can have a consistent client experience.
- """
-
- def __init__(self, channel):
- """Constructor.
-
- Args:
- channel: A grpc.Channel.
+ """Manages long-running operations with an API service.
+
+ When an API method normally takes a long time to complete, it can be designed
+ to return [Operation][google.longrunning.Operation] to the client, and the client can use this
+ interface to receive the real response asynchronously by polling the
+ operation resource, or pass the operation resource to another API (such as
+ Google Cloud Pub/Sub API) to receive the response. Any API service that
+ returns long-running operations should implement the `Operations` interface
+ so developers can have a consistent client experience.
"""
- self.ListOperations = channel.unary_unary(
- '/google.longrunning.Operations/ListOperations',
- request_serializer=google_dot_longrunning_dot_operations__pb2.ListOperationsRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.ListOperationsResponse.FromString,
- )
- self.GetOperation = channel.unary_unary(
- '/google.longrunning.Operations/GetOperation',
- request_serializer=google_dot_longrunning_dot_operations__pb2.GetOperationRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
- self.DeleteOperation = channel.unary_unary(
- '/google.longrunning.Operations/DeleteOperation',
- request_serializer=google_dot_longrunning_dot_operations__pb2.DeleteOperationRequest.SerializeToString,
- response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
- )
- self.CancelOperation = channel.unary_unary(
- '/google.longrunning.Operations/CancelOperation',
- request_serializer=google_dot_longrunning_dot_operations__pb2.CancelOperationRequest.SerializeToString,
- response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
- )
+
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.ListOperations = channel.unary_unary(
+ '/google.longrunning.Operations/ListOperations',
+ request_serializer=google_dot_longrunning_dot_operations__pb2.ListOperationsRequest.SerializeToString,
+ response_deserializer=google_dot_longrunning_dot_operations__pb2.ListOperationsResponse.FromString,
+ )
+ self.GetOperation = channel.unary_unary(
+ '/google.longrunning.Operations/GetOperation',
+ request_serializer=google_dot_longrunning_dot_operations__pb2.GetOperationRequest.SerializeToString,
+ response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ )
+ self.DeleteOperation = channel.unary_unary(
+ '/google.longrunning.Operations/DeleteOperation',
+ request_serializer=google_dot_longrunning_dot_operations__pb2.DeleteOperationRequest.SerializeToString,
+ response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ )
+ self.CancelOperation = channel.unary_unary(
+ '/google.longrunning.Operations/CancelOperation',
+ request_serializer=google_dot_longrunning_dot_operations__pb2.CancelOperationRequest.SerializeToString,
+ response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ )
class OperationsServicer(object):
- """Manages long-running operations with an API service.
-
- When an API method normally takes a long time to complete, it can be designed
- to return [Operation][google.longrunning.Operation] to the client, and the client can use this
- interface to receive the real response asynchronously by polling the
- operation resource, or pass the operation resource to another API (such as
- Google Cloud Pub/Sub API) to receive the response. Any API service that
- returns long-running operations should implement the `Operations` interface
- so developers can have a consistent client experience.
- """
-
- def ListOperations(self, request, context):
- """Lists operations that match the specified filter in the request. If the
- server doesn't support this method, it returns `UNIMPLEMENTED`.
-
- NOTE: the `name` binding below allows API services to override the binding
- to use different resource name schemes, such as `users/*/operations`.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
-
- def GetOperation(self, request, context):
- """Gets the latest state of a long-running operation. Clients can use this
- method to poll the operation result at intervals as recommended by the API
- service.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
-
- def DeleteOperation(self, request, context):
- """Deletes a long-running operation. This method indicates that the client is
- no longer interested in the operation result. It does not cancel the
- operation. If the server doesn't support this method, it returns
- `google.rpc.Code.UNIMPLEMENTED`.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
-
- def CancelOperation(self, request, context):
- """Starts asynchronous cancellation on a long-running operation. The server
- makes a best effort to cancel the operation, but success is not
- guaranteed. If the server doesn't support this method, it returns
- `google.rpc.Code.UNIMPLEMENTED`. Clients can use
- [Operations.GetOperation][google.longrunning.Operations.GetOperation] or
- other methods to check whether the cancellation succeeded or whether the
- operation completed despite cancellation. On successful cancellation,
- the operation is not deleted; instead, it becomes an operation with
- an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1,
- corresponding to `Code.CANCELLED`.
+ """Manages long-running operations with an API service.
+
+ When an API method normally takes a long time to complete, it can be designed
+ to return [Operation][google.longrunning.Operation] to the client, and the client can use this
+ interface to receive the real response asynchronously by polling the
+ operation resource, or pass the operation resource to another API (such as
+ Google Cloud Pub/Sub API) to receive the response. Any API service that
+ returns long-running operations should implement the `Operations` interface
+ so developers can have a consistent client experience.
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+
+ def ListOperations(self, request, context):
+ """Lists operations that match the specified filter in the request. If the
+ server doesn't support this method, it returns `UNIMPLEMENTED`.
+
+ NOTE: the `name` binding below allows API services to override the binding
+ to use different resource name schemes, such as `users/*/operations`.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def GetOperation(self, request, context):
+ """Gets the latest state of a long-running operation. Clients can use this
+ method to poll the operation result at intervals as recommended by the API
+ service.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def DeleteOperation(self, request, context):
+ """Deletes a long-running operation. This method indicates that the client is
+ no longer interested in the operation result. It does not cancel the
+ operation. If the server doesn't support this method, it returns
+ `google.rpc.Code.UNIMPLEMENTED`.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def CancelOperation(self, request, context):
+ """Starts asynchronous cancellation on a long-running operation. The server
+ makes a best effort to cancel the operation, but success is not
+ guaranteed. If the server doesn't support this method, it returns
+ `google.rpc.Code.UNIMPLEMENTED`. Clients can use
+ [Operations.GetOperation][google.longrunning.Operations.GetOperation] or
+ other methods to check whether the cancellation succeeded or whether the
+ operation completed despite cancellation. On successful cancellation,
+ the operation is not deleted; instead, it becomes an operation with
+ an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1,
+ corresponding to `Code.CANCELLED`.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
def add_OperationsServicer_to_server(servicer, server):
- rpc_method_handlers = {
- 'ListOperations': grpc.unary_unary_rpc_method_handler(
- servicer.ListOperations,
- request_deserializer=google_dot_longrunning_dot_operations__pb2.ListOperationsRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.ListOperationsResponse.SerializeToString,
- ),
- 'GetOperation': grpc.unary_unary_rpc_method_handler(
- servicer.GetOperation,
- request_deserializer=google_dot_longrunning_dot_operations__pb2.GetOperationRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- 'DeleteOperation': grpc.unary_unary_rpc_method_handler(
- servicer.DeleteOperation,
- request_deserializer=google_dot_longrunning_dot_operations__pb2.DeleteOperationRequest.FromString,
- response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
- ),
- 'CancelOperation': grpc.unary_unary_rpc_method_handler(
- servicer.CancelOperation,
- request_deserializer=google_dot_longrunning_dot_operations__pb2.CancelOperationRequest.FromString,
- response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- 'google.longrunning.Operations', rpc_method_handlers)
- server.add_generic_rpc_handlers((generic_handler,))
+ rpc_method_handlers = {
+ 'ListOperations': grpc.unary_unary_rpc_method_handler(
+ servicer.ListOperations,
+ request_deserializer=google_dot_longrunning_dot_operations__pb2.ListOperationsRequest.FromString,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.ListOperationsResponse.SerializeToString,
+ ),
+ 'GetOperation': grpc.unary_unary_rpc_method_handler(
+ servicer.GetOperation,
+ request_deserializer=google_dot_longrunning_dot_operations__pb2.GetOperationRequest.FromString,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+ ),
+ 'DeleteOperation': grpc.unary_unary_rpc_method_handler(
+ servicer.DeleteOperation,
+ request_deserializer=google_dot_longrunning_dot_operations__pb2.DeleteOperationRequest.FromString,
+ response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+ ),
+ 'CancelOperation': grpc.unary_unary_rpc_method_handler(
+ servicer.CancelOperation,
+ request_deserializer=google_dot_longrunning_dot_operations__pb2.CancelOperationRequest.FromString,
+ response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ 'google.longrunning.Operations', rpc_method_handlers)
+ server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class Operations(object):
+ """Manages long-running operations with an API service.
+
+ When an API method normally takes a long time to complete, it can be designed
+ to return [Operation][google.longrunning.Operation] to the client, and the client can use this
+ interface to receive the real response asynchronously by polling the
+ operation resource, or pass the operation resource to another API (such as
+ Google Cloud Pub/Sub API) to receive the response. Any API service that
+ returns long-running operations should implement the `Operations` interface
+ so developers can have a consistent client experience.
+ """
+
+ @staticmethod
+ def ListOperations(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/google.longrunning.Operations/ListOperations',
+ google_dot_longrunning_dot_operations__pb2.ListOperationsRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.ListOperationsResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def GetOperation(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/google.longrunning.Operations/GetOperation',
+ google_dot_longrunning_dot_operations__pb2.GetOperationRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def DeleteOperation(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/google.longrunning.Operations/DeleteOperation',
+ google_dot_longrunning_dot_operations__pb2.DeleteOperationRequest.SerializeToString,
+ google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def CancelOperation(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/google.longrunning.Operations/CancelOperation',
+ google_dot_longrunning_dot_operations__pb2.CancelOperationRequest.SerializeToString,
+ google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
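
Editor's note: the Operations docstrings above describe the client's side of
the contract: poll GetOperation until `done`, and treat CancelOperation as a
best-effort abort. A sketch of that loop against the regenerated stub; the
polling interval and deadline are placeholders.

import time

from buildstream._protos.google.longrunning import operations_pb2
from buildstream._protos.google.longrunning import operations_pb2_grpc


def wait_for_operation(channel, name, poll_interval=2.0, deadline=300.0):
    stub = operations_pb2_grpc.OperationsStub(channel)
    start = time.monotonic()
    while True:
        operation = stub.GetOperation(
            operations_pb2.GetOperationRequest(name=name))
        if operation.done:
            # `error` and `response` form a oneof on Operation.
            if operation.HasField("error"):
                raise RuntimeError(operation.error.message)
            return operation.response  # a packed google.protobuf.Any
        if time.monotonic() - start > deadline:
            # Best effort only: the operation may still finish or fail later.
            stub.CancelOperation(
                operations_pb2.CancelOperationRequest(name=name))
            raise TimeoutError("operation %r not done before deadline" % name)
        time.sleep(poll_interval)
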