refactor: excel parse
This commit is contained in:
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,80 @@
|
||||
# Copyright 2016 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""GRPCAuthMetadataPlugins for standard authentication."""
|
||||
|
||||
import inspect
|
||||
from typing import Any, Optional
|
||||
|
||||
import grpc
|
||||
|
||||
|
||||
def _sign_request(
|
||||
callback: grpc.AuthMetadataPluginCallback,
|
||||
token: Optional[str],
|
||||
error: Optional[Exception],
|
||||
):
|
||||
metadata = (("authorization", "Bearer {}".format(token)),)
|
||||
callback(metadata, error)
|
||||
|
||||
|
||||
class GoogleCallCredentials(grpc.AuthMetadataPlugin):
    """Metadata wrapper for GoogleCredentials from the oauth2client library."""

    # True when the wrapped credentials are JWT creds that accept
    # additional_claims.
    _is_jwt: bool
    _credentials: Any

    # TODO(xuanwn): Give credentials an actual type.
    def __init__(self, credentials: Any):
        self._credentials = credentials
        # Hack: JWT credentials are detected by whether get_access_token
        # accepts an additional_claims argument.  # noqa: FIX004
        accepted_args = inspect.getfullargspec(
            credentials.get_access_token
        ).args
        self._is_jwt = "additional_claims" in accepted_args

    def __call__(
        self,
        context: grpc.AuthMetadataContext,
        callback: grpc.AuthMetadataPluginCallback,
    ):
        try:
            if self._is_jwt:
                claims = {
                    "aud": context.service_url  # pytype: disable=attribute-error
                }
                token = self._credentials.get_access_token(
                    additional_claims=claims
                ).access_token
            else:
                token = self._credentials.get_access_token().access_token
        except Exception as caught:  # pylint: disable=broad-except
            # Report the failure through the plugin callback rather than raise.
            _sign_request(callback, None, caught)
        else:
            _sign_request(callback, token, None)
|
||||
|
||||
|
||||
class AccessTokenAuthMetadataPlugin(grpc.AuthMetadataPlugin):
    """Metadata wrapper for raw access token credentials."""

    # The raw bearer token attached to every outgoing request.
    _access_token: str

    def __init__(self, access_token: str):
        self._access_token = access_token

    def __call__(
        self,
        context: grpc.AuthMetadataContext,
        callback: grpc.AuthMetadataPluginCallback,
    ):
        # The token is fixed at construction, so signing can never fail here.
        _sign_request(callback, self._access_token, None)
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,180 @@
|
||||
# Copyright 2016 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Shared implementation."""
|
||||
|
||||
import logging
|
||||
import time
|
||||
from typing import Any, AnyStr, Callable, Optional, Union
|
||||
|
||||
import grpc
|
||||
from grpc._cython import cygrpc
|
||||
from grpc._typing import DeserializingFunction
|
||||
from grpc._typing import SerializingFunction
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)

# Maps Cython-level connectivity states to their public grpc enum values.
CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY = {
    cygrpc.ConnectivityState.idle: grpc.ChannelConnectivity.IDLE,
    cygrpc.ConnectivityState.connecting: grpc.ChannelConnectivity.CONNECTING,
    cygrpc.ConnectivityState.ready: grpc.ChannelConnectivity.READY,
    cygrpc.ConnectivityState.transient_failure: grpc.ChannelConnectivity.TRANSIENT_FAILURE,
    cygrpc.ConnectivityState.shutdown: grpc.ChannelConnectivity.SHUTDOWN,
}

# Maps Cython-level status codes to their public grpc enum values.
CYGRPC_STATUS_CODE_TO_STATUS_CODE = {
    cygrpc.StatusCode.ok: grpc.StatusCode.OK,
    cygrpc.StatusCode.cancelled: grpc.StatusCode.CANCELLED,
    cygrpc.StatusCode.unknown: grpc.StatusCode.UNKNOWN,
    cygrpc.StatusCode.invalid_argument: grpc.StatusCode.INVALID_ARGUMENT,
    cygrpc.StatusCode.deadline_exceeded: grpc.StatusCode.DEADLINE_EXCEEDED,
    cygrpc.StatusCode.not_found: grpc.StatusCode.NOT_FOUND,
    cygrpc.StatusCode.already_exists: grpc.StatusCode.ALREADY_EXISTS,
    cygrpc.StatusCode.permission_denied: grpc.StatusCode.PERMISSION_DENIED,
    cygrpc.StatusCode.unauthenticated: grpc.StatusCode.UNAUTHENTICATED,
    cygrpc.StatusCode.resource_exhausted: grpc.StatusCode.RESOURCE_EXHAUSTED,
    cygrpc.StatusCode.failed_precondition: grpc.StatusCode.FAILED_PRECONDITION,
    cygrpc.StatusCode.aborted: grpc.StatusCode.ABORTED,
    cygrpc.StatusCode.out_of_range: grpc.StatusCode.OUT_OF_RANGE,
    cygrpc.StatusCode.unimplemented: grpc.StatusCode.UNIMPLEMENTED,
    cygrpc.StatusCode.internal: grpc.StatusCode.INTERNAL,
    cygrpc.StatusCode.unavailable: grpc.StatusCode.UNAVAILABLE,
    cygrpc.StatusCode.data_loss: grpc.StatusCode.DATA_LOSS,
}
# Inverse of the mapping above (the mapping is one-to-one).
STATUS_CODE_TO_CYGRPC_STATUS_CODE = {
    grpc_code: cygrpc_code
    for cygrpc_code, grpc_code in CYGRPC_STATUS_CODE_TO_STATUS_CODE.items()
}

# Upper bound (seconds) on any single blocking wait, so signal handlers
# on the main thread are never starved for longer than this (see wait()).
MAXIMUM_WAIT_TIMEOUT = 0.1

_ERROR_MESSAGE_PORT_BINDING_FAILED = (
    "Failed to bind to address %s; set "
    "GRPC_VERBOSITY=debug environment variable to see detailed error message."
)
|
||||
|
||||
|
||||
def encode(s: AnyStr) -> bytes:
    """Coerce a str-or-bytes value to UTF-8 encoded bytes."""
    return s if isinstance(s, bytes) else s.encode("utf8")
|
||||
|
||||
|
||||
def decode(b: AnyStr) -> str:
    """Coerce a str-or-bytes value to str, replacing undecodable bytes."""
    if not isinstance(b, bytes):
        return b
    return b.decode("utf-8", "replace")
|
||||
|
||||
|
||||
def _transform(
    message: Any,
    transformer: Union[SerializingFunction, DeserializingFunction, None],
    exception_message: str,
) -> Any:
    """Apply an optional transformer, logging and returning None on failure."""
    if transformer is None:
        # No transformation requested; hand the message back untouched.
        return message
    try:
        transformed = transformer(message)
    except Exception:  # pylint: disable=broad-except
        _LOGGER.exception(exception_message)
        return None
    return transformed
|
||||
|
||||
|
||||
def serialize(message: Any, serializer: Optional[SerializingFunction]) -> bytes:
    """Serialize a message, returning None if serialization fails (logged)."""
    failure_message = "Exception serializing message!"
    return _transform(message, serializer, failure_message)
|
||||
|
||||
|
||||
def deserialize(
    serialized_message: bytes, deserializer: Optional[DeserializingFunction]
) -> Any:
    """Deserialize a message, returning None if it fails (logged)."""
    failure_message = "Exception deserializing message!"
    return _transform(serialized_message, deserializer, failure_message)
|
||||
|
||||
|
||||
def fully_qualified_method(group: str, method: str) -> str:
    """Compose the canonical "/<service>/<method>" RPC path string."""
    return f"/{group}/{method}"
|
||||
|
||||
|
||||
def _wait_once(
|
||||
wait_fn: Callable[..., bool],
|
||||
timeout: float,
|
||||
spin_cb: Optional[Callable[[], None]],
|
||||
):
|
||||
wait_fn(timeout=timeout)
|
||||
if spin_cb is not None:
|
||||
spin_cb()
|
||||
|
||||
|
||||
def wait(
    wait_fn: Callable[..., bool],
    wait_complete_fn: Callable[[], bool],
    timeout: Optional[float] = None,
    spin_cb: Optional[Callable[[], None]] = None,
) -> bool:
    """Poll a wait primitive in short slices so signal handlers can run.

    CPython's `threading.Event.wait` and `threading.Condition.wait` block the
    calling thread indefinitely when invoked without a timeout, which can
    starve signal handlers on the main thread
    (see https://github.com/grpc/grpc/issues/19464). This wrapper instead
    re-invokes `wait_fn` with at most MAXIMUM_WAIT_TIMEOUT per call.

    Args:
      wait_fn: A callable accepting a single float-valued kwarg named
        `timeout`; expected to be `threading.Event.wait` or
        `threading.Condition.wait`.
      wait_complete_fn: A zero-argument callable returning True once waiting
        should cease.
      timeout: An optional overall deadline in seconds.
      spin_cb: An optional zero-argument callable invoked on every iteration
        of the spin, e.g. for fork-related bookkeeping.

    Returns:
      True if a timeout was supplied and it was reached; False otherwise.
    """
    if timeout is None:
        while not wait_complete_fn():
            _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
        return False
    deadline = time.time() + timeout
    while not wait_complete_fn():
        remaining = min(deadline - time.time(), MAXIMUM_WAIT_TIMEOUT)
        if remaining < 0:
            return True
        _wait_once(wait_fn, remaining, spin_cb)
    return False
|
||||
|
||||
|
||||
def validate_port_binding_result(address: str, port: int) -> int:
    """Validate that a port binding succeeded, returning the bound port.

    Core signals a failed binding by returning port 0 and provides no
    detailed reason, so the best available behavior is to raise.

    Args:
      address: The address string that was bound.
      port: The port number returned by Core.

    Raises:
      RuntimeError: if Core reported the binding failed (port == 0).
    """
    if port != 0:
        return port
    raise RuntimeError(_ERROR_MESSAGE_PORT_BINDING_FAILED % address)
|
||||
@@ -0,0 +1,71 @@
|
||||
# Copyright 2019 The gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Optional
|
||||
|
||||
import grpc
|
||||
from grpc._cython import cygrpc
|
||||
from grpc._typing import MetadataType
|
||||
|
||||
# Public aliases for the Cython-level compression algorithm enum values.
NoCompression = cygrpc.CompressionAlgorithm.none
Deflate = cygrpc.CompressionAlgorithm.deflate
Gzip = cygrpc.CompressionAlgorithm.gzip

# Maps each algorithm to the string value carried in request metadata.
_METADATA_STRING_MAPPING = {
    NoCompression: "identity",
    Deflate: "deflate",
    Gzip: "gzip",
}
|
||||
|
||||
|
||||
def _compression_algorithm_to_metadata_value(
    compression: grpc.Compression,
) -> str:
    """Return the metadata string for a compression algorithm.

    Raises KeyError for values not in _METADATA_STRING_MAPPING.
    """
    return _METADATA_STRING_MAPPING[compression]
|
||||
|
||||
|
||||
def compression_algorithm_to_metadata(compression: grpc.Compression):
    """Build the (key, value) metadata entry requesting `compression`."""
    return (
        cygrpc.GRPC_COMPRESSION_REQUEST_ALGORITHM_MD_KEY,
        _compression_algorithm_to_metadata_value(compression),
    )
|
||||
|
||||
|
||||
def create_channel_option(compression: Optional[grpc.Compression]):
    """Return channel-arg tuple(s) setting the default compression algorithm.

    NOTE(review): the truthiness test below treats NoCompression (enum
    value 0) the same as None, so passing NoCompression produces no channel
    option rather than explicitly selecting "none" — confirm this is intended.
    """
    return (
        ((cygrpc.GRPC_COMPRESSION_CHANNEL_DEFAULT_ALGORITHM, int(compression)),)
        if compression
        else ()
    )
|
||||
|
||||
|
||||
def augment_metadata(
    metadata: Optional[MetadataType], compression: Optional[grpc.Compression]
):
    """Merge user metadata with a compression-request entry, if any.

    Returns None when there is neither metadata nor compression.
    NOTE(review): as in create_channel_option, a falsy compression value
    (NoCompression == 0) is treated like None here — confirm intended.
    """
    if not metadata and not compression:
        return None
    base_metadata = tuple(metadata) if metadata else ()
    compression_metadata = (
        (compression_algorithm_to_metadata(compression),) if compression else ()
    )
    return base_metadata + compression_metadata
|
||||
|
||||
|
||||
# Public API of this module.
__all__ = (
    "Deflate",
    "Gzip",
    "NoCompression",
)
|
||||
@@ -0,0 +1,13 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,13 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
+100
@@ -0,0 +1,100 @@
|
||||
//
|
||||
//
|
||||
// Copyright 2026 gRPC authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
//
|
||||
|
||||
#include "src/python/grpcio/grpc/_cython/_cygrpc/private_key_signing/private_key_signer_py_wrapper.h"
|
||||
|
||||
#include <grpc/support/log.h>
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "Python.h"
|
||||
#include "grpc/private_key_signer.h"
|
||||
#include "absl/status/status.h"
|
||||
|
||||
namespace grpc_python {
|
||||
|
||||
// Builds a shared PrivateKeySigner backed by Python callables. Takes new
// references on both Python objects so they outlive the Cython caller; the
// references are released in the destructor.
std::shared_ptr<grpc_core::PrivateKeySigner> PrivateKeySignerPyWrapper::Create(
    PrivateKeySignerPyWrapper::SignWrapperForPy sign_py_wrapper,
    PyObject* py_user_sign_fn, PyObject* destroy_event) {
  // Refcount mutation requires the GIL; this may run on a non-Python thread.
  PyGILState_STATE state = PyGILState_Ensure();
  Py_INCREF(py_user_sign_fn);
  Py_INCREF(destroy_event);
  PyGILState_Release(state);
  return std::make_shared<PrivateKeySignerPyWrapper>(
      sign_py_wrapper, py_user_sign_fn, destroy_event);
}
|
||||
|
||||
PrivateKeySignerPyWrapper::~PrivateKeySignerPyWrapper() {
  // Destruction may happen on a non-Python thread; take the GIL before
  // touching any PyObject.
  PyGILState_STATE state = PyGILState_Ensure();
  Py_DECREF(static_cast<PyObject*>(py_user_sign_fn_));
  // Python will stay alive until this event is set
  PyObject* result = PyObject_CallMethod(destroy_event_, "set", "()");
  // crash if result is nullptr? - discussing
  Py_XDECREF(result);
  // NOTE(review): Create() takes a reference on destroy_event_ but it is
  // never Py_DECREF'd here — confirm whether this leak is intentional.
  PyGILState_Release(state);
}
|
||||
|
||||
// Dispatches a signing request to Python. Returns either the synchronous
// result or an async handle, depending on what the Python side reported.
std::variant<absl::StatusOr<std::string>,
             std::shared_ptr<grpc_core::PrivateKeySigner::AsyncSigningHandle>>
PrivateKeySignerPyWrapper::Sign(absl::string_view data_to_sign,
                                SignatureAlgorithm signature_algorithm,
                                OnSignComplete on_sign_complete) {
  // The context owns the Core-provided completion callback; the Cython layer
  // holds only a weak reference to it (see SignWrapperForPy).
  auto completion_context =
      std::make_shared<CompletionContext>(std::move(on_sign_complete));

  PrivateKeySignerPyWrapperResult result = sign_py_wrapper_(
      data_to_sign, signature_algorithm, py_user_sign_fn_, completion_context);
  if (result.is_sync) {
    // Python produced the signature (or an error) inline.
    return result.sync_result;
  } else {
    // Signing continues asynchronously; hand Core a cancellable handle that
    // keeps the completion context alive until completion or cancellation.
    auto handle = std::make_shared<AsyncSigningHandlePyWrapper>(
        result.async_result.cancel_wrapper,
        result.async_result.py_user_cancel_fn, std::move(completion_context));
    return handle;
  }
}
|
||||
|
||||
void PrivateKeySignerPyWrapper::Cancel(
    std::shared_ptr<AsyncSigningHandle> handle) {
  if (handle == nullptr) return;
  // Sign() only ever hands out AsyncSigningHandlePyWrapper instances, so
  // the static downcast is safe.
  auto handle_impl =
      std::static_pointer_cast<AsyncSigningHandlePyWrapper>(handle);
  handle_impl->Cancel();
}
|
||||
|
||||
PrivateKeySignerPyWrapper::AsyncSigningHandlePyWrapper::
    ~AsyncSigningHandlePyWrapper() {
  // Drop the reference held on the user's cancel callable; the GIL is
  // required for refcount mutation and may not be held here.
  PyGILState_STATE state = PyGILState_Ensure();
  Py_DECREF(py_user_cancel_fn_);
  PyGILState_Release(state);
}
|
||||
|
||||
void PrivateKeySignerPyWrapper::AsyncSigningHandlePyWrapper::Cancel() {
  if (cancel_py_wrapper_ != nullptr && py_user_cancel_fn_ != nullptr) {
    cancel_py_wrapper_(py_user_cancel_fn_);
  }
  // Releasing the context invalidates the weak_ptr given to the Cython
  // layer, so a late completion callback becomes a no-op.
  completion_context_.reset();
}
|
||||
|
||||
// Constructs a std::string for Cython callers (the constructor can throw,
// which the generated Cython try/catch fails our strict builds on).
std::string MakeStringForCython(const char* inp, size_t size) {
  return std::string(inp, size);
}
|
||||
|
||||
// Overload for NUL-terminated input.
std::string MakeStringForCython(const char* inp) { return std::string(inp); }
|
||||
|
||||
} // namespace grpc_python
|
||||
+143
@@ -0,0 +1,143 @@
|
||||
//
|
||||
//
|
||||
// Copyright 2026 gRPC authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
//
|
||||
|
||||
#ifndef GRPC_PRIVATE_KEY_SIGNER_PY_WRAPPER_H
#define GRPC_PRIVATE_KEY_SIGNER_PY_WRAPPER_H

#include <memory>
#include <string>
#include <variant>

#include "Python.h"
#include "grpc/private_key_signer.h"
#include "absl/status/statusor.h"

namespace grpc_python {

// An implementation of PrivateKeySigner for interop with Python.
// It is thread-safe to call Sign on this class.
class PrivateKeySignerPyWrapper final
    : public grpc_core::PrivateKeySigner,
      public std::enable_shared_from_this<PrivateKeySignerPyWrapper> {
 public:
  // A C-style callback for the PrivateKeySigner Cancel function.
  typedef void (*CancelWrapperForPy)(PyObject* cancel_data);

  // A wrapper for holding the user's Python cancellation function as well as
  // the C callback that can call that function.
  struct AsyncResult {
    CancelWrapperForPy cancel_wrapper;
    PyObject* py_user_cancel_fn;
  };

  // The result of the sign call for interop between Cython and C. Is converted
  // to the C++ std::variant Sign result. Exactly one of sync_result /
  // async_result is meaningful, selected by is_sync.
  struct PrivateKeySignerPyWrapperResult {
    absl::StatusOr<std::string> sync_result;
    AsyncResult async_result;
    bool is_sync;
  };

  // The context needed for calling the Completion callback at the Cython
  // layer. Wrapped in regular Python and passed to the user for them to be
  // able to call the proper on_complete callback passed out by gRPC Core.
  class CompletionContext final {
   public:
    explicit CompletionContext(
        grpc_core::PrivateKeySigner::OnSignComplete on_complete)
        : on_complete_(std::move(on_complete)) {}
    // Forwards the signing result (or error) to gRPC Core.
    void OnComplete(absl::StatusOr<std::string> result) {
      on_complete_(std::move(result));
    };

   private:
    // Holds the completion function passed out by gRPC Core.
    grpc_core::PrivateKeySigner::OnSignComplete on_complete_;
  };

  // A C-Style function for the Cython layer to call when the gRPC C++ layer
  // calls `Sign` on the `PrivateKeySignerPyWrapper`.
  typedef PrivateKeySignerPyWrapperResult (*SignWrapperForPy)(
      absl::string_view data_to_sign,
      grpc_core::PrivateKeySigner::SignatureAlgorithm signature_algorithm,
      PyObject* py_user_sign_fn,
      std::weak_ptr<CompletionContext> completion_context);

  // A cancellable handle for an in-flight asynchronous signing operation.
  class AsyncSigningHandlePyWrapper
      : public grpc_core::PrivateKeySigner::AsyncSigningHandle {
   public:
    AsyncSigningHandlePyWrapper(
        PrivateKeySignerPyWrapper::CancelWrapperForPy cancel_py_wrapper,
        PyObject* py_user_cancel_fn,
        std::shared_ptr<PrivateKeySignerPyWrapper::CompletionContext>
            completion_context)
        : cancel_py_wrapper_(cancel_py_wrapper),
          py_user_cancel_fn_(py_user_cancel_fn),
          completion_context_(std::move(completion_context)) {}
    // This will decrement the py_user_cancel_fn on object destruction
    ~AsyncSigningHandlePyWrapper() override;
    void Cancel();

   private:
    // This is a function provided by the Cython implementation of Private Key
    // Offloading.
    PrivateKeySignerPyWrapper::CancelWrapperForPy cancel_py_wrapper_;
    // This will hold the Python callable object
    PyObject* py_user_cancel_fn_;
    std::shared_ptr<PrivateKeySignerPyWrapper::CompletionContext>
        completion_context_;
  };

  // The entry point for Cython to build a PrivateKeySigner.
  static std::shared_ptr<grpc_core::PrivateKeySigner> Create(
      SignWrapperForPy sign, PyObject* py_user_sign_fn,
      PyObject* destroy_event);

  PrivateKeySignerPyWrapper(SignWrapperForPy sign_py_wrapper,
                            PyObject* py_user_sign_fn, PyObject* destroy_event)
      : sign_py_wrapper_(sign_py_wrapper),
        py_user_sign_fn_(py_user_sign_fn),
        destroy_event_(destroy_event) {}
  ~PrivateKeySignerPyWrapper() override;

  std::variant<absl::StatusOr<std::string>, std::shared_ptr<AsyncSigningHandle>>
  Sign(absl::string_view data_to_sign, SignatureAlgorithm signature_algorithm,
       OnSignComplete on_sign_complete) override;

  void Cancel(std::shared_ptr<AsyncSigningHandle> handle) override;

 private:
  // This is a function provided by the Cython implementation of Private Key
  // Offloading.
  SignWrapperForPy sign_py_wrapper_;
  // This will hold the Python callable object
  PyObject* py_user_sign_fn_;
  // An event to make sure the python interpreter stays alive until this
  // destruction is complete
  PyObject* destroy_event_;
};

// Python cannot call the string constructor directly in Cython. The string
// constructor can throw exceptions, so the generated C code from Cython
// contains try/catch statements. This fails our strict builds. Instead, we can
// just construct them here and pass them down.
std::string MakeStringForCython(const char* inp);
std::string MakeStringForCython(const char* inp, size_t size);
}  // namespace grpc_python

#endif  // GRPC_PRIVATE_KEY_SIGNER_PY_WRAPPER_H
|
||||
BIN
Binary file not shown.
@@ -0,0 +1 @@
|
||||
__version__ = """1.80.0"""
|
||||
@@ -0,0 +1,809 @@
|
||||
# Copyright 2017 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Implementation of gRPC Python interceptors."""
|
||||
|
||||
import collections
|
||||
import sys
|
||||
import types
|
||||
from typing import Any, Callable, Optional, Sequence, Tuple, Union
|
||||
|
||||
import grpc
|
||||
|
||||
from ._typing import DeserializingFunction
|
||||
from ._typing import DoneCallbackType
|
||||
from ._typing import MetadataType
|
||||
from ._typing import RequestIterableType
|
||||
from ._typing import SerializingFunction
|
||||
|
||||
|
||||
class _ServicePipeline(object):
    """Chains server interceptors around a service handler lookup."""

    # The interceptors, applied in order; fixed at construction time.
    # Fixed annotation: Tuple[X] means a 1-tuple; a variadic tuple is
    # Tuple[X, ...].
    interceptors: Tuple[grpc.ServerInterceptor, ...]

    def __init__(self, interceptors: Sequence[grpc.ServerInterceptor]):
        self.interceptors = tuple(interceptors)

    def _continuation(self, thunk: Callable, index: int) -> Callable:
        """Return a continuation that resumes interception at `index`."""
        return lambda context: self._intercept_at(thunk, index, context)

    def _intercept_at(
        self, thunk: Callable, index: int, context: grpc.HandlerCallDetails
    ) -> grpc.RpcMethodHandler:
        """Run interceptor `index`, or the terminal thunk when exhausted."""
        if index < len(self.interceptors):
            interceptor = self.interceptors[index]
            thunk = self._continuation(thunk, index + 1)
            return interceptor.intercept_service(thunk, context)
        return thunk(context)

    def execute(
        self, thunk: Callable, context: grpc.HandlerCallDetails
    ) -> grpc.RpcMethodHandler:
        """Apply the full interceptor pipeline to `thunk` for `context`."""
        return self._intercept_at(thunk, 0, context)
|
||||
|
||||
|
||||
def service_pipeline(
    interceptors: Optional[Sequence[grpc.ServerInterceptor]],
) -> Optional[_ServicePipeline]:
    """Build a _ServicePipeline, or None when there are no interceptors."""
    if not interceptors:
        return None
    return _ServicePipeline(interceptors)
|
||||
|
||||
|
||||
class _ClientCallDetails(
    collections.namedtuple(
        "_ClientCallDetails",
        (
            "method",
            "timeout",
            "metadata",
            "credentials",
            "wait_for_ready",
            "compression",
        ),
    ),
    grpc.ClientCallDetails,
):
    """Concrete, immutable grpc.ClientCallDetails used by the interceptors."""

    pass
|
||||
|
||||
|
||||
def _unwrap_client_call_details(
    call_details: grpc.ClientCallDetails,
    default_details: grpc.ClientCallDetails,
) -> Tuple[
    str, float, MetadataType, grpc.CallCredentials, bool, grpc.Compression
]:
    """Extract call attributes, falling back to defaults for missing ones.

    Interceptor-supplied `call_details` objects are not required to define
    every attribute, so each attribute is read EAFP-style and replaced by the
    corresponding attribute of `default_details` when absent (replacing six
    copy-pasted try/except blocks with one helper).

    Returns:
      A (method, timeout, metadata, credentials, wait_for_ready, compression)
      tuple.
    """

    def _field(name: str) -> Any:
        try:
            return getattr(call_details, name)  # pytype: disable=attribute-error
        except AttributeError:
            return getattr(default_details, name)

    return (
        _field("method"),
        _field("timeout"),
        _field("metadata"),
        _field("credentials"),
        _field("wait_for_ready"),
        _field("compression"),
    )
|
||||
|
||||
|
||||
class _FailureOutcome(
    grpc.RpcError, grpc.Future, grpc.Call
):  # pylint: disable=too-many-ancestors
    """A terminal RPC outcome for an exception raised by an interceptor.

    Presents a completed, failed call/future: never active, not cancellable,
    and re-raises the stored exception from result() and iteration.
    """

    # The exception raised while intercepting the RPC.
    _exception: Exception
    # The traceback captured alongside the exception.
    _traceback: types.TracebackType

    def __init__(self, exception: Exception, traceback: types.TracebackType):
        super(_FailureOutcome, self).__init__()
        self._exception = exception
        self._traceback = traceback

    def initial_metadata(self) -> Optional[MetadataType]:
        return None

    def trailing_metadata(self) -> Optional[MetadataType]:
        return None

    def code(self) -> Optional[grpc.StatusCode]:
        return grpc.StatusCode.INTERNAL

    def details(self) -> Optional[str]:
        return "Exception raised while intercepting the RPC"

    def cancel(self) -> bool:
        # Already finished; cancellation can never succeed.
        return False

    def cancelled(self) -> bool:
        return False

    def is_active(self) -> bool:
        return False

    def time_remaining(self) -> Optional[float]:
        return None

    def running(self) -> bool:
        return False

    def done(self) -> bool:
        return True

    def result(self, ignored_timeout: Optional[float] = None):
        raise self._exception

    def exception(
        self, ignored_timeout: Optional[float] = None
    ) -> Optional[Exception]:
        return self._exception

    def traceback(
        self, ignored_timeout: Optional[float] = None
    ) -> Optional[types.TracebackType]:
        return self._traceback

    def add_callback(self, unused_callback) -> bool:
        return False

    def add_done_callback(self, fn: DoneCallbackType) -> None:
        # Outcome is already done, so the callback fires immediately.
        fn(self)

    def __iter__(self):
        return self

    def __next__(self):
        raise self._exception

    def next(self):
        # Python 2-style iteration alias.
        return self.__next__()
|
||||
|
||||
|
||||
class _UnaryOutcome(grpc.Call, grpc.Future):
    """A completed, successful unary response presented as a call/future.

    Delegates call-level accessors to the underlying grpc.Call and reports
    itself as done with `response` as the result.
    """

    # The deserialized response message.
    _response: Any
    # The underlying completed call, used for metadata/status accessors.
    _call: grpc.Call

    def __init__(self, response: Any, call: grpc.Call):
        self._response = response
        self._call = call

    def initial_metadata(self) -> Optional[MetadataType]:
        return self._call.initial_metadata()

    def trailing_metadata(self) -> Optional[MetadataType]:
        return self._call.trailing_metadata()

    def code(self) -> Optional[grpc.StatusCode]:
        return self._call.code()

    def details(self) -> Optional[str]:
        return self._call.details()

    def is_active(self) -> bool:
        return self._call.is_active()

    def time_remaining(self) -> Optional[float]:
        return self._call.time_remaining()

    def cancel(self) -> bool:
        return self._call.cancel()

    def add_callback(self, callback) -> bool:
        return self._call.add_callback(callback)

    def cancelled(self) -> bool:
        return False

    def running(self) -> bool:
        return False

    def done(self) -> bool:
        return True

    def result(self, ignored_timeout: Optional[float] = None):
        return self._response

    def exception(self, ignored_timeout: Optional[float] = None):
        return None

    def traceback(self, ignored_timeout: Optional[float] = None):
        return None

    def add_done_callback(self, fn: DoneCallbackType) -> None:
        # Outcome is already done, so the callback fires immediately.
        fn(self)
|
||||
|
||||
|
||||
class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
    """Unary-unary callable that routes invocations through an interceptor."""

    _thunk: Callable
    _method: str
    _interceptor: grpc.UnaryUnaryClientInterceptor

    def __init__(
        self,
        thunk: Callable,
        method: str,
        interceptor: grpc.UnaryUnaryClientInterceptor,
    ):
        self._thunk = thunk
        self._method = method
        self._interceptor = interceptor

    def __call__(
        self,
        request: Any,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> Any:
        """Invoke the RPC and return only the response message."""
        return self._with_call(
            request,
            timeout=timeout,
            metadata=metadata,
            credentials=credentials,
            wait_for_ready=wait_for_ready,
            compression=compression,
        )[0]

    def _with_call(
        self,
        request: Any,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> Tuple[Any, grpc.Call]:
        """Run the interceptor chain synchronously; return (response, call)."""
        client_call_details = _ClientCallDetails(
            self._method,
            timeout,
            metadata,
            credentials,
            wait_for_ready,
            compression,
        )

        def continuation(new_details, request):
            # Merge interceptor-supplied details with the originals, then
            # perform the actual RPC. Failures are wrapped so the caller can
            # treat every outcome uniformly as a Call/Future.
            (
                updated_method,
                updated_timeout,
                updated_metadata,
                updated_credentials,
                updated_wait_for_ready,
                updated_compression,
            ) = _unwrap_client_call_details(new_details, client_call_details)
            try:
                response, call = self._thunk(updated_method).with_call(
                    request,
                    timeout=updated_timeout,
                    metadata=updated_metadata,
                    credentials=updated_credentials,
                    wait_for_ready=updated_wait_for_ready,
                    compression=updated_compression,
                )
                return _UnaryOutcome(response, call)
            except grpc.RpcError as rpc_error:
                return rpc_error
            except Exception as exception:  # pylint:disable=broad-except
                return _FailureOutcome(exception, sys.exc_info()[2])

        call = self._interceptor.intercept_unary_unary(
            continuation, client_call_details, request
        )
        # call.result() re-raises if the interceptor chain produced a failure.
        return call.result(), call

    def with_call(
        self,
        request: Any,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> Tuple[Any, grpc.Call]:
        """Invoke the RPC and return both the response and the call object."""
        return self._with_call(
            request,
            timeout=timeout,
            metadata=metadata,
            credentials=credentials,
            wait_for_ready=wait_for_ready,
            compression=compression,
        )

    def future(
        self,
        request: Any,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> Any:
        """Invoke the RPC asynchronously through the interceptor chain."""
        client_call_details = _ClientCallDetails(
            self._method,
            timeout,
            metadata,
            credentials,
            wait_for_ready,
            compression,
        )

        def continuation(new_details, request):
            (
                updated_method,
                updated_timeout,
                updated_metadata,
                updated_credentials,
                updated_wait_for_ready,
                updated_compression,
            ) = _unwrap_client_call_details(new_details, client_call_details)
            return self._thunk(updated_method).future(
                request,
                timeout=updated_timeout,
                metadata=updated_metadata,
                credentials=updated_credentials,
                wait_for_ready=updated_wait_for_ready,
                compression=updated_compression,
            )

        try:
            return self._interceptor.intercept_unary_unary(
                continuation, client_call_details, request
            )
        except Exception as exception:  # pylint:disable=broad-except
            return _FailureOutcome(exception, sys.exc_info()[2])
|
||||
|
||||
|
||||
class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
    """Unary-stream callable that routes invocations through an interceptor."""

    _thunk: Callable
    _method: str
    _interceptor: grpc.UnaryStreamClientInterceptor

    def __init__(
        self,
        thunk: Callable,
        method: str,
        interceptor: grpc.UnaryStreamClientInterceptor,
    ):
        self._thunk = thunk
        self._method = method
        self._interceptor = interceptor

    def __call__(
        self,
        request: Any,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ):
        """Invoke the RPC through the interceptor; returns a response iterator."""
        client_call_details = _ClientCallDetails(
            self._method,
            timeout,
            metadata,
            credentials,
            wait_for_ready,
            compression,
        )

        def continuation(new_details, request):
            (
                updated_method,
                updated_timeout,
                updated_metadata,
                updated_credentials,
                updated_wait_for_ready,
                updated_compression,
            ) = _unwrap_client_call_details(new_details, client_call_details)
            return self._thunk(updated_method)(
                request,
                timeout=updated_timeout,
                metadata=updated_metadata,
                credentials=updated_credentials,
                wait_for_ready=updated_wait_for_ready,
                compression=updated_compression,
            )

        try:
            return self._interceptor.intercept_unary_stream(
                continuation, client_call_details, request
            )
        except Exception as exception:  # pylint:disable=broad-except
            # _FailureOutcome is itself iterable, so callers that only
            # iterate the result still observe the failure.
            return _FailureOutcome(exception, sys.exc_info()[2])
|
||||
|
||||
|
||||
class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable):
    """Stream-unary callable that routes invocations through an interceptor."""

    _thunk: Callable
    _method: str
    _interceptor: grpc.StreamUnaryClientInterceptor

    def __init__(
        self,
        thunk: Callable,
        method: str,
        interceptor: grpc.StreamUnaryClientInterceptor,
    ):
        self._thunk = thunk
        self._method = method
        self._interceptor = interceptor

    def __call__(
        self,
        request_iterator: RequestIterableType,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> Any:
        """Invoke the RPC and return only the response message."""
        return self._with_call(
            request_iterator,
            timeout=timeout,
            metadata=metadata,
            credentials=credentials,
            wait_for_ready=wait_for_ready,
            compression=compression,
        )[0]

    def _with_call(
        self,
        request_iterator: RequestIterableType,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> Tuple[Any, grpc.Call]:
        """Run the interceptor chain synchronously; return (response, call)."""
        client_call_details = _ClientCallDetails(
            self._method,
            timeout,
            metadata,
            credentials,
            wait_for_ready,
            compression,
        )

        def continuation(new_details, request_iterator):
            (
                updated_method,
                updated_timeout,
                updated_metadata,
                updated_credentials,
                updated_wait_for_ready,
                updated_compression,
            ) = _unwrap_client_call_details(new_details, client_call_details)
            try:
                response, call = self._thunk(updated_method).with_call(
                    request_iterator,
                    timeout=updated_timeout,
                    metadata=updated_metadata,
                    credentials=updated_credentials,
                    wait_for_ready=updated_wait_for_ready,
                    compression=updated_compression,
                )
                return _UnaryOutcome(response, call)
            except grpc.RpcError as rpc_error:
                return rpc_error
            except Exception as exception:  # pylint:disable=broad-except
                return _FailureOutcome(exception, sys.exc_info()[2])

        call = self._interceptor.intercept_stream_unary(
            continuation, client_call_details, request_iterator
        )
        # call.result() re-raises if the interceptor chain produced a failure.
        return call.result(), call

    def with_call(
        self,
        request_iterator: RequestIterableType,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> Tuple[Any, grpc.Call]:
        """Invoke the RPC and return both the response and the call object."""
        return self._with_call(
            request_iterator,
            timeout=timeout,
            metadata=metadata,
            credentials=credentials,
            wait_for_ready=wait_for_ready,
            compression=compression,
        )

    def future(
        self,
        request_iterator: RequestIterableType,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> Any:
        """Invoke the RPC asynchronously through the interceptor chain."""
        client_call_details = _ClientCallDetails(
            self._method,
            timeout,
            metadata,
            credentials,
            wait_for_ready,
            compression,
        )

        def continuation(new_details, request_iterator):
            (
                updated_method,
                updated_timeout,
                updated_metadata,
                updated_credentials,
                updated_wait_for_ready,
                updated_compression,
            ) = _unwrap_client_call_details(new_details, client_call_details)
            return self._thunk(updated_method).future(
                request_iterator,
                timeout=updated_timeout,
                metadata=updated_metadata,
                credentials=updated_credentials,
                wait_for_ready=updated_wait_for_ready,
                compression=updated_compression,
            )

        try:
            return self._interceptor.intercept_stream_unary(
                continuation, client_call_details, request_iterator
            )
        except Exception as exception:  # pylint:disable=broad-except
            return _FailureOutcome(exception, sys.exc_info()[2])
|
||||
|
||||
|
||||
class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable):
    """Stream-stream callable that routes invocations through an interceptor."""

    _thunk: Callable
    _method: str
    _interceptor: grpc.StreamStreamClientInterceptor

    def __init__(
        self,
        thunk: Callable,
        method: str,
        interceptor: grpc.StreamStreamClientInterceptor,
    ):
        self._thunk = thunk
        self._method = method
        self._interceptor = interceptor

    def __call__(
        self,
        request_iterator: RequestIterableType,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ):
        """Invoke the RPC through the interceptor; returns a response iterator."""
        client_call_details = _ClientCallDetails(
            self._method,
            timeout,
            metadata,
            credentials,
            wait_for_ready,
            compression,
        )

        def continuation(new_details, request_iterator):
            (
                updated_method,
                updated_timeout,
                updated_metadata,
                updated_credentials,
                updated_wait_for_ready,
                updated_compression,
            ) = _unwrap_client_call_details(new_details, client_call_details)
            return self._thunk(updated_method)(
                request_iterator,
                timeout=updated_timeout,
                metadata=updated_metadata,
                credentials=updated_credentials,
                wait_for_ready=updated_wait_for_ready,
                compression=updated_compression,
            )

        try:
            return self._interceptor.intercept_stream_stream(
                continuation, client_call_details, request_iterator
            )
        except Exception as exception:  # pylint:disable=broad-except
            # _FailureOutcome is itself iterable, so callers that only
            # iterate the result still observe the failure.
            return _FailureOutcome(exception, sys.exc_info()[2])
|
||||
|
||||
|
||||
class _Channel(grpc.Channel):
    """A grpc.Channel that applies a single client interceptor.

    Wraps an underlying channel; each multi-callable factory method returns
    an intercepting callable when the interceptor matches the RPC arity,
    and the plain underlying callable otherwise.
    """

    _channel: grpc.Channel
    _interceptor: Union[
        grpc.UnaryUnaryClientInterceptor,
        grpc.UnaryStreamClientInterceptor,
        grpc.StreamStreamClientInterceptor,
        grpc.StreamUnaryClientInterceptor,
    ]

    def __init__(
        self,
        channel: grpc.Channel,
        interceptor: Union[
            grpc.UnaryUnaryClientInterceptor,
            grpc.UnaryStreamClientInterceptor,
            grpc.StreamStreamClientInterceptor,
            grpc.StreamUnaryClientInterceptor,
        ],
    ):
        self._channel = channel
        self._interceptor = interceptor

    def subscribe(
        self, callback: Callable, try_to_connect: Optional[bool] = False
    ):
        self._channel.subscribe(callback, try_to_connect=try_to_connect)

    def unsubscribe(self, callback: Callable):
        self._channel.unsubscribe(callback)

    # pylint: disable=arguments-differ
    def unary_unary(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> grpc.UnaryUnaryMultiCallable:
        # Named def instead of a lambda assignment (PEP 8 E731).
        # pytype: disable=wrong-arg-count
        def thunk(m):
            return self._channel.unary_unary(
                m,
                request_serializer,
                response_deserializer,
                _registered_method,
            )

        # pytype: enable=wrong-arg-count
        if isinstance(self._interceptor, grpc.UnaryUnaryClientInterceptor):
            return _UnaryUnaryMultiCallable(thunk, method, self._interceptor)
        return thunk(method)

    # pylint: disable=arguments-differ
    def unary_stream(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> grpc.UnaryStreamMultiCallable:
        # pytype: disable=wrong-arg-count
        def thunk(m):
            return self._channel.unary_stream(
                m,
                request_serializer,
                response_deserializer,
                _registered_method,
            )

        # pytype: enable=wrong-arg-count
        if isinstance(self._interceptor, grpc.UnaryStreamClientInterceptor):
            return _UnaryStreamMultiCallable(thunk, method, self._interceptor)
        return thunk(method)

    # pylint: disable=arguments-differ
    def stream_unary(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> grpc.StreamUnaryMultiCallable:
        # pytype: disable=wrong-arg-count
        def thunk(m):
            return self._channel.stream_unary(
                m,
                request_serializer,
                response_deserializer,
                _registered_method,
            )

        # pytype: enable=wrong-arg-count
        if isinstance(self._interceptor, grpc.StreamUnaryClientInterceptor):
            return _StreamUnaryMultiCallable(thunk, method, self._interceptor)
        return thunk(method)

    # pylint: disable=arguments-differ
    def stream_stream(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> grpc.StreamStreamMultiCallable:
        # pytype: disable=wrong-arg-count
        def thunk(m):
            return self._channel.stream_stream(
                m,
                request_serializer,
                response_deserializer,
                _registered_method,
            )

        # pytype: enable=wrong-arg-count
        if isinstance(self._interceptor, grpc.StreamStreamClientInterceptor):
            return _StreamStreamMultiCallable(thunk, method, self._interceptor)
        return thunk(method)

    def _close(self):
        self._channel.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self._close()
        return False

    def close(self):
        self._channel.close()
|
||||
|
||||
|
||||
def intercept_channel(
    channel: grpc.Channel,
    *interceptors: Optional[
        Sequence[
            Union[
                grpc.UnaryUnaryClientInterceptor,
                grpc.UnaryStreamClientInterceptor,
                grpc.StreamStreamClientInterceptor,
                grpc.StreamUnaryClientInterceptor,
            ]
        ]
    ],
) -> grpc.Channel:
    """Wrap ``channel`` so each interceptor is applied, outermost first.

    Interceptors are applied in reverse so the first interceptor passed is
    the outermost layer (invoked first on each RPC).

    Raises:
      TypeError: If any argument is not one of the four client interceptor
        interfaces.
    """
    # Idiom fix: a single isinstance() with a tuple replaces four chained
    # `not isinstance(...)` checks.
    for interceptor in reversed(list(interceptors)):
        if not isinstance(
            interceptor,
            (
                grpc.UnaryUnaryClientInterceptor,
                grpc.UnaryStreamClientInterceptor,
                grpc.StreamUnaryClientInterceptor,
                grpc.StreamStreamClientInterceptor,
            ),
        ):
            error_msg = (
                "interceptor must be "
                "grpc.UnaryUnaryClientInterceptor or "
                "grpc.UnaryStreamClientInterceptor or "
                "grpc.StreamUnaryClientInterceptor or "
                "grpc.StreamStreamClientInterceptor"
            )
            raise TypeError(error_msg)
        channel = _Channel(channel, interceptor)
    return channel
|
||||
@@ -0,0 +1,312 @@
|
||||
# Copyright 2023 The gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import abc
|
||||
import contextlib
|
||||
import logging
|
||||
import threading
|
||||
from typing import (
|
||||
Any,
|
||||
Generator,
|
||||
Generic,
|
||||
List,
|
||||
Optional,
|
||||
Tuple,
|
||||
TypeVar,
|
||||
Union,
|
||||
)
|
||||
|
||||
from grpc._cython import cygrpc as _cygrpc
|
||||
from grpc._typing import ChannelArgumentType
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_channel = Any # _channel.py imports this module.
|
||||
ClientCallTracerCapsule = TypeVar("ClientCallTracerCapsule")
|
||||
ServerCallTracerFactoryCapsule = TypeVar("ServerCallTracerFactoryCapsule")
|
||||
|
||||
_plugin_lock: threading.RLock = threading.RLock()
|
||||
_OBSERVABILITY_PLUGIN: Optional["ObservabilityPlugin"] = None
|
||||
_SERVICES_TO_EXCLUDE: List[bytes] = [
|
||||
b"google.monitoring.v3.MetricService",
|
||||
b"google.devtools.cloudtrace.v2.TraceService",
|
||||
]
|
||||
|
||||
|
||||
class ServerCallTracerFactory:
    """An encapsulation of a ServerCallTracerFactory.

    Instances of this class can be passed to a Channel as values for the
    grpc.experimental.server_call_tracer_factory option
    """

    def __init__(self, address):
        # Opaque native address of the underlying factory object.
        self._address = address

    def __int__(self):
        """Expose the stored address so the option value converts to int."""
        return self._address
|
||||
|
||||
|
||||
class ObservabilityPlugin(
    Generic[ClientCallTracerCapsule, ServerCallTracerFactoryCapsule],
    metaclass=abc.ABCMeta,
):
    """Abstract base class for observability plugin.

    *This is a semi-private class that was intended for the exclusive use of
    the gRPC team.*

    The ClientCallTracerCapsule and ServerCallTracerFactoryCapsule created by
    this plugin should be injected to gRPC core using observability_init at the
    start of a program, before any channels/servers are built.

    Any future methods added to this interface cannot have the
    @abc.abstractmethod annotation.

    Attributes:
      _tracing_enabled: A bool indicates whether tracing is enabled.
      _stats_enabled: A bool indicates whether stats(metrics) is enabled.
      _registered_methods: A set which stores the registered method names in
        bytes.
    """

    # Doc fix: the original docstring had the descriptions of
    # _tracing_enabled and _stats_enabled swapped.
    _tracing_enabled: bool = False
    _stats_enabled: bool = False

    @abc.abstractmethod
    def create_client_call_tracer(
        self, method_name: bytes, target: bytes
    ) -> ClientCallTracerCapsule:
        """Creates a ClientCallTracerCapsule.

        After register the plugin, if tracing or stats is enabled, this method
        will be called after a call was created, the ClientCallTracer created
        by this method will be saved to call context.

        The ClientCallTracer is an object which implements `grpc_core::ClientCallTracer`
        interface and wrapped in a PyCapsule using `client_call_tracer` as name.

        Args:
          method_name: The method name of the call in byte format.
          target: The channel target of the call in byte format.

        Returns:
          A PyCapsule which stores a ClientCallTracer object.
        """
        # Doc fix: removed the stale `registered_method` entry from Args --
        # this method takes no such parameter.
        raise NotImplementedError()

    @abc.abstractmethod
    def save_trace_context(
        self, trace_id: str, span_id: str, is_sampled: bool
    ) -> None:
        """Saves the trace_id and span_id related to the current span.

        After register the plugin, if tracing is enabled, this method will be
        called after the server finished sending response.

        This method can be used to propagate census context.

        Args:
          trace_id: The identifier for the trace associated with the span as a
            32-character hexadecimal encoded string,
            e.g. 26ed0036f2eff2b7317bccce3e28d01f
          span_id: The identifier for the span as a 16-character hexadecimal encoded
            string. e.g. 113ec879e62583bc
          is_sampled: A bool indicates whether the span is sampled.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def create_server_call_tracer_factory(
        self,
        *,
        xds: bool = False,
    ) -> Optional[ServerCallTracerFactoryCapsule]:
        """Creates a ServerCallTracerFactoryCapsule.

        This method will be called at server initialization time to create a
        ServerCallTracerFactory, which will be registered to gRPC core.

        The ServerCallTracerFactory is an object which implements
        `grpc_core::ServerCallTracerFactory` interface and wrapped in a PyCapsule
        using `server_call_tracer_factory` as name.

        Args:
          xds: Whether the server is xds server.

        Returns:
          A PyCapsule which stores a ServerCallTracerFactory object. Or None if
          plugin decides not to create ServerCallTracerFactory.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def record_rpc_latency(
        self, method: str, target: str, rpc_latency: float, status_code: Any
    ) -> None:
        """Record the latency of the RPC.

        After register the plugin, if stats is enabled, this method will be
        called at the end of each RPC.

        Args:
          method: The fully-qualified name of the RPC method being invoked.
          target: The target name of the RPC method being invoked.
          rpc_latency: The latency for the RPC in seconds, equals to the time between
            when the client invokes the RPC and when the client receives the status.
          status_code: An element of grpc.StatusCode in string format representing the
            final status for the RPC.
        """
        raise NotImplementedError()

    def set_tracing(self, enable: bool) -> None:
        """Enable or disable tracing.

        Args:
          enable: A bool indicates whether tracing should be enabled.
        """
        self._tracing_enabled = enable

    def set_stats(self, enable: bool) -> None:
        """Enable or disable stats(metrics).

        Args:
          enable: A bool indicates whether stats should be enabled.
        """
        self._stats_enabled = enable

    def save_registered_method(self, method_name: bytes) -> None:
        """Saves the method name to registered_method list.

        When exporting metrics, method name for unregistered methods will be replaced
        with 'other' by default.

        Args:
          method_name: The method name in bytes.
        """
        raise NotImplementedError()

    @property
    def tracing_enabled(self) -> bool:
        return self._tracing_enabled

    @property
    def stats_enabled(self) -> bool:
        return self._stats_enabled

    @property
    def observability_enabled(self) -> bool:
        # Observability is on if either tracing or stats collection is on.
        return self.tracing_enabled or self.stats_enabled
|
||||
|
||||
|
||||
@contextlib.contextmanager
def get_plugin() -> Generator[Optional[ObservabilityPlugin], None, None]:
    """Yield the registered ObservabilityPlugin, holding the plugin lock.

    Yields:
      The ObservabilityPlugin currently registered with the _observability
      module, or None if no plugin exists at the time of calling this method.
      The module-level lock is held for the duration of the `with` body.
    """
    with _plugin_lock:
        yield _OBSERVABILITY_PLUGIN
|
||||
|
||||
|
||||
def set_plugin(observability_plugin: Optional[ObservabilityPlugin]) -> None:
    """Save ObservabilityPlugin to _observability module.

    Passing None clears the currently registered plugin.

    Args:
      observability_plugin: The ObservabilityPlugin to save.

    Raises:
      ValueError: If an ObservabilityPlugin was already registered at the
        time of calling this method.
    """
    global _OBSERVABILITY_PLUGIN  # pylint: disable=global-statement # noqa: PLW0603
    with _plugin_lock:
        # Replacing one live plugin with another is an error; clearing
        # (None) or first-time registration is fine.
        if observability_plugin and _OBSERVABILITY_PLUGIN:
            raise ValueError("observability_plugin was already set!")
        _OBSERVABILITY_PLUGIN = observability_plugin
|
||||
|
||||
|
||||
def observability_init(observability_plugin: ObservabilityPlugin) -> None:
    """Initialize observability with provided ObservabilityPlugin.

    This method has to be called at the start of a program, before any
    channels/servers are built.

    Args:
      observability_plugin: The ObservabilityPlugin to use.

    Raises:
      ValueError: If an ObservabilityPlugin was already registered at the
        time of calling this method.
    """
    set_plugin(observability_plugin)
|
||||
|
||||
|
||||
def observability_deinit() -> None:
    """Clear the observability context, including ObservabilityPlugin and
    ServerCallTracerFactory

    This method has to be called after exiting the observability context so
    that it's possible to re-initialize again.
    """
    set_plugin(None)
    _cygrpc.clear_server_call_tracer_factory()
|
||||
|
||||
|
||||
def maybe_record_rpc_latency(state: "_channel._RPCState") -> None:
    """Record the latency of the RPC, if the plugin is registered and stats is enabled.

    This method will be called at the end of each RPC.

    Args:
      state: a grpc._channel._RPCState object which contains the stats related to the
        RPC.
    """
    # TODO(xuanwn): use channel args to exclude those metrics.
    # Perf fix: encode the method name once instead of once per excluded
    # service (the encode was loop-invariant).
    method_bytes = state.method.encode("utf8")
    for exclude_prefix in _SERVICES_TO_EXCLUDE:
        if exclude_prefix in method_bytes:
            return
    with get_plugin() as plugin:
        if plugin and plugin.stats_enabled:
            rpc_latency_s = state.rpc_end_time - state.rpc_start_time
            rpc_latency_ms = rpc_latency_s * 1000
            plugin.record_rpc_latency(
                state.method, state.target, rpc_latency_ms, state.code
            )
|
||||
|
||||
|
||||
def create_server_call_tracer_factory_option(
    xds: bool,
) -> Union[Tuple[ChannelArgumentType], Tuple[()]]:
    """Build the channel option carrying a ServerCallTracerFactory, if any.

    Returns an empty tuple when no plugin is registered, stats is disabled,
    or the plugin declines to create a factory.
    """
    with get_plugin() as plugin:
        if plugin and plugin.stats_enabled:
            factory_address = _cygrpc.get_server_call_tracer_factory_address(
                plugin, xds
            )
            if factory_address:
                option = (
                    "grpc.experimental.server_call_tracer_factory",
                    ServerCallTracerFactory(factory_address),
                )
                return (option,)
    return ()
|
||||
@@ -0,0 +1,136 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import collections
|
||||
import logging
|
||||
import threading
|
||||
from typing import Callable, Optional, Type
|
||||
|
||||
import grpc
|
||||
from grpc import _common
|
||||
from grpc._cython import cygrpc
|
||||
from grpc._typing import MetadataType
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class _AuthMetadataContext(
    collections.namedtuple(
        "AuthMetadataContext", ("service_url", "method_name")
    ),
    grpc.AuthMetadataContext,
):
    """Immutable (service_url, method_name) pair handed to auth plugins."""
|
||||
|
||||
|
||||
class _CallbackState(object):
|
||||
def __init__(self):
|
||||
self.lock = threading.Lock()
|
||||
self.called = False
|
||||
self.exception = None
|
||||
|
||||
|
||||
class _AuthMetadataPluginCallback(grpc.AuthMetadataPluginCallback):
    """One-shot callback handed to a user AuthMetadataPlugin.

    Translates the plugin's (metadata, error) result into the cygrpc
    callback protocol and enforces that it is invoked at most once.
    """

    _state: _CallbackState
    _callback: Callable

    def __init__(self, state: _CallbackState, callback: Callable):
        self._state = state
        self._callback = callback

    def __call__(
        self, metadata: MetadataType, error: Optional[Type[BaseException]]
    ):
        with self._state.lock:
            if self._state.exception is None:
                if self._state.called:
                    error_msg = (
                        "AuthMetadataPluginCallback invoked more than once!"
                    )
                    raise RuntimeError(error_msg)
                self._state.called = True
            else:
                # Bug fix: the original message was a plain (non-f) string, so
                # "{self._state.exception}" was emitted literally, and it was
                # missing the space after "AuthMetadataPluginCallback".
                error_msg = (
                    "AuthMetadataPluginCallback raised exception "
                    f'"{self._state.exception}"!'
                )
                raise RuntimeError(error_msg)
        if error is None:
            self._callback(metadata, cygrpc.StatusCode.ok, None)
        else:
            self._callback(
                None, cygrpc.StatusCode.internal, _common.encode(str(error))
            )
|
||||
|
||||
|
||||
class _Plugin(object):
    """Adapts a user AuthMetadataPlugin for invocation by gRPC Core."""

    _metadata_plugin: grpc.AuthMetadataPlugin

    def __init__(self, metadata_plugin: grpc.AuthMetadataPlugin):
        self._metadata_plugin = metadata_plugin
        self._stored_ctx = None

        try:
            import contextvars  # pylint: disable=wrong-import-position

            # The plugin may be invoked on a thread created by Core, which will not
            # have the context propagated. This context is stored and installed in
            # the thread invoking the plugin.
            # NOTE(review): the captured context is stored here but not visibly
            # installed anywhere in this class -- confirm whether the invoking
            # layer is expected to run the plugin inside self._stored_ctx.
            self._stored_ctx = contextvars.copy_context()
        except ImportError:
            # Support versions predating contextvars.
            pass

    def __call__(self, service_url: str, method_name: str, callback: Callable):
        """Entry point invoked by Core; decodes identifiers and runs the plugin."""
        context = _AuthMetadataContext(
            _common.decode(service_url), _common.decode(method_name)
        )
        callback_state = _CallbackState()
        try:
            self._metadata_plugin(
                context, _AuthMetadataPluginCallback(callback_state, callback)
            )
        except Exception as exception:  # pylint: disable=broad-except
            _LOGGER.exception(
                'AuthMetadataPluginCallback "%s" raised exception!',
                self._metadata_plugin,
            )
            with callback_state.lock:
                callback_state.exception = exception
                if callback_state.called:
                    # The plugin already delivered a result; nothing to do.
                    return
            callback(
                None, cygrpc.StatusCode.internal, _common.encode(str(exception))
            )
|
||||
|
||||
|
||||
def metadata_plugin_call_credentials(
    metadata_plugin: grpc.AuthMetadataPlugin, name: Optional[str]
) -> grpc.CallCredentials:
    """Wrap an AuthMetadataPlugin as CallCredentials usable on a call.

    Args:
        metadata_plugin: The user-supplied plugin (callable or instance).
        name: Optional display name; when None, falls back to the plugin's
            __name__ or, failing that, its class name.

    Returns:
        A grpc.CallCredentials backed by the plugin.
    """
    if name is not None:
        effective_name = name
    else:
        effective_name = getattr(
            metadata_plugin, "__name__", metadata_plugin.__class__.__name__
        )
    cython_credentials = cygrpc.MetadataPluginCallCredentials(
        _Plugin(metadata_plugin), _common.encode(effective_name)
    )
    return grpc.CallCredentials(cython_credentials)
|
||||
@@ -0,0 +1,163 @@
|
||||
# Copyright 2020 The gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
import types
|
||||
from typing import Tuple, Union
|
||||
|
||||
# Entry points a runtime-proto-capable grpc_tools.protoc must expose.
_REQUIRED_SYMBOLS = ("_protos", "_services", "_protos_and_services")
# Oldest interpreter version supporting the dynamic-stub machinery.
_MINIMUM_VERSION = (3, 5, 0)

_UNINSTALLED_TEMPLATE = (
    "Install the grpcio-tools package (1.32.0+) to use the {} function."
)
# BUG FIX: message previously read "is only on available on Python 3.X".
_VERSION_ERROR_TEMPLATE = (
    "The {} function is only available on Python 3.X interpreters."
)
|
||||
|
||||
|
||||
def _has_runtime_proto_symbols(mod: types.ModuleType) -> bool:
    """Return True iff *mod* exposes every required runtime-proto symbol."""
    for symbol in _REQUIRED_SYMBOLS:
        if not hasattr(mod, symbol):
            return False
    return True
|
||||
|
||||
|
||||
def _is_grpc_tools_importable() -> bool:
    """Report whether grpc_tools can be imported in this environment."""
    try:
        import grpc_tools  # pylint: disable=unused-import # pytype: disable=import-error
    except ImportError as e:
        # NOTE: It's possible that we're encountering a transitive ImportError
        # (some dependency of grpc_tools failed, not grpc_tools itself), in
        # which case it must propagate rather than be swallowed.
        if "grpc_tools" not in e.args[0]:
            raise
        return False
    return True
|
||||
|
||||
|
||||
def _call_with_lazy_import(
    fn_name: str, protobuf_path: str
) -> Union[types.ModuleType, Tuple[types.ModuleType, types.ModuleType]]:
    """Dispatch to a grpc_tools.protoc helper, importing it on first use.

    Args:
        fn_name: Helper name ("protos", "services", "protos_and_services");
            the underscore-prefixed variant is looked up on
            grpc_tools.protoc.
        protobuf_path: Path of the .proto file to hand to the helper.

    Returns:
        The module (or pair of modules) produced by the helper.

    Raises:
        NotImplementedError: If the interpreter predates _MINIMUM_VERSION or
            a runtime-proto-capable grpc_tools is not installed.
    """
    if sys.version_info < _MINIMUM_VERSION:
        raise NotImplementedError(_VERSION_ERROR_TEMPLATE.format(fn_name))
    if not _is_grpc_tools_importable():
        raise NotImplementedError(_UNINSTALLED_TEMPLATE.format(fn_name))
    import grpc_tools.protoc  # pytype: disable=import-error

    if not _has_runtime_proto_symbols(grpc_tools.protoc):
        # grpc_tools is present but too old to carry the runtime helpers.
        raise NotImplementedError(_UNINSTALLED_TEMPLATE.format(fn_name))
    return getattr(grpc_tools.protoc, "_" + fn_name)(protobuf_path)
|
||||
|
||||
|
||||
def protos(protobuf_path):  # pylint: disable=unused-argument
    """Returns a module generated by the indicated .proto file.

    THIS IS AN EXPERIMENTAL API.

    Use this function to retrieve classes corresponding to message
    definitions in the .proto file. The returned module object is
    equivalent to the _pb2.py file protoc would generate; inspect its
    contents with dir(), e.g.:

    ```
    protos = grpc.protos("foo.proto")
    print(dir(protos))
    ```

    To completely disable the machinery behind this function, set the
    GRPC_PYTHON_DISABLE_DYNAMIC_STUBS environment variable to "true".

    Args:
      protobuf_path: The path to the .proto file on the filesystem. This
        path must be resolvable from an entry on sys.path and so must all
        of its transitive dependencies.

    Returns:
      A module object corresponding to the message code for the indicated
      .proto file. Equivalent to a generated _pb2.py file.
    """
    return _call_with_lazy_import("protos", protobuf_path)
|
||||
|
||||
|
||||
def services(protobuf_path):  # pylint: disable=unused-argument
    """Returns a module generated by the indicated .proto file.

    THIS IS AN EXPERIMENTAL API.

    Use this function to retrieve classes and functions corresponding to
    service definitions in the .proto file, including both stub and
    servicer definitions. The returned module object is equivalent to the
    _pb2_grpc.py file protoc would generate; inspect its contents with
    dir(), e.g.:

    ```
    services = grpc.services("foo.proto")
    print(dir(services))
    ```

    To completely disable the machinery behind this function, set the
    GRPC_PYTHON_DISABLE_DYNAMIC_STUBS environment variable to "true".

    Args:
      protobuf_path: The path to the .proto file on the filesystem. This
        path must be resolvable from an entry on sys.path and so must all
        of its transitive dependencies.

    Returns:
      A module object corresponding to the stub/service code for the
      indicated .proto file. Equivalent to a generated _pb2_grpc.py file.
    """
    return _call_with_lazy_import("services", protobuf_path)
|
||||
|
||||
|
||||
def protos_and_services(protobuf_path):  # pylint: disable=unused-argument
    """Returns a 2-tuple of modules corresponding to protos and services.

    THIS IS AN EXPERIMENTAL API.

    The return value of this function is equivalent to a call to protos
    followed by a call to services.

    To completely disable the machinery behind this function, set the
    GRPC_PYTHON_DISABLE_DYNAMIC_STUBS environment variable to "true".

    Args:
      protobuf_path: The path to the .proto file on the filesystem. This
        path must be resolvable from an entry on sys.path and so must all
        of its transitive dependencies.

    Returns:
      A 2-tuple of module objects corresponding to
      (protos(path), services(path)).
    """
    return _call_with_lazy_import("protos_and_services", protobuf_path)
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,587 @@
|
||||
# Copyright 2020 The gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Functions that obviate explicit stubs and explicit channels."""
|
||||
|
||||
import collections
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
from typing import (
|
||||
Any,
|
||||
AnyStr,
|
||||
Callable,
|
||||
Dict,
|
||||
Iterator,
|
||||
Optional,
|
||||
Sequence,
|
||||
Tuple,
|
||||
TypeVar,
|
||||
Union,
|
||||
)
|
||||
|
||||
import grpc
|
||||
from grpc.experimental import experimental_api
|
||||
|
||||
# Type variables for the request/response payloads of the helper RPCs below.
RequestType = TypeVar("RequestType")
ResponseType = TypeVar("ResponseType")

# Channel options in the shape accepted by grpc.secure_channel.
OptionsType = Sequence[Tuple[str, str]]
# Cache key: every input that affects a channel's identity/configuration.
CacheKey = Tuple[
    str,
    OptionsType,
    Optional[grpc.ChannelCredentials],
    Optional[grpc.Compression],
]

_LOGGER = logging.getLogger(__name__)

# Idle period after which a cached channel is evicted (default 10 minutes,
# overridable via the environment variable below).
_EVICTION_PERIOD_KEY = "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS"
if _EVICTION_PERIOD_KEY in os.environ:
    _EVICTION_PERIOD = datetime.timedelta(
        seconds=float(os.environ[_EVICTION_PERIOD_KEY])
    )
    _LOGGER.debug(
        "Setting managed channel eviction period to %s", _EVICTION_PERIOD
    )
else:
    _EVICTION_PERIOD = datetime.timedelta(minutes=10)

# Maximum number of cached channels (default 256, overridable via env var).
_MAXIMUM_CHANNELS_KEY = "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM"
if _MAXIMUM_CHANNELS_KEY in os.environ:
    _MAXIMUM_CHANNELS = int(os.environ[_MAXIMUM_CHANNELS_KEY])
    _LOGGER.debug("Setting maximum managed channels to %d", _MAXIMUM_CHANNELS)
else:
    _MAXIMUM_CHANNELS = 2**8

# Default per-RPC timeout in seconds (default 60, overridable via env var).
_DEFAULT_TIMEOUT_KEY = "GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS"
if _DEFAULT_TIMEOUT_KEY in os.environ:
    _DEFAULT_TIMEOUT = float(os.environ[_DEFAULT_TIMEOUT_KEY])
    _LOGGER.debug("Setting default timeout seconds to %f", _DEFAULT_TIMEOUT)
else:
    _DEFAULT_TIMEOUT = 60.0
|
||||
|
||||
|
||||
def _create_channel(
    target: str,
    options: Sequence[Tuple[str, str]],
    channel_credentials: Optional[grpc.ChannelCredentials],
    compression: Optional[grpc.Compression],
) -> grpc.Channel:
    """Open a new secure channel to *target* with the given configuration."""
    _LOGGER.debug(
        f"Creating secure channel with credentials '{channel_credentials}', "
        f"options '{options}' and compression '{compression}'"
    )
    return grpc.secure_channel(
        target,
        credentials=channel_credentials,
        options=options,
        compression=compression,
    )
|
||||
|
||||
|
||||
class ChannelCache:
    """Process-wide cache of managed channels with background eviction.

    Channels are keyed by (target, options, credentials, compression) and
    evicted either when idle past _EVICTION_PERIOD or when more than
    _MAXIMUM_CHANNELS accumulate. A daemon thread performs the evictions.
    """

    # NOTE(rbellevi): Untyped due to reference cycle.
    _singleton = None
    # Guards _singleton and _mapping; shared with _condition.
    _lock: threading.RLock = threading.RLock()
    # Signaled when the cache shape changes, to wake the eviction thread.
    _condition: threading.Condition = threading.Condition(lock=_lock)
    # Set once the eviction thread has started its loop.
    _eviction_ready: threading.Event = threading.Event()

    # Ordered: oldest-touched entry first, giving LRU-style eviction order.
    _mapping: Dict[CacheKey, Tuple[grpc.Channel, datetime.datetime]]
    _eviction_thread: threading.Thread

    def __init__(self):
        self._mapping = collections.OrderedDict()
        self._eviction_thread = threading.Thread(
            target=ChannelCache._perform_evictions, daemon=True
        )
        self._eviction_thread.start()

    @staticmethod
    def get():
        """Return the process-wide singleton, creating it on first use."""
        with ChannelCache._lock:
            if ChannelCache._singleton is None:
                ChannelCache._singleton = ChannelCache()
        # Wait outside the lock: the eviction thread needs the lock to run.
        ChannelCache._eviction_ready.wait()
        return ChannelCache._singleton

    def _evict_locked(self, key: CacheKey):
        # Caller must hold _lock.
        channel, _ = self._mapping.pop(key)
        _LOGGER.debug(
            "Evicting channel %s with configuration %s.", channel, key
        )
        channel.close()
        del channel

    @staticmethod
    def _perform_evictions():
        # Daemon loop: evict overdue/excess channels, sleep until next due.
        while True:
            with ChannelCache._lock:
                ChannelCache._eviction_ready.set()
                if not ChannelCache._singleton._mapping:
                    ChannelCache._condition.wait()
                elif len(ChannelCache._singleton._mapping) > _MAXIMUM_CHANNELS:
                    key = next(iter(ChannelCache._singleton._mapping.keys()))
                    ChannelCache._singleton._evict_locked(key)
                    # And immediately reevaluate.
                else:
                    # Oldest entry determines how long we may sleep.
                    key, (_, eviction_time) = next(
                        iter(ChannelCache._singleton._mapping.items())
                    )
                    now = datetime.datetime.now()
                    if eviction_time <= now:
                        ChannelCache._singleton._evict_locked(key)
                        continue
                    time_to_eviction = (eviction_time - now).total_seconds()
                    # NOTE: We aim to *eventually* coalesce to a state in
                    # which no overdue channels are in the cache and the
                    # length of the cache is shorter than _MAXIMUM_CHANNELS.
                    # We tolerate momentary states in which these two
                    # criteria are not met.
                    ChannelCache._condition.wait(timeout=time_to_eviction)

    def get_channel(
        self,
        target: str,
        options: Sequence[Tuple[str, str]],
        channel_credentials: Optional[grpc.ChannelCredentials],
        insecure: bool,
        compression: Optional[grpc.Compression],
        method: str,
        _registered_method: bool,
    ) -> Tuple[grpc.Channel, Optional[int]]:
        """Get a channel from cache or creates a new channel.

        This method also takes care of register method for channel,
        which means we'll register a new call handle if we're calling a
        non-registered method for an existing channel.

        Args:
            target: The server address.
            options: Channel arguments for a newly created channel.
            channel_credentials: Credentials for the channel, mutually
                exclusive with `insecure`.
            insecure: Whether to use insecure channel credentials.
            compression: Optional channel-wide compression setting.
            method: Fully-qualified method name, used for registration.
            _registered_method: Whether to obtain a registered call handle.

        Returns:
            A tuple with two items. The first item is the channel, second item is
            the call handle if the method is registered, None if it's not registered.

        Raises:
            ValueError: If both `insecure` and `channel_credentials` are set.
        """
        if insecure and channel_credentials:
            raise ValueError(
                "The insecure option is mutually exclusive with "
                + "the channel_credentials option. Please use one "
                + "or the other."
            )
        if insecure:
            channel_credentials = (
                grpc.experimental.insecure_channel_credentials()
            )
        elif channel_credentials is None:
            _LOGGER.debug("Defaulting to SSL channel credentials.")
            channel_credentials = grpc.ssl_channel_credentials()
        key = (target, options, channel_credentials, compression)
        with self._lock:
            channel_data = self._mapping.get(key, None)
            call_handle = None
            if channel_data is not None:
                channel = channel_data[0]
                # Register a new call handle if we're calling a registered method for an
                # existing channel and this method is not registered.
                if _registered_method:
                    call_handle = channel._get_registered_call_handle(method)
                # Re-insert to move the entry to the back (most recently used)
                # and refresh its eviction deadline.
                self._mapping.pop(key)
                self._mapping[key] = (
                    channel,
                    datetime.datetime.now() + _EVICTION_PERIOD,
                )
                return channel, call_handle
            channel = _create_channel(
                target, options, channel_credentials, compression
            )
            if _registered_method:
                call_handle = channel._get_registered_call_handle(method)
            self._mapping[key] = (
                channel,
                datetime.datetime.now() + _EVICTION_PERIOD,
            )
            # Wake the eviction thread when it may be sleeping on an empty
            # cache or when the cache has overflowed.
            if (
                len(self._mapping) == 1
                or len(self._mapping) >= _MAXIMUM_CHANNELS
            ):
                self._condition.notify()
            return channel, call_handle

    def _test_only_channel_count(self) -> int:
        # Testing hook: current number of cached channels.
        with self._lock:
            return len(self._mapping)
|
||||
|
||||
|
||||
@experimental_api
# pylint: disable=too-many-locals
def unary_unary(
    request: RequestType,
    target: str,
    method: str,
    request_serializer: Optional[Callable[[Any], bytes]] = None,
    response_deserializer: Optional[Callable[[bytes], Any]] = None,
    options: Sequence[Tuple[AnyStr, AnyStr]] = (),
    channel_credentials: Optional[grpc.ChannelCredentials] = None,
    insecure: bool = False,
    call_credentials: Optional[grpc.CallCredentials] = None,
    compression: Optional[grpc.Compression] = None,
    wait_for_ready: Optional[bool] = None,
    timeout: Optional[float] = _DEFAULT_TIMEOUT,
    metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None,
    _registered_method: Optional[bool] = False,
) -> ResponseType:
    """Invokes a unary-unary RPC without an explicitly specified channel.

    THIS IS AN EXPERIMENTAL API.

    This is backed by a per-process cache of channels. Channels are evicted
    from the cache after a fixed period by a background thread. Channels
    will also be evicted if more than a configured maximum accumulate.

    The default eviction period is 10 minutes. One may set the environment
    variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.

    The default maximum number of channels is 256. One may set the
    environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
    this.

    Args:
      request: The single request value for the RPC.
      target: The server address.
      method: The name of the RPC method.
      request_serializer: Optional :term:`serializer` for serializing the request
        message. Request goes unserialized in case None is passed.
      response_deserializer: Optional :term:`deserializer` for deserializing the response
        message. Response goes undeserialized in case None is passed.
      options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core
        runtime) to configure the channel.
      channel_credentials: A credential applied to the whole channel, e.g. the
        return value of grpc.ssl_channel_credentials() or
        grpc.insecure_channel_credentials().
      insecure: If True, specifies channel_credentials as
        :term:`grpc.insecure_channel_credentials()`. This option is mutually
        exclusive with the `channel_credentials` option.
      call_credentials: A call credential applied to each call individually,
        e.g. the output of grpc.metadata_call_credentials() or
        grpc.access_token_call_credentials().
      compression: An optional value indicating the compression method to be
        used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
      wait_for_ready: An optional flag indicating whether the RPC should fail
        immediately if the connection is not ready at the time the RPC is
        invoked, or if it should wait until the connection to the server
        becomes ready. When using this option, the user will likely also want
        to set a timeout. Defaults to True.
      timeout: An optional duration of time in seconds to allow for the RPC,
        after which an exception will be raised. If timeout is unspecified,
        defaults to a timeout controlled by the
        GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS environment variable. If that is
        unset, defaults to 60 seconds. Supply a value of None to indicate that
        no timeout should be enforced.
      metadata: Optional metadata to send to the server.

    Returns:
      The response to the RPC.
    """
    channel, method_handle = ChannelCache.get().get_channel(
        target,
        options,
        channel_credentials,
        insecure,
        compression,
        method,
        _registered_method,
    )
    multicallable = channel.unary_unary(
        method, request_serializer, response_deserializer, method_handle
    )
    # wait_for_ready defaults to True for these managed-channel helpers.
    wait_for_ready = wait_for_ready if wait_for_ready is not None else True
    return multicallable(
        request,
        metadata=metadata,
        wait_for_ready=wait_for_ready,
        credentials=call_credentials,
        timeout=timeout,
    )
|
||||
|
||||
|
||||
@experimental_api
# pylint: disable=too-many-locals
def unary_stream(
    request: RequestType,
    target: str,
    method: str,
    request_serializer: Optional[Callable[[Any], bytes]] = None,
    response_deserializer: Optional[Callable[[bytes], Any]] = None,
    options: Sequence[Tuple[AnyStr, AnyStr]] = (),
    channel_credentials: Optional[grpc.ChannelCredentials] = None,
    insecure: bool = False,
    call_credentials: Optional[grpc.CallCredentials] = None,
    compression: Optional[grpc.Compression] = None,
    wait_for_ready: Optional[bool] = None,
    timeout: Optional[float] = _DEFAULT_TIMEOUT,
    metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None,
    _registered_method: Optional[bool] = False,
) -> Iterator[ResponseType]:
    """Invokes a unary-stream RPC without an explicitly specified channel.

    THIS IS AN EXPERIMENTAL API.

    This is backed by a per-process cache of channels. Channels are evicted
    from the cache after a fixed period by a background thread. Channels
    will also be evicted if more than a configured maximum accumulate.

    The default eviction period is 10 minutes. One may set the environment
    variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.

    The default maximum number of channels is 256. One may set the
    environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
    this.

    Args:
      request: The single request value for the RPC.
      target: The server address.
      method: The name of the RPC method.
      request_serializer: Optional :term:`serializer` for serializing the request
        message. Request goes unserialized in case None is passed.
      response_deserializer: Optional :term:`deserializer` for deserializing the response
        message. Response goes undeserialized in case None is passed.
      options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core
        runtime) to configure the channel.
      channel_credentials: A credential applied to the whole channel, e.g. the
        return value of grpc.ssl_channel_credentials().
      insecure: If True, specifies channel_credentials as
        :term:`grpc.insecure_channel_credentials()`. This option is mutually
        exclusive with the `channel_credentials` option.
      call_credentials: A call credential applied to each call individually,
        e.g. the output of grpc.metadata_call_credentials() or
        grpc.access_token_call_credentials().
      compression: An optional value indicating the compression method to be
        used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
      wait_for_ready: An optional flag indicating whether the RPC should fail
        immediately if the connection is not ready at the time the RPC is
        invoked, or if it should wait until the connection to the server
        becomes ready. When using this option, the user will likely also want
        to set a timeout. Defaults to True.
      timeout: An optional duration of time in seconds to allow for the RPC,
        after which an exception will be raised. If timeout is unspecified,
        defaults to a timeout controlled by the
        GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS environment variable. If that is
        unset, defaults to 60 seconds. Supply a value of None to indicate that
        no timeout should be enforced.
      metadata: Optional metadata to send to the server.

    Returns:
      An iterator of responses.
    """
    channel, method_handle = ChannelCache.get().get_channel(
        target,
        options,
        channel_credentials,
        insecure,
        compression,
        method,
        _registered_method,
    )
    multicallable = channel.unary_stream(
        method, request_serializer, response_deserializer, method_handle
    )
    # wait_for_ready defaults to True for these managed-channel helpers.
    wait_for_ready = wait_for_ready if wait_for_ready is not None else True
    return multicallable(
        request,
        metadata=metadata,
        wait_for_ready=wait_for_ready,
        credentials=call_credentials,
        timeout=timeout,
    )
|
||||
|
||||
|
||||
@experimental_api
# pylint: disable=too-many-locals
def stream_unary(
    request_iterator: Iterator[RequestType],
    target: str,
    method: str,
    request_serializer: Optional[Callable[[Any], bytes]] = None,
    response_deserializer: Optional[Callable[[bytes], Any]] = None,
    options: Sequence[Tuple[AnyStr, AnyStr]] = (),
    channel_credentials: Optional[grpc.ChannelCredentials] = None,
    insecure: bool = False,
    call_credentials: Optional[grpc.CallCredentials] = None,
    compression: Optional[grpc.Compression] = None,
    wait_for_ready: Optional[bool] = None,
    timeout: Optional[float] = _DEFAULT_TIMEOUT,
    metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None,
    _registered_method: Optional[bool] = False,
) -> ResponseType:
    """Invokes a stream-unary RPC without an explicitly specified channel.

    THIS IS AN EXPERIMENTAL API.

    Backed by the same per-process channel cache as the other helpers in
    this module: channels are evicted after a fixed idle period by a
    background thread, and also when more than a configured maximum
    accumulate. Configure via the
    "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" (default 10 minutes)
    and "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" (default 256) environment
    variables.

    Args:
      request_iterator: An iterator that yields request values for the RPC.
      target: The server address.
      method: The name of the RPC method.
      request_serializer: Optional :term:`serializer` for the request
        message; requests go unserialized when None.
      response_deserializer: Optional :term:`deserializer` for the response
        message; the response goes undeserialized when None.
      options: Optional :term:`channel_arguments` key-value pairs to
        configure the channel.
      channel_credentials: A credential applied to the whole channel, e.g.
        the return value of grpc.ssl_channel_credentials().
      call_credentials: A call credential applied to each call individually,
        e.g. the output of grpc.metadata_call_credentials() or
        grpc.access_token_call_credentials().
      insecure: If True, specifies channel_credentials as
        :term:`grpc.insecure_channel_credentials()`; mutually exclusive
        with the `channel_credentials` option.
      compression: Optional compression method used over the channel's
        lifetime, e.g. grpc.Compression.Gzip.
      wait_for_ready: Optional flag controlling whether the RPC waits for
        the connection to become ready rather than failing immediately.
        Defaults to True; users will likely also want to set a timeout.
      timeout: Optional duration in seconds to allow for the RPC. When
        unspecified, controlled by the GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS
        environment variable (60 seconds if unset). Pass None to disable
        the timeout.
      metadata: Optional metadata to send to the server.

    Returns:
      The response to the RPC.
    """
    cache = ChannelCache.get()
    channel, call_handle = cache.get_channel(
        target,
        options,
        channel_credentials,
        insecure,
        compression,
        method,
        _registered_method,
    )
    invoke = channel.stream_unary(
        method, request_serializer, response_deserializer, call_handle
    )
    if wait_for_ready is None:
        wait_for_ready = True
    return invoke(
        request_iterator,
        metadata=metadata,
        wait_for_ready=wait_for_ready,
        credentials=call_credentials,
        timeout=timeout,
    )
|
||||
|
||||
|
||||
@experimental_api
# pylint: disable=too-many-locals
def stream_stream(
    request_iterator: Iterator[RequestType],
    target: str,
    method: str,
    request_serializer: Optional[Callable[[Any], bytes]] = None,
    response_deserializer: Optional[Callable[[bytes], Any]] = None,
    options: Sequence[Tuple[AnyStr, AnyStr]] = (),
    channel_credentials: Optional[grpc.ChannelCredentials] = None,
    insecure: bool = False,
    call_credentials: Optional[grpc.CallCredentials] = None,
    compression: Optional[grpc.Compression] = None,
    wait_for_ready: Optional[bool] = None,
    timeout: Optional[float] = _DEFAULT_TIMEOUT,
    metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None,
    _registered_method: Optional[bool] = False,
) -> Iterator[ResponseType]:
    """Invokes a stream-stream RPC without an explicitly specified channel.

    THIS IS AN EXPERIMENTAL API.

    Backed by the same per-process channel cache as the other helpers in
    this module: channels are evicted after a fixed idle period by a
    background thread, and also when more than a configured maximum
    accumulate. Configure via the
    "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" (default 10 minutes)
    and "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" (default 256) environment
    variables.

    Args:
      request_iterator: An iterator that yields request values for the RPC.
      target: The server address.
      method: The name of the RPC method.
      request_serializer: Optional :term:`serializer` for the request
        message; requests go unserialized when None.
      response_deserializer: Optional :term:`deserializer` for the response
        message; responses go undeserialized when None.
      options: Optional :term:`channel_arguments` key-value pairs to
        configure the channel.
      channel_credentials: A credential applied to the whole channel, e.g.
        the return value of grpc.ssl_channel_credentials().
      call_credentials: A call credential applied to each call individually,
        e.g. the output of grpc.metadata_call_credentials() or
        grpc.access_token_call_credentials().
      insecure: If True, specifies channel_credentials as
        :term:`grpc.insecure_channel_credentials()`; mutually exclusive
        with the `channel_credentials` option.
      compression: Optional compression method used over the channel's
        lifetime, e.g. grpc.Compression.Gzip.
      wait_for_ready: Optional flag controlling whether the RPC waits for
        the connection to become ready rather than failing immediately.
        Defaults to True; users will likely also want to set a timeout.
      timeout: Optional duration in seconds to allow for the RPC. When
        unspecified, controlled by the GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS
        environment variable (60 seconds if unset). Pass None to disable
        the timeout.
      metadata: Optional metadata to send to the server.

    Returns:
      An iterator of responses.
    """
    cache = ChannelCache.get()
    channel, call_handle = cache.get_channel(
        target,
        options,
        channel_credentials,
        insecure,
        compression,
        method,
        _registered_method,
    )
    invoke = channel.stream_stream(
        method, request_serializer, response_deserializer, call_handle
    )
    if wait_for_ready is None:
        wait_for_ready = True
    return invoke(
        request_iterator,
        metadata=metadata,
        wait_for_ready=wait_for_ready,
        credentials=call_credentials,
        timeout=timeout,
    )
|
||||
@@ -0,0 +1,96 @@
|
||||
# Copyright 2022 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Common types for gRPC Sync API"""
|
||||
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Callable,
|
||||
Iterable,
|
||||
Iterator,
|
||||
Optional,
|
||||
Sequence,
|
||||
Tuple,
|
||||
TypeVar,
|
||||
Union,
|
||||
)
|
||||
|
||||
from grpc._cython import cygrpc
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from grpc import ServicerContext
|
||||
from grpc._server import _RPCState
|
||||
|
||||
# Type variables for request/response message payloads.
RequestType = TypeVar("RequestType")
ResponseType = TypeVar("ResponseType")

# Callables converting a message to/from its wire-format bytes.
SerializingFunction = Callable[[Any], bytes]
DeserializingFunction = Callable[[bytes], Any]

# RPC metadata: a sequence of (key, value) pairs.
MetadataType = Sequence[Tuple[str, Union[str, bytes]]]
# A single (name, value) channel argument.
ChannelArgumentType = Tuple[str, Any]

# Callback invoked with one argument (typically the call or future).
DoneCallbackType = Callable[[Any], None]
# Callback invoked with no arguments.
NullaryCallbackType = Callable[[], None]

# Iterables of request/response messages for streaming RPCs.
RequestIterableType = Iterable[Any]
ResponseIterableType = Iterable[Any]

# Callable receiving a cygrpc.BaseEvent and returning a bool.
UserTag = Callable[[cygrpc.BaseEvent], bool]
# Factory producing a cygrpc.IntegratedCall from raw call parameters.
IntegratedCallFactory = Callable[
    [
        int,
        bytes,
        Optional[str],
        Optional[float],
        Optional[MetadataType],
        Optional[cygrpc.CallCredentials],
        Sequence[Sequence[cygrpc.Operation]],
        UserTag,
        Any,
        Optional[int],
    ],
    cygrpc.IntegratedCall,
]
# Server-side tag result: the RPC state (if any) plus follow-up callbacks.
ServerTagCallbackType = Tuple[
    Optional["_RPCState"], Sequence[NullaryCallbackType]
]
ServerCallbackTag = Callable[[cygrpc.BaseEvent], ServerTagCallbackType]
# Union of the eight supported servicer method shapes: {unary, stream}
# request x {unary, stream} response, with or without a trailing
# response-callback parameter.
ArityAgnosticMethodHandler = Union[
    Callable[
        [RequestType, "ServicerContext", Callable[[ResponseType], None]],
        ResponseType,
    ],
    Callable[
        [RequestType, "ServicerContext", Callable[[ResponseType], None]],
        Iterator[ResponseType],
    ],
    Callable[
        [
            Iterator[RequestType],
            "ServicerContext",
            Callable[[ResponseType], None],
        ],
        ResponseType,
    ],
    Callable[
        [
            Iterator[RequestType],
            "ServicerContext",
            Callable[[ResponseType], None],
        ],
        Iterator[ResponseType],
    ],
    Callable[[RequestType, "ServicerContext"], ResponseType],
    Callable[[RequestType, "ServicerContext"], Iterator[ResponseType]],
    Callable[[Iterator[RequestType], "ServicerContext"], ResponseType],
    Callable[
        [Iterator[RequestType], "ServicerContext"], Iterator[ResponseType]
    ],
]
|
||||
@@ -0,0 +1,220 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Internal utilities for gRPC Python."""
|
||||
|
||||
import collections
|
||||
import logging
|
||||
import threading
|
||||
import time
|
||||
from typing import Callable, Dict, Optional, Sequence
|
||||
|
||||
import grpc
|
||||
from grpc import _common
|
||||
from grpc._typing import DoneCallbackType
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE = (
|
||||
'Exception calling connectivity future "done" callback!'
|
||||
)
|
||||
|
||||
|
||||
class RpcMethodHandler(
    collections.namedtuple(
        "_RpcMethodHandler",
        (
            "request_streaming",
            "response_streaming",
            "request_deserializer",
            "response_serializer",
            "unary_unary",
            "unary_stream",
            "stream_unary",
            "stream_stream",
        ),
    ),
    grpc.RpcMethodHandler,
):
    """An immutable namedtuple-backed implementation of grpc.RpcMethodHandler.

    Carries the streaming flags, the (de)serializers, and one behavior slot
    per RPC cardinality; the fields are defined by the namedtuple base.
    """

    pass
|
||||
|
||||
|
||||
class DictionaryGenericHandler(grpc.ServiceRpcHandler):
    """Maps fully-qualified method names to handlers for a single service."""

    # Name of the service this handler serves.
    _name: str
    # Mapping from fully-qualified method name to its handler.
    _method_handlers: Dict[str, grpc.RpcMethodHandler]

    def __init__(
        self, service: str, method_handlers: Dict[str, grpc.RpcMethodHandler]
    ):
        self._name = service
        # Re-key the handlers by their fully-qualified method name so lookup
        # in service() can use handler_call_details.method directly.
        self._method_handlers = {
            _common.fully_qualified_method(service, method): method_handler
            for method, method_handler in method_handlers.items()
        }

    def service_name(self) -> str:
        """Returns the name of the service this handler serves."""
        return self._name

    def service(
        self, handler_call_details: grpc.HandlerCallDetails
    ) -> Optional[grpc.RpcMethodHandler]:
        """Returns the handler registered for the named method, or None."""
        details_method = handler_call_details.method
        return self._method_handlers.get(
            details_method
        )  # pytype: disable=attribute-error
|
||||
|
||||
|
||||
class _ChannelReadyFuture(grpc.Future):
    """A Future that matures once the watched channel reports READY."""

    # Guards all mutable state below and signals state transitions.
    _condition: threading.Condition
    # The channel whose connectivity is being watched.
    _channel: grpc.Channel
    # True once the channel has been observed READY.
    _matured: bool
    # True once cancel() succeeded before maturation.
    _cancelled: bool
    # Callbacks to run on maturation/cancellation; set to None after either
    # event so later add_done_callback() calls invoke immediately.
    _done_callbacks: Sequence[Callable]

    def __init__(self, channel: grpc.Channel):
        self._condition = threading.Condition()
        self._channel = channel

        self._matured = False
        self._cancelled = False
        self._done_callbacks = []

    def _block(self, timeout: Optional[float]) -> None:
        """Waits until the future matures or is cancelled.

        Args:
          timeout: Seconds to wait, or None to wait indefinitely.

        Raises:
          grpc.FutureCancelledError: If the future was cancelled.
          grpc.FutureTimeoutError: If `timeout` elapses first.
        """
        until = None if timeout is None else time.time() + timeout
        with self._condition:
            while True:
                if self._cancelled:
                    raise grpc.FutureCancelledError()
                if self._matured:
                    return
                if until is None:
                    self._condition.wait()
                else:
                    # Recompute the remaining budget on every (possibly
                    # spurious) wakeup.
                    remaining = until - time.time()
                    if remaining < 0:
                        raise grpc.FutureTimeoutError()
                    self._condition.wait(timeout=remaining)

    def _update(self, connectivity: Optional[grpc.ChannelConnectivity]) -> None:
        """Connectivity-subscription callback; matures the future on READY."""
        with self._condition:
            if (
                not self._cancelled
                and connectivity is grpc.ChannelConnectivity.READY
            ):
                self._matured = True
                self._channel.unsubscribe(self._update)
                self._condition.notify_all()
                # Snapshot the callbacks and clear the list under the lock.
                done_callbacks = tuple(self._done_callbacks)
                self._done_callbacks = None
            else:
                return

        # Invoke user callbacks outside the lock to avoid deadlocks in
        # callback code that touches this future.
        for done_callback in done_callbacks:
            try:
                done_callback(self)
            except Exception:  # pylint: disable=broad-except
                _LOGGER.exception(_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE)

    def cancel(self) -> bool:
        """Cancels the wait; returns False if the future already matured."""
        with self._condition:
            if not self._matured:
                self._cancelled = True
                self._channel.unsubscribe(self._update)
                self._condition.notify_all()
                done_callbacks = tuple(self._done_callbacks)
                self._done_callbacks = None
            else:
                return False

        # As in _update: run callbacks outside the lock.
        for done_callback in done_callbacks:
            try:
                done_callback(self)
            except Exception:  # pylint: disable=broad-except
                _LOGGER.exception(_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE)

        return True

    def cancelled(self) -> bool:
        with self._condition:
            return self._cancelled

    def running(self) -> bool:
        with self._condition:
            return not self._cancelled and not self._matured

    def done(self) -> bool:
        with self._condition:
            return self._cancelled or self._matured

    def result(self, timeout: Optional[float] = None) -> None:
        """Blocks until READY; this future carries no result value."""
        self._block(timeout)

    def exception(self, timeout: Optional[float] = None) -> None:
        """Blocks until READY; returns None (no exception is ever stored)."""
        self._block(timeout)

    def traceback(self, timeout: Optional[float] = None) -> None:
        """Blocks until READY; returns None (no traceback is ever stored)."""
        self._block(timeout)

    def add_done_callback(self, fn: DoneCallbackType):
        with self._condition:
            if not self._cancelled and not self._matured:
                self._done_callbacks.append(fn)
                return

        # Already done or cancelled: invoke immediately, outside the lock.
        fn(self)

    def start(self):
        """Subscribes to connectivity updates, asking the channel to connect."""
        with self._condition:
            self._channel.subscribe(self._update, try_to_connect=True)

    def __del__(self):
        # Best-effort unsubscribe so a dropped future does not leak its
        # connectivity subscription.
        with self._condition:
            if not self._cancelled and not self._matured:
                self._channel.unsubscribe(self._update)
|
||||
|
||||
|
||||
def channel_ready_future(channel: grpc.Channel) -> _ChannelReadyFuture:
    """Creates and starts a Future that matures when `channel` is READY.

    Args:
      channel: The grpc.Channel whose connectivity is to be watched.

    Returns:
      A _ChannelReadyFuture already subscribed to connectivity updates.
    """
    future = _ChannelReadyFuture(channel)
    future.start()
    return future
|
||||
|
||||
|
||||
def first_version_is_lower(version1: str, version2: str) -> bool:
    """
    Compares two versions in the format '1.60.1' or '1.60.1.dev0'.

    This method will be used in all stubs generated by grpcio-tools to check whether
    the stub version is compatible with the runtime grpcio.

    Args:
        version1: The first version string.
        version2: The second version string.

    Returns:
        True if version1 is lower, False otherwise.
    """
    version1_list = version1.split(".")
    version2_list = version2.split(".")

    try:
        # Compare the three numeric components major.minor.patch in order.
        for i in range(3):
            # Convert each component once per iteration instead of twice.
            component1 = int(version1_list[i])
            component2 = int(version2_list[i])
            if component1 < component2:
                return True
            if component1 > component2:
                return False
    except (ValueError, IndexError):
        # Return False when a component is not an integer (ValueError) or a
        # version has fewer than three components (IndexError) — the original
        # code let the latter escape as an uncaught IndexError.
        return False

    # The version without dev0 will be considered lower.
    return len(version1_list) < len(version2_list)
|
||||
@@ -0,0 +1,95 @@
|
||||
# Copyright 2019 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""gRPC's Asynchronous Python API.
|
||||
|
||||
gRPC Async API objects may only be used on the thread on which they were
|
||||
created. AsyncIO doesn't provide thread safety for most of its APIs.
|
||||
"""
|
||||
|
||||
from typing import Any, Optional, Sequence, Tuple
|
||||
|
||||
import grpc
|
||||
from grpc._cython.cygrpc import AbortError
|
||||
from grpc._cython.cygrpc import BaseError
|
||||
from grpc._cython.cygrpc import EOF
|
||||
from grpc._cython.cygrpc import InternalError
|
||||
from grpc._cython.cygrpc import UsageError
|
||||
from grpc._cython.cygrpc import init_grpc_aio
|
||||
from grpc._cython.cygrpc import shutdown_grpc_aio
|
||||
|
||||
from ._base_call import Call
|
||||
from ._base_call import RpcContext
|
||||
from ._base_call import StreamStreamCall
|
||||
from ._base_call import StreamUnaryCall
|
||||
from ._base_call import UnaryStreamCall
|
||||
from ._base_call import UnaryUnaryCall
|
||||
from ._base_channel import Channel
|
||||
from ._base_channel import StreamStreamMultiCallable
|
||||
from ._base_channel import StreamUnaryMultiCallable
|
||||
from ._base_channel import UnaryStreamMultiCallable
|
||||
from ._base_channel import UnaryUnaryMultiCallable
|
||||
from ._base_server import Server
|
||||
from ._base_server import ServicerContext
|
||||
from ._call import AioRpcError
|
||||
from ._channel import insecure_channel
|
||||
from ._channel import secure_channel
|
||||
from ._interceptor import ClientCallDetails
|
||||
from ._interceptor import ClientInterceptor
|
||||
from ._interceptor import InterceptedUnaryUnaryCall
|
||||
from ._interceptor import ServerInterceptor
|
||||
from ._interceptor import StreamStreamClientInterceptor
|
||||
from ._interceptor import StreamUnaryClientInterceptor
|
||||
from ._interceptor import UnaryStreamClientInterceptor
|
||||
from ._interceptor import UnaryUnaryClientInterceptor
|
||||
from ._metadata import Metadata
|
||||
from ._server import server
|
||||
from ._typing import ChannelArgumentType
|
||||
|
||||
################################### __all__ #################################
|
||||
|
||||
# Public names re-exported by the grpc.aio package.
__all__ = (
    "EOF",
    "AbortError",
    "AioRpcError",
    "BaseError",
    "Call",
    "Channel",
    "ClientCallDetails",
    "ClientInterceptor",
    "InterceptedUnaryUnaryCall",
    "InternalError",
    "Metadata",
    "RpcContext",
    "Server",
    "ServerInterceptor",
    "ServicerContext",
    "StreamStreamCall",
    "StreamStreamClientInterceptor",
    "StreamStreamMultiCallable",
    "StreamUnaryCall",
    "StreamUnaryClientInterceptor",
    "StreamUnaryMultiCallable",
    "UnaryStreamCall",
    "UnaryStreamClientInterceptor",
    "UnaryStreamMultiCallable",
    "UnaryUnaryCall",
    "UnaryUnaryClientInterceptor",
    "UnaryUnaryMultiCallable",
    "UsageError",
    "init_grpc_aio",
    "insecure_channel",
    "secure_channel",
    "server",
    "shutdown_grpc_aio",
)
|
||||
@@ -0,0 +1,257 @@
|
||||
# Copyright 2019 The gRPC Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Abstract base classes for client-side Call objects.
|
||||
|
||||
Call objects represents the RPC itself, and offer methods to access / modify
|
||||
its information. They also offer methods to manipulate the life-cycle of the
|
||||
RPC, e.g. cancellation.
|
||||
"""
|
||||
|
||||
from abc import ABCMeta
|
||||
from abc import abstractmethod
|
||||
from typing import Any, AsyncIterator, Generator, Generic, Optional, Union
|
||||
|
||||
import grpc
|
||||
|
||||
from ._metadata import Metadata
|
||||
from ._typing import DoneCallbackType
|
||||
from ._typing import EOFType
|
||||
from ._typing import RequestType
|
||||
from ._typing import ResponseType
|
||||
|
||||
__all__ = "Call", "RpcContext", "UnaryStreamCall", "UnaryUnaryCall"
|
||||
|
||||
|
||||
class RpcContext(metaclass=ABCMeta):
    """Provides RPC-related information and action."""

    @abstractmethod
    def cancelled(self) -> bool:
        """Return True if the RPC is cancelled.

        The RPC is cancelled when the cancellation was requested with cancel().

        Returns:
          A bool indicates whether the RPC is cancelled or not.
        """

    @abstractmethod
    def done(self) -> bool:
        """Return True if the RPC is done.

        An RPC is done if the RPC is completed, cancelled or aborted.

        Returns:
          A bool indicates if the RPC is done.
        """

    @abstractmethod
    def time_remaining(self) -> Optional[float]:
        """Describes the length of allowed time remaining for the RPC.

        Returns:
          A nonnegative float indicating the length of allowed time in seconds
          remaining for the RPC to complete before it is considered to have
          timed out, or None if no deadline was specified for the RPC.
        """

    @abstractmethod
    def cancel(self) -> bool:
        """Cancels the RPC.

        Idempotent and has no effect if the RPC has already terminated.

        Returns:
          A bool indicates if the cancellation is performed or not.
        """

    @abstractmethod
    def add_done_callback(self, callback: DoneCallbackType) -> None:
        """Registers a callback to be called on RPC termination.

        Args:
          callback: A callable object will be called with the call object as
          its only argument.
        """
|
||||
|
||||
|
||||
class Call(RpcContext, metaclass=ABCMeta):
    """The abstract base class of an RPC on the client-side."""

    @abstractmethod
    async def initial_metadata(self) -> Metadata:
        """Accesses the initial metadata sent by the server.

        Returns:
          The initial :term:`metadata`.
        """

    @abstractmethod
    async def trailing_metadata(self) -> Metadata:
        """Accesses the trailing metadata sent by the server.

        Returns:
          The trailing :term:`metadata`.
        """

    @abstractmethod
    async def code(self) -> grpc.StatusCode:
        """Accesses the status code sent by the server.

        Returns:
          The StatusCode value for the RPC.
        """

    @abstractmethod
    async def details(self) -> str:
        """Accesses the details sent by the server.

        Returns:
          The details string of the RPC.
        """

    @abstractmethod
    async def wait_for_connection(self) -> None:
        """Waits until connected to peer and raises aio.AioRpcError if failed.

        This is an EXPERIMENTAL method.

        This method ensures the RPC has been successfully connected. Otherwise,
        an AioRpcError will be raised to explain the reason of the connection
        failure.

        This method is recommended for building retry mechanisms.
        """
|
||||
|
||||
|
||||
class UnaryUnaryCall(
    Generic[RequestType, ResponseType], Call, metaclass=ABCMeta
):
    """The abstract base class of a unary-unary RPC on the client-side."""

    @abstractmethod
    def __await__(self) -> Generator[Any, None, ResponseType]:
        """Await the response message to be ready.

        Returns:
          The response message of the RPC.
        """
|
||||
|
||||
|
||||
class UnaryStreamCall(
    Generic[RequestType, ResponseType], Call, metaclass=ABCMeta
):
    """The abstract base class of a unary-stream RPC on the client-side."""

    @abstractmethod
    def __aiter__(self) -> AsyncIterator[ResponseType]:
        """Returns the async iterator representation that yields messages.

        Under the hood, it is calling the "read" method.

        Returns:
          An async iterator object that yields messages.
        """

    @abstractmethod
    async def read(self) -> Union[EOFType, ResponseType]:
        """Reads one message from the stream.

        Read operations must be serialized when called from multiple
        coroutines.

        Note that the iterator and read/write APIs may not be mixed on
        a single RPC.

        Returns:
          A response message, or an `grpc.aio.EOF` to indicate the end of the
          stream.
        """
|
||||
|
||||
|
||||
class StreamUnaryCall(
    Generic[RequestType, ResponseType], Call, metaclass=ABCMeta
):
    """The abstract base class of a stream-unary RPC on the client-side."""

    @abstractmethod
    async def write(self, request: RequestType) -> None:
        """Writes one message to the stream.

        Note that the iterator and read/write APIs may not be mixed on
        a single RPC.

        Raises:
          An RpcError exception if the write failed.
        """

    @abstractmethod
    async def done_writing(self) -> None:
        """Notifies server that the client is done sending messages.

        After done_writing is called, any additional invocation to the write
        function will fail. This function is idempotent.
        """

    @abstractmethod
    def __await__(self) -> Generator[Any, None, ResponseType]:
        """Await the response message to be ready.

        Returns:
          The response message of the stream.
        """
|
||||
|
||||
|
||||
class StreamStreamCall(
    Generic[RequestType, ResponseType], Call, metaclass=ABCMeta
):
    """The abstract base class of a stream-stream RPC on the client-side."""

    @abstractmethod
    def __aiter__(self) -> AsyncIterator[ResponseType]:
        """Returns the async iterator representation that yields messages.

        Under the hood, it is calling the "read" method.

        Returns:
          An async iterator object that yields messages.
        """

    @abstractmethod
    async def read(self) -> Union[EOFType, ResponseType]:
        """Reads one message from the stream.

        Read operations must be serialized when called from multiple
        coroutines.

        Note that the iterator and read/write APIs may not be mixed on
        a single RPC.

        Returns:
          A response message, or an `grpc.aio.EOF` to indicate the end of the
          stream.
        """

    @abstractmethod
    async def write(self, request: RequestType) -> None:
        """Writes one message to the stream.

        Note that the iterator and read/write APIs may not be mixed on
        a single RPC.

        Raises:
          An RpcError exception if the write failed.
        """

    @abstractmethod
    async def done_writing(self) -> None:
        """Notifies server that the client is done sending messages.

        After done_writing is called, any additional invocation to the write
        function will fail. This function is idempotent.
        """
|
||||
@@ -0,0 +1,364 @@
|
||||
# Copyright 2020 The gRPC Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Abstract base classes for Channel objects and Multicallable objects."""
|
||||
|
||||
import abc
|
||||
from typing import Generic, Optional
|
||||
|
||||
import grpc
|
||||
|
||||
from . import _base_call
|
||||
from ._typing import DeserializingFunction
|
||||
from ._typing import MetadataType
|
||||
from ._typing import RequestIterableType
|
||||
from ._typing import RequestType
|
||||
from ._typing import ResponseType
|
||||
from ._typing import SerializingFunction
|
||||
|
||||
|
||||
class UnaryUnaryMultiCallable(Generic[RequestType, ResponseType], abc.ABC):
    """Enables asynchronous invocation of a unary-call RPC."""

    @abc.abstractmethod
    def __call__(
        self,
        request: RequestType,
        *,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> _base_call.UnaryUnaryCall[RequestType, ResponseType]:
        """Asynchronously invokes the underlying RPC.

        Args:
          request: The request value for the RPC.
          timeout: An optional duration of time in seconds to allow
            for the RPC.
          metadata: Optional :term:`metadata` to be transmitted to the
            service-side of the RPC.
          credentials: An optional CallCredentials for the RPC. Only valid for
            secure Channel.
          wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
          compression: An element of grpc.Compression, e.g.
            grpc.Compression.Gzip.

        Returns:
          A UnaryUnaryCall object.

        Raises:
          RpcError: Indicates that the RPC terminated with non-OK status. The
            raised RpcError will also be a Call for the RPC affording the RPC's
            metadata, status code, and details.
        """
|
||||
|
||||
|
||||
class UnaryStreamMultiCallable(Generic[RequestType, ResponseType], abc.ABC):
    """Enables asynchronous invocation of a server-streaming RPC."""

    @abc.abstractmethod
    def __call__(
        self,
        request: RequestType,
        *,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> _base_call.UnaryStreamCall[RequestType, ResponseType]:
        """Asynchronously invokes the underlying RPC.

        Args:
          request: The request value for the RPC.
          timeout: An optional duration of time in seconds to allow
            for the RPC.
          metadata: Optional :term:`metadata` to be transmitted to the
            service-side of the RPC.
          credentials: An optional CallCredentials for the RPC. Only valid for
            secure Channel.
          wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
          compression: An element of grpc.Compression, e.g.
            grpc.Compression.Gzip.

        Returns:
          A UnaryStreamCall object.

        Raises:
          RpcError: Indicates that the RPC terminated with non-OK status. The
            raised RpcError will also be a Call for the RPC affording the RPC's
            metadata, status code, and details.
        """
|
||||
|
||||
|
||||
class StreamUnaryMultiCallable(abc.ABC):
    """Enables asynchronous invocation of a client-streaming RPC."""

    @abc.abstractmethod
    def __call__(
        self,
        request_iterator: Optional[RequestIterableType] = None,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> _base_call.StreamUnaryCall:
        """Asynchronously invokes the underlying RPC.

        Args:
          request_iterator: An optional async iterable or iterable of request
            messages for the RPC.
          timeout: An optional duration of time in seconds to allow
            for the RPC.
          metadata: Optional :term:`metadata` to be transmitted to the
            service-side of the RPC.
          credentials: An optional CallCredentials for the RPC. Only valid for
            secure Channel.
          wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
          compression: An element of grpc.Compression, e.g.
            grpc.Compression.Gzip.

        Returns:
          A StreamUnaryCall object.

        Raises:
          RpcError: Indicates that the RPC terminated with non-OK status. The
            raised RpcError will also be a Call for the RPC affording the RPC's
            metadata, status code, and details.
        """
|
||||
|
||||
|
||||
class StreamStreamMultiCallable(abc.ABC):
    """Enables asynchronous invocation of a bidirectional-streaming RPC."""

    @abc.abstractmethod
    def __call__(
        self,
        request_iterator: Optional[RequestIterableType] = None,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> _base_call.StreamStreamCall:
        """Asynchronously invokes the underlying RPC.

        Args:
          request_iterator: An optional async iterable or iterable of request
            messages for the RPC.
          timeout: An optional duration of time in seconds to allow
            for the RPC.
          metadata: Optional :term:`metadata` to be transmitted to the
            service-side of the RPC.
          credentials: An optional CallCredentials for the RPC. Only valid for
            secure Channel.
          wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism.
          compression: An element of grpc.Compression, e.g.
            grpc.Compression.Gzip.

        Returns:
          A StreamStreamCall object.

        Raises:
          RpcError: Indicates that the RPC terminated with non-OK status. The
            raised RpcError will also be a Call for the RPC affording the RPC's
            metadata, status code, and details.
        """
|
||||
|
||||
|
||||
class Channel(abc.ABC):
|
||||
"""Enables asynchronous RPC invocation as a client.
|
||||
|
||||
Channel objects implement the Asynchronous Context Manager (aka. async
|
||||
with) type, although they are not supported to be entered and exited
|
||||
multiple times.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
async def __aenter__(self):
|
||||
"""Starts an asynchronous context manager.
|
||||
|
||||
Returns:
|
||||
Channel the channel that was instantiated.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
async def __aexit__(self, exc_type, exc_val, exc_tb):
|
||||
"""Finishes the asynchronous context manager by closing the channel.
|
||||
|
||||
Still active RPCs will be cancelled.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
async def close(self, grace: Optional[float] = None):
|
||||
"""Closes this Channel and releases all resources held by it.
|
||||
|
||||
This method immediately stops the channel from executing new RPCs in
|
||||
all cases.
|
||||
|
||||
If a grace period is specified, this method waits until all active
|
||||
RPCs are finished or until the grace period is reached. RPCs that haven't
|
||||
been terminated within the grace period are aborted.
|
||||
If a grace period is not specified (by passing None for grace),
|
||||
all existing RPCs are cancelled immediately.
|
||||
|
||||
This method is idempotent.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_state(
|
||||
self, try_to_connect: bool = False
|
||||
) -> grpc.ChannelConnectivity:
|
||||
"""Checks the connectivity state of a channel.
|
||||
|
||||
This is an EXPERIMENTAL API.
|
||||
|
||||
If the channel reaches a stable connectivity state, it is guaranteed
|
||||
that the return value of this function will eventually converge to that
|
||||
state.
|
||||
|
||||
Args:
|
||||
try_to_connect: a bool indicate whether the Channel should try to
|
||||
connect to peer or not.
|
||||
|
||||
Returns: A ChannelConnectivity object.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
async def wait_for_state_change(
|
||||
self,
|
||||
last_observed_state: grpc.ChannelConnectivity,
|
||||
) -> None:
|
||||
"""Waits for a change in connectivity state.
|
||||
|
||||
This is an EXPERIMENTAL API.
|
||||
|
||||
The function blocks until there is a change in the channel connectivity
|
||||
state from the "last_observed_state". If the state is already
|
||||
different, this function will return immediately.
|
||||
|
||||
There is an inherent race between the invocation of
|
||||
"Channel.wait_for_state_change" and "Channel.get_state". The state can
|
||||
change arbitrary many times during the race, so there is no way to
|
||||
observe every state transition.
|
||||
|
||||
If there is a need to put a timeout for this function, please refer to
|
||||
"asyncio.wait_for".
|
||||
|
||||
Args:
|
||||
last_observed_state: A grpc.ChannelConnectivity object representing
|
||||
the last known state.
|
||||
"""
|
||||
|
||||
    @abc.abstractmethod
    async def channel_ready(self) -> None:
        """Creates a coroutine that blocks until the Channel is READY."""
|
||||
|
||||
    @abc.abstractmethod
    def unary_unary(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> UnaryUnaryMultiCallable:
        """Creates a UnaryUnaryMultiCallable for a unary-unary method.

        Args:
            method: The name of the RPC method.
            request_serializer: Optional :term:`serializer` for serializing the
                request message. Request goes unserialized in case None is
                passed.
            response_deserializer: Optional :term:`deserializer` for
                deserializing the response message. Response goes
                undeserialized in case None is passed.
            _registered_method: Implementation Private. Optional: A bool
                representing whether the method is registered.

        Returns:
            A UnaryUnaryMultiCallable value for the named unary-unary method.
        """
|
||||
|
||||
    @abc.abstractmethod
    def unary_stream(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> UnaryStreamMultiCallable:
        """Creates a UnaryStreamMultiCallable for a unary-stream method.

        Args:
            method: The name of the RPC method.
            request_serializer: Optional :term:`serializer` for serializing the
                request message. Request goes unserialized in case None is
                passed.
            response_deserializer: Optional :term:`deserializer` for
                deserializing the response message. Response goes
                undeserialized in case None is passed.
            _registered_method: Implementation Private. Optional: A bool
                representing whether the method is registered.

        Returns:
            A UnaryStreamMultiCallable value for the named unary-stream method.
        """
|
||||
|
||||
    @abc.abstractmethod
    def stream_unary(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> StreamUnaryMultiCallable:
        """Creates a StreamUnaryMultiCallable for a stream-unary method.

        Args:
            method: The name of the RPC method.
            request_serializer: Optional :term:`serializer` for serializing the
                request message. Request goes unserialized in case None is
                passed.
            response_deserializer: Optional :term:`deserializer` for
                deserializing the response message. Response goes
                undeserialized in case None is passed.
            _registered_method: Implementation Private. Optional: A bool
                representing whether the method is registered.

        Returns:
            A StreamUnaryMultiCallable value for the named stream-unary method.
        """
|
||||
|
||||
    @abc.abstractmethod
    def stream_stream(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> StreamStreamMultiCallable:
        """Creates a StreamStreamMultiCallable for a stream-stream method.

        Args:
            method: The name of the RPC method.
            request_serializer: Optional :term:`serializer` for serializing the
                request message. Request goes unserialized in case None is
                passed.
            response_deserializer: Optional :term:`deserializer` for
                deserializing the response message. Response goes
                undeserialized in case None is passed.
            _registered_method: Implementation Private. Optional: A bool
                representing whether the method is registered.

        Returns:
            A StreamStreamMultiCallable value for the named stream-stream
            method.
        """
|
||||
@@ -0,0 +1,387 @@
|
||||
# Copyright 2020 The gRPC Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Abstract base classes for server-side classes."""
|
||||
|
||||
import abc
|
||||
from typing import Generic, Iterable, Mapping, NoReturn, Optional, Sequence
|
||||
|
||||
import grpc
|
||||
|
||||
from ._metadata import Metadata # pylint: disable=unused-import
|
||||
from ._typing import DoneCallbackType
|
||||
from ._typing import MetadataType
|
||||
from ._typing import RequestType
|
||||
from ._typing import ResponseType
|
||||
|
||||
|
||||
class Server(abc.ABC):
    """Serves RPCs."""

    @abc.abstractmethod
    def add_generic_rpc_handlers(
        self, generic_rpc_handlers: Sequence[grpc.GenericRpcHandler]
    ) -> None:
        """Registers GenericRpcHandlers with this Server.

        This method is only safe to call before the server is started.

        Args:
            generic_rpc_handlers: A sequence of GenericRpcHandlers that will be
                used to service RPCs.
        """

    @abc.abstractmethod
    def add_insecure_port(self, address: str) -> int:
        """Opens an insecure port for accepting RPCs.

        A port is a communication endpoint that is used by networking
        protocols, like TCP and UDP. To date, we only support TCP.

        This method may only be called before starting the server.

        Args:
            address: The address for which to open a port. If the port is 0,
                or not specified in the address, then the gRPC runtime will
                choose a port.

        Returns:
            An integer port on which the server will accept RPC requests.
        """

    @abc.abstractmethod
    def add_secure_port(
        self, address: str, server_credentials: grpc.ServerCredentials
    ) -> int:
        """Opens a secure port for accepting RPCs.

        A port is a communication endpoint that is used by networking
        protocols, like TCP and UDP. To date, we only support TCP.

        This method may only be called before starting the server.

        Args:
            address: The address for which to open a port.
                If the port is 0, or not specified in the address, then the
                gRPC runtime will choose a port.
            server_credentials: A ServerCredentials object.

        Returns:
            An integer port on which the server will accept RPC requests.
        """

    @abc.abstractmethod
    async def start(self) -> None:
        """Starts this Server.

        This method may only be called once. (i.e. it is not idempotent).
        """

    @abc.abstractmethod
    async def stop(self, grace: Optional[float]) -> None:
        """Stops this Server.

        This method immediately stops the server from servicing new RPCs in
        all cases.

        If a grace period is specified, this method waits until all active
        RPCs are finished or until the grace period is reached. RPCs that
        haven't been terminated within the grace period are aborted.
        If a grace period is not specified (by passing None for grace), all
        existing RPCs are aborted immediately and this method blocks until
        the last RPC handler terminates.

        This method is idempotent and may be called at any time. Passing a
        smaller grace value in a subsequent call will have the effect of
        stopping the Server sooner (passing None will have the effect of
        stopping the server immediately). Passing a larger grace value in a
        subsequent call will not have the effect of stopping the server later
        (i.e. the most restrictive grace value is used).

        Args:
            grace: A duration of time in seconds or None.
        """

    @abc.abstractmethod
    async def wait_for_termination(
        self, timeout: Optional[float] = None
    ) -> bool:
        """Continues current coroutine once the server stops.

        This is an EXPERIMENTAL API.

        The wait will not consume computational resources during blocking, and
        it will block until one of the two following conditions are met:

        1) The server is stopped or terminated;
        2) A timeout occurs if timeout is not `None`.

        The timeout argument works in the same way as `threading.Event.wait()`.
        https://docs.python.org/3/library/threading.html#threading.Event.wait

        Args:
            timeout: A floating point number specifying a timeout for the
                operation in seconds.

        Returns:
            A bool indicating whether the operation timed out.
        """

    def add_registered_method_handlers(  # noqa: B027
        self, service_name, method_handlers
    ):
        """Registers per-method RpcMethodHandlers for a service with this
        Server.

        This method is only safe to call before the server is started.

        Args:
            service_name: The service name.
            method_handlers: A dictionary that maps method names to the
                corresponding RpcMethodHandler.
        """
|
||||
|
||||
|
||||
# pylint: disable=too-many-public-methods
class ServicerContext(Generic[RequestType, ResponseType], abc.ABC):
    """A context object passed to method implementations."""

    @abc.abstractmethod
    async def read(self) -> RequestType:
        """Reads one message from the RPC.

        Only one read operation is allowed simultaneously.

        Returns:
            A request message of the RPC.

        Raises:
            An RpcError exception if the read failed.
        """

    @abc.abstractmethod
    async def write(self, message: ResponseType) -> None:
        """Writes one message to the RPC.

        Only one write operation is allowed simultaneously.

        Raises:
            An RpcError exception if the write failed.
        """

    @abc.abstractmethod
    async def send_initial_metadata(
        self, initial_metadata: MetadataType
    ) -> None:
        """Sends the initial metadata value to the client.

        This method need not be called by implementations if they have no
        metadata to add to what the gRPC runtime will transmit.

        Args:
            initial_metadata: The initial :term:`metadata`.
        """

    @abc.abstractmethod
    async def abort(
        self,
        code: grpc.StatusCode,
        details: str = "",
        trailing_metadata: MetadataType = (),
    ) -> NoReturn:
        """Raises an exception to terminate the RPC with a non-OK status.

        The code and details passed as arguments will supersede any existing
        ones.

        Args:
            code: A StatusCode object to be sent to the client.
                It must not be StatusCode.OK.
            details: A UTF-8-encodable string to be sent to the client upon
                termination of the RPC.
            trailing_metadata: A sequence of tuples representing the trailing
                :term:`metadata`.

        Raises:
            Exception: An exception is always raised to signal the abortion of
                the RPC to the gRPC runtime.
        """

    @abc.abstractmethod
    def set_trailing_metadata(self, trailing_metadata: MetadataType) -> None:
        """Sends the trailing metadata for the RPC.

        This method need not be called by implementations if they have no
        metadata to add to what the gRPC runtime will transmit.

        Args:
            trailing_metadata: The trailing :term:`metadata`.
        """

    @abc.abstractmethod
    def invocation_metadata(self) -> Optional[MetadataType]:
        """Accesses the metadata sent by the client.

        Returns:
            The invocation :term:`metadata`.
        """

    @abc.abstractmethod
    def set_code(self, code: grpc.StatusCode) -> None:
        """Sets the value to be used as status code upon RPC completion.

        This method need not be called by method implementations if they wish
        the gRPC runtime to determine the status code of the RPC.

        Args:
            code: A StatusCode object to be sent to the client.
        """

    @abc.abstractmethod
    def set_details(self, details: str) -> None:
        """Sets the value to be used as the detail string upon RPC completion.

        This method need not be called by method implementations if they have
        no details to transmit.

        Args:
            details: A UTF-8-encodable string to be sent to the client upon
                termination of the RPC.
        """

    @abc.abstractmethod
    def set_compression(self, compression: grpc.Compression) -> None:
        """Set the compression algorithm to be used for the entire call.

        Args:
            compression: An element of grpc.compression, e.g.
                grpc.compression.Gzip.
        """

    @abc.abstractmethod
    def disable_next_message_compression(self) -> None:
        """Disables compression for the next response message.

        This method will override any compression configuration set during
        server creation or set on the call.
        """

    @abc.abstractmethod
    def peer(self) -> str:
        """Identifies the peer that invoked the RPC being serviced.

        Returns:
            A string identifying the peer that invoked the RPC being serviced.
            The string format is determined by the gRPC runtime.
        """

    @abc.abstractmethod
    def peer_identities(self) -> Optional[Iterable[bytes]]:
        """Gets one or more peer identity(s).

        Equivalent to
        servicer_context.auth_context().get(servicer_context.peer_identity_key())

        Returns:
            An iterable of the identities, or None if the call is not
            authenticated. Each identity is returned as a raw bytes type.
        """

    @abc.abstractmethod
    def peer_identity_key(self) -> Optional[str]:
        """The auth property used to identify the peer.

        For example, "x509_common_name" or "x509_subject_alternative_name" are
        used to identify an SSL peer.

        Returns:
            The auth property (string) that indicates the
            peer identity, or None if the call is not authenticated.
        """

    @abc.abstractmethod
    def auth_context(self) -> Mapping[str, Iterable[bytes]]:
        """Gets the auth context for the call.

        Returns:
            A map of strings to an iterable of bytes for each auth property.
        """

    def time_remaining(self) -> float:
        """Describes the length of allowed time remaining for the RPC.

        Returns:
            A nonnegative float indicating the length of allowed time in
            seconds remaining for the RPC to complete before it is considered
            to have timed out, or None if no deadline was specified for the
            RPC.
        """

    def trailing_metadata(self):
        """Access value to be used as trailing metadata upon RPC completion.

        This is an EXPERIMENTAL API.

        Returns:
            The trailing :term:`metadata` for the RPC.
        """
        raise NotImplementedError()

    def code(self):
        """Accesses the value to be used as status code upon RPC completion.

        This is an EXPERIMENTAL API.

        Returns:
            The StatusCode value for the RPC.
        """
        raise NotImplementedError()

    def details(self):
        """Accesses the value to be used as detail string upon RPC completion.

        This is an EXPERIMENTAL API.

        Returns:
            The details string of the RPC.
        """
        raise NotImplementedError()

    def add_done_callback(self, callback: DoneCallbackType) -> None:
        """Registers a callback to be called on RPC termination.

        This is an EXPERIMENTAL API.

        Args:
            callback: A callable object that will be called with the servicer
                context object as its only argument.
        """

    def cancelled(self) -> bool:
        """Return True if the RPC is cancelled.

        The RPC is cancelled when the cancellation was requested with cancel().

        This is an EXPERIMENTAL API.

        Returns:
            A bool indicating whether the RPC is cancelled or not.
        """

    def done(self) -> bool:
        """Return True if the RPC is done.

        An RPC is done if the RPC is completed, cancelled or aborted.

        This is an EXPERIMENTAL API.

        Returns:
            A bool indicating if the RPC is done.
        """
||||
@@ -0,0 +1,759 @@
|
||||
# Copyright 2019 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Invocation-side implementation of gRPC Asyncio Python."""
|
||||
|
||||
import asyncio
|
||||
import enum
|
||||
from functools import partial
|
||||
import inspect
|
||||
import logging
|
||||
import traceback
|
||||
from typing import (
|
||||
Any,
|
||||
AsyncIterator,
|
||||
Generator,
|
||||
Generic,
|
||||
Optional,
|
||||
Tuple,
|
||||
Union,
|
||||
)
|
||||
|
||||
import grpc
|
||||
from grpc import _common
|
||||
from grpc._cython import cygrpc
|
||||
|
||||
from . import _base_call
|
||||
from ._metadata import Metadata
|
||||
from ._typing import DeserializingFunction
|
||||
from ._typing import DoneCallbackType
|
||||
from ._typing import EOFType
|
||||
from ._typing import MetadataType
|
||||
from ._typing import MetadatumType
|
||||
from ._typing import RequestIterableType
|
||||
from ._typing import RequestType
|
||||
from ._typing import ResponseType
|
||||
from ._typing import SerializingFunction
|
||||
|
||||
__all__ = "AioRpcError", "Call", "UnaryStreamCall", "UnaryUnaryCall"
|
||||
|
||||
_LOCAL_CANCELLATION_DETAILS = "Locally cancelled by application!"
|
||||
_GC_CANCELLATION_DETAILS = "Cancelled upon garbage collection!"
|
||||
_RPC_ALREADY_FINISHED_DETAILS = "RPC already finished."
|
||||
_RPC_HALF_CLOSED_DETAILS = 'RPC is half closed after calling "done_writing".'
|
||||
_API_STYLE_ERROR = (
|
||||
"The iterator and read/write APIs may not be mixed on a single RPC."
|
||||
)
|
||||
|
||||
_OK_CALL_REPRESENTATION = (
|
||||
'<{} of RPC that terminated with:\n\tstatus = {}\n\tdetails = "{}"\n>'
|
||||
)
|
||||
|
||||
_NON_OK_CALL_REPRESENTATION = (
|
||||
"<{} of RPC that terminated with:\n"
|
||||
"\tstatus = {}\n"
|
||||
'\tdetails = "{}"\n'
|
||||
'\tdebug_error_string = "{}"\n'
|
||||
">"
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AioRpcError(grpc.RpcError):
    """An implementation of RpcError to be used by the asynchronous API.

    A raised AioRpcError is a snapshot of the final status of the RPC; every
    value is already determined, so its accessors need not be coroutines.
    """

    _code: grpc.StatusCode
    _details: Optional[str]
    _initial_metadata: Optional[Metadata]
    _trailing_metadata: Optional[Metadata]
    _debug_error_string: Optional[str]

    def __init__(
        self,
        code: grpc.StatusCode,
        initial_metadata: Metadata,
        trailing_metadata: Metadata,
        details: Optional[str] = None,
        debug_error_string: Optional[str] = None,
    ) -> None:
        """Constructor.

        Args:
            code: The status code with which the RPC has been finalized.
            initial_metadata: Optional initial metadata that could be sent by
                the Server.
            trailing_metadata: Optional metadata that could be sent by the
                Server.
            details: Optional details explaining the reason of the error.
            debug_error_string: Optional string with low-level error details.
        """
        super().__init__()
        # Snapshot every component of the final RPC status.
        self._code = code
        self._initial_metadata = initial_metadata
        self._trailing_metadata = trailing_metadata
        self._details = details
        self._debug_error_string = debug_error_string

    def code(self) -> grpc.StatusCode:
        """Accesses the status code sent by the server.

        Returns:
            The `grpc.StatusCode` status code.
        """
        return self._code

    def details(self) -> Optional[str]:
        """Accesses the details sent by the server.

        Returns:
            The description of the error.
        """
        return self._details

    def initial_metadata(self) -> Metadata:
        """Accesses the initial metadata sent by the server.

        Returns:
            The initial metadata received.
        """
        return self._initial_metadata

    def trailing_metadata(self) -> Metadata:
        """Accesses the trailing metadata sent by the server.

        Returns:
            The trailing metadata received.
        """
        return self._trailing_metadata

    def debug_error_string(self) -> str:
        """Accesses the debug error string sent by the server.

        Returns:
            The debug error string received.
        """
        return self._debug_error_string

    def _repr(self) -> str:
        """Assembles the error string for the RPC error."""
        status_fields = (
            self.__class__.__name__,
            self._code,
            self._details,
            self._debug_error_string,
        )
        return _NON_OK_CALL_REPRESENTATION.format(*status_fields)

    def __repr__(self) -> str:
        return self._repr()

    def __str__(self) -> str:
        return self._repr()

    def __reduce__(self):
        # Support pickling by reconstructing from the constructor arguments.
        ctor_args = (
            self._code,
            self._initial_metadata,
            self._trailing_metadata,
            self._details,
            self._debug_error_string,
        )
        return type(self), ctor_args
|
||||
|
||||
|
||||
def _create_rpc_error(
    initial_metadata: MetadataType,
    status: cygrpc.AioRpcStatus,
) -> AioRpcError:
    """Builds an AioRpcError snapshot from a finished call's Cython status."""
    status_code = _common.CYGRPC_STATUS_CODE_TO_STATUS_CODE[status.code()]
    trailing = Metadata.from_tuple(status.trailing_metadata())
    return AioRpcError(
        status_code,
        Metadata._create(initial_metadata),
        trailing,
        details=status.details(),
        debug_error_string=status.debug_error_string(),
    )
|
||||
|
||||
|
||||
class Call:
    """Base implementation of client RPC Call object.

    Implements logic around final status, metadata and cancellation.
    """

    # Event loop that owns this call's asynchronous operations.
    _loop: asyncio.AbstractEventLoop
    _code: grpc.StatusCode
    # Underlying Cython-level call object; the source of truth for status.
    _cython_call: cygrpc._AioCall
    _metadata: Tuple[MetadatumType, ...]
    _request_serializer: Optional[SerializingFunction]
    _response_deserializer: Optional[DeserializingFunction]

    def __init__(
        self,
        cython_call: cygrpc._AioCall,
        metadata: Metadata,
        request_serializer: Optional[SerializingFunction],
        response_deserializer: Optional[DeserializingFunction],
        loop: asyncio.AbstractEventLoop,
    ) -> None:
        self._loop = loop
        self._cython_call = cython_call
        self._metadata = tuple(metadata)
        self._request_serializer = request_serializer
        self._response_deserializer = response_deserializer

    def __del__(self) -> None:
        """Cancels the underlying call if it is still running at GC time."""
        # The '_cython_call' object might be destructed before Call object
        if hasattr(self, "_cython_call") and not self._cython_call.done():
            self._cancel(_GC_CANCELLATION_DETAILS)

    def cancelled(self) -> bool:
        """Returns True if the underlying call reports it was cancelled."""
        return self._cython_call.cancelled()

    def _cancel(self, details: str) -> bool:
        """Forwards the application cancellation reasoning."""
        if not self._cython_call.done():
            self._cython_call.cancel(details)
            return True
        return False

    def cancel(self) -> bool:
        """Cancels the RPC with the standard local-cancellation details."""
        return self._cancel(_LOCAL_CANCELLATION_DETAILS)

    def done(self) -> bool:
        """Returns True if the underlying call has reached a final state."""
        return self._cython_call.done()

    def add_done_callback(self, callback: DoneCallbackType) -> None:
        """Registers `callback` to be invoked with this Call on completion."""
        cb = partial(callback, self)
        self._cython_call.add_done_callback(cb)

    def time_remaining(self) -> Optional[float]:
        """Delegates to the underlying Cython call's remaining-time query."""
        return self._cython_call.time_remaining()

    async def initial_metadata(self) -> Metadata:
        """Returns the initial metadata received from the peer."""
        raw_metadata_tuple = await self._cython_call.initial_metadata()
        return Metadata.from_tuple(raw_metadata_tuple)

    async def trailing_metadata(self) -> Metadata:
        """Returns the trailing metadata from the final call status."""
        raw_metadata_tuple = (
            await self._cython_call.status()
        ).trailing_metadata()
        if not raw_metadata_tuple:
            return Metadata()
        return Metadata.from_tuple(raw_metadata_tuple)

    async def code(self) -> grpc.StatusCode:
        """Returns the final status code mapped to a grpc.StatusCode."""
        cygrpc_code = (await self._cython_call.status()).code()
        return _common.CYGRPC_STATUS_CODE_TO_STATUS_CODE[cygrpc_code]

    async def details(self) -> str:
        """Returns the details string from the final call status."""
        return (await self._cython_call.status()).details()

    async def debug_error_string(self) -> str:
        """Returns the debug error string from the final call status."""
        return (await self._cython_call.status()).debug_error_string()

    async def _raise_for_status(self) -> None:
        """Raises CancelledError or AioRpcError if the call did not succeed."""
        # A local cancellation surfaces as asyncio cancellation, not as an
        # RPC error.
        if self._cython_call.is_locally_cancelled():
            raise asyncio.CancelledError()
        code = await self.code()
        if code != grpc.StatusCode.OK:
            raise _create_rpc_error(
                await self.initial_metadata(),
                await self._cython_call.status(),
            )

    def _repr(self) -> str:
        return repr(self._cython_call)

    def __repr__(self) -> str:
        return self._repr()

    def __str__(self) -> str:
        return self._repr()
|
||||
|
||||
|
||||
class _APIStyle(enum.IntEnum):
|
||||
UNKNOWN = 0
|
||||
ASYNC_GENERATOR = 1
|
||||
READER_WRITER = 2
|
||||
|
||||
|
||||
class _UnaryResponseMixin(Call, Generic[ResponseType]):
    """Mixin adding awaitable single-response semantics to a Call."""

    # Task that resolves to the single response message of the RPC.
    _call_response: asyncio.Task

    def _init_unary_response_mixin(self, response_task: asyncio.Task):
        self._call_response = response_task

    def cancel(self) -> bool:
        """Cancels the call and, if successful, its response task too."""
        if super().cancel():
            self._call_response.cancel()
            return True
        return False

    def __await__(self) -> Generator[Any, None, ResponseType]:
        """Wait till the ongoing RPC request finishes."""
        try:
            response = yield from self._call_response
        except asyncio.CancelledError:
            # Even if we caught all other CancelledError, there is still
            # this corner case. If the application cancels immediately after
            # the Call object is created, we will observe this
            # `CancelledError`.
            if not self.cancelled():
                self.cancel()
            raise

        # NOTE(lidiz) If we raise RpcError in the task, and users doesn't
        # 'await' on it. AsyncIO will log 'Task exception was never retrieved'.
        # Instead, if we move the exception raising here, the spam stops.
        # Unfortunately, there can only be one 'yield from' in '__await__'. So,
        # we need to access the private instance variable.
        if response is cygrpc.EOF:
            if self._cython_call.is_locally_cancelled():
                raise asyncio.CancelledError()
            else:
                raise _create_rpc_error(
                    self._cython_call._initial_metadata,
                    self._cython_call._status,
                )
        else:
            return response
|
||||
|
||||
|
||||
class _StreamResponseMixin(Call):
    """Mixin adding streaming-response semantics to a Call."""

    # Lazily-created async generator used by the __aiter__ style.
    _message_aiter: AsyncIterator[ResponseType]
    # Task that completes once the request side has been sent.
    _preparation: asyncio.Task
    _response_style: _APIStyle

    def _init_stream_response_mixin(self, preparation: asyncio.Task):
        self._message_aiter = None
        self._preparation = preparation
        self._response_style = _APIStyle.UNKNOWN

    def _update_response_style(self, style: _APIStyle):
        """Locks in the first style used; mixing styles is a usage error."""
        if self._response_style is _APIStyle.UNKNOWN:
            self._response_style = style
        elif self._response_style is not style:
            raise cygrpc.UsageError(_API_STYLE_ERROR)

    def cancel(self) -> bool:
        """Cancels the call and, if successful, the preparation task too."""
        if super().cancel():
            self._preparation.cancel()
            return True
        return False

    async def _fetch_stream_responses(self) -> ResponseType:
        """Async generator yielding responses until EOF is observed."""
        message = await self._read()
        while message is not cygrpc.EOF:
            yield message
            message = await self._read()

        # If the read operation failed, Core should explain why.
        await self._raise_for_status()

    def __aiter__(self) -> AsyncIterator[ResponseType]:
        self._update_response_style(_APIStyle.ASYNC_GENERATOR)
        if self._message_aiter is None:
            self._message_aiter = self._fetch_stream_responses()
        return self._message_aiter

    async def _read(self) -> ResponseType:
        """Reads one raw message from Core and deserializes it (or EOF)."""
        # Wait for the request being sent
        await self._preparation

        # Reads response message from Core
        try:
            raw_response = await self._cython_call.receive_serialized_message()
        except asyncio.CancelledError:
            if not self.cancelled():
                self.cancel()
            raise

        if raw_response is cygrpc.EOF:
            return cygrpc.EOF
        return _common.deserialize(raw_response, self._response_deserializer)

    async def read(self) -> Union[EOFType, ResponseType]:
        """Reads one message via the reader/writer style, or returns EOF."""
        if self.done():
            await self._raise_for_status()
            return cygrpc.EOF
        self._update_response_style(_APIStyle.READER_WRITER)

        response_message = await self._read()

        if response_message is cygrpc.EOF:
            # If the read operation failed, Core should explain why.
            await self._raise_for_status()
        return response_message
|
||||
|
||||
|
||||
class _StreamRequestMixin(Call):
    """Mixin providing the request-streaming half of a client call.

    Requests are either pulled from a user-supplied (async or sync) iterator
    by a background task, or pushed explicitly via write()/done_writing() —
    the two styles are mutually exclusive (_APIStyle).
    """

    _metadata_sent: asyncio.Event
    _done_writing_flag: bool
    _async_request_poller: Optional[asyncio.Task]
    _request_style: _APIStyle

    def _init_stream_request_mixin(
        self, request_iterator: Optional[RequestIterableType]
    ):
        """Initialize request-side state; must be called by the subclass __init__."""
        self._metadata_sent = asyncio.Event()
        self._done_writing_flag = False

        # If user passes in an async iterator, create a consumer Task.
        if request_iterator is not None:
            self._async_request_poller = self._loop.create_task(
                self._consume_request_iterator(request_iterator)
            )
            self._request_style = _APIStyle.ASYNC_GENERATOR
        else:
            self._async_request_poller = None
            self._request_style = _APIStyle.READER_WRITER

    def _raise_for_different_style(self, style: _APIStyle):
        """Raise UsageError when mixing the iterator and reader-writer APIs."""
        if self._request_style is not style:
            raise cygrpc.UsageError(_API_STYLE_ERROR)

    def cancel(self) -> bool:
        """Cancel the RPC and, if present, the iterator-consumer task."""
        if super().cancel():
            if self._async_request_poller is not None:
                self._async_request_poller.cancel()
            return True
        return False

    def _metadata_sent_observer(self):
        # Invoked by the Cython layer once initial metadata has been sent.
        self._metadata_sent.set()

    async def _write_once(self, request: RequestType) -> bool:
        """Write one request from the iterator.

        Returns True on success; on AioRpcError, logs the error and returns
        False so the caller stops consuming (the RPC itself carries the
        failure status to the user).
        """
        try:
            await self._write(request)
        except AioRpcError as rpc_error:
            _LOGGER.debug(
                "Exception while consuming the request_iterator: %s",
                rpc_error,
            )
            return False
        return True

    async def _consume_request_iterator(
        self, request_iterator: RequestIterableType
    ) -> None:
        """Drain the user's iterator into the RPC, then half-close."""
        try:
            if inspect.isasyncgen(request_iterator) or hasattr(
                request_iterator, "__aiter__"
            ):
                async for request in request_iterator:
                    if not await self._write_once(request):
                        return
            else:
                for request in request_iterator:
                    if not await self._write_once(request):
                        return

            await self._done_writing()
        except:  # pylint: disable=bare-except # noqa: E722
            # Client iterators can raise exceptions, which we should handle by
            # cancelling the RPC and logging the client's error. No exceptions
            # should escape this function.
            _LOGGER.debug(
                "Client request_iterator raised exception:\n%s",
                traceback.format_exc(),
            )
            self.cancel()

    async def _write(self, request: RequestType) -> None:
        """Serialize and send a single request message.

        Raises:
            asyncio.InvalidStateError: if the RPC finished or was half-closed.
        """
        if self.done():
            raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS)
        if self._done_writing_flag:
            raise asyncio.InvalidStateError(_RPC_HALF_CLOSED_DETAILS)
        if not self._metadata_sent.is_set():
            # The first write must wait until initial metadata is on the wire.
            await self._metadata_sent.wait()
            if self.done():
                await self._raise_for_status()

        serialized_request = _common.serialize(
            request, self._request_serializer
        )
        try:
            await self._cython_call.send_serialized_message(serialized_request)
        except cygrpc.InternalError as err:
            self._cython_call.set_internal_error(str(err))
            await self._raise_for_status()
        except asyncio.CancelledError:
            if not self.cancelled():
                self.cancel()
            raise

    async def _done_writing(self) -> None:
        """Half-close the sending side; safe to call multiple times."""
        if self.done():
            # If the RPC is finished, do nothing.
            return
        if not self._done_writing_flag:
            # If the done writing is not sent before, try to send it.
            self._done_writing_flag = True
            try:
                await self._cython_call.send_receive_close()
            except asyncio.CancelledError:
                if not self.cancelled():
                    self.cancel()
                raise

    async def write(self, request: RequestType) -> None:
        """Public reader-writer-style write of a single request."""
        self._raise_for_different_style(_APIStyle.READER_WRITER)
        await self._write(request)

    async def done_writing(self) -> None:
        """Signal peer that client is done writing.

        This method is idempotent.
        """
        self._raise_for_different_style(_APIStyle.READER_WRITER)
        await self._done_writing()

    async def wait_for_connection(self) -> None:
        """Block until initial metadata is sent (or the RPC fails first)."""
        await self._metadata_sent.wait()
        if self.done():
            await self._raise_for_status()
|
||||
|
||||
|
||||
class UnaryUnaryCall(_UnaryResponseMixin, Call, _base_call.UnaryUnaryCall):
    """Object for managing unary-unary RPC calls.

    Returned when an instance of `UnaryUnaryMultiCallable` object is called.
    """

    # The single request message to be sent.
    _request: RequestType
    # Task driving _invoke(); also used by the unary-response mixin.
    _invocation_task: asyncio.Task

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        request: RequestType,
        deadline: Optional[float],
        metadata: Metadata,
        credentials: Optional[grpc.CallCredentials],
        wait_for_ready: Optional[bool],
        channel: cygrpc.AioChannel,
        method: bytes,
        request_serializer: Optional[SerializingFunction],
        response_deserializer: Optional[DeserializingFunction],
        loop: asyncio.AbstractEventLoop,
    ) -> None:
        """Create the Cython call object and eagerly start the RPC as a task."""
        super().__init__(
            channel.call(method, deadline, credentials, wait_for_ready),
            metadata,
            request_serializer,
            response_deserializer,
            loop,
        )
        self._request = request
        self._context = cygrpc.build_census_context()
        # The RPC is fired immediately; awaiting the call later just awaits
        # this task through the unary-response mixin.
        self._invocation_task = loop.create_task(self._invoke())
        self._init_unary_response_mixin(self._invocation_task)

    async def _invoke(self) -> ResponseType:
        """Send the request, await the response, and deserialize it.

        Returns cygrpc.EOF when the call did not complete OK.
        """
        serialized_request = _common.serialize(
            self._request, self._request_serializer
        )

        # NOTE(lidiz) asyncio.CancelledError is not a good transport for status,
        # because the asyncio.Task class do not cache the exception object.
        # https://github.com/python/cpython/blob/edad4d89e357c92f70c0324b937845d652b20afd/Lib/asyncio/tasks.py#L785
        try:
            serialized_response = await self._cython_call.unary_unary(
                serialized_request, self._metadata, self._context
            )
        except asyncio.CancelledError:
            # Deliberately not re-raised: the status below reports the outcome.
            if not self.cancelled():
                self.cancel()

        if self._cython_call.is_ok():
            return _common.deserialize(
                serialized_response, self._response_deserializer
            )
        return cygrpc.EOF

    async def wait_for_connection(self) -> None:
        """Wait until the RPC has been invoked; raise its error if it failed."""
        await self._invocation_task
        if self.done():
            await self._raise_for_status()
|
||||
|
||||
|
||||
class UnaryStreamCall(_StreamResponseMixin, Call, _base_call.UnaryStreamCall):
    """Object for managing unary-stream RPC calls.

    Returned when an instance of `UnaryStreamMultiCallable` object is called.
    """

    # The single request message to be sent.
    _request: RequestType
    # Task sending the request; the stream-response mixin waits on it.
    _send_unary_request_task: asyncio.Task

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        request: RequestType,
        deadline: Optional[float],
        metadata: Metadata,
        credentials: Optional[grpc.CallCredentials],
        wait_for_ready: Optional[bool],
        channel: cygrpc.AioChannel,
        method: bytes,
        request_serializer: Optional[SerializingFunction],
        response_deserializer: Optional[DeserializingFunction],
        loop: asyncio.AbstractEventLoop,
    ) -> None:
        """Create the Cython call object and start sending the request."""
        super().__init__(
            channel.call(method, deadline, credentials, wait_for_ready),
            metadata,
            request_serializer,
            response_deserializer,
            loop,
        )
        self._request = request
        self._context = cygrpc.build_census_context()
        self._send_unary_request_task = loop.create_task(
            self._send_unary_request()
        )
        self._init_stream_response_mixin(self._send_unary_request_task)

    async def _send_unary_request(self) -> ResponseType:
        """Serialize and send the single request that opens the stream."""
        serialized_request = _common.serialize(
            self._request, self._request_serializer
        )
        try:
            await self._cython_call.initiate_unary_stream(
                serialized_request, self._metadata, self._context
            )
        except asyncio.CancelledError:
            if not self.cancelled():
                self.cancel()
            raise

    async def wait_for_connection(self) -> None:
        """Wait until the request has been sent; raise its error if it failed."""
        await self._send_unary_request_task
        if self.done():
            await self._raise_for_status()
|
||||
|
||||
|
||||
# pylint: disable=too-many-ancestors
class StreamUnaryCall(
    _StreamRequestMixin, _UnaryResponseMixin, Call, _base_call.StreamUnaryCall
):
    """Object for managing stream-unary RPC calls.

    Returned when an instance of `StreamUnaryMultiCallable` object is called.
    """

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        request_iterator: Optional[RequestIterableType],
        deadline: Optional[float],
        metadata: Metadata,
        credentials: Optional[grpc.CallCredentials],
        wait_for_ready: Optional[bool],
        channel: cygrpc.AioChannel,
        method: bytes,
        request_serializer: Optional[SerializingFunction],
        response_deserializer: Optional[DeserializingFunction],
        loop: asyncio.AbstractEventLoop,
    ) -> None:
        """Create the Cython call object and start the RPC as a task.

        request_iterator may be None, in which case the caller writes
        requests explicitly via the reader-writer API.
        """
        super().__init__(
            channel.call(method, deadline, credentials, wait_for_ready),
            metadata,
            request_serializer,
            response_deserializer,
            loop,
        )

        self._context = cygrpc.build_census_context()
        self._init_stream_request_mixin(request_iterator)
        self._init_unary_response_mixin(loop.create_task(self._conduct_rpc()))

    async def _conduct_rpc(self) -> ResponseType:
        """Run the stream-unary RPC and deserialize the single response.

        Returns cygrpc.EOF when the call did not complete OK.
        """
        try:
            serialized_response = await self._cython_call.stream_unary(
                self._metadata, self._metadata_sent_observer, self._context
            )
        except asyncio.CancelledError:
            if not self.cancelled():
                self.cancel()
            raise

        if self._cython_call.is_ok():
            return _common.deserialize(
                serialized_response, self._response_deserializer
            )
        return cygrpc.EOF
|
||||
|
||||
|
||||
class StreamStreamCall(
    _StreamRequestMixin, _StreamResponseMixin, Call, _base_call.StreamStreamCall
):
    """Object for managing stream-stream RPC calls.

    Returned when an instance of `StreamStreamMultiCallable` object is called.
    """

    # Task running _prepare_rpc(); both mixins gate on its completion.
    _initializer: asyncio.Task

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        request_iterator: Optional[RequestIterableType],
        deadline: Optional[float],
        metadata: Metadata,
        credentials: Optional[grpc.CallCredentials],
        wait_for_ready: Optional[bool],
        channel: cygrpc.AioChannel,
        method: bytes,
        request_serializer: Optional[SerializingFunction],
        response_deserializer: Optional[DeserializingFunction],
        loop: asyncio.AbstractEventLoop,
    ) -> None:
        """Create the Cython call object and initiate the bidi stream."""
        super().__init__(
            channel.call(method, deadline, credentials, wait_for_ready),
            metadata,
            request_serializer,
            response_deserializer,
            loop,
        )
        self._context = cygrpc.build_census_context()
        self._initializer = self._loop.create_task(self._prepare_rpc())
        self._init_stream_request_mixin(request_iterator)
        self._init_stream_response_mixin(self._initializer)

    async def _prepare_rpc(self):
        """Prepares the RPC for receiving/sending messages.

        All other operations around the stream should only happen after the
        completion of this method.
        """
        try:
            await self._cython_call.initiate_stream_stream(
                self._metadata, self._metadata_sent_observer, self._context
            )
        except asyncio.CancelledError:
            if not self.cancelled():
                self.cancel()
            # No need to raise RpcError here, because no one will `await` this task.
|
||||
@@ -0,0 +1,625 @@
|
||||
# Copyright 2019 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Invocation-side implementation of gRPC Asyncio Python."""
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
from typing import Any, Iterable, List, Optional, Sequence
|
||||
|
||||
import grpc
|
||||
from grpc import _common
|
||||
from grpc import _compression
|
||||
from grpc import _grpcio_metadata
|
||||
from grpc._cython import cygrpc
|
||||
|
||||
from . import _base_call
|
||||
from . import _base_channel
|
||||
from ._call import StreamStreamCall
|
||||
from ._call import StreamUnaryCall
|
||||
from ._call import UnaryStreamCall
|
||||
from ._call import UnaryUnaryCall
|
||||
from ._interceptor import ClientInterceptor
|
||||
from ._interceptor import InterceptedStreamStreamCall
|
||||
from ._interceptor import InterceptedStreamUnaryCall
|
||||
from ._interceptor import InterceptedUnaryStreamCall
|
||||
from ._interceptor import InterceptedUnaryUnaryCall
|
||||
from ._interceptor import StreamStreamClientInterceptor
|
||||
from ._interceptor import StreamUnaryClientInterceptor
|
||||
from ._interceptor import UnaryStreamClientInterceptor
|
||||
from ._interceptor import UnaryUnaryClientInterceptor
|
||||
from ._metadata import Metadata
|
||||
from ._typing import ChannelArgumentType
|
||||
from ._typing import DeserializingFunction
|
||||
from ._typing import MetadataType
|
||||
from ._typing import RequestIterableType
|
||||
from ._typing import RequestType
|
||||
from ._typing import ResponseType
|
||||
from ._typing import SerializingFunction
|
||||
from ._utils import _timeout_to_deadline
|
||||
|
||||
# Primary user-agent string advertised to servers, including the grpcio version.
_USER_AGENT = "grpc-python-asyncio/{}".format(_grpcio_metadata.__version__)
|
||||
|
||||
if sys.version_info < (3, 7):
    # Python 3.6 has no asyncio.all_tasks(); fall back to the (since removed)
    # Task.all_tasks() classmethod. Comparing the full version tuple — rather
    # than only the minor component — keeps the check correct for any major
    # version.
    def _all_tasks() -> Iterable[asyncio.Task]:
        """Return every task known to the running event loop."""
        return asyncio.Task.all_tasks()  # pylint: disable=no-member

else:

    def _all_tasks() -> Iterable[asyncio.Task]:
        """Return every task running in the current event loop."""
        return asyncio.all_tasks()
|
||||
|
||||
|
||||
def _augment_channel_arguments(
    base_options: ChannelArgumentType, compression: Optional[grpc.Compression]
):
    """Return the channel arguments extended with the compression option (if
    any) and the asyncio user-agent string."""
    extra_options = _compression.create_channel_option(compression) + (
        (
            cygrpc.ChannelArgKey.primary_user_agent_string,
            _USER_AGENT,
        ),
    )
    return tuple(base_options) + extra_options
|
||||
|
||||
|
||||
class _BaseMultiCallable:
    """Base class of all multi callable objects.

    Handles the initialization logic and stores common attributes.
    """

    _loop: asyncio.AbstractEventLoop
    _channel: cygrpc.AioChannel
    _method: bytes
    _request_serializer: Optional[SerializingFunction]
    _response_deserializer: Optional[DeserializingFunction]
    _interceptors: Optional[Sequence[ClientInterceptor]]
    # References kept alive for the lifetime of this callable (e.g. the
    # owning Channel), so the underlying channel cannot be collected first.
    _references: List[Any]

    # pylint: disable=too-many-arguments
    def __init__(
        self,
        channel: cygrpc.AioChannel,
        method: bytes,
        request_serializer: Optional[SerializingFunction],
        response_deserializer: Optional[DeserializingFunction],
        interceptors: Optional[Sequence[ClientInterceptor]],
        references: List[Any],
        loop: asyncio.AbstractEventLoop,
    ) -> None:
        """Store the per-method call configuration used by __call__."""
        self._loop = loop
        self._channel = channel
        self._method = method
        self._request_serializer = request_serializer
        self._response_deserializer = response_deserializer
        self._interceptors = interceptors
        self._references = references

    @staticmethod
    def _init_metadata(
        metadata: Optional[MetadataType] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> Metadata:
        """Based on the provided values for <metadata> or <compression> initialise the final
        metadata, as it should be used for the current call.
        """
        metadata = metadata or Metadata()
        if not isinstance(metadata, Metadata) and isinstance(
            metadata, Sequence
        ):
            # Accept plain sequences of (key, value) pairs for convenience.
            metadata = Metadata.from_tuple(tuple(metadata))
        if compression:
            metadata = Metadata(
                *_compression.augment_metadata(metadata, compression)
            )
        return metadata
|
||||
|
||||
|
||||
class UnaryUnaryMultiCallable(
    _BaseMultiCallable, _base_channel.UnaryUnaryMultiCallable
):
    def __call__(
        self,
        request: RequestType,
        *,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> _base_call.UnaryUnaryCall[RequestType, ResponseType]:
        """Start a unary-unary RPC, routing through interceptors when present."""
        metadata = self._init_metadata(metadata, compression)
        shared_args = (
            credentials,
            wait_for_ready,
            self._channel,
            self._method,
            self._request_serializer,
            self._response_deserializer,
            self._loop,
        )
        if self._interceptors:
            # Intercepted calls resolve the deadline themselves.
            return InterceptedUnaryUnaryCall(
                self._interceptors, request, timeout, metadata, *shared_args
            )
        return UnaryUnaryCall(
            request, _timeout_to_deadline(timeout), metadata, *shared_args
        )
|
||||
|
||||
|
||||
class UnaryStreamMultiCallable(
    _BaseMultiCallable, _base_channel.UnaryStreamMultiCallable
):
    def __call__(
        self,
        request: RequestType,
        *,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> _base_call.UnaryStreamCall[RequestType, ResponseType]:
        """Start a unary-stream RPC, routing through interceptors when present."""
        metadata = self._init_metadata(metadata, compression)
        shared_args = (
            credentials,
            wait_for_ready,
            self._channel,
            self._method,
            self._request_serializer,
            self._response_deserializer,
            self._loop,
        )
        if self._interceptors:
            # Intercepted calls resolve the deadline themselves.
            return InterceptedUnaryStreamCall(
                self._interceptors, request, timeout, metadata, *shared_args
            )
        return UnaryStreamCall(
            request, _timeout_to_deadline(timeout), metadata, *shared_args
        )
|
||||
|
||||
|
||||
class StreamUnaryMultiCallable(
    _BaseMultiCallable, _base_channel.StreamUnaryMultiCallable
):
    def __call__(
        self,
        request_iterator: Optional[RequestIterableType] = None,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> _base_call.StreamUnaryCall:
        """Start a stream-unary RPC, routing through interceptors when present."""
        metadata = self._init_metadata(metadata, compression)
        shared_args = (
            credentials,
            wait_for_ready,
            self._channel,
            self._method,
            self._request_serializer,
            self._response_deserializer,
            self._loop,
        )
        if self._interceptors:
            # Intercepted calls resolve the deadline themselves.
            return InterceptedStreamUnaryCall(
                self._interceptors,
                request_iterator,
                timeout,
                metadata,
                *shared_args,
            )
        return StreamUnaryCall(
            request_iterator,
            _timeout_to_deadline(timeout),
            metadata,
            *shared_args,
        )
|
||||
|
||||
|
||||
class StreamStreamMultiCallable(
    _BaseMultiCallable, _base_channel.StreamStreamMultiCallable
):
    def __call__(
        self,
        request_iterator: Optional[RequestIterableType] = None,
        timeout: Optional[float] = None,
        metadata: Optional[MetadataType] = None,
        credentials: Optional[grpc.CallCredentials] = None,
        wait_for_ready: Optional[bool] = None,
        compression: Optional[grpc.Compression] = None,
    ) -> _base_call.StreamStreamCall:
        """Start a stream-stream RPC, routing through interceptors when present."""
        metadata = self._init_metadata(metadata, compression)
        shared_args = (
            credentials,
            wait_for_ready,
            self._channel,
            self._method,
            self._request_serializer,
            self._response_deserializer,
            self._loop,
        )
        if self._interceptors:
            # Intercepted calls resolve the deadline themselves.
            return InterceptedStreamStreamCall(
                self._interceptors,
                request_iterator,
                timeout,
                metadata,
                *shared_args,
            )
        return StreamStreamCall(
            request_iterator,
            _timeout_to_deadline(timeout),
            metadata,
            *shared_args,
        )
|
||||
|
||||
|
||||
class Channel(_base_channel.Channel):
    """Asynchronous Channel implementation, backed by a Cython AioChannel."""

    _loop: asyncio.AbstractEventLoop
    _channel: cygrpc.AioChannel
    # Interceptors are bucketed by RPC cardinality at construction time.
    _unary_unary_interceptors: List[UnaryUnaryClientInterceptor]
    _unary_stream_interceptors: List[UnaryStreamClientInterceptor]
    _stream_unary_interceptors: List[StreamUnaryClientInterceptor]
    _stream_stream_interceptors: List[StreamStreamClientInterceptor]

    def __init__(
        self,
        target: str,
        options: ChannelArgumentType,
        credentials: Optional[cygrpc.ChannelCredentials],
        compression: Optional[grpc.Compression],
        interceptors: Optional[Sequence[ClientInterceptor]],
    ):
        """Constructor.

        Args:
          target: The target to which to connect.
          options: Configuration options for the channel.
          credentials: A cygrpc.ChannelCredentials or None.
          compression: An optional value indicating the compression method to be
            used over the lifetime of the channel.
          interceptors: An optional list of interceptors that would be used for
            intercepting any RPC executed with that channel.

        Raises:
          ValueError: if an interceptor is not one of the four supported
            client-interceptor types.
        """
        self._unary_unary_interceptors = []
        self._unary_stream_interceptors = []
        self._stream_unary_interceptors = []
        self._stream_stream_interceptors = []

        if interceptors is not None:
            for interceptor in interceptors:
                if isinstance(interceptor, UnaryUnaryClientInterceptor):
                    self._unary_unary_interceptors.append(interceptor)
                elif isinstance(interceptor, UnaryStreamClientInterceptor):
                    self._unary_stream_interceptors.append(interceptor)
                elif isinstance(interceptor, StreamUnaryClientInterceptor):
                    self._stream_unary_interceptors.append(interceptor)
                elif isinstance(interceptor, StreamStreamClientInterceptor):
                    self._stream_stream_interceptors.append(interceptor)
                else:
                    raise ValueError(  # noqa: TRY004
                        "Interceptor {} must be ".format(interceptor)
                        + "{} or ".format(UnaryUnaryClientInterceptor.__name__)
                        + "{} or ".format(UnaryStreamClientInterceptor.__name__)
                        + "{} or ".format(StreamUnaryClientInterceptor.__name__)
                        + "{}. ".format(StreamStreamClientInterceptor.__name__)
                    )

        self._loop = cygrpc.get_working_loop()
        self._channel = cygrpc.AioChannel(
            _common.encode(target),
            _augment_channel_arguments(options, compression),
            credentials,
            self._loop,
        )

    async def __aenter__(self):
        """Enter the async context manager; the channel itself is the resource."""
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # Close immediately (no grace period) on context exit.
        await self._close(None)

    async def _close(self, grace):  # pylint: disable=too-many-branches
        """Close the channel, optionally granting in-flight calls `grace`
        seconds to finish before cancelling them."""
        if self._channel.closed():
            return

        # No new calls will be accepted by the Cython channel.
        self._channel.closing()

        # Iterate through running tasks
        # NOTE(review): in-flight calls are discovered by inspecting the first
        # stack frame of every task for a `self` that is an aio Call — there
        # is no explicit registry of outstanding calls.
        tasks = _all_tasks()
        calls = []
        call_tasks = []
        for task in tasks:
            try:
                stack = task.get_stack(limit=1)
            except AttributeError as attribute_error:
                # NOTE(lidiz) tl;dr: If the Task is created with a CPython
                # object, it will trigger AttributeError.
                #
                # In the global finalizer, the event loop schedules
                # a CPython PyAsyncGenAThrow object.
                # https://github.com/python/cpython/blob/00e45877e33d32bb61aa13a2033e3bba370bda4d/Lib/asyncio/base_events.py#L484
                #
                # However, the PyAsyncGenAThrow object is written in C and
                # failed to include the normal Python frame objects. Hence,
                # this exception is a false negative, and it is safe to ignore
                # the failure. It is fixed by https://github.com/python/cpython/pull/18669,
                # but not available until 3.9 or 3.8.3. So, we have to keep it
                # for a while.
                # TODO(lidiz): drop this hack after 3.8 deprecation
                if "frame" in str(attribute_error):
                    continue
                raise

            # If the Task is created by a C-extension, the stack will be empty.
            if not stack:
                continue

            # Locate ones created by `aio.Call`.
            frame = stack[0]
            candidate = frame.f_locals.get("self")
            # Explicitly check for a non-null candidate instead of the more pythonic 'if candidate:'
            # because doing 'if candidate:' assumes that the coroutine implements '__bool__' which
            # might not always be the case.
            if candidate is not None and isinstance(candidate, _base_call.Call):
                if hasattr(candidate, "_channel"):
                    # For intercepted Call object
                    if candidate._channel is not self._channel:
                        continue
                elif hasattr(candidate, "_cython_call"):
                    # For normal Call object
                    if candidate._cython_call._channel is not self._channel:
                        continue
                else:
                    # Unidentified Call object
                    error_msg = f"Unrecognized call object: {candidate}"
                    raise cygrpc.InternalError(error_msg)

                calls.append(candidate)
                call_tasks.append(task)

        # If needed, try to wait for them to finish.
        # Call objects are not always awaitables.
        if grace and call_tasks:
            await asyncio.wait(call_tasks, timeout=grace)

        # Time to cancel existing calls.
        for call in calls:
            call.cancel()

        # Destroy the channel
        self._channel.close()

    async def close(self, grace: Optional[float] = None):
        """Close the channel; see _close for the grace-period semantics."""
        await self._close(grace)

    def __del__(self):
        # Best-effort synchronous cleanup; hasattr guards against a partially
        # constructed instance (e.g. __init__ raised before _channel was set).
        if hasattr(self, "_channel") and not self._channel.closed():
            self._channel.close()

    def get_state(
        self, try_to_connect: bool = False
    ) -> grpc.ChannelConnectivity:
        """Return the current connectivity state, optionally kicking off a
        connection attempt."""
        result = self._channel.check_connectivity_state(try_to_connect)
        return _common.CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[result]

    async def wait_for_state_change(
        self,
        last_observed_state: grpc.ChannelConnectivity,
    ) -> None:
        """Block until connectivity differs from `last_observed_state`."""
        # NOTE(review): uses `assert` on the watch result — stripped under -O;
        # presumably the watch can only return falsy on misuse. TODO confirm.
        assert await self._channel.watch_connectivity_state(
            last_observed_state.value[0], None
        )

    async def channel_ready(self) -> None:
        """Poll-and-wait until the channel reaches READY."""
        state = self.get_state(try_to_connect=True)
        while state != grpc.ChannelConnectivity.READY:
            await self.wait_for_state_change(state)
            state = self.get_state(try_to_connect=True)

    # TODO(xuanwn): Implement this method after we have
    # observability for Asyncio.
    def _get_registered_call_handle(self, method: str) -> int:
        # Intentionally a stub (returns None despite the annotation) until
        # observability support lands.
        pass

    # TODO(xuanwn): Implement _registered_method after we have
    # observability for Asyncio.
    # pylint: disable=arguments-differ,unused-argument
    def unary_unary(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> UnaryUnaryMultiCallable:
        """Create a callable for a unary-unary method on this channel."""
        return UnaryUnaryMultiCallable(
            self._channel,
            _common.encode(method),
            request_serializer,
            response_deserializer,
            self._unary_unary_interceptors,
            [self],
            self._loop,
        )

    # TODO(xuanwn): Implement _registered_method after we have
    # observability for Asyncio.
    # pylint: disable=arguments-differ,unused-argument
    def unary_stream(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> UnaryStreamMultiCallable:
        """Create a callable for a unary-stream method on this channel."""
        return UnaryStreamMultiCallable(
            self._channel,
            _common.encode(method),
            request_serializer,
            response_deserializer,
            self._unary_stream_interceptors,
            [self],
            self._loop,
        )

    # TODO(xuanwn): Implement _registered_method after we have
    # observability for Asyncio.
    # pylint: disable=arguments-differ,unused-argument
    def stream_unary(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> StreamUnaryMultiCallable:
        """Create a callable for a stream-unary method on this channel."""
        return StreamUnaryMultiCallable(
            self._channel,
            _common.encode(method),
            request_serializer,
            response_deserializer,
            self._stream_unary_interceptors,
            [self],
            self._loop,
        )

    # TODO(xuanwn): Implement _registered_method after we have
    # observability for Asyncio.
    # pylint: disable=arguments-differ,unused-argument
    def stream_stream(
        self,
        method: str,
        request_serializer: Optional[SerializingFunction] = None,
        response_deserializer: Optional[DeserializingFunction] = None,
        _registered_method: Optional[bool] = False,
    ) -> StreamStreamMultiCallable:
        """Create a callable for a stream-stream method on this channel."""
        return StreamStreamMultiCallable(
            self._channel,
            _common.encode(method),
            request_serializer,
            response_deserializer,
            self._stream_stream_interceptors,
            [self],
            self._loop,
        )
|
||||
|
||||
|
||||
def insecure_channel(
    target: str,
    options: Optional[ChannelArgumentType] = None,
    compression: Optional[grpc.Compression] = None,
    interceptors: Optional[Sequence[ClientInterceptor]] = None,
):
    """Creates an insecure asynchronous Channel to a server.

    Args:
      target: The server address
      options: An optional list of key-value pairs (:term:`channel_arguments`
        in gRPC Core runtime) to configure the channel.
      compression: An optional value indicating the compression method to be
        used over the lifetime of the channel.
      interceptors: An optional sequence of interceptors that will be executed for
        any call executed with this channel.

    Returns:
      A Channel.
    """
    if options is None:
        options = ()
    # No channel credentials => insecure transport.
    return Channel(target, options, None, compression, interceptors)
|
||||
|
||||
|
||||
def secure_channel(
    target: str,
    credentials: grpc.ChannelCredentials,
    options: Optional[ChannelArgumentType] = None,
    compression: Optional[grpc.Compression] = None,
    interceptors: Optional[Sequence[ClientInterceptor]] = None,
):
    """Creates a secure asynchronous Channel to a server.

    Args:
      target: The server address.
      credentials: A ChannelCredentials instance.
      options: An optional list of key-value pairs (:term:`channel_arguments`
        in gRPC Core runtime) to configure the channel.
      compression: An optional value indicating the compression method to be
        used over the lifetime of the channel.
      interceptors: An optional sequence of interceptors that will be executed for
        any call executed with this channel.

    Returns:
      An aio.Channel.
    """
    if options is None:
        options = ()
    # Unwrap the Cython-level credentials object from the public wrapper.
    return Channel(
        target, options, credentials._credentials, compression, interceptors
    )
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,167 @@
|
||||
# Copyright 2020 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Implementation of the metadata abstraction for gRPC Asyncio Python."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections import OrderedDict
|
||||
from collections.abc import Collection
|
||||
from collections.abc import ItemsView
|
||||
from collections.abc import Iterable
|
||||
from collections.abc import Iterator
|
||||
from collections.abc import KeysView
|
||||
from collections.abc import Sequence
|
||||
from collections.abc import ValuesView
|
||||
from typing import Any, List, Optional, Tuple, Union
|
||||
|
||||
from typing_extensions import Self
|
||||
|
||||
MetadataKey = str
# Values may be text or, for "-bin" suffixed keys, raw bytes.
MetadataValue = Union[str, bytes]
# A single (key, value) metadatum.
MetadatumType = Tuple[MetadataKey, MetadataValue]
# Anything accepted where metadata is expected: a Metadata object or a
# plain sequence of (key, value) pairs.
MetadataType = Union["Metadata", Sequence[MetadatumType]]
|
||||
|
||||
|
||||
class Metadata(Collection):  # noqa: PLW1641
    """Metadata abstraction for the asynchronous calls and interceptors.

    Internally the metadata is a mapping from str -> List[str].

    Traits
    * A key may map to several values.
    * The per-key order of values is preserved.
    * Indexing by key yields the first value mapped to that key.
    * An immutable view of the data is supported.
    * The data can be partially mutated without rebuilding the object.
    """

    def __init__(self, *args: MetadatumType) -> None:
        self._metadata = OrderedDict()
        for key, value in args:
            self.add(key, value)

    @classmethod
    def from_tuple(cls, raw_metadata: tuple):
        # Note: We unintentionally support non-tuple arguments here. We plan
        # to emit a DeprecationWarning when a non-tuple type is used.
        return cls(*raw_metadata) if raw_metadata else cls()

    @classmethod
    def _create(
        cls,
        raw_metadata: Union[None, Self, Iterable[MetadatumType]],
    ) -> Self:
        # TODO(asheshvidyut): Make this method public and encourage people to use it instead
        # of `from_tuple` to create metadata from non-tuple types.
        if raw_metadata is None:
            return Metadata()
        if isinstance(raw_metadata, cls):
            return raw_metadata
        return cls(*raw_metadata) if raw_metadata else cls()

    def add(self, key: MetadataKey, value: MetadataValue) -> None:
        """Append *value* to the values already mapped to *key*."""
        self._metadata.setdefault(key, []).append(value)

    def __len__(self) -> int:
        """Return the total number of elements that there are in the metadata,
        including multiple values for the same key.
        """
        return sum(len(values) for values in self._metadata.values())

    def __getitem__(self, key: MetadataKey) -> MetadataValue:
        """When calling <metadata>[<key>], the first element of all those
        mapped for <key> is returned.
        """
        try:
            return self._metadata[key][0]
        except (ValueError, IndexError) as e:
            raise KeyError(f"{key!r}") from e

    def __setitem__(self, key: MetadataKey, value: MetadataValue) -> None:
        """Calling metadata[<key>] = <value>
        Maps <value> to the first instance of <key>.
        """
        if key in self:
            remaining = self.get_all(key)[1:]
            self._metadata[key] = [value, *remaining]
        else:
            self._metadata[key] = [value]

    def __delitem__(self, key: MetadataKey) -> None:
        """``del metadata[<key>]`` deletes the first mapping for <key>."""
        values = self.get_all(key)
        if not values:
            raise KeyError(repr(key))
        self._metadata[key] = values[1:]

    def delete_all(self, key: MetadataKey) -> None:
        """Delete all mappings for <key>."""
        del self._metadata[key]

    def __iter__(self) -> Iterator[Tuple[MetadataKey, MetadataValue]]:
        # Flatten the key -> [values] mapping into (key, value) pairs.
        for key, values in self._metadata.items():
            for value in values:
                yield (key, value)

    def keys(self) -> KeysView:
        return KeysView(self._metadata)

    def values(self) -> ValuesView:
        return ValuesView(self._metadata)

    def items(self) -> ItemsView:
        return ItemsView(self._metadata)

    def get(
        self, key: MetadataKey, default: Optional[MetadataValue] = None
    ) -> Optional[MetadataValue]:
        """Return the first value mapped to *key*, or *default* if absent."""
        try:
            return self[key]
        except KeyError:
            return default

    def get_all(self, key: MetadataKey) -> List[MetadataValue]:
        """For compatibility with other Metadata abstraction objects (like in Java),
        this would return all items under the desired <key>.
        """
        return self._metadata.get(key, [])

    def set_all(self, key: MetadataKey, values: List[MetadataValue]) -> None:
        """Replace every value mapped to *key* with *values*."""
        self._metadata[key] = values

    def __contains__(self, key: MetadataKey) -> bool:
        return key in self._metadata

    def __eq__(self, other: object) -> bool:
        if isinstance(other, self.__class__):
            return self._metadata == other._metadata
        if isinstance(other, tuple):
            return tuple(self) == other
        return NotImplemented  # pytype: disable=bad-return-type

    def __add__(self, other: Any) -> "Metadata":
        if isinstance(other, self.__class__):
            combined = tuple(self) + tuple(other)
        elif isinstance(other, tuple):
            combined = tuple(self) + other
        else:
            return NotImplemented  # pytype: disable=bad-return-type
        return Metadata(*combined)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({tuple(self)!r})"
|
||||
@@ -0,0 +1,241 @@
|
||||
# Copyright 2019 The gRPC Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Server-side implementation of gRPC Asyncio Python."""
|
||||
|
||||
from concurrent.futures import Executor
|
||||
from typing import Any, Dict, Optional, Sequence
|
||||
|
||||
import grpc
|
||||
from grpc import _common
|
||||
from grpc import _compression
|
||||
from grpc._cython import cygrpc
|
||||
|
||||
from . import _base_server
|
||||
from ._interceptor import ServerInterceptor
|
||||
from ._typing import ChannelArgumentType
|
||||
|
||||
|
||||
def _augment_channel_arguments(
    base_options: ChannelArgumentType, compression: Optional[grpc.Compression]
):
    """Append the compression channel argument (if any) to *base_options*."""
    extra_options = _compression.create_channel_option(compression)
    return tuple(base_options) + extra_options
|
||||
|
||||
|
||||
class Server(_base_server.Server):
    """Serves RPCs."""

    def __init__(
        self,
        thread_pool: Optional[Executor],
        generic_handlers: Optional[Sequence[grpc.GenericRpcHandler]],
        interceptors: Optional[Sequence[Any]],
        options: ChannelArgumentType,
        maximum_concurrent_rpcs: Optional[int],
        compression: Optional[grpc.Compression],
    ):
        self._loop = cygrpc.get_working_loop()
        if interceptors:
            invalid_interceptors = [
                interceptor
                for interceptor in interceptors
                if not isinstance(interceptor, ServerInterceptor)
            ]
            if invalid_interceptors:
                # Fix: the message previously lacked the f-prefix, so the
                # placeholder was emitted verbatim instead of being
                # interpolated, and the two literals ran together without a
                # separating space.
                error_msg = (
                    "Interceptor must be ServerInterceptor, "
                    f"the following are invalid: {invalid_interceptors}"
                )
                raise ValueError(error_msg)
        self._server = cygrpc.AioServer(
            self._loop,
            thread_pool,
            generic_handlers,
            interceptors,
            _augment_channel_arguments(options, compression),
            maximum_concurrent_rpcs,
        )

    def add_generic_rpc_handlers(
        self, generic_rpc_handlers: Sequence[grpc.GenericRpcHandler]
    ) -> None:
        """Registers GenericRpcHandlers with this Server.

        This method is only safe to call before the server is started.

        Args:
            generic_rpc_handlers: A sequence of GenericRpcHandlers that will be
              used to service RPCs.
        """
        self._server.add_generic_rpc_handlers(generic_rpc_handlers)

    def add_registered_method_handlers(
        self,
        service_name: str,
        method_handlers: Dict[str, grpc.RpcMethodHandler],
    ) -> None:
        # TODO(xuanwn): Implement this for AsyncIO.
        pass

    def add_insecure_port(self, address: str) -> int:
        """Opens an insecure port for accepting RPCs.

        This method may only be called before starting the server.

        Args:
            address: The address for which to open a port. If the port is 0,
              or not specified in the address, then the gRPC runtime will choose a port.

        Returns:
            An integer port on which the server will accept RPC requests.
        """
        return _common.validate_port_binding_result(
            address, self._server.add_insecure_port(_common.encode(address))
        )

    def add_secure_port(
        self, address: str, server_credentials: grpc.ServerCredentials
    ) -> int:
        """Opens a secure port for accepting RPCs.

        This method may only be called before starting the server.

        Args:
            address: The address for which to open a port.
              if the port is 0, or not specified in the address, then the gRPC
              runtime will choose a port.
            server_credentials: A ServerCredentials object.

        Returns:
            An integer port on which the server will accept RPC requests.
        """
        return _common.validate_port_binding_result(
            address,
            self._server.add_secure_port(
                _common.encode(address), server_credentials
            ),
        )

    async def start(self) -> None:
        """Starts this Server.

        This method may only be called once. (i.e. it is not idempotent).
        """
        await self._server.start()

    async def stop(self, grace: Optional[float]) -> None:
        """Stops this Server.

        This method immediately stops the server from servicing new RPCs in
        all cases.

        If a grace period is specified, this method waits until all active
        RPCs are finished or until the grace period is reached. RPCs that haven't
        been terminated within the grace period are aborted.
        If a grace period is not specified (by passing None for grace), all
        existing RPCs are aborted immediately and this method blocks until
        the last RPC handler terminates.

        This method is idempotent and may be called at any time. Passing a
        smaller grace value in a subsequent call will have the effect of
        stopping the Server sooner (passing None will have the effect of
        stopping the server immediately). Passing a larger grace value in a
        subsequent call will not have the effect of stopping the server later
        (i.e. the most restrictive grace value is used).

        Args:
            grace: A duration of time in seconds or None.
        """
        await self._server.shutdown(grace)

    async def wait_for_termination(
        self, timeout: Optional[float] = None
    ) -> bool:
        """Block current coroutine until the server stops.

        This is an EXPERIMENTAL API.

        The wait will not consume computational resources during blocking, and
        it will block until one of the two following conditions are met:

        1) The server is stopped or terminated;
        2) A timeout occurs if timeout is not `None`.

        The timeout argument works in the same way as `threading.Event.wait()`.
        https://docs.python.org/3/library/threading.html#threading.Event.wait

        Args:
            timeout: A floating point number specifying a timeout for the
              operation in seconds.

        Returns:
            A bool indicates if the operation times out.
        """
        return await self._server.wait_for_termination(timeout)

    def __del__(self):
        """Schedules a graceful shutdown in current event loop.

        The Cython AioServer doesn't hold a ref-count to this class. It should
        be safe to slightly extend the underlying Cython object's life span.
        """
        # hasattr guard: __init__ may have raised before _server was assigned.
        if hasattr(self, "_server") and self._server.is_running():
            cygrpc.schedule_coro_threadsafe(
                self._server.shutdown(None),
                self._loop,
            )
|
||||
|
||||
|
||||
def server(
    migration_thread_pool: Optional[Executor] = None,
    handlers: Optional[Sequence[grpc.GenericRpcHandler]] = None,
    interceptors: Optional[Sequence[Any]] = None,
    options: Optional[ChannelArgumentType] = None,
    maximum_concurrent_rpcs: Optional[int] = None,
    compression: Optional[grpc.Compression] = None,
):
    """Creates a Server with which RPCs can be serviced.

    Args:
        migration_thread_pool: A futures.ThreadPoolExecutor to be used by the
          Server to execute non-AsyncIO RPC handlers for migration purpose.
        handlers: An optional list of GenericRpcHandlers used for executing RPCs.
          More handlers may be added by calling add_generic_rpc_handlers any time
          before the server is started.
        interceptors: An optional list of ServerInterceptor objects that observe
          and optionally manipulate the incoming RPCs before handing them over to
          handlers. The interceptors are given control in the order they are
          specified. This is an EXPERIMENTAL API.
        options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC runtime)
          to configure the channel.
        maximum_concurrent_rpcs: The maximum number of concurrent RPCs this server
          will service before returning RESOURCE_EXHAUSTED status, or None to
          indicate no limit.
        compression: An element of grpc.Compression, e.g.
          grpc.Compression.Gzip. This compression algorithm will be used for the
          lifetime of the server unless overridden by set_compression.

    Returns:
        A Server object.
    """
    # The Server constructor requires concrete sequences; substitute an
    # empty tuple for each argument the caller omitted.
    return Server(
        migration_thread_pool,
        handlers if handlers is not None else (),
        interceptors if interceptors is not None else (),
        options if options is not None else (),
        maximum_concurrent_rpcs,
        compression,
    )
|
||||
@@ -0,0 +1,46 @@
|
||||
# Copyright 2019 The gRPC Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Common types for gRPC Async API"""
|
||||
|
||||
from typing import (
|
||||
Any,
|
||||
AsyncIterable,
|
||||
Callable,
|
||||
Iterable,
|
||||
Sequence,
|
||||
Tuple,
|
||||
TypeVar,
|
||||
Union,
|
||||
)
|
||||
|
||||
from grpc._cython.cygrpc import EOF
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from ._metadata import Metadata
|
||||
from ._metadata import MetadataKey
|
||||
from ._metadata import MetadataType
|
||||
from ._metadata import MetadataValue
|
||||
from ._metadata import MetadatumType
|
||||
|
||||
# pylint: enable=unused-import
|
||||
|
||||
# Generic request/response payload types used across the aio stubs.
RequestType = TypeVar("RequestType")
ResponseType = TypeVar("ResponseType")
# Functions converting a message object to/from wire bytes.
SerializingFunction = Callable[[Any], bytes]
DeserializingFunction = Callable[[bytes], Any]
# Channel arguments are (name, value) pairs handed to gRPC Core.
ChannelArgumentType = Sequence[Tuple[str, Any]]
# Concrete type of the cygrpc EOF sentinel object.
EOFType = type(EOF)
# Callback invoked with the call object once an RPC finishes.
DoneCallbackType = Callable[[Any], None]
# Request streams may be sync or async iterables; responses are async only.
RequestIterableType = Union[Iterable[Any], AsyncIterable[Any]]
ResponseIterableType = AsyncIterable[Any]
|
||||
@@ -0,0 +1,22 @@
|
||||
# Copyright 2019 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Internal utilities used by the gRPC Aio module."""
|
||||
import time
|
||||
from typing import Optional
|
||||
|
||||
|
||||
def _timeout_to_deadline(timeout: Optional[float]) -> Optional[float]:
|
||||
if timeout is None:
|
||||
return None
|
||||
return time.time() + timeout
|
||||
@@ -0,0 +1,13 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,54 @@
|
||||
# Copyright 2017 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""API metadata conversion utilities."""
|
||||
|
||||
import collections
|
||||
|
||||
_Metadatum = collections.namedtuple(
|
||||
"_Metadatum",
|
||||
(
|
||||
"key",
|
||||
"value",
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def _beta_metadatum(key, value):
    """Encode a (key, value) pair into the bytes form used by the beta API."""
    if not isinstance(key, bytes):
        key = key.encode("ascii")
    if not isinstance(value, bytes):
        value = value.encode("ascii")
    return _Metadatum(key, value)
|
||||
|
||||
|
||||
def _metadatum(beta_key, beta_value):
    """Decode a beta (key, value) pair back into post-beta form.

    Values under "-bin" suffixed keys stay as bytes; all other byte values
    are decoded to text.
    """
    key = beta_key if isinstance(beta_key, str) else beta_key.decode("utf8")
    if isinstance(beta_value, str) or key.endswith("-bin"):
        value = beta_value
    else:
        value = beta_value.decode("utf8")
    return _Metadatum(key, value)
|
||||
|
||||
|
||||
def beta(metadata):
    """Convert post-beta metadata into the beta bytes representation."""
    if metadata is None:
        return ()
    return tuple(_beta_metadatum(k, v) for k, v in metadata)
|
||||
|
||||
|
||||
def unbeta(beta_metadata):
    """Convert beta metadata back into the post-beta representation."""
    if beta_metadata is None:
        return ()
    return tuple(_metadatum(k, v) for k, v in beta_metadata)
|
||||
@@ -0,0 +1,457 @@
|
||||
# Copyright 2016 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Translates gRPC's server-side API into gRPC's server-side Beta API."""
|
||||
|
||||
import collections
|
||||
import threading
|
||||
|
||||
import grpc
|
||||
from grpc import _common
|
||||
from grpc.beta import _metadata
|
||||
from grpc.beta import interfaces
|
||||
from grpc.framework.common import cardinality
|
||||
from grpc.framework.common import style
|
||||
from grpc.framework.foundation import abandonment
|
||||
from grpc.framework.foundation import logging_pool
|
||||
from grpc.framework.foundation import stream
|
||||
from grpc.framework.interfaces.face import face
|
||||
|
||||
# pylint: disable=too-many-return-statements
|
||||
|
||||
_DEFAULT_POOL_SIZE = 8
|
||||
|
||||
|
||||
class _ServerProtocolContext(interfaces.GRPCServicerContext):
    """Beta-API protocol context backed by a post-beta servicer context."""

    def __init__(self, servicer_context):
        self._servicer_context = servicer_context

    def peer(self):
        """Return the identity of the calling peer."""
        return self._servicer_context.peer()

    def disable_next_response_compression(self):
        # Intentionally a no-op.
        pass  # TODO(https://github.com/grpc/grpc/issues/4078): design, implement.
|
||||
|
||||
|
||||
class _FaceServicerContext(face.ServicerContext):
    """Adapts a post-beta ServicerContext to the beta face.ServicerContext API."""

    def __init__(self, servicer_context):
        self._servicer_context = servicer_context

    def is_active(self):
        """Report whether the underlying RPC is still in progress."""
        return self._servicer_context.is_active()

    def time_remaining(self):
        """Return the seconds remaining before the RPC deadline."""
        return self._servicer_context.time_remaining()

    def add_abortion_callback(self, abortion_callback):
        message = "add_abortion_callback no longer supported server-side!"
        raise NotImplementedError(message)

    def cancel(self):
        """Cancel the underlying RPC."""
        self._servicer_context.cancel()

    def protocol_context(self):
        """Expose the protocol-level context wrapper."""
        return _ServerProtocolContext(self._servicer_context)

    def invocation_metadata(self):
        # Convert post-beta metadata into the beta bytes form.
        return _metadata.beta(self._servicer_context.invocation_metadata())

    def initial_metadata(self, initial_metadata):
        # Beta metadata must be un-beta'd before handing to the new API.
        self._servicer_context.send_initial_metadata(
            _metadata.unbeta(initial_metadata)
        )

    def terminal_metadata(self, terminal_metadata):
        self._servicer_context.set_terminal_metadata(
            _metadata.unbeta(terminal_metadata)
        )

    def code(self, code):
        self._servicer_context.set_code(code)

    def details(self, details):
        self._servicer_context.set_details(details)
|
||||
|
||||
|
||||
def _adapt_unary_request_inline(unary_request_inline):
    """Wrap an inline unary-request handler so it receives a beta context."""

    def adaptation(request, servicer_context):
        face_context = _FaceServicerContext(servicer_context)
        return unary_request_inline(request, face_context)

    return adaptation
|
||||
|
||||
|
||||
def _adapt_stream_request_inline(stream_request_inline):
    """Wrap an inline stream-request handler so it receives a beta context."""

    def adaptation(request_iterator, servicer_context):
        face_context = _FaceServicerContext(servicer_context)
        return stream_request_inline(request_iterator, face_context)

    return adaptation
|
||||
|
||||
|
||||
class _Callback(stream.Consumer):
    """Thread-safe buffer collecting values produced by event-style handlers.

    Producers call consume/terminate/cancel; consumers block in
    draw_one_value/draw_all_values until data or a terminal state arrives.
    """

    def __init__(self):
        self._condition = threading.Condition()
        self._values = []
        self._terminated = False
        self._cancelled = False

    def consume(self, value):
        """Buffer one value and wake any waiting reader."""
        with self._condition:
            self._values.append(value)
            self._condition.notify_all()

    def terminate(self):
        """Mark the stream complete and wake any waiting reader."""
        with self._condition:
            self._terminated = True
            self._condition.notify_all()

    def consume_and_terminate(self, value):
        """Buffer a final value, mark the stream complete, and wake readers."""
        with self._condition:
            self._values.append(value)
            self._terminated = True
            self._condition.notify_all()

    def cancel(self):
        """Mark the stream cancelled and wake any waiting reader."""
        with self._condition:
            self._cancelled = True
            self._condition.notify_all()

    def draw_one_value(self):
        """Block for the next value; None signals termination.

        Raises:
            abandonment.Abandoned: If the stream was cancelled.
        """
        with self._condition:
            while True:
                # Cancellation wins over buffered values and termination.
                if self._cancelled:
                    raise abandonment.Abandoned()
                if self._values:
                    return self._values.pop(0)
                if self._terminated:
                    return None
                self._condition.wait()

    def draw_all_values(self):
        """Block until termination, then return every buffered value.

        Raises:
            abandonment.Abandoned: If the stream was cancelled.
        """
        with self._condition:
            while True:
                if self._cancelled:
                    raise abandonment.Abandoned()
                if self._terminated:
                    all_values = tuple(self._values)
                    # The buffer is dropped after a full drain.
                    self._values = None
                    return all_values
                self._condition.wait()
|
||||
|
||||
|
||||
def _run_request_pipe_thread(
|
||||
request_iterator, request_consumer, servicer_context
|
||||
):
|
||||
thread_joined = threading.Event()
|
||||
|
||||
def pipe_requests():
|
||||
for request in request_iterator:
|
||||
if not servicer_context.is_active() or thread_joined.is_set():
|
||||
return
|
||||
request_consumer.consume(request)
|
||||
if not servicer_context.is_active() or thread_joined.is_set():
|
||||
return
|
||||
request_consumer.terminate()
|
||||
|
||||
request_pipe_thread = threading.Thread(target=pipe_requests)
|
||||
request_pipe_thread.daemon = True
|
||||
request_pipe_thread.start()
|
||||
|
||||
|
||||
def _adapt_unary_unary_event(unary_unary_event):
    """Adapt an event-style unary-unary handler to an inline handler."""

    def adaptation(request, servicer_context):
        buffer = _Callback()
        # If the callback can't be registered the RPC is already dead.
        if not servicer_context.add_callback(buffer.cancel):
            raise abandonment.Abandoned()
        unary_unary_event(
            request,
            buffer.consume_and_terminate,
            _FaceServicerContext(servicer_context),
        )
        return buffer.draw_all_values()[0]

    return adaptation
|
||||
|
||||
|
||||
def _adapt_unary_stream_event(unary_stream_event):
    """Adapt an event-style unary-stream handler to an inline generator."""

    def adaptation(request, servicer_context):
        buffer = _Callback()
        # If the callback can't be registered the RPC is already dead.
        if not servicer_context.add_callback(buffer.cancel):
            raise abandonment.Abandoned()
        unary_stream_event(
            request, buffer, _FaceServicerContext(servicer_context)
        )
        while True:
            response = buffer.draw_one_value()
            # None marks end-of-stream.
            if response is None:
                return
            yield response

    return adaptation
|
||||
|
||||
|
||||
def _adapt_stream_unary_event(stream_unary_event):
    """Adapt an event-style stream-unary handler to an inline handler."""

    def adaptation(request_iterator, servicer_context):
        buffer = _Callback()
        # If the callback can't be registered the RPC is already dead.
        if not servicer_context.add_callback(buffer.cancel):
            raise abandonment.Abandoned()
        request_consumer = stream_unary_event(
            buffer.consume_and_terminate,
            _FaceServicerContext(servicer_context),
        )
        _run_request_pipe_thread(
            request_iterator, request_consumer, servicer_context
        )
        return buffer.draw_all_values()[0]

    return adaptation
|
||||
|
||||
|
||||
def _adapt_stream_stream_event(stream_stream_event):
    """Adapt an event-style stream-stream handler to an inline generator."""

    def adaptation(request_iterator, servicer_context):
        buffer = _Callback()
        # If the callback can't be registered the RPC is already dead.
        if not servicer_context.add_callback(buffer.cancel):
            raise abandonment.Abandoned()
        request_consumer = stream_stream_event(
            buffer, _FaceServicerContext(servicer_context)
        )
        _run_request_pipe_thread(
            request_iterator, request_consumer, servicer_context
        )
        while True:
            response = buffer.draw_one_value()
            # None marks end-of-stream.
            if response is None:
                return
            yield response

    return adaptation
|
||||
|
||||
|
||||
class _SimpleMethodHandler(
    collections.namedtuple(
        "_MethodHandler",
        (
            "request_streaming",
            "response_streaming",
            "request_deserializer",
            "response_serializer",
            "unary_unary",
            "unary_stream",
            "stream_unary",
            "stream_stream",
        ),
    ),
    grpc.RpcMethodHandler,
):
    """Immutable grpc.RpcMethodHandler backed by a namedtuple.

    Exactly one of the four behavior fields is non-None for any instance.
    """
|
||||
|
||||
|
||||
def _simple_method_handler(  # noqa: PLR0911
    implementation, request_deserializer, response_serializer
):
    """Build a grpc.RpcMethodHandler for a beta method *implementation*.

    Args:
        implementation: A face MethodImplementation (INLINE or EVENT style).
        request_deserializer: Deserializer for request messages, or None.
        response_serializer: Serializer for response messages, or None.

    Returns:
        A _SimpleMethodHandler with exactly one behavior field populated.

    Raises:
        ValueError: If the implementation's style or cardinality is
            unrecognized.
    """

    def _handler(request_streaming, response_streaming, **behavior):
        # Exactly one of unary_unary/unary_stream/stream_unary/stream_stream
        # is supplied via **behavior; the remaining fields default to None.
        return _SimpleMethodHandler(
            request_streaming=request_streaming,
            response_streaming=response_streaming,
            request_deserializer=request_deserializer,
            response_serializer=response_serializer,
            unary_unary=behavior.get("unary_unary"),
            unary_stream=behavior.get("unary_stream"),
            stream_unary=behavior.get("stream_unary"),
            stream_stream=behavior.get("stream_stream"),
        )

    if implementation.style is style.Service.INLINE:
        if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY:
            return _handler(
                False,
                False,
                unary_unary=_adapt_unary_request_inline(
                    implementation.unary_unary_inline
                ),
            )
        if implementation.cardinality is cardinality.Cardinality.UNARY_STREAM:
            return _handler(
                False,
                True,
                unary_stream=_adapt_unary_request_inline(
                    implementation.unary_stream_inline
                ),
            )
        if implementation.cardinality is cardinality.Cardinality.STREAM_UNARY:
            return _handler(
                True,
                False,
                stream_unary=_adapt_stream_request_inline(
                    implementation.stream_unary_inline
                ),
            )
        if implementation.cardinality is cardinality.Cardinality.STREAM_STREAM:
            return _handler(
                True,
                True,
                stream_stream=_adapt_stream_request_inline(
                    implementation.stream_stream_inline
                ),
            )
    elif implementation.style is style.Service.EVENT:
        if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY:
            return _handler(
                False,
                False,
                unary_unary=_adapt_unary_unary_event(
                    implementation.unary_unary_event
                ),
            )
        if implementation.cardinality is cardinality.Cardinality.UNARY_STREAM:
            return _handler(
                False,
                True,
                unary_stream=_adapt_unary_stream_event(
                    implementation.unary_stream_event
                ),
            )
        if implementation.cardinality is cardinality.Cardinality.STREAM_UNARY:
            return _handler(
                True,
                False,
                stream_unary=_adapt_stream_unary_event(
                    implementation.stream_unary_event
                ),
            )
        if implementation.cardinality is cardinality.Cardinality.STREAM_STREAM:
            return _handler(
                True,
                True,
                stream_stream=_adapt_stream_stream_event(
                    implementation.stream_stream_event
                ),
            )
    # Improvement: the original bare ValueError() carried no diagnostic.
    error_msg = (
        "Unrecognized method implementation style/cardinality: "
        f"{implementation.style}/{implementation.cardinality}"
    )
    raise ValueError(error_msg)
|
||||
|
||||
|
||||
def _flatten_method_pair_map(method_pair_map):
    """Flattens a (group, method)-pair-keyed map into a fully-qualified-name map.

    Args:
      method_pair_map: A dict keyed by (group, method) two-tuples, or None.

    Returns:
      A dict with the same values keyed by fully-qualified method name.
    """
    return {
        _common.fully_qualified_method(group, method): behavior
        for (group, method), behavior in (method_pair_map or {}).items()
    }
|
||||
|
||||
|
||||
class _GenericRpcHandler(grpc.GenericRpcHandler):
    """Adapts beta-API method implementations to grpc.GenericRpcHandler."""

    def __init__(
        self,
        method_implementations,
        multi_method_implementation,
        request_deserializers,
        response_serializers,
    ):
        # All maps are keyed by fully-qualified method name for O(1) lookup
        # in service().
        self._method_implementations = _flatten_method_pair_map(
            method_implementations
        )
        self._request_deserializers = _flatten_method_pair_map(
            request_deserializers
        )
        self._response_serializers = _flatten_method_pair_map(
            response_serializers
        )
        self._multi_method_implementation = multi_method_implementation

    def service(self, handler_call_details):
        """Returns a method handler for the RPC described by handler_call_details."""
        # Prefer a method-specific implementation when one was registered.
        method_implementation = self._method_implementations.get(
            handler_call_details.method
        )
        if method_implementation is not None:
            return _simple_method_handler(
                method_implementation,
                self._request_deserializers.get(handler_call_details.method),
                self._response_serializers.get(handler_call_details.method),
            )
        if self._multi_method_implementation is None:
            return None
        try:
            return None  # TODO(nathaniel): call the multimethod.
        except face.NoSuchMethodError:
            return None
|
||||
|
||||
|
||||
class _Server(interfaces.Server):
    """Wraps a grpc.Server to present the beta interfaces.Server API."""

    def __init__(self, grpc_server):
        self._grpc_server = grpc_server

    def add_insecure_port(self, address):
        return self._grpc_server.add_insecure_port(address)

    def add_secure_port(self, address, server_credentials):
        return self._grpc_server.add_secure_port(address, server_credentials)

    def start(self):
        self._grpc_server.start()

    def stop(self, grace):
        return self._grpc_server.stop(grace)

    def __enter__(self):
        # Context-manager entry starts the underlying server.
        self._grpc_server.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Immediate (no-grace) stop on exit; exceptions are not suppressed.
        self._grpc_server.stop(None)
        return False
|
||||
|
||||
|
||||
def server(
    service_implementations,
    multi_method_implementation,
    request_deserializers,
    response_serializers,
    thread_pool,
    thread_pool_size,
):
    """Creates an interfaces.Server backed by a grpc.server.

    Args:
      service_implementations: A (group, method)-keyed map of implementations.
      multi_method_implementation: A fallback multi-method implementation or None.
      request_deserializers: A (group, method)-keyed map of deserializers.
      response_serializers: A (group, method)-keyed map of serializers.
      thread_pool: A thread pool to use, or None to create one.
      thread_pool_size: Size for a created pool; ignored if thread_pool is given.

    Returns:
      An interfaces.Server.
    """
    generic_rpc_handler = _GenericRpcHandler(
        service_implementations,
        multi_method_implementation,
        request_deserializers,
        response_serializers,
    )
    if thread_pool is None:
        effective_thread_pool = logging_pool.pool(
            _DEFAULT_POOL_SIZE if thread_pool_size is None else thread_pool_size
        )
    else:
        effective_thread_pool = thread_pool
    return _Server(
        grpc.server(effective_thread_pool, handlers=(generic_rpc_handler,))
    )
|
||||
@@ -0,0 +1,345 @@
|
||||
# Copyright 2015-2016 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Entry points into the Beta API of gRPC Python."""
|
||||
|
||||
# threading is referenced from specification in this module.
|
||||
import threading # pylint: disable=unused-import
|
||||
|
||||
# interfaces, cardinality, and face are referenced from specification in this
|
||||
# module.
|
||||
import grpc
|
||||
from grpc import _auth
|
||||
from grpc.beta import _client_adaptations
|
||||
from grpc.beta import _metadata
|
||||
from grpc.beta import _server_adaptations
|
||||
from grpc.beta import interfaces # pylint: disable=unused-import
|
||||
from grpc.framework.common import cardinality # pylint: disable=unused-import
|
||||
from grpc.framework.interfaces.face import face # pylint: disable=unused-import
|
||||
|
||||
# pylint: disable=too-many-arguments
|
||||
|
||||
# Re-exported aliases: the beta API surfaces these names from the grpc package.
ChannelCredentials = grpc.ChannelCredentials
ssl_channel_credentials = grpc.ssl_channel_credentials
CallCredentials = grpc.CallCredentials
|
||||
|
||||
|
||||
def metadata_call_credentials(metadata_plugin, name=None):
    """Wraps a beta-API metadata plugin as grpc.CallCredentials.

    Args:
      metadata_plugin: A callable accepting a context and a beta-style callback.
      name: An optional name for the plugin.

    Returns:
      A CallCredentials object for use in RPC invocation.
    """

    def plugin(context, callback):
        def wrapped_callback(beta_metadata, error):
            # Convert beta-style metadata to the current metadata format.
            callback(_metadata.unbeta(beta_metadata), error)

        metadata_plugin(context, wrapped_callback)

    return grpc.metadata_call_credentials(plugin, name=name)
|
||||
|
||||
|
||||
def google_call_credentials(credentials):
    """Construct CallCredentials from GoogleCredentials.

    Args:
      credentials: A GoogleCredentials object from the oauth2client library.

    Returns:
      A CallCredentials object for use in a GRPCCallOptions object.
    """
    return metadata_call_credentials(_auth.GoogleCallCredentials(credentials))
|
||||
|
||||
|
||||
# Re-exported aliases: the beta API surfaces these names from the grpc package.
access_token_call_credentials = grpc.access_token_call_credentials
composite_call_credentials = grpc.composite_call_credentials
composite_channel_credentials = grpc.composite_channel_credentials
|
||||
|
||||
|
||||
class Channel(object):
    """A channel to a remote host through which RPCs may be conducted.

    Only the "subscribe" and "unsubscribe" methods are supported for application
    use. This class' instance constructor and all other attributes are
    unsupported.
    """

    def __init__(self, channel):
        # The wrapped grpc.Channel to which connectivity calls are delegated.
        self._channel = channel

    def subscribe(self, callback, try_to_connect=None):
        """Subscribes to this Channel's connectivity.

        Args:
          callback: A callable to be invoked and passed an
            interfaces.ChannelConnectivity identifying this Channel's connectivity.
            The callable will be invoked immediately upon subscription and again for
            every change to this Channel's connectivity thereafter until it is
            unsubscribed.
          try_to_connect: A boolean indicating whether or not this Channel should
            attempt to connect if it is not already connected and ready to conduct
            RPCs.
        """
        self._channel.subscribe(callback, try_to_connect=try_to_connect)

    def unsubscribe(self, callback):
        """Unsubscribes a callback from this Channel's connectivity.

        Args:
          callback: A callable previously registered with this Channel from having
            been passed to its "subscribe" method.
        """
        self._channel.unsubscribe(callback)
|
||||
|
||||
|
||||
def insecure_channel(host, port):
    """Creates an insecure Channel to a remote host.

    Args:
      host: The name of the remote host to which to connect.
      port: The port of the remote host to which to connect.
        If None only the 'host' part will be used.

    Returns:
      A Channel to the remote host through which RPCs may be conducted.
    """
    channel = grpc.insecure_channel(
        host if port is None else "%s:%d" % (host, port)
    )
    return Channel(channel)
|
||||
|
||||
|
||||
def secure_channel(host, port, channel_credentials):
    """Creates a secure Channel to a remote host.

    Args:
      host: The name of the remote host to which to connect.
      port: The port of the remote host to which to connect.
        If None only the 'host' part will be used.
      channel_credentials: A ChannelCredentials.

    Returns:
      A secure Channel to the remote host through which RPCs may be conducted.
    """
    channel = grpc.secure_channel(
        host if port is None else "%s:%d" % (host, port), channel_credentials
    )
    return Channel(channel)
|
||||
|
||||
|
||||
class StubOptions(object):
    """A value encapsulating the various options for creation of a Stub.

    This class and its instances have no supported interface - it exists to define
    the type of its instances and its instances exist to be passed to other
    functions.
    """

    def __init__(
        self,
        host,
        request_serializers,
        response_deserializers,
        metadata_transformer,
        thread_pool,
        thread_pool_size,
    ):
        self.host = host
        self.request_serializers = request_serializers
        self.response_deserializers = response_deserializers
        self.metadata_transformer = metadata_transformer
        self.thread_pool = thread_pool
        self.thread_pool_size = thread_pool_size
|
||||
|
||||
|
||||
# Default options used when a caller passes options=None at stub creation.
_EMPTY_STUB_OPTIONS = StubOptions(None, None, None, None, None, None)
|
||||
|
||||
|
||||
def stub_options(
    host=None,
    request_serializers=None,
    response_deserializers=None,
    metadata_transformer=None,
    thread_pool=None,
    thread_pool_size=None,
):
    """Creates a StubOptions value to be passed at stub creation.

    All parameters are optional and should always be passed by keyword.

    Args:
      host: A host string to set on RPC calls.
      request_serializers: A dictionary from service name-method name pair to
        request serialization behavior.
      response_deserializers: A dictionary from service name-method name pair to
        response deserialization behavior.
      metadata_transformer: A callable that given a metadata object produces
        another metadata object to be used in the underlying communication on the
        wire.
      thread_pool: A thread pool to use in stubs.
      thread_pool_size: The size of thread pool to create for use in stubs;
        ignored if thread_pool has been passed.

    Returns:
      A StubOptions value created from the passed parameters.
    """
    return StubOptions(
        host,
        request_serializers,
        response_deserializers,
        metadata_transformer,
        thread_pool,
        thread_pool_size,
    )
|
||||
|
||||
|
||||
def generic_stub(channel, options=None):
    """Creates a face.GenericStub on which RPCs can be made.

    Args:
      channel: A Channel for use by the created stub.
      options: A StubOptions customizing the created stub.

    Returns:
      A face.GenericStub on which RPCs can be made.
    """
    effective_options = _EMPTY_STUB_OPTIONS if options is None else options
    return _client_adaptations.generic_stub(
        channel._channel,  # pylint: disable=protected-access
        effective_options.host,
        effective_options.metadata_transformer,
        effective_options.request_serializers,
        effective_options.response_deserializers,
    )
|
||||
|
||||
|
||||
def dynamic_stub(channel, service, cardinalities, options=None):
    """Creates a face.DynamicStub with which RPCs can be invoked.

    Args:
      channel: A Channel for the returned face.DynamicStub to use.
      service: The package-qualified full name of the service.
      cardinalities: A dictionary from RPC method name to cardinality.Cardinality
        value identifying the cardinality of the RPC method.
      options: An optional StubOptions value further customizing the functionality
        of the returned face.DynamicStub.

    Returns:
      A face.DynamicStub with which RPCs can be invoked.
    """
    effective_options = _EMPTY_STUB_OPTIONS if options is None else options
    return _client_adaptations.dynamic_stub(
        channel._channel,  # pylint: disable=protected-access
        service,
        cardinalities,
        effective_options.host,
        effective_options.metadata_transformer,
        effective_options.request_serializers,
        effective_options.response_deserializers,
    )
|
||||
|
||||
|
||||
# Re-exported aliases: the beta API surfaces these names from the grpc package.
ServerCredentials = grpc.ServerCredentials
ssl_server_credentials = grpc.ssl_server_credentials
|
||||
|
||||
|
||||
class ServerOptions(object):
    """A value encapsulating the various options for creation of a Server.

    This class and its instances have no supported interface - it exists to define
    the type of its instances and its instances exist to be passed to other
    functions.
    """

    def __init__(
        self,
        multi_method_implementation,
        request_deserializers,
        response_serializers,
        thread_pool,
        thread_pool_size,
        default_timeout,
        maximum_timeout,
    ):
        self.multi_method_implementation = multi_method_implementation
        self.request_deserializers = request_deserializers
        self.response_serializers = response_serializers
        self.thread_pool = thread_pool
        self.thread_pool_size = thread_pool_size
        self.default_timeout = default_timeout
        self.maximum_timeout = maximum_timeout
|
||||
|
||||
|
||||
# Default options used when a caller passes options=None at server creation.
_EMPTY_SERVER_OPTIONS = ServerOptions(None, None, None, None, None, None, None)
|
||||
|
||||
|
||||
def server_options(
    multi_method_implementation=None,
    request_deserializers=None,
    response_serializers=None,
    thread_pool=None,
    thread_pool_size=None,
    default_timeout=None,
    maximum_timeout=None,
):
    """Creates a ServerOptions value to be passed at server creation.

    All parameters are optional and should always be passed by keyword.

    Args:
      multi_method_implementation: A face.MultiMethodImplementation to be called
        to service an RPC if the server has no specific method implementation for
        the name of the RPC for which service was requested.
      request_deserializers: A dictionary from service name-method name pair to
        request deserialization behavior.
      response_serializers: A dictionary from service name-method name pair to
        response serialization behavior.
      thread_pool: A thread pool to use in stubs.
      thread_pool_size: The size of thread pool to create for use in stubs;
        ignored if thread_pool has been passed.
      default_timeout: A duration in seconds to allow for RPC service when
        servicing RPCs that did not include a timeout value when invoked.
      maximum_timeout: A duration in seconds to allow for RPC service when
        servicing RPCs no matter what timeout value was passed when the RPC was
        invoked.

    Returns:
      A ServerOptions value created from the passed parameters.
    """
    return ServerOptions(
        multi_method_implementation,
        request_deserializers,
        response_serializers,
        thread_pool,
        thread_pool_size,
        default_timeout,
        maximum_timeout,
    )
|
||||
|
||||
|
||||
def server(service_implementations, options=None):
    """Creates an interfaces.Server with which RPCs can be serviced.

    Args:
      service_implementations: A dictionary from service name-method name pair to
        face.MethodImplementation.
      options: An optional ServerOptions value further customizing the
        functionality of the returned Server.

    Returns:
      An interfaces.Server with which RPCs can be serviced.
    """
    effective_options = _EMPTY_SERVER_OPTIONS if options is None else options
    return _server_adaptations.server(
        service_implementations,
        effective_options.multi_method_implementation,
        effective_options.request_deserializers,
        effective_options.response_serializers,
        effective_options.thread_pool,
        effective_options.thread_pool_size,
    )
|
||||
@@ -0,0 +1,163 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Constants and interfaces of the Beta API of gRPC Python."""
|
||||
|
||||
import abc
|
||||
|
||||
import grpc
|
||||
|
||||
ChannelConnectivity = grpc.ChannelConnectivity
# FATAL_FAILURE was a Beta-API name for SHUTDOWN
ChannelConnectivity.FATAL_FAILURE = ChannelConnectivity.SHUTDOWN

StatusCode = grpc.StatusCode
|
||||
|
||||
|
||||
class GRPCCallOptions(object):
    """A value encapsulating gRPC-specific options passed on RPC invocation.

    This class and its instances have no supported interface - it exists to
    define the type of its instances and its instances exist to be passed to
    other functions.
    """

    def __init__(self, disable_compression, subcall_of, credentials):
        self.disable_compression = disable_compression
        self.subcall_of = subcall_of
        self.credentials = credentials
|
||||
|
||||
|
||||
def grpc_call_options(disable_compression=False, credentials=None):
    """Creates a GRPCCallOptions value to be passed at RPC invocation.

    All parameters are optional and should always be passed by keyword.

    Args:
      disable_compression: A boolean indicating whether or not compression should
        be disabled for the request object of the RPC. Only valid for
        request-unary RPCs.
      credentials: A CallCredentials object to use for the invoked RPC.

    Returns:
      A GRPCCallOptions value constructed from the passed parameters.
    """
    # subcall_of is not exposed through this constructor and is always None.
    return GRPCCallOptions(disable_compression, None, credentials)
|
||||
|
||||
|
||||
# Re-exported aliases: the beta API surfaces these names from the grpc package.
GRPCAuthMetadataContext = grpc.AuthMetadataContext
GRPCAuthMetadataPluginCallback = grpc.AuthMetadataPluginCallback
GRPCAuthMetadataPlugin = grpc.AuthMetadataPlugin
|
||||
|
||||
|
||||
class GRPCServicerContext(abc.ABC):
    """Exposes gRPC-specific options and behaviors to code servicing RPCs."""

    @abc.abstractmethod
    def peer(self):
        """Identifies the peer that invoked the RPC being serviced.

        Returns:
          A string identifying the peer that invoked the RPC being serviced.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def disable_next_response_compression(self):
        """Disables compression of the next response passed by the application."""
        raise NotImplementedError()
|
||||
|
||||
|
||||
class GRPCInvocationContext(abc.ABC):
    """Exposes gRPC-specific options and behaviors to code invoking RPCs."""

    @abc.abstractmethod
    def disable_next_request_compression(self):
        """Disables compression of the next request passed by the application."""
        raise NotImplementedError()
|
||||
|
||||
|
||||
class Server(abc.ABC):
    """Services RPCs."""

    @abc.abstractmethod
    def add_insecure_port(self, address):
        """Reserves a port for insecure RPC service once this Server becomes active.

        This method may only be called before calling this Server's start method is
        called.

        Args:
          address: The address for which to open a port.

        Returns:
          An integer port on which RPCs will be serviced after this link has been
          started. This is typically the same number as the port number contained
          in the passed address, but will likely be different if the port number
          contained in the passed address was zero.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def add_secure_port(self, address, server_credentials):
        """Reserves a port for secure RPC service after this Server becomes active.

        This method may only be called before calling this Server's start method is
        called.

        Args:
          address: The address for which to open a port.
          server_credentials: A ServerCredentials.

        Returns:
          An integer port on which RPCs will be serviced after this link has been
          started. This is typically the same number as the port number contained
          in the passed address, but will likely be different if the port number
          contained in the passed address was zero.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def start(self):
        """Starts this Server's service of RPCs.

        This method may only be called while the server is not serving RPCs (i.e. it
        is not idempotent).
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def stop(self, grace):
        """Stops this Server's service of RPCs.

        All calls to this method immediately stop service of new RPCs. When existing
        RPCs are aborted is controlled by the grace period parameter passed to this
        method.

        This method may be called at any time and is idempotent. Passing a smaller
        grace value than has been passed in a previous call will have the effect of
        stopping the Server sooner. Passing a larger grace value than has been
        passed in a previous call will not have the effect of stopping the server
        later.

        Args:
          grace: A duration of time in seconds to allow existing RPCs to complete
            before being aborted by this Server's stopping. May be zero for
            immediate abortion of all in-progress RPCs.

        Returns:
          A threading.Event that will be set when this Server has completely
          stopped. The returned event may not be set until after the full grace
          period (if some ongoing RPC continues for the full length of the period)
          of it may be set much sooner (such as if this Server had no RPCs underway
          at the time it was stopped or if all RPCs that it had underway completed
          very early in the grace period).
        """
        raise NotImplementedError()
|
||||
@@ -0,0 +1,148 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Utilities for the gRPC Python Beta API."""
|
||||
|
||||
import threading
|
||||
import time
|
||||
|
||||
# implementations is referenced from specification in this module.
|
||||
from grpc.beta import implementations # pylint: disable=unused-import
|
||||
from grpc.beta import interfaces
|
||||
from grpc.framework.foundation import callable_util
|
||||
from grpc.framework.foundation import future
|
||||
|
||||
_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE = (
|
||||
'Exception calling connectivity future "done" callback!'
|
||||
)
|
||||
|
||||
|
||||
class _ChannelReadyFuture(future.Future):
    """A future.Future that matures when its channel's connectivity is READY."""

    def __init__(self, channel):
        self._condition = threading.Condition()
        self._channel = channel

        # _matured and _cancelled are mutually exclusive terminal states.
        self._matured = False
        self._cancelled = False
        self._done_callbacks = []

    def _block(self, timeout):
        """Blocks until this future terminates or the timeout elapses."""
        until = None if timeout is None else time.time() + timeout
        with self._condition:
            while True:
                if self._cancelled:
                    raise future.CancelledError()
                if self._matured:
                    return
                if until is None:
                    self._condition.wait()
                else:
                    remaining = until - time.time()
                    if remaining < 0:
                        raise future.TimeoutError()
                    self._condition.wait(timeout=remaining)

    def _update(self, connectivity):
        """Connectivity subscription callback; matures the future on READY."""
        with self._condition:
            if (
                not self._cancelled
                and connectivity is interfaces.ChannelConnectivity.READY
            ):
                self._matured = True
                self._channel.unsubscribe(self._update)
                self._condition.notify_all()
                # Snapshot callbacks so they run outside the lock.
                done_callbacks = tuple(self._done_callbacks)
                self._done_callbacks = None
            else:
                return

        for done_callback in done_callbacks:
            callable_util.call_logging_exceptions(
                done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self
            )

    def cancel(self):
        with self._condition:
            if not self._matured:
                self._cancelled = True
                self._channel.unsubscribe(self._update)
                self._condition.notify_all()
                # Snapshot callbacks so they run outside the lock.
                done_callbacks = tuple(self._done_callbacks)
                self._done_callbacks = None
            else:
                return False

        for done_callback in done_callbacks:
            callable_util.call_logging_exceptions(
                done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self
            )

        return True

    def cancelled(self):
        with self._condition:
            return self._cancelled

    def running(self):
        with self._condition:
            return not self._cancelled and not self._matured

    def done(self):
        with self._condition:
            return self._cancelled or self._matured

    def result(self, timeout=None):
        # Maturation carries no value; blocking is the entire behavior.
        self._block(timeout)

    def exception(self, timeout=None):
        self._block(timeout)

    def traceback(self, timeout=None):
        self._block(timeout)

    def add_done_callback(self, fn):
        with self._condition:
            if not self._cancelled and not self._matured:
                self._done_callbacks.append(fn)
                return

        # Already terminal: invoke immediately, outside the lock.
        fn(self)

    def start(self):
        with self._condition:
            self._channel.subscribe(self._update, try_to_connect=True)

    def __del__(self):
        with self._condition:
            if not self._cancelled and not self._matured:
                self._channel.unsubscribe(self._update)
|
||||
|
||||
|
||||
def channel_ready_future(channel):
    """Creates a future.Future tracking when an implementations.Channel is ready.

    Cancelling the returned future.Future does not tell the given
    implementations.Channel to abandon attempts it may have been making to
    connect; cancelling merely deactivates the return future.Future's
    subscription to the given implementations.Channel's connectivity.

    Args:
      channel: An implementations.Channel.

    Returns:
      A future.Future that matures when the given Channel has connectivity
        interfaces.ChannelConnectivity.READY.
    """
    ready_future = _ChannelReadyFuture(channel)
    ready_future.start()
    return ready_future
|
||||
@@ -0,0 +1,192 @@
|
||||
# Copyright 2018 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""gRPC's experimental APIs.
|
||||
|
||||
These APIs are subject to be removed during any minor version release.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import copy
|
||||
import functools
|
||||
import sys
|
||||
from typing import Callable, Optional, Union
|
||||
import warnings
|
||||
|
||||
import grpc
|
||||
from grpc._cython import cygrpc as _cygrpc
|
||||
|
||||
_EXPERIMENTAL_APIS_USED = set()
|
||||
|
||||
|
||||
class ChannelOptions(object):
    """Indicates a channel option unique to gRPC Python.

    This enumeration is part of an EXPERIMENTAL API.

    Attributes:
      SingleThreadedUnaryStream: Perform unary-stream RPCs on a single thread.
    """

    SingleThreadedUnaryStream = "SingleThreadedUnaryStream"
|
||||
|
||||
|
||||
class UsageError(Exception):
    """Raised by the gRPC library to indicate usage not allowed by the API."""
|
||||
|
||||
|
||||
# It's important that there be a single insecure credentials object so that its
# hash is deterministic and can be used for indexing in the simple stubs cache.
_insecure_channel_credentials = grpc.ChannelCredentials(
    _cygrpc.channel_credentials_insecure()
)
|
||||
|
||||
|
||||
def insecure_channel_credentials():
    """Creates a ChannelCredentials for use with an insecure channel.

    THIS IS AN EXPERIMENTAL API.

    Returns:
      The module-level singleton insecure ChannelCredentials object.
    """
    return _insecure_channel_credentials
|
||||
|
||||
|
||||
class ExperimentalApiWarning(Warning):
    """A warning that an API is experimental."""
|
||||
|
||||
|
||||
def _warn_experimental(api_name, stack_offset):
    """Emits an ExperimentalApiWarning once per distinct API name.

    Args:
      api_name: The name of the experimental API being used.
      stack_offset: Extra frames to skip so the warning points at the caller.
    """
    # Warn only on the first use of each API to avoid spamming callers.
    if api_name not in _EXPERIMENTAL_APIS_USED:
        _EXPERIMENTAL_APIS_USED.add(api_name)
        msg = (
            "'{}' is an experimental API. It is subject to change or ".format(
                api_name
            )
            + "removal between minor releases. Proceed with caution."
        )
        warnings.warn(msg, ExperimentalApiWarning, stacklevel=2 + stack_offset)
|
||||
|
||||
|
||||
def experimental_api(f):
    """Decorator marking *f* as experimental; warns on first invocation."""

    @functools.wraps(f)
    def _wrapper(*args, **kwargs):
        _warn_experimental(f.__name__, 1)
        return f(*args, **kwargs)

    return _wrapper
|
||||
|
||||
|
||||
def wrap_server_method_handler(wrapper, handler):
    """Wraps the server method handler function.

    The server implementation requires all server handlers being wrapped as
    RpcMethodHandler objects. This helper function ease the pain of writing
    server handler wrappers.

    Args:
      wrapper: A wrapper function that takes in a method handler behavior
        (the actual function) and returns a wrapped function.
      handler: A RpcMethodHandler object to be wrapped.

    Returns:
      A newly created RpcMethodHandler.
    """
    if not handler:
        return None

    if not handler.request_streaming:
        if not handler.response_streaming:
            # NOTE(lidiz) _replace is a public API:
            # https://docs.python.org/dev/library/collections.html
            return handler._replace(unary_unary=wrapper(handler.unary_unary))
        return handler._replace(unary_stream=wrapper(handler.unary_stream))
    if not handler.response_streaming:
        return handler._replace(stream_unary=wrapper(handler.stream_unary))
    return handler._replace(stream_stream=wrapper(handler.stream_stream))
|
||||
|
||||
|
||||
# A Callable to return in the async case
# See the `ssl_channel_credentials_with_custom_signer` docstring for more detail on usage.
PrivateKeySignCancel = Callable[[], None]
PrivateKeySignatureAlgorithm = _cygrpc.PrivateKeySignatureAlgorithm
PrivateKeySignOnComplete = Callable[[Union[bytes, Exception]], None]

# See the `ssl_channel_credentials_with_custom_signer` docstring for more detail on usage.
# The custom signing function for a user to implement and pass to gRPC Python.
CustomPrivateKeySign = Callable[
    [
        bytes,
        PrivateKeySignatureAlgorithm,
        "PrivateKeySignOnComplete",
    ],
    Union[bytes, "PrivateKeySignCancel"],
]
|
||||
|
||||
|
||||
@experimental_api
def ssl_channel_credentials_with_custom_signer(
    *,
    private_key_sign_fn: "CustomPrivateKeySign",
    root_certificates: Optional[bytes] = None,
    certificate_chain: bytes,
) -> grpc.ChannelCredentials:
    """Creates a ChannelCredentials for use with an SSL-enabled Channel with a custom signer.

    THIS IS AN EXPERIMENTAL API.
    This API will be removed in a future version and combined with `grpc.ssl_channel_credentials`.

    Args:
      private_key_sign_fn: a function with the signature of
        `CustomPrivateKeySign`. This function can return synchronously or
        asynchronously. To return synchronously, return the signed bytes. To
        return asynchronously, return a callable matching the
        `PrivateKeySignCancel` signature. This can be a no-op if no cancellation is
        needed. In the async case, this function must return this callable
        quickly, then call the passed in `PrivateKeySignOnComplete` when the async
        signing operation is complete to trigger gRPC to continue the handshake.
      root_certificates: The PEM-encoded root certificates as a byte string,
        or None to retrieve them from a default location chosen by gRPC
        runtime.
      certificate_chain: The PEM-encoded certificate chain as a byte string
        to use

    Returns:
      A ChannelCredentials for use with an SSL-enabled Channel.
    """
    # The second positional argument (presumably the in-memory PEM private
    # key) is deliberately None: signing is delegated to private_key_sign_fn
    # instead. TODO(review): confirm against the _cygrpc.SSLChannelCredentials
    # signature.
    return grpc.ChannelCredentials(
        _cygrpc.SSLChannelCredentials(
            root_certificates, None, certificate_chain, private_key_sign_fn
        )
    )
|
||||
|
||||
|
||||
# Public API of this module; extended below when the interpreter is recent
# enough to support the simple-stub convenience functions.
__all__ = (
    "ChannelOptions",
    "ExperimentalApiWarning",
    "UsageError",
    "insecure_channel_credentials",
    "ssl_channel_credentials_with_custom_signer",
    "wrap_server_method_handler",
)

# NOTE(review): `> (3, 6)` admits 3.6.1+ as well as 3.7+; presumably "newer
# than 3.6.0" is the intent — confirm before tightening.
if sys.version_info > (3, 6):
    from grpc._simple_stubs import stream_stream
    from grpc._simple_stubs import stream_unary
    from grpc._simple_stubs import unary_stream
    from grpc._simple_stubs import unary_unary

    __all__ += (
        "stream_stream",
        "stream_unary",
        "unary_stream",
        "unary_unary",
    )
|
||||
@@ -0,0 +1,16 @@
|
||||
# Copyright 2020 The gRPC Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Alias of grpc.aio to keep backward compatibility."""
|
||||
|
||||
from grpc.aio import *
|
||||
@@ -0,0 +1,27 @@
|
||||
# Copyright 2018 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""gRPC's Python gEvent APIs."""
|
||||
|
||||
from grpc._cython import cygrpc as _cygrpc
|
||||
|
||||
|
||||
def init_gevent():
    """Patches gRPC's libraries to be compatible with gevent.

    This must be called AFTER the python standard lib has been patched,
    but BEFORE creating any gRPC objects.

    In order for progress to be made, the application must drive the event loop.
    """
    # The actual patching is performed inside the Cython extension.
    _cygrpc.init_grpc_gevent()
|
||||
@@ -0,0 +1,45 @@
|
||||
# Copyright 2018 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""gRPC's APIs for TLS Session Resumption support"""
|
||||
|
||||
from grpc._cython import cygrpc as _cygrpc
|
||||
|
||||
|
||||
def ssl_session_cache_lru(capacity):
    """Creates an SSLSessionCache with LRU replacement policy

    Args:
      capacity: Size of the cache

    Returns:
      An SSLSessionCache with LRU replacement policy that can be passed as a value for
      the grpc.ssl_session_cache option to a grpc.Channel. SSL session caches are used
      to store session tickets, which clients can present to resume previous TLS sessions
      with a server.
    """
    # The Cython-level cache does the real work; SSLSessionCache merely wraps
    # it so it can be supplied as a channel-option value.
    return SSLSessionCache(_cygrpc.SSLSessionCacheLRU(capacity))
|
||||
|
||||
|
||||
class SSLSessionCache(object):
    """An encapsulation of a session cache used for TLS session resumption.

    Instances of this class can be passed to a Channel as values for the
    grpc.ssl_session_cache option
    """

    def __init__(self, cache):
        # The wrapped Cython-level cache object.
        self._cache = cache

    def __int__(self):
        # Channel option values are passed as integers; presumably this yields
        # the underlying cache's native handle — confirm against _cygrpc.
        return int(self._cache)
|
||||
@@ -0,0 +1,13 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
@@ -0,0 +1,13 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
@@ -0,0 +1,26 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Defines an enum for classifying RPC methods by streaming semantics."""
|
||||
|
||||
import enum
|
||||
|
||||
|
||||
@enum.unique
class Cardinality(enum.Enum):
    """Describes the streaming semantics of an RPC method."""

    # Values encode "<request side>/<response side>" streaming semantics.
    UNARY_UNARY = "request-unary/response-unary"
    UNARY_STREAM = "request-unary/response-streaming"
    STREAM_UNARY = "request-streaming/response-unary"
    STREAM_STREAM = "request-streaming/response-streaming"
|
||||
@@ -0,0 +1,24 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Defines an enum for classifying RPC methods by control flow semantics."""
|
||||
|
||||
import enum
|
||||
|
||||
|
||||
@enum.unique
class Service(enum.Enum):
    """Describes the control flow style of RPC method implementation."""

    # INLINE: implemented as a directly-invoked (blocking) callable;
    # EVENT: implemented in an event/callback-driven style.
    # (Semantics inferred from the names — confirm against the framework
    # interfaces that consume this enum.)
    INLINE = "inline"
    EVENT = "event"
|
||||
@@ -0,0 +1,13 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
@@ -0,0 +1,22 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Utilities for indicating abandonment of computation."""
|
||||
|
||||
|
||||
class Abandoned(Exception):
    """Indicates that some computation is being abandoned.

    Abandoning a computation is different than returning a value or raising
    an exception indicating some operational or programming defect.
    """

    # Intentionally carries no extra state: the exception type itself is the
    # entire signal.
|
||||
@@ -0,0 +1,98 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Utilities for working with callables."""
|
||||
|
||||
from abc import ABC
|
||||
import collections
|
||||
import enum
|
||||
import functools
|
||||
import logging
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Outcome(ABC):
    """A sum type describing the outcome of some call.

    Attributes:
      kind: One of Kind.RETURNED or Kind.RAISED respectively indicating that the
        call returned a value or raised an exception.
      return_value: The value returned by the call. Must be present if kind is
        Kind.RETURNED.
      exception: The exception raised by the call. Must be present if kind is
        Kind.RAISED.
    """

    # Nested so callers refer to it as Outcome.Kind.
    @enum.unique
    class Kind(enum.Enum):
        """Identifies the general kind of the outcome of some call."""

        RETURNED = enum.auto()
        RAISED = enum.auto()
|
||||
|
||||
|
||||
class _EasyOutcome(
    collections.namedtuple(
        "_EasyOutcome", ["kind", "return_value", "exception"]
    ),
    Outcome,
):
    """A trivial implementation of Outcome.

    The namedtuple base supplies the three Outcome attributes (kind,
    return_value, exception) as immutable fields; Outcome is mixed in so
    instances satisfy isinstance checks against the interface.
    """
|
||||
|
||||
|
||||
def _call_logging_exceptions(behavior, message, *args, **kwargs):
    """Invokes *behavior*, capturing its result or exception as an Outcome.

    Any exception is logged (with *message*) rather than propagated.
    """
    try:
        return_value = behavior(*args, **kwargs)
    except Exception as error:  # pylint: disable=broad-except
        _LOGGER.exception(message)
        return _EasyOutcome(Outcome.Kind.RAISED, None, error)
    return _EasyOutcome(Outcome.Kind.RETURNED, return_value, None)
|
||||
|
||||
|
||||
def with_exceptions_logged(behavior, message):
    """Wraps a callable in a try-except that logs any exceptions it raises.

    Args:
      behavior: Any callable.
      message: A string to log if the behavior raises an exception.

    Returns:
      A callable that when executed invokes the given behavior. The returned
      callable takes the same arguments as the given behavior but returns a
      future.Outcome describing whether the given behavior returned a value or
      raised an exception.
    """

    def wrapped_behavior(*args, **kwargs):
        return _call_logging_exceptions(behavior, message, *args, **kwargs)

    # Copy the original callable's metadata onto the wrapper (applied
    # manually rather than via decorator syntax).
    return functools.wraps(behavior)(wrapped_behavior)
|
||||
|
||||
|
||||
def call_logging_exceptions(behavior, message, *args, **kwargs):
    """Calls a behavior in a try-except that logs any exceptions it raises.

    Args:
      behavior: Any callable.
      message: A string to log if the behavior raises an exception.
      *args: Positional arguments to pass to the given behavior.
      **kwargs: Keyword arguments to pass to the given behavior.

    Returns:
      An Outcome describing whether the given behavior returned a value or raised
      an exception.
    """
    # Public, immediate-call counterpart of with_exceptions_logged.
    return _call_logging_exceptions(behavior, message, *args, **kwargs)
|
||||
@@ -0,0 +1,219 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""A Future interface.
|
||||
|
||||
Python doesn't have a Future interface in its standard library. In the absence
|
||||
of such a standard, three separate, incompatible implementations
|
||||
(concurrent.futures.Future, ndb.Future, and asyncio.Future) have appeared. This
|
||||
interface attempts to be as compatible as possible with
|
||||
concurrent.futures.Future. From ndb.Future it adopts a traceback-object accessor
|
||||
method.
|
||||
|
||||
Unlike the concrete and implemented Future classes listed above, the Future
|
||||
class defined in this module is an entirely abstract interface that anyone may
|
||||
implement and use.
|
||||
|
||||
The one known incompatibility between this interface and the interface of
|
||||
concurrent.futures.Future is that this interface defines its own CancelledError
|
||||
and TimeoutError exceptions rather than raising the implementation-private
|
||||
concurrent.futures._base.CancelledError and the
|
||||
built-in-but-only-in-3.3-and-later TimeoutError.
|
||||
"""
|
||||
|
||||
import abc
|
||||
|
||||
|
||||
class TimeoutError(Exception):
    """Indicates that a particular call timed out."""

    # NOTE: deliberately shadows the builtin TimeoutError — see the module
    # docstring's rationale for defining interface-local exceptions.
|
||||
|
||||
|
||||
class CancelledError(Exception):
    """Indicates that the computation underlying a Future was cancelled."""

    # NOTE: distinct from concurrent.futures' CancelledError — see the module
    # docstring's rationale for defining interface-local exceptions.
|
||||
|
||||
|
||||
class Future(abc.ABC):
    """A representation of a computation in another control flow.

    Computations represented by a Future may be yet to be begun, may be ongoing,
    or may have already completed.
    """

    # NOTE(nathaniel): This isn't the return type that I would want to have if it
    # were up to me. Were this interface being written from scratch, the return
    # type of this method would probably be a sum type like:
    #
    # NOT_COMMENCED
    # COMMENCED_AND_NOT_COMPLETED
    # PARTIAL_RESULT<Partial_Result_Type>
    # COMPLETED<Result_Type>
    # UNCANCELLABLE
    # NOT_IMMEDIATELY_DETERMINABLE
    @abc.abstractmethod
    def cancel(self):
        """Attempts to cancel the computation.

        This method does not block.

        Returns:
          True if the computation has not yet begun, will not be allowed to take
          place, and determination of both was possible without blocking. False
          under all other circumstances including but not limited to the
          computation's already having begun, the computation's already having
          finished, and the computation's having been scheduled for execution on a
          remote system for which a determination of whether or not it commenced
          before being cancelled cannot be made without blocking.
        """
        # Bodies raise NotImplementedError as belt-and-braces: @abstractmethod
        # already prevents instantiation of incomplete subclasses.
        raise NotImplementedError()

    # NOTE(nathaniel): Here too this isn't the return type that I'd want this
    # method to have if it were up to me. I think I'd go with another sum type
    # like:
    #
    # NOT_CANCELLED (this object's cancel method hasn't been called)
    # NOT_COMMENCED
    # COMMENCED_AND_NOT_COMPLETED
    # PARTIAL_RESULT<Partial_Result_Type>
    # COMPLETED<Result_Type>
    # UNCANCELLABLE
    # NOT_IMMEDIATELY_DETERMINABLE
    #
    # Notice how giving the cancel method the right semantics obviates most
    # reasons for this method to exist.
    @abc.abstractmethod
    def cancelled(self):
        """Describes whether the computation was cancelled.

        This method does not block.

        Returns:
          True if the computation was cancelled any time before its result became
          immediately available. False under all other circumstances including but
          not limited to this object's cancel method not having been called and
          the computation's result having become immediately available.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def running(self):
        """Describes whether the computation is taking place.

        This method does not block.

        Returns:
          True if the computation is scheduled to take place in the future or is
          taking place now, or False if the computation took place in the past or
          was cancelled.
        """
        raise NotImplementedError()

    # NOTE(nathaniel): These aren't quite the semantics I'd like here either. I
    # would rather this only returned True in cases in which the underlying
    # computation completed successfully. A computation's having been cancelled
    # conflicts with considering that computation "done".
    @abc.abstractmethod
    def done(self):
        """Describes whether the computation has taken place.

        This method does not block.

        Returns:
          True if the computation is known to have either completed or have been
          unscheduled or interrupted. False if the computation may possibly be
          executing or scheduled to execute later.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def result(self, timeout=None):
        """Accesses the outcome of the computation or raises its exception.

        This method may return immediately or may block.

        Args:
          timeout: The length of time in seconds to wait for the computation to
            finish or be cancelled, or None if this method should block until the
            computation has finished or is cancelled no matter how long that takes.

        Returns:
          The return value of the computation.

        Raises:
          TimeoutError: If a timeout value is passed and the computation does not
            terminate within the allotted time.
          CancelledError: If the computation was cancelled.
          Exception: If the computation raised an exception, this call will raise
            the same exception.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def exception(self, timeout=None):
        """Return the exception raised by the computation.

        This method may return immediately or may block.

        Args:
          timeout: The length of time in seconds to wait for the computation to
            terminate or be cancelled, or None if this method should block until
            the computation is terminated or is cancelled no matter how long that
            takes.

        Returns:
          The exception raised by the computation, or None if the computation did
          not raise an exception.

        Raises:
          TimeoutError: If a timeout value is passed and the computation does not
            terminate within the allotted time.
          CancelledError: If the computation was cancelled.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def traceback(self, timeout=None):
        """Access the traceback of the exception raised by the computation.

        This method may return immediately or may block.

        Args:
          timeout: The length of time in seconds to wait for the computation to
            terminate or be cancelled, or None if this method should block until
            the computation is terminated or is cancelled no matter how long that
            takes.

        Returns:
          The traceback of the exception raised by the computation, or None if the
          computation did not raise an exception.

        Raises:
          TimeoutError: If a timeout value is passed and the computation does not
            terminate within the allotted time.
          CancelledError: If the computation was cancelled.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def add_done_callback(self, fn):
        """Adds a function to be called at completion of the computation.

        The callback will be passed this Future object describing the outcome of
        the computation.

        If the computation has already completed, the callback will be called
        immediately.

        Args:
          fn: A callable taking this Future object as its single parameter.
        """
        raise NotImplementedError()
|
||||
@@ -0,0 +1,72 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""A thread pool that logs exceptions raised by tasks executed within it."""
|
||||
|
||||
from concurrent import futures
|
||||
import logging
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _wrap(behavior):
    """Returns *behavior* wrapped so that any exception it raises is logged.

    The exception is re-raised after logging, so callers still observe it.
    """

    def _logging_wrapper(*args, **kwargs):
        try:
            return behavior(*args, **kwargs)
        except Exception:
            _LOGGER.exception(
                "Unexpected exception from %s executed in logging pool!",
                behavior,
            )
            raise

    return _logging_wrapper
|
||||
|
||||
|
||||
class _LoggingPool(object):
    """An exception-logging futures.ThreadPoolExecutor-compatible thread pool."""

    def __init__(self, backing_pool):
        # All real work is delegated to this executor; this class only adds
        # exception logging around submitted callables.
        self._backing_pool = backing_pool

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Mirror ThreadPoolExecutor's context-manager behavior: block until
        # queued work has finished.
        self._backing_pool.shutdown(wait=True)

    def submit(self, fn, *args, **kwargs):
        return self._backing_pool.submit(_wrap(fn), *args, **kwargs)

    def map(self, func, *iterables, **kwargs):
        # Only the `timeout` keyword is forwarded; other kwargs are ignored.
        return self._backing_pool.map(
            _wrap(func), *iterables, timeout=kwargs.get("timeout")
        )

    def shutdown(self, wait=True):
        self._backing_pool.shutdown(wait=wait)
|
||||
|
||||
|
||||
def pool(max_workers):
    """Creates a thread pool that logs exceptions raised by the tasks within it.

    Args:
      max_workers: The maximum number of worker threads to allow the pool.

    Returns:
      A futures.ThreadPoolExecutor-compatible thread pool that logs exceptions
      raised by the tasks executed within it.
    """
    return _LoggingPool(futures.ThreadPoolExecutor(max_workers))
|
||||
@@ -0,0 +1,43 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Interfaces related to streams of values or objects."""
|
||||
|
||||
import abc
|
||||
|
||||
|
||||
class Consumer(abc.ABC):
    """Interface for consumers of finite streams of values or objects."""

    @abc.abstractmethod
    def consume(self, value):
        """Accepts a value.

        Args:
          value: Any value accepted by this Consumer.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def terminate(self):
        """Indicates to this Consumer that no more values will be supplied."""
        raise NotImplementedError()

    @abc.abstractmethod
    def consume_and_terminate(self, value):
        """Supplies a value and signals that no more values will be supplied.

        Semantically equivalent to consume(value) followed by terminate(),
        but offered as a single call.

        Args:
          value: Any value accepted by this Consumer.
        """
        raise NotImplementedError()
|
||||
@@ -0,0 +1,147 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Helpful utilities related to the stream module."""
|
||||
|
||||
import logging
|
||||
import threading
|
||||
|
||||
from grpc.framework.foundation import stream
|
||||
|
||||
# Sentinel meaning "no value to deliver"; a fresh object() is used because any
# real payload (including None) could be a legitimate consumed value.
_NO_VALUE = object()
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TransformingConsumer(stream.Consumer):
    """A stream.Consumer that passes a transformation of its input to another."""

    def __init__(self, transformation, downstream):
        # Unary callable applied to each consumed value.
        self._transformation = transformation
        # The stream.Consumer receiving the transformed values.
        self._downstream = downstream

    def consume(self, value):
        self._downstream.consume(self._transformation(value))

    def terminate(self):
        # Termination carries no value, so nothing is transformed here.
        self._downstream.terminate()

    def consume_and_terminate(self, value):
        self._downstream.consume_and_terminate(self._transformation(value))
|
||||
|
||||
|
||||
class IterableConsumer(stream.Consumer):
    """A Consumer that when iterated over emits the values it has consumed."""

    def __init__(self):
        # Guards _values and _active; also used to wake blocked iterators.
        self._condition = threading.Condition()
        # FIFO of consumed-but-not-yet-emitted values.
        self._values = []
        # False once terminated; values consumed afterwards are dropped.
        self._active = True

    def consume(self, value):
        with self._condition:
            if self._active:
                self._values.append(value)
                self._condition.notify()

    def terminate(self):
        with self._condition:
            self._active = False
            # Wake any iterator blocked in next() so it can raise StopIteration.
            self._condition.notify()

    def consume_and_terminate(self, value):
        with self._condition:
            if self._active:
                self._values.append(value)
                self._active = False
                self._condition.notify()

    def __iter__(self):
        return self

    def __next__(self):
        # Python 3 iterator protocol delegates to the Python 2-style next().
        return self.next()

    def next(self):
        with self._condition:
            # Block until a value arrives or the stream is terminated.
            while self._active and not self._values:
                self._condition.wait()
            # Drain remaining values even after termination before stopping.
            if self._values:
                return self._values.pop(0)
            raise StopIteration()
|
||||
|
||||
|
||||
class ThreadSwitchingConsumer(stream.Consumer):
    """A Consumer decorator that affords serialization and asynchrony.

    Values are delivered to the wrapped sink on pool threads, one _spin call
    at a time, so the sink never sees concurrent invocations.
    """

    def __init__(self, sink, pool):
        # Guards _spinning, _values and _active.
        self._lock = threading.Lock()
        # The wrapped stream.Consumer receiving the values.
        self._sink = sink
        # Executor used to deliver values off the caller's thread.
        self._pool = pool
        # True if self._spin has been submitted to the pool to be called once and
        # that call has not yet returned, False otherwise.
        self._spinning = False
        # Values queued while a _spin call is already in flight.
        self._values = []
        # False once terminate/consume_and_terminate has been called.
        self._active = True

    def _spin(self, sink, value, terminate):
        # Delivers `value` (or termination when value is _NO_VALUE), then keeps
        # draining queued values until none remain, at which point it clears
        # _spinning and returns.
        while True:
            try:
                if value is _NO_VALUE:
                    sink.terminate()
                elif terminate:
                    sink.consume_and_terminate(value)
                else:
                    sink.consume(value)
            except Exception as e:  # pylint:disable=broad-except
                # Sink failures are logged and swallowed so draining continues.
                _LOGGER.exception(e)

            with self._lock:
                if terminate:
                    self._spinning = False
                    return
                if self._values:
                    value = self._values.pop(0)
                    # Fold termination into the last value's delivery when the
                    # queue is exhausted and we are no longer active.
                    terminate = not self._values and not self._active
                elif not self._active:
                    # Nothing queued but terminated: deliver a bare terminate.
                    value = _NO_VALUE
                    terminate = True
                else:
                    self._spinning = False
                    return

    def consume(self, value):
        with self._lock:
            if self._active:
                if self._spinning:
                    # A _spin call is in flight; it will pick this up.
                    self._values.append(value)
                else:
                    self._pool.submit(self._spin, self._sink, value, False)
                    self._spinning = True

    def terminate(self):
        with self._lock:
            if self._active:
                self._active = False
                if not self._spinning:
                    self._pool.submit(self._spin, self._sink, _NO_VALUE, True)
                    self._spinning = True

    def consume_and_terminate(self, value):
        with self._lock:
            if self._active:
                self._active = False
                if self._spinning:
                    # In-flight _spin will drain the queue and then terminate
                    # (it observes _active == False).
                    self._values.append(value)
                else:
                    self._pool.submit(self._spin, self._sink, value, True)
                    self._spinning = True
|
||||
@@ -0,0 +1,13 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
@@ -0,0 +1,13 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
@@ -0,0 +1,328 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""The base interface of RPC Framework.
|
||||
|
||||
Implementations of this interface support the conduct of "operations":
|
||||
exchanges between two distinct ends of an arbitrary number of data payloads
|
||||
and metadata such as a name for the operation, initial and terminal metadata
|
||||
in each direction, and flow control. These operations may be used for transfers
|
||||
of data, remote procedure calls, status indication, or anything else
|
||||
applications choose.
|
||||
"""
|
||||
|
||||
# threading is referenced from specification in this module.
|
||||
import abc
|
||||
import enum
|
||||
import threading # pylint: disable=unused-import
|
||||
|
||||
# pylint: disable=too-many-arguments
|
||||
|
||||
|
||||
class NoSuchMethodError(Exception):
    """Indicates that an unrecognized operation has been called.

    Attributes:
      code: A code value to communicate to the other side of the operation
        along with indication of operation termination. May be None.
      details: A details value to communicate to the other side of the
        operation along with indication of operation termination. May be None.
    """

    def __init__(self, code, details):
        """Constructor.

        Args:
          code: A code value to communicate to the other side of the operation
            along with indication of operation termination. May be None.
          details: A details value to communicate to the other side of the
            operation along with indication of operation termination. May be None.
        """
        # Zero-argument super() is the idiomatic Python 3 spelling; behavior
        # is identical to the legacy two-argument form used previously.
        super().__init__()
        self.code = code
        self.details = details
class Outcome:
    """The outcome of an operation.

    Attributes:
      kind: A Kind value coarsely identifying how the operation terminated.
      code: An application-specific code value or None if no such value was
        provided.
      details: An application-specific details value or None if no such value was
        provided.
    """

    # NOTE: explicit inheritance from `object` was dropped — it is redundant
    # in Python 3 (this module already relies on Python 3 features).

    @enum.unique
    class Kind(enum.Enum):
        """Ways in which an operation can terminate."""

        COMPLETED = "completed"
        CANCELLED = "cancelled"
        EXPIRED = "expired"
        LOCAL_SHUTDOWN = "local shutdown"
        REMOTE_SHUTDOWN = "remote shutdown"
        RECEPTION_FAILURE = "reception failure"
        TRANSMISSION_FAILURE = "transmission failure"
        LOCAL_FAILURE = "local failure"
        REMOTE_FAILURE = "remote failure"
class Completion(abc.ABC):
    """An aggregate of the values exchanged upon operation completion.

    Purely declarative: concrete implementations supply the three attributes.

    Attributes:
      terminal_metadata: A terminal metadata value for the operation.
      code: A code value for the operation.
      message: A message value for the operation.
    """
class OperationContext(abc.ABC):
    """Provides operation-related information and action."""

    @abc.abstractmethod
    def outcome(self):
        """Indicates the operation's outcome (or that the operation is ongoing).

        Returns:
          None if the operation is still active or the Outcome value for the
            operation if it has terminated.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def add_termination_callback(self, callback):
        """Adds a function to be called upon operation termination.

        Args:
          callback: A callable to be passed an Outcome value on operation
            termination.

        Returns:
          None if the operation has not yet terminated and the passed callback will
            later be called when it does terminate, or if the operation has already
            terminated an Outcome value describing the operation termination and the
            passed callback will not be called as a result of this method call.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def time_remaining(self):
        """Describes the length of allowed time remaining for the operation.

        Returns:
          A nonnegative float indicating the length of allowed time in seconds
            remaining for the operation to complete before it is considered to have
            timed out. Zero is returned if the operation has terminated.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def cancel(self):
        """Cancels the operation if the operation has not yet terminated."""
        raise NotImplementedError()

    @abc.abstractmethod
    def fail(self, exception):
        """Indicates that the operation has failed.

        Args:
          exception: An exception germane to the operation failure. May be None.
        """
        raise NotImplementedError()
class Operator(abc.ABC):
    """An interface through which to participate in an operation."""

    @abc.abstractmethod
    def advance(
        self,
        initial_metadata=None,
        payload=None,
        completion=None,
        allowance=None,
    ):
        """Progresses the operation.

        Args:
          initial_metadata: An initial metadata value. Only one may ever be
            communicated in each direction for an operation, and they must be
            communicated no later than either the first payload or the completion.
          payload: A payload value.
          completion: A Completion value. May only ever be non-None once in either
            direction, and no payloads may be passed after it has been communicated.
          allowance: A positive integer communicating the number of additional
            payloads allowed to be passed by the remote side of the operation.
        """
        raise NotImplementedError()
class ProtocolReceiver(abc.ABC):
    """A means of receiving protocol values during an operation."""

    @abc.abstractmethod
    def context(self, protocol_context):
        """Accepts the protocol context object for the operation.

        Args:
          protocol_context: The protocol context object for the operation.
        """
        raise NotImplementedError()
class Subscription(abc.ABC):
    """Describes customer code's interest in values from the other side.

    Attributes:
      kind: A Kind value describing the overall kind of this value.
      termination_callback: A callable to be passed the Outcome associated with
        the operation after it has terminated. Must be non-None if kind is
        Kind.TERMINATION_ONLY. Must be None otherwise.
      allowance: A callable behavior that accepts positive integers representing
        the number of additional payloads allowed to be passed to the other side
        of the operation. Must be None if kind is Kind.FULL. Must not be None
        otherwise.
      operator: An Operator to be passed values from the other side of the
        operation. Must be non-None if kind is Kind.FULL. Must be None otherwise.
      protocol_receiver: A ProtocolReceiver to be passed protocol objects as they
        become available during the operation. Must be non-None if kind is
        Kind.FULL.
    """

    @enum.unique
    class Kind(enum.Enum):
        # Levels of interest, from none at all to full participation.
        NONE = "none"
        TERMINATION_ONLY = "termination only"
        FULL = "full"
class Servicer(abc.ABC):
    """Interface for service implementations."""

    @abc.abstractmethod
    def service(self, group, method, context, output_operator):
        """Services an operation.

        Args:
          group: The group identifier of the operation to be serviced.
          method: The method identifier of the operation to be serviced.
          context: An OperationContext object affording contextual information and
            actions.
          output_operator: An Operator that will accept output values of the
            operation.

        Returns:
          A Subscription via which this object may or may not accept more values of
            the operation.

        Raises:
          NoSuchMethodError: If this Servicer does not handle operations with the
            given group and method.
          abandonment.Abandoned: If the operation has been aborted and there no
            longer is any reason to service the operation.
        """
        raise NotImplementedError()
class End(abc.ABC):
    """Common type for entry-point objects on both sides of an operation."""

    @abc.abstractmethod
    def start(self):
        """Starts this object's service of operations."""
        raise NotImplementedError()

    @abc.abstractmethod
    def stop(self, grace):
        """Stops this object's service of operations.

        This object will refuse service of new operations as soon as this method is
        called but operations under way at the time of the call may be given a
        grace period during which they are allowed to finish.

        Args:
          grace: A duration of time in seconds to allow ongoing operations to
            terminate before being forcefully terminated by the stopping of this
            End. May be zero to terminate all ongoing operations and immediately
            stop.

        Returns:
          A threading.Event that will be set to indicate all operations having
            terminated and this End having completely stopped. The returned event
            may not be set until after the full grace period (if some ongoing
            operation continues for the full length of the period) or it may be set
            much sooner (if for example this End had no operations in progress at
            the time its stop method was called).
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def operate(
        self,
        group,
        method,
        subscription,
        timeout,
        initial_metadata=None,
        payload=None,
        completion=None,
        protocol_options=None,
    ):
        """Commences an operation.

        Args:
          group: The group identifier of the invoked operation.
          method: The method identifier of the invoked operation.
          subscription: A Subscription to which the results of the operation will be
            passed.
          timeout: A length of time in seconds to allow for the operation.
          initial_metadata: An initial metadata value to be sent to the other side
            of the operation. May be None if the initial metadata will be later
            passed via the returned operator or if there will be no initial metadata
            passed at all.
          payload: An initial payload for the operation.
          completion: A Completion value indicating the end of transmission to the
            other side of the operation.
          protocol_options: A value specified by the provider of a Base interface
            implementation affording custom state and behavior.

        Returns:
          A pair of objects affording information about the operation and action
            continuing the operation. The first element of the returned pair is an
            OperationContext for the operation and the second element of the
            returned pair is an Operator to which operation values not passed in
            this call should later be passed.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def operation_stats(self):
        """Reports the number of terminated operations broken down by outcome.

        Returns:
          A dictionary from Outcome.Kind value to an integer identifying the number
            of operations that terminated with that outcome kind.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def add_idle_action(self, action):
        """Adds an action to be called when this End has no ongoing operations.

        Args:
          action: A callable that accepts no arguments.
        """
        raise NotImplementedError()
@@ -0,0 +1,83 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Utilities for use with the base interface of RPC Framework."""
|
||||
|
||||
import collections
|
||||
|
||||
from grpc.framework.interfaces.base import base
|
||||
|
||||
|
||||
class _Completion(
    base.Completion,
    # The namedtuple supplies the three attribute slots base.Completion declares.
    collections.namedtuple(
        "_Completion",
        (
            "terminal_metadata",
            "code",
            "message",
        ),
    ),
):
    """A trivial implementation of base.Completion."""
class _Subscription(
    base.Subscription,
    # The namedtuple supplies the attribute slots base.Subscription declares.
    collections.namedtuple(
        "_Subscription",
        (
            "kind",
            "termination_callback",
            "allowance",
            "operator",
            "protocol_receiver",
        ),
    ),
):
    """A trivial implementation of base.Subscription."""
# Shared, immutable Subscription expressing no interest in any operation values.
_NONE_SUBSCRIPTION = _Subscription(
    base.Subscription.Kind.NONE, None, None, None, None
)
def completion(terminal_metadata, code, message):
    """Creates a base.Completion aggregating the given operation values.

    Args:
      terminal_metadata: A terminal metadata value for an operation.
      code: A code value for an operation.
      message: A message value for an operation.

    Returns:
      A base.Completion aggregating the given operation values.
    """
    # Keyword arguments make the field mapping explicit.
    return _Completion(
        terminal_metadata=terminal_metadata,
        code=code,
        message=message,
    )
def full_subscription(operator, protocol_receiver):
    """Creates a "full" base.Subscription for the given base.Operator.

    Args:
      operator: A base.Operator to be used in an operation.
      protocol_receiver: A base.ProtocolReceiver to be used in an operation.

    Returns:
      A base.Subscription of kind base.Subscription.Kind.FULL wrapping the given
        base.Operator and base.ProtocolReceiver.
    """
    # Keyword arguments make the field mapping explicit.
    return _Subscription(
        kind=base.Subscription.Kind.FULL,
        termination_callback=None,
        allowance=None,
        operator=operator,
        protocol_receiver=protocol_receiver,
    )
@@ -0,0 +1,13 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,245 @@
|
||||
# Copyright 2015 gRPC authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Utilities for RPC Framework's Face interface."""
|
||||
|
||||
import collections
|
||||
|
||||
# stream is referenced from specification in this module.
|
||||
from grpc.framework.common import cardinality
|
||||
from grpc.framework.common import style
|
||||
from grpc.framework.foundation import stream # pylint: disable=unused-import
|
||||
from grpc.framework.interfaces.face import face
|
||||
|
||||
|
||||
class _MethodImplementation(
    face.MethodImplementation,
    # One slot per cardinality/style combination; exactly one of the eight
    # behavior fields is expected to be non-None for any given instance.
    collections.namedtuple(
        "_MethodImplementation",
        [
            "cardinality",
            "style",
            "unary_unary_inline",
            "unary_stream_inline",
            "stream_unary_inline",
            "stream_stream_inline",
            "unary_unary_event",
            "unary_stream_event",
            "stream_unary_event",
            "stream_stream_event",
        ],
    ),
):
    # A trivial record implementation of face.MethodImplementation.
    pass
def unary_unary_inline(behavior):
    """Creates an face.MethodImplementation for the given behavior.

    Args:
      behavior: The implementation of a unary-unary RPC method as a callable value
        that takes a request value and an face.ServicerContext object and
        returns a response value.

    Returns:
      An face.MethodImplementation derived from the given behavior.
    """
    # Keyword arguments make explicit which behavior slot is populated.
    return _MethodImplementation(
        cardinality=cardinality.Cardinality.UNARY_UNARY,
        style=style.Service.INLINE,
        unary_unary_inline=behavior,
        unary_stream_inline=None,
        stream_unary_inline=None,
        stream_stream_inline=None,
        unary_unary_event=None,
        unary_stream_event=None,
        stream_unary_event=None,
        stream_stream_event=None,
    )
def unary_stream_inline(behavior):
    """Creates an face.MethodImplementation for the given behavior.

    Args:
      behavior: The implementation of a unary-stream RPC method as a callable
        value that takes a request value and an face.ServicerContext object and
        returns an iterator of response values.

    Returns:
      An face.MethodImplementation derived from the given behavior.
    """
    # Keyword arguments make explicit which behavior slot is populated.
    return _MethodImplementation(
        cardinality=cardinality.Cardinality.UNARY_STREAM,
        style=style.Service.INLINE,
        unary_unary_inline=None,
        unary_stream_inline=behavior,
        stream_unary_inline=None,
        stream_stream_inline=None,
        unary_unary_event=None,
        unary_stream_event=None,
        stream_unary_event=None,
        stream_stream_event=None,
    )
def stream_unary_inline(behavior):
    """Creates an face.MethodImplementation for the given behavior.

    Args:
      behavior: The implementation of a stream-unary RPC method as a callable
        value that takes an iterator of request values and an
        face.ServicerContext object and returns a response value.

    Returns:
      An face.MethodImplementation derived from the given behavior.
    """
    # Keyword arguments make explicit which behavior slot is populated.
    return _MethodImplementation(
        cardinality=cardinality.Cardinality.STREAM_UNARY,
        style=style.Service.INLINE,
        unary_unary_inline=None,
        unary_stream_inline=None,
        stream_unary_inline=behavior,
        stream_stream_inline=None,
        unary_unary_event=None,
        unary_stream_event=None,
        stream_unary_event=None,
        stream_stream_event=None,
    )
def stream_stream_inline(behavior):
    """Creates an face.MethodImplementation for the given behavior.

    Args:
      behavior: The implementation of a stream-stream RPC method as a callable
        value that takes an iterator of request values and an
        face.ServicerContext object and returns an iterator of response values.

    Returns:
      An face.MethodImplementation derived from the given behavior.
    """
    # Keyword arguments make explicit which behavior slot is populated.
    return _MethodImplementation(
        cardinality=cardinality.Cardinality.STREAM_STREAM,
        style=style.Service.INLINE,
        unary_unary_inline=None,
        unary_stream_inline=None,
        stream_unary_inline=None,
        stream_stream_inline=behavior,
        unary_unary_event=None,
        unary_stream_event=None,
        stream_unary_event=None,
        stream_stream_event=None,
    )
def unary_unary_event(behavior):
    """Creates an face.MethodImplementation for the given behavior.

    Args:
      behavior: The implementation of a unary-unary RPC method as a callable
        value that takes a request value, a response callback to which to pass
        the response value of the RPC, and an face.ServicerContext.

    Returns:
      An face.MethodImplementation derived from the given behavior.
    """
    # Keyword arguments make explicit which behavior slot is populated.
    return _MethodImplementation(
        cardinality=cardinality.Cardinality.UNARY_UNARY,
        style=style.Service.EVENT,
        unary_unary_inline=None,
        unary_stream_inline=None,
        stream_unary_inline=None,
        stream_stream_inline=None,
        unary_unary_event=behavior,
        unary_stream_event=None,
        stream_unary_event=None,
        stream_stream_event=None,
    )
def unary_stream_event(behavior):
    """Creates an face.MethodImplementation for the given behavior.

    Args:
      behavior: The implementation of a unary-stream RPC method as a callable
        value that takes a request value, a stream.Consumer to which to pass the
        response values of the RPC, and an face.ServicerContext.

    Returns:
      An face.MethodImplementation derived from the given behavior.
    """
    # Keyword arguments make explicit which behavior slot is populated.
    return _MethodImplementation(
        cardinality=cardinality.Cardinality.UNARY_STREAM,
        style=style.Service.EVENT,
        unary_unary_inline=None,
        unary_stream_inline=None,
        stream_unary_inline=None,
        stream_stream_inline=None,
        unary_unary_event=None,
        unary_stream_event=behavior,
        stream_unary_event=None,
        stream_stream_event=None,
    )
def stream_unary_event(behavior):
    """Creates an face.MethodImplementation for the given behavior.

    Args:
      behavior: The implementation of a stream-unary RPC method as a callable
        value that takes a response callback to which to pass the response value
        of the RPC and an face.ServicerContext and returns a stream.Consumer to
        which the request values of the RPC should be passed.

    Returns:
      An face.MethodImplementation derived from the given behavior.
    """
    # Keyword arguments make explicit which behavior slot is populated.
    return _MethodImplementation(
        cardinality=cardinality.Cardinality.STREAM_UNARY,
        style=style.Service.EVENT,
        unary_unary_inline=None,
        unary_stream_inline=None,
        stream_unary_inline=None,
        stream_stream_inline=None,
        unary_unary_event=None,
        unary_stream_event=None,
        stream_unary_event=behavior,
        stream_stream_event=None,
    )
def stream_stream_event(behavior):
    """Creates an face.MethodImplementation for the given behavior.

    Args:
      behavior: The implementation of a stream-stream RPC method as a callable
        value that takes a stream.Consumer to which to pass the response values
        of the RPC and an face.ServicerContext and returns a stream.Consumer to
        which the request values of the RPC should be passed.

    Returns:
      An face.MethodImplementation derived from the given behavior.
    """
    # Keyword arguments make explicit which behavior slot is populated.
    return _MethodImplementation(
        cardinality=cardinality.Cardinality.STREAM_STREAM,
        style=style.Service.EVENT,
        unary_unary_inline=None,
        unary_stream_inline=None,
        stream_unary_inline=None,
        stream_stream_inline=None,
        unary_unary_event=None,
        unary_stream_event=None,
        stream_unary_event=None,
        stream_stream_event=behavior,
    )
Reference in New Issue
Block a user