Move betterproto → src/betterproto
This change avoids some nasty import issues, ensures that the installed package (rather than whatever happens to be in the working directory) is what gets tested, and prevents arbitrary repository files from being included when packaging.
committed by Bouke Versteegh
parent cebf9176a3
commit 459d12b24d
1012
src/betterproto/__init__.py
Normal file
File diff suppressed because it is too large
9
src/betterproto/_types.py
Normal file
@@ -0,0 +1,9 @@
from typing import TYPE_CHECKING, TypeVar

if TYPE_CHECKING:
    from . import Message
    from grpclib._typing import IProtoMessage

# Bound type variable to allow methods to return `self` of subclasses
T = TypeVar("T", bound="Message")
ST = TypeVar("ST", bound="IProtoMessage")
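For context, a short sketch (not from the diff; `Base` and `Child` are hypothetical stand-ins for `Message` and a generated message class) of what the bound type variable buys: annotating methods with T lets type checkers infer the subclass type, so calls like `Child().parse(...)` come back as `Child` rather than the base class.

from typing import TypeVar

T = TypeVar("T", bound="Base")

class Base:
    def parse(self: T, data: bytes) -> T:
        # Annotating self with T means the return type tracks the subclass.
        return self

class Child(Base):
    pass

child: Child = Child().parse(b"")  # checker infers Child, no cast needed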
122
src/betterproto/casing.py
Normal file
@@ -0,0 +1,122 @@
import re

# Word delimiters and symbols that will not be preserved when re-casing.
# language=PythonRegExp
SYMBOLS = "[^a-zA-Z0-9]*"

# Optionally capitalized word.
# language=PythonRegExp
WORD = "[A-Z]*[a-z]*[0-9]*"

# Uppercase word, not followed by lowercase letters.
# language=PythonRegExp
WORD_UPPER = "[A-Z]+(?![a-z])[0-9]*"


def safe_snake_case(value: str) -> str:
    """Snake case a value taking into account Python keywords."""
    value = snake_case(value)
    if value in [
        "and",
        "as",
        "assert",
        "async",
        "await",
        "break",
        "class",
        "continue",
        "def",
        "del",
        "elif",
        "else",
        "except",
        "finally",
        "for",
        "from",
        "global",
        "if",
        "import",
        "in",
        "is",
        "lambda",
        "nonlocal",
        "not",
        "or",
        "pass",
        "raise",
        "return",
        "try",
        "while",
        "with",
        "yield",
    ]:
        # https://www.python.org/dev/peps/pep-0008/#descriptive-naming-styles
        value += "_"
    return value


def snake_case(value: str, strict: bool = True):
    """
    Join words with an underscore into lowercase and remove symbols.
    @param value: value to convert
    @param strict: force single underscores
    """

    def substitute_word(symbols, word, is_start):
        if not word:
            return ""
        if strict:
            delimiter_count = 0 if is_start else 1  # Single underscore if strict.
        elif is_start:
            delimiter_count = len(symbols)
        elif word.isupper() or word.islower():
            delimiter_count = max(
                1, len(symbols)
            )  # Preserve all delimiters if not strict.
        else:
            delimiter_count = len(symbols) + 1  # Extra underscore for leading capital.

        return ("_" * delimiter_count) + word.lower()

    snake = re.sub(
        f"(^)?({SYMBOLS})({WORD_UPPER}|{WORD})",
        lambda groups: substitute_word(groups[2], groups[3], groups[1] is not None),
        value,
    )
    return snake


def pascal_case(value: str, strict: bool = True):
    """
    Capitalize each word and remove symbols.
    @param value: value to convert
    @param strict: output only alphanumeric characters
    """

    def substitute_word(symbols, word):
        if strict:
            return word.capitalize()  # Remove all delimiters

        if word.islower():
            delimiter_length = len(symbols[:-1])  # Lose one delimiter
        else:
            delimiter_length = len(symbols)  # Preserve all delimiters

        return ("_" * delimiter_length) + word.capitalize()

    return re.sub(
        f"({SYMBOLS})({WORD_UPPER}|{WORD})",
        lambda groups: substitute_word(groups[1], groups[2]),
        value,
    )


def camel_case(value: str, strict: bool = True):
    """
    Capitalize all words except the first and remove symbols.
    """
    return lowercase_first(pascal_case(value, strict=strict))


def lowercase_first(value: str):
    return value[0:1].lower() + value[1:]
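A quick sketch of how these helpers behave (expected results shown as comments; assumes the package is importable as betterproto.casing after this move):

from betterproto.casing import camel_case, pascal_case, safe_snake_case, snake_case

snake_case("ThisIsATest")     # 'this_is_a_test'
snake_case("connectionID")    # 'connection_id' (WORD_UPPER catches the acronym)
pascal_case("get_user_name")  # 'GetUserName'
camel_case("get_user_name")   # 'getUserName'
safe_snake_case("class")      # 'class_' (Python keywords get a trailing underscore)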
0
src/betterproto/compile/__init__.py
Normal file
160
src/betterproto/compile/importing.py
Normal file
@@ -0,0 +1,160 @@
import os
import re
from typing import Dict, List, Set, Type

from betterproto import safe_snake_case
from betterproto.compile.naming import pythonize_class_name
from betterproto.lib.google import protobuf as google_protobuf

WRAPPER_TYPES: Dict[str, Type] = {
    ".google.protobuf.DoubleValue": google_protobuf.DoubleValue,
    ".google.protobuf.FloatValue": google_protobuf.FloatValue,
    ".google.protobuf.Int32Value": google_protobuf.Int32Value,
    ".google.protobuf.Int64Value": google_protobuf.Int64Value,
    ".google.protobuf.UInt32Value": google_protobuf.UInt32Value,
    ".google.protobuf.UInt64Value": google_protobuf.UInt64Value,
    ".google.protobuf.BoolValue": google_protobuf.BoolValue,
    ".google.protobuf.StringValue": google_protobuf.StringValue,
    ".google.protobuf.BytesValue": google_protobuf.BytesValue,
}


def parse_source_type_name(field_type_name):
    """
    Split full source type name into package and type name.
    E.g. 'root.package.Message'  -> ('root.package', 'Message')
         'root.Message.SomeEnum' -> ('root', 'Message.SomeEnum')
    """
    package_match = re.match(r"^\.?([^A-Z]+)\.(.+)", field_type_name)
    if package_match:
        package = package_match.group(1)
        name = package_match.group(2)
    else:
        package = ""
        name = field_type_name.lstrip(".")
    return package, name


def get_type_reference(
    package: str, imports: set, source_type: str, unwrap: bool = True,
) -> str:
    """
    Return a Python type name for a proto type reference. Adds the import if
    necessary. Unwraps well-known types if required.
    """
    if unwrap:
        if source_type in WRAPPER_TYPES:
            wrapped_type = type(WRAPPER_TYPES[source_type]().value)
            return f"Optional[{wrapped_type.__name__}]"

        if source_type == ".google.protobuf.Duration":
            return "timedelta"

        if source_type == ".google.protobuf.Timestamp":
            return "datetime"

    source_package, source_type = parse_source_type_name(source_type)

    current_package: List[str] = package.split(".") if package else []
    py_package: List[str] = source_package.split(".") if source_package else []
    py_type: str = pythonize_class_name(source_type)

    compiling_google_protobuf = current_package == ["google", "protobuf"]
    importing_google_protobuf = py_package == ["google", "protobuf"]
    if importing_google_protobuf and not compiling_google_protobuf:
        py_package = ["betterproto", "lib"] + py_package

    if py_package[:1] == ["betterproto"]:
        return reference_absolute(imports, py_package, py_type)

    if py_package == current_package:
        return reference_sibling(py_type)

    if py_package[: len(current_package)] == current_package:
        return reference_descendent(current_package, imports, py_package, py_type)

    if current_package[: len(py_package)] == py_package:
        return reference_ancestor(current_package, imports, py_package, py_type)

    return reference_cousin(current_package, imports, py_package, py_type)


def reference_absolute(imports, py_package, py_type):
    """
    Returns a reference to a python type located in the root, i.e. sys.path.
    """
    string_import = ".".join(py_package)
    string_alias = safe_snake_case(string_import)
    imports.add(f"import {string_import} as {string_alias}")
    return f'"{string_alias}.{py_type}"'


def reference_sibling(py_type: str) -> str:
    """
    Returns a reference to a python type within the same package as the current package.
    """
    return f'"{py_type}"'


def reference_descendent(
    current_package: List[str], imports: Set[str], py_package: List[str], py_type: str
) -> str:
    """
    Returns a reference to a python type in a package that is a descendent of the
    current package, and adds the required import that is aliased to avoid name
    conflicts.
    """
    importing_descendent = py_package[len(current_package) :]
    string_from = ".".join(importing_descendent[:-1])
    string_import = importing_descendent[-1]
    if string_from:
        string_alias = "_".join(importing_descendent)
        imports.add(f"from .{string_from} import {string_import} as {string_alias}")
        return f'"{string_alias}.{py_type}"'
    else:
        imports.add(f"from . import {string_import}")
        return f'"{string_import}.{py_type}"'


def reference_ancestor(
    current_package: List[str], imports: Set[str], py_package: List[str], py_type: str
) -> str:
    """
    Returns a reference to a python type in a package which is an ancestor of the
    current package, and adds the required import that is aliased (if possible) to
    avoid name conflicts.

    Adds trailing __ to avoid name mangling (python.org/dev/peps/pep-0008/#id34).
    """
    distance_up = len(current_package) - len(py_package)
    if py_package:
        string_import = py_package[-1]
        string_alias = f"_{'_' * distance_up}{string_import}__"
        string_from = f"..{'.' * distance_up}"
        imports.add(f"from {string_from} import {string_import} as {string_alias}")
        return f'"{string_alias}.{py_type}"'
    else:
        string_alias = f"{'_' * distance_up}{py_type}__"
        imports.add(f"from .{'.' * distance_up} import {py_type} as {string_alias}")
        return f'"{string_alias}"'


def reference_cousin(
    current_package: List[str], imports: Set[str], py_package: List[str], py_type: str
) -> str:
    """
    Returns a reference to a python type in a package that is not a descendent,
    ancestor or sibling, and adds the required import that is aliased to avoid
    name conflicts.
    """
    shared_ancestry = os.path.commonprefix([current_package, py_package])
    distance_up = len(current_package) - len(shared_ancestry)
    string_from = f".{'.' * distance_up}" + ".".join(
        py_package[len(shared_ancestry) : -1]
    )
    string_import = py_package[-1]
    # Add trailing __ to avoid name mangling (python.org/dev/peps/pep-0008/#id34)
    string_alias = (
        f"{'_' * distance_up}"
        + safe_snake_case(".".join(py_package[len(shared_ancestry) :]))
        + "__"
    )
    imports.add(f"from {string_from} import {string_import} as {string_alias}")
    return f'"{string_alias}.{py_type}"'
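A sketch of how get_type_reference resolves references relative to the package being compiled (the package and type names below are made up; calls may also add an aliased import statement to the passed-in imports set):

imports = set()

# Well-known wrapper types unwrap to optional scalars.
get_type_reference("", imports, ".google.protobuf.Int32Value")  # 'Optional[int]'
get_type_reference("", imports, ".google.protobuf.Duration")    # 'timedelta'

# Same package: a plain forward reference, no import added.
get_type_reference("root.pkg", imports, ".root.pkg.Thing")      # '"Thing"'

# Ancestor package: aliased relative import; the trailing __ avoids mangling.
get_type_reference("root.pkg", imports, ".root.Thing")          # '"__root__.Thing"'
# imports now contains: 'from ... import root as __root__'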
13
src/betterproto/compile/naming.py
Normal file
@@ -0,0 +1,13 @@
from betterproto import casing


def pythonize_class_name(name):
    return casing.pascal_case(name)


def pythonize_field_name(name: str):
    return casing.safe_snake_case(name)


def pythonize_method_name(name: str):
    return casing.safe_snake_case(name)
0
src/betterproto/grpc/__init__.py
Normal file
169
src/betterproto/grpc/grpclib_client.py
Normal file
@@ -0,0 +1,169 @@
from abc import ABC
import asyncio
import grpclib.const
from typing import (
    AsyncIterable,
    AsyncIterator,
    Collection,
    Iterable,
    Mapping,
    Optional,
    Tuple,
    TYPE_CHECKING,
    Type,
    Union,
)
from .._types import ST, T

if TYPE_CHECKING:
    from grpclib._typing import IProtoMessage
    from grpclib.client import Channel
    from grpclib.metadata import Deadline


_Value = Union[str, bytes]
_MetadataLike = Union[Mapping[str, _Value], Collection[Tuple[str, _Value]]]
_MessageSource = Union[Iterable["IProtoMessage"], AsyncIterable["IProtoMessage"]]


class ServiceStub(ABC):
    """
    Base class for async gRPC clients.
    """

    def __init__(
        self,
        channel: "Channel",
        *,
        timeout: Optional[float] = None,
        deadline: Optional["Deadline"] = None,
        metadata: Optional[_MetadataLike] = None,
    ) -> None:
        self.channel = channel
        self.timeout = timeout
        self.deadline = deadline
        self.metadata = metadata

    def __resolve_request_kwargs(
        self,
        timeout: Optional[float],
        deadline: Optional["Deadline"],
        metadata: Optional[_MetadataLike],
    ):
        return {
            "timeout": self.timeout if timeout is None else timeout,
            "deadline": self.deadline if deadline is None else deadline,
            "metadata": self.metadata if metadata is None else metadata,
        }

    async def _unary_unary(
        self,
        route: str,
        request: "IProtoMessage",
        response_type: Type[T],
        *,
        timeout: Optional[float] = None,
        deadline: Optional["Deadline"] = None,
        metadata: Optional[_MetadataLike] = None,
    ) -> T:
        """Make a unary request and return the response."""
        async with self.channel.request(
            route,
            grpclib.const.Cardinality.UNARY_UNARY,
            type(request),
            response_type,
            **self.__resolve_request_kwargs(timeout, deadline, metadata),
        ) as stream:
            await stream.send_message(request, end=True)
            response = await stream.recv_message()
            assert response is not None
            return response

    async def _unary_stream(
        self,
        route: str,
        request: "IProtoMessage",
        response_type: Type[T],
        *,
        timeout: Optional[float] = None,
        deadline: Optional["Deadline"] = None,
        metadata: Optional[_MetadataLike] = None,
    ) -> AsyncIterator[T]:
        """Make a unary request and return the stream response iterator."""
        async with self.channel.request(
            route,
            grpclib.const.Cardinality.UNARY_STREAM,
            type(request),
            response_type,
            **self.__resolve_request_kwargs(timeout, deadline, metadata),
        ) as stream:
            await stream.send_message(request, end=True)
            async for message in stream:
                yield message

    async def _stream_unary(
        self,
        route: str,
        request_iterator: _MessageSource,
        request_type: Type[ST],
        response_type: Type[T],
        *,
        timeout: Optional[float] = None,
        deadline: Optional["Deadline"] = None,
        metadata: Optional[_MetadataLike] = None,
    ) -> T:
        """Make a stream request and return the response."""
        async with self.channel.request(
            route,
            grpclib.const.Cardinality.STREAM_UNARY,
            request_type,
            response_type,
            **self.__resolve_request_kwargs(timeout, deadline, metadata),
        ) as stream:
            await self._send_messages(stream, request_iterator)
            response = await stream.recv_message()
            assert response is not None
            return response

    async def _stream_stream(
        self,
        route: str,
        request_iterator: _MessageSource,
        request_type: Type[ST],
        response_type: Type[T],
        *,
        timeout: Optional[float] = None,
        deadline: Optional["Deadline"] = None,
        metadata: Optional[_MetadataLike] = None,
    ) -> AsyncIterator[T]:
        """
        Make a stream request and return an AsyncIterator to iterate over response
        messages.
        """
        async with self.channel.request(
            route,
            grpclib.const.Cardinality.STREAM_STREAM,
            request_type,
            response_type,
            **self.__resolve_request_kwargs(timeout, deadline, metadata),
        ) as stream:
            await stream.send_request()
            sending_task = asyncio.ensure_future(
                self._send_messages(stream, request_iterator)
            )
            try:
                async for response in stream:
                    yield response
            except:
                sending_task.cancel()
                raise

    @staticmethod
    async def _send_messages(stream, messages: _MessageSource):
        if isinstance(messages, AsyncIterable):
            async for message in messages:
                await stream.send_message(message)
        else:
            for message in messages:
                await stream.send_message(message)
        await stream.end()
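To see how these helpers get used, here is roughly the shape of a stub the plugin generates on top of ServiceStub (Greeter, HelloRequest, and HelloReply are illustrative names, not from the diff; the real output is produced by the template further below):

class GreeterStub(ServiceStub):
    async def say_hello(self, *, name: str = "") -> "HelloReply":
        # Build the request message, then delegate to the unary-unary helper.
        request = HelloRequest()
        request.name = name
        return await self._unary_unary(
            "/greeter.Greeter/SayHello", request, HelloReply
        )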
0
src/betterproto/grpc/util/__init__.py
Normal file
198
src/betterproto/grpc/util/async_channel.py
Normal file
@@ -0,0 +1,198 @@
import asyncio
from typing import (
    AsyncIterable,
    AsyncIterator,
    Iterable,
    Optional,
    TypeVar,
    Union,
)

T = TypeVar("T")


class ChannelClosed(Exception):
    """
    An exception raised on an attempt to send through a closed channel
    """

    pass


class ChannelDone(Exception):
    """
    An exception raised on an attempt to receive from a channel that is both closed
    and empty.
    """

    pass


class AsyncChannel(AsyncIterable[T]):
    """
    A buffered async channel for sending items between coroutines with FIFO ordering.

    This makes decoupled bidirectional streaming gRPC requests easy if used like:

    .. code-block:: python
        client = GeneratedStub(grpclib_chan)
        request_channel = AsyncChannel()
        # We can start by sending all the requests we already have
        await request_channel.send_from([RequestObject(...), RequestObject(...)])
        async for response in client.rpc_call(request_channel):
            # The response iterator will remain active until the connection is closed
            ...
            # More items can be sent at any time
            await request_channel.send(RequestObject(...))
            ...
            # The channel must be closed to complete the gRPC connection
            request_channel.close()

    Items can be sent through the channel by either:
    - providing an iterable to the send_from method
    - passing them to the send method one at a time

    Items can be received from the channel by either:
    - iterating over the channel with a for loop to get all items
    - calling the receive method to get one item at a time

    If the channel is empty then receivers will wait until either an item appears
    or the channel is closed.

    Once the channel is closed then subsequent attempts to send through the channel
    will fail with a ChannelClosed exception.

    When the channel is closed and empty then it is done, and further attempts to
    receive from it will fail with a ChannelDone exception.

    If multiple coroutines receive from the channel concurrently, each item sent
    will be received by only one of the receivers.

    :param source:
        An optional iterable with items that should be sent through the channel
        immediately.
    :param buffer_limit:
        Limit the number of items that can be buffered in the channel. A value less
        than 1 implies no limit. If the channel is full then attempts to send more
        items will result in the sender waiting until an item is received from the
        channel.
    :param close:
        If set to True then the channel will automatically close after exhausting
        source or immediately if no source is provided.
    """

    def __init__(
        self, *, buffer_limit: int = 0, close: bool = False,
    ):
        self._queue: asyncio.Queue[Union[T, object]] = asyncio.Queue(buffer_limit)
        self._closed = False
        self._waiting_receivers: int = 0
        # Track whether flush has been invoked so it can only happen once
        self._flushed = False

    def __aiter__(self) -> AsyncIterator[T]:
        return self

    async def __anext__(self) -> T:
        if self.done():
            raise StopAsyncIteration
        self._waiting_receivers += 1
        try:
            result = await self._queue.get()
            if result is self.__flush:
                raise StopAsyncIteration
            return result
        finally:
            self._waiting_receivers -= 1
            self._queue.task_done()

    def closed(self) -> bool:
        """
        Returns True if this channel is closed and no longer accepting new items
        """
        return self._closed

    def done(self) -> bool:
        """
        Check if this channel is done.

        :return: True if this channel is closed and has been drained of items, in
        which case any further attempts to receive an item from this channel will
        raise a ChannelDone exception.
        """
        # After close the channel is not yet done until there is at least one waiting
        # receiver per enqueued item.
        return self._closed and self._queue.qsize() <= self._waiting_receivers

    async def send_from(
        self, source: Union[Iterable[T], AsyncIterable[T]], close: bool = False
    ) -> "AsyncChannel[T]":
        """
        Iterates the given [Async]Iterable and sends all the resulting items.
        If close is set to True then subsequent send calls will be rejected with a
        ChannelClosed exception.
        :param source: an iterable of items to send
        :param close:
            if True then the channel will be closed after the source has been
            exhausted
        """
        if self._closed:
            raise ChannelClosed("Cannot send through a closed channel")
        if isinstance(source, AsyncIterable):
            async for item in source:
                await self._queue.put(item)
        else:
            for item in source:
                await self._queue.put(item)
        if close:
            # Complete the closing process
            self.close()
        return self

    async def send(self, item: T) -> "AsyncChannel[T]":
        """
        Send a single item over this channel.
        :param item: The item to send
        """
        if self._closed:
            raise ChannelClosed("Cannot send through a closed channel")
        await self._queue.put(item)
        return self

    async def receive(self) -> Optional[T]:
        """
        Returns the next item from this channel when it becomes available,
        or None if the channel is closed before another item is sent.
        :return: An item from the channel
        """
        if self.done():
            raise ChannelDone("Cannot receive from a closed channel")
        self._waiting_receivers += 1
        try:
            result = await self._queue.get()
            if result is self.__flush:
                return None
            return result
        finally:
            self._waiting_receivers -= 1
            self._queue.task_done()

    def close(self):
        """
        Close this channel to new items
        """
        self._closed = True
        asyncio.ensure_future(self._flush_queue())

    async def _flush_queue(self):
        """
        To be called after the channel is closed. Pushes a number of self.__flush
        objects to the queue to ensure no waiting consumers get deadlocked.
        """
        if not self._flushed:
            self._flushed = True
            deadlocked_receivers = max(0, self._waiting_receivers - self._queue.qsize())
            for _ in range(deadlocked_receivers):
                await self._queue.put(self.__flush)

    # A special signal object for flushing the queue when the channel is closed
    __flush = object()
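A minimal, self-contained sketch of the channel's producer/consumer semantics, with no gRPC involved (assumes AsyncChannel above is importable):

import asyncio

async def main():
    channel: AsyncChannel[int] = AsyncChannel()
    # Queue a few items, then close so the iterator below terminates.
    await channel.send_from(range(3), close=True)
    async for item in channel:
        print(item)  # prints 0, 1, 2, then the loop ends because done() is True

asyncio.run(main())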
0
src/betterproto/lib/__init__.py
Normal file
0
src/betterproto/lib/google/__init__.py
Normal file
1312
src/betterproto/lib/google/protobuf/__init__.py
Normal file
File diff suppressed because it is too large
2
src/betterproto/plugin.bat
Normal file
@@ -0,0 +1,2 @@
@SET plugin_dir=%~dp0
@python %plugin_dir%/plugin.py %*
460
src/betterproto/plugin.py
Executable file
@@ -0,0 +1,460 @@
#!/usr/bin/env python
import collections
import itertools
import os.path
import pathlib
import re
import sys
import textwrap
from typing import List, Union

from google.protobuf.compiler.plugin_pb2 import CodeGeneratorRequest

import betterproto
from betterproto.compile.importing import get_type_reference, parse_source_type_name
from betterproto.compile.naming import (
    pythonize_class_name,
    pythonize_field_name,
    pythonize_method_name,
)
from betterproto.lib.google.protobuf import ServiceDescriptorProto

try:
    # betterproto[compiler] specific dependencies
    import black
    from google.protobuf.compiler import plugin_pb2 as plugin
    from google.protobuf.descriptor_pb2 import (
        DescriptorProto,
        EnumDescriptorProto,
        FieldDescriptorProto,
    )
    import google.protobuf.wrappers_pb2 as google_wrappers
    import jinja2
except ImportError as err:
    missing_import = err.args[0][17:-1]
    print(
        "\033[31m"
        f"Unable to import `{missing_import}` from betterproto plugin! "
        "Please ensure that you've installed betterproto as "
        '`pip install "betterproto[compiler]"` so that compiler dependencies '
        "are included."
        "\033[0m"
    )
    raise SystemExit(1)


def py_type(package: str, imports: set, field: FieldDescriptorProto) -> str:
    if field.type in [1, 2]:
        return "float"
    elif field.type in [3, 4, 5, 6, 7, 13, 15, 16, 17, 18]:
        return "int"
    elif field.type == 8:
        return "bool"
    elif field.type == 9:
        return "str"
    elif field.type in [11, 14]:
        # Type referencing another defined Message or a named enum
        return get_type_reference(package, imports, field.type_name)
    elif field.type == 12:
        return "bytes"
    else:
        raise NotImplementedError(f"Unknown type {field.type}")


def get_py_zero(type_num: int) -> Union[str, float]:
    zero: Union[str, float] = 0
    if type_num in []:
        zero = 0.0
    elif type_num == 8:
        zero = "False"
    elif type_num == 9:
        zero = '""'
    elif type_num == 11:
        zero = "None"
    elif type_num == 12:
        zero = 'b""'

    return zero


def traverse(proto_file):
    # Todo: Keep information about nested hierarchy
    def _traverse(path, items, prefix=""):
        for i, item in enumerate(items):
            # Adjust the name since we flatten the hierarchy.
            # Todo: don't change the name, but include full name in returned tuple
            item.name = next_prefix = prefix + item.name
            yield item, path + [i]

            if isinstance(item, DescriptorProto):
                for enum in item.enum_type:
                    enum.name = next_prefix + enum.name
                    yield enum, path + [i, 4]

                if item.nested_type:
                    for n, p in _traverse(path + [i, 3], item.nested_type, next_prefix):
                        yield n, p

    return itertools.chain(
        _traverse([5], proto_file.enum_type), _traverse([4], proto_file.message_type)
    )


def get_comment(proto_file, path: List[int], indent: int = 4) -> str:
    pad = " " * indent
    for sci in proto_file.source_code_info.location:
        # print(list(sci.path), path, file=sys.stderr)
        if list(sci.path) == path and sci.leading_comments:
            lines = textwrap.wrap(
                sci.leading_comments.strip().replace("\n", ""), width=79 - indent
            )

            if path[-2] == 2 and path[-4] != 6:
                # This is a field
                return f"{pad}# " + f"\n{pad}# ".join(lines)
            else:
                # This is a message, enum, service, or method
                if len(lines) == 1 and len(lines[0]) < 79 - indent - 6:
                    lines[0] = lines[0].strip('"')
                    return f'{pad}"""{lines[0]}"""'
                else:
                    joined = f"\n{pad}".join(lines)
                    return f'{pad}"""\n{pad}{joined}\n{pad}"""'

    return ""


def generate_code(request, response):
    plugin_options = request.parameter.split(",") if request.parameter else []

    env = jinja2.Environment(
        trim_blocks=True,
        lstrip_blocks=True,
        loader=jinja2.FileSystemLoader("%s/templates/" % os.path.dirname(__file__)),
    )
    template = env.get_template("template.py.j2")

    # Gather output packages
    output_package_files = collections.defaultdict()
    for proto_file in request.proto_file:
        if (
            proto_file.package == "google.protobuf"
            and "INCLUDE_GOOGLE" not in plugin_options
        ):
            continue

        output_package = proto_file.package
        output_package_files.setdefault(
            output_package, {"input_package": proto_file.package, "files": []}
        )
        output_package_files[output_package]["files"].append(proto_file)

    # Initialize Template data for each package
    for output_package_name, output_package_content in output_package_files.items():
        template_data = {
            "input_package": output_package_content["input_package"],
            "files": [f.name for f in output_package_content["files"]],
            "imports": set(),
            "datetime_imports": set(),
            "typing_imports": set(),
            "messages": [],
            "enums": [],
            "services": [],
        }
        output_package_content["template_data"] = template_data

    # Read Messages and Enums
    output_types = []
    for output_package_name, output_package_content in output_package_files.items():
        for proto_file in output_package_content["files"]:
            for item, path in traverse(proto_file):
                type_data = read_protobuf_type(
                    item, path, proto_file, output_package_content
                )
                output_types.append(type_data)

    # Read Services
    for output_package_name, output_package_content in output_package_files.items():
        for proto_file in output_package_content["files"]:
            for index, service in enumerate(proto_file.service):
                read_protobuf_service(
                    service, index, proto_file, output_package_content, output_types
                )

    # Render files
    output_paths = set()
    for output_package_name, output_package_content in output_package_files.items():
        template_data = output_package_content["template_data"]
        template_data["imports"] = sorted(template_data["imports"])
        template_data["datetime_imports"] = sorted(template_data["datetime_imports"])
        template_data["typing_imports"] = sorted(template_data["typing_imports"])

        # Fill response
        output_path = pathlib.Path(*output_package_name.split("."), "__init__.py")
        output_paths.add(output_path)

        f = response.file.add()
        f.name = str(output_path)

        # Render and then format the output file.
        f.content = black.format_str(
            template.render(description=template_data),
            mode=black.FileMode(target_versions={black.TargetVersion.PY37}),
        )

    # Make each output directory a package with __init__ file
    init_files = (
        set(
            directory.joinpath("__init__.py")
            for path in output_paths
            for directory in path.parents
        )
        - output_paths
    )

    for init_file in init_files:
        init = response.file.add()
        init.name = str(init_file)

    for output_package_name in sorted(output_paths.union(init_files)):
        print(f"Writing {output_package_name}", file=sys.stderr)


def read_protobuf_type(item: DescriptorProto, path: List[int], proto_file, content):
    input_package_name = content["input_package"]
    template_data = content["template_data"]
    data = {
        "name": item.name,
        "py_name": pythonize_class_name(item.name),
        "descriptor": item,
        "package": input_package_name,
    }
    if isinstance(item, DescriptorProto):
        # print(item, file=sys.stderr)
        if item.options.map_entry:
            # Skip generated map entry messages since we just use dicts
            return

        data.update(
            {
                "type": "Message",
                "comment": get_comment(proto_file, path),
                "properties": [],
            }
        )

        for i, f in enumerate(item.field):
            t = py_type(input_package_name, template_data["imports"], f)
            zero = get_py_zero(f.type)

            repeated = False
            packed = False

            field_type = f.Type.Name(f.type).lower()[5:]

            field_wraps = ""
            match_wrapper = re.match(r"\.google\.protobuf\.(.+)Value", f.type_name)
            if match_wrapper:
                wrapped_type = "TYPE_" + match_wrapper.group(1).upper()
                if hasattr(betterproto, wrapped_type):
                    field_wraps = f"betterproto.{wrapped_type}"

            map_types = None
            if f.type == 11:
                # This might be a map...
                message_type = f.type_name.split(".").pop().lower()
                # message_type = py_type(package)
                map_entry = f"{f.name.replace('_', '').lower()}entry"

                if message_type == map_entry:
                    for nested in item.nested_type:
                        if nested.name.replace("_", "").lower() == map_entry:
                            if nested.options.map_entry:
                                # print("Found a map!", file=sys.stderr)
                                k = py_type(
                                    input_package_name,
                                    template_data["imports"],
                                    nested.field[0],
                                )
                                v = py_type(
                                    input_package_name,
                                    template_data["imports"],
                                    nested.field[1],
                                )
                                t = f"Dict[{k}, {v}]"
                                field_type = "map"
                                map_types = (
                                    f.Type.Name(nested.field[0].type),
                                    f.Type.Name(nested.field[1].type),
                                )
                                template_data["typing_imports"].add("Dict")

            if f.label == 3 and field_type != "map":
                # Repeated field
                repeated = True
                t = f"List[{t}]"
                zero = "[]"
                template_data["typing_imports"].add("List")

                if f.type in [1, 2, 3, 4, 5, 6, 7, 8, 13, 15, 16, 17, 18]:
                    packed = True

            one_of = ""
            if f.HasField("oneof_index"):
                one_of = item.oneof_decl[f.oneof_index].name

            if "Optional[" in t:
                template_data["typing_imports"].add("Optional")

            if "timedelta" in t:
                template_data["datetime_imports"].add("timedelta")
            elif "datetime" in t:
                template_data["datetime_imports"].add("datetime")

            data["properties"].append(
                {
                    "name": f.name,
                    "py_name": pythonize_field_name(f.name),
                    "number": f.number,
                    "comment": get_comment(proto_file, path + [2, i]),
                    "proto_type": int(f.type),
                    "field_type": field_type,
                    "field_wraps": field_wraps,
                    "map_types": map_types,
                    "type": t,
                    "zero": zero,
                    "repeated": repeated,
                    "packed": packed,
                    "one_of": one_of,
                }
            )
            # print(f, file=sys.stderr)

        template_data["messages"].append(data)
        return data
    elif isinstance(item, EnumDescriptorProto):
        # print(item.name, path, file=sys.stderr)
        data.update(
            {
                "type": "Enum",
                "comment": get_comment(proto_file, path),
                "entries": [
                    {
                        "name": v.name,
                        "value": v.number,
                        "comment": get_comment(proto_file, path + [2, i]),
                    }
                    for i, v in enumerate(item.value)
                ],
            }
        )

        template_data["enums"].append(data)
        return data


def lookup_method_input_type(method, types):
    package, name = parse_source_type_name(method.input_type)

    for known_type in types:
        if known_type["type"] != "Message":
            continue

        # Nested types are currently flattened without dots.
        # Todo: keep a fully qualified name in types that is comparable with
        # method.input_type
        if (
            package == known_type["package"]
            and name.replace(".", "") == known_type["name"]
        ):
            return known_type


def read_protobuf_service(
    service: ServiceDescriptorProto, index, proto_file, content, output_types
):
    input_package_name = content["input_package"]
    template_data = content["template_data"]
    # print(service, file=sys.stderr)
    data = {
        "name": service.name,
        "py_name": pythonize_class_name(service.name),
        "comment": get_comment(proto_file, [6, index]),
        "methods": [],
    }
    for j, method in enumerate(service.method):
        method_input_message = lookup_method_input_type(method, output_types)

        if method_input_message:
            for field in method_input_message["properties"]:
                if field["zero"] == "None":
                    template_data["typing_imports"].add("Optional")

        data["methods"].append(
            {
                "name": method.name,
                "py_name": pythonize_method_name(method.name),
                "comment": get_comment(proto_file, [6, index, 2, j], indent=8),
                "route": f"/{input_package_name}.{service.name}/{method.name}",
                "input": get_type_reference(
                    input_package_name, template_data["imports"], method.input_type
                ).strip('"'),
                "input_message": method_input_message,
                "output": get_type_reference(
                    input_package_name,
                    template_data["imports"],
                    method.output_type,
                    unwrap=False,
                ),
                "client_streaming": method.client_streaming,
                "server_streaming": method.server_streaming,
            }
        )

        if method.client_streaming:
            template_data["typing_imports"].add("AsyncIterable")
            template_data["typing_imports"].add("Iterable")
            template_data["typing_imports"].add("Union")
        if method.server_streaming:
            template_data["typing_imports"].add("AsyncIterator")

    template_data["services"].append(data)


def main():
    """The plugin's main entry point."""
    # Read request message from stdin
    data = sys.stdin.buffer.read()

    # Parse request
    request = plugin.CodeGeneratorRequest()
    request.ParseFromString(data)

    dump_file = os.getenv("BETTERPROTO_DUMP")
    if dump_file:
        dump_request(dump_file, request)

    # Create response
    response = plugin.CodeGeneratorResponse()

    # Generate code
    generate_code(request, response)

    # Serialise response message
    output = response.SerializeToString()

    # Write to stdout
    sys.stdout.buffer.write(output)


def dump_request(dump_file: str, request: CodeGeneratorRequest):
    """
    For developers: Supports running plugin.py standalone so it's possible to debug it.
    Run protoc (or generate.py) with BETTERPROTO_DUMP="yourfile.bin" to write the request to a file.
    Then run plugin.py from your IDE in debugging mode, and redirect stdin to the file.
    """
    with open(str(dump_file), "wb") as fh:
        sys.stderr.write(f"\033[31mWriting input from protoc to: {dump_file}\033[0m\n")
        fh.write(request.SerializeToString())


if __name__ == "__main__":
    main()
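For debugging, a sketch of replaying a dumped request through the plugin without protoc, per the dump_request docstring above (assumes a yourfile.bin was previously produced by running protoc with BETTERPROTO_DUMP set):

import subprocess
import sys

with open("yourfile.bin", "rb") as fh:
    dumped_request = fh.read()

# Feed the dump to the plugin over stdin, exactly as protoc would.
result = subprocess.run(
    [sys.executable, "src/betterproto/plugin.py"],
    input=dumped_request,
    capture_output=True,
)
response_bytes = result.stdout  # a serialized CodeGeneratorResponse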
0
src/betterproto/py.typed
Normal file
135
src/betterproto/templates/template.py.j2
Normal file
@@ -0,0 +1,135 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# sources: {{ ', '.join(description.files) }}
# plugin: python-betterproto
from dataclasses import dataclass
{% if description.datetime_imports %}
from datetime import {% for i in description.datetime_imports %}{{ i }}{% if not loop.last %}, {% endif %}{% endfor %}

{% endif %}
{% if description.typing_imports %}
from typing import {% for i in description.typing_imports %}{{ i }}{% if not loop.last %}, {% endif %}{% endfor %}

{% endif %}

import betterproto
{% if description.services %}
import grpclib
{% endif %}


{% if description.enums %}{% for enum in description.enums %}
class {{ enum.py_name }}(betterproto.Enum):
    {% if enum.comment %}
{{ enum.comment }}

    {% endif %}
    {% for entry in enum.entries %}
    {% if entry.comment %}
{{ entry.comment }}
    {% endif %}
    {{ entry.name }} = {{ entry.value }}
    {% endfor %}


{% endfor %}
{% endif %}
{% for message in description.messages %}
@dataclass
class {{ message.py_name }}(betterproto.Message):
    {% if message.comment %}
{{ message.comment }}

    {% endif %}
    {% for field in message.properties %}
    {% if field.comment %}
{{ field.comment }}
    {% endif %}
    {{ field.py_name }}: {{ field.type }} = betterproto.{{ field.field_type }}_field({{ field.number }}{% if field.field_type == 'map'%}, betterproto.{{ field.map_types[0] }}, betterproto.{{ field.map_types[1] }}{% endif %}{% if field.one_of %}, group="{{ field.one_of }}"{% endif %}{% if field.field_wraps %}, wraps={{ field.field_wraps }}{% endif %})
    {% endfor %}
    {% if not message.properties %}
    pass
    {% endif %}


{% endfor %}
{% for service in description.services %}
class {{ service.py_name }}Stub(betterproto.ServiceStub):
    {% if service.comment %}
{{ service.comment }}

    {% endif %}
    {% for method in service.methods %}
    async def {{ method.py_name }}(self
        {%- if not method.client_streaming -%}
        {%- if method.input_message and method.input_message.properties -%}, *,
        {%- for field in method.input_message.properties -%}
        {{ field.py_name }}: {% if field.zero == "None" and not field.type.startswith("Optional[") -%}
        Optional[{{ field.type }}]
        {%- else -%}
        {{ field.type }}
        {%- endif -%} = {{ field.zero }}
        {%- if not loop.last %}, {% endif -%}
        {%- endfor -%}
        {%- endif -%}
        {%- else -%}
        {# Client streaming: need a request iterator instead #}
        , request_iterator: Union[AsyncIterable["{{ method.input }}"], Iterable["{{ method.input }}"]]
        {%- endif -%}
        ) -> {% if method.server_streaming %}AsyncIterator[{{ method.output }}]{% else %}{{ method.output }}{% endif %}:
        {% if method.comment %}
{{ method.comment }}

        {% endif %}
        {% if not method.client_streaming %}
        request = {{ method.input }}()
        {% for field in method.input_message.properties %}
        {% if field.field_type == 'message' %}
        if {{ field.py_name }} is not None:
            request.{{ field.py_name }} = {{ field.py_name }}
        {% else %}
        request.{{ field.py_name }} = {{ field.py_name }}
        {% endif %}
        {% endfor %}
        {% endif %}

        {% if method.server_streaming %}
        {% if method.client_streaming %}
        async for response in self._stream_stream(
            "{{ method.route }}",
            request_iterator,
            {{ method.input }},
            {{ method.output.strip('"') }},
        ):
            yield response
        {% else %}{# i.e. not client streaming #}
        async for response in self._unary_stream(
            "{{ method.route }}",
            request,
            {{ method.output.strip('"') }},
        ):
            yield response

        {% endif %}{# if client streaming #}
        {% else %}{# i.e. not server streaming #}
        {% if method.client_streaming %}
        return await self._stream_unary(
            "{{ method.route }}",
            request_iterator,
            {{ method.input }},
            {{ method.output.strip('"') }}
        )
        {% else %}{# i.e. not client streaming #}
        return await self._unary_unary(
            "{{ method.route }}",
            request,
            {{ method.output.strip('"') }}
        )
        {% endif %}{# client streaming #}
        {% endif %}

    {% endfor %}
{% endfor %}

{% for i in description.imports %}
{{ i }}
{% endfor %}
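For reference, the output of this template for a trivial message looks roughly like the following after black formatting (Greeting and its field are hypothetical; actual output depends on the input .proto):

# Generated by the protocol buffer compiler. DO NOT EDIT!
# sources: greeting.proto
# plugin: python-betterproto
from dataclasses import dataclass

import betterproto


@dataclass
class Greeting(betterproto.Message):
    """A simple greeting."""

    message: str = betterproto.string_field(1)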