Merge branch 'master_gh'

commit 5a518ed044 by Georg K, 2025-04-14 03:54:30 +03:00
39 changed files with 1557 additions and 1113 deletions

View File

@ -2,7 +2,7 @@
There's lots to do, and we're working hard, so any help is welcome!
- :speech_balloon: Join us on [Slack](https://join.slack.com/t/betterproto/shared_invite/zt-f0n0uolx-iN8gBNrkPxtKHTLpG3o1OQ)!
- :speech_balloon: Join us on [Discord](https://discord.gg/DEVteTupPb)!
What can you do?
@ -15,9 +15,9 @@ What can you do?
- File a bug (please check it's not a duplicate)
- Propose an enhancement
- :white_check_mark: Create a PR:
- [Creating a failing test-case](https://github.com/danielgtaylor/python-betterproto/blob/master/betterproto/tests/README.md) to make bug-fixing easier
- [Creating a failing test-case](https://github.com/danielgtaylor/python-betterproto/blob/master/tests/README.md) to make bug-fixing easier
- Fix any of the open issues
- [Good first issues](https://github.com/danielgtaylor/python-betterproto/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
- [Issues with tests](https://github.com/danielgtaylor/python-betterproto/issues?q=is%3Aissue+is%3Aopen+label%3A%22has+test%22)
- New bugfix or idea
- If you'd like to discuss your idea first, join us on Slack!
- If you'd like to discuss your idea first, join us on Discord!

View File

@ -8,7 +8,7 @@ body:
value: >
Thanks for taking the time to fill out a bug report!
If you're not sure it's a bug and you just have a question, the [community Slack channel](https://join.slack.com/t/betterproto/shared_invite/zt-f0n0uolx-iN8gBNrkPxtKHTLpG3o1OQ) is a better place for general questions than a GitHub issue.
If you're not sure it's a bug and you just have a question, the [community Discord channel](https://discord.gg/DEVteTupPb) is a better place for general questions than a GitHub issue.
- type: input
attributes:

View File

@ -2,5 +2,5 @@ name:
description:
contact_links:
- name: For questions about the library
about: Support questions are better answered in our Slack group.
url: https://join.slack.com/t/betterproto/shared_invite/zt-f0n0uolx-iN8gBNrkPxtKHTLpG3o1OQ
about: Support questions are better answered in our Discord group.
url: https://discord.gg/DEVteTupPb

View File

@ -16,7 +16,7 @@ jobs:
fail-fast: false
matrix:
os: [Ubuntu, MacOS, Windows]
python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
python-version: ['3.9', '3.10', '3.11', '3.12', '3.13']
steps:
- uses: actions/checkout@v4
@ -41,7 +41,7 @@ jobs:
run: poetry config virtualenvs.in-project true
- name: Set up cache
uses: actions/cache@v3
uses: actions/cache@v4
id: cache
with:
path: .venv

View File

@ -13,6 +13,6 @@ jobs:
name: Check code/doc formatting
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
- uses: pre-commit/action@v2.0.3
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: pre-commit/action@v3.0.1

View File

@ -25,11 +25,11 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@ -40,7 +40,7 @@ jobs:
# queries: security-extended,security-and-quality
- name: Autobuild
uses: github/codeql-action/autobuild@v2
uses: github/codeql-action/autobuild@v3
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
uses: github/codeql-action/analyze@v3

View File

@ -15,11 +15,11 @@ jobs:
name: Distribution
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.8
uses: actions/setup-python@v4
- uses: actions/checkout@v4
- name: Set up Python 3.9
uses: actions/setup-python@v5
with:
python-version: 3.8
python-version: 3.9
- name: Install poetry
run: python -m pip install poetry
- name: Build package

.gitignore (vendored), 1 line changed
View File

@ -18,3 +18,4 @@ output
.asv
venv
.devcontainer
.ruff_cache

View File

@ -2,16 +2,13 @@ ci:
autofix_prs: false
repos:
- repo: https://github.com/pycqa/isort
rev: 5.11.5
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.9.1
hooks:
- id: isort
- repo: https://github.com/psf/black
rev: 23.1.0
hooks:
- id: black
args: ["--target-version", "py310"]
- id: ruff-format
args: ["--diff", "src", "tests"]
- id: ruff
args: ["--select", "I", "src", "tests"]
- repo: https://github.com/PyCQA/doc8
rev: 0.10.1
@ -21,7 +18,7 @@ repos:
- toml
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.10.0
rev: v2.14.0
hooks:
- id: pretty-format-java
args: [--autofix, --aosp]

View File

@ -439,7 +439,7 @@ value3: str | int = 1
## Development
- _Join us on [Slack](https://join.slack.com/t/betterproto/shared_invite/zt-f0n0uolx-iN8gBNrkPxtKHTLpG3o1OQ)!_
- _Join us on [Discord](https://discord.gg/DEVteTupPb)!_
- _See how you can help → [Contributing](.github/CONTRIBUTING.md)_
### Requirements
@ -575,7 +575,7 @@ protoc \
## Community
Join us on [Slack](https://join.slack.com/t/betterproto/shared_invite/zt-f0n0uolx-iN8gBNrkPxtKHTLpG3o1OQ)!
Join us on [Discord](https://discord.gg/DEVteTupPb)!
## License

poetry.lock (generated), 2037 lines changed

File diff suppressed because it is too large.

View File

@ -1,8 +1,10 @@
[tool.poetry]
[project]
name = "betterproto"
version = "2.0.0b7"
description = "A better Protobuf / gRPC generator & library"
authors = ["Daniel G. Taylor <danielgtaylor@gmail.com>"]
authors = [
{name = "Daniel G. Taylor", email = "danielgtaylor@gmail.com"}
]
readme = "README.md"
repository = "https://github.com/danielgtaylor/python-betterproto"
keywords = ["protobuf", "gRPC"]
@ -10,46 +12,54 @@ license = "MIT"
packages = [
{ include = "betterproto", from = "src" }
]
requires-python = ">=3.9,<4.0"
dynamic = ["dependencies"]
[tool.poetry.dependencies]
python = "^3.8"
black = { version = ">=23.1.0", optional = true }
# The Ruff version is pinned. To update it, also update it in .pre-commit-config.yaml
ruff = { version = "~0.9.1", optional = true }
grpclib = "^0.4.1"
jinja2 = { version = ">=3.0.3", optional = true }
python-dateutil = "^2.8"
isort = { version = "^5.11.5", optional = true }
typing-extensions = "^4.7.1"
betterproto-rust-codec = { version = "0.1.1", optional = true }
[tool.poetry.group.dev.dependencies]
asv = "^0.4.2"
bpython = "^0.19"
asv = "^0.6.4"
bpython = "^0.24"
jinja2 = ">=3.0.3"
mypy = "^1.11.2"
sphinx = "3.1.2"
sphinx-rtd-theme = "0.5.0"
pre-commit = "^2.17.0"
sphinx = "7.4.7"
sphinx-rtd-theme = "3.0.2"
pre-commit = "^4.0.1"
grpcio-tools = "^1.54.2"
tox = "^4.0.0"
[tool.poetry.group.test.dependencies]
poethepoet = ">=0.9.0"
pytest = "^6.2.5"
pytest-asyncio = "^0.12.0"
pytest-cov = "^2.9.0"
pytest = "^7.4.4"
pytest-asyncio = "^0.23.8"
pytest-cov = "^6.0.0"
pytest-mock = "^3.1.1"
pydantic = ">=2.0,<3"
protobuf = "^4"
cachelib = "^0.10.2"
protobuf = "^5"
cachelib = "^0.13.0"
tomlkit = ">=0.7.0"
[tool.poetry.scripts]
[project.scripts]
protoc-gen-python_betterproto = "betterproto.plugin:main"
[tool.poetry.extras]
compiler = ["black", "isort", "jinja2"]
[project.optional-dependencies]
compiler = ["ruff", "jinja2"]
rust-codec = ["betterproto-rust-codec"]
[tool.ruff]
extend-exclude = ["tests/output_*"]
target-version = "py38"
[tool.ruff.lint.isort]
combine-as-imports = true
lines-after-imports = 2
# Dev workflow tasks
@ -65,13 +75,29 @@ help = "Run tests"
cmd = "mypy src --ignore-missing-imports"
help = "Check types with mypy"
[tool.poe.tasks]
_black = "black . --exclude tests/output_ --target-version py310"
_isort = "isort . --extend-skip-glob 'tests/output_*/**/*'"
[tool.poe.tasks.format]
sequence = ["_black", "_isort"]
help = "Apply black and isort formatting to source code"
sequence = ["_format", "_sort-imports"]
help = "Format the source code, and sort the imports"
[tool.poe.tasks.check]
sequence = ["_check-format", "_check-imports"]
help = "Check that the source code is formatted and the imports sorted"
[tool.poe.tasks._format]
cmd = "ruff format src tests"
help = "Format the source code without sorting the imports"
[tool.poe.tasks._sort-imports]
cmd = "ruff check --select I --fix src tests"
help = "Sort the imports"
[tool.poe.tasks._check-format]
cmd = "ruff format --diff src tests"
help = "Check that the source code is formatted"
[tool.poe.tasks._check-imports]
cmd = "ruff check --select I src tests"
help = "Check that the imports are sorted"
[tool.poe.tasks.docs]
cmd = "sphinx-build docs docs/build"
@ -106,23 +132,6 @@ help = "Regenerate the types in betterproto.lib.std.google"
shell = "poe generate && tox"
help = "Run tests with multiple pythons"
[tool.poe.tasks.check-style]
cmd = "black . --check --diff"
help = "Check if code style is correct"
[tool.isort]
py_version = 37
profile = "black"
force_single_line = false
combine_as_imports = true
lines_after_imports = 2
include_trailing_comma = true
force_grid_wrap = 2
src_paths = ["src", "tests"]
[tool.black]
target-version = ['py37']
[tool.doc8]
paths = ["docs"]
max_line_length = 88
@ -156,5 +165,5 @@ require_poetry = true
"""
[build-system]
requires = ["poetry-core>=1.0.0,<2"]
requires = ["poetry-core>=2.0.0,<3"]
build-backend = "poetry.core.masonry.api"

View File

@ -66,8 +66,7 @@ if sys.version_info >= (3, 10):
from types import UnionType as _types_UnionType
else:
class _types_UnionType:
...
class _types_UnionType: ...
# Proto 3 data types
@ -2014,10 +2013,10 @@ class _Timestamp(Timestamp):
return f"{result}Z"
if (nanos % 1e6) == 0:
# Serialize 3 fractional digits.
return f"{result}.{int(nanos // 1e6) :03d}Z"
return f"{result}.{int(nanos // 1e6):03d}Z"
if (nanos % 1e3) == 0:
# Serialize 6 fractional digits.
return f"{result}.{int(nanos // 1e3) :06d}Z"
return f"{result}.{int(nanos // 1e3):06d}Z"
# Serialize 9 fractional digits.
return f"{result}.{nanos:09d}"

View File

@ -1,6 +1,5 @@
from __future__ import annotations
import sys
from enum import (
EnumMeta,
IntEnum,
@ -90,15 +89,8 @@ class EnumType(EnumMeta if TYPE_CHECKING else type):
def __iter__(cls) -> Generator[Enum, None, None]:
yield from cls._member_map_.values()
if sys.version_info >= (3, 8): # 3.8 added __reversed__ to dict_values
def __reversed__(cls) -> Generator[Enum, None, None]:
yield from reversed(cls._member_map_.values())
else:
def __reversed__(cls) -> Generator[Enum, None, None]:
yield from reversed(tuple(cls._member_map_.values()))
def __reversed__(cls) -> Generator[Enum, None, None]:
yield from reversed(cls._member_map_.values())
def __getitem__(cls, key: str) -> Enum:
return cls._member_map_[key]
@ -140,6 +132,9 @@ class Enum(IntEnum if TYPE_CHECKING else int, metaclass=EnumType):
super().__setattr__(self, "value", value)
return self
def __getnewargs_ex__(self) -> Tuple[Tuple[()], Dict[str, Any]]:
return (), {"name": self.name, "value": self.value}
def __str__(self) -> str:
return self.name or "None"
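
With __getnewargs_ex__ returning the member's name and value, enum members now round-trip through pickle; a hedged illustration (Colour is a hypothetical enum, declared the same way as the pickle test added further down):

import pickle

import betterproto

class Colour(betterproto.Enum):
    UNSPECIFIED = 0
    RED = 1

assert pickle.loads(pickle.dumps(Colour.RED)) == Colour.RED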

View File

@ -1204,9 +1204,9 @@ class EnumDescriptorProto(betterproto.Message):
name: str = betterproto.string_field(1)
value: List["EnumValueDescriptorProto"] = betterproto.message_field(2)
options: "EnumOptions" = betterproto.message_field(3)
reserved_range: List[
"EnumDescriptorProtoEnumReservedRange"
] = betterproto.message_field(4)
reserved_range: List["EnumDescriptorProtoEnumReservedRange"] = (
betterproto.message_field(4)
)
"""
Range of reserved numeric values. Reserved numeric values may not be used
by enum values in the same enum declaration. Reserved ranges may not
@ -1792,9 +1792,9 @@ class FeatureSetDefaults(betterproto.Message):
for the closest matching edition, followed by proto merges.
"""
defaults: List[
"FeatureSetDefaultsFeatureSetEditionDefault"
] = betterproto.message_field(1)
defaults: List["FeatureSetDefaultsFeatureSetEditionDefault"] = (
betterproto.message_field(1)
)
minimum_edition: "Edition" = betterproto.enum_field(4)
"""
The minimum supported edition (inclusive) when this was constructed.

View File

@ -53,9 +53,9 @@ class CodeGeneratorRequest(betterproto.Message):
parameter: str = betterproto.string_field(2)
"""The generator parameter passed on the command-line."""
proto_file: List[
"betterproto_lib_pydantic_google_protobuf.FileDescriptorProto"
] = betterproto.message_field(15)
proto_file: List["betterproto_lib_pydantic_google_protobuf.FileDescriptorProto"] = (
betterproto.message_field(15)
)
"""
FileDescriptorProtos for all files in files_to_generate and everything
they import. The files will appear in topological order, so each file
@ -195,9 +195,9 @@ class CodeGeneratorResponseFile(betterproto.Message):
content: str = betterproto.string_field(15)
"""The file contents."""
generated_code_info: (
"betterproto_lib_pydantic_google_protobuf.GeneratedCodeInfo"
) = betterproto.message_field(16)
generated_code_info: "betterproto_lib_pydantic_google_protobuf.GeneratedCodeInfo" = betterproto.message_field(
16
)
"""
Information describing the file content being inserted. If an insertion
point is used, this information will be appropriately offset and inserted

View File

@ -1064,9 +1064,9 @@ class EnumDescriptorProto(betterproto.Message):
name: str = betterproto.string_field(1)
value: List["EnumValueDescriptorProto"] = betterproto.message_field(2)
options: "EnumOptions" = betterproto.message_field(3)
reserved_range: List[
"EnumDescriptorProtoEnumReservedRange"
] = betterproto.message_field(4)
reserved_range: List["EnumDescriptorProtoEnumReservedRange"] = (
betterproto.message_field(4)
)
"""
Range of reserved numeric values. Reserved numeric values may not be used
by enum values in the same enum declaration. Reserved ranges may not
@ -1688,9 +1688,9 @@ class FeatureSetDefaults(betterproto.Message):
for the closest matching edition, followed by proto merges.
"""
defaults: List[
"FeatureSetDefaultsFeatureSetEditionDefault"
] = betterproto.message_field(1)
defaults: List["FeatureSetDefaultsFeatureSetEditionDefault"] = (
betterproto.message_field(1)
)
minimum_edition: "Edition" = betterproto.enum_field(4)
"""
The minimum supported edition (inclusive) when this was constructed.

View File

@ -46,9 +46,9 @@ class CodeGeneratorRequest(betterproto.Message):
parameter: str = betterproto.string_field(2)
"""The generator parameter passed on the command-line."""
proto_file: List[
"betterproto_lib_google_protobuf.FileDescriptorProto"
] = betterproto.message_field(15)
proto_file: List["betterproto_lib_google_protobuf.FileDescriptorProto"] = (
betterproto.message_field(15)
)
"""
FileDescriptorProtos for all files in files_to_generate and everything
they import. The files will appear in topological order, so each file

View File

@ -1,4 +1,5 @@
import os.path
import subprocess
import sys
from .module_validation import ModuleValidator
@ -6,8 +7,6 @@ from .module_validation import ModuleValidator
try:
# betterproto[compiler] specific dependencies
import black
import isort.api
import jinja2
except ImportError as err:
print(
@ -32,6 +31,7 @@ def outputfile_compiler(output_file: OutputTemplate) -> str:
trim_blocks=True,
lstrip_blocks=True,
loader=jinja2.FileSystemLoader(templates_folder),
undefined=jinja2.StrictUndefined,
)
# Load the body first so we have a complete list of imports needed.
body_template = env.get_template("template.py.j2")
@ -39,20 +39,17 @@ def outputfile_compiler(output_file: OutputTemplate) -> str:
code = body_template.render(output_file=output_file)
code = header_template.render(output_file=output_file) + code
code = isort.api.sort_code_string(
code=code,
show_diff=False,
py_version=37,
profile="black",
combine_as_imports=True,
lines_after_imports=2,
quiet=True,
force_grid_wrap=2,
known_third_party=["grpclib", "betterproto"],
# Sort imports, delete unused ones
code = subprocess.check_output(
["ruff", "check", "--select", "I,F401", "--fix", "--silent", "-"],
input=code,
encoding="utf-8",
)
code = black.format_str(
src_contents=code,
mode=black.Mode(),
# Format the code
code = subprocess.check_output(
["ruff", "format", "-"], input=code, encoding="utf-8"
)
# Validate the generated code.
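
Standalone, the new post-processing behaves roughly like this hedged sketch (assumes the ruff executable is on PATH; "-" makes it read source from stdin and write the result to stdout):

import subprocess

code = "import os\nimport sys\nprint(sys.version)\n"

# Sort imports and drop unused ones (F401); the fixed source comes back on stdout.
code = subprocess.check_output(
    ["ruff", "check", "--select", "I,F401", "--fix", "--silent", "-"],
    input=code,
    encoding="utf-8",
)
# Format the result.
code = subprocess.check_output(["ruff", "format", "-"], input=code, encoding="utf-8")
print(code)  # the unused "import os" is gone and the remaining code is formatted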

View File

@ -153,11 +153,33 @@ def get_comment(
) -> str:
pad = " " * indent
for sci_loc in proto_file.source_code_info.location:
if list(sci_loc.path) == path and sci_loc.leading_comments:
lines = sci_loc.leading_comments.strip().split("\n")
if list(sci_loc.path) == path:
all_comments = list(sci_loc.leading_detached_comments)
if sci_loc.leading_comments:
all_comments.append(sci_loc.leading_comments)
if sci_loc.trailing_comments:
all_comments.append(sci_loc.trailing_comments)
lines = []
for comment in all_comments:
lines += comment.split("\n")
lines.append("")
# Remove consecutive empty lines
lines = [
line for i, line in enumerate(lines) if line or (i == 0 or lines[i - 1])
]
if lines and not lines[-1]:
lines.pop() # Remove the last empty line
# It is common for one-line comments to start with a space, for example: // comment
# We don't add this space to the generated file.
lines = [line[1:] if line and line[0] == " " else line for line in lines]
# This is a field, message, enum, service, or method
if len(lines) == 1 and len(lines[0]) < 79 - indent - 6:
lines[0] = lines[0].strip('"')
return f'{pad}"""{lines[0]}"""'
else:
joined = f"\n{pad}".join(lines)
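
A self-contained re-creation of the comment clean-up above (clean_comment_lines is a hypothetical helper, used only for illustration):

def clean_comment_lines(comments):
    lines = []
    for comment in comments:
        lines += comment.split("\n")
        lines.append("")
    # Remove consecutive empty lines
    lines = [line for i, line in enumerate(lines) if line or (i == 0 or lines[i - 1])]
    if lines and not lines[-1]:
        lines.pop()  # Remove the trailing empty line
    # Drop the single leading space that "// comment" style comments usually carry
    return [line[1:] if line.startswith(" ") else line for line in lines]

print(clean_comment_lines([" Documentation of field 1\n other line 1", " Documentation of field 2"]))
# ['Documentation of field 1', 'other line 1', '', 'Documentation of field 2']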
@ -238,7 +260,7 @@ class OutputTemplate:
parent_request: PluginRequestCompiler
package_proto_obj: FileDescriptorProto
input_files: List[str] = field(default_factory=list)
imports: Set[str] = field(default_factory=set)
imports_end: Set[str] = field(default_factory=set)
datetime_imports: Set[str] = field(default_factory=set)
pydantic_imports: Set[str] = field(default_factory=set)
builtins_import: bool = False
@ -328,12 +350,6 @@ class MessageCompiler(ProtoContentBase):
def py_name(self) -> str:
return pythonize_class_name(self.proto_name)
@property
def annotation(self) -> str:
if self.repeated:
return self.typing_compiler.list(self.py_name)
return self.py_name
@property
def deprecated_fields(self) -> Iterator[str]:
for f in self.fields:
@ -484,13 +500,6 @@ class FieldCompiler(MessageCompiler):
def optional(self) -> bool:
return self.proto_obj.proto3_optional
@property
def mutable(self) -> bool:
"""True if the field is a mutable type, otherwise False."""
return self.annotation.startswith(
("typing.List[", "typing.Dict[", "dict[", "list[", "Dict[", "List[")
)
@property
def field_type(self) -> str:
"""String representation of proto field type."""
@ -532,7 +541,7 @@ class FieldCompiler(MessageCompiler):
# Type referencing another defined Message or a named enum
return get_type_reference(
package=self.output_file.package,
imports=self.output_file.imports,
imports=self.output_file.imports_end,
source_type=self.proto_obj.type_name,
typing_compiler=self.typing_compiler,
pydantic=self.output_file.pydantic_dataclasses,
@ -661,6 +670,7 @@ class EnumDefinitionCompiler(MessageCompiler):
@dataclass
class ServiceCompiler(ProtoContentBase):
source_file: FileDescriptorProto
parent: OutputTemplate = PLACEHOLDER
proto_obj: DescriptorProto = PLACEHOLDER
path: List[int] = PLACEHOLDER
@ -682,6 +692,7 @@ class ServiceCompiler(ProtoContentBase):
@dataclass
class ServiceMethodCompiler(ProtoContentBase):
source_file: FileDescriptorProto
parent: ServiceCompiler
proto_obj: MethodDescriptorProto
path: List[int] = PLACEHOLDER
@ -730,7 +741,7 @@ class ServiceMethodCompiler(ProtoContentBase):
"""
return get_type_reference(
package=self.output_file.package,
imports=self.output_file.imports,
imports=self.output_file.imports_end,
source_type=self.proto_obj.input_type,
typing_compiler=self.output_file.typing_compiler,
unwrap=False,
@ -760,7 +771,7 @@ class ServiceMethodCompiler(ProtoContentBase):
"""
return get_type_reference(
package=self.output_file.package,
imports=self.output_file.imports,
imports=self.output_file.imports_end,
source_type=self.proto_obj.output_type,
typing_compiler=self.output_file.typing_compiler,
unwrap=False,

View File

@ -143,7 +143,7 @@ def generate_code(request: CodeGeneratorRequest) -> CodeGeneratorResponse:
for output_package_name, output_package in request_data.output_packages.items():
for proto_input_file in output_package.input_files:
for index, service in enumerate(proto_input_file.service):
read_protobuf_service(service, index, output_package)
read_protobuf_service(proto_input_file, service, index, output_package)
# Generate output files
output_paths: Set[pathlib.Path] = set()
@ -249,12 +249,21 @@ def read_protobuf_type(
def read_protobuf_service(
service: ServiceDescriptorProto, index: int, output_package: OutputTemplate
source_file: FileDescriptorProto,
service: ServiceDescriptorProto,
index: int,
output_package: OutputTemplate,
) -> None:
service_data = ServiceCompiler(
parent=output_package, proto_obj=service, path=[6, index]
source_file=source_file,
parent=output_package,
proto_obj=service,
path=[6, index],
)
for j, method in enumerate(service.method):
ServiceMethodCompiler(
parent=service_data, proto_obj=method, path=[6, index, 2, j]
source_file=source_file,
parent=service_data,
proto_obj=method,
path=[6, index, 2, j],
)

View File

@ -2,13 +2,26 @@
# sources: {{ ', '.join(output_file.input_filenames) }}
# plugin: python-betterproto
# This file has been @generated
__all__ = (
{%- for enum in output_file.enums -%}
"{{ enum.py_name }}",
{%- endfor -%}
{%- for message in output_file.messages -%}
"{{ message.py_name }}",
{%- endfor -%}
{%- for service in output_file.services -%}
"{{ service.py_name }}Stub",
"{{ service.py_name }}Base",
{%- endfor -%}
)
{% for i in output_file.python_module_imports|sort %}
import {{ i }}
{% endfor %}
{% if output_file.pydantic_dataclasses %}
from pydantic.dataclasses import dataclass
from pydantic.dataclasses import rebuild_dataclass
{%- else -%}
from dataclasses import dataclass
{% endif %}
@ -35,10 +48,6 @@ from betterproto.grpc.grpclib_server import ServiceBase
import grpclib
{% endif %}
{% for i in output_file.imports|sort %}
{{ i }}
{% endfor %}
{% if output_file.imports_type_checking_only %}
from typing import TYPE_CHECKING

View File

@ -77,14 +77,14 @@ class {{ service.py_name }}Stub(betterproto.ServiceStub):
, {{ method.py_input_message_param }}: "{{ method.py_input_message_type }}"
{%- else -%}
{# Client streaming: need a request iterator instead #}
, {{ method.py_input_message_param }}_iterator: {{ output_file.typing_compiler.union(output_file.typing_compiler.async_iterable(method.py_input_message_type), output_file.typing_compiler.iterable(method.py_input_message_type)) }}
, {{ method.py_input_message_param }}_iterator: "{{ output_file.typing_compiler.union(output_file.typing_compiler.async_iterable(method.py_input_message_type), output_file.typing_compiler.iterable(method.py_input_message_type)) }}"
{%- endif -%}
,
*
, timeout: {{ output_file.typing_compiler.optional("float") }} = None
, deadline: {{ output_file.typing_compiler.optional('"Deadline"') }} = None
, metadata: {{ output_file.typing_compiler.optional('"MetadataLike"') }} = None
) -> {% if method.server_streaming %}{{ output_file.typing_compiler.async_iterator(method.py_output_message_type ) }}{% else %}"{{ method.py_output_message_type }}"{% endif %}:
) -> "{% if method.server_streaming %}{{ output_file.typing_compiler.async_iterator(method.py_output_message_type ) }}{% else %}{{ method.py_output_message_type }}{% endif %}":
{% if method.comment %}
{{ method.comment }}
@ -143,6 +143,10 @@ class {{ service.py_name }}Stub(betterproto.ServiceStub):
{% endfor %}
{% endfor %}
{% for i in output_file.imports_end %}
{{ i }}
{% endfor %}
{% for service in output_file.services %}
class {{ service.py_name }}Base(ServiceBase):
{% if service.comment %}
@ -211,11 +215,3 @@ class {{ service.py_name }}Base(ServiceBase):
}
{% endfor %}
{% if output_file.pydantic_dataclasses %}
{% for message in output_file.messages %}
{% if message.has_message_field %}
rebuild_dataclass({{ message.py_name }}) # type: ignore
{% endif %}
{% endfor %}
{% endif %}

View File

@ -4,17 +4,6 @@ import sys
import pytest
def pytest_addoption(parser):
parser.addoption(
"--repeat", type=int, default=1, help="repeat the operation multiple times"
)
@pytest.fixture(scope="session")
def repeat(request):
return request.config.getoption("repeat")
@pytest.fixture
def reset_sys_path():
original = copy.deepcopy(sys.path)

View File

@ -1,5 +1,4 @@
import asyncio
import sys
import uuid
import grpclib
@ -27,12 +26,12 @@ async def _test_client(client: ThingServiceClient, name="clean room", **kwargs):
def _assert_request_meta_received(deadline, metadata):
def server_side_test(stream):
assert stream.deadline._timestamp == pytest.approx(
deadline._timestamp, 1
), "The provided deadline should be received serverside"
assert (
stream.metadata["authorization"] == metadata["authorization"]
), "The provided authorization metadata should be received serverside"
assert stream.deadline._timestamp == pytest.approx(deadline._timestamp, 1), (
"The provided deadline should be received serverside"
)
assert stream.metadata["authorization"] == metadata["authorization"], (
"The provided authorization metadata should be received serverside"
)
return server_side_test
@ -91,9 +90,6 @@ async def test_trailer_only_error_stream_unary(
@pytest.mark.asyncio
@pytest.mark.skipif(
sys.version_info < (3, 8), reason="async mock spy does works for python3.8+"
)
async def test_service_call_mutable_defaults(mocker):
async with ChannelFor([ThingService()]) as channel:
client = ThingServiceClient(channel)
@ -269,9 +265,9 @@ async def test_async_gen_for_stream_stream_request():
else:
# No more things to send make sure channel is closed
request_chan.close()
assert response_index == len(
expected_things
), "Didn't receive all expected responses"
assert response_index == len(expected_things), (
"Didn't receive all expected responses"
)
@pytest.mark.asyncio

View File

@ -4,20 +4,20 @@ from tests.output_betterproto.casing import Test
def test_message_attributes():
message = Test()
assert hasattr(
message, "snake_case_message"
), "snake_case field name is same in python"
assert hasattr(message, "snake_case_message"), (
"snake_case field name is same in python"
)
assert hasattr(message, "camel_case"), "CamelCase field is snake_case in python"
assert hasattr(message, "uppercase"), "UPPERCASE field is lowercase in python"
def test_message_casing():
assert hasattr(
casing, "SnakeCaseMessage"
), "snake_case Message name is converted to CamelCase in python"
assert hasattr(casing, "SnakeCaseMessage"), (
"snake_case Message name is converted to CamelCase in python"
)
def test_enum_casing():
assert hasattr(
casing, "MyEnum"
), "snake_case Enum name is converted to CamelCase in python"
assert hasattr(casing, "MyEnum"), (
"snake_case Enum name is converted to CamelCase in python"
)

View File

@ -2,13 +2,13 @@ import tests.output_betterproto.casing_inner_class as casing_inner_class
def test_message_casing_inner_class_name():
assert hasattr(
casing_inner_class, "TestInnerClass"
), "Inline defined Message is correctly converted to CamelCase"
assert hasattr(casing_inner_class, "TestInnerClass"), (
"Inline defined Message is correctly converted to CamelCase"
)
def test_message_casing_inner_class_attributes():
message = casing_inner_class.Test()
assert hasattr(
message.inner, "old_exp"
), "Inline defined Message attribute is snake_case"
assert hasattr(message.inner, "old_exp"), (
"Inline defined Message attribute is snake_case"
)

View File

@ -3,12 +3,12 @@ from tests.output_betterproto.casing_message_field_uppercase import Test
def test_message_casing():
message = Test()
assert hasattr(
message, "uppercase"
), "UPPERCASE attribute is converted to 'uppercase' in python"
assert hasattr(
message, "uppercase_v2"
), "UPPERCASE_V2 attribute is converted to 'uppercase_v2' in python"
assert hasattr(
message, "upper_camel_case"
), "UPPER_CAMEL_CASE attribute is converted to upper_camel_case in python"
assert hasattr(message, "uppercase"), (
"UPPERCASE attribute is converted to 'uppercase' in python"
)
assert hasattr(message, "uppercase_v2"), (
"UPPERCASE_V2 attribute is converted to 'uppercase_v2' in python"
)
assert hasattr(message, "upper_camel_case"), (
"UPPER_CAMEL_CASE attribute is converted to upper_camel_case in python"
)

View File

@ -0,0 +1,44 @@
syntax = "proto3";
package documentation;
// Documentation of message 1
// other line 1
// Documentation of message 2
// other line 2
message Test { // Documentation of message 3
// Documentation of field 1
// other line 1
// Documentation of field 2
// other line 2
uint32 x = 1; // Documentation of field 3
}
// Documentation of enum 1
// other line 1
// Documentation of enum 2
// other line 2
enum Enum { // Documentation of enum 3
// Documentation of variant 1
// other line 1
// Documentation of variant 2
// other line 2
Enum_Variant = 0; // Documentation of variant 3
}
// Documentation of service 1
// other line 1
// Documentation of service 2
// other line 2
service Service { // Documentation of service 3
// Documentation of method 1
// other line 1
// Documentation of method 2
// other line 2
rpc get(Test) returns (Test); // Documentation of method 3
}

View File

@ -27,9 +27,9 @@ def test_enum_is_comparable_with_int():
def test_enum_to_dict():
assert (
"choice" not in Test(choice=Choice.ZERO).to_dict()
), "Default enum value is not serialized"
assert "choice" not in Test(choice=Choice.ZERO).to_dict(), (
"Default enum value is not serialized"
)
assert (
Test(choice=Choice.ZERO).to_dict(include_default_values=True)["choice"]
== "ZERO"

View File

@ -26,5 +26,5 @@ import "other.proto";
// (root: Test & RootPackageMessage) <-------> (other: OtherPackageMessage)
message Test {
RootPackageMessage message = 1;
other.OtherPackageMessage other = 2;
other.OtherPackageMessage other_value = 2;
}

View File

@ -0,0 +1,19 @@
def test_all_definition():
"""
Check that a compiled module defines __all__ with the right value.
These modules have been chosen since they contain messages, services and enums.
"""
import tests.output_betterproto.enum as enum
import tests.output_betterproto.service as service
assert service.__all__ == (
"ThingType",
"DoThingRequest",
"DoThingResponse",
"GetThingRequest",
"GetThingResponse",
"TestStub",
"TestBase",
)
assert enum.__all__ == ("Choice", "ArithmeticOperator", "Test")

View File

@ -0,0 +1,37 @@
import ast
import inspect
def check(generated_doc: str, type: str) -> None:
assert f"Documentation of {type} 1" in generated_doc
assert "other line 1" in generated_doc
assert f"Documentation of {type} 2" in generated_doc
assert "other line 2" in generated_doc
assert f"Documentation of {type} 3" in generated_doc
def test_documentation() -> None:
from .output_betterproto.documentation import (
Enum,
ServiceBase,
ServiceStub,
Test,
)
check(Test.__doc__, "message")
source = inspect.getsource(Test)
tree = ast.parse(source)
check(tree.body[0].body[2].value.value, "field")
check(Enum.__doc__, "enum")
source = inspect.getsource(Enum)
tree = ast.parse(source)
check(tree.body[0].body[2].value.value, "variant")
check(ServiceBase.__doc__, "service")
check(ServiceBase.get.__doc__, "method")
check(ServiceStub.__doc__, "service")
check(ServiceStub.get.__doc__, "method")

View File

@ -621,9 +621,7 @@ iso_candidates = """2009-12-12T12:34
2010-02-18T16:00:00.23334444
2010-02-18T16:00:00,2283
2009-05-19 143922
2009-05-19 1439""".split(
"\n"
)
2009-05-19 1439""".split("\n")
def test_iso_datetime():

View File

@ -56,9 +56,9 @@ def test_reference_google_wellknown_types_non_wrappers(
)
assert name == expected_name
assert imports.__contains__(
expected_import
), f"{expected_import} not found in {imports}"
assert imports.__contains__(expected_import), (
f"{expected_import} not found in {imports}"
)
@pytest.mark.parametrize(
@ -102,9 +102,9 @@ def test_reference_google_wellknown_types_non_wrappers_pydantic(
)
assert name == expected_name
assert imports.__contains__(
expected_import
), f"{expected_import} not found in {imports}"
assert imports.__contains__(expected_import), (
f"{expected_import} not found in {imports}"
)
@pytest.mark.parametrize(

View File

@ -174,22 +174,21 @@ def test_message_equality(test_data: TestData) -> None:
@pytest.mark.parametrize("test_data", test_cases.messages_with_json, indirect=True)
def test_message_json(repeat, test_data: TestData) -> None:
def test_message_json(test_data: TestData) -> None:
plugin_module, _, json_data = test_data
for _ in range(repeat):
for sample in json_data:
if sample.belongs_to(test_input_config.non_symmetrical_json):
continue
for sample in json_data:
if sample.belongs_to(test_input_config.non_symmetrical_json):
continue
message: betterproto.Message = plugin_module.Test()
message: betterproto.Message = plugin_module.Test()
message.from_json(sample.json)
message_json = message.to_json(0)
message.from_json(sample.json)
message_json = message.to_json(0)
assert dict_replace_nans(json.loads(message_json)) == dict_replace_nans(
json.loads(sample.json)
)
assert dict_replace_nans(json.loads(message_json)) == dict_replace_nans(
json.loads(sample.json)
)
@pytest.mark.parametrize("test_data", test_cases.services, indirect=True)
@ -198,28 +197,27 @@ def test_service_can_be_instantiated(test_data: TestData) -> None:
@pytest.mark.parametrize("test_data", test_cases.messages_with_json, indirect=True)
def test_binary_compatibility(repeat, test_data: TestData) -> None:
def test_binary_compatibility(test_data: TestData) -> None:
plugin_module, reference_module, json_data = test_data
for sample in json_data:
reference_instance = Parse(sample.json, reference_module().Test())
reference_binary_output = reference_instance.SerializeToString()
for _ in range(repeat):
plugin_instance_from_json: betterproto.Message = (
plugin_module.Test().from_json(sample.json)
)
plugin_instance_from_binary = plugin_module.Test.FromString(
reference_binary_output
)
plugin_instance_from_json: betterproto.Message = plugin_module.Test().from_json(
sample.json
)
plugin_instance_from_binary = plugin_module.Test.FromString(
reference_binary_output
)
# Generally this can't be relied on, but here we are aiming to match the
# existing Python implementation and aren't doing anything tricky.
# https://developers.google.com/protocol-buffers/docs/encoding#implications
assert bytes(plugin_instance_from_json) == reference_binary_output
assert bytes(plugin_instance_from_binary) == reference_binary_output
# Generally this can't be relied on, but here we are aiming to match the
# existing Python implementation and aren't doing anything tricky.
# https://developers.google.com/protocol-buffers/docs/encoding#implications
assert bytes(plugin_instance_from_json) == reference_binary_output
assert bytes(plugin_instance_from_binary) == reference_binary_output
assert plugin_instance_from_json == plugin_instance_from_binary
assert dict_replace_nans(
plugin_instance_from_json.to_dict()
) == dict_replace_nans(plugin_instance_from_binary.to_dict())
assert plugin_instance_from_json == plugin_instance_from_binary
assert dict_replace_nans(
plugin_instance_from_json.to_dict()
) == dict_replace_nans(plugin_instance_from_binary.to_dict())

View File

@ -57,6 +57,11 @@ class Complex(betterproto.Message):
)
class BetterprotoEnum(betterproto.Enum):
UNSPECIFIED = 0
ONE = 1
def complex_msg():
return Complex(
foo_str="yep",
@ -201,3 +206,11 @@ def test_message_can_be_cached():
.string_value
== "world"
)
def test_pickle_enum():
enum = BetterprotoEnum.ONE
assert unpickled(enum) == enum
enum = BetterprotoEnum.UNSPECIFIED
assert unpickled(enum) == enum

View File

@ -62,7 +62,7 @@ def test_load_varint_file():
stream.read(2) # Skip until first multi-byte
assert betterproto.load_varint(stream) == (
123456789,
b"\x95\x9A\xEF\x3A",
b"\x95\x9a\xef\x3a",
) # Multi-byte varint
@ -338,7 +338,7 @@ def run_java_single_varint(value: int, tmp_path) -> int:
def test_single_varint(compile_jar, tmp_path):
single_byte = (1, b"\x01")
multi_byte = (123456789, b"\x95\x9A\xEF\x3A")
multi_byte = (123456789, b"\x95\x9a\xef\x3a")
# Write a single-byte varint to a file and have Java read it back
returned = run_java_single_varint(single_byte[0], tmp_path)
@ -351,8 +351,8 @@ def test_single_varint(compile_jar, tmp_path):
def test_multiple_varints(compile_jar, tmp_path):
single_byte = (1, b"\x01")
multi_byte = (123456789, b"\x95\x9A\xEF\x3A")
over32 = (3000000000, b"\x80\xBC\xC1\x96\x0B")
multi_byte = (123456789, b"\x95\x9a\xef\x3a")
over32 = (3000000000, b"\x80\xbc\xc1\x96\x0b")
# Write two varints to the same file
with open(tmp_path / "py_multiple_varints.out", "wb") as stream:

View File

@ -11,6 +11,6 @@ PROJECT_TOML = Path(__file__).joinpath("..", "..", "pyproject.toml").resolve()
def test_version():
with PROJECT_TOML.open() as toml_file:
project_config = tomlkit.loads(toml_file.read())
assert (
__version__ == project_config["tool"]["poetry"]["version"]
), "Project version should match in package and package config"
assert __version__ == project_config["project"]["version"], (
"Project version should match in package and package config"
)