Merge branch 'master_gh'

Georg K 2025-04-14 03:54:30 +03:00
commit 5a518ed044
39 changed files with 1557 additions and 1113 deletions

View File

@ -2,7 +2,7 @@
There's lots to do, and we're working hard, so any help is welcome! There's lots to do, and we're working hard, so any help is welcome!
- :speech_balloon: Join us on [Slack](https://join.slack.com/t/betterproto/shared_invite/zt-f0n0uolx-iN8gBNrkPxtKHTLpG3o1OQ)! - :speech_balloon: Join us on [Discord](https://discord.gg/DEVteTupPb)!
What can you do? What can you do?
@ -15,9 +15,9 @@ What can you do?
- File a bug (please check its not a duplicate) - File a bug (please check its not a duplicate)
- Propose an enhancement - Propose an enhancement
- :white_check_mark: Create a PR: - :white_check_mark: Create a PR:
- [Creating a failing test-case](https://github.com/danielgtaylor/python-betterproto/blob/master/betterproto/tests/README.md) to make bug-fixing easier - [Creating a failing test-case](https://github.com/danielgtaylor/python-betterproto/blob/master/tests/README.md) to make bug-fixing easier
- Fix any of the open issues - Fix any of the open issues
- [Good first issues](https://github.com/danielgtaylor/python-betterproto/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) - [Good first issues](https://github.com/danielgtaylor/python-betterproto/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
- [Issues with tests](https://github.com/danielgtaylor/python-betterproto/issues?q=is%3Aissue+is%3Aopen+label%3A%22has+test%22) - [Issues with tests](https://github.com/danielgtaylor/python-betterproto/issues?q=is%3Aissue+is%3Aopen+label%3A%22has+test%22)
- New bugfix or idea - New bugfix or idea
- If you'd like to discuss your idea first, join us on Slack! - If you'd like to discuss your idea first, join us on Discord!

View File

@ -8,7 +8,7 @@ body:
value: > value: >
Thanks for taking the time to fill out a bug report! Thanks for taking the time to fill out a bug report!
If you're not sure it's a bug and you just have a question, the [community Slack channel](https://join.slack.com/t/betterproto/shared_invite/zt-f0n0uolx-iN8gBNrkPxtKHTLpG3o1OQ) is a better place for general questions than a GitHub issue. If you're not sure it's a bug and you just have a question, the [community Discord channel](https://discord.gg/DEVteTupPb) is a better place for general questions than a GitHub issue.
- type: input - type: input
attributes: attributes:

View File

@ -2,5 +2,5 @@ name:
description: description:
contact_links: contact_links:
- name: For questions about the library - name: For questions about the library
about: Support questions are better answered in our Slack group. about: Support questions are better answered in our Discord group.
url: https://join.slack.com/t/betterproto/shared_invite/zt-f0n0uolx-iN8gBNrkPxtKHTLpG3o1OQ url: https://discord.gg/DEVteTupPb

View File

@ -16,7 +16,7 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
os: [Ubuntu, MacOS, Windows] os: [Ubuntu, MacOS, Windows]
python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] python-version: ['3.9', '3.10', '3.11', '3.12', '3.13']
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@ -41,7 +41,7 @@ jobs:
run: poetry config virtualenvs.in-project true run: poetry config virtualenvs.in-project true
- name: Set up cache - name: Set up cache
uses: actions/cache@v3 uses: actions/cache@v4
id: cache id: cache
with: with:
path: .venv path: .venv

View File

@ -13,6 +13,6 @@ jobs:
name: Check code/doc formatting name: Check code/doc formatting
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v4
- uses: actions/setup-python@v4 - uses: actions/setup-python@v5
- uses: pre-commit/action@v2.0.3 - uses: pre-commit/action@v3.0.1

View File

@ -25,11 +25,11 @@ jobs:
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v3 uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning. # Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL - name: Initialize CodeQL
uses: github/codeql-action/init@v2 uses: github/codeql-action/init@v3
with: with:
languages: ${{ matrix.language }} languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file. # If you wish to specify custom queries, you can do so here or in a config file.
@ -40,7 +40,7 @@ jobs:
# queries: security-extended,security-and-quality # queries: security-extended,security-and-quality
- name: Autobuild - name: Autobuild
uses: github/codeql-action/autobuild@v2 uses: github/codeql-action/autobuild@v3
- name: Perform CodeQL Analysis - name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2 uses: github/codeql-action/analyze@v3

View File

@ -15,11 +15,11 @@ jobs:
name: Distribution name: Distribution
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v4
- name: Set up Python 3.8 - name: Set up Python 3.9
uses: actions/setup-python@v4 uses: actions/setup-python@v5
with: with:
python-version: 3.8 python-version: 3.9
- name: Install poetry - name: Install poetry
run: python -m pip install poetry run: python -m pip install poetry
- name: Build package - name: Build package

.gitignore (vendored)
View File

@ -18,3 +18,4 @@ output
.asv .asv
venv venv
.devcontainer .devcontainer
.ruff_cache

View File

@ -2,16 +2,13 @@ ci:
autofix_prs: false autofix_prs: false
repos: repos:
- repo: https://github.com/pycqa/isort - repo: https://github.com/astral-sh/ruff-pre-commit
rev: 5.11.5 rev: v0.9.1
hooks: hooks:
- id: isort - id: ruff-format
args: ["--diff", "src", "tests"]
- repo: https://github.com/psf/black - id: ruff
rev: 23.1.0 args: ["--select", "I", "src", "tests"]
hooks:
- id: black
args: ["--target-version", "py310"]
- repo: https://github.com/PyCQA/doc8 - repo: https://github.com/PyCQA/doc8
rev: 0.10.1 rev: 0.10.1
@ -21,7 +18,7 @@ repos:
- toml - toml
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.10.0 rev: v2.14.0
hooks: hooks:
- id: pretty-format-java - id: pretty-format-java
args: [--autofix, --aosp] args: [--autofix, --aosp]

View File

@ -439,7 +439,7 @@ value3: str | int = 1
## Development ## Development
- _Join us on [Slack](https://join.slack.com/t/betterproto/shared_invite/zt-f0n0uolx-iN8gBNrkPxtKHTLpG3o1OQ)!_ - _Join us on [Discord](https://discord.gg/DEVteTupPb)!_
- _See how you can help → [Contributing](.github/CONTRIBUTING.md)_ - _See how you can help → [Contributing](.github/CONTRIBUTING.md)_
### Requirements ### Requirements
@ -575,7 +575,7 @@ protoc \
## Community ## Community
Join us on [Slack](https://join.slack.com/t/betterproto/shared_invite/zt-f0n0uolx-iN8gBNrkPxtKHTLpG3o1OQ)! Join us on [Discord](https://discord.gg/DEVteTupPb)!
## License ## License

poetry.lock (generated)

File diff suppressed because it is too large.

View File

@ -1,8 +1,10 @@
[tool.poetry] [project]
name = "betterproto" name = "betterproto"
version = "2.0.0b7" version = "2.0.0b7"
description = "A better Protobuf / gRPC generator & library" description = "A better Protobuf / gRPC generator & library"
authors = ["Daniel G. Taylor <danielgtaylor@gmail.com>"] authors = [
{name = "Daniel G. Taylor", email = "danielgtaylor@gmail.com"}
]
readme = "README.md" readme = "README.md"
repository = "https://github.com/danielgtaylor/python-betterproto" repository = "https://github.com/danielgtaylor/python-betterproto"
keywords = ["protobuf", "gRPC"] keywords = ["protobuf", "gRPC"]
@ -10,46 +12,54 @@ license = "MIT"
packages = [ packages = [
{ include = "betterproto", from = "src" } { include = "betterproto", from = "src" }
] ]
requires-python = ">=3.9,<4.0"
dynamic = ["dependencies"]
[tool.poetry.dependencies] [tool.poetry.dependencies]
python = "^3.8" # The Ruff version is pinned. To update it, also update it in .pre-commit-config.yaml
black = { version = ">=23.1.0", optional = true } ruff = { version = "~0.9.1", optional = true }
grpclib = "^0.4.1" grpclib = "^0.4.1"
jinja2 = { version = ">=3.0.3", optional = true } jinja2 = { version = ">=3.0.3", optional = true }
python-dateutil = "^2.8" python-dateutil = "^2.8"
isort = { version = "^5.11.5", optional = true }
typing-extensions = "^4.7.1" typing-extensions = "^4.7.1"
betterproto-rust-codec = { version = "0.1.1", optional = true } betterproto-rust-codec = { version = "0.1.1", optional = true }
[tool.poetry.group.dev.dependencies] [tool.poetry.group.dev.dependencies]
asv = "^0.4.2" asv = "^0.6.4"
bpython = "^0.19" bpython = "^0.24"
jinja2 = ">=3.0.3" jinja2 = ">=3.0.3"
mypy = "^1.11.2" mypy = "^1.11.2"
sphinx = "3.1.2" sphinx = "7.4.7"
sphinx-rtd-theme = "0.5.0" sphinx-rtd-theme = "3.0.2"
pre-commit = "^2.17.0" pre-commit = "^4.0.1"
grpcio-tools = "^1.54.2" grpcio-tools = "^1.54.2"
tox = "^4.0.0" tox = "^4.0.0"
[tool.poetry.group.test.dependencies] [tool.poetry.group.test.dependencies]
poethepoet = ">=0.9.0" poethepoet = ">=0.9.0"
pytest = "^6.2.5" pytest = "^7.4.4"
pytest-asyncio = "^0.12.0" pytest-asyncio = "^0.23.8"
pytest-cov = "^2.9.0" pytest-cov = "^6.0.0"
pytest-mock = "^3.1.1" pytest-mock = "^3.1.1"
pydantic = ">=2.0,<3" pydantic = ">=2.0,<3"
protobuf = "^4" protobuf = "^5"
cachelib = "^0.10.2" cachelib = "^0.13.0"
tomlkit = ">=0.7.0" tomlkit = ">=0.7.0"
[tool.poetry.scripts] [project.scripts]
protoc-gen-python_betterproto = "betterproto.plugin:main" protoc-gen-python_betterproto = "betterproto.plugin:main"
[tool.poetry.extras] [project.optional-dependencies]
compiler = ["black", "isort", "jinja2"] compiler = ["ruff", "jinja2"]
rust-codec = ["betterproto-rust-codec"] rust-codec = ["betterproto-rust-codec"]
[tool.ruff]
extend-exclude = ["tests/output_*"]
target-version = "py38"
[tool.ruff.lint.isort]
combine-as-imports = true
lines-after-imports = 2
# Dev workflow tasks # Dev workflow tasks
@ -65,13 +75,29 @@ help = "Run tests"
cmd = "mypy src --ignore-missing-imports" cmd = "mypy src --ignore-missing-imports"
help = "Check types with mypy" help = "Check types with mypy"
[tool.poe.tasks]
_black = "black . --exclude tests/output_ --target-version py310"
_isort = "isort . --extend-skip-glob 'tests/output_*/**/*'"
[tool.poe.tasks.format] [tool.poe.tasks.format]
sequence = ["_black", "_isort"] sequence = ["_format", "_sort-imports"]
help = "Apply black and isort formatting to source code" help = "Format the source code, and sort the imports"
[tool.poe.tasks.check]
sequence = ["_check-format", "_check-imports"]
help = "Check that the source code is formatted and the imports sorted"
[tool.poe.tasks._format]
cmd = "ruff format src tests"
help = "Format the source code without sorting the imports"
[tool.poe.tasks._sort-imports]
cmd = "ruff check --select I --fix src tests"
help = "Sort the imports"
[tool.poe.tasks._check-format]
cmd = "ruff format --diff src tests"
help = "Check that the source code is formatted"
[tool.poe.tasks._check-imports]
cmd = "ruff check --select I src tests"
help = "Check that the imports are sorted"
[tool.poe.tasks.docs] [tool.poe.tasks.docs]
cmd = "sphinx-build docs docs/build" cmd = "sphinx-build docs docs/build"
@ -106,23 +132,6 @@ help = "Regenerate the types in betterproto.lib.std.google"
shell = "poe generate && tox" shell = "poe generate && tox"
help = "Run tests with multiple pythons" help = "Run tests with multiple pythons"
[tool.poe.tasks.check-style]
cmd = "black . --check --diff"
help = "Check if code style is correct"
[tool.isort]
py_version = 37
profile = "black"
force_single_line = false
combine_as_imports = true
lines_after_imports = 2
include_trailing_comma = true
force_grid_wrap = 2
src_paths = ["src", "tests"]
[tool.black]
target-version = ['py37']
[tool.doc8] [tool.doc8]
paths = ["docs"] paths = ["docs"]
max_line_length = 88 max_line_length = 88
@ -156,5 +165,5 @@ require_poetry = true
""" """
[build-system] [build-system]
requires = ["poetry-core>=1.0.0,<2"] requires = ["poetry-core>=2.0.0,<3"]
build-backend = "poetry.core.masonry.api" build-backend = "poetry.core.masonry.api"

View File

@ -66,8 +66,7 @@ if sys.version_info >= (3, 10):
from types import UnionType as _types_UnionType from types import UnionType as _types_UnionType
else: else:
class _types_UnionType: class _types_UnionType: ...
...
# Proto 3 data types # Proto 3 data types
@ -2014,10 +2013,10 @@ class _Timestamp(Timestamp):
return f"{result}Z" return f"{result}Z"
if (nanos % 1e6) == 0: if (nanos % 1e6) == 0:
# Serialize 3 fractional digits. # Serialize 3 fractional digits.
return f"{result}.{int(nanos // 1e6) :03d}Z" return f"{result}.{int(nanos // 1e6):03d}Z"
if (nanos % 1e3) == 0: if (nanos % 1e3) == 0:
# Serialize 6 fractional digits. # Serialize 6 fractional digits.
return f"{result}.{int(nanos // 1e3) :06d}Z" return f"{result}.{int(nanos // 1e3):06d}Z"
# Serialize 9 fractional digits. # Serialize 9 fractional digits.
return f"{result}.{nanos:09d}" return f"{result}.{nanos:09d}"

View File

@ -1,6 +1,5 @@
from __future__ import annotations from __future__ import annotations
import sys
from enum import ( from enum import (
EnumMeta, EnumMeta,
IntEnum, IntEnum,
@ -90,16 +89,9 @@ class EnumType(EnumMeta if TYPE_CHECKING else type):
def __iter__(cls) -> Generator[Enum, None, None]: def __iter__(cls) -> Generator[Enum, None, None]:
yield from cls._member_map_.values() yield from cls._member_map_.values()
if sys.version_info >= (3, 8): # 3.8 added __reversed__ to dict_values
def __reversed__(cls) -> Generator[Enum, None, None]: def __reversed__(cls) -> Generator[Enum, None, None]:
yield from reversed(cls._member_map_.values()) yield from reversed(cls._member_map_.values())
else:
def __reversed__(cls) -> Generator[Enum, None, None]:
yield from reversed(tuple(cls._member_map_.values()))
def __getitem__(cls, key: str) -> Enum: def __getitem__(cls, key: str) -> Enum:
return cls._member_map_[key] return cls._member_map_[key]
@ -140,6 +132,9 @@ class Enum(IntEnum if TYPE_CHECKING else int, metaclass=EnumType):
super().__setattr__(self, "value", value) super().__setattr__(self, "value", value)
return self return self
def __getnewargs_ex__(self) -> Tuple[Tuple[()], Dict[str, Any]]:
return (), {"name": self.name, "value": self.value}
def __str__(self) -> str: def __str__(self) -> str:
return self.name or "None" return self.name or "None"
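
The new __getnewargs_ex__ hook is what makes enum members picklable: pickle rebuilds the object by passing the returned keyword arguments to the class's __new__. A self-contained toy illustration of the same mechanism (this is not betterproto's actual Enum):

import pickle


class Flag(int):
    """Toy int subclass demonstrating the pickling hook added above."""

    def __new__(cls, *, name: str, value: int) -> "Flag":
        self = super().__new__(cls, value)
        self.name = name
        self.value = value
        return self

    def __getnewargs_ex__(self):
        # pickle rebuilds the instance via Flag.__new__(Flag, **kwargs)
        return (), {"name": self.name, "value": self.value}


one = Flag(name="ONE", value=1)
restored = pickle.loads(pickle.dumps(one))
assert restored == one and restored.name == "ONE"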

View File

@ -1204,9 +1204,9 @@ class EnumDescriptorProto(betterproto.Message):
name: str = betterproto.string_field(1) name: str = betterproto.string_field(1)
value: List["EnumValueDescriptorProto"] = betterproto.message_field(2) value: List["EnumValueDescriptorProto"] = betterproto.message_field(2)
options: "EnumOptions" = betterproto.message_field(3) options: "EnumOptions" = betterproto.message_field(3)
reserved_range: List[ reserved_range: List["EnumDescriptorProtoEnumReservedRange"] = (
"EnumDescriptorProtoEnumReservedRange" betterproto.message_field(4)
] = betterproto.message_field(4) )
""" """
Range of reserved numeric values. Reserved numeric values may not be used Range of reserved numeric values. Reserved numeric values may not be used
by enum values in the same enum declaration. Reserved ranges may not by enum values in the same enum declaration. Reserved ranges may not
@ -1792,9 +1792,9 @@ class FeatureSetDefaults(betterproto.Message):
for the closest matching edition, followed by proto merges. for the closest matching edition, followed by proto merges.
""" """
defaults: List[ defaults: List["FeatureSetDefaultsFeatureSetEditionDefault"] = (
"FeatureSetDefaultsFeatureSetEditionDefault" betterproto.message_field(1)
] = betterproto.message_field(1) )
minimum_edition: "Edition" = betterproto.enum_field(4) minimum_edition: "Edition" = betterproto.enum_field(4)
""" """
The minimum supported edition (inclusive) when this was constructed. The minimum supported edition (inclusive) when this was constructed.

View File

@ -53,9 +53,9 @@ class CodeGeneratorRequest(betterproto.Message):
parameter: str = betterproto.string_field(2) parameter: str = betterproto.string_field(2)
"""The generator parameter passed on the command-line.""" """The generator parameter passed on the command-line."""
proto_file: List[ proto_file: List["betterproto_lib_pydantic_google_protobuf.FileDescriptorProto"] = (
"betterproto_lib_pydantic_google_protobuf.FileDescriptorProto" betterproto.message_field(15)
] = betterproto.message_field(15) )
""" """
FileDescriptorProtos for all files in files_to_generate and everything FileDescriptorProtos for all files in files_to_generate and everything
they import. The files will appear in topological order, so each file they import. The files will appear in topological order, so each file
@ -195,9 +195,9 @@ class CodeGeneratorResponseFile(betterproto.Message):
content: str = betterproto.string_field(15) content: str = betterproto.string_field(15)
"""The file contents.""" """The file contents."""
generated_code_info: ( generated_code_info: "betterproto_lib_pydantic_google_protobuf.GeneratedCodeInfo" = betterproto.message_field(
"betterproto_lib_pydantic_google_protobuf.GeneratedCodeInfo" 16
) = betterproto.message_field(16) )
""" """
Information describing the file content being inserted. If an insertion Information describing the file content being inserted. If an insertion
point is used, this information will be appropriately offset and inserted point is used, this information will be appropriately offset and inserted

View File

@ -1064,9 +1064,9 @@ class EnumDescriptorProto(betterproto.Message):
name: str = betterproto.string_field(1) name: str = betterproto.string_field(1)
value: List["EnumValueDescriptorProto"] = betterproto.message_field(2) value: List["EnumValueDescriptorProto"] = betterproto.message_field(2)
options: "EnumOptions" = betterproto.message_field(3) options: "EnumOptions" = betterproto.message_field(3)
reserved_range: List[ reserved_range: List["EnumDescriptorProtoEnumReservedRange"] = (
"EnumDescriptorProtoEnumReservedRange" betterproto.message_field(4)
] = betterproto.message_field(4) )
""" """
Range of reserved numeric values. Reserved numeric values may not be used Range of reserved numeric values. Reserved numeric values may not be used
by enum values in the same enum declaration. Reserved ranges may not by enum values in the same enum declaration. Reserved ranges may not
@ -1688,9 +1688,9 @@ class FeatureSetDefaults(betterproto.Message):
for the closest matching edition, followed by proto merges. for the closest matching edition, followed by proto merges.
""" """
defaults: List[ defaults: List["FeatureSetDefaultsFeatureSetEditionDefault"] = (
"FeatureSetDefaultsFeatureSetEditionDefault" betterproto.message_field(1)
] = betterproto.message_field(1) )
minimum_edition: "Edition" = betterproto.enum_field(4) minimum_edition: "Edition" = betterproto.enum_field(4)
""" """
The minimum supported edition (inclusive) when this was constructed. The minimum supported edition (inclusive) when this was constructed.

View File

@ -46,9 +46,9 @@ class CodeGeneratorRequest(betterproto.Message):
parameter: str = betterproto.string_field(2) parameter: str = betterproto.string_field(2)
"""The generator parameter passed on the command-line.""" """The generator parameter passed on the command-line."""
proto_file: List[ proto_file: List["betterproto_lib_google_protobuf.FileDescriptorProto"] = (
"betterproto_lib_google_protobuf.FileDescriptorProto" betterproto.message_field(15)
] = betterproto.message_field(15) )
""" """
FileDescriptorProtos for all files in files_to_generate and everything FileDescriptorProtos for all files in files_to_generate and everything
they import. The files will appear in topological order, so each file they import. The files will appear in topological order, so each file

View File

@ -1,4 +1,5 @@
import os.path import os.path
import subprocess
import sys import sys
from .module_validation import ModuleValidator from .module_validation import ModuleValidator
@ -6,8 +7,6 @@ from .module_validation import ModuleValidator
try: try:
# betterproto[compiler] specific dependencies # betterproto[compiler] specific dependencies
import black
import isort.api
import jinja2 import jinja2
except ImportError as err: except ImportError as err:
print( print(
@ -32,6 +31,7 @@ def outputfile_compiler(output_file: OutputTemplate) -> str:
trim_blocks=True, trim_blocks=True,
lstrip_blocks=True, lstrip_blocks=True,
loader=jinja2.FileSystemLoader(templates_folder), loader=jinja2.FileSystemLoader(templates_folder),
undefined=jinja2.StrictUndefined,
) )
# Load the body first so we have a compleate list of imports needed. # Load the body first so we have a compleate list of imports needed.
body_template = env.get_template("template.py.j2") body_template = env.get_template("template.py.j2")
@ -39,20 +39,17 @@ def outputfile_compiler(output_file: OutputTemplate) -> str:
code = body_template.render(output_file=output_file) code = body_template.render(output_file=output_file)
code = header_template.render(output_file=output_file) + code code = header_template.render(output_file=output_file) + code
code = isort.api.sort_code_string(
code=code, # Sort imports, delete unused ones
show_diff=False, code = subprocess.check_output(
py_version=37, ["ruff", "check", "--select", "I,F401", "--fix", "--silent", "-"],
profile="black", input=code,
combine_as_imports=True, encoding="utf-8",
lines_after_imports=2,
quiet=True,
force_grid_wrap=2,
known_third_party=["grpclib", "betterproto"],
) )
code = black.format_str(
src_contents=code, # Format the code
mode=black.Mode(), code = subprocess.check_output(
["ruff", "format", "-"], input=code, encoding="utf-8"
) )
# Validate the generated code. # Validate the generated code.
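
The hunk above swaps the in-process isort/black calls for ruff run as a subprocess over stdin/stdout. A self-contained sketch of that post-processing step, assuming a compatible ruff binary is on PATH (the helper name and sample input are illustrative):

import subprocess


def postprocess(code: str) -> str:
    """Tidy generated source by piping it through ruff."""
    # Drop unused imports (F401) and sort the remaining ones (I); "-" reads stdin.
    code = subprocess.check_output(
        ["ruff", "check", "--select", "I,F401", "--fix", "--silent", "-"],
        input=code,
        encoding="utf-8",
    )
    # Reformat the fixed source, again reading stdin and writing stdout.
    return subprocess.check_output(
        ["ruff", "format", "-"],
        input=code,
        encoding="utf-8",
    )


print(postprocess("import os\nimport sys\nprint(sys.path)\n"))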

View File

@ -153,11 +153,33 @@ def get_comment(
) -> str: ) -> str:
pad = " " * indent pad = " " * indent
for sci_loc in proto_file.source_code_info.location: for sci_loc in proto_file.source_code_info.location:
if list(sci_loc.path) == path and sci_loc.leading_comments: if list(sci_loc.path) == path:
lines = sci_loc.leading_comments.strip().split("\n") all_comments = list(sci_loc.leading_detached_comments)
if sci_loc.leading_comments:
all_comments.append(sci_loc.leading_comments)
if sci_loc.trailing_comments:
all_comments.append(sci_loc.trailing_comments)
lines = []
for comment in all_comments:
lines += comment.split("\n")
lines.append("")
# Remove consecutive empty lines
lines = [
line for i, line in enumerate(lines) if line or (i == 0 or lines[i - 1])
]
if lines and not lines[-1]:
lines.pop() # Remove the last empty line
# It is common for one line comments to start with a space, for example: // comment
# We don't add this space to the generated file.
lines = [line[1:] if line and line[0] == " " else line for line in lines]
# This is a field, message, enum, service, or method # This is a field, message, enum, service, or method
if len(lines) == 1 and len(lines[0]) < 79 - indent - 6: if len(lines) == 1 and len(lines[0]) < 79 - indent - 6:
lines[0] = lines[0].strip('"')
return f'{pad}"""{lines[0]}"""' return f'{pad}"""{lines[0]}"""'
else: else:
joined = f"\n{pad}".join(lines) joined = f"\n{pad}".join(lines)
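
A standalone sketch of the comment clean-up this hunk introduces: neighbouring comment blocks are joined, runs of blank lines collapse to one, a trailing blank line is dropped, and the single leading space left by "// comment" style is stripped (the helper name and the assert are illustrative only):

from typing import List


def tidy_comment_lines(comments: List[str]) -> List[str]:
    """Flatten proto comment blocks into docstring-ready lines."""
    lines: List[str] = []
    for comment in comments:
        lines += comment.split("\n")
        lines.append("")  # blank line between comment blocks

    # Remove consecutive empty lines, then a trailing empty line if any.
    lines = [line for i, line in enumerate(lines) if line or (i == 0 or lines[i - 1])]
    if lines and not lines[-1]:
        lines.pop()

    # "// comment" leaves a leading space on every line; drop it.
    return [line[1:] if line.startswith(" ") else line for line in lines]


assert tidy_comment_lines([" first line\n other line", " trailing note"]) == [
    "first line",
    "other line",
    "",
    "trailing note",
]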
@ -238,7 +260,7 @@ class OutputTemplate:
parent_request: PluginRequestCompiler parent_request: PluginRequestCompiler
package_proto_obj: FileDescriptorProto package_proto_obj: FileDescriptorProto
input_files: List[str] = field(default_factory=list) input_files: List[str] = field(default_factory=list)
imports: Set[str] = field(default_factory=set) imports_end: Set[str] = field(default_factory=set)
datetime_imports: Set[str] = field(default_factory=set) datetime_imports: Set[str] = field(default_factory=set)
pydantic_imports: Set[str] = field(default_factory=set) pydantic_imports: Set[str] = field(default_factory=set)
builtins_import: bool = False builtins_import: bool = False
@ -328,12 +350,6 @@ class MessageCompiler(ProtoContentBase):
def py_name(self) -> str: def py_name(self) -> str:
return pythonize_class_name(self.proto_name) return pythonize_class_name(self.proto_name)
@property
def annotation(self) -> str:
if self.repeated:
return self.typing_compiler.list(self.py_name)
return self.py_name
@property @property
def deprecated_fields(self) -> Iterator[str]: def deprecated_fields(self) -> Iterator[str]:
for f in self.fields: for f in self.fields:
@ -484,13 +500,6 @@ class FieldCompiler(MessageCompiler):
def optional(self) -> bool: def optional(self) -> bool:
return self.proto_obj.proto3_optional return self.proto_obj.proto3_optional
@property
def mutable(self) -> bool:
"""True if the field is a mutable type, otherwise False."""
return self.annotation.startswith(
("typing.List[", "typing.Dict[", "dict[", "list[", "Dict[", "List[")
)
@property @property
def field_type(self) -> str: def field_type(self) -> str:
"""String representation of proto field type.""" """String representation of proto field type."""
@ -532,7 +541,7 @@ class FieldCompiler(MessageCompiler):
# Type referencing another defined Message or a named enum # Type referencing another defined Message or a named enum
return get_type_reference( return get_type_reference(
package=self.output_file.package, package=self.output_file.package,
imports=self.output_file.imports, imports=self.output_file.imports_end,
source_type=self.proto_obj.type_name, source_type=self.proto_obj.type_name,
typing_compiler=self.typing_compiler, typing_compiler=self.typing_compiler,
pydantic=self.output_file.pydantic_dataclasses, pydantic=self.output_file.pydantic_dataclasses,
@ -661,6 +670,7 @@ class EnumDefinitionCompiler(MessageCompiler):
@dataclass @dataclass
class ServiceCompiler(ProtoContentBase): class ServiceCompiler(ProtoContentBase):
source_file: FileDescriptorProto
parent: OutputTemplate = PLACEHOLDER parent: OutputTemplate = PLACEHOLDER
proto_obj: DescriptorProto = PLACEHOLDER proto_obj: DescriptorProto = PLACEHOLDER
path: List[int] = PLACEHOLDER path: List[int] = PLACEHOLDER
@ -682,6 +692,7 @@ class ServiceCompiler(ProtoContentBase):
@dataclass @dataclass
class ServiceMethodCompiler(ProtoContentBase): class ServiceMethodCompiler(ProtoContentBase):
source_file: FileDescriptorProto
parent: ServiceCompiler parent: ServiceCompiler
proto_obj: MethodDescriptorProto proto_obj: MethodDescriptorProto
path: List[int] = PLACEHOLDER path: List[int] = PLACEHOLDER
@ -730,7 +741,7 @@ class ServiceMethodCompiler(ProtoContentBase):
""" """
return get_type_reference( return get_type_reference(
package=self.output_file.package, package=self.output_file.package,
imports=self.output_file.imports, imports=self.output_file.imports_end,
source_type=self.proto_obj.input_type, source_type=self.proto_obj.input_type,
typing_compiler=self.output_file.typing_compiler, typing_compiler=self.output_file.typing_compiler,
unwrap=False, unwrap=False,
@ -760,7 +771,7 @@ class ServiceMethodCompiler(ProtoContentBase):
""" """
return get_type_reference( return get_type_reference(
package=self.output_file.package, package=self.output_file.package,
imports=self.output_file.imports, imports=self.output_file.imports_end,
source_type=self.proto_obj.output_type, source_type=self.proto_obj.output_type,
typing_compiler=self.output_file.typing_compiler, typing_compiler=self.output_file.typing_compiler,
unwrap=False, unwrap=False,

View File

@ -143,7 +143,7 @@ def generate_code(request: CodeGeneratorRequest) -> CodeGeneratorResponse:
for output_package_name, output_package in request_data.output_packages.items(): for output_package_name, output_package in request_data.output_packages.items():
for proto_input_file in output_package.input_files: for proto_input_file in output_package.input_files:
for index, service in enumerate(proto_input_file.service): for index, service in enumerate(proto_input_file.service):
read_protobuf_service(service, index, output_package) read_protobuf_service(proto_input_file, service, index, output_package)
# Generate output files # Generate output files
output_paths: Set[pathlib.Path] = set() output_paths: Set[pathlib.Path] = set()
@ -249,12 +249,21 @@ def read_protobuf_type(
def read_protobuf_service( def read_protobuf_service(
service: ServiceDescriptorProto, index: int, output_package: OutputTemplate source_file: FileDescriptorProto,
service: ServiceDescriptorProto,
index: int,
output_package: OutputTemplate,
) -> None: ) -> None:
service_data = ServiceCompiler( service_data = ServiceCompiler(
parent=output_package, proto_obj=service, path=[6, index] source_file=source_file,
parent=output_package,
proto_obj=service,
path=[6, index],
) )
for j, method in enumerate(service.method): for j, method in enumerate(service.method):
ServiceMethodCompiler( ServiceMethodCompiler(
parent=service_data, proto_obj=method, path=[6, index, 2, j] source_file=source_file,
parent=service_data,
proto_obj=method,
path=[6, index, 2, j],
) )

View File

@ -2,13 +2,26 @@
# sources: {{ ', '.join(output_file.input_filenames) }} # sources: {{ ', '.join(output_file.input_filenames) }}
# plugin: python-betterproto # plugin: python-betterproto
# This file has been @generated # This file has been @generated
__all__ = (
{%- for enum in output_file.enums -%}
"{{ enum.py_name }}",
{%- endfor -%}
{%- for message in output_file.messages -%}
"{{ message.py_name }}",
{%- endfor -%}
{%- for service in output_file.services -%}
"{{ service.py_name }}Stub",
"{{ service.py_name }}Base",
{%- endfor -%}
)
{% for i in output_file.python_module_imports|sort %} {% for i in output_file.python_module_imports|sort %}
import {{ i }} import {{ i }}
{% endfor %} {% endfor %}
{% if output_file.pydantic_dataclasses %} {% if output_file.pydantic_dataclasses %}
from pydantic.dataclasses import dataclass from pydantic.dataclasses import dataclass
from pydantic.dataclasses import rebuild_dataclass
{%- else -%} {%- else -%}
from dataclasses import dataclass from dataclasses import dataclass
{% endif %} {% endif %}
@ -35,10 +48,6 @@ from betterproto.grpc.grpclib_server import ServiceBase
import grpclib import grpclib
{% endif %} {% endif %}
{% for i in output_file.imports|sort %}
{{ i }}
{% endfor %}
{% if output_file.imports_type_checking_only %} {% if output_file.imports_type_checking_only %}
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
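
For illustration, a module generated from a proto file with one enum, one message, and one service would now start roughly like this (all names below are hypothetical):

# sources: example.proto
# plugin: python-betterproto
# This file has been @generated

__all__ = (
    "Choice",
    "Greeting",
    "GreeterStub",
    "GreeterBase",
)

from dataclasses import dataclass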

View File

@ -77,14 +77,14 @@ class {{ service.py_name }}Stub(betterproto.ServiceStub):
, {{ method.py_input_message_param }}: "{{ method.py_input_message_type }}" , {{ method.py_input_message_param }}: "{{ method.py_input_message_type }}"
{%- else -%} {%- else -%}
{# Client streaming: need a request iterator instead #} {# Client streaming: need a request iterator instead #}
, {{ method.py_input_message_param }}_iterator: {{ output_file.typing_compiler.union(output_file.typing_compiler.async_iterable(method.py_input_message_type), output_file.typing_compiler.iterable(method.py_input_message_type)) }} , {{ method.py_input_message_param }}_iterator: "{{ output_file.typing_compiler.union(output_file.typing_compiler.async_iterable(method.py_input_message_type), output_file.typing_compiler.iterable(method.py_input_message_type)) }}"
{%- endif -%} {%- endif -%}
, ,
* *
, timeout: {{ output_file.typing_compiler.optional("float") }} = None , timeout: {{ output_file.typing_compiler.optional("float") }} = None
, deadline: {{ output_file.typing_compiler.optional('"Deadline"') }} = None , deadline: {{ output_file.typing_compiler.optional('"Deadline"') }} = None
, metadata: {{ output_file.typing_compiler.optional('"MetadataLike"') }} = None , metadata: {{ output_file.typing_compiler.optional('"MetadataLike"') }} = None
) -> {% if method.server_streaming %}{{ output_file.typing_compiler.async_iterator(method.py_output_message_type ) }}{% else %}"{{ method.py_output_message_type }}"{% endif %}: ) -> "{% if method.server_streaming %}{{ output_file.typing_compiler.async_iterator(method.py_output_message_type ) }}{% else %}{{ method.py_output_message_type }}{% endif %}":
{% if method.comment %} {% if method.comment %}
{{ method.comment }} {{ method.comment }}
@ -143,6 +143,10 @@ class {{ service.py_name }}Stub(betterproto.ServiceStub):
{% endfor %} {% endfor %}
{% endfor %} {% endfor %}
{% for i in output_file.imports_end %}
{{ i }}
{% endfor %}
{% for service in output_file.services %} {% for service in output_file.services %}
class {{ service.py_name }}Base(ServiceBase): class {{ service.py_name }}Base(ServiceBase):
{% if service.comment %} {% if service.comment %}
@ -211,11 +215,3 @@ class {{ service.py_name }}Base(ServiceBase):
} }
{% endfor %} {% endfor %}
{% if output_file.pydantic_dataclasses %}
{% for message in output_file.messages %}
{% if message.has_message_field %}
rebuild_dataclass({{ message.py_name }}) # type: ignore
{% endif %}
{% endfor %}
{% endif %}

View File

@ -4,17 +4,6 @@ import sys
import pytest import pytest
def pytest_addoption(parser):
parser.addoption(
"--repeat", type=int, default=1, help="repeat the operation multiple times"
)
@pytest.fixture(scope="session")
def repeat(request):
return request.config.getoption("repeat")
@pytest.fixture @pytest.fixture
def reset_sys_path(): def reset_sys_path():
original = copy.deepcopy(sys.path) original = copy.deepcopy(sys.path)

View File

@ -1,5 +1,4 @@
import asyncio import asyncio
import sys
import uuid import uuid
import grpclib import grpclib
@ -27,12 +26,12 @@ async def _test_client(client: ThingServiceClient, name="clean room", **kwargs):
def _assert_request_meta_received(deadline, metadata): def _assert_request_meta_received(deadline, metadata):
def server_side_test(stream): def server_side_test(stream):
assert stream.deadline._timestamp == pytest.approx( assert stream.deadline._timestamp == pytest.approx(deadline._timestamp, 1), (
deadline._timestamp, 1 "The provided deadline should be received serverside"
), "The provided deadline should be received serverside" )
assert ( assert stream.metadata["authorization"] == metadata["authorization"], (
stream.metadata["authorization"] == metadata["authorization"] "The provided authorization metadata should be received serverside"
), "The provided authorization metadata should be received serverside" )
return server_side_test return server_side_test
@ -91,9 +90,6 @@ async def test_trailer_only_error_stream_unary(
@pytest.mark.asyncio @pytest.mark.asyncio
@pytest.mark.skipif(
sys.version_info < (3, 8), reason="async mock spy does works for python3.8+"
)
async def test_service_call_mutable_defaults(mocker): async def test_service_call_mutable_defaults(mocker):
async with ChannelFor([ThingService()]) as channel: async with ChannelFor([ThingService()]) as channel:
client = ThingServiceClient(channel) client = ThingServiceClient(channel)
@ -269,9 +265,9 @@ async def test_async_gen_for_stream_stream_request():
else: else:
# No more things to send make sure channel is closed # No more things to send make sure channel is closed
request_chan.close() request_chan.close()
assert response_index == len( assert response_index == len(expected_things), (
expected_things "Didn't receive all expected responses"
), "Didn't receive all expected responses" )
@pytest.mark.asyncio @pytest.mark.asyncio

View File

@ -4,20 +4,20 @@ from tests.output_betterproto.casing import Test
def test_message_attributes(): def test_message_attributes():
message = Test() message = Test()
assert hasattr( assert hasattr(message, "snake_case_message"), (
message, "snake_case_message" "snake_case field name is same in python"
), "snake_case field name is same in python" )
assert hasattr(message, "camel_case"), "CamelCase field is snake_case in python" assert hasattr(message, "camel_case"), "CamelCase field is snake_case in python"
assert hasattr(message, "uppercase"), "UPPERCASE field is lowercase in python" assert hasattr(message, "uppercase"), "UPPERCASE field is lowercase in python"
def test_message_casing(): def test_message_casing():
assert hasattr( assert hasattr(casing, "SnakeCaseMessage"), (
casing, "SnakeCaseMessage" "snake_case Message name is converted to CamelCase in python"
), "snake_case Message name is converted to CamelCase in python" )
def test_enum_casing(): def test_enum_casing():
assert hasattr( assert hasattr(casing, "MyEnum"), (
casing, "MyEnum" "snake_case Enum name is converted to CamelCase in python"
), "snake_case Enum name is converted to CamelCase in python" )

View File

@ -2,13 +2,13 @@ import tests.output_betterproto.casing_inner_class as casing_inner_class
def test_message_casing_inner_class_name(): def test_message_casing_inner_class_name():
assert hasattr( assert hasattr(casing_inner_class, "TestInnerClass"), (
casing_inner_class, "TestInnerClass" "Inline defined Message is correctly converted to CamelCase"
), "Inline defined Message is correctly converted to CamelCase" )
def test_message_casing_inner_class_attributes(): def test_message_casing_inner_class_attributes():
message = casing_inner_class.Test() message = casing_inner_class.Test()
assert hasattr( assert hasattr(message.inner, "old_exp"), (
message.inner, "old_exp" "Inline defined Message attribute is snake_case"
), "Inline defined Message attribute is snake_case" )

View File

@ -3,12 +3,12 @@ from tests.output_betterproto.casing_message_field_uppercase import Test
def test_message_casing(): def test_message_casing():
message = Test() message = Test()
assert hasattr( assert hasattr(message, "uppercase"), (
message, "uppercase" "UPPERCASE attribute is converted to 'uppercase' in python"
), "UPPERCASE attribute is converted to 'uppercase' in python" )
assert hasattr( assert hasattr(message, "uppercase_v2"), (
message, "uppercase_v2" "UPPERCASE_V2 attribute is converted to 'uppercase_v2' in python"
), "UPPERCASE_V2 attribute is converted to 'uppercase_v2' in python" )
assert hasattr( assert hasattr(message, "upper_camel_case"), (
message, "upper_camel_case" "UPPER_CAMEL_CASE attribute is converted to upper_camel_case in python"
), "UPPER_CAMEL_CASE attribute is converted to upper_camel_case in python" )

View File

@ -0,0 +1,44 @@
syntax = "proto3";
package documentation;
// Documentation of message 1
// other line 1
// Documentation of message 2
// other line 2
message Test { // Documentation of message 3
// Documentation of field 1
// other line 1
// Documentation of field 2
// other line 2
uint32 x = 1; // Documentation of field 3
}
// Documentation of enum 1
// other line 1
// Documentation of enum 2
// other line 2
enum Enum { // Documentation of enum 3
// Documentation of variant 1
// other line 1
// Documentation of variant 2
// other line 2
Enum_Variant = 0; // Documentation of variant 3
}
// Documentation of service 1
// other line 1
// Documentation of service 2
// other line 2
service Service { // Documentation of service 3
// Documentation of method 1
// other line 1
// Documentation of method 2
// other line 2
rpc get(Test) returns (Test); // Documentation of method 3
}

View File

@ -27,9 +27,9 @@ def test_enum_is_comparable_with_int():
def test_enum_to_dict(): def test_enum_to_dict():
assert ( assert "choice" not in Test(choice=Choice.ZERO).to_dict(), (
"choice" not in Test(choice=Choice.ZERO).to_dict() "Default enum value is not serialized"
), "Default enum value is not serialized" )
assert ( assert (
Test(choice=Choice.ZERO).to_dict(include_default_values=True)["choice"] Test(choice=Choice.ZERO).to_dict(include_default_values=True)["choice"]
== "ZERO" == "ZERO"

View File

@ -26,5 +26,5 @@ import "other.proto";
// (root: Test & RootPackageMessage) <-------> (other: OtherPackageMessage) // (root: Test & RootPackageMessage) <-------> (other: OtherPackageMessage)
message Test { message Test {
RootPackageMessage message = 1; RootPackageMessage message = 1;
other.OtherPackageMessage other = 2; other.OtherPackageMessage other_value = 2;
} }

View File

@ -0,0 +1,19 @@
def test_all_definition():
"""
Check that a compiled module defines __all__ with the right value.
These modules have been chosen since they contain messages, services and enums.
"""
import tests.output_betterproto.enum as enum
import tests.output_betterproto.service as service
assert service.__all__ == (
"ThingType",
"DoThingRequest",
"DoThingResponse",
"GetThingRequest",
"GetThingResponse",
"TestStub",
"TestBase",
)
assert enum.__all__ == ("Choice", "ArithmeticOperator", "Test")

View File

@ -0,0 +1,37 @@
import ast
import inspect
def check(generated_doc: str, type: str) -> None:
assert f"Documentation of {type} 1" in generated_doc
assert "other line 1" in generated_doc
assert f"Documentation of {type} 2" in generated_doc
assert "other line 2" in generated_doc
assert f"Documentation of {type} 3" in generated_doc
def test_documentation() -> None:
from .output_betterproto.documentation import (
Enum,
ServiceBase,
ServiceStub,
Test,
)
check(Test.__doc__, "message")
source = inspect.getsource(Test)
tree = ast.parse(source)
check(tree.body[0].body[2].value.value, "field")
check(Enum.__doc__, "enum")
source = inspect.getsource(Enum)
tree = ast.parse(source)
check(tree.body[0].body[2].value.value, "variant")
check(ServiceBase.__doc__, "service")
check(ServiceBase.get.__doc__, "method")
check(ServiceStub.__doc__, "service")
check(ServiceStub.get.__doc__, "method")

View File

@ -621,9 +621,7 @@ iso_candidates = """2009-12-12T12:34
2010-02-18T16:00:00.23334444 2010-02-18T16:00:00.23334444
2010-02-18T16:00:00,2283 2010-02-18T16:00:00,2283
2009-05-19 143922 2009-05-19 143922
2009-05-19 1439""".split( 2009-05-19 1439""".split("\n")
"\n"
)
def test_iso_datetime(): def test_iso_datetime():

View File

@ -56,9 +56,9 @@ def test_reference_google_wellknown_types_non_wrappers(
) )
assert name == expected_name assert name == expected_name
assert imports.__contains__( assert imports.__contains__(expected_import), (
expected_import f"{expected_import} not found in {imports}"
), f"{expected_import} not found in {imports}" )
@pytest.mark.parametrize( @pytest.mark.parametrize(
@ -102,9 +102,9 @@ def test_reference_google_wellknown_types_non_wrappers_pydantic(
) )
assert name == expected_name assert name == expected_name
assert imports.__contains__( assert imports.__contains__(expected_import), (
expected_import f"{expected_import} not found in {imports}"
), f"{expected_import} not found in {imports}" )
@pytest.mark.parametrize( @pytest.mark.parametrize(

View File

@ -174,10 +174,9 @@ def test_message_equality(test_data: TestData) -> None:
@pytest.mark.parametrize("test_data", test_cases.messages_with_json, indirect=True) @pytest.mark.parametrize("test_data", test_cases.messages_with_json, indirect=True)
def test_message_json(repeat, test_data: TestData) -> None: def test_message_json(test_data: TestData) -> None:
plugin_module, _, json_data = test_data plugin_module, _, json_data = test_data
for _ in range(repeat):
for sample in json_data: for sample in json_data:
if sample.belongs_to(test_input_config.non_symmetrical_json): if sample.belongs_to(test_input_config.non_symmetrical_json):
continue continue
@ -198,16 +197,15 @@ def test_service_can_be_instantiated(test_data: TestData) -> None:
@pytest.mark.parametrize("test_data", test_cases.messages_with_json, indirect=True) @pytest.mark.parametrize("test_data", test_cases.messages_with_json, indirect=True)
def test_binary_compatibility(repeat, test_data: TestData) -> None: def test_binary_compatibility(test_data: TestData) -> None:
plugin_module, reference_module, json_data = test_data plugin_module, reference_module, json_data = test_data
for sample in json_data: for sample in json_data:
reference_instance = Parse(sample.json, reference_module().Test()) reference_instance = Parse(sample.json, reference_module().Test())
reference_binary_output = reference_instance.SerializeToString() reference_binary_output = reference_instance.SerializeToString()
for _ in range(repeat): plugin_instance_from_json: betterproto.Message = plugin_module.Test().from_json(
plugin_instance_from_json: betterproto.Message = ( sample.json
plugin_module.Test().from_json(sample.json)
) )
plugin_instance_from_binary = plugin_module.Test.FromString( plugin_instance_from_binary = plugin_module.Test.FromString(
reference_binary_output reference_binary_output

View File

@ -57,6 +57,11 @@ class Complex(betterproto.Message):
) )
class BetterprotoEnum(betterproto.Enum):
UNSPECIFIED = 0
ONE = 1
def complex_msg(): def complex_msg():
return Complex( return Complex(
foo_str="yep", foo_str="yep",
@ -201,3 +206,11 @@ def test_message_can_be_cached():
.string_value .string_value
== "world" == "world"
) )
def test_pickle_enum():
enum = BetterprotoEnum.ONE
assert unpickled(enum) == enum
enum = BetterprotoEnum.UNSPECIFIED
assert unpickled(enum) == enum

View File

@ -62,7 +62,7 @@ def test_load_varint_file():
stream.read(2) # Skip until first multi-byte stream.read(2) # Skip until first multi-byte
assert betterproto.load_varint(stream) == ( assert betterproto.load_varint(stream) == (
123456789, 123456789,
b"\x95\x9A\xEF\x3A", b"\x95\x9a\xef\x3a",
) # Multi-byte varint ) # Multi-byte varint
@ -338,7 +338,7 @@ def run_java_single_varint(value: int, tmp_path) -> int:
def test_single_varint(compile_jar, tmp_path): def test_single_varint(compile_jar, tmp_path):
single_byte = (1, b"\x01") single_byte = (1, b"\x01")
multi_byte = (123456789, b"\x95\x9A\xEF\x3A") multi_byte = (123456789, b"\x95\x9a\xef\x3a")
# Write a single-byte varint to a file and have Java read it back # Write a single-byte varint to a file and have Java read it back
returned = run_java_single_varint(single_byte[0], tmp_path) returned = run_java_single_varint(single_byte[0], tmp_path)
@ -351,8 +351,8 @@ def test_single_varint(compile_jar, tmp_path):
def test_multiple_varints(compile_jar, tmp_path): def test_multiple_varints(compile_jar, tmp_path):
single_byte = (1, b"\x01") single_byte = (1, b"\x01")
multi_byte = (123456789, b"\x95\x9A\xEF\x3A") multi_byte = (123456789, b"\x95\x9a\xef\x3a")
over32 = (3000000000, b"\x80\xBC\xC1\x96\x0B") over32 = (3000000000, b"\x80\xbc\xc1\x96\x0b")
# Write two varints to the same file # Write two varints to the same file
with open(tmp_path / "py_multiple_varints.out", "wb") as stream: with open(tmp_path / "py_multiple_varints.out", "wb") as stream:
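
The byte literals in this file only change case under ruff's formatting; the values are unchanged. For reference, a minimal sketch of how such base-128 varints are produced (an illustrative helper, not betterproto's API):

def encode_varint(value: int) -> bytes:
    """Encode a non-negative integer as a protobuf base-128 varint."""
    out = bytearray()
    while True:
        byte = value & 0x7F
        value >>= 7
        if value:
            out.append(byte | 0x80)  # set the continuation bit
        else:
            out.append(byte)
            return bytes(out)


assert encode_varint(1) == b"\x01"
assert encode_varint(123456789) == b"\x95\x9a\xef\x3a"
assert encode_varint(3000000000) == b"\x80\xbc\xc1\x96\x0b"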

View File

@ -11,6 +11,6 @@ PROJECT_TOML = Path(__file__).joinpath("..", "..", "pyproject.toml").resolve()
def test_version(): def test_version():
with PROJECT_TOML.open() as toml_file: with PROJECT_TOML.open() as toml_file:
project_config = tomlkit.loads(toml_file.read()) project_config = tomlkit.loads(toml_file.read())
assert ( assert __version__ == project_config["project"]["version"], (
__version__ == project_config["tool"]["poetry"]["version"] "Project version should match in package and package config"
), "Project version should match in package and package config" )