Compare commits
9 commits: master...michael-sa

Commits (SHA1):
- 0c02d1b21a
- ac32bcd25a
- 72855227bd
- 47081617c2
- d734206fe5
- bbf40f9694
- 6671d87cef
- cd66b0511a
- c48ca2e386
@@ -1,35 +0,0 @@
name: Release
run-name: ${{ gitea.actor }} is runs ci pipeline

on:
  push:
    branches:
      - master

jobs:
  packaging:
    name: Distribution
    runs-on: ubuntu-latest
    env:
      EXT_FIX: "6"
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.9
        uses: actions/setup-python@v5
        with:
          python-version: '3.9'
      - name: Install poetry
        run: python -m pip install poetry chardet
      - name: Install poetry compiler
        run: poetry install -E compiler
      - name: Set poetry version
        run: PV=$(poetry version -s) && poetry version ${PV}+jar3b${EXT_FIX}
      - name: Build package
        run: poetry build
      - name: Add pypi source
        run: poetry source add --priority=supplemental ahax https://git.ahax86.ru/api/packages/pub/pypi
      - name: Add pypi credentials
        run: poetry config http-basic.ahax ${{ secrets.REPO_USER }} ${{ secrets.REPO_PASS }}
      - name: Push to pypi
        run: poetry publish -r ahax -u ${{ secrets.REPO_USER }} -p ${{ secrets.REPO_PASS }} -n
23 .github/CONTRIBUTING.md vendored
@@ -1,23 +0,0 @@
# Contributing

There's lots to do, and we're working hard, so any help is welcome!

- :speech_balloon: Join us on [Discord](https://discord.gg/DEVteTupPb)!

What can you do?

- :+1: Vote on [issues](https://github.com/danielgtaylor/python-betterproto/issues).
- :speech_balloon: Give feedback on [Pull Requests](https://github.com/danielgtaylor/python-betterproto/pulls) and [Issues](https://github.com/danielgtaylor/python-betterproto/issues):
  - Suggestions
  - Express approval
  - Raise concerns
- :small_red_triangle: Create an issue:
  - File a bug (please check its not a duplicate)
  - Propose an enhancement
- :white_check_mark: Create a PR:
  - [Creating a failing test-case](https://github.com/danielgtaylor/python-betterproto/blob/master/tests/README.md) to make bug-fixing easier
  - Fix any of the open issues
    - [Good first issues](https://github.com/danielgtaylor/python-betterproto/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
    - [Issues with tests](https://github.com/danielgtaylor/python-betterproto/issues?q=is%3Aissue+is%3Aopen+label%3A%22has+test%22)
  - New bugfix or idea
    - If you'd like to discuss your idea first, join us on Discord!
63 .github/ISSUE_TEMPLATE/bug_report.yml vendored
@@ -1,63 +0,0 @@
name: Bug Report
description: Report broken or incorrect behaviour
labels: ["bug", "investigation needed"]

body:
  - type: markdown
    attributes:
      value: >
        Thanks for taking the time to fill out a bug report!

        If you're not sure it's a bug and you just have a question, the [community Discord channel](https://discord.gg/DEVteTupPb) is a better place for general questions than a GitHub issue.

  - type: input
    attributes:
      label: Summary
      description: A simple summary of your bug report
    validations:
      required: true

  - type: textarea
    attributes:
      label: Reproduction Steps
      description: >
        What you did to make it happen.
        Ideally there should be a short code snippet in this section to help reproduce the bug.
    validations:
      required: true

  - type: textarea
    attributes:
      label: Expected Results
      description: >
        What did you expect to happen?
    validations:
      required: true

  - type: textarea
    attributes:
      label: Actual Results
      description: >
        What actually happened?
    validations:
      required: true

  - type: textarea
    attributes:
      label: System Information
      description: >
        Paste the result of `protoc --version; python --version; pip show betterproto` below.
    validations:
      required: true

  - type: checkboxes
    attributes:
      label: Checklist
      options:
        - label: I have searched the issues for duplicates.
          required: true
        - label: I have shown the entire traceback, if possible.
          required: true
        - label: I have verified this issue occurs on the latest prelease of betterproto which can be installed using `pip install -U --pre betterproto`, if possible.
          required: true
6 .github/ISSUE_TEMPLATE/config.yml vendored
@@ -1,6 +0,0 @@
name:
description:
contact_links:
  - name: For questions about the library
    about: Support questions are better answered in our Discord group.
    url: https://discord.gg/DEVteTupPb
49 .github/ISSUE_TEMPLATE/feature_request.yml vendored
@@ -1,49 +0,0 @@
name: Feature Request
description: Suggest a feature for this library
labels: ["enhancement"]

body:
  - type: input
    attributes:
      label: Summary
      description: >
        What problem is your feature trying to solve? What would become easier or possible if feature was implemented?
    validations:
      required: true

  - type: dropdown
    attributes:
      multiple: false
      label: What is the feature request for?
      options:
        - The core library
        - RPC handling
        - The documentation
    validations:
      required: true

  - type: textarea
    attributes:
      label: The Problem
      description: >
        What problem is your feature trying to solve?
        What would become easier or possible if feature was implemented?
    validations:
      required: true

  - type: textarea
    attributes:
      label: The Ideal Solution
      description: >
        What is your ideal solution to the problem?
        What would you like this feature to do?
    validations:
      required: true

  - type: textarea
    attributes:
      label: The Current Solution
      description: >
        What is the current solution to the problem, if any?
    validations:
      required: false
16 .github/PULL_REQUEST_TEMPLATE.md vendored
@@ -1,16 +0,0 @@
## Summary

<!-- What is this pull request for? Does it fix any issues? -->

## Checklist

<!-- Put an x inside [ ] to check it, like so: [x] -->

- [ ] If code changes were made then they have been tested.
- [ ] I have updated the documentation to reflect the changes.
- [ ] This PR fixes an issue.
- [ ] This PR adds something new (e.g. new method or parameters).
- [ ] This change has an associated test.
- [ ] This PR is a breaking change (e.g. methods or parameters removed/renamed)
- [ ] This PR is **not** a code change (e.g. documentation, README, ...)
117 .github/workflows/ci.yml vendored
@@ -1,65 +1,74 @@
name: CI

on:
push:
branches:
- master
pull_request:
branches:
- '**'
on: [push, pull_request]

jobs:
tests:
name: ${{ matrix.os }} / ${{ matrix.python-version }}
runs-on: ${{ matrix.os }}-latest
strategy:
fail-fast: false
matrix:
os: [Ubuntu, MacOS, Windows]
python-version: ['3.9', '3.10', '3.11', '3.12', '3.13']
steps:
- uses: actions/checkout@v4

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
check-formatting:
runs-on: ubuntu-latest

name: Consult black on python formatting

steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.7
- uses: Gr1N/setup-poetry@v2
- uses: actions/cache@v2
with:
path: ~/.cache/pypoetry/virtualenvs
key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }}
restore-keys: |
${{ runner.os }}-poetry-
- name: Install dependencies
run: poetry install
- name: Run black
run: make check-style

run-tests:
runs-on: ubuntu-latest

name: Run tests with tox

strategy:
matrix:
python-version: [ '3.6', '3.7', '3.8']

steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}

- name: Get full Python version
id: full-python-version
shell: bash
run: echo "version=$(python -c "import sys; print('-'.join(str(v) for v in sys.version_info))")" >> "$GITHUB_OUTPUT"

- name: Install poetry
shell: bash
run: |
python -m pip install poetry
echo "$HOME/.poetry/bin" >> $GITHUB_PATH

- name: Configure poetry
shell: bash
run: poetry config virtualenvs.in-project true

- name: Set up cache
uses: actions/cache@v4
id: cache
- uses: Gr1N/setup-poetry@v2
- uses: actions/cache@v2
with:
path: .venv
key: venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('**/poetry.lock') }}

- name: Ensure cache is healthy
if: steps.cache.outputs.cache-hit == 'true'
shell: bash
run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv

path: ~/.cache/pypoetry/virtualenvs
key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }}
restore-keys: |
${{ runner.os }}-poetry-
- name: Install dependencies
shell: bash
run: poetry install -E compiler
run: |
sudo apt install protobuf-compiler libprotobuf-dev
poetry install
- name: Run tests
run: |
make generate
make test

- name: Generate code from proto files
shell: bash
run: poetry run python -m tests.generate -v
build-release:
runs-on: ubuntu-latest

- name: Execute test suite
shell: bash
run: poetry run python -m pytest tests/
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.7
- uses: Gr1N/setup-poetry@v2
- name: Build package
run: poetry build
- name: Publish package to PyPI
if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags')
run: poetry publish -n
env:
POETRY_PYPI_TOKEN_PYPI: ${{ secrets.pypi }}
18 .github/workflows/code-quality.yml vendored
@@ -1,18 +0,0 @@
name: Code Quality

on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - '**'

jobs:
  check-formatting:
    name: Check code/doc formatting
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
      - uses: pre-commit/action@v3.0.1
46 .github/workflows/codeql-analysis.yml vendored
@@ -1,46 +0,0 @@
name: "CodeQL"

on:
  push:
    branches: [ "master" ]
  pull_request:
    branches:
      - '**'
  schedule:
    - cron: '19 1 * * 6'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.

          # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
          # queries: security-extended,security-and-quality

      - name: Autobuild
        uses: github/codeql-action/autobuild@v3

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3
31 .github/workflows/release.yml vendored
@@ -1,31 +0,0 @@
name: Release

on:
  push:
    branches:
      - master
    tags:
      - '**'
  pull_request:
    branches:
      - '**'

jobs:
  packaging:
    name: Distribution
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.9
        uses: actions/setup-python@v5
        with:
          python-version: 3.9
      - name: Install poetry
        run: python -m pip install poetry
      - name: Build package
        run: poetry build
      - name: Publish package to PyPI
        if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags')
        env:
          POETRY_PYPI_TOKEN_PYPI: ${{ secrets.pypi }}
        run: poetry publish -n
7 .gitignore vendored
@@ -6,7 +6,7 @@
.pytest_cache
.python-version
build/
tests/output_*
betterproto/tests/output_*
**/__pycache__
dist
**/*.egg-info
@@ -14,8 +14,3 @@ output
.idea
.DS_Store
.tox
.venv
.asv
venv
.devcontainer
.ruff_cache
@@ -1,25 +0,0 @@
ci:
  autofix_prs: false

repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.9.1
    hooks:
      - id: ruff-format
        args: ["--diff", "src", "tests"]
      - id: ruff
        args: ["--select", "I", "src", "tests"]

  - repo: https://github.com/PyCQA/doc8
    rev: 0.10.1
    hooks:
      - id: doc8
        additional_dependencies:
          - toml

  - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
    rev: v2.14.0
    hooks:
      - id: pretty-format-java
        args: [--autofix, --aosp]
        files: ^.*\.java$
@@ -1,17 +0,0 @@
version: 2
formats: []

build:
  image: latest

sphinx:
  configuration: docs/conf.py
  fail_on_warning: false

python:
  version: 3.7
  install:
    - method: pip
      path: .
      extra_requirements:
        - dev
172 CHANGELOG.md
@@ -5,178 +5,6 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

- Versions suffixed with `b*` are in `beta` and can be installed with `pip install --pre betterproto`.

## [2.0.0b7] - 2024-08-11

- **Breaking**: Support `Pydantic` v2 and dropping support for v1 [#588](https://github.com/danielgtaylor/python-betterproto/pull/588)
- **Breaking**: The attempting to access an unset `oneof` now raises an `AttributeError`
  field. To see how to access `oneof` fields now, refer to [#558](https://github.com/danielgtaylor/python-betterproto/pull/558)
  and [README.md](https://github.com/danielgtaylor/python-betterproto#one-of-support).
- **Breaking**: A custom `Enum` has been implemented to match the behaviour of being an open set. Any checks for `isinstance(enum_member, enum.Enum)` and `issubclass(EnumSubclass, enum.Enum)` will now return `False`. This change also has the side effect of
  preventing any passthrough of `Enum` members (i.e. `Foo.RED.GREEN` doesn't work any more). See [#293](https://github.com/danielgtaylor/python-betterproto/pull/293) for more info, this fixed many bugs related to `Enum` handling.

- Add support for `pickle` methods [#535](https://github.com/danielgtaylor/python-betterproto/pull/535)
- Add support for `Struct` and `Value` types [#551](https://github.com/danielgtaylor/python-betterproto/pull/551)
- Add support for [`Rich` package](https://rich.readthedocs.io/en/latest/index.html) for pretty printing [#508](https://github.com/danielgtaylor/python-betterproto/pull/508)
- Improve support for streaming messages [#518](https://github.com/danielgtaylor/python-betterproto/pull/518) [#529](https://github.com/danielgtaylor/python-betterproto/pull/529)
- Improve performance of serializing / de-serializing messages [#545](https://github.com/danielgtaylor/python-betterproto/pull/545)
- Improve the handling of message name collisions with typing by allowing the method / type of imports to be configured.
  Refer to [#582](https://github.com/danielgtaylor/python-betterproto/pull/582)
  and [README.md](https://github.com/danielgtaylor/python-betterproto#configuration-typing-imports).
- Fix roundtrip parsing of `datetime`s [#534](https://github.com/danielgtaylor/python-betterproto/pull/534)
- Fix accessing unset optional fields [#523](https://github.com/danielgtaylor/python-betterproto/pull/523)
- Fix `Message` equality comparison [#513](https://github.com/danielgtaylor/python-betterproto/pull/513)
- Fix behaviour with long comment messages [#532](https://github.com/danielgtaylor/python-betterproto/pull/532)
- Add a warning when calling a deprecated message [#596](https://github.com/danielgtaylor/python-betterproto/pull/596)

## [2.0.0b6] - 2023-06-25

- **Breaking**: the minimum Python version has been bumped to `3.7` [#444](https://github.com/danielgtaylor/python-betterproto/pull/444)

- Support generating [Pydantic dataclasses](https://docs.pydantic.dev/latest/usage/dataclasses).
  Pydantic dataclasses are are drop-in replacement for dataclasses in the standard library that additionally supports validation.
  Pass `--python_betterproto_opt=pydantic_dataclasses` to enable this feature.
  Refer to [#406](https://github.com/danielgtaylor/python-betterproto/pull/406)
  and [README.md](https://github.com/danielgtaylor/python-betterproto#generating-pydantic-models) for more information.

- Added support for `@generated` marker [#382](https://github.com/danielgtaylor/python-betterproto/pull/382)
- Pull down the `include_default_values` argument to `to_json()` [#405](https://github.com/danielgtaylor/python-betterproto/pull/405)
- Pythonize input_type name in py_input_message [#436](https://github.com/danielgtaylor/python-betterproto/pull/436)
- Widen `from_dict()` to accept any `Mapping` [#451](https://github.com/danielgtaylor/python-betterproto/pull/451)
- Replace `pkg_resources` with `importlib` [#462](https://github.com/danielgtaylor/python-betterproto/pull/462)

- Fix typechecker compatiblity checks in server streaming methods [#413](https://github.com/danielgtaylor/python-betterproto/pull/413)
- Fix "empty-valued" repeated fields not being serialised [#417](https://github.com/danielgtaylor/python-betterproto/pull/417)
- Fix `dict` encoding for timezone-aware `datetimes` [#468](https://github.com/danielgtaylor/python-betterproto/pull/468)
- Fix `to_pydict()` serialization for optional fields [#495](https://github.com/danielgtaylor/python-betterproto/pull/495)
- Handle empty value objects properly [#481](https://github.com/danielgtaylor/python-betterproto/pull/481)

## [2.0.0b5] - 2022-08-01

- **Breaking**: Client and Service Stubs no longer pack and unpack the input message fields as parameters [#331](https://github.com/danielgtaylor/python-betterproto/pull/311)

Update your client calls and server handlers as follows:

Clients before:

```py
response = await service.echo(value="hello", extra_times=1)
```

Clients after:

```py
response = await service.echo(EchoRequest(value="hello", extra_times=1))
```

Servers before:

```py
async def echo(self, value: str, extra_times: int) -> EchoResponse: ...
```

Servers after:

```py
async def echo(self, echo_request: EchoRequest) -> EchoResponse:
    # Use echo_request.value
    # Use echo_request.extra_times
    ...
```

- Add `to/from_pydict()` for `Message` [#203](https://github.com/danielgtaylor/python-betterproto/pull/203)
- Format field comments also as docstrings [#304](https://github.com/danielgtaylor/python-betterproto/pull/304)
- Implement `__deepcopy__` for `Message` [#339](https://github.com/danielgtaylor/python-betterproto/pull/339)
- Run isort on compiled code [#355](https://github.com/danielgtaylor/python-betterproto/pull/355)
- Expose timeout, deadline and metadata parameters from grpclib [#352](https://github.com/danielgtaylor/python-betterproto/pull/352)
- Make `Message.__getattribute__` invisible to type checkers [#359](https://github.com/danielgtaylor/python-betterproto/pull/359)

- Fix map field edge-case [#254](https://github.com/danielgtaylor/python-betterproto/pull/254)
- Fix message text in `NotImplementedError` [#325](https://github.com/danielgtaylor/python-betterproto/pull/325)
- Fix `Message.from_dict()` in the presence of optional datetime fields [#329](https://github.com/danielgtaylor/python-betterproto/pull/329)
- Support Jinja2 3.0 to prevent version conflicts [#330](https://github.com/danielgtaylor/python-betterproto/pull/330)
- Fix overwriting top level `__init__.py` [#337](https://github.com/danielgtaylor/python-betterproto/pull/337)
- Remove deprecation warnings when fields are initialised with non-default values [#348](https://github.com/danielgtaylor/python-betterproto/pull/348)
- Ensure nested class names are converted to PascalCase [#353](https://github.com/danielgtaylor/python-betterproto/pull/353)
- Fix `Message.to_dict()` mutating the underlying Message [#378](https://github.com/danielgtaylor/python-betterproto/pull/378)
- Fix some parameters being missing from services [#381](https://github.com/danielgtaylor/python-betterproto/pull/381)

## [2.0.0b4] - 2022-01-03

- **Breaking**: the minimum Python version has been bumped to `3.6.2`

- Always add `AsyncIterator` to imports if there are services [#264](https://github.com/danielgtaylor/python-betterproto/pull/264)
- Allow parsing of messages from `ByteStrings` [#266](https://github.com/danielgtaylor/python-betterproto/pull/266)
- Add support for proto3 optional [#281](https://github.com/danielgtaylor/python-betterproto/pull/281)

- Fix compilation of fields with names identical to builtin types [#294](https://github.com/danielgtaylor/python-betterproto/pull/294)
- Fix default values for enum service args [#299](https://github.com/danielgtaylor/python-betterproto/pull/299)

## [2.0.0b3] - 2021-04-07

- Generate grpclib service stubs [#170](https://github.com/danielgtaylor/python-betterproto/pull/170)
- Add \_\_version\_\_ attribute to package [#134](https://github.com/danielgtaylor/python-betterproto/pull/134)
- Use betterproto generated messages in the plugin [#161](https://github.com/danielgtaylor/python-betterproto/pull/161)
- Sort the list of sources in generated file headers [#164](https://github.com/danielgtaylor/python-betterproto/pull/164)
- Micro-optimization: use tuples instead of lists for conditions [#228](https://github.com/danielgtaylor/python-betterproto/pull/228)
- Improve datestring parsing [#213](https://github.com/danielgtaylor/python-betterproto/pull/213)

- Fix serialization of repeated fields with empty messages [#180](https://github.com/danielgtaylor/python-betterproto/pull/180)
- Fix compilation of fields named 'bytes' or 'str' [#226](https://github.com/danielgtaylor/python-betterproto/pull/226)
- Fix json serialization of infinite and nan floats/doubles [#215](https://github.com/danielgtaylor/python-betterproto/pull/215)
- Fix template bug resulting in empty \_\_post_init\_\_ methods [#162](https://github.com/danielgtaylor/python-betterproto/pull/162)
- Fix serialization of zero-value messages in a oneof group [#176](https://github.com/danielgtaylor/python-betterproto/pull/176)
- Fix missing typing and datetime imports [#183](https://github.com/danielgtaylor/python-betterproto/pull/183)
- Fix code generation for empty services [#222](https://github.com/danielgtaylor/python-betterproto/pull/222)
- Fix Message.to_dict and from_dict handling of repeated timestamps and durations [#211](https://github.com/danielgtaylor/python-betterproto/pull/211)
- Fix incorrect routes in generated client when service is not in a package [#177](https://github.com/danielgtaylor/python-betterproto/pull/177)

## [2.0.0b2] - 2020-11-24

- Add support for deprecated message and fields [#126](https://github.com/danielgtaylor/python-betterproto/pull/126)
- Add support for recursive messages [#130](https://github.com/danielgtaylor/python-betterproto/pull/130)
- Add support for `bool(Message)` [#142](https://github.com/danielgtaylor/python-betterproto/pull/142)
- Improve support for Python 3.9 [#140](https://github.com/danielgtaylor/python-betterproto/pull/140) [#173](https://github.com/danielgtaylor/python-betterproto/pull/173)
- Improve keyword sanitisation for generated code [#137](https://github.com/danielgtaylor/python-betterproto/pull/137)

- Fix missing serialized_on_wire when message contains only lists [#81](https://github.com/danielgtaylor/python-betterproto/pull/81)
- Fix circular dependencies [#100](https://github.com/danielgtaylor/python-betterproto/pull/100)
- Fix to_dict enum fields when numbering is not consecutive [#102](https://github.com/danielgtaylor/python-betterproto/pull/102)
- Fix argument generation for stub methods when using `import` with proto definition [#103](https://github.com/danielgtaylor/python-betterproto/pull/103)
- Fix missing async/await keywords when casing [#104](https://github.com/danielgtaylor/python-betterproto/pull/104)
- Fix mutable default arguments in generated code [#105](https://github.com/danielgtaylor/python-betterproto/pull/105)
- Fix serialisation of default values in oneofs when calling to_dict() or to_json() [#110](https://github.com/danielgtaylor/python-betterproto/pull/110)
- Fix static type checking for grpclib client [#124](https://github.com/danielgtaylor/python-betterproto/pull/124)
- Fix python3.6 compatibility issue with dataclasses [#124](https://github.com/danielgtaylor/python-betterproto/pull/124)
- Fix handling of trailer-only responses [#127](https://github.com/danielgtaylor/python-betterproto/pull/127)

- Refactor plugin.py to use modular dataclasses in tree-like structure to represent parsed data [#121](https://github.com/danielgtaylor/python-betterproto/pull/121)
- Refactor template compilation logic [#136](https://github.com/danielgtaylor/python-betterproto/pull/136)

- Replace use of platform provided protoc with development dependency on grpcio-tools [#107](https://github.com/danielgtaylor/python-betterproto/pull/107)
- Switch to using `poe` from `make` to manage project development tasks [#118](https://github.com/danielgtaylor/python-betterproto/pull/118)
- Improve CI platform coverage [#128](https://github.com/danielgtaylor/python-betterproto/pull/128)

## [2.0.0b1] - 2020-07-04

[Upgrade Guide](./docs/upgrading.md)

> Several bugfixes and improvements required or will require small breaking changes, necessitating a new version.
> `2.0.0` will be released once the interface is stable.

- Add support for gRPC and **stream-stream** [#83](https://github.com/danielgtaylor/python-betterproto/pull/83)
- Switch from `pipenv` to `poetry` for development [#75](https://github.com/danielgtaylor/python-betterproto/pull/75)
- Fix two packages with the same name suffix should not cause naming conflict [#25](https://github.com/danielgtaylor/python-betterproto/issues/25)

- Fix Import child package from root [#57](https://github.com/danielgtaylor/python-betterproto/issues/57)
- Fix Import child package from package [#58](https://github.com/danielgtaylor/python-betterproto/issues/58)
- Fix Import parent package from child package [#59](https://github.com/danielgtaylor/python-betterproto/issues/59)
- Fix Import root package from child package [#60](https://github.com/danielgtaylor/python-betterproto/issues/60)
- Fix Import root package from root [#61](https://github.com/danielgtaylor/python-betterproto/issues/61)

- Fix ALL_CAPS message fields are parsed incorrectly. [#11](https://github.com/danielgtaylor/python-betterproto/issues/11)

## [1.2.5] - 2020-04-27

- Add .j2 suffix to python template names to avoid confusing certain build tools [#72](https://github.com/danielgtaylor/python-betterproto/pull/72)
21 LICENSE.md
@@ -1,21 +0,0 @@
MIT License

Copyright (c) 2023 Daniel G. Taylor

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
42 Makefile Normal file
@@ -0,0 +1,42 @@
.PHONY: help setup generate test types format clean plugin full-test check-style

help: ## - Show this help.
	@fgrep -h "##" $(MAKEFILE_LIST) | fgrep -v fgrep | sed -e 's/\\$$//' | sed -e 's/##//'

# Dev workflow tasks

generate: ## - Generate test cases (do this once before running test)
	poetry run ./betterproto/tests/generate.py

test: ## - Run tests
	poetry run pytest --cov betterproto

types: ## - Check types with mypy
	poetry run mypy betterproto --ignore-missing-imports

format: ## - Apply black formatting to source code
	poetry run black . --exclude tests/output_

clean: ## - Clean out generated files from the workspace
	rm -rf .coverage \
		.mypy_cache \
		.pytest_cache \
		dist \
		**/__pycache__ \
		betterproto/tests/output_*

# Manual testing

# By default write plugin output to a directory called output
o=output
plugin: ## - Execute the protoc plugin, with output write to `output` or the value passed to `-o`
	mkdir -p $(o)
	protoc --plugin=protoc-gen-custom=betterproto/plugin.py $(i) --custom_out=$(o)

# CI tasks

full-test: generate ## - Run full testing sequence with multiple pythons
	poetry run tox

check-style: ## - Check if code style is correct
	poetry run black . --check --diff --exclude tests/output_
238 README.md
@@ -1,21 +1,18 @@
# Better Protobuf / gRPC Support for Python



> :octocat: If you're reading this on github, please be aware that it might mention unreleased features! See the latest released README on [pypi](https://pypi.org/project/betterproto/).


This project aims to provide an improved experience when using Protobuf / gRPC in a modern Python environment by making use of modern language features and generating readable, understandable, idiomatic Python code. It will not support legacy features or environments (e.g. Protobuf 2). The following are supported:

- Protobuf 3 & gRPC code generation
- Both binary & JSON serialization is built-in
- Python 3.7+ making use of:
- Python 3.6+ making use of:
  - Enums
  - Dataclasses
  - `async`/`await`
  - Timezone-aware `datetime` and `timedelta` objects
  - Relative imports
  - Mypy type checking
  - [Pydantic Models](https://docs.pydantic.dev/) generation (see #generating-pydantic-models)

This project is heavily inspired by, and borrows functionality from:

@@ -41,10 +38,9 @@ This project exists because I am unhappy with the state of the official Google p
- Special wrapped types don't use Python's `None`
- Timestamp/duration types don't use Python's built-in `datetime` module


This project is a reimplementation from the ground up focused on idiomatic modern Python to help fix some of the above. While it may not be a 1:1 drop-in replacement due to changed method names and call patterns, the wire format is identical.

## Installation
## Installation & Getting Started

First, install the package. Note that the `[compiler]` feature flag tells it to install extra dependencies only needed by the `protoc` plugin:

@@ -56,13 +52,7 @@ pip install "betterproto[compiler]"
pip install betterproto
```

*Betterproto* is under active development. To install the latest beta version, use `pip install --pre betterproto`.

## Getting Started

### Compiling proto files

Given you installed the compiler and have a proto file, e.g `example.proto`:
Now, given you installed the compiler and have a proto file, e.g `example.proto`:

```protobuf
syntax = "proto3";
@@ -75,20 +65,13 @@ message Greeting {
}
```

You can run the following to invoke protoc directly:
You can run the following:

```sh
mkdir lib
protoc -I . --python_betterproto_out=lib example.proto
```

or run the following to invoke protoc via grpcio-tools:

```sh
pip install grpcio-tools
python -m grpc_tools.protoc -I . --python_betterproto_out=lib example.proto
```

This will generate `lib/hello/__init__.py` which looks like:

```python
@@ -137,7 +120,7 @@ Greeting(message="Hey!")

The generated Protobuf `Message` classes are compatible with [grpclib](https://github.com/vmagamedov/grpclib) so you are free to use it if you like. That said, this project also includes support for async gRPC stub generation with better static type checking and code completion support. It is enabled by default.

Given an example service definition:
Given an example like:

```protobuf
syntax = "proto3";
@@ -164,75 +147,22 @@ service Echo {
}
```

Generate echo proto file:
You can use it like so (enable async in the interactive shell first):

```
python -m grpc_tools.protoc -I . --python_betterproto_out=. echo.proto
```
```py
>>> import echo
>>> from grpclib.client import Channel

A client can be implemented as follows:
```python
import asyncio
import echo
>>> channel = Channel(host="127.0.0.1", port=1234)
>>> service = echo.EchoStub(channel)
>>> await service.echo(value="hello", extra_times=1)
EchoResponse(values=["hello", "hello"])

from grpclib.client import Channel


async def main():
channel = Channel(host="127.0.0.1", port=50051)
service = echo.EchoStub(channel)
response = await service.echo(echo.EchoRequest(value="hello", extra_times=1))
print(response)

async for response in service.echo_stream(echo.EchoRequest(value="hello", extra_times=1)):
>>> async for response in service.echo_stream(value="hello", extra_times=1)
print(response)

# don't forget to close the channel when done!
channel.close()


if __name__ == "__main__":
loop = asyncio.get_event_loop()
loop.run_until_complete(main())

```

which would output
```python
EchoResponse(values=['hello', 'hello'])
EchoStreamResponse(value='hello')
EchoStreamResponse(value='hello')
```

This project also produces server-facing stubs that can be used to implement a Python
gRPC server.
To use them, simply subclass the base class in the generated files and override the
service methods:

```python
import asyncio
from echo import EchoBase, EchoRequest, EchoResponse, EchoStreamResponse
from grpclib.server import Server
from typing import AsyncIterator


class EchoService(EchoBase):
    async def echo(self, echo_request: "EchoRequest") -> "EchoResponse":
        return EchoResponse([echo_request.value for _ in range(echo_request.extra_times)])

    async def echo_stream(self, echo_request: "EchoRequest") -> AsyncIterator["EchoStreamResponse"]:
        for _ in range(echo_request.extra_times):
            yield EchoStreamResponse(echo_request.value)


async def main():
    server = Server([EchoService()])
    await server.start("127.0.0.1", 50051)
    await server.wait_closed()

if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())
EchoStreamResponse(value="hello")
EchoStreamResponse(value="hello")
```

### JSON
@@ -244,8 +174,8 @@ Both serializing and parsing are supported to/from JSON and Python dictionaries

For compatibility the default is to convert field names to `camelCase`. You can control this behavior by passing a casing value, e.g:

```python
MyMessage().to_dict(casing=betterproto.Casing.SNAKE)
```py
>>> MyMessage().to_dict(casing=betterproto.Casing.SNAKE)
```

### Determining if a message was sent
@@ -278,22 +208,7 @@ message Test {
}
```

On Python 3.10 and later, you can use a `match` statement to access the provided one-of field, which supports type-checking:

```py
test = Test()
match test:
    case Test(on=value):
        print(value)  # value: bool
    case Test(count=value):
        print(value)  # value: int
    case Test(name=value):
        print(value)  # value: str
    case _:
        print("No value provided")
```

You can also use `betterproto.which_one_of(message, group_name)` to determine which of the fields was set. It returns a tuple of the field name and value, or a blank string and `None` if unset.
You can use `betterproto.which_one_of(message, group_name)` to determine which of the fields was set. It returns a tuple of the field name and value, or a blank string and `None` if unset.

```py
>>> test = Test()
@@ -308,11 +223,17 @@ You can also use `betterproto.which_one_of(message, group_name)` to determine wh
>>> test.count = 57
>>> betterproto.which_one_of(test, "foo")
["count", 57]
>>> test.on
False

# Default (zero) values also work.
>>> test.name = ""
>>> betterproto.which_one_of(test, "foo")
["name", ""]
>>> test.count
0
>>> test.on
False
```

Again this is a little different than the official Google code generator:
@@ -375,94 +296,22 @@ datetime.datetime(2019, 1, 1, 11, 59, 58, 800000, tzinfo=datetime.timezone.utc)
{'ts': '2019-01-01T12:00:00Z', 'duration': '1.200s'}
```

## Generating Pydantic Models

You can use python-betterproto to generate pydantic based models, using
pydantic dataclasses. This means the results of the protobuf unmarshalling will
be typed checked. The usage is the same, but you need to add a custom option
when calling the protobuf compiler:


```
protoc -I . --python_betterproto_opt=pydantic_dataclasses --python_betterproto_out=lib example.proto
```

With the important change being `--python_betterproto_opt=pydantic_dataclasses`. This will
swap the dataclass implementation from the builtin python dataclass to the
pydantic dataclass. You must have pydantic as a dependency in your project for
this to work.

## Configuration typing imports

By default typing types will be imported directly from typing. This sometimes can lead to issues in generation if types that are being generated conflict with the name. In this case you can configure the way types are imported from 3 different options:

### Direct
```
protoc -I . --python_betterproto_opt=typing.direct --python_betterproto_out=lib example.proto
```
this configuration is the default, and will import types as follows:
```
from typing import (
    List,
    Optional,
    Union
)
...
value: List[str] = []
value2: Optional[str] = None
value3: Union[str, int] = 1
```
### Root
```
protoc -I . --python_betterproto_opt=typing.root --python_betterproto_out=lib example.proto
```
this configuration loads the root typing module, and then access the types off of it directly:
```
import typing
...
value: typing.List[str] = []
value2: typing.Optional[str] = None
value3: typing.Union[str, int] = 1
```

### 310
```
protoc -I . --python_betterproto_opt=typing.310 --python_betterproto_out=lib example.proto
```
this configuration avoid loading typing all together if possible and uses the python 3.10 pattern:
```
...
value: list[str] = []
value2: str | None = None
value3: str | int = 1
```

## Development

- _Join us on [Discord](https://discord.gg/DEVteTupPb)!_
- _See how you can help → [Contributing](.github/CONTRIBUTING.md)_
Join us on [Slack](https://join.slack.com/t/betterproto/shared_invite/zt-f0n0uolx-iN8gBNrkPxtKHTLpG3o1OQ)!

### Requirements

- Python (3.7 or higher)

- [poetry](https://python-poetry.org/docs/#installation)
  *Needed to install dependencies in a virtual environment*

- [poethepoet](https://github.com/nat-n/poethepoet) for running development tasks as defined in pyproject.toml
  - Can be installed to your host environment via `pip install poethepoet` then executed as simple `poe`
  - or run from the poetry venv as `poetry run poe`

### Setup
First, make sure you have Python 3.6+ and `poetry` installed, along with the official [Protobuf Compiler](https://github.com/protocolbuffers/protobuf/releases) for your platform. Then:

```sh
# Get set up with the virtual env & dependencies
poetry install -E compiler
poetry install

# Activate the poetry environment
poetry shell
```

To benefit from the collection of standard development tasks ensure you have make installed and run `make help` to see available tasks.

### Code style

This project enforces [black](https://github.com/psf/black) python code formatting.
@@ -470,7 +319,7 @@ This project enforces [black](https://github.com/psf/black) python code formatti
Before committing changes run:

```sh
poe format
make format
```

To avoid merge conflicts later, non-black formatted python code will fail in CI.
@@ -492,7 +341,7 @@ Adding a standard test case is easy.

It will be picked up automatically when you run the tests.

- See also: [Standard Tests Development Guide](tests/README.md)
- See also: [Standard Tests Development Guide](betterproto/tests/README.md)

#### Custom tests

@@ -504,35 +353,36 @@ Here's how to run the tests.

```sh
# Generate assets from sample .proto files required by the tests
poe generate
make generate
# Run the tests
poe test
make test
```

To run tests as they are run in CI (with tox) run:

```sh
poe full-test
make full-test
```

### (Re)compiling Google Well-known Types

Betterproto includes compiled versions for Google's well-known types at [src/betterproto/lib/google](src/betterproto/lib/google).
Betterproto includes compiled versions for Google's well-known types at [betterproto/lib/google](betterproto/lib/google).
Be sure to regenerate these files when modifying the plugin output format, and validate by running the tests.

Normally, the plugin does not compile any references to `google.protobuf`, since they are pre-compiled. To force compilation of `google.protobuf`, use the option `--custom_opt=INCLUDE_GOOGLE`.
Normally, the plugin does not compile any references to `google.protobuf`, since they are pre-compiled. To force compilation of `google.protobuf`, use the option `--custom_opt=INCLUDE_GOOGLE`.

Assuming your `google.protobuf` source files (included with all releases of `protoc`) are located in `/usr/local/include`, you can regenerate them as follows:

```sh
protoc \
  --plugin=protoc-gen-custom=src/betterproto/plugin/main.py \
  --plugin=protoc-gen-custom=betterproto/plugin.py \
  --custom_opt=INCLUDE_GOOGLE \
  --custom_out=src/betterproto/lib \
  --custom_out=betterproto/lib \
  -I /usr/local/include/ \
  /usr/local/include/google/protobuf/*.proto
```


### TODO

- [x] Fixed length fields
@@ -563,10 +413,10 @@ protoc \
- [x] Enum strings
- [x] Well known types support (timestamp, duration, wrappers)
- [x] Support different casing (orig vs. camel vs. others?)
- [x] Async service stubs
- [ ] Async service stubs
  - [x] Unary-unary
  - [x] Server streaming response
  - [x] Client streaming request
  - [ ] Client streaming request
- [x] Renaming messages and fields to conform to Python name standards
- [x] Renaming clashes with language keywords
- [x] Python package
@@ -575,7 +425,7 @@ protoc \

## Community

Join us on [Discord](https://discord.gg/DEVteTupPb)!
Join us on [Slack](https://join.slack.com/t/betterproto/shared_invite/zt-f0n0uolx-iN8gBNrkPxtKHTLpG3o1OQ)!

## License

157 asv.conf.json
@@ -1,157 +0,0 @@
{
    // The version of the config file format. Do not change, unless
    // you know what you are doing.
    "version": 1,

    // The name of the project being benchmarked
    "project": "python-betterproto",

    // The project's homepage
    "project_url": "https://github.com/danielgtaylor/python-betterproto",

    // The URL or local path of the source code repository for the
    // project being benchmarked
    "repo": ".",

    // The Python project's subdirectory in your repo. If missing or
    // the empty string, the project is assumed to be located at the root
    // of the repository.
    // "repo_subdir": "",

    // Customizable commands for building, installing, and
    // uninstalling the project. See asv.conf.json documentation.
    //
    "install_command": ["python -m pip install ."],
    "uninstall_command": ["return-code=any python -m pip uninstall -y {project}"],
    "build_command": ["python -m pip wheel -w {build_cache_dir} {build_dir}"],

    // List of branches to benchmark. If not provided, defaults to "master"
    // (for git) or "default" (for mercurial).
    // "branches": ["master"], // for git
    // "branches": ["default"], // for mercurial

    // The DVCS being used. If not set, it will be automatically
    // determined from "repo" by looking at the protocol in the URL
    // (if remote), or by looking for special directories, such as
    // ".git" (if local).
    // "dvcs": "git",

    // The tool to use to create environments. May be "conda",
    // "virtualenv" or other value depending on the plugins in use.
    // If missing or the empty string, the tool will be automatically
    // determined by looking for tools on the PATH environment
    // variable.
    "environment_type": "virtualenv",

    // timeout in seconds for installing any dependencies in environment
    // defaults to 10 min
    //"install_timeout": 600,

    // the base URL to show a commit for the project.
    // "show_commit_url": "http://github.com/owner/project/commit/",

    // The Pythons you'd like to test against. If not provided, defaults
    // to the current version of Python used to run `asv`.
    // "pythons": ["2.7", "3.6"],

    // The list of conda channel names to be searched for benchmark
    // dependency packages in the specified order
    // "conda_channels": ["conda-forge", "defaults"],

    // The matrix of dependencies to test. Each key is the name of a
    // package (in PyPI) and the values are version numbers. An empty
    // list or empty string indicates to just test against the default
    // (latest) version. null indicates that the package is to not be
    // installed. If the package to be tested is only available from
    // PyPi, and the 'environment_type' is conda, then you can preface
    // the package name by 'pip+', and the package will be installed via
    // pip (with all the conda available packages installed first,
    // followed by the pip installed packages).
    //
    // "matrix": {
    //     "numpy": ["1.6", "1.7"],
    //     "six": ["", null], // test with and without six installed
    //     "pip+emcee": [""], // emcee is only available for install with pip.
    // },

    // Combinations of libraries/python versions can be excluded/included
    // from the set to test. Each entry is a dictionary containing additional
    // key-value pairs to include/exclude.
    //
    // An exclude entry excludes entries where all values match. The
    // values are regexps that should match the whole string.
    //
    // An include entry adds an environment. Only the packages listed
    // are installed. The 'python' key is required. The exclude rules
    // do not apply to includes.
    //
    // In addition to package names, the following keys are available:
    //
    // - python
    //     Python version, as in the *pythons* variable above.
    // - environment_type
    //     Environment type, as above.
    // - sys_platform
    //     Platform, as in sys.platform. Possible values for the common
    //     cases: 'linux2', 'win32', 'cygwin', 'darwin'.
    //
    // "exclude": [
    //     {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows
    //     {"environment_type": "conda", "six": null}, // don't run without six on conda
    // ],
    //
    // "include": [
    //     // additional env for python2.7
    //     {"python": "2.7", "numpy": "1.8"},
    //     // additional env if run on windows+conda
    //     {"platform": "win32", "environment_type": "conda", "python": "2.7", "libpython": ""},
    // ],

    // The directory (relative to the current directory) that benchmarks are
    // stored in. If not provided, defaults to "benchmarks"
    // "benchmark_dir": "benchmarks",

    // The directory (relative to the current directory) to cache the Python
    // environments in. If not provided, defaults to "env"
    "env_dir": ".asv/env",

    // The directory (relative to the current directory) that raw benchmark
    // results are stored in. If not provided, defaults to "results".
    "results_dir": ".asv/results",

    // The directory (relative to the current directory) that the html tree
    // should be written to. If not provided, defaults to "html".
    "html_dir": ".asv/html",

    // The number of characters to retain in the commit hashes.
    // "hash_length": 8,

    // `asv` will cache results of the recent builds in each
    // environment, making them faster to install next time. This is
    // the number of builds to keep, per environment.
    // "build_cache_size": 2,

    // The commits after which the regression search in `asv publish`
    // should start looking for regressions. Dictionary whose keys are
    // regexps matching to benchmark names, and values corresponding to
    // the commit (exclusive) after which to start looking for
    // regressions. The default is to start from the first commit
    // with results. If the commit is `null`, regression detection is
    // skipped for the matching benchmark.
    //
    // "regressions_first_commits": {
    //     "some_benchmark": "352cdf", // Consider regressions only after this commit
    //     "another_benchmark": null, // Skip regression detection altogether
    // },

    // The thresholds for relative change in results, after which `asv
    // publish` starts reporting regressions. Dictionary of the same
    // form as in ``regressions_first_commits``, with values
    // indicating the thresholds. If multiple entries match, the
    // maximum is taken. If no entry matches, the default is 5%.
    //
    // "regressions_thresholds": {
    //     "some_benchmark": 0.01, // Threshold of 1%
    //     "another_benchmark": 0.5, // Threshold of 50%
    // },
}
@@ -1,117 +0,0 @@
from dataclasses import dataclass
from typing import List

import betterproto


@dataclass
class TestMessage(betterproto.Message):
    foo: int = betterproto.uint32_field(1)
    bar: str = betterproto.string_field(2)
    baz: float = betterproto.float_field(3)


@dataclass
class TestNestedChildMessage(betterproto.Message):
    str_key: str = betterproto.string_field(1)
    bytes_key: bytes = betterproto.bytes_field(2)
    bool_key: bool = betterproto.bool_field(3)
    float_key: float = betterproto.float_field(4)
    int_key: int = betterproto.uint64_field(5)


@dataclass
class TestNestedMessage(betterproto.Message):
    foo: TestNestedChildMessage = betterproto.message_field(1)
    bar: TestNestedChildMessage = betterproto.message_field(2)
    baz: TestNestedChildMessage = betterproto.message_field(3)


@dataclass
class TestRepeatedMessage(betterproto.Message):
    foo_repeat: List[str] = betterproto.string_field(1)
    bar_repeat: List[int] = betterproto.int64_field(2)
    baz_repeat: List[bool] = betterproto.bool_field(3)


class BenchMessage:
    """Test creation and usage a proto message."""

    def setup(self):
        self.cls = TestMessage
        self.instance = TestMessage()
        self.instance_filled = TestMessage(0, "test", 0.0)
        self.instance_filled_bytes = bytes(self.instance_filled)
        self.instance_filled_nested = TestNestedMessage(
            TestNestedChildMessage("foo", bytearray(b"test1"), True, 0.1234, 500),
            TestNestedChildMessage("bar", bytearray(b"test2"), True, 3.1415, 302),
            TestNestedChildMessage("baz", bytearray(b"test3"), False, 1e5, 300),
        )
        self.instance_filled_nested_bytes = bytes(self.instance_filled_nested)
        self.instance_filled_repeated = TestRepeatedMessage(
            [f"test{i}" for i in range(1_000)],
            [(i - 500) ** 3 for i in range(1_000)],
            [i % 2 == 0 for i in range(1_000)],
        )
        self.instance_filled_repeated_bytes = bytes(self.instance_filled_repeated)

    def time_overhead(self):
        """Overhead in class definition."""

        @dataclass
        class Message(betterproto.Message):
            foo: int = betterproto.uint32_field(1)
            bar: str = betterproto.string_field(2)
            baz: float = betterproto.float_field(3)

    def time_instantiation(self):
        """Time instantiation"""
        self.cls()

    def time_attribute_access(self):
        """Time to access an attribute"""
        self.instance.foo
        self.instance.bar
        self.instance.baz

    def time_init_with_values(self):
        """Time to set an attribute"""
        self.cls(0, "test", 0.0)

    def time_attribute_setting(self):
        """Time to set attributes"""
        self.instance.foo = 0
        self.instance.bar = "test"
        self.instance.baz = 0.0

    def time_serialize(self):
        """Time serializing a message to wire."""
        bytes(self.instance_filled)

    def time_deserialize(self):
        """Time deserialize a message."""
        TestMessage().parse(self.instance_filled_bytes)

    def time_serialize_nested(self):
        """Time serializing a nested message to wire."""
        bytes(self.instance_filled_nested)

    def time_deserialize_nested(self):
        """Time deserialize a nested message."""
        TestNestedMessage().parse(self.instance_filled_nested_bytes)

    def time_serialize_repeated(self):
        """Time serializing a repeated message to wire."""
        bytes(self.instance_filled_repeated)

    def time_deserialize_repeated(self):
        """Time deserialize a repeated message."""
        TestRepeatedMessage().parse(self.instance_filled_repeated_bytes)


class MemSuite:
def setup(self):
|
||||
self.cls = TestMessage
|
||||
|
||||
def mem_instance(self):
|
||||
return self.cls()
|
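For context, the benchmark file removed above follows asv's discovery conventions: `setup` runs before each benchmark, `time_*` methods are timed, and `mem_*` methods report the size of whatever they return. A minimal sketch of the same conventions, using the betterproto API shown in that file (the `RoundTripMessage` class and the round-trip benchmark are illustrative, not part of the deleted file):

```python
from dataclasses import dataclass

import betterproto


@dataclass
class RoundTripMessage(betterproto.Message):
    value: int = betterproto.uint32_field(1)


class BenchRoundTrip:
    def setup(self):
        # Pre-serialize a payload so the benchmarks measure only the work they name.
        self.payload = bytes(RoundTripMessage(value=42))

    def time_round_trip(self):
        """Time a serialize/parse round trip."""
        RoundTripMessage().parse(bytes(RoundTripMessage(value=42)))

    def mem_parsed(self):
        # asv reports the memory footprint of the returned object.
        return RoundTripMessage().parse(self.payload)
```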
1010 betterproto/__init__.py (Normal file): file diff suppressed because it is too large
@ -1,13 +1,8 @@
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
TypeVar,
|
||||
)
|
||||
|
||||
from typing import TYPE_CHECKING, TypeVar
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from grpclib._typing import IProtoMessage
|
||||
|
||||
from . import Message
|
||||
from grpclib._protocols import IProtoMessage
|
||||
|
||||
# Bound type variable to allow methods to return `self` of subclasses
|
||||
T = TypeVar("T", bound="Message")
|
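The bound `TypeVar` above is what lets helpers that accept a `Type[T]` advertise that they hand back an instance of that same subclass rather than a plain `Message`. A minimal sketch of the pattern (the `parse_into` helper is hypothetical, not part of this module):

```python
from typing import Type, TypeVar

import betterproto

T = TypeVar("T", bound=betterproto.Message)


def parse_into(message_type: Type[T], data: bytes) -> T:
    # Type checkers see that parsing into a concrete subclass returns
    # that subclass, not the base Message type.
    return message_type().parse(data)
```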
@ -1,7 +1,5 @@
|
||||
import keyword
|
||||
import re
|
||||
|
||||
|
||||
# Word delimiters and symbols that will not be preserved when re-casing.
|
||||
# language=PythonRegExp
|
||||
SYMBOLS = "[^a-zA-Z0-9]*"
|
||||
@ -18,28 +16,51 @@ WORD_UPPER = "[A-Z]+(?![a-z])[0-9]*"
|
||||
def safe_snake_case(value: str) -> str:
|
||||
"""Snake case a value taking into account Python keywords."""
|
||||
value = snake_case(value)
|
||||
value = sanitize_name(value)
|
||||
if value in [
|
||||
"and",
|
||||
"as",
|
||||
"assert",
|
||||
"break",
|
||||
"class",
|
||||
"continue",
|
||||
"def",
|
||||
"del",
|
||||
"elif",
|
||||
"else",
|
||||
"except",
|
||||
"finally",
|
||||
"for",
|
||||
"from",
|
||||
"global",
|
||||
"if",
|
||||
"import",
|
||||
"in",
|
||||
"is",
|
||||
"lambda",
|
||||
"nonlocal",
|
||||
"not",
|
||||
"or",
|
||||
"pass",
|
||||
"raise",
|
||||
"return",
|
||||
"try",
|
||||
"while",
|
||||
"with",
|
||||
"yield",
|
||||
]:
|
||||
# https://www.python.org/dev/peps/pep-0008/#descriptive-naming-styles
|
||||
value += "_"
|
||||
return value
|
||||
|
||||
|
||||
def snake_case(value: str, strict: bool = True) -> str:
|
||||
def snake_case(value: str, strict: bool = True):
|
||||
"""
|
||||
Join words with an underscore into lowercase and remove symbols.
|
||||
|
||||
Parameters
|
||||
-----------
|
||||
value: :class:`str`
|
||||
The value to convert.
|
||||
strict: :class:`bool`
|
||||
Whether or not to force single underscores.
|
||||
|
||||
Returns
|
||||
--------
|
||||
:class:`str`
|
||||
The value in snake_case.
|
||||
@param value: value to convert
|
||||
@param strict: force single underscores
|
||||
"""
|
||||
|
||||
def substitute_word(symbols: str, word: str, is_start: bool) -> str:
|
||||
def substitute_word(symbols, word, is_start):
|
||||
if not word:
|
||||
return ""
|
||||
if strict:
|
||||
@ -63,21 +84,11 @@ def snake_case(value: str, strict: bool = True) -> str:
|
||||
return snake
|
||||
|
||||
|
||||
def pascal_case(value: str, strict: bool = True) -> str:
|
||||
def pascal_case(value: str, strict: bool = True):
|
||||
"""
|
||||
Capitalize each word and remove symbols.
|
||||
|
||||
Parameters
|
||||
-----------
|
||||
value: :class:`str`
|
||||
The value to convert.
|
||||
strict: :class:`bool`
|
||||
Whether or not to output only alphanumeric characters.
|
||||
|
||||
Returns
|
||||
--------
|
||||
:class:`str`
|
||||
The value in PascalCase.
|
||||
@param value: value to convert
|
||||
@param strict: output only alphanumeric characters
|
||||
"""
|
||||
|
||||
def substitute_word(symbols, word):
|
||||
@ -98,46 +109,12 @@ def pascal_case(value: str, strict: bool = True) -> str:
|
||||
)
|
||||
|
||||
|
||||
def camel_case(value: str, strict: bool = True) -> str:
|
||||
def camel_case(value: str, strict: bool = True):
|
||||
"""
|
||||
Capitalize all words except first and remove symbols.
|
||||
|
||||
Parameters
|
||||
-----------
|
||||
value: :class:`str`
|
||||
The value to convert.
|
||||
strict: :class:`bool`
|
||||
Whether or not to output only alphanumeric characters.
|
||||
|
||||
Returns
|
||||
--------
|
||||
:class:`str`
|
||||
The value in camelCase.
|
||||
"""
|
||||
return lowercase_first(pascal_case(value, strict=strict))
|
||||
|
||||
|
||||
def lowercase_first(value: str) -> str:
|
||||
"""
|
||||
Lower cases the first character of the value.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
value: :class:`str`
|
||||
The value to lower case.
|
||||
|
||||
Returns
|
||||
-------
|
||||
:class:`str`
|
||||
The lower cased string.
|
||||
"""
|
||||
def lowercase_first(value: str):
|
||||
return value[0:1].lower() + value[1:]
|
||||
|
||||
|
||||
def sanitize_name(value: str) -> str:
|
||||
# https://www.python.org/dev/peps/pep-0008/#descriptive-naming-styles
|
||||
if keyword.iskeyword(value):
|
||||
return f"{value}_"
|
||||
if not value.isidentifier():
|
||||
return f"_{value}"
|
||||
return value
|
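For reference, the helpers above are what the code generator uses to re-case proto identifiers. A few illustrative expectations, assuming the default `strict=True` behaviour and the `betterproto.casing` module path used elsewhere in this diff:

```python
from betterproto.casing import pascal_case, safe_snake_case, snake_case

assert snake_case("ThisFieldName") == "this_field_name"
assert pascal_case("this_field_name") == "ThisFieldName"
# Python keywords pick up a trailing underscore per PEP 8's naming advice.
assert safe_snake_case("for") == "for_"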
@ -1,23 +1,10 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Dict,
|
||||
List,
|
||||
Set,
|
||||
Tuple,
|
||||
Type,
|
||||
)
|
||||
from typing import Dict, List, Set, Type
|
||||
|
||||
from ..casing import safe_snake_case
|
||||
from ..lib.google import protobuf as google_protobuf
|
||||
from .naming import pythonize_class_name
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..plugin.typing_compiler import TypingCompiler
|
||||
from betterproto import safe_snake_case
|
||||
from betterproto.compile.naming import pythonize_class_name
|
||||
from betterproto.lib.google import protobuf as google_protobuf
|
||||
|
||||
WRAPPER_TYPES: Dict[str, Type] = {
|
||||
".google.protobuf.DoubleValue": google_protobuf.DoubleValue,
|
||||
@ -32,7 +19,7 @@ WRAPPER_TYPES: Dict[str, Type] = {
|
||||
}
|
||||
|
||||
|
||||
def parse_source_type_name(field_type_name: str) -> Tuple[str, str]:
|
||||
def parse_source_type_name(field_type_name):
|
||||
"""
|
||||
Split full source type name into package and type name.
|
||||
E.g. 'root.package.Message' -> ('root.package', 'Message')
|
||||
@ -49,13 +36,7 @@ def parse_source_type_name(field_type_name: str) -> Tuple[str, str]:
|
||||
|
||||
|
||||
def get_type_reference(
|
||||
*,
|
||||
package: str,
|
||||
imports: set,
|
||||
source_type: str,
|
||||
typing_compiler: TypingCompiler,
|
||||
unwrap: bool = True,
|
||||
pydantic: bool = False,
|
||||
package: str, imports: set, source_type: str, unwrap: bool = True,
|
||||
) -> str:
|
||||
"""
|
||||
Return a Python type name for a proto type reference. Adds the import if
|
||||
@ -64,12 +45,12 @@ def get_type_reference(
|
||||
if unwrap:
|
||||
if source_type in WRAPPER_TYPES:
|
||||
wrapped_type = type(WRAPPER_TYPES[source_type]().value)
|
||||
return typing_compiler.optional(wrapped_type.__name__)
|
||||
return f"Optional[{wrapped_type.__name__}]"
|
||||
|
||||
if source_type == ".google.protobuf.Duration":
|
||||
return "timedelta"
|
||||
|
||||
elif source_type == ".google.protobuf.Timestamp":
|
||||
if source_type == ".google.protobuf.Timestamp":
|
||||
return "datetime"
|
||||
|
||||
source_package, source_type = parse_source_type_name(source_type)
|
||||
@ -81,9 +62,7 @@ def get_type_reference(
|
||||
compiling_google_protobuf = current_package == ["google", "protobuf"]
|
||||
importing_google_protobuf = py_package == ["google", "protobuf"]
|
||||
if importing_google_protobuf and not compiling_google_protobuf:
|
||||
py_package = (
|
||||
["betterproto", "lib"] + (["pydantic"] if pydantic else []) + py_package
|
||||
)
|
||||
py_package = ["betterproto", "lib"] + py_package
|
||||
|
||||
if py_package[:1] == ["betterproto"]:
|
||||
return reference_absolute(imports, py_package, py_type)
|
||||
@ -100,14 +79,14 @@ def get_type_reference(
|
||||
return reference_cousin(current_package, imports, py_package, py_type)
|
||||
|
||||
|
||||
def reference_absolute(imports: Set[str], py_package: List[str], py_type: str) -> str:
|
||||
def reference_absolute(imports, py_package, py_type):
|
||||
"""
|
||||
Returns a reference to a python type located in the root, i.e. sys.path.
|
||||
"""
|
||||
string_import = ".".join(py_package)
|
||||
string_alias = safe_snake_case(string_import)
|
||||
imports.add(f"import {string_import} as {string_alias}")
|
||||
return f'"{string_alias}.{py_type}"'
|
||||
return f"{string_alias}.{py_type}"
|
||||
|
||||
|
||||
def reference_sibling(py_type: str) -> str:
|
||||
@ -121,9 +100,8 @@ def reference_descendent(
|
||||
current_package: List[str], imports: Set[str], py_package: List[str], py_type: str
|
||||
) -> str:
|
||||
"""
|
||||
Returns a reference to a python type in a package that is a descendent of the
|
||||
current package, and adds the required import that is aliased to avoid name
|
||||
conflicts.
|
||||
Returns a reference to a python type in a package that is a descendent of the current package,
|
||||
and adds the required import that is aliased to avoid name conflicts.
|
||||
"""
|
||||
importing_descendent = py_package[len(current_package) :]
|
||||
string_from = ".".join(importing_descendent[:-1])
|
||||
@ -131,19 +109,18 @@ def reference_descendent(
|
||||
if string_from:
|
||||
string_alias = "_".join(importing_descendent)
|
||||
imports.add(f"from .{string_from} import {string_import} as {string_alias}")
|
||||
return f'"{string_alias}.{py_type}"'
|
||||
return f"{string_alias}.{py_type}"
|
||||
else:
|
||||
imports.add(f"from . import {string_import}")
|
||||
return f'"{string_import}.{py_type}"'
|
||||
return f"{string_import}.{py_type}"
|
||||
|
||||
|
||||
def reference_ancestor(
|
||||
current_package: List[str], imports: Set[str], py_package: List[str], py_type: str
|
||||
) -> str:
|
||||
"""
|
||||
Returns a reference to a python type in a package which is an ancestor to the
|
||||
current package, and adds the required import that is aliased (if possible) to avoid
|
||||
name conflicts.
|
||||
Returns a reference to a python type in a package which is an ancestor to the current package,
|
||||
and adds the required import that is aliased (if possible) to avoid name conflicts.
|
||||
|
||||
Adds trailing __ to avoid name mangling (python.org/dev/peps/pep-0008/#id34).
|
||||
"""
|
||||
@ -153,21 +130,21 @@ def reference_ancestor(
|
||||
string_alias = f"_{'_' * distance_up}{string_import}__"
|
||||
string_from = f"..{'.' * distance_up}"
|
||||
imports.add(f"from {string_from} import {string_import} as {string_alias}")
|
||||
return f'"{string_alias}.{py_type}"'
|
||||
return f"{string_alias}.{py_type}"
|
||||
else:
|
||||
string_alias = f"{'_' * distance_up}{py_type}__"
|
||||
imports.add(f"from .{'.' * distance_up} import {py_type} as {string_alias}")
|
||||
return f'"{string_alias}"'
|
||||
return string_alias
|
||||
|
||||
|
||||
def reference_cousin(
|
||||
current_package: List[str], imports: Set[str], py_package: List[str], py_type: str
|
||||
) -> str:
|
||||
"""
|
||||
Returns a reference to a python type in a package that is not descendent, ancestor
|
||||
or sibling, and adds the required import that is aliased to avoid name conflicts.
|
||||
Returns a reference to a python type in a package that is not descendent, ancestor or sibling,
|
||||
and adds the required import that is aliased to avoid name conflicts.
|
||||
"""
|
||||
shared_ancestry = os.path.commonprefix([current_package, py_package]) # type: ignore
|
||||
shared_ancestry = os.path.commonprefix([current_package, py_package])
|
||||
distance_up = len(current_package) - len(shared_ancestry)
|
||||
string_from = f".{'.' * distance_up}" + ".".join(
|
||||
py_package[len(shared_ancestry) : -1]
|
||||
@ -180,4 +157,4 @@ def reference_cousin(
|
||||
+ "__"
|
||||
)
|
||||
imports.add(f"from {string_from} import {string_import} as {string_alias}")
|
||||
return f'"{string_alias}.{py_type}"'
|
||||
return f"{string_alias}.{py_type}"
|
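To make the aliasing behaviour above concrete, here is a hedged sketch of a descendant reference; the package names are invented for illustration, and whether the returned reference is wrapped in quotes differs between the two sides of this diff:

```python
from betterproto.compile.importing import reference_descendent

imports = set()
ref = reference_descendent(
    current_package=["root"],
    imports=imports,
    py_package=["root", "nested"],
    py_type="Message",
)
# The import set gains the relative import the reference relies on:
#   imports == {"from . import nested"}
# and `ref` points at the type through that module, e.g. "nested.Message".
print(ref, imports)
```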
13 betterproto/compile/naming.py (Normal file)
@ -0,0 +1,13 @@
|
||||
from betterproto import casing
|
||||
|
||||
|
||||
def pythonize_class_name(name):
|
||||
return casing.pascal_case(name)
|
||||
|
||||
|
||||
def pythonize_field_name(name: str):
|
||||
return casing.safe_snake_case(name)
|
||||
|
||||
|
||||
def pythonize_method_name(name: str):
|
||||
return casing.safe_snake_case(name)
|
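These thin wrappers are how proto identifiers are mapped onto Python naming conventions; a quick illustrative sketch (the inputs are made up):

```python
from betterproto.compile.naming import (
    pythonize_class_name,
    pythonize_field_name,
    pythonize_method_name,
)

print(pythonize_class_name("thing_service"))  # ThingService
print(pythonize_field_name("someField"))      # some_field
print(pythonize_method_name("DoThing"))       # do_thing
```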
@ -1,7 +1,8 @@
|
||||
import asyncio
|
||||
from abc import ABC
|
||||
import asyncio
|
||||
import grpclib.const
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
AsyncIterable,
|
||||
AsyncIterator,
|
||||
Collection,
|
||||
@ -9,28 +10,21 @@ from typing import (
|
||||
Mapping,
|
||||
Optional,
|
||||
Tuple,
|
||||
TYPE_CHECKING,
|
||||
Type,
|
||||
Union,
|
||||
)
|
||||
|
||||
import grpclib.const
|
||||
|
||||
from .._types import ST, T
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from grpclib.client import Channel
|
||||
from grpclib._protocols import IProtoMessage
|
||||
from grpclib.client import Channel, Stream
|
||||
from grpclib.metadata import Deadline
|
||||
|
||||
from .._types import (
|
||||
ST,
|
||||
IProtoMessage,
|
||||
Message,
|
||||
T,
|
||||
)
|
||||
|
||||
|
||||
Value = Union[str, bytes]
|
||||
MetadataLike = Union[Mapping[str, Value], Collection[Tuple[str, Value]]]
|
||||
MessageSource = Union[Iterable["IProtoMessage"], AsyncIterable["IProtoMessage"]]
|
||||
_Value = Union[str, bytes]
|
||||
_MetadataLike = Union[Mapping[str, _Value], Collection[Tuple[str, _Value]]]
|
||||
_MessageSource = Union[Iterable["IProtoMessage"], AsyncIterable["IProtoMessage"]]
|
||||
|
||||
|
||||
class ServiceStub(ABC):
|
||||
@ -44,7 +38,7 @@ class ServiceStub(ABC):
|
||||
*,
|
||||
timeout: Optional[float] = None,
|
||||
deadline: Optional["Deadline"] = None,
|
||||
metadata: Optional[MetadataLike] = None,
|
||||
metadata: Optional[_MetadataLike] = None,
|
||||
) -> None:
|
||||
self.channel = channel
|
||||
self.timeout = timeout
|
||||
@ -55,7 +49,7 @@ class ServiceStub(ABC):
|
||||
self,
|
||||
timeout: Optional[float],
|
||||
deadline: Optional["Deadline"],
|
||||
metadata: Optional[MetadataLike],
|
||||
metadata: Optional[_MetadataLike],
|
||||
):
|
||||
return {
|
||||
"timeout": self.timeout if timeout is None else timeout,
|
||||
@ -67,12 +61,12 @@ class ServiceStub(ABC):
|
||||
self,
|
||||
route: str,
|
||||
request: "IProtoMessage",
|
||||
response_type: Type["T"],
|
||||
response_type: Type[T],
|
||||
*,
|
||||
timeout: Optional[float] = None,
|
||||
deadline: Optional["Deadline"] = None,
|
||||
metadata: Optional[MetadataLike] = None,
|
||||
) -> "T":
|
||||
metadata: Optional[_MetadataLike] = None,
|
||||
) -> T:
|
||||
"""Make a unary request and return the response."""
|
||||
async with self.channel.request(
|
||||
route,
|
||||
@ -83,19 +77,19 @@ class ServiceStub(ABC):
|
||||
) as stream:
|
||||
await stream.send_message(request, end=True)
|
||||
response = await stream.recv_message()
|
||||
assert response is not None
|
||||
return response
|
||||
assert response is not None
|
||||
return response
|
||||
|
||||
async def _unary_stream(
|
||||
self,
|
||||
route: str,
|
||||
request: "IProtoMessage",
|
||||
response_type: Type["T"],
|
||||
response_type: Type[T],
|
||||
*,
|
||||
timeout: Optional[float] = None,
|
||||
deadline: Optional["Deadline"] = None,
|
||||
metadata: Optional[MetadataLike] = None,
|
||||
) -> AsyncIterator["T"]:
|
||||
metadata: Optional[_MetadataLike] = None,
|
||||
) -> AsyncIterator[T]:
|
||||
"""Make a unary request and return the stream response iterator."""
|
||||
async with self.channel.request(
|
||||
route,
|
||||
@ -111,14 +105,14 @@ class ServiceStub(ABC):
|
||||
async def _stream_unary(
|
||||
self,
|
||||
route: str,
|
||||
request_iterator: MessageSource,
|
||||
request_type: Type["IProtoMessage"],
|
||||
response_type: Type["T"],
|
||||
request_iterator: _MessageSource,
|
||||
request_type: Type[ST],
|
||||
response_type: Type[T],
|
||||
*,
|
||||
timeout: Optional[float] = None,
|
||||
deadline: Optional["Deadline"] = None,
|
||||
metadata: Optional[MetadataLike] = None,
|
||||
) -> "T":
|
||||
metadata: Optional[_MetadataLike] = None,
|
||||
) -> T:
|
||||
"""Make a stream request and return the response."""
|
||||
async with self.channel.request(
|
||||
route,
|
||||
@ -127,23 +121,22 @@ class ServiceStub(ABC):
|
||||
response_type,
|
||||
**self.__resolve_request_kwargs(timeout, deadline, metadata),
|
||||
) as stream:
|
||||
await stream.send_request()
|
||||
await self._send_messages(stream, request_iterator)
|
||||
response = await stream.recv_message()
|
||||
assert response is not None
|
||||
return response
|
||||
assert response is not None
|
||||
return response
|
||||
|
||||
async def _stream_stream(
|
||||
self,
|
||||
route: str,
|
||||
request_iterator: MessageSource,
|
||||
request_type: Type["IProtoMessage"],
|
||||
response_type: Type["T"],
|
||||
request_iterator: _MessageSource,
|
||||
request_type: Type[ST],
|
||||
response_type: Type[T],
|
||||
*,
|
||||
timeout: Optional[float] = None,
|
||||
deadline: Optional["Deadline"] = None,
|
||||
metadata: Optional[MetadataLike] = None,
|
||||
) -> AsyncIterator["T"]:
|
||||
metadata: Optional[_MetadataLike] = None,
|
||||
) -> AsyncIterator[T]:
|
||||
"""
|
||||
Make a stream request and return an AsyncIterator to iterate over response
|
||||
messages.
|
||||
@ -167,7 +160,7 @@ class ServiceStub(ABC):
|
||||
raise
|
||||
|
||||
@staticmethod
|
||||
async def _send_messages(stream, messages: MessageSource):
|
||||
async def _send_messages(stream, messages: _MessageSource):
|
||||
if isinstance(messages, AsyncIterable):
|
||||
async for message in messages:
|
||||
await stream.send_message(message)
|
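Generated client stubs subclass `ServiceStub` and call these helpers with a route, a request message and the expected response type, as the Jinja template later in this diff does. A hedged, hand-written equivalent (the `Echo` service, route and message types are invented; `betterproto.ServiceStub` follows the template on the older side of this diff):

```python
from dataclasses import dataclass

import betterproto
from grpclib.client import Channel


@dataclass
class EchoRequest(betterproto.Message):
    text: str = betterproto.string_field(1)


@dataclass
class EchoResponse(betterproto.Message):
    text: str = betterproto.string_field(1)


class EchoStub(betterproto.ServiceStub):
    async def echo(self, *, text: str = "") -> EchoResponse:
        request = EchoRequest()
        request.text = text
        # One request in, one response out.
        return await self._unary_unary("/example.Echo/Echo", request, EchoResponse)


async def call_echo(channel: Channel) -> None:
    client = EchoStub(channel)
    response = await client.echo(text="hello")
    print(response.text)
```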
@ -8,7 +8,6 @@ from typing import (
|
||||
Union,
|
||||
)
|
||||
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
@ -17,53 +16,57 @@ class ChannelClosed(Exception):
|
||||
An exception raised on an attempt to send through a closed channel
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class ChannelDone(Exception):
|
||||
"""
|
||||
An exception raised on an attempt to send receive from a channel that is both closed
|
||||
An exception raised on an attempt to send recieve from a channel that is both closed
|
||||
and empty.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class AsyncChannel(AsyncIterable[T]):
|
||||
"""
|
||||
A buffered async channel for sending items between coroutines with FIFO ordering.
|
||||
|
||||
This makes decoupled bidirectional steaming gRPC requests easy if used like:
|
||||
This makes decoupled bidirection steaming gRPC requests easy if used like:
|
||||
|
||||
.. code-block:: python
|
||||
client = GeneratedStub(grpclib_chan)
|
||||
request_channel = await AsyncChannel()
|
||||
request_chan = await AsyncChannel()
|
||||
# We can start be sending all the requests we already have
|
||||
await request_channel.send_from([RequestObject(...), RequestObject(...)])
|
||||
async for response in client.rpc_call(request_channel):
|
||||
await request_chan.send_from([ReqestObject(...), ReqestObject(...)])
|
||||
async for response in client.rpc_call(request_chan):
|
||||
# The response iterator will remain active until the connection is closed
|
||||
...
|
||||
# More items can be sent at any time
|
||||
await request_channel.send(RequestObject(...))
|
||||
await request_chan.send(ReqestObject(...))
|
||||
...
|
||||
# The channel must be closed to complete the gRPC connection
|
||||
request_channel.close()
|
||||
request_chan.close()
|
||||
|
||||
Items can be sent through the channel by either:
|
||||
- providing an iterable to the send_from method
|
||||
- passing them to the send method one at a time
|
||||
|
||||
Items can be received from the channel by either:
|
||||
Items can be recieved from the channel by either:
|
||||
- iterating over the channel with a for loop to get all items
|
||||
- calling the receive method to get one item at a time
|
||||
- calling the recieve method to get one item at a time
|
||||
|
||||
If the channel is empty then receivers will wait until either an item appears or the
|
||||
If the channel is empty then recievers will wait until either an item appears or the
|
||||
channel is closed.
|
||||
|
||||
Once the channel is closed then subsequent attempt to send through the channel will
|
||||
fail with a ChannelClosed exception.
|
||||
|
||||
When the channel is closed and empty then it is done, and further attempts to receive
|
||||
When the channel is closed and empty then it is done, and further attempts to recieve
|
||||
from it will fail with a ChannelDone exception
|
||||
|
||||
If multiple coroutines receive from the channel concurrently, each item sent will be
|
||||
received by only one of the receivers.
|
||||
If multiple coroutines recieve from the channel concurrently, each item sent will be
|
||||
recieved by only one of the recievers.
|
||||
|
||||
:param source:
|
||||
An optional iterable will items that should be sent through the channel
|
||||
@ -71,16 +74,18 @@ class AsyncChannel(AsyncIterable[T]):
|
||||
:param buffer_limit:
|
||||
Limit the number of items that can be buffered in the channel, A value less than
|
||||
1 implies no limit. If the channel is full then attempts to send more items will
|
||||
result in the sender waiting until an item is received from the channel.
|
||||
result in the sender waiting until an item is recieved from the channel.
|
||||
:param close:
|
||||
If set to True then the channel will automatically close after exhausting source
|
||||
or immediately if no source is provided.
|
||||
"""
|
||||
|
||||
def __init__(self, *, buffer_limit: int = 0, close: bool = False):
|
||||
self._queue: asyncio.Queue[T] = asyncio.Queue(buffer_limit)
|
||||
def __init__(
|
||||
self, *, buffer_limit: int = 0, close: bool = False,
|
||||
):
|
||||
self._queue: asyncio.Queue[Union[T, object]] = asyncio.Queue(buffer_limit)
|
||||
self._closed = False
|
||||
self._waiting_receivers: int = 0
|
||||
self._waiting_recievers: int = 0
|
||||
# Track whether flush has been invoked so it can only happen once
|
||||
self._flushed = False
|
||||
|
||||
@ -90,14 +95,14 @@ class AsyncChannel(AsyncIterable[T]):
|
||||
async def __anext__(self) -> T:
|
||||
if self.done():
|
||||
raise StopAsyncIteration
|
||||
self._waiting_receivers += 1
|
||||
self._waiting_recievers += 1
|
||||
try:
|
||||
result = await self._queue.get()
|
||||
if result is self.__flush:
|
||||
raise StopAsyncIteration
|
||||
return result
|
||||
finally:
|
||||
self._waiting_receivers -= 1
|
||||
self._waiting_recievers -= 1
|
||||
self._queue.task_done()
|
||||
|
||||
def closed(self) -> bool:
|
||||
@ -111,12 +116,12 @@ class AsyncChannel(AsyncIterable[T]):
|
||||
Check if this channel is done.
|
||||
|
||||
:return: True if this channel is closed and and has been drained of items in
|
||||
which case any further attempts to receive an item from this channel will raise
|
||||
which case any further attempts to recieve an item from this channel will raise
|
||||
a ChannelDone exception.
|
||||
"""
|
||||
# After close the channel is not yet done until there is at least one waiting
|
||||
# receiver per enqueued item.
|
||||
return self._closed and self._queue.qsize() <= self._waiting_receivers
|
||||
# reciever per enqueued item.
|
||||
return self._closed and self._queue.qsize() <= self._waiting_recievers
|
||||
|
||||
async def send_from(
|
||||
self, source: Union[Iterable[T], AsyncIterable[T]], close: bool = False
|
||||
@ -153,22 +158,22 @@ class AsyncChannel(AsyncIterable[T]):
|
||||
await self._queue.put(item)
|
||||
return self
|
||||
|
||||
async def receive(self) -> Optional[T]:
|
||||
async def recieve(self) -> Optional[T]:
|
||||
"""
|
||||
Returns the next item from this channel when it becomes available,
|
||||
or None if the channel is closed before another item is sent.
|
||||
:return: An item from the channel
|
||||
"""
|
||||
if self.done():
|
||||
raise ChannelDone("Cannot receive from a closed channel")
|
||||
self._waiting_receivers += 1
|
||||
raise ChannelDone("Cannot recieve from a closed channel")
|
||||
self._waiting_recievers += 1
|
||||
try:
|
||||
result = await self._queue.get()
|
||||
if result is self.__flush:
|
||||
return None
|
||||
return result
|
||||
finally:
|
||||
self._waiting_receivers -= 1
|
||||
self._waiting_recievers -= 1
|
||||
self._queue.task_done()
|
||||
|
||||
def close(self):
|
||||
@ -185,8 +190,8 @@ class AsyncChannel(AsyncIterable[T]):
|
||||
"""
|
||||
if not self._flushed:
|
||||
self._flushed = True
|
||||
deadlocked_receivers = max(0, self._waiting_receivers - self._queue.qsize())
|
||||
for _ in range(deadlocked_receivers):
|
||||
deadlocked_recievers = max(0, self._waiting_recievers - self._queue.qsize())
|
||||
for _ in range(deadlocked_recievers):
|
||||
await self._queue.put(self.__flush)
|
||||
|
||||
# A special signal object for flushing the queue when the channel is closed
|
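A small, runnable sketch of the producer/consumer flow the docstring describes, outside of any gRPC call (the import path matches the test code later in this diff):

```python
import asyncio

from betterproto.grpc.util.async_channel import AsyncChannel


async def main() -> None:
    channel = AsyncChannel()

    async def producer() -> None:
        # Queue a batch of items, then close so iteration can complete.
        await channel.send_from([1, 2, 3], close=True)

    async def consumer() -> None:
        async for item in channel:
            print(item)

    await asyncio.gather(producer(), consumer())


asyncio.run(main())
```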
1312 betterproto/lib/google/protobuf/__init__.py (Normal file): file diff suppressed because it is too large
2 betterproto/plugin.bat (Normal file)
@ -0,0 +1,2 @@
|
||||
@SET plugin_dir=%~dp0
|
||||
@python %plugin_dir%/plugin.py %*
|
403 betterproto/plugin.py (Executable file)
@ -0,0 +1,403 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import itertools
|
||||
import os.path
|
||||
import pathlib
|
||||
import re
|
||||
import sys
|
||||
import textwrap
|
||||
from typing import List, Union
|
||||
|
||||
import betterproto
|
||||
from betterproto.compile.importing import get_type_reference
|
||||
from betterproto.compile.naming import (
|
||||
pythonize_class_name,
|
||||
pythonize_field_name,
|
||||
pythonize_method_name,
|
||||
)
|
||||
|
||||
try:
|
||||
# betterproto[compiler] specific dependencies
|
||||
import black
|
||||
from google.protobuf.compiler import plugin_pb2 as plugin
|
||||
from google.protobuf.descriptor_pb2 import (
|
||||
DescriptorProto,
|
||||
EnumDescriptorProto,
|
||||
FieldDescriptorProto,
|
||||
)
|
||||
import google.protobuf.wrappers_pb2 as google_wrappers
|
||||
import jinja2
|
||||
except ImportError as err:
|
||||
missing_import = err.args[0][17:-1]
|
||||
print(
|
||||
"\033[31m"
|
||||
f"Unable to import `{missing_import}` from betterproto plugin! "
|
||||
"Please ensure that you've installed betterproto as "
|
||||
'`pip install "betterproto[compiler]"` so that compiler dependencies '
|
||||
"are included."
|
||||
"\033[0m"
|
||||
)
|
||||
raise SystemExit(1)
|
||||
|
||||
|
||||
def py_type(package: str, imports: set, field: FieldDescriptorProto) -> str:
|
||||
if field.type in [1, 2]:
|
||||
return "float"
|
||||
elif field.type in [3, 4, 5, 6, 7, 13, 15, 16, 17, 18]:
|
||||
return "int"
|
||||
elif field.type == 8:
|
||||
return "bool"
|
||||
elif field.type == 9:
|
||||
return "str"
|
||||
elif field.type in [11, 14]:
|
||||
# Type referencing another defined Message or a named enum
|
||||
return get_type_reference(package, imports, field.type_name)
|
||||
elif field.type == 12:
|
||||
return "bytes"
|
||||
else:
|
||||
raise NotImplementedError(f"Unknown type {field.type}")
|
||||
|
||||
|
||||
def get_py_zero(type_num: int) -> Union[str, float]:
|
||||
zero: Union[str, float] = 0
|
||||
if type_num in []:
|
||||
zero = 0.0
|
||||
elif type_num == 8:
|
||||
zero = "False"
|
||||
elif type_num == 9:
|
||||
zero = '""'
|
||||
elif type_num == 11:
|
||||
zero = "None"
|
||||
elif type_num == 12:
|
||||
zero = 'b""'
|
||||
|
||||
return zero
|
||||
|
||||
|
||||
def traverse(proto_file):
|
||||
def _traverse(path, items, prefix=""):
|
||||
for i, item in enumerate(items):
|
||||
# Adjust the name since we flatten the heirarchy.
|
||||
item.name = next_prefix = prefix + item.name
|
||||
yield item, path + [i]
|
||||
|
||||
if isinstance(item, DescriptorProto):
|
||||
for enum in item.enum_type:
|
||||
enum.name = next_prefix + enum.name
|
||||
yield enum, path + [i, 4]
|
||||
|
||||
if item.nested_type:
|
||||
for n, p in _traverse(path + [i, 3], item.nested_type, next_prefix):
|
||||
yield n, p
|
||||
|
||||
return itertools.chain(
|
||||
_traverse([5], proto_file.enum_type), _traverse([4], proto_file.message_type)
|
||||
)
|
||||
|
||||
|
||||
def get_comment(proto_file, path: List[int], indent: int = 4) -> str:
|
||||
pad = " " * indent
|
||||
for sci in proto_file.source_code_info.location:
|
||||
# print(list(sci.path), path, file=sys.stderr)
|
||||
if list(sci.path) == path and sci.leading_comments:
|
||||
lines = textwrap.wrap(
|
||||
sci.leading_comments.strip().replace("\n", ""), width=79 - indent
|
||||
)
|
||||
|
||||
if path[-2] == 2 and path[-4] != 6:
|
||||
# This is a field
|
||||
return f"{pad}# " + f"\n{pad}# ".join(lines)
|
||||
else:
|
||||
# This is a message, enum, service, or method
|
||||
if len(lines) == 1 and len(lines[0]) < 79 - indent - 6:
|
||||
lines[0] = lines[0].strip('"')
|
||||
return f'{pad}"""{lines[0]}"""'
|
||||
else:
|
||||
joined = f"\n{pad}".join(lines)
|
||||
return f'{pad}"""\n{pad}{joined}\n{pad}"""'
|
||||
|
||||
return ""
|
||||
|
||||
|
||||
def generate_code(request, response):
|
||||
plugin_options = request.parameter.split(",") if request.parameter else []
|
||||
|
||||
env = jinja2.Environment(
|
||||
trim_blocks=True,
|
||||
lstrip_blocks=True,
|
||||
loader=jinja2.FileSystemLoader("%s/templates/" % os.path.dirname(__file__)),
|
||||
)
|
||||
template = env.get_template("template.py.j2")
|
||||
|
||||
output_map = {}
|
||||
for proto_file in request.proto_file:
|
||||
if (
|
||||
proto_file.package == "google.protobuf"
|
||||
and "INCLUDE_GOOGLE" not in plugin_options
|
||||
):
|
||||
continue
|
||||
|
||||
output_file = str(pathlib.Path(*proto_file.package.split("."), "__init__.py"))
|
||||
|
||||
if output_file not in output_map:
|
||||
output_map[output_file] = {"package": proto_file.package, "files": []}
|
||||
output_map[output_file]["files"].append(proto_file)
|
||||
|
||||
# TODO: Figure out how to handle gRPC request/response messages and add
|
||||
# processing below for Service.
|
||||
|
||||
for filename, options in output_map.items():
|
||||
package = options["package"]
|
||||
# print(package, filename, file=sys.stderr)
|
||||
output = {
|
||||
"package": package,
|
||||
"files": [f.name for f in options["files"]],
|
||||
"imports": set(),
|
||||
"datetime_imports": set(),
|
||||
"typing_imports": set(),
|
||||
"messages": [],
|
||||
"enums": [],
|
||||
"services": [],
|
||||
}
|
||||
|
||||
for proto_file in options["files"]:
|
||||
item: DescriptorProto
|
||||
for item, path in traverse(proto_file):
|
||||
data = {"name": item.name, "py_name": pythonize_class_name(item.name)}
|
||||
|
||||
if isinstance(item, DescriptorProto):
|
||||
# print(item, file=sys.stderr)
|
||||
if item.options.map_entry:
|
||||
# Skip generated map entry messages since we just use dicts
|
||||
continue
|
||||
|
||||
data.update(
|
||||
{
|
||||
"type": "Message",
|
||||
"comment": get_comment(proto_file, path),
|
||||
"properties": [],
|
||||
}
|
||||
)
|
||||
|
||||
for i, f in enumerate(item.field):
|
||||
t = py_type(package, output["imports"], f)
|
||||
zero = get_py_zero(f.type)
|
||||
|
||||
repeated = False
|
||||
packed = False
|
||||
|
||||
field_type = f.Type.Name(f.type).lower()[5:]
|
||||
|
||||
field_wraps = ""
|
||||
match_wrapper = re.match(
|
||||
r"\.google\.protobuf\.(.+)Value", f.type_name
|
||||
)
|
||||
if match_wrapper:
|
||||
wrapped_type = "TYPE_" + match_wrapper.group(1).upper()
|
||||
if hasattr(betterproto, wrapped_type):
|
||||
field_wraps = f"betterproto.{wrapped_type}"
|
||||
|
||||
map_types = None
|
||||
if f.type == 11:
|
||||
# This might be a map...
|
||||
message_type = f.type_name.split(".").pop().lower()
|
||||
# message_type = py_type(package)
|
||||
map_entry = f"{f.name.replace('_', '').lower()}entry"
|
||||
|
||||
if message_type == map_entry:
|
||||
for nested in item.nested_type:
|
||||
if (
|
||||
nested.name.replace("_", "").lower()
|
||||
== map_entry
|
||||
):
|
||||
if nested.options.map_entry:
|
||||
# print("Found a map!", file=sys.stderr)
|
||||
k = py_type(
|
||||
package,
|
||||
output["imports"],
|
||||
nested.field[0],
|
||||
)
|
||||
v = py_type(
|
||||
package,
|
||||
output["imports"],
|
||||
nested.field[1],
|
||||
)
|
||||
t = f"Dict[{k}, {v}]"
|
||||
field_type = "map"
|
||||
map_types = (
|
||||
f.Type.Name(nested.field[0].type),
|
||||
f.Type.Name(nested.field[1].type),
|
||||
)
|
||||
output["typing_imports"].add("Dict")
|
||||
|
||||
if f.label == 3 and field_type != "map":
|
||||
# Repeated field
|
||||
repeated = True
|
||||
t = f"List[{t}]"
|
||||
zero = "[]"
|
||||
output["typing_imports"].add("List")
|
||||
|
||||
if f.type in [1, 2, 3, 4, 5, 6, 7, 8, 13, 15, 16, 17, 18]:
|
||||
packed = True
|
||||
|
||||
one_of = ""
|
||||
if f.HasField("oneof_index"):
|
||||
one_of = item.oneof_decl[f.oneof_index].name
|
||||
|
||||
if "Optional[" in t:
|
||||
output["typing_imports"].add("Optional")
|
||||
|
||||
if "timedelta" in t:
|
||||
output["datetime_imports"].add("timedelta")
|
||||
elif "datetime" in t:
|
||||
output["datetime_imports"].add("datetime")
|
||||
|
||||
data["properties"].append(
|
||||
{
|
||||
"name": f.name,
|
||||
"py_name": pythonize_field_name(f.name),
|
||||
"number": f.number,
|
||||
"comment": get_comment(proto_file, path + [2, i]),
|
||||
"proto_type": int(f.type),
|
||||
"field_type": field_type,
|
||||
"field_wraps": field_wraps,
|
||||
"map_types": map_types,
|
||||
"type": t,
|
||||
"zero": zero,
|
||||
"repeated": repeated,
|
||||
"packed": packed,
|
||||
"one_of": one_of,
|
||||
}
|
||||
)
|
||||
# print(f, file=sys.stderr)
|
||||
|
||||
output["messages"].append(data)
|
||||
elif isinstance(item, EnumDescriptorProto):
|
||||
# print(item.name, path, file=sys.stderr)
|
||||
data.update(
|
||||
{
|
||||
"type": "Enum",
|
||||
"comment": get_comment(proto_file, path),
|
||||
"entries": [
|
||||
{
|
||||
"name": v.name,
|
||||
"value": v.number,
|
||||
"comment": get_comment(proto_file, path + [2, i]),
|
||||
}
|
||||
for i, v in enumerate(item.value)
|
||||
],
|
||||
}
|
||||
)
|
||||
|
||||
output["enums"].append(data)
|
||||
|
||||
for i, service in enumerate(proto_file.service):
|
||||
# print(service, file=sys.stderr)
|
||||
|
||||
data = {
|
||||
"name": service.name,
|
||||
"py_name": pythonize_class_name(service.name),
|
||||
"comment": get_comment(proto_file, [6, i]),
|
||||
"methods": [],
|
||||
}
|
||||
|
||||
for j, method in enumerate(service.method):
|
||||
input_message = None
|
||||
input_type = get_type_reference(
|
||||
package, output["imports"], method.input_type
|
||||
).strip('"')
|
||||
for msg in output["messages"]:
|
||||
if msg["name"] == input_type:
|
||||
input_message = msg
|
||||
for field in msg["properties"]:
|
||||
if field["zero"] == "None":
|
||||
output["typing_imports"].add("Optional")
|
||||
break
|
||||
|
||||
data["methods"].append(
|
||||
{
|
||||
"name": method.name,
|
||||
"py_name": pythonize_method_name(method.name),
|
||||
"comment": get_comment(proto_file, [6, i, 2, j], indent=8),
|
||||
"route": f"/{package}.{service.name}/{method.name}",
|
||||
"input": get_type_reference(
|
||||
package, output["imports"], method.input_type
|
||||
).strip('"'),
|
||||
"input_message": input_message,
|
||||
"output": get_type_reference(
|
||||
package,
|
||||
output["imports"],
|
||||
method.output_type,
|
||||
unwrap=False,
|
||||
).strip('"'),
|
||||
"client_streaming": method.client_streaming,
|
||||
"server_streaming": method.server_streaming,
|
||||
}
|
||||
)
|
||||
|
||||
if method.client_streaming:
|
||||
output["typing_imports"].add("AsyncIterable")
|
||||
output["typing_imports"].add("Iterable")
|
||||
output["typing_imports"].add("Union")
|
||||
if method.server_streaming:
|
||||
output["typing_imports"].add("AsyncIterator")
|
||||
|
||||
output["services"].append(data)
|
||||
|
||||
output["imports"] = sorted(output["imports"])
|
||||
output["datetime_imports"] = sorted(output["datetime_imports"])
|
||||
output["typing_imports"] = sorted(output["typing_imports"])
|
||||
|
||||
# Fill response
|
||||
f = response.file.add()
|
||||
f.name = filename
|
||||
|
||||
# Render and then format the output file.
|
||||
f.content = black.format_str(
|
||||
template.render(description=output),
|
||||
mode=black.FileMode(target_versions=set([black.TargetVersion.PY37])),
|
||||
)
|
||||
|
||||
# Make each output directory a package with __init__ file
|
||||
output_paths = set(pathlib.Path(path) for path in output_map.keys())
|
||||
init_files = (
|
||||
set(
|
||||
directory.joinpath("__init__.py")
|
||||
for path in output_paths
|
||||
for directory in path.parents
|
||||
)
|
||||
- output_paths
|
||||
)
|
||||
|
||||
for init_file in init_files:
|
||||
init = response.file.add()
|
||||
init.name = str(init_file)
|
||||
|
||||
for filename in sorted(output_paths.union(init_files)):
|
||||
print(f"Writing {filename}", file=sys.stderr)
|
||||
|
||||
|
||||
def main():
|
||||
"""The plugin's main entry point."""
|
||||
# Read request message from stdin
|
||||
data = sys.stdin.buffer.read()
|
||||
|
||||
# Parse request
|
||||
request = plugin.CodeGeneratorRequest()
|
||||
request.ParseFromString(data)
|
||||
|
||||
# Create response
|
||||
response = plugin.CodeGeneratorResponse()
|
||||
|
||||
# Generate code
|
||||
generate_code(request, response)
|
||||
|
||||
# Serialise response message
|
||||
output = response.SerializeToString()
|
||||
|
||||
# Write to stdout
|
||||
sys.stdout.buffer.write(output)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
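Because `main()` just decodes a `CodeGeneratorRequest` from stdin and serialises a `CodeGeneratorResponse` to stdout, the generator can also be exercised outside protoc. A hedged sketch, assuming a request previously captured to `request.bin` (that file name is made up):

```python
import sys

from google.protobuf.compiler import plugin_pb2 as plugin

from betterproto.plugin import generate_code

with open("request.bin", "rb") as f:
    request = plugin.CodeGeneratorRequest()
    request.ParseFromString(f.read())

response = plugin.CodeGeneratorResponse()
generate_code(request, response)

# List the files the plugin would emit, without writing anything to disk.
for generated_file in response.file:
    print(generated_file.name, file=sys.stderr)
```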
135 betterproto/templates/template.py.j2 (Normal file)
@ -0,0 +1,135 @@
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
# sources: {{ ', '.join(description.files) }}
|
||||
# plugin: python-betterproto
|
||||
from dataclasses import dataclass
|
||||
{% if description.datetime_imports %}
|
||||
from datetime import {% for i in description.datetime_imports %}{{ i }}{% if not loop.last %}, {% endif %}{% endfor %}
|
||||
|
||||
{% endif%}
|
||||
{% if description.typing_imports %}
|
||||
from typing import {% for i in description.typing_imports %}{{ i }}{% if not loop.last %}, {% endif %}{% endfor %}
|
||||
|
||||
{% endif %}
|
||||
|
||||
import betterproto
|
||||
{% if description.services %}
|
||||
import grpclib
|
||||
{% endif %}
|
||||
|
||||
{% for i in description.imports %}
|
||||
{{ i }}
|
||||
{% endfor %}
|
||||
|
||||
|
||||
{% if description.enums %}{% for enum in description.enums %}
|
||||
class {{ enum.py_name }}(betterproto.Enum):
|
||||
{% if enum.comment %}
|
||||
{{ enum.comment }}
|
||||
|
||||
{% endif %}
|
||||
{% for entry in enum.entries %}
|
||||
{% if entry.comment %}
|
||||
{{ entry.comment }}
|
||||
{% endif %}
|
||||
{{ entry.name }} = {{ entry.value }}
|
||||
{% endfor %}
|
||||
|
||||
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% for message in description.messages %}
|
||||
@dataclass
|
||||
class {{ message.py_name }}(betterproto.Message):
|
||||
{% if message.comment %}
|
||||
{{ message.comment }}
|
||||
|
||||
{% endif %}
|
||||
{% for field in message.properties %}
|
||||
{% if field.comment %}
|
||||
{{ field.comment }}
|
||||
{% endif %}
|
||||
{{ field.py_name }}: {{ field.type }} = betterproto.{{ field.field_type }}_field({{ field.number }}{% if field.field_type == 'map'%}, betterproto.{{ field.map_types[0] }}, betterproto.{{ field.map_types[1] }}{% endif %}{% if field.one_of %}, group="{{ field.one_of }}"{% endif %}{% if field.field_wraps %}, wraps={{ field.field_wraps }}{% endif %})
|
||||
{% endfor %}
|
||||
{% if not message.properties %}
|
||||
pass
|
||||
{% endif %}
|
||||
|
||||
|
||||
{% endfor %}
|
||||
{% for service in description.services %}
|
||||
class {{ service.py_name }}Stub(betterproto.ServiceStub):
|
||||
{% if service.comment %}
|
||||
{{ service.comment }}
|
||||
|
||||
{% endif %}
|
||||
{% for method in service.methods %}
|
||||
async def {{ method.py_name }}(self
|
||||
{%- if not method.client_streaming -%}
|
||||
{%- if method.input_message and method.input_message.properties -%}, *,
|
||||
{%- for field in method.input_message.properties -%}
|
||||
{{ field.py_name }}: {% if field.zero == "None" and not field.type.startswith("Optional[") -%}
|
||||
Optional[{{ field.type }}]
|
||||
{%- else -%}
|
||||
{{ field.type }}
|
||||
{%- endif -%} = {{ field.zero }}
|
||||
{%- if not loop.last %}, {% endif -%}
|
||||
{%- endfor -%}
|
||||
{%- endif -%}
|
||||
{%- else -%}
|
||||
{# Client streaming: need a request iterator instead #}
|
||||
, request_iterator: Union[AsyncIterable["{{ method.input }}"], Iterable["{{ method.input }}"]]
|
||||
{%- endif -%}
|
||||
) -> {% if method.server_streaming %}AsyncIterator[{{ method.output }}]{% else %}{{ method.output }}{% endif %}:
|
||||
{% if method.comment %}
|
||||
{{ method.comment }}
|
||||
|
||||
{% endif %}
|
||||
{% if not method.client_streaming %}
|
||||
request = {{ method.input }}()
|
||||
{% for field in method.input_message.properties %}
|
||||
{% if field.field_type == 'message' %}
|
||||
if {{ field.py_name }} is not None:
|
||||
request.{{ field.py_name }} = {{ field.py_name }}
|
||||
{% else %}
|
||||
request.{{ field.py_name }} = {{ field.py_name }}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
|
||||
{% if method.server_streaming %}
|
||||
{% if method.client_streaming %}
|
||||
async for response in self._stream_stream(
|
||||
"{{ method.route }}",
|
||||
request_iterator,
|
||||
{{ method.input }},
|
||||
{{ method.output }},
|
||||
):
|
||||
yield response
|
||||
{% else %}{# i.e. not client streaming #}
|
||||
async for response in self._unary_stream(
|
||||
"{{ method.route }}",
|
||||
request,
|
||||
{{ method.output }},
|
||||
):
|
||||
yield response
|
||||
|
||||
{% endif %}{# if client streaming #}
|
||||
{% else %}{# i.e. not server streaming #}
|
||||
{% if method.client_streaming %}
|
||||
return await self._stream_unary(
|
||||
"{{ method.route }}",
|
||||
request_iterator,
|
||||
{{ method.input }},
|
||||
{{ method.output }}
|
||||
)
|
||||
{% else %}{# i.e. not client streaming #}
|
||||
return await self._unary_unary(
|
||||
"{{ method.route }}",
|
||||
request,
|
||||
{{ method.output }}
|
||||
)
|
||||
{% endif %}{# client streaming #}
|
||||
{% endif %}
|
||||
|
||||
{% endfor %}
|
||||
{% endfor %}
|
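To give a feel for what this template renders, here is roughly the output for a small input file; the message, enum and field names are invented, and the exact formatting comes from black:

```python
# Generated by the protocol buffer compiler. DO NOT EDIT!
# sources: greeting.proto
# plugin: python-betterproto
from dataclasses import dataclass

import betterproto


class Mood(betterproto.Enum):
    UNKNOWN = 0
    HAPPY = 1


@dataclass
class Greeting(betterproto.Message):
    """A friendly greeting."""

    message: str = betterproto.string_field(1)
    mood: "Mood" = betterproto.enum_field(2)
```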
@ -50,7 +50,7 @@ You can add multiple `.proto` files to the test case, as long as one file matche
|
||||
`test_<name>.py` — *Custom test to validate specific aspects of the generated class*
|
||||
|
||||
```python
|
||||
from tests.output_betterproto.bool.bool import Test
|
||||
from betterproto.tests.output_betterproto.bool.bool import Test
|
||||
|
||||
def test_value():
|
||||
message = Test()
|
@ -1,22 +1,21 @@
|
||||
#!/usr/bin/env python
|
||||
import asyncio
|
||||
import os
|
||||
import platform
|
||||
import shutil
|
||||
import sys
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import Set
|
||||
|
||||
from tests.util import (
|
||||
from betterproto.tests.util import (
|
||||
get_directories,
|
||||
inputs_path,
|
||||
output_path_betterproto,
|
||||
output_path_betterproto_pydantic,
|
||||
output_path_reference,
|
||||
protoc,
|
||||
protoc_plugin,
|
||||
protoc_reference,
|
||||
)
|
||||
|
||||
|
||||
# Force pure-python implementation instead of C++, otherwise imports
|
||||
# break things because we can't properly reset the symbol database.
|
||||
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python"
|
||||
@ -62,15 +61,13 @@ async def generate(whitelist: Set[str], verbose: bool):
|
||||
if result != 0:
|
||||
failed_test_cases.append(test_case_name)
|
||||
|
||||
if len(failed_test_cases) > 0:
|
||||
if failed_test_cases:
|
||||
sys.stderr.write(
|
||||
"\n\033[31;1;4mFailed to generate the following test cases:\033[0m\n"
|
||||
)
|
||||
for failed_test_case in failed_test_cases:
|
||||
sys.stderr.write(f"- {failed_test_case}\n")
|
||||
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
async def generate_test_case_output(
|
||||
test_case_input_path: Path, test_case_name: str, verbose: bool
|
||||
@ -80,12 +77,10 @@ async def generate_test_case_output(
|
||||
"""
|
||||
|
||||
test_case_output_path_reference = output_path_reference.joinpath(test_case_name)
|
||||
test_case_output_path_betterproto = output_path_betterproto
|
||||
test_case_output_path_betterproto_pyd = output_path_betterproto_pydantic
|
||||
test_case_output_path_betterproto = output_path_betterproto.joinpath(test_case_name)
|
||||
|
||||
os.makedirs(test_case_output_path_reference, exist_ok=True)
|
||||
os.makedirs(test_case_output_path_betterproto, exist_ok=True)
|
||||
os.makedirs(test_case_output_path_betterproto_pyd, exist_ok=True)
|
||||
|
||||
clear_directory(test_case_output_path_reference)
|
||||
clear_directory(test_case_output_path_betterproto)
|
||||
@ -93,75 +88,28 @@ async def generate_test_case_output(
|
||||
(
|
||||
(ref_out, ref_err, ref_code),
|
||||
(plg_out, plg_err, plg_code),
|
||||
(plg_out_pyd, plg_err_pyd, plg_code_pyd),
|
||||
) = await asyncio.gather(
|
||||
protoc(test_case_input_path, test_case_output_path_reference, True),
|
||||
protoc(test_case_input_path, test_case_output_path_betterproto, False),
|
||||
protoc(
|
||||
test_case_input_path, test_case_output_path_betterproto_pyd, False, True
|
||||
),
|
||||
protoc_reference(test_case_input_path, test_case_output_path_reference),
|
||||
protoc_plugin(test_case_input_path, test_case_output_path_betterproto),
|
||||
)
|
||||
|
||||
if ref_code == 0:
|
||||
print(f"\033[31;1;4mGenerated reference output for {test_case_name!r}\033[0m")
|
||||
else:
|
||||
print(
|
||||
f"\033[31;1;4mFailed to generate reference output for {test_case_name!r}\033[0m"
|
||||
)
|
||||
print(ref_err.decode())
|
||||
|
||||
message = f"Generated output for {test_case_name!r}"
|
||||
if verbose:
|
||||
print(f"\033[31;1;4m{message}\033[0m")
|
||||
if ref_out:
|
||||
print("Reference stdout:")
|
||||
sys.stdout.buffer.write(ref_out)
|
||||
sys.stdout.buffer.flush()
|
||||
|
||||
if ref_err:
|
||||
print("Reference stderr:")
|
||||
sys.stderr.buffer.write(ref_err)
|
||||
sys.stderr.buffer.flush()
|
||||
|
||||
if plg_code == 0:
|
||||
print(f"\033[31;1;4mGenerated plugin output for {test_case_name!r}\033[0m")
|
||||
else:
|
||||
print(
|
||||
f"\033[31;1;4mFailed to generate plugin output for {test_case_name!r}\033[0m"
|
||||
)
|
||||
print(plg_err.decode())
|
||||
|
||||
if verbose:
|
||||
if plg_out:
|
||||
print("Plugin stdout:")
|
||||
sys.stdout.buffer.write(plg_out)
|
||||
sys.stdout.buffer.flush()
|
||||
|
||||
if plg_err:
|
||||
print("Plugin stderr:")
|
||||
sys.stderr.buffer.write(plg_err)
|
||||
sys.stderr.buffer.flush()
|
||||
|
||||
if plg_code_pyd == 0:
|
||||
print(
|
||||
f"\033[31;1;4mGenerated plugin (pydantic compatible) output for {test_case_name!r}\033[0m"
|
||||
)
|
||||
sys.stdout.buffer.flush()
|
||||
sys.stderr.buffer.flush()
|
||||
else:
|
||||
print(
|
||||
f"\033[31;1;4mFailed to generate plugin (pydantic compatible) output for {test_case_name!r}\033[0m"
|
||||
)
|
||||
print(plg_err_pyd.decode())
|
||||
print(message)
|
||||
|
||||
if verbose:
|
||||
if plg_out_pyd:
|
||||
print("Plugin stdout:")
|
||||
sys.stdout.buffer.write(plg_out_pyd)
|
||||
sys.stdout.buffer.flush()
|
||||
|
||||
if plg_err_pyd:
|
||||
print("Plugin stderr:")
|
||||
sys.stderr.buffer.write(plg_err_pyd)
|
||||
sys.stderr.buffer.flush()
|
||||
|
||||
return max(ref_code, plg_code, plg_code_pyd)
|
||||
return max(ref_code, plg_code)
|
||||
|
||||
|
||||
HELP = "\n".join(
|
||||
@ -188,21 +136,7 @@ def main():
|
||||
else:
|
||||
verbose = False
|
||||
whitelist = set(sys.argv[1:])
|
||||
|
||||
if platform.system() == "Windows":
|
||||
# for python version prior to 3.8, loop policy needs to be set explicitly
|
||||
# https://docs.python.org/3/library/asyncio-policy.html#asyncio.DefaultEventLoopPolicy
|
||||
try:
|
||||
asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())
|
||||
except AttributeError:
|
||||
# python < 3.7 does not have asyncio.WindowsProactorEventLoopPolicy
|
||||
asyncio.get_event_loop_policy().set_event_loop(asyncio.ProactorEventLoop())
|
||||
|
||||
try:
|
||||
asyncio.run(generate(whitelist, verbose))
|
||||
except AttributeError:
|
||||
# compatibility code for python < 3.7
|
||||
asyncio.get_event_loop().run_until_complete(generate(whitelist, verbose))
|
||||
asyncio.get_event_loop().run_until_complete(generate(whitelist, verbose))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
154 betterproto/tests/grpc/test_grpclib_client.py (Normal file)
@ -0,0 +1,154 @@
|
||||
import asyncio
|
||||
from betterproto.tests.output_betterproto.service.service import (
|
||||
DoThingResponse,
|
||||
DoThingRequest,
|
||||
GetThingRequest,
|
||||
GetThingResponse,
|
||||
TestStub as ThingServiceClient,
|
||||
)
|
||||
import grpclib
|
||||
from grpclib.testing import ChannelFor
|
||||
import pytest
|
||||
from betterproto.grpc.util.async_channel import AsyncChannel
|
||||
from .thing_service import ThingService
|
||||
|
||||
|
||||
async def _test_client(client, name="clean room", **kwargs):
|
||||
response = await client.do_thing(name=name)
|
||||
assert response.names == [name]
|
||||
|
||||
|
||||
def _assert_request_meta_recieved(deadline, metadata):
|
||||
def server_side_test(stream):
|
||||
assert stream.deadline._timestamp == pytest.approx(
|
||||
deadline._timestamp, 1
|
||||
), "The provided deadline should be recieved serverside"
|
||||
assert (
|
||||
stream.metadata["authorization"] == metadata["authorization"]
|
||||
), "The provided authorization metadata should be recieved serverside"
|
||||
|
||||
return server_side_test
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_simple_service_call():
|
||||
async with ChannelFor([ThingService()]) as channel:
|
||||
        await _test_client(ThingServiceClient(channel))


@pytest.mark.asyncio
async def test_service_call_with_upfront_request_params():
    # Setting deadline
    deadline = grpclib.metadata.Deadline.from_timeout(22)
    metadata = {"authorization": "12345"}
    async with ChannelFor(
        [ThingService(test_hook=_assert_request_meta_recieved(deadline, metadata),)]
    ) as channel:
        await _test_client(
            ThingServiceClient(channel, deadline=deadline, metadata=metadata)
        )

    # Setting timeout
    timeout = 99
    deadline = grpclib.metadata.Deadline.from_timeout(timeout)
    metadata = {"authorization": "12345"}
    async with ChannelFor(
        [ThingService(test_hook=_assert_request_meta_recieved(deadline, metadata),)]
    ) as channel:
        await _test_client(
            ThingServiceClient(channel, timeout=timeout, metadata=metadata)
        )


@pytest.mark.asyncio
async def test_service_call_lower_level_with_overrides():
    THING_TO_DO = "get milk"

    # Setting deadline
    deadline = grpclib.metadata.Deadline.from_timeout(22)
    metadata = {"authorization": "12345"}
    kwarg_deadline = grpclib.metadata.Deadline.from_timeout(28)
    kwarg_metadata = {"authorization": "12345"}
    async with ChannelFor(
        [ThingService(test_hook=_assert_request_meta_recieved(deadline, metadata),)]
    ) as channel:
        client = ThingServiceClient(channel, deadline=deadline, metadata=metadata)
        response = await client._unary_unary(
            "/service.Test/DoThing",
            DoThingRequest(THING_TO_DO),
            DoThingResponse,
            deadline=kwarg_deadline,
            metadata=kwarg_metadata,
        )
        assert response.names == [THING_TO_DO]

    # Setting timeout
    timeout = 99
    deadline = grpclib.metadata.Deadline.from_timeout(timeout)
    metadata = {"authorization": "12345"}
    kwarg_timeout = 9000
    kwarg_deadline = grpclib.metadata.Deadline.from_timeout(kwarg_timeout)
    kwarg_metadata = {"authorization": "09876"}
    async with ChannelFor(
        [
            ThingService(
                test_hook=_assert_request_meta_recieved(kwarg_deadline, kwarg_metadata),
            )
        ]
    ) as channel:
        client = ThingServiceClient(channel, deadline=deadline, metadata=metadata)
        response = await client._unary_unary(
            "/service.Test/DoThing",
            DoThingRequest(THING_TO_DO),
            DoThingResponse,
            timeout=kwarg_timeout,
            metadata=kwarg_metadata,
        )
        assert response.names == [THING_TO_DO]


@pytest.mark.asyncio
async def test_async_gen_for_unary_stream_request():
    thing_name = "my milkshakes"

    async with ChannelFor([ThingService()]) as channel:
        client = ThingServiceClient(channel)
        expected_versions = [5, 4, 3, 2, 1]
        async for response in client.get_thing_versions(name=thing_name):
            assert response.name == thing_name
            assert response.version == expected_versions.pop()


@pytest.mark.asyncio
async def test_async_gen_for_stream_stream_request():
    some_things = ["cake", "cricket", "coral reef"]
    more_things = ["ball", "that", "56kmodem", "liberal humanism", "cheesesticks"]
    expected_things = (*some_things, *more_things)

    async with ChannelFor([ThingService()]) as channel:
        client = ThingServiceClient(channel)
        # Use an AsyncChannel to decouple sending and receiving; it'll send some_things
        # immediately and we'll use it to send more_things later, after receiving some
        # results
        request_chan = AsyncChannel()
        send_initial_requests = asyncio.ensure_future(
            request_chan.send_from(GetThingRequest(name) for name in some_things)
        )
        response_index = 0
        async for response in client.get_different_things(request_chan):
            assert response.name == expected_things[response_index]
            assert response.version == response_index + 1
            response_index += 1
            if more_things:
                # Send some more requests as we receive responses to be sure
                # coordination of send/receive events doesn't matter
                await request_chan.send(GetThingRequest(more_things.pop(0)))
            elif not send_initial_requests.done():
                # Make sure the sending task is completed
                await send_initial_requests
            else:
                # No more things to send; make sure the channel is closed
                request_chan.close()
        assert response_index == len(
            expected_things
        ), "Didn't receive all expected responses"
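The comment inside test_async_gen_for_stream_stream_request captures the key idea: an AsyncChannel decouples producing requests from the stream-stream call that consumes them. A minimal standalone sketch of the same pattern (not part of this diff), assuming send_from/send/close and async iteration behave exactly as the test above exercises them:

import asyncio

from betterproto.grpc.util.async_channel import AsyncChannel


async def demo():
    chan = AsyncChannel()

    async def produce():
        # Queue an initial batch, then trickle in one more item and close.
        await chan.send_from(f"item-{n}" for n in range(3))
        await chan.send("late item")
        chan.close()  # signal consumers that nothing more will be sent

    producer = asyncio.ensure_future(produce())

    # The channel is itself an async iterator, so it could be handed straight to a
    # stream-stream client method; here we simply drain it with `async for`.
    received = [item async for item in chan]
    await producer
    assert received == ["item-0", "item-1", "item-2", "late item"]


asyncio.run(demo())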
@ -1,11 +1,9 @@
import asyncio
from dataclasses import dataclass
from typing import AsyncIterator

import pytest

import betterproto
from betterproto.grpc.util.async_channel import AsyncChannel
from dataclasses import dataclass
import pytest
from typing import AsyncIterator


@dataclass
@ -29,7 +27,10 @@ class ClientStub:


async def to_list(generator: AsyncIterator):
    return [value async for value in generator]
    result = []
    async for value in generator:
        result.append(value)
    return result


@pytest.fixture
@ -1,14 +1,12 @@
from typing import Dict

import grpclib
import grpclib.server

from tests.output_betterproto.service import (
    DoThingRequest,
from betterproto.tests.output_betterproto.service.service import (
    DoThingResponse,
    DoThingRequest,
    GetThingRequest,
    GetThingResponse,
    TestStub as ThingServiceClient,
)
import grpclib
from typing import Any, Dict


class ThingService:
@ -27,7 +25,7 @@ class ThingService:
    async def do_many_things(
        self, stream: "grpclib.server.Stream[DoThingRequest, DoThingResponse]"
    ):
        thing_names = [request.name async for request in stream]
        thing_names = [request.name for request in stream]
        if self.test_hook is not None:
            self.test_hook(stream)
        await stream.send_message(DoThingResponse(thing_names))
@ -1,7 +1,5 @@
syntax = "proto3";

package bool;

message Test {
  bool value = 1;
}

6 betterproto/tests/inputs/bool/test_bool.py Normal file
@ -0,0 +1,6 @@
from betterproto.tests.output_betterproto.bool import Test


def test_value():
    message = Test()
    assert not message.value, "Boolean is False by default"

@ -1,7 +1,5 @@
syntax = "proto3";

package bytes;

message Test {
  bytes data = 1;
}
@ -1,7 +1,5 @@
syntax = "proto3";

package casing;

enum my_enum {
  ZERO = 0;
  ONE = 1;

23 betterproto/tests/inputs/casing/test_casing.py Normal file
@ -0,0 +1,23 @@
import betterproto.tests.output_betterproto.casing as casing
from betterproto.tests.output_betterproto.casing import Test


def test_message_attributes():
    message = Test()
    assert hasattr(
        message, "snake_case_message"
    ), "snake_case field name is same in python"
    assert hasattr(message, "camel_case"), "CamelCase field is snake_case in python"
    assert hasattr(message, "uppercase"), "UPPERCASE field is lowercase in python"


def test_message_casing():
    assert hasattr(
        casing, "SnakeCaseMessage"
    ), "snake_case Message name is converted to CamelCase in python"


def test_enum_casing():
    assert hasattr(
        casing, "MyEnum"
    ), "snake_case Enum name is converted to CamelCase in python"
@ -1,7 +1,5 @@
syntax = "proto3";

package casing_message_field_uppercase;

message Test {
  int32 UPPERCASE = 1;
  int32 UPPERCASE_V2 = 2;

@ -0,0 +1,14 @@
from betterproto.tests.output_betterproto.casing_message_field_uppercase import Test


def test_message_casing():
    message = Test()
    assert hasattr(
        message, "uppercase"
    ), "UPPERCASE attribute is converted to 'uppercase' in python"
    assert hasattr(
        message, "uppercase_v2"
    ), "UPPERCASE_V2 attribute is converted to 'uppercase_v2' in python"
    assert hasattr(
        message, "upper_camel_case"
    ), "UPPER_CAMEL_CASE attribute is converted to upper_camel_case in python"
@ -1,30 +1,22 @@
# Test cases that are expected to fail, e.g. unimplemented features or bug-fixes.
# Remove from list when fixed.
xfail = {
    "import_circular_dependency",
    "oneof_enum",  # 63
    "namespace_keywords",  # 70
    "namespace_builtin_types",  # 53
    "googletypes_struct",  # 9
    "googletypes_value",  # 9
    "enum_skipped_value",  # 93
    "import_capitalized_package",
    "example",  # This is the example in the readme. Not a test.
}

services = {
    "googletypes_request",
    "googletypes_response",
    "googletypes_response_embedded",
    "service",
    "service_separate_packages",
    "import_service_input_message",
    "googletypes_service_returns_empty",
    "googletypes_service_returns_googletype",
    "example_service",
    "empty_service",
    "service_uppercase",
}


# Indicate json sample messages to skip when testing that json (de)serialization
# is symmetrical because some cases legitimately are not symmetrical.
# Each key references the name of the test scenario and the values in the tuple
# are the names of the json files.
non_symmetrical_json = {"empty_repeated": ("empty_repeated",)}
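As a rough illustration of how the non_symmetrical_json mapping is meant to be read (keys name a test scenario, the tuple lists the json sample files whose round-trip is intentionally not symmetrical), a hypothetical consumer could look like the sketch below; the helper name is invented for illustration and is not part of this PR:

# Hypothetical helper, mirroring the structure documented in the comment above.
non_symmetrical_json = {"empty_repeated": ("empty_repeated",)}


def expect_symmetric_json(test_case_name: str, json_file_stem: str) -> bool:
    """True when serializing and re-parsing the sample should yield identical json."""
    return json_file_stem not in non_symmetrical_json.get(test_case_name, ())


assert expect_symmetric_json("empty_repeated", "empty_repeated") is False
assert expect_symmetric_json("bool", "bool") is True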
@ -1,7 +1,5 @@
syntax = "proto3";

package double;

message Test {
  double count = 1;
}

@ -0,0 +1,12 @@
syntax = "proto3";

message Test {
  enum MyEnum {
    ZERO = 0;
    ONE = 1;
    // TWO = 2;
    THREE = 3;
    FOUR = 4;
  }
  MyEnum x = 1;
}
@ -0,0 +1,18 @@
from betterproto.tests.output_betterproto.enum_skipped_value import (
    Test,
    TestMyEnum,
)
import pytest


@pytest.mark.xfail(reason="#93")
def test_message_attributes():
    assert (
        Test(x=TestMyEnum.ONE).to_dict()["x"] == "ONE"
    ), "MyEnum.ONE is not serialized to 'ONE'"
    assert (
        Test(x=TestMyEnum.THREE).to_dict()["x"] == "THREE"
    ), "MyEnum.THREE is not serialized to 'THREE'"
    assert (
        Test(x=TestMyEnum.FOUR).to_dict()["x"] == "FOUR"
    ), "MyEnum.FOUR is not serialized to 'FOUR'"
3 betterproto/tests/inputs/enums/enums.json Normal file
@ -0,0 +1,3 @@
{
  "greeting": "HEY"
}

14 betterproto/tests/inputs/enums/enums.proto Normal file
@ -0,0 +1,14 @@
syntax = "proto3";

// Enum for the different greeting types
enum Greeting {
  HI = 0;
  HEY = 1;
  // Formal greeting
  HELLO = 2;
}

message Test {
  // Greeting enum example
  Greeting greeting = 1;
}
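The enums.json sample pairs with this proto: betterproto renders enum fields by name in to_dict(), as the enum_skipped_value test earlier in this diff also asserts, so Greeting.HEY round-trips to the string "HEY". A small sketch of that round trip, assuming the generated module follows the same output_betterproto.<case> naming pattern as the other tests here:

from betterproto.tests.output_betterproto.enums import Greeting, Test

# to_dict() renders the enum member by name, matching enums.json.
message = Test(greeting=Greeting.HEY)
assert message.to_dict() == {"greeting": "HEY"}

# from_dict() accepts the name and restores the enum member.
parsed = Test().from_dict({"greeting": "HEY"})
assert parsed.greeting == Greeting.HEY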
8 betterproto/tests/inputs/example/example.proto Normal file
@ -0,0 +1,8 @@
syntax = "proto3";

package hello;

// Greeting represents a message you can tell a user.
message Greeting {
  string message = 1;
}

@ -1,7 +1,5 @@
syntax = "proto3";

package fixed;

message Test {
  fixed32 foo = 1;
  sfixed32 bar = 2;

@ -1,7 +1,5 @@
syntax = "proto3";

package googletypes;

import "google/protobuf/duration.proto";
import "google/protobuf/timestamp.proto";
import "google/protobuf/wrappers.proto";

@ -1,7 +1,5 @@
syntax = "proto3";

package googletypes_response;

import "google/protobuf/wrappers.proto";

// Tests that wrapped values can be used directly as return values
@ -1,18 +1,10 @@
from typing import (
    Any,
    Callable,
    Optional,
)

import pytest
from typing import Any, Callable, Optional

import betterproto.lib.google.protobuf as protobuf
from tests.mocks import MockChannel
from tests.output_betterproto.googletypes_response import (
    Input,
    TestStub,
)
import pytest

from betterproto.tests.mocks import MockChannel
from betterproto.tests.output_betterproto.googletypes_response import TestStub

test_cases = [
    (TestStub.get_double, protobuf.DoubleValue, 2.5),
@ -29,16 +21,15 @@ test_cases = [

@pytest.mark.asyncio
@pytest.mark.parametrize(["service_method", "wrapper_class", "value"], test_cases)
async def test_channel_receives_wrapped_type(
    service_method: Callable[[TestStub, Input], Any], wrapper_class: Callable, value
async def test_channel_recieves_wrapped_type(
    service_method: Callable[[TestStub], Any], wrapper_class: Callable, value
):
    wrapped_value = wrapper_class()
    wrapped_value.value = value
    channel = MockChannel(responses=[wrapped_value])
    service = TestStub(channel)
    method_param = Input()

    await service_method(service, method_param)
    await service_method(service)

    assert channel.requests[0]["response_type"] != Optional[type(value)]
    assert channel.requests[0]["response_type"] == type(wrapped_value)
@ -48,7 +39,7 @@ async def test_channel_receives_wrapped_type(
@pytest.mark.xfail
@pytest.mark.parametrize(["service_method", "wrapper_class", "value"], test_cases)
async def test_service_unwraps_response(
    service_method: Callable[[TestStub, Input], Any], wrapper_class: Callable, value
    service_method: Callable[[TestStub], Any], wrapper_class: Callable, value
):
    """
    grpclib does not unwrap wrapper values returned by services
@ -56,9 +47,8 @@ async def test_service_unwraps_response(
    wrapped_value = wrapper_class()
    wrapped_value.value = value
    service = TestStub(MockChannel(responses=[wrapped_value]))
    method_param = Input()

    response_value = await service_method(service, method_param)
    response_value = await service_method(service)

    assert response_value == value
    assert type(response_value) == type(value)
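The xfail'd test above documents the gap: grpclib hands back the google.protobuf wrapper message itself, so callers currently read .value manually instead of receiving the bare value. A minimal sketch of that distinction (not from this diff), built the same way the test builds its wrapper:

import betterproto.lib.google.protobuf as protobuf

wrapped = protobuf.DoubleValue()
wrapped.value = 2.5

# What a call currently returns: the wrapper message.
assert isinstance(wrapped, protobuf.DoubleValue)

# What test_service_unwraps_response would like returned instead: the bare value.
assert wrapped.value == 2.5
assert isinstance(wrapped.value, float)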
@ -1,7 +1,5 @@
syntax = "proto3";

package googletypes_response_embedded;

import "google/protobuf/wrappers.proto";

// Tests that wrapped values are supported as part of output message

@ -1,8 +1,7 @@
import pytest

from tests.mocks import MockChannel
from tests.output_betterproto.googletypes_response_embedded import (
    Input,
from betterproto.tests.mocks import MockChannel
from betterproto.tests.output_betterproto.googletypes_response_embedded import (
    Output,
    TestStub,
)
@ -27,7 +26,7 @@ async def test_service_passes_through_unwrapped_values_embedded_in_response():
    )

    service = TestStub(MockChannel(responses=[output]))
    response = await service.get_output(Input())
    response = await service.get_output()

    assert response.double_value == 10.0
    assert response.float_value == 12.0
@ -1,7 +1,5 @@
syntax = "proto3";

package googletypes_service_returns_empty;

import "google/protobuf/empty.proto";

service Test {

@ -1,7 +1,5 @@
syntax = "proto3";

package googletypes_service_returns_googletype;

import "google/protobuf/empty.proto";
import "google/protobuf/struct.proto";

@ -1,7 +1,5 @@
syntax = "proto3";

package googletypes_struct;

import "google/protobuf/struct.proto";

message Test {

@ -1,7 +1,5 @@
syntax = "proto3";

package googletypes_value;

import "google/protobuf/struct.proto";

// Tests that fields of type google.protobuf.Value can contain arbitrary JSON-values.
@ -0,0 +1,8 @@
syntax = "proto3";


package Capitalized;

message Message {

}

@ -1,7 +1,5 @@
syntax = "proto3";

package import_capitalized_package;

import "capitalized.proto";

// Tests that we can import from a package with a capital name, that looks like a nested type, but isn't.

@ -0,0 +1,7 @@
syntax = "proto3";

package package.childpackage;

message ChildMessage {

}

@ -1,7 +1,5 @@
syntax = "proto3";

package import_child_package_from_package;

import "package_message.proto";

// Tests generated imports when a message in a package refers to a message in a nested child package.
@ -2,7 +2,7 @@ syntax = "proto3";

import "child.proto";

package import_child_package_from_package.package;
package package;

message PackageMessage {
  package.childpackage.ChildMessage c = 1;

@ -0,0 +1,7 @@
syntax = "proto3";

package childpackage;

message Message {

}

@ -1,7 +1,5 @@
syntax = "proto3";

package import_child_package_from_root;

import "child.proto";

// Tests generated imports when a message in root refers to a message in a child package.

@ -1,7 +1,5 @@
syntax = "proto3";

package import_circular_dependency;

import "root.proto";
import "other.proto";

@ -26,5 +24,5 @@ import "other.proto";
// (root: Test & RootPackageMessage) <-------> (other: OtherPackageMessage)
message Test {
  RootPackageMessage message = 1;
  other.OtherPackageMessage other_value = 2;
  other.OtherPackageMessage other = 2;
}
@ -1,7 +1,7 @@
syntax = "proto3";

import "root.proto";
package import_circular_dependency.other;
package other;

message OtherPackageMessage {
  RootPackageMessage rootPackageMessage = 1;

@ -1,7 +1,5 @@
syntax = "proto3";

package import_circular_dependency;

message RootPackageMessage {

}

@ -0,0 +1,6 @@
syntax = "proto3";

package cousin.cousin_subpackage;

message CousinMessage {
}

@ -1,6 +1,6 @@
syntax = "proto3";

package import_cousin_package.test.subpackage;
package test.subpackage;

import "cousin.proto";
@ -0,0 +1,6 @@
syntax = "proto3";

package cousin.subpackage;

message CousinMessage {
}

@ -1,6 +1,6 @@
syntax = "proto3";

package import_cousin_package_same_name.test.subpackage;
package test.subpackage;

import "cousin.proto";

@ -1,7 +1,5 @@
syntax = "proto3";

package import_packages_same_name;

import "users_v1.proto";
import "posts_v1.proto";

@ -0,0 +1,7 @@
syntax = "proto3";

package posts.v1;

message Post {

}

@ -0,0 +1,7 @@
syntax = "proto3";

package users.v1;

message User {

}
@ -2,7 +2,7 @@ syntax = "proto3";

import "parent_package_message.proto";

package import_parent_package_from_child.parent.child;
package parent.child;

// Tests generated imports when a message refers to a message defined in its parent package

@ -1,6 +1,6 @@
syntax = "proto3";

package import_parent_package_from_child.parent;
package parent;

message ParentPackageMessage {
}

@ -1,6 +1,6 @@
syntax = "proto3";

package import_root_package_from_child.child;
package child;

import "root.proto";

@ -1,7 +1,5 @@
syntax = "proto3";

package import_root_package_from_child;


message RootMessage {
}

@ -1,7 +1,5 @@
syntax = "proto3";

package import_root_sibling;

import "sibling.proto";

// Tests generated imports when a message in the root package refers to another message in the root package

@ -1,7 +1,5 @@
syntax = "proto3";

package import_root_sibling;

message SiblingMessage {

}
Some files were not shown because too many files have changed in this diff.