46 Commits

Author SHA1 Message Date
Vasili Syrakis
ce9f492f50 Increment version to 1.2.3 2020-04-15 14:24:02 +10:00
Vasilios Syrakis
93a6334015 Update CHANGELOG.md 2020-04-15 14:21:30 +10:00
Adam Ehlers Nyholm Thomsen
36a14026d8 Fix issue that occurs with naming when proto is double nested (#21) 2020-04-15 14:10:43 +10:00
Vasilios Syrakis
04a2fcd3eb Merge pull request #31 from nat-n/fix_readme
Fix test instructions to match pipfile
2020-04-14 10:55:18 +10:00
Nat Noordanus
582a12577c Fix test instructions to match pipfile 2020-04-12 18:52:43 +02:00
Vasilios Syrakis
3616190451 Merge pull request #30 from nat-n/p36_support
#27 Add support for python 3.6
2020-04-08 09:37:48 +10:00
Nat Noordanus
9b990ee1bd Make pipenv play nice with the setup-python ci workflow 2020-04-05 15:58:12 +02:00
Vasilios Syrakis
72a77b0d65 Merge pull request #28 from tanishq-dubey/patch-1
Update README.md for pip syntax
2020-04-05 14:52:48 +10:00
Nat Noordanus
b2b36c8575 Apply black formatting 2020-04-03 19:54:19 +02:00
Nat Noordanus
203105f048 Add support for python 3.6
Changes:
- Update config and docs to reference 3.6
- Add backports of dataclasses and datetime.fromisoformat for python_version<"3.7"
- Support both 3.7 and 3.6 usages of undocumented __origin__ attribute on typing objects
- Make github ci run tests for python 3.6 as well
2020-04-03 19:52:19 +02:00
Tanishq Dubey
fe11f74227 Update README.md
Add quotes to the README so pip syntax is correct
2020-03-30 09:50:11 -04:00
Daniel G. Taylor
dc7a3e9bdf Update changelog 2020-01-30 17:48:12 -08:00
Daniel G. Taylor
f2e8afc609 Merge pull request #16 from cetanu/patch-1
Exclude empty lists from to_dict output
2020-01-30 17:31:25 -08:00
Daniel G. Taylor
dbd438e682 Update to emit empty lists if asked for defaults 2020-01-30 17:28:22 -08:00
Daniel G. Taylor
dce1c89fbe Merge branch 'master' into patch-1 2020-01-30 17:22:47 -08:00
Daniel G. Taylor
c78851b1b8 Merge pull request #12 from ulasozguler/master
Added `include_default_values` parameter to `to_dict` function
2020-01-30 17:19:34 -08:00
Vasilios Syrakis
4554d91f89 Exclude empty lists from to_dict output 2020-01-29 22:32:35 +11:00
ulas
c0170f4d80 Added include_default_values parameter to to_dict function. 2020-01-22 19:16:57 +03:00
Daniel G. Taylor
559b8833d8 Bump version to 1.2.2 2020-01-09 16:47:25 -08:00
Daniel G. Taylor
7ccef16579 Mention no proto 2, fixes #6 2020-01-09 16:43:45 -08:00
Daniel G. Taylor
d8785b4622 Merge pull request #10 from qix/master
Fix serialization of dataclass constructor parameters
2020-01-09 16:35:06 -08:00
Daniel G. Taylor
45e7a30300 Merge pull request #7 from ulasozguler/master
Fix - propagate `casing` param of `to_dict` function recursively
2020-01-09 16:32:29 -08:00
Josh Yudaken
d7559c22f8 Fix serialization of dataclass constructor parameters 2020-01-08 11:29:45 -05:00
ulas
f9c351a98d propagate casing param recursively. 2019-12-04 19:28:53 +03:00
Daniel G. Taylor
feea790116 Bump library version 2019-10-29 22:00:27 -07:00
Daniel G. Taylor
33f74f6a45 Fix comment indent bug; bump version 2019-10-29 21:59:23 -07:00
Daniel G. Taylor
3d5c12c532 Add changelog, version bump 2019-10-28 21:13:25 -07:00
Daniel G. Taylor
706bd5a475 Slightly simplify gRPC helper functions 2019-10-28 20:58:33 -07:00
Daniel G. Taylor
52beeb0d73 Fix typo in example 2019-10-28 20:44:57 -07:00
Daniel G. Taylor
7e2dc595db Autoformat files after rendering 2019-10-28 20:44:50 -07:00
Daniel G. Taylor
6fd9612ee1 Doc updates, version bump for release 2019-10-27 15:43:52 -07:00
Daniel G. Taylor
ba520f88a4 Install Protobuf include files on CI host 2019-10-27 15:40:33 -07:00
Daniel G. Taylor
b0b64fcbaf Fix tests attempt 3 2019-10-27 15:29:04 -07:00
Daniel G. Taylor
7900c7c9db Fix tests 2019-10-27 15:21:20 -07:00
Daniel G. Taylor
fcc273e294 Fix tests 2019-10-27 15:18:10 -07:00
Daniel G. Taylor
f820397751 Add missing optional types test 2019-10-27 15:14:06 -07:00
Daniel G. Taylor
16687211a2 Typing fixes 2019-10-27 15:13:51 -07:00
Daniel G. Taylor
eb5020db2a Fix bool parsing bug 2019-10-27 14:59:38 -07:00
Daniel G. Taylor
035793aec3 Support wrapper types 2019-10-27 14:55:25 -07:00
Daniel G. Taylor
c79535b614 Support Duration/Timestamp Google well-known types 2019-10-26 23:07:30 -07:00
Daniel G. Taylor
5daf61f64c Refactor default value code 2019-10-25 21:16:32 -07:00
Daniel G. Taylor
4679c571c3 Fix comment newlines 2019-10-25 12:28:26 -07:00
Daniel G. Taylor
ff8463cf12 Handle fields that clash with Python reserved keywords 2019-10-23 21:28:31 -07:00
Daniel G. Taylor
eff9021529 Some informational output from the plugin, do not overwrite __init__.py 2019-10-23 15:07:05 -07:00
Daniel G. Taylor
d43d5af5ce Better JSON casing support, renaming messages/fields 2019-10-23 15:06:34 -07:00
Daniel G. Taylor
ef0a1bf50c Use specific version of pypi publish image 2019-10-23 15:03:13 -07:00
26 changed files with 938 additions and 526 deletions


@@ -3,7 +3,34 @@ name: CI
on: [push, pull_request]
jobs:
build:
run-tests:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [ '3.6', '3.7' ]
name: Python ${{ matrix.python-version }} test
steps:
- uses: actions/checkout@v1
- uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
- uses: dschep/install-pipenv-action@v1
- name: Install dependencies
run: |
sudo apt install protobuf-compiler libprotobuf-dev
pipenv install --dev --python ${pythonLocation}/python
- name: Run tests
run: |
cp .env.default .env
pipenv run pip install -e .
pipenv run generate
pipenv run test
build-release:
runs-on: ubuntu-latest
steps:
@@ -14,18 +41,14 @@ jobs:
- uses: dschep/install-pipenv-action@v1
- name: Install dependencies
run: |
sudo apt install protobuf-compiler
pipenv install --dev
- name: Run tests
run: |
pipenv run generate
pipenv run test
sudo apt install protobuf-compiler libprotobuf-dev
pipenv install --dev --python ${pythonLocation}/python
- name: Build package
if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags')
run: pipenv run python setup.py sdist
- name: Publish package
if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags')
uses: pypa/gh-action-pypi-publish@master
uses: pypa/gh-action-pypi-publish@v1.0.0a0
with:
user: __token__
password: ${{ secrets.pypi }}

.gitignore (vendored): 2 changed lines

@@ -2,6 +2,8 @@
.vscode/settings.json
.mypy_cache
.pytest_cache
.python-version
build/
betterproto/tests/*.bin
betterproto/tests/*_pb2.py
betterproto/tests/*.py

CHANGELOG.md (new file): 54 lines added

@@ -0,0 +1,54 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [1.2.3] - 2020-04-15
- Exclude empty lists from `to_dict` by default [#16](https://github.com/danielgtaylor/python-betterproto/pull/16)
- Add `include_default_values` parameter for `to_dict` [#12](https://github.com/danielgtaylor/python-betterproto/pull/12)
- Fix class names being prepended with duplicates when using protocol buffers that are nested more than once [#21](https://github.com/danielgtaylor/python-betterproto/pull/21)
- Add support for python 3.6 [#30](https://github.com/danielgtaylor/python-betterproto/pull/30)
## [1.2.2] - 2020-01-09
- Mention lack of Proto 2 support in README.
- Fix serialization of constructor parameters [#10](https://github.com/danielgtaylor/python-betterproto/pull/10)
- Fix `casing` parameter propagation [#7](https://github.com/danielgtaylor/python-betterproto/pull/7)
## [1.2.1] - 2019-10-29
- Fix comment indentation bug in rendered gRPC methods.
## [1.2.0] - 2019-10-28
- Generated code output auto-formatting via [Black](https://github.com/psf/black)
- Simplified gRPC helper functions
## [1.1.0] - 2019-10-27
- Better JSON casing support
- Handle field names which clash with Python reserved words
- Better handling of default values from type introspection
- Support for Google Duration & Timestamp types
- Support for Google wrapper types
- Documentation updates
## [1.0.1] - 2019-10-22
- README to the PyPI details page
## [1.0.0] - 2019-10-22
- Initial release
[unreleased]: https://github.com/danielgtaylor/python-betterproto/compare/v1.2.3...HEAD
[1.2.3]: https://github.com/danielgtaylor/python-betterproto/compare/v1.2.2...v1.2.3
[1.2.2]: https://github.com/danielgtaylor/python-betterproto/compare/v1.2.1...v1.2.2
[1.2.1]: https://github.com/danielgtaylor/python-betterproto/compare/v1.2.0...v1.2.1
[1.2.0]: https://github.com/danielgtaylor/python-betterproto/compare/v1.1.0...v1.2.0
[1.1.0]: https://github.com/danielgtaylor/python-betterproto/compare/v1.0.1...v1.1.0
[1.0.1]: https://github.com/danielgtaylor/python-betterproto/compare/v1.0.0...v1.0.1
[1.0.0]: https://github.com/danielgtaylor/python-betterproto/releases/tag/v1.0.0

Pipfile: 10 changed lines

@@ -9,16 +9,24 @@ mypy = "*"
isort = "*"
pytest = "*"
rope = "*"
v = {editable = true,version = "*"}
[packages]
protobuf = "*"
jinja2 = "*"
grpclib = "*"
stringcase = "*"
black = "*"
backports-datetime-fromisoformat = "*"
dataclasses = "*"
[requires]
python_version = "3.7"
python_version = "3.6"
[scripts]
plugin = "protoc --plugin=protoc-gen-custom=betterproto/plugin.py --custom_out=output"
generate = "python betterproto/tests/generate.py"
test = "pytest ./betterproto/tests"
[pipenv]
allow_prereleases = true

Pipfile.lock (generated): 344 lines removed

@@ -1,344 +0,0 @@
{
"_meta": {
"hash": {
"sha256": "f698150037f2a8ac554e4d37ecd4619ba35d1aa570f5b641d048ec9c6b23eb40"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.7"
},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"grpclib": {
"hashes": [
"sha256:d19e2ea87cb073e5b0825dfee15336fd2b1c09278d271816e04c90faddc107ea"
],
"index": "pypi",
"version": "==0.3.0"
},
"h2": {
"hashes": [
"sha256:ac377fcf586314ef3177bfd90c12c7826ab0840edeb03f0f24f511858326049e",
"sha256:b8a32bd282594424c0ac55845377eea13fa54fe4a8db012f3a198ed923dc3ab4"
],
"version": "==3.1.1"
},
"hpack": {
"hashes": [
"sha256:0edd79eda27a53ba5be2dfabf3b15780928a0dff6eb0c60a3d6767720e970c89",
"sha256:8eec9c1f4bfae3408a3f30500261f7e6a65912dc138526ea054f9ad98892e9d2"
],
"version": "==3.0.0"
},
"hyperframe": {
"hashes": [
"sha256:5187962cb16dcc078f23cb5a4b110098d546c3f41ff2d4038a9896893bbd0b40",
"sha256:a9f5c17f2cc3c719b917c4f33ed1c61bd1f8dfac4b1bd23b7c80b3400971b41f"
],
"version": "==5.2.0"
},
"jinja2": {
"hashes": [
"sha256:74320bb91f31270f9551d46522e33af46a80c3d619f4a4bf42b3164d30b5911f",
"sha256:9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de"
],
"index": "pypi",
"version": "==2.10.3"
},
"markupsafe": {
"hashes": [
"sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473",
"sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161",
"sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235",
"sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5",
"sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff",
"sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b",
"sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1",
"sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e",
"sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183",
"sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66",
"sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1",
"sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1",
"sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e",
"sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b",
"sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905",
"sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735",
"sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d",
"sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e",
"sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d",
"sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c",
"sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21",
"sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2",
"sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5",
"sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b",
"sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6",
"sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f",
"sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f",
"sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"
],
"version": "==1.1.1"
},
"multidict": {
"hashes": [
"sha256:024b8129695a952ebd93373e45b5d341dbb87c17ce49637b34000093f243dd4f",
"sha256:041e9442b11409be5e4fc8b6a97e4bcead758ab1e11768d1e69160bdde18acc3",
"sha256:045b4dd0e5f6121e6f314d81759abd2c257db4634260abcfe0d3f7083c4908ef",
"sha256:047c0a04e382ef8bd74b0de01407e8d8632d7d1b4db6f2561106af812a68741b",
"sha256:068167c2d7bbeebd359665ac4fff756be5ffac9cda02375b5c5a7c4777038e73",
"sha256:148ff60e0fffa2f5fad2eb25aae7bef23d8f3b8bdaf947a65cdbe84a978092bc",
"sha256:1d1c77013a259971a72ddaa83b9f42c80a93ff12df6a4723be99d858fa30bee3",
"sha256:1d48bc124a6b7a55006d97917f695effa9725d05abe8ee78fd60d6588b8344cd",
"sha256:31dfa2fc323097f8ad7acd41aa38d7c614dd1960ac6681745b6da124093dc351",
"sha256:34f82db7f80c49f38b032c5abb605c458bac997a6c3142e0d6c130be6fb2b941",
"sha256:3d5dd8e5998fb4ace04789d1d008e2bb532de501218519d70bb672c4c5a2fc5d",
"sha256:4a6ae52bd3ee41ee0f3acf4c60ceb3f44e0e3bc52ab7da1c2b2aa6703363a3d1",
"sha256:4b02a3b2a2f01d0490dd39321c74273fed0568568ea0e7ea23e02bd1fb10a10b",
"sha256:4b843f8e1dd6a3195679d9838eb4670222e8b8d01bc36c9894d6c3538316fa0a",
"sha256:5de53a28f40ef3c4fd57aeab6b590c2c663de87a5af76136ced519923d3efbb3",
"sha256:61b2b33ede821b94fa99ce0b09c9ece049c7067a33b279f343adfe35108a4ea7",
"sha256:6a3a9b0f45fd75dc05d8e93dc21b18fc1670135ec9544d1ad4acbcf6b86781d0",
"sha256:76ad8e4c69dadbb31bad17c16baee61c0d1a4a73bed2590b741b2e1a46d3edd0",
"sha256:7ba19b777dc00194d1b473180d4ca89a054dd18de27d0ee2e42a103ec9b7d014",
"sha256:7c1b7eab7a49aa96f3db1f716f0113a8a2e93c7375dd3d5d21c4941f1405c9c5",
"sha256:7fc0eee3046041387cbace9314926aa48b681202f8897f8bff3809967a049036",
"sha256:8ccd1c5fff1aa1427100ce188557fc31f1e0a383ad8ec42c559aabd4ff08802d",
"sha256:8e08dd76de80539d613654915a2f5196dbccc67448df291e69a88712ea21e24a",
"sha256:c18498c50c59263841862ea0501da9f2b3659c00db54abfbf823a80787fde8ce",
"sha256:c49db89d602c24928e68c0d510f4fcf8989d77defd01c973d6cbe27e684833b1",
"sha256:ce20044d0317649ddbb4e54dab3c1bcc7483c78c27d3f58ab3d0c7e6bc60d26a",
"sha256:d1071414dd06ca2eafa90c85a079169bfeb0e5f57fd0b45d44c092546fcd6fd9",
"sha256:d3be11ac43ab1a3e979dac80843b42226d5d3cccd3986f2e03152720a4297cd7",
"sha256:db603a1c235d110c860d5f39988ebc8218ee028f07a7cbc056ba6424372ca31b"
],
"version": "==4.5.2"
},
"protobuf": {
"hashes": [
"sha256:125713564d8cfed7610e52444c9769b8dcb0b55e25cc7841f2290ee7bc86636f",
"sha256:1accdb7a47e51503be64d9a57543964ba674edac103215576399d2d0e34eac77",
"sha256:27003d12d4f68e3cbea9eb67427cab3bfddd47ff90670cb367fcd7a3a89b9657",
"sha256:3264f3c431a631b0b31e9db2ae8c927b79fc1a7b1b06b31e8e5bcf2af91fe896",
"sha256:3c5ab0f5c71ca5af27143e60613729e3488bb45f6d3f143dc918a20af8bab0bf",
"sha256:45dcf8758873e3f69feab075e5f3177270739f146255225474ee0b90429adef6",
"sha256:56a77d61a91186cc5676d8e11b36a5feb513873e4ae88d2ee5cf530d52bbcd3b",
"sha256:5984e4947bbcef5bd849d6244aec507d31786f2dd3344139adc1489fb403b300",
"sha256:6b0441da73796dd00821763bb4119674eaf252776beb50ae3883bed179a60b2a",
"sha256:6f6677c5ade94d4fe75a912926d6796d5c71a2a90c2aeefe0d6f211d75c74789",
"sha256:84a825a9418d7196e2acc48f8746cf1ee75877ed2f30433ab92a133f3eaf8fbe",
"sha256:b842c34fe043ccf78b4a6cf1019d7b80113707d68c88842d061fa2b8fb6ddedc",
"sha256:ca33d2f09dae149a1dcf942d2d825ebb06343b77b437198c9e2ef115cf5d5bc1",
"sha256:db83b5c12c0cd30150bb568e6feb2435c49ce4e68fe2d7b903113f0e221e58fe",
"sha256:f50f3b1c5c1c1334ca7ce9cad5992f098f460ffd6388a3cabad10b66c2006b09",
"sha256:f99f127909731cafb841c52f9216e447d3e4afb99b17bebfad327a75aee206de"
],
"index": "pypi",
"version": "==3.10.0"
},
"six": {
"hashes": [
"sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
"sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
],
"version": "==1.12.0"
}
},
"develop": {
"atomicwrites": {
"hashes": [
"sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
"sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
],
"version": "==1.3.0"
},
"attrs": {
"hashes": [
"sha256:ec20e7a4825331c1b5ebf261d111e16fa9612c1f7a5e1f884f12bd53a664dfd2",
"sha256:f913492e1663d3c36f502e5e9ba6cd13cf19d7fab50aa13239e420fef95e1396"
],
"version": "==19.2.0"
},
"entrypoints": {
"hashes": [
"sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
"sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
],
"version": "==0.3"
},
"flake8": {
"hashes": [
"sha256:19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548",
"sha256:8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696"
],
"index": "pypi",
"version": "==3.7.8"
},
"importlib-metadata": {
"hashes": [
"sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26",
"sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af"
],
"markers": "python_version < '3.8'",
"version": "==0.23"
},
"isort": {
"hashes": [
"sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
"sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
],
"index": "pypi",
"version": "==4.3.21"
},
"mccabe": {
"hashes": [
"sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
"sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
],
"version": "==0.6.1"
},
"more-itertools": {
"hashes": [
"sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832",
"sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4"
],
"version": "==7.2.0"
},
"mypy": {
"hashes": [
"sha256:1d98fd818ad3128a5408148c9e4a5edce6ed6b58cc314283e631dd5d9216527b",
"sha256:22ee018e8fc212fe601aba65d3699689dd29a26410ef0d2cc1943de7bec7e3ac",
"sha256:3a24f80776edc706ec8d05329e854d5b9e464cd332e25cde10c8da2da0a0db6c",
"sha256:42a78944e80770f21609f504ca6c8173f7768043205b5ac51c9144e057dcf879",
"sha256:4b2b20106973548975f0c0b1112eceb4d77ed0cafe0a231a1318f3b3a22fc795",
"sha256:591a9625b4d285f3ba69f541c84c0ad9e7bffa7794da3fa0585ef13cf95cb021",
"sha256:5b4b70da3d8bae73b908a90bb2c387b977e59d484d22c604a2131f6f4397c1a3",
"sha256:84edda1ffeda0941b2ab38ecf49302326df79947fa33d98cdcfbf8ca9cf0bb23",
"sha256:b2b83d29babd61b876ae375786960a5374bba0e4aba3c293328ca6ca5dc448dd",
"sha256:cc4502f84c37223a1a5ab700649b5ab1b5e4d2bf2d426907161f20672a21930b",
"sha256:e29e24dd6e7f39f200a5bb55dcaa645d38a397dd5a6674f6042ef02df5795046"
],
"index": "pypi",
"version": "==0.730"
},
"mypy-extensions": {
"hashes": [
"sha256:a161e3b917053de87dbe469987e173e49fb454eca10ef28b48b384538cc11458"
],
"version": "==0.4.2"
},
"packaging": {
"hashes": [
"sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47",
"sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108"
],
"version": "==19.2"
},
"pluggy": {
"hashes": [
"sha256:0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6",
"sha256:fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34"
],
"version": "==0.13.0"
},
"py": {
"hashes": [
"sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
"sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
],
"version": "==1.8.0"
},
"pycodestyle": {
"hashes": [
"sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
"sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
],
"version": "==2.5.0"
},
"pyflakes": {
"hashes": [
"sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
"sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
],
"version": "==2.1.1"
},
"pyparsing": {
"hashes": [
"sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80",
"sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"
],
"version": "==2.4.2"
},
"pytest": {
"hashes": [
"sha256:7e4800063ccfc306a53c461442526c5571e1462f61583506ce97e4da6a1d88c8",
"sha256:ca563435f4941d0cb34767301c27bc65c510cb82e90b9ecf9cb52dc2c63caaa0"
],
"index": "pypi",
"version": "==5.2.1"
},
"rope": {
"hashes": [
"sha256:6b728fdc3e98a83446c27a91fc5d56808a004f8beab7a31ab1d7224cecc7d969",
"sha256:c5c5a6a87f7b1a2095fb311135e2a3d1f194f5ecb96900fdd0a9100881f48aaf",
"sha256:f0dcf719b63200d492b85535ebe5ea9b29e0d0b8aebeb87fe03fc1a65924fdaf"
],
"index": "pypi",
"version": "==0.14.0"
},
"six": {
"hashes": [
"sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
"sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
],
"version": "==1.12.0"
},
"typed-ast": {
"hashes": [
"sha256:18511a0b3e7922276346bcb47e2ef9f38fb90fd31cb9223eed42c85d1312344e",
"sha256:262c247a82d005e43b5b7f69aff746370538e176131c32dda9cb0f324d27141e",
"sha256:2b907eb046d049bcd9892e3076c7a6456c93a25bebfe554e931620c90e6a25b0",
"sha256:354c16e5babd09f5cb0ee000d54cfa38401d8b8891eefa878ac772f827181a3c",
"sha256:4e0b70c6fc4d010f8107726af5fd37921b666f5b31d9331f0bd24ad9a088e631",
"sha256:630968c5cdee51a11c05a30453f8cd65e0cc1d2ad0d9192819df9978984529f4",
"sha256:66480f95b8167c9c5c5c87f32cf437d585937970f3fc24386f313a4c97b44e34",
"sha256:71211d26ffd12d63a83e079ff258ac9d56a1376a25bc80b1cdcdf601b855b90b",
"sha256:95bd11af7eafc16e829af2d3df510cecfd4387f6453355188342c3e79a2ec87a",
"sha256:bc6c7d3fa1325a0c6613512a093bc2a2a15aeec350451cbdf9e1d4bffe3e3233",
"sha256:cc34a6f5b426748a507dd5d1de4c1978f2eb5626d51326e43280941206c209e1",
"sha256:d755f03c1e4a51e9b24d899561fec4ccaf51f210d52abdf8c07ee2849b212a36",
"sha256:d7c45933b1bdfaf9f36c579671fec15d25b06c8398f113dab64c18ed1adda01d",
"sha256:d896919306dd0aa22d0132f62a1b78d11aaf4c9fc5b3410d3c666b818191630a",
"sha256:ffde2fbfad571af120fcbfbbc61c72469e72f550d676c3342492a9dfdefb8f12"
],
"version": "==1.4.0"
},
"typing-extensions": {
"hashes": [
"sha256:2ed632b30bb54fc3941c382decfd0ee4148f5c591651c9272473fea2c6397d95",
"sha256:b1edbbf0652660e32ae780ac9433f4231e7339c7f9a8057d0f042fcbcea49b87",
"sha256:d8179012ec2c620d3791ca6fe2bf7979d979acdbef1fca0bc56b37411db682ed"
],
"version": "==3.7.4"
},
"wcwidth": {
"hashes": [
"sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
"sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
],
"version": "==0.1.7"
},
"zipp": {
"hashes": [
"sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e",
"sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"
],
"version": "==0.6.0"
}
}
}


@@ -2,14 +2,15 @@
![](https://github.com/danielgtaylor/python-betterproto/workflows/CI/badge.svg)
This project aims to provide an improved experience when using Protobuf / gRPC in a modern Python environment by making use of modern language features and generating readable, understandable, idiomatic Python code. It will not support legacy features or environments. The following are supported:
This project aims to provide an improved experience when using Protobuf / gRPC in a modern Python environment by making use of modern language features and generating readable, understandable, idiomatic Python code. It will not support legacy features or environments (e.g. Protobuf 2). The following are supported:
- Protobuf 3 & gRPC code generation
- Both binary & JSON serialization is built-in
- Python 3.7+ making use of:
- Python 3.6+ making use of:
- Enums
- Dataclasses
- `async`/`await`
- Timezone-aware `datetime` and `timedelta` objects
- Relative imports
- Mypy type checking
@@ -34,6 +35,8 @@ This project exists because I am unhappy with the state of the official Google p
- Much code looks like C++ or Java ported 1:1 to Python
- Capitalized function names like `HasField()` and `SerializeToString()`
- Uses `SerializeToString()` rather than the built-in `__bytes__()`
- Special wrapped types don't use Python's `None`
- Timestamp/duration types don't use Python's built-in `datetime` module
This project is a reimplementation from the ground up focused on idiomatic modern Python to help fix some of the above. While it may not be a 1:1 drop-in replacement due to changed method names and call patterns, the wire format is identical.
@@ -43,7 +46,7 @@ First, install the package. Note that the `[compiler]` feature flag tells it to
```sh
# Install both the library and compiler
$ pip install betterproto[compiler]
$ pip install "betterproto[compiler]"
# Install just the library (to use the generated code output)
$ pip install betterproto
@@ -155,7 +158,7 @@ You can use it like so (enable async in the interactive shell first):
EchoResponse(values=["hello", "hello"])
>>> async for response in service.echo_stream(value="hello", extra_times=1)
print(response)
print(response)
EchoStreamResponse(value="hello")
EchoStreamResponse(value="hello")
@@ -168,6 +171,12 @@ Both serializing and parsing are supported to/from JSON and Python dictionaries
- Dicts: `Message().to_dict()`, `Message().from_dict(...)`
- JSON: `Message().to_json()`, `Message().from_json(...)`
For compatibility the default is to convert field names to `camelCase`. You can control this behavior by passing a casing value, e.g:
```py
>>> MyMessage().to_dict(casing=betterproto.Casing.SNAKE)
```
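This change set also adds an `include_default_values` flag to `to_dict` (see the updated signature in the library diff further down). A minimal sketch reusing the hypothetical `MyMessage` from the example above; the keys shown in the output are illustrative only:
```py
>>> # Zero-valued fields are omitted by default...
>>> MyMessage().to_dict()
{}
>>> # ...but can be included explicitly when defaults are wanted.
>>> MyMessage().to_dict(include_default_values=True)
{'someField': 0, 'anotherField': ''}
```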
### Determining if a message was sent
Sometimes it is useful to be able to determine whether a message has been sent on the wire. This is how the Google wrapper types work to let you know whether a value is unset, set as the default (zero value), or set as something else, for example.
@@ -238,9 +247,56 @@ Again this is a little different than the official Google code generator:
["foo", "foo's value"]
```
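The `["foo", "foo's value"]` result above comes from the `which_one_of` helper, whose signature appears later in this diff: it takes a message and a one-of group name and returns a `(field_name, value)` tuple for whichever member of the group is currently set. A hedged sketch of a typical call; the `Test` message and the group name `"group"` are placeholders rather than definitions from this change set:
```py
>>> import betterproto
>>> # Setting a group member in the constructor marks it as the selected one-of value.
>>> t = Test(foo="foo's value")
>>> name, value = betterproto.which_one_of(t, "group")
>>> name
'foo'
>>> value
"foo's value"
```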
### Well-Known Google Types
Google provides several well-known message types like a timestamp, duration, and several wrappers used to provide optional zero value support. Each of these has a special JSON representation and is handled a little differently from normal messages. The Python mapping for these is as follows:
| Google Message | Python Type | Default |
| --------------------------- | ---------------------------------------- | ---------------------- |
| `google.protobuf.duration` | [`datetime.timedelta`][td] | `0` |
| `google.protobuf.timestamp` | Timezone-aware [`datetime.datetime`][dt] | `1970-01-01T00:00:00Z` |
| `google.protobuf.*Value` | `Optional[...]` | `None` |
[td]: https://docs.python.org/3/library/datetime.html#timedelta-objects
[dt]: https://docs.python.org/3/library/datetime.html#datetime.datetime
For the wrapper types, the Python type corresponds to the wrapped type, e.g. `google.protobuf.BoolValue` becomes `Optional[bool]` while `google.protobuf.Int32Value` becomes `Optional[int]`. All of the optional values default to `None`, so don't forget to check for that possible state. Given:
```protobuf
syntax = "proto3";
import "google/protobuf/duration.proto";
import "google/protobuf/timestamp.proto";
import "google/protobuf/wrappers.proto";
message Test {
google.protobuf.BoolValue maybe = 1;
google.protobuf.Timestamp ts = 2;
google.protobuf.Duration duration = 3;
}
```
You can do stuff like:
```py
>>> t = Test().from_dict({"maybe": True, "ts": "2019-01-01T12:00:00Z", "duration": "1.200s"})
>>> t
Test(maybe=True, ts=datetime.datetime(2019, 1, 1, 12, 0, tzinfo=datetime.timezone.utc), duration=datetime.timedelta(seconds=1, microseconds=200000))
>>> t.ts - t.duration
datetime.datetime(2019, 1, 1, 11, 59, 58, 800000, tzinfo=datetime.timezone.utc)
>>> t.ts.isoformat()
'2019-01-01T12:00:00+00:00'
>>> t.maybe = None
>>> t.to_dict()
{'ts': '2019-01-01T12:00:00Z', 'duration': '1.200s'}
```
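Because unset wrapper fields are `None` and the serializer skips `None` values (see the `if value is None: continue` branch added to `__bytes__` in the library diff below), the same omission carries through the binary round trip. A small sketch continuing from `t` above, with `maybe` left as `None`:
```py
>>> raw = bytes(t)             # serialize; the unset BoolValue field is skipped
>>> t2 = Test().parse(raw)     # parse the wire bytes back into a message
>>> t2.maybe is None           # wrapped fields default to None when absent
True
>>> t2.ts == t.ts and t2.duration == t.duration
True
```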
## Development
First, make sure you have Python 3.7+ and `pipenv` installed, along with the official [Protobuf Compiler](https://github.com/protocolbuffers/protobuf/releases) for your platform. Then:
First, make sure you have Python 3.6+ and `pipenv` installed, along with the official [Protobuf Compiler](https://github.com/protocolbuffers/protobuf/releases) for your platform. Then:
```sh
# Get set up with the virtual env & dependencies
@@ -267,7 +323,7 @@ Here's how to run the tests.
$ pipenv run generate
# Run the tests
$ pipenv run tests
$ pipenv run test
```
### TODO
@@ -295,14 +351,14 @@ $ pipenv run tests
- [x] Bytes as base64
- [ ] Any support
- [x] Enum strings
- [ ] Well known types support (timestamp, duration, wrappers)
- [ ] Support different casing (orig vs. camel vs. others?)
- [x] Well known types support (timestamp, duration, wrappers)
- [x] Support different casing (orig vs. camel vs. others?)
- [ ] Async service stubs
- [x] Unary-unary
- [x] Server streaming response
- [ ] Client streaming request
- [ ] Renaming messages and fields to conform to Python name standards
- [ ] Renaming clashes with language keywords and standard library top-level packages
- [x] Renaming messages and fields to conform to Python name standards
- [x] Renaming clashes with language keywords
- [x] Python package
- [x] Automate running tests
- [ ] Cleanup!


@@ -3,8 +3,10 @@ import enum
import inspect
import json
import struct
import sys
from abc import ABC
from base64 import b64encode, b64decode
from datetime import datetime, timedelta, timezone
from typing import (
Any,
AsyncGenerator,
@@ -20,10 +22,24 @@ from typing import (
TypeVar,
Union,
get_type_hints,
TYPE_CHECKING,
)
import grpclib.client
import grpclib.const
import stringcase
from .casing import safe_snake_case
if TYPE_CHECKING:
from grpclib._protocols import IProtoMessage
if not (sys.version_info.major == 3 and sys.version_info.minor >= 7):
# Apply backport of datetime.fromisoformat from 3.7
from backports.datetime_fromisoformat import MonkeyPatch
MonkeyPatch.patch_fromisoformat()
# Proto 3 data types
TYPE_ENUM = "enum"
@@ -101,6 +117,17 @@ WIRE_FIXED_64_TYPES = [TYPE_DOUBLE, TYPE_FIXED64, TYPE_SFIXED64]
WIRE_LEN_DELIM_TYPES = [TYPE_STRING, TYPE_BYTES, TYPE_MESSAGE, TYPE_MAP]
# Protobuf datetimes start at the Unix Epoch in 1970 in UTC.
DATETIME_ZERO = datetime(1970, 1, 1, tzinfo=timezone.utc)
class Casing(enum.Enum):
"""Casing constants for serialization."""
CAMEL = stringcase.camelcase
SNAKE = stringcase.snakecase
class _PLACEHOLDER:
pass
@@ -108,18 +135,6 @@ class _PLACEHOLDER:
PLACEHOLDER: Any = _PLACEHOLDER()
def get_default(proto_type: str) -> Any:
"""Get the default (zero value) for a given type."""
return {
TYPE_BOOL: False,
TYPE_FLOAT: 0.0,
TYPE_DOUBLE: 0.0,
TYPE_STRING: "",
TYPE_BYTES: b"",
TYPE_MAP: {},
}.get(proto_type, 0)
@dataclasses.dataclass(frozen=True)
class FieldMetadata:
"""Stores internal metadata used for parsing & serialization."""
@@ -129,9 +144,11 @@ class FieldMetadata:
# Protobuf type name
proto_type: str
# Map information if the proto_type is a map
map_types: Optional[Tuple[str, str]]
map_types: Optional[Tuple[str, str]] = None
# Groups several "one-of" fields together
group: Optional[str]
group: Optional[str] = None
# Describes the wrapped type (e.g. when using google.protobuf.BoolValue)
wraps: Optional[str] = None
@staticmethod
def get(field: dataclasses.Field) -> "FieldMetadata":
@@ -145,11 +162,14 @@ def dataclass_field(
*,
map_types: Optional[Tuple[str, str]] = None,
group: Optional[str] = None,
wraps: Optional[str] = None,
) -> dataclasses.Field:
"""Creates a dataclass field with attached protobuf metadata."""
return dataclasses.field(
default=PLACEHOLDER,
metadata={"betterproto": FieldMetadata(number, proto_type, map_types, group)},
metadata={
"betterproto": FieldMetadata(number, proto_type, map_types, group, wraps)
},
)
@@ -222,8 +242,10 @@ def bytes_field(number: int, group: Optional[str] = None) -> Any:
return dataclass_field(number, TYPE_BYTES, group=group)
def message_field(number: int, group: Optional[str] = None) -> Any:
return dataclass_field(number, TYPE_MESSAGE, group=group)
def message_field(
number: int, group: Optional[str] = None, wraps: Optional[str] = None
) -> Any:
return dataclass_field(number, TYPE_MESSAGE, group=group, wraps=wraps)
def map_field(
@@ -274,7 +296,7 @@ def encode_varint(value: int) -> bytes:
return bytes(b + [bits])
def _preprocess_single(proto_type: str, value: Any) -> bytes:
def _preprocess_single(proto_type: str, wraps: str, value: Any) -> bytes:
"""Adjusts values before serialization."""
if proto_type in [
TYPE_ENUM,
@@ -297,16 +319,37 @@ def _preprocess_single(proto_type: str, value: Any) -> bytes:
elif proto_type == TYPE_STRING:
return value.encode("utf-8")
elif proto_type == TYPE_MESSAGE:
if isinstance(value, datetime):
# Convert the `datetime` to a timestamp message.
seconds = int(value.timestamp())
nanos = int(value.microsecond * 1e3)
value = _Timestamp(seconds=seconds, nanos=nanos)
elif isinstance(value, timedelta):
# Convert the `timedelta` to a duration message.
total_ms = value // timedelta(microseconds=1)
seconds = int(total_ms / 1e6)
nanos = int((total_ms % 1e6) * 1e3)
value = _Duration(seconds=seconds, nanos=nanos)
elif wraps:
if value is None:
return b""
value = _get_wrapper(wraps)(value=value)
return bytes(value)
return value
def _serialize_single(
field_number: int, proto_type: str, value: Any, *, serialize_empty: bool = False
field_number: int,
proto_type: str,
value: Any,
*,
serialize_empty: bool = False,
wraps: str = "",
) -> bytes:
"""Serializes a single field and value."""
value = _preprocess_single(proto_type, value)
value = _preprocess_single(proto_type, wraps, value)
output = b""
if proto_type in WIRE_VARINT_TYPES:
@@ -319,7 +362,7 @@ def _serialize_single(
key = encode_varint((field_number << 3) | 1)
output += key + value
elif proto_type in WIRE_LEN_DELIM_TYPES:
if len(value) or serialize_empty:
if len(value) or serialize_empty or wraps:
key = encode_varint((field_number << 3) | 2)
output += key + encode_varint(len(value)) + value
else:
@@ -359,7 +402,6 @@ def parse_fields(value: bytes) -> Generator[ParsedField, None, None]:
while i < len(value):
start = i
num_wire, i = decode_varint(value, i)
# print(num_wire, i)
number = num_wire >> 3
wire_type = num_wire & 0x7
@@ -375,8 +417,6 @@ def parse_fields(value: bytes) -> Generator[ParsedField, None, None]:
elif wire_type == 5:
decoded, i = value[i : i + 4], i + 4
# print(ParsedField(number=number, wire_type=wire_type, value=decoded))
yield ParsedField(
number=number, wire_type=wire_type, value=decoded, raw=value[start:i]
)
@@ -393,14 +433,22 @@ class Message(ABC):
to go between Python, binary and JSON protobuf message representations.
"""
_serialized_on_wire: bool
_unknown_fields: bytes
_group_map: Dict[str, dict]
def __post_init__(self) -> None:
# Keep track of whether every field was default
all_sentinel = True
# Set a default value for each field in the class after `__init__` has
# already been run.
group_map = {"fields": {}, "groups": {}}
group_map: Dict[str, dict] = {"fields": {}, "groups": {}}
for field in dataclasses.fields(self):
meta = FieldMetadata.get(field)
if meta.group:
# This is part of a one-of group.
group_map["fields"][field.name] = meta.group
if meta.group not in group_map["groups"]:
@@ -409,6 +457,7 @@ class Message(ABC):
if getattr(self, field.name) != PLACEHOLDER:
# Skip anything not set to the sentinel value
all_sentinel = False
if meta.group:
# This was set, so make it the selected value of the one-of.
@@ -419,7 +468,7 @@ class Message(ABC):
setattr(self, field.name, self._get_field_default(field, meta))
# Now that all the defaults are set, reset it!
self.__dict__["_serialized_on_wire"] = False
self.__dict__["_serialized_on_wire"] = not all_sentinel
self.__dict__["_unknown_fields"] = b""
self.__dict__["_group_map"] = group_map
@@ -450,6 +499,11 @@ class Message(ABC):
meta = FieldMetadata.get(field)
value = getattr(self, field.name)
if value is None:
# Optional items should be skipped. This is used for the Google
# wrapper types.
continue
# Being selected in a a group means this field is the one that is
# currently set in a `oneof` group, so it must be serialized even
# if the value is the default zero value.
@@ -457,42 +511,48 @@ class Message(ABC):
if meta.group and self._group_map["groups"][meta.group]["current"] == field:
selected_in_group = True
if isinstance(value, list):
if not len(value) and not selected_in_group:
# Empty values are not serialized
continue
serialize_empty = False
if isinstance(value, Message) and value._serialized_on_wire:
# Empty messages can still be sent on the wire if they were
# set (or received empty).
serialize_empty = True
if value == self._get_field_default(field, meta) and not (
selected_in_group or serialize_empty
):
# Default (zero) values are not serialized. Two exceptions are
# if this is the selected oneof item or if we know we have to
# serialize an empty message (i.e. zero value was explicitly
# set by the user).
continue
if isinstance(value, list):
if meta.proto_type in PACKED_TYPES:
# Packed lists look like a length-delimited field. First,
# preprocess/encode each value into a buffer and then
# treat it like a field of raw bytes.
buf = b""
for item in value:
buf += _preprocess_single(meta.proto_type, item)
buf += _preprocess_single(meta.proto_type, "", item)
output += _serialize_single(meta.number, TYPE_BYTES, buf)
else:
for item in value:
output += _serialize_single(meta.number, meta.proto_type, item)
output += _serialize_single(
meta.number, meta.proto_type, item, wraps=meta.wraps or ""
)
elif isinstance(value, dict):
if not len(value) and not selected_in_group:
# Empty values are not serialized
continue
for k, v in value.items():
assert meta.map_types
sk = _serialize_single(1, meta.map_types[0], k)
sv = _serialize_single(2, meta.map_types[1], v)
output += _serialize_single(meta.number, meta.proto_type, sk + sv)
else:
if value == get_default(meta.proto_type) and not selected_in_group:
# Default (zero) values are not serialized
continue
serialize_empty = False
if isinstance(value, Message) and value._serialized_on_wire:
serialize_empty = True
output += _serialize_single(
meta.number, meta.proto_type, value, serialize_empty=serialize_empty
meta.number,
meta.proto_type,
value,
serialize_empty=serialize_empty,
wraps=meta.wraps or "",
)
return output + self._unknown_fields
@@ -500,30 +560,45 @@ class Message(ABC):
# For compatibility with other libraries
SerializeToString = __bytes__
def _cls_for(self, field: dataclasses.Field, index: int = 0) -> Type:
"""Get the message class for a field from the type hints."""
def _type_hint(self, field_name: str) -> Type:
module = inspect.getmodule(self.__class__)
type_hints = get_type_hints(self.__class__, vars(module))
cls = type_hints[field.name]
return type_hints[field_name]
def _cls_for(self, field: dataclasses.Field, index: int = 0) -> Type:
"""Get the message class for a field from the type hints."""
cls = self._type_hint(field.name)
if hasattr(cls, "__args__") and index >= 0:
cls = type_hints[field.name].__args__[index]
cls = cls.__args__[index]
return cls
def _get_field_default(self, field: dataclasses.Field, meta: FieldMetadata) -> Any:
t = self._cls_for(field, index=-1)
t = self._type_hint(field.name)
value: Any = 0
if meta.proto_type == TYPE_MAP:
# Maps cannot be repeated, so we check these first.
value = {}
elif hasattr(t, "__args__") and len(t.__args__) == 1:
# Anything else with type args is a list.
value = []
elif meta.proto_type == TYPE_MESSAGE:
# Message means creating an instance of the right type.
value = t()
if hasattr(t, "__origin__"):
if t.__origin__ in (dict, Dict):
# This is some kind of map (dict in Python).
value = {}
elif t.__origin__ in (list, List):
# This is some kind of list (repeated) field.
value = []
elif t.__origin__ == Union and t.__args__[1] == type(None):
# This is an optional (wrapped) field. For setting the default we
# really don't care what kind of field it is.
value = None
else:
value = t()
elif issubclass(t, Enum):
# Enums always default to zero.
value = 0
elif t == datetime:
# Offsets are relative to 1970-01-01T00:00:00Z
value = DATETIME_ZERO
else:
value = get_default(meta.proto_type)
# This is either a primitive scalar or another message type. Calling
# it should result in its zero value.
value = t()
return value
@@ -540,6 +615,9 @@ class Message(ABC):
elif meta.proto_type in [TYPE_SINT32, TYPE_SINT64]:
# Undo zig-zag encoding
value = (value >> 1) ^ (-(value & 1))
elif meta.proto_type == TYPE_BOOL:
# Booleans use a varint encoding, so convert it to true/false.
value = value > 0
elif wire_type in [WIRE_FIXED_32, WIRE_FIXED_64]:
fmt = _pack_fmt(meta.proto_type)
value = struct.unpack(fmt, value)[0]
@@ -548,8 +626,18 @@ class Message(ABC):
value = value.decode("utf-8")
elif meta.proto_type == TYPE_MESSAGE:
cls = self._cls_for(field)
value = cls().parse(value)
value._serialized_on_wire = True
if cls == datetime:
value = _Timestamp().parse(value).to_datetime()
elif cls == timedelta:
value = _Duration().parse(value).to_timedelta()
elif meta.wraps:
# This is a Google wrapper value message around a single
# scalar type.
value = _get_wrapper(meta.wraps)().parse(value).value
else:
value = cls().parse(value)
value._serialized_on_wire = True
elif meta.proto_type == TYPE_MAP:
# TODO: This is slow, use a cache to make it faster since each
# key/value pair will recreate the class.
@@ -624,48 +712,68 @@ class Message(ABC):
def FromString(cls: Type[T], data: bytes) -> T:
return cls().parse(data)
def to_dict(self) -> dict:
def to_dict(
self, casing: Casing = Casing.CAMEL, include_default_values: bool = False
) -> dict:
"""
Returns a dict representation of this message instance which can be
used to serialize to e.g. JSON.
used to serialize to e.g. JSON. Defaults to camel casing for
compatibility but can be set to other modes.
`include_default_values` can be set to `True` to include default
values of fields. E.g. an `int32` type field with `0` value will
not be in returned dict if `include_default_values` is set to
`False`.
"""
output: Dict[str, Any] = {}
for field in dataclasses.fields(self):
meta = FieldMetadata.get(field)
v = getattr(self, field.name)
cased_name = casing(field.name).rstrip("_") # type: ignore
if meta.proto_type == "message":
if isinstance(v, list):
if isinstance(v, datetime):
if v != DATETIME_ZERO or include_default_values:
output[cased_name] = _Timestamp.timestamp_to_json(v)
elif isinstance(v, timedelta):
if v != timedelta(0) or include_default_values:
output[cased_name] = _Duration.delta_to_json(v)
elif meta.wraps:
if v is not None or include_default_values:
output[cased_name] = v
elif isinstance(v, list):
# Convert each item.
v = [i.to_dict() for i in v]
output[field.name] = v
elif v._serialized_on_wire:
output[field.name] = v.to_dict()
v = [i.to_dict(casing, include_default_values) for i in v]
if v or include_default_values:
output[cased_name] = v
else:
if v._serialized_on_wire or include_default_values:
output[cased_name] = v.to_dict(casing, include_default_values)
elif meta.proto_type == "map":
for k in v:
if hasattr(v[k], "to_dict"):
v[k] = v[k].to_dict()
v[k] = v[k].to_dict(casing, include_default_values)
if v:
output[field.name] = v
elif v != get_default(meta.proto_type):
if v or include_default_values:
output[cased_name] = v
elif v != self._get_field_default(field, meta) or include_default_values:
if meta.proto_type in INT_64_TYPES:
if isinstance(v, list):
output[field.name] = [str(n) for n in v]
output[cased_name] = [str(n) for n in v]
else:
output[field.name] = str(v)
output[cased_name] = str(v)
elif meta.proto_type == TYPE_BYTES:
if isinstance(v, list):
output[field.name] = [b64encode(b).decode("utf8") for b in v]
output[cased_name] = [b64encode(b).decode("utf8") for b in v]
else:
output[field.name] = b64encode(v).decode("utf8")
output[cased_name] = b64encode(v).decode("utf8")
elif meta.proto_type == TYPE_ENUM:
enum_values = list(self._cls_for(field))
enum_values = list(self._cls_for(field)) # type: ignore
if isinstance(v, list):
output[field.name] = [enum_values[e].name for e in v]
output[cased_name] = [enum_values[e].name for e in v]
else:
output[field.name] = enum_values[v].name
output[cased_name] = enum_values[v].name
else:
output[field.name] = v
output[cased_name] = v
return output
def from_dict(self: T, value: dict) -> T:
@@ -674,44 +782,58 @@ class Message(ABC):
returns the instance itself and is therefore assignable and chainable.
"""
self._serialized_on_wire = True
for field in dataclasses.fields(self):
meta = FieldMetadata.get(field)
if field.name in value and value[field.name] is not None:
if meta.proto_type == "message":
v = getattr(self, field.name)
# print(v, value[field.name])
if isinstance(v, list):
cls = self._cls_for(field)
for i in range(len(value[field.name])):
v.append(cls().from_dict(value[field.name][i]))
else:
v.from_dict(value[field.name])
elif meta.map_types and meta.map_types[1] == TYPE_MESSAGE:
v = getattr(self, field.name)
cls = self._cls_for(field, index=1)
for k in value[field.name]:
v[k] = cls().from_dict(value[field.name][k])
else:
v = value[field.name]
if meta.proto_type in INT_64_TYPES:
if isinstance(value[field.name], list):
v = [int(n) for n in value[field.name]]
else:
v = int(value[field.name])
elif meta.proto_type == TYPE_BYTES:
if isinstance(value[field.name], list):
v = [b64decode(n) for n in value[field.name]]
else:
v = b64decode(value[field.name])
elif meta.proto_type == TYPE_ENUM:
enum_cls = self._cls_for(field)
if isinstance(v, list):
v = [enum_cls.from_string(e) for e in v]
elif isinstance(v, str):
v = enum_cls.from_string(v)
fields_by_name = {f.name: f for f in dataclasses.fields(self)}
for key in value:
snake_cased = safe_snake_case(key)
if snake_cased in fields_by_name:
field = fields_by_name[snake_cased]
meta = FieldMetadata.get(field)
if v is not None:
setattr(self, field.name, v)
if value[key] is not None:
if meta.proto_type == "message":
v = getattr(self, field.name)
if isinstance(v, list):
cls = self._cls_for(field)
for i in range(len(value[key])):
v.append(cls().from_dict(value[key][i]))
elif isinstance(v, datetime):
v = datetime.fromisoformat(
value[key].replace("Z", "+00:00")
)
setattr(self, field.name, v)
elif isinstance(v, timedelta):
v = timedelta(seconds=float(value[key][:-1]))
setattr(self, field.name, v)
elif meta.wraps:
setattr(self, field.name, value[key])
else:
v.from_dict(value[key])
elif meta.map_types and meta.map_types[1] == TYPE_MESSAGE:
v = getattr(self, field.name)
cls = self._cls_for(field, index=1)
for k in value[key]:
v[k] = cls().from_dict(value[key][k])
else:
v = value[key]
if meta.proto_type in INT_64_TYPES:
if isinstance(value[key], list):
v = [int(n) for n in value[key]]
else:
v = int(value[key])
elif meta.proto_type == TYPE_BYTES:
if isinstance(value[key], list):
v = [b64decode(n) for n in value[key]]
else:
v = b64decode(value[key])
elif meta.proto_type == TYPE_ENUM:
enum_cls = self._cls_for(field)
if isinstance(v, list):
v = [enum_cls.from_string(e) for e in v]
elif isinstance(v, str):
v = enum_cls.from_string(v)
if v is not None:
setattr(self, field.name, v)
return self
def to_json(self, indent: Union[None, int, str] = None) -> str:
@@ -743,6 +865,141 @@ def which_one_of(message: Message, group_name: str) -> Tuple[str, Any]:
return (field.name, getattr(message, field.name))
@dataclasses.dataclass
class _Duration(Message):
# Signed seconds of the span of time. Must be from -315,576,000,000 to
# +315,576,000,000 inclusive. Note: these bounds are computed from: 60
# sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
seconds: int = int64_field(1)
# Signed fractions of a second at nanosecond resolution of the span of time.
# Durations less than one second are represented with a 0 `seconds` field and
# a positive or negative `nanos` field. For durations of one second or more,
# a non-zero value for the `nanos` field must be of the same sign as the
# `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive.
nanos: int = int32_field(2)
def to_timedelta(self) -> timedelta:
return timedelta(seconds=self.seconds, microseconds=self.nanos / 1e3)
@staticmethod
def delta_to_json(delta: timedelta) -> str:
parts = str(delta.total_seconds()).split(".")
if len(parts) > 1:
while len(parts[1]) not in [3, 6, 9]:
parts[1] = parts[1] + "0"
return ".".join(parts) + "s"
@dataclasses.dataclass
class _Timestamp(Message):
# Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must
# be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.
seconds: int = int64_field(1)
# Non-negative fractions of a second at nanosecond resolution. Negative
# second values with fractions must still have non-negative nanos values that
# count forward in time. Must be from 0 to 999,999,999 inclusive.
nanos: int = int32_field(2)
def to_datetime(self) -> datetime:
ts = self.seconds + (self.nanos / 1e9)
return datetime.fromtimestamp(ts, tz=timezone.utc)
@staticmethod
def timestamp_to_json(dt: datetime) -> str:
nanos = dt.microsecond * 1e3
copy = dt.replace(microsecond=0, tzinfo=None)
result = copy.isoformat()
if (nanos % 1e9) == 0:
# If there are 0 fractional digits, the fractional
# point '.' should be omitted when serializing.
return result + "Z"
if (nanos % 1e6) == 0:
# Serialize 3 fractional digits.
return result + ".%03dZ" % (nanos / 1e6)
if (nanos % 1e3) == 0:
# Serialize 6 fractional digits.
return result + ".%06dZ" % (nanos / 1e3)
# Serialize 9 fractional digits.
return result + ".%09dZ" % nanos
class _WrappedMessage(Message):
"""
Google protobuf wrapper types base class. JSON representation is just the
value itself.
"""
value: Any
def to_dict(self, casing: Casing = Casing.CAMEL) -> Any:
return self.value
def from_dict(self: T, value: Any) -> T:
if value is not None:
self.value = value
return self
@dataclasses.dataclass
class _BoolValue(_WrappedMessage):
value: bool = bool_field(1)
@dataclasses.dataclass
class _Int32Value(_WrappedMessage):
value: int = int32_field(1)
@dataclasses.dataclass
class _UInt32Value(_WrappedMessage):
value: int = uint32_field(1)
@dataclasses.dataclass
class _Int64Value(_WrappedMessage):
value: int = int64_field(1)
@dataclasses.dataclass
class _UInt64Value(_WrappedMessage):
value: int = uint64_field(1)
@dataclasses.dataclass
class _FloatValue(_WrappedMessage):
value: float = float_field(1)
@dataclasses.dataclass
class _DoubleValue(_WrappedMessage):
value: float = double_field(1)
@dataclasses.dataclass
class _StringValue(_WrappedMessage):
value: str = string_field(1)
@dataclasses.dataclass
class _BytesValue(_WrappedMessage):
value: bytes = bytes_field(1)
def _get_wrapper(proto_type: str) -> Type:
"""Get the wrapper message class for a wrapped type."""
return {
TYPE_BOOL: _BoolValue,
TYPE_INT32: _Int32Value,
TYPE_UINT32: _UInt32Value,
TYPE_INT64: _Int64Value,
TYPE_UINT64: _UInt64Value,
TYPE_FLOAT: _FloatValue,
TYPE_DOUBLE: _DoubleValue,
TYPE_STRING: _StringValue,
TYPE_BYTES: _BytesValue,
}[proto_type]
class ServiceStub(ABC):
"""
Base class for async gRPC service stubs.
@@ -752,11 +1009,11 @@ class ServiceStub(ABC):
self.channel = channel
async def _unary_unary(
self, route: str, request_type: Type, response_type: Type[T], request: Any
self, route: str, request: "IProtoMessage", response_type: Type[T]
) -> T:
"""Make a unary request and return the response."""
async with self.channel.request(
route, grpclib.const.Cardinality.UNARY_UNARY, request_type, response_type
route, grpclib.const.Cardinality.UNARY_UNARY, type(request), response_type
) as stream:
await stream.send_message(request, end=True)
response = await stream.recv_message()
@@ -764,11 +1021,11 @@ class ServiceStub(ABC):
return response
async def _unary_stream(
self, route: str, request_type: Type, response_type: Type[T], request: Any
self, route: str, request: "IProtoMessage", response_type: Type[T]
) -> AsyncGenerator[T, None]:
"""Make a unary request and return the stream response iterator."""
async with self.channel.request(
route, grpclib.const.Cardinality.UNARY_STREAM, request_type, response_type
route, grpclib.const.Cardinality.UNARY_STREAM, type(request), response_type
) as stream:
await stream.send_message(request, end=True)
async for message in stream:

betterproto/casing.py (new file): 41 lines added

@@ -0,0 +1,41 @@
import stringcase
def safe_snake_case(value: str) -> str:
"""Snake case a value taking into account Python keywords."""
value = stringcase.snakecase(value)
if value in [
"and",
"as",
"assert",
"break",
"class",
"continue",
"def",
"del",
"elif",
"else",
"except",
"finally",
"for",
"from",
"global",
"if",
"import",
"in",
"is",
"lambda",
"nonlocal",
"not",
"or",
"pass",
"raise",
"return",
"try",
"while",
"with",
"yield",
]:
# https://www.python.org/dev/peps/pep-0008/#descriptive-naming-styles
value += "_"
return value
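A brief, hedged illustration of what this helper produces; the inputs are arbitrary examples rather than names taken from this change set:
```py
>>> from betterproto.casing import safe_snake_case
>>> safe_snake_case("fieldName")   # ordinary identifiers are simply snake-cased
'field_name'
>>> safe_snake_case("for")         # clashes with Python keywords gain a trailing underscore
'for_'
```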


@@ -9,13 +9,16 @@ import textwrap
from typing import Any, List, Tuple
try:
import jinja2
import black
except ImportError:
print(
"Unable to import `jinja2`. Did you install the compiler feature with `pip install betterproto[compiler]`?"
"Unable to import `black` formatter. Did you install the compiler feature with `pip install betterproto[compiler]`?"
)
raise SystemExit(1)
import jinja2
import stringcase
from google.protobuf.compiler import plugin_pb2 as plugin
from google.protobuf.descriptor_pb2 import (
DescriptorProto,
@@ -25,11 +28,20 @@ from google.protobuf.descriptor_pb2 import (
ServiceDescriptorProto,
)
from betterproto.casing import safe_snake_case
def snake_case(value: str) -> str:
return (
re.sub(r"(?<=[a-z])[A-Z]|[A-Z](?=[^A-Z])", r"_\g<0>", value).lower().strip("_")
)
WRAPPER_TYPES = {
"google.protobuf.DoubleValue": "float",
"google.protobuf.FloatValue": "float",
"google.protobuf.Int64Value": "int",
"google.protobuf.UInt64Value": "int",
"google.protobuf.Int32Value": "int",
"google.protobuf.UInt32Value": "int",
"google.protobuf.BoolValue": "bool",
"google.protobuf.StringValue": "str",
"google.protobuf.BytesValue": "bytes",
}
def get_ref_type(package: str, imports: set, type_name: str) -> str:
@@ -37,15 +49,33 @@ def get_ref_type(package: str, imports: set, type_name: str) -> str:
Return a Python type name for a proto type reference. Adds the import if
necessary.
"""
# If the package name is a blank string, then this should still work
# because by convention packages are lowercase and message/enum types are
# pascal-cased. May require refactoring in the future.
type_name = type_name.lstrip(".")
if type_name in WRAPPER_TYPES:
return f"Optional[{WRAPPER_TYPES[type_name]}]"
if type_name == "google.protobuf.Duration":
return "timedelta"
if type_name == "google.protobuf.Timestamp":
return "datetime"
if type_name.startswith(package):
# This is the current package, which has nested types flattened.
type_name = f'"{type_name.lstrip(package).lstrip(".").replace(".", "")}"'
parts = type_name.lstrip(package).lstrip(".").split(".")
if len(parts) == 1 or (len(parts) > 1 and parts[0][0] == parts[0][0].upper()):
# This is the current package, which has nested types flattened.
# foo.bar_thing => FooBarThing
cased = [stringcase.pascalcase(part) for part in parts]
type_name = f'"{"".join(cased)}"'
if "." in type_name:
# This is imported from another package. No need
# to use a forward ref and we need to add the import.
parts = type_name.split(".")
parts[-1] = stringcase.pascalcase(parts[-1])
imports.add(f"from .{'.'.join(parts[:-2])} import {parts[-2]}")
type_name = f"{parts[-2]}.{parts[-1]}"
@@ -92,19 +122,19 @@ def get_py_zero(type_num: int) -> str:
def traverse(proto_file):
def _traverse(path, items):
def _traverse(path, items, prefix = ''):
for i, item in enumerate(items):
# Adjust the name since we flatten the heirarchy.
item.name = next_prefix = prefix + item.name
yield item, path + [i]
if isinstance(item, DescriptorProto):
for enum in item.enum_type:
enum.name = item.name + enum.name
enum.name = next_prefix + enum.name
yield enum, path + [i, 4]
if item.nested_type:
for n, p in _traverse(path + [i, 3], item.nested_type):
# Adjust the name since we flatten the heirarchy.
n.name = item.name + n.name
for n, p in _traverse(path + [i, 3], item.nested_type, next_prefix):
yield n, p
return itertools.chain(
@@ -112,25 +142,26 @@ def traverse(proto_file):
)
def get_comment(proto_file, path: List[int]) -> str:
def get_comment(proto_file, path: List[int], indent: int = 4) -> str:
pad = " " * indent
for sci in proto_file.source_code_info.location:
# print(list(sci.path), path, file=sys.stderr)
if list(sci.path) == path and sci.leading_comments:
lines = textwrap.wrap(
sci.leading_comments.strip().replace("\n", ""), width=75
sci.leading_comments.strip().replace("\n", ""), width=79 - indent
)
if path[-2] == 2 and path[-4] != 6:
# This is a field
return " # " + " # ".join(lines)
return f"{pad}# " + f"\n{pad}# ".join(lines)
else:
# This is a message, enum, service, or method
if len(lines) == 1 and len(lines[0]) < 70:
if len(lines) == 1 and len(lines[0]) < 79 - indent - 6:
lines[0] = lines[0].strip('"')
return f' """{lines[0]}"""'
return f'{pad}"""{lines[0]}"""'
else:
joined = "\n ".join(lines)
return f' """\n {joined}\n """'
joined = f"\n{pad}".join(lines)
return f'{pad}"""\n{pad}{joined}\n{pad}"""'
return ""
@@ -146,6 +177,9 @@ def generate_code(request, response):
output_map = {}
for proto_file in request.proto_file:
out = proto_file.package
if out == "google.protobuf":
continue
if not out:
out = os.path.splitext(proto_file.name)[0].replace(os.path.sep, ".")
@@ -163,6 +197,7 @@ def generate_code(request, response):
"package": package,
"files": [f.name for f in options["files"]],
"imports": set(),
"datetime_imports": set(),
"typing_imports": set(),
"messages": [],
"enums": [],
@@ -179,7 +214,7 @@ def generate_code(request, response):
for item, path in traverse(proto_file):
# print(item, file=sys.stderr)
# print(path, file=sys.stderr)
data = {"name": item.name}
data = {"name": item.name, "py_name": stringcase.pascalcase(item.name)}
if isinstance(item, DescriptorProto):
# print(item, file=sys.stderr)
@@ -203,6 +238,14 @@ def generate_code(request, response):
packed = False
field_type = f.Type.Name(f.type).lower()[5:]
field_wraps = ""
if f.type_name.startswith(
".google.protobuf"
) and f.type_name.endswith("Value"):
w = f.type_name.split(".").pop()[:-5].upper()
field_wraps = f"betterproto.TYPE_{w}"
map_types = None
if f.type == 11:
# This might be a map...
@@ -252,13 +295,23 @@ def generate_code(request, response):
if f.HasField("oneof_index"):
one_of = item.oneof_decl[f.oneof_index].name
if "Optional[" in t:
output["typing_imports"].add("Optional")
if "timedelta" in t:
output["datetime_imports"].add("timedelta")
elif "datetime" in t:
output["datetime_imports"].add("datetime")
data["properties"].append(
{
"name": f.name,
"py_name": safe_snake_case(f.name),
"number": f.number,
"comment": get_comment(proto_file, path + [2, i]),
"proto_type": int(f.type),
"field_type": field_type,
"field_wraps": field_wraps,
"map_types": map_types,
"type": t,
"zero": zero,
@@ -294,6 +347,7 @@ def generate_code(request, response):
data = {
"name": service.name,
"py_name": stringcase.pascalcase(service.name),
"comment": get_comment(proto_file, [6, i]),
"methods": [],
}
@@ -317,8 +371,8 @@ def generate_code(request, response):
data["methods"].append(
{
"name": method.name,
"py_name": snake_case(method.name),
"comment": get_comment(proto_file, [6, i, 2, j]),
"py_name": stringcase.snakecase(method.name),
"comment": get_comment(proto_file, [6, i, 2, j], indent=8),
"route": f"/{package}.{service.name}/{method.name}",
"input": get_ref_type(
package, output["imports"], method.input_type
@@ -338,6 +392,7 @@ def generate_code(request, response):
output["services"].append(data)
output["imports"] = sorted(output["imports"])
output["datetime_imports"] = sorted(output["datetime_imports"])
output["typing_imports"] = sorted(output["typing_imports"])
# Fill response
@@ -345,8 +400,11 @@ def generate_code(request, response):
# print(filename, file=sys.stderr)
f.name = filename.replace(".", os.path.sep) + ".py"
# f.content = json.dumps(output, indent=2)
f.content = template.render(description=output).rstrip("\n") + "\n"
# Render and then format the output file.
f.content = black.format_str(
template.render(description=output),
mode=black.FileMode(target_versions=set([black.TargetVersion.PY37])),
)
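# Note (editorial): black.format_str returns the formatted source as a string;
# pinning target_versions to PY37 keeps the emitted grammar stable regardless
# of which Python version runs the plugin.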
inits = set([""])
for f in response.file:
@@ -361,10 +419,20 @@ def generate_code(request, response):
inits.add(base)
for base in inits:
name = os.path.join(base, "__init__.py")
if os.path.exists(name):
# Never overwrite inits as they may have custom stuff in them.
continue
init = response.file.add()
init.name = os.path.join(base, "__init__.py")
init.name = name
init.content = b""
filenames = sorted([f.name for f in response.file])
for fname in filenames:
print(f"Writing {fname}", file=sys.stderr)
def main():
"""The plugin's main entry point."""

View File

@@ -2,6 +2,10 @@
# sources: {{ ', '.join(description.files) }}
# plugin: python-betterproto
from dataclasses import dataclass
{% if description.datetime_imports %}
from datetime import {% for i in description.datetime_imports %}{{ i }}{% if not loop.last %}, {% endif %}{% endfor %}
{% endif %}
{% if description.typing_imports %}
from typing import {% for i in description.typing_imports %}{{ i }}{% if not loop.last %}, {% endif %}{% endfor %}
@@ -11,14 +15,14 @@ import betterproto
{% if description.services %}
import grpclib
{% endif %}
{% for i in description.imports %}
{% for i in description.imports %}
{{ i }}
{% endfor %}
{% if description.enums %}{% for enum in description.enums %}
class {{ enum.name }}(betterproto.Enum):
class {{ enum.py_name }}(betterproto.Enum):
{% if enum.comment %}
{{ enum.comment }}
@@ -35,7 +39,7 @@ class {{ enum.name }}(betterproto.Enum):
{% endif %}
{% for message in description.messages %}
@dataclass
class {{ message.name }}(betterproto.Message):
class {{ message.py_name }}(betterproto.Message):
{% if message.comment %}
{{ message.comment }}
@@ -44,7 +48,7 @@ class {{ message.name }}(betterproto.Message):
{% if field.comment %}
{{ field.comment }}
{% endif %}
{{ field.name }}: {{ field.type }} = betterproto.{{ field.field_type }}_field({{ field.number }}{% if field.field_type == 'map'%}, betterproto.{{ field.map_types[0] }}, betterproto.{{ field.map_types[1] }}{% endif %}{% if field.one_of %}, group="{{ field.one_of }}"{% endif %})
{{ field.py_name }}: {{ field.type }} = betterproto.{{ field.field_type }}_field({{ field.number }}{% if field.field_type == 'map'%}, betterproto.{{ field.map_types[0] }}, betterproto.{{ field.map_types[1] }}{% endif %}{% if field.one_of %}, group="{{ field.one_of }}"{% endif %}{% if field.field_wraps %}, wraps={{ field.field_wraps }}{% endif %})
{% endfor %}
{% if not message.properties %}
pass
@@ -53,13 +57,13 @@ class {{ message.name }}(betterproto.Message):
{% endfor %}
{% for service in description.services %}
class {{ service.name }}Stub(betterproto.ServiceStub):
class {{ service.py_name }}Stub(betterproto.ServiceStub):
{% if service.comment %}
{{ service.comment }}
{% endif %}
{% for method in service.methods %}
async def {{ method.py_name }}(self{% if method.input_message and method.input_message.properties %}, *, {% for field in method.input_message.properties %}{{ field.name }}: {% if field.zero == "None" %}Optional[{{ field.type }}]{% else %}{{ field.type }}{% endif %} = {{ field.zero }}{% if not loop.last %}, {% endif %}{% endfor %}{% endif %}) -> {% if method.server_streaming %}AsyncGenerator[{{ method.output }}, None]{% else %}{{ method.output }}{% endif %}:
async def {{ method.py_name }}(self{% if method.input_message and method.input_message.properties %}, *, {% for field in method.input_message.properties %}{{ field.name }}: {% if field.zero == "None" and not field.type.startswith("Optional[") %}Optional[{{ field.type }}]{% else %}{{ field.type }}{% endif %} = {{ field.zero }}{% if not loop.last %}, {% endif %}{% endfor %}{% endif %}) -> {% if method.server_streaming %}AsyncGenerator[{{ method.output }}, None]{% else %}{{ method.output }}{% endif %}:
{% if method.comment %}
{{ method.comment }}
@@ -77,17 +81,15 @@ class {{ service.name }}Stub(betterproto.ServiceStub):
{% if method.server_streaming %}
async for response in self._unary_stream(
"{{ method.route }}",
{{ method.input }},
{{ method.output }},
request,
{{ method.output }},
):
yield response
{% else %}
return await self._unary_unary(
"{{ method.route }}",
{{ method.input }},
{{ method.output }},
request,
{{ method.output }},
)
{% endif %}
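{# Illustrative rendering (assumption, hypothetical names): for a unary method
   "GetThing" on service "Things" in package "pkg", the block above expands to
   roughly:
       async def get_thing(self, *, name: str = "") -> GetThingResponse:
           ...
           return await self._unary_unary(
               "/pkg.Things/GetThing", request, GetThingResponse
           )
#}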

View File

@@ -0,0 +1,3 @@
{
"value": true
}

View File

@@ -0,0 +1,5 @@
syntax = "proto3";
message Test {
bool value = 1;
}

View File

@@ -0,0 +1,4 @@
{
"camelCase": 1,
"snakeCase": "ONE"
}

View File

@@ -0,0 +1,12 @@
syntax = "proto3";
enum my_enum {
ZERO = 0;
ONE = 1;
TWO = 2;
}
message Test {
int32 camelCase = 1;
my_enum snake_case = 2;
}

View File

@@ -72,7 +72,8 @@ if __name__ == "__main__":
input_json = open(filename).read()
parsed = Parse(input_json, imported.Test())
serialized = parsed.SerializeToString()
serialized_json = MessageToJson(parsed, preserving_proto_field_name=True)
preserve = "casing" not in filename
serialized_json = MessageToJson(parsed, preserving_proto_field_name=preserve)
s_loaded = json.loads(serialized_json)
in_loaded = json.loads(input_json)

View File

@@ -0,0 +1 @@
{}

View File

@@ -0,0 +1,5 @@
{
"maybe": false,
"ts": "1972-01-01T10:00:20.021Z",
"duration": "1.200s"
}

View File

@@ -0,0 +1,12 @@
syntax = "proto3";
import "google/protobuf/duration.proto";
import "google/protobuf/timestamp.proto";
import "google/protobuf/wrappers.proto";
message Test {
google.protobuf.BoolValue maybe = 1;
google.protobuf.Timestamp ts = 2;
google.protobuf.Duration duration = 3;
google.protobuf.Int32Value important = 4;
}

View File

@@ -0,0 +1,5 @@
{
"for": 1,
"with": 2,
"as": 3
}

View File

@@ -0,0 +1,7 @@
syntax = "proto3";
message Test {
int32 for = 1;
int32 with = 2;
int32 as = 3;
}

View File

@@ -0,0 +1,11 @@
{
"root": {
"name": "double-nested",
"parent": {
"child": [{"foo": "hello"}],
"enumChild": ["A"],
"rootParentChild": [{"a": "hello"}],
"bar": true
}
}
}

View File

@@ -0,0 +1,26 @@
syntax = "proto3";
message Test {
message Root {
message Parent {
message RootParentChild {
string a = 1;
}
enum EnumChild{
A = 0;
B = 1;
}
message Child {
string foo = 1;
}
reserved 1;
repeated Child child = 2;
repeated EnumChild enumChild=3;
repeated RootParentChild rootParentChild=4;
bool bar = 5;
}
string name = 1;
Parent parent = 2;
}
Root root = 1;
}

View File

@@ -1,5 +1,6 @@
import betterproto
from dataclasses import dataclass
from typing import Optional
def test_has_field():
@@ -32,6 +33,21 @@ def test_has_field():
assert betterproto.serialized_on_wire(foo.bar) == False
def test_class_init():
@dataclass
class Bar(betterproto.Message):
name: str = betterproto.string_field(1)
@dataclass
class Foo(betterproto.Message):
name: str = betterproto.string_field(1)
child: Bar = betterproto.message_field(2)
foo = Foo(name="foo", child=Bar(name="bar"))
assert foo.to_dict() == {"name": "foo", "child": {"name": "bar"}}
def test_enum_as_int_json():
class TestEnum(betterproto.Enum):
ZERO = 0
@@ -115,3 +131,135 @@ def test_oneof_support():
assert betterproto.which_one_of(foo2, "group1")[0] == "bar"
assert foo.bar == 0
assert betterproto.which_one_of(foo2, "group2")[0] == ""
def test_json_casing():
@dataclass
class CasingTest(betterproto.Message):
pascal_case: int = betterproto.int32_field(1)
camel_case: int = betterproto.int32_field(2)
snake_case: int = betterproto.int32_field(3)
kabob_case: int = betterproto.int32_field(4)
# Parsing should accept almost any input
test = CasingTest().from_dict(
{"PascalCase": 1, "camelCase": 2, "snake_case": 3, "kabob-case": 4}
)
assert test == CasingTest(1, 2, 3, 4)
# Serializing should be strict.
assert test.to_dict() == {
"pascalCase": 1,
"camelCase": 2,
"snakeCase": 3,
"kabobCase": 4,
}
assert test.to_dict(casing=betterproto.Casing.SNAKE) == {
"pascal_case": 1,
"camel_case": 2,
"snake_case": 3,
"kabob_case": 4,
}
def test_optional_flag():
@dataclass
class Request(betterproto.Message):
flag: Optional[bool] = betterproto.message_field(1, wraps=betterproto.TYPE_BOOL)
# Serialization of not passed vs. set vs. zero-value.
assert bytes(Request()) == b""
assert bytes(Request(flag=True)) == b"\n\x02\x08\x01"
assert bytes(Request(flag=False)) == b"\n\x00"
# Differentiate between not passed and the zero-value.
assert Request().parse(b"").flag == None
assert Request().parse(b"\n\x00").flag == False
def test_to_dict_default_values():
@dataclass
class TestMessage(betterproto.Message):
some_int: int = betterproto.int32_field(1)
some_double: float = betterproto.double_field(2)
some_str: str = betterproto.string_field(3)
some_bool: bool = betterproto.bool_field(4)
# Empty dict
test = TestMessage().from_dict({})
assert test.to_dict(include_default_values=True) == {
"someInt": 0,
"someDouble": 0.0,
"someStr": "",
"someBool": False,
}
# All default values
test = TestMessage().from_dict(
{"someInt": 0, "someDouble": 0.0, "someStr": "", "someBool": False}
)
assert test.to_dict(include_default_values=True) == {
"someInt": 0,
"someDouble": 0.0,
"someStr": "",
"someBool": False,
}
# Some default and some other values
@dataclass
class TestMessage2(betterproto.Message):
some_int: int = betterproto.int32_field(1)
some_double: float = betterproto.double_field(2)
some_str: str = betterproto.string_field(3)
some_bool: bool = betterproto.bool_field(4)
some_default_int: int = betterproto.int32_field(5)
some_default_double: float = betterproto.double_field(6)
some_default_str: str = betterproto.string_field(7)
some_default_bool: bool = betterproto.bool_field(8)
test = TestMessage2().from_dict(
{
"someInt": 2,
"someDouble": 1.2,
"someStr": "hello",
"someBool": True,
"someDefaultInt": 0,
"someDefaultDouble": 0.0,
"someDefaultStr": "",
"someDefaultBool": False,
}
)
assert test.to_dict(include_default_values=True) == {
"someInt": 2,
"someDouble": 1.2,
"someStr": "hello",
"someBool": True,
"someDefaultInt": 0,
"someDefaultDouble": 0.0,
"someDefaultStr": "",
"someDefaultBool": False,
}
# Nested messages
@dataclass
class TestChildMessage(betterproto.Message):
some_other_int: int = betterproto.int32_field(1)
@dataclass
class TestParentMessage(betterproto.Message):
some_int: int = betterproto.int32_field(1)
some_double: float = betterproto.double_field(2)
some_message: TestChildMessage = betterproto.message_field(3)
test = TestParentMessage().from_dict({"someInt": 0, "someDouble": 1.2,})
assert test.to_dict(include_default_values=True) == {
"someInt": 0,
"someDouble": 1.2,
"someMessage": {"someOtherInt": 0},
}

View File

@@ -1,5 +1,5 @@
[tool.black]
target-version = ['py37']
target-version = ['py36']
[tool.isort]
multi_line_output = 3

View File

@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
setup(
name="betterproto",
version="1.0.1",
version="1.2.3",
description="A better Protobuf / gRPC generator & library",
long_description=open("README.md", "r").read(),
long_description_content_type="text/markdown",
@@ -17,8 +17,13 @@ setup(
exclude=["tests", "*.tests", "*.tests.*", "output", "output.*"]
),
package_data={"betterproto": ["py.typed", "templates/template.py"]},
python_requires=">=3.7",
install_requires=["grpclib"],
extras_require={"compiler": ["jinja2", "protobuf"]},
python_requires=">=3.6",
install_requires=[
'dataclasses; python_version<"3.7"',
'backports-datetime-fromisoformat; python_version<"3.7"',
"grpclib",
"stringcase",
],
extras_require={"compiler": ["black", "jinja2", "protobuf"]},
zip_safe=False,
)
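# Illustrative usage (assumption, not part of this diff): the "compiler" extra
# declared above pulls in black/jinja2/protobuf for code generation, e.g.
#   pip install "betterproto[compiler]"
# while a bare install only needs the runtime dependencies in install_requires.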