Compare commits
445 Commits
a32a326d38
256f499c90
5a518ed044
08e8a68893
6a65ca94bc
dbc612c7f3
f33a42b082
36b5fd1495
f41934a0e2
37fa3abbac
ed7eefac6f
1741a126b3
1a23f09c16
335eee7537
a99be78fe4
849c12fe88
c621ef8a8d
34b8249b91
6a3bbe3f25
65ee2fc702
7c43c39eab
f8ecc42478
c2bcd31fe3
c9dfe9ab1f
32d642d2a0
1161803069
8d25c96cea
8fdcb381b7
4cf6e7d95c
63458e2da0
efaef5095c
1538e156a1
4e9a17c227
f96f51650c
970624fe08
32eaa51e8d
5fdd0bb24f
8b59234856
7c6c627938
696b7ae9fc
6dce440975
1f88b67eeb
1f79bdd7e4
6606cd3bb9
576f878ddc
6bdfa67fa1
b075402a93
49ac12634b
a7f0d028ff
acca29731f
ecbe8dc04d
85d2990ca1
c3c20556e0
df1ba911b7
126b256b4c
e98c47861d
dbd31929d3
7dee36e073
5666393f9d
ce5093eec0
c47e83fe5b
b8a091ae70
61fc2f4160
d34b16993d
9ed579fa35
1d296f1a88
bd7de203e1
d9b7608980
02aa4e88b7
1dd001b6d3
e309513131
24db53290e
2bcb05a905
ca6b9fe1a2
4f18ed1325
6b36b9ba9f
61d192e207
8b5dd6c1f8
3514991133
e3b44f491f
c82816b8be
aa81680c83
8659c51123
0fda2cc05d
4cdf1bb9e0
d203659a44
6faac1d1ca
098989e9e9
182aedaec4
a7532bbadc
73d1fa3d5b
c00bc96db7
d3e9621aa8
fcbd8a3759
aad7d2ad76
37e53fce85
2b41383745
b0b6cd24ad
b81195eb44
d2af2f2fac
e7f07fa2a1
50fa4e6268
2fa0be2141
13d656587c
6df8cef3f0
1b1bd47cb1
0adcc9020c
bfc0fac754
8fbf4476a8
591ec5efb3
f31d51cf3c
496eba2750
d663a318b7
2fb37dd108
42d2df6de6
3fd5a0d662
bc13e7070d
6536181902
85e4be96d8
06c26ba60d
6a70b8e8ea
3ca092a724
6f7d706a8e
ac96d8254b
e7133adeb3
204e04dd69
b9b0b22d57
402c21256f
5f7e4d58ef
1aaf7728cc
70310c9e8c
18a518efa7
62da35b3ea
69f4192341
9c1bf25304
a836fb23bc
bd69862a02
74205e3319
3f377e3bfd
8c727d904f
eeddc844a5
9b5594adbe
d991040ff6
d260f071e0
6dd7baa26c
573c7292a6
d77f44ebb7
671c0ff4ac
9cecc8c3ff
bc3cfc5562
b0a36d12e4
a4d2d39546
c424b6f8db
421fdba309
fb2793e0b6
ad8b91766a
a33126544b
02e41afd09
7368299a70
deb623ed14
95339bf74d
5b639c82b2
7c5ee47e68
bb646fe26f
fc90653ab1
2a73dbac98
891c9e5d6c
a890514b5c
fe1e712fdb
7a358a63cf
342e6559dc
2f62189346
8a215367ad
6c1c41e9cc
9e6881999e
59f5f88c0d
8eea5fe256
1d54ef8f99
73cea12e1f
a157f05480
69dfe9cafc
a8a082e4e7
e44de6da06
a5e0ef910f
8f7af272cc
bf9412e083
4630c1cc67
d3e4fbb311
58556e0eb6
a3f5f21738
0028cc384a
034e2e7da0
ca16b6ed34
16d554db75
9ef5503728
c93351ef21
80bef7c94f
804805f0f5
43c134d27c
0cd9510b54
beafc812ff
3d8c0cb713
c513853301
c1a76a5f5e
2745953a8e
b5dcac1250
cbd3437080
2585a07fcf
6c29771f4c
0ba0692dec
42e197f985
459d12b24d
cebf9176a3
8864f4fdbd
03211604bc
1d7ba850e9
b2651335ce
5a591ef2a4
8d7d0efb9b
b891d257f6
8bcb67b66f
72d72b4603
3273ae4d2c
6fe666473d
0338fcba29
0f3ad25770
586e28d2dc
a8d8159d27
3f519d4fb1
dedead048f
87b3a4b86d
f2e87192b0
98d00f0d21
bde6d06835
23dcbc2695
0af0cf4bfb
eaa4f7f5d9
cdddb2f42a
d21cd6e391
af7115429a
0d9387abec
f4ebcb0f65
81711d2427
e3135ce766
9532844929
0c5d1ff868
5fb4b4b7ff
4f820b4a6a
75a4c230da
5c9a12e2f6
e1ccd540a9
4e78fe9579
50bb67bf5d
1ecbf1a125
0814729c5a
f7aa6150e2
159c30ddd8
c8229e53a7
3185c67098
52eea5ce4c
4b6f55dce5
fdbe0205f1
09f821921f
a757da1b29
e2d672a422
63f5191f02
87f4b34930
2c360a55f2
04dce524aa
8edec81b11
32c8e77274
d9fa6d2dd3
c88edfd093
a46979c8a6
83e13aa606
3ca75dadd7
5d2f3a2cd9
65c1f366ef
34c34bd15a
fb54917f2c
1a95a7988e
76db2f153e
8567892352
3105e952ea
7c8d47de6d
c00e2aef19
fdf3b2e764
f7c2fd1194
d8abb850f8
d7ba27de2b
57523a9e7f
e5e61c873c
9fd1c058e6
d336153845
9a45ea9f16
bb7f5229fb
f7769a19d1
28a288924f
5c700618fd
a914306f33
67422db6b9
061bf86a9c
d31f90be6b
919b0a6a7d
7ecf3fe0e6
ff14948a4e
cb00273257
973d68a154
ab9857b5fd
2f658df666
b813d1cedb
f5ce1b7108
62fc421d60
eeed1c0db7
2a3e1e1827
53ce1255d3
e8991339e9
4556d67503
f087c6c9bd
eec24e4ee8
91111ab7d8
fcff3dff74
5c4969ff1c
ed33a48d64
ee362a7a73
261e55b2c8
98930ce0d7
d7d277eb0d
3860c0ab11
cd1c2dc3b5
be2a24d15c
a5effb219a
b354aeb692
6d9e3fc580
72de590651
3c70f21074
4b7d5d3de4
2d57f0d122
142e976c40
382fabb96c
18598e77d4
6871053ab2
5bb6931df7
e8a9960b73
f25c66777a
a68505b80e
2f9497e064
33964b883e
ec7574086d
8a42027bc9
71737cf696
659ddd9c44
5b6997870a
cdf7645722
ca20069ca3
59a4a7da43
15af4367e5
ec5683e572
20150fdcf3
d11b7d04c5
e2d35f4696
c3f08b9ef2
24d44898f4
074448c996
0fe557bd3c
1a87ea43a1
983e0895a2
4a2baf3f0a
8f0caf1db2
c50d9e2fdc
35548cb43e
b711d1e11f
917de09bb6
1f7f39049e
3d001a2a1a
de61ddab21
5e2d9febea
f6af077ffe
92088ebda8
c3e3837f71
6bd9c7835c
6ec902c1b5
960dba2ae8
4b4bdefb6f
dfa0a56b39
dd4873dfba
91f586f7d7
33fb83faad
77c04414f5
6969ff7ff6
13e08fdaa8
6775632f77
b12f1e4e61
7e9ba0866c
3546f55146
499489f1d3
ce9f492f50
93a6334015
36a14026d8
04a2fcd3eb
5759e323bd
c762c9c549
582a12577c
3616190451
9b990ee1bd
72a77b0d65
b2b36c8575
203105f048
fe11f74227
dc7a3e9bdf
f2e8afc609
dbd438e682
dce1c89fbe
c78851b1b8
4554d91f89
c0170f4d80
559b8833d8
7ccef16579
d8785b4622
45e7a30300
d7559c22f8
f9c351a98d
feea790116
33f74f6a45
3d5c12c532
706bd5a475
52beeb0d73
7e2dc595db
6fd9612ee1
ba520f88a4
b0b64fcbaf
7900c7c9db
fcc273e294
f820397751
16687211a2
eb5020db2a
035793aec3
c79535b614
5daf61f64c
4679c571c3
ff8463cf12
eff9021529
d43d5af5ce
ef0a1bf50c
0e389abbef
35  .gitea/workflows/release.yml  (new file)
@@ -0,0 +1,35 @@
name: Release
run-name: ${{ gitea.actor }} runs the CI pipeline

on:
  push:
    branches:
      - master

jobs:
  packaging:
    name: Distribution
    runs-on: ubuntu-latest
    env:
      EXT_FIX: "6"
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.9
        uses: actions/setup-python@v5
        with:
          python-version: '3.9'
      - name: Install poetry
        run: python -m pip install poetry chardet
      - name: Install poetry compiler
        run: poetry install -E compiler
      - name: Set poetry version
        run: PV=$(poetry version -s) && poetry version ${PV}+jar3b${EXT_FIX}
      - name: Build package
        run: poetry build
      - name: Add pypi source
        run: poetry source add --priority=supplemental ahax https://git.ahax86.ru/api/packages/pub/pypi
      - name: Add pypi credentials
        run: poetry config http-basic.ahax ${{ secrets.REPO_USER }} ${{ secrets.REPO_PASS }}
      - name: Push to pypi
        run: poetry publish -r ahax -u ${{ secrets.REPO_USER }} -p ${{ secrets.REPO_PASS }} -n
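The `Set poetry version` step above appends a PEP 440 local version label to whatever version `pyproject.toml` declares, so the fork's builds are distinguishable from upstream releases. A minimal sketch of the string it produces, assuming a base version of `2.0.0b7` (the latest entry in the CHANGELOG later in this diff):

```py
# Sketch of what `poetry version ${PV}+jar3b${EXT_FIX}` computes.
# The base version 2.0.0b7 is an assumption taken from the CHANGELOG below.
base_version = "2.0.0b7"  # PV=$(poetry version -s)
ext_fix = "6"             # EXT_FIX from the workflow env
print(f"{base_version}+jar3b{ext_fix}")  # -> 2.0.0b7+jar3b6
```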
|
23  .github/CONTRIBUTING.md  (vendored, new file)
@@ -0,0 +1,23 @@
# Contributing

There's lots to do, and we're working hard, so any help is welcome!

- :speech_balloon: Join us on [Discord](https://discord.gg/DEVteTupPb)!

What can you do?

- :+1: Vote on [issues](https://github.com/danielgtaylor/python-betterproto/issues).
- :speech_balloon: Give feedback on [Pull Requests](https://github.com/danielgtaylor/python-betterproto/pulls) and [Issues](https://github.com/danielgtaylor/python-betterproto/issues):
  - Suggestions
  - Express approval
  - Raise concerns
- :small_red_triangle: Create an issue:
  - File a bug (please check it's not a duplicate)
  - Propose an enhancement
- :white_check_mark: Create a PR:
  - [Create a failing test case](https://github.com/danielgtaylor/python-betterproto/blob/master/tests/README.md) to make bug-fixing easier
  - Fix any of the open issues
    - [Good first issues](https://github.com/danielgtaylor/python-betterproto/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
    - [Issues with tests](https://github.com/danielgtaylor/python-betterproto/issues?q=is%3Aissue+is%3Aopen+label%3A%22has+test%22)
  - New bugfix or idea
    - If you'd like to discuss your idea first, join us on Discord!
63  .github/ISSUE_TEMPLATE/bug_report.yml  (vendored, new file)
@@ -0,0 +1,63 @@
name: Bug Report
description: Report broken or incorrect behaviour
labels: ["bug", "investigation needed"]

body:
  - type: markdown
    attributes:
      value: >
        Thanks for taking the time to fill out a bug report!

        If you're not sure it's a bug and you just have a question, the [community Discord channel](https://discord.gg/DEVteTupPb) is a better place for general questions than a GitHub issue.

  - type: input
    attributes:
      label: Summary
      description: A simple summary of your bug report
    validations:
      required: true

  - type: textarea
    attributes:
      label: Reproduction Steps
      description: >
        What you did to make it happen.
        Ideally there should be a short code snippet in this section to help reproduce the bug.
    validations:
      required: true

  - type: textarea
    attributes:
      label: Expected Results
      description: >
        What did you expect to happen?
    validations:
      required: true

  - type: textarea
    attributes:
      label: Actual Results
      description: >
        What actually happened?
    validations:
      required: true

  - type: textarea
    attributes:
      label: System Information
      description: >
        Paste the result of `protoc --version; python --version; pip show betterproto` below.
    validations:
      required: true

  - type: checkboxes
    attributes:
      label: Checklist
      options:
        - label: I have searched the issues for duplicates.
          required: true
        - label: I have shown the entire traceback, if possible.
          required: true
        - label: I have verified this issue occurs on the latest prerelease of betterproto, which can be installed using `pip install -U --pre betterproto`, if possible.
          required: true
6  .github/ISSUE_TEMPLATE/config.yml  (vendored, new file)
@@ -0,0 +1,6 @@
name:
description:
contact_links:
  - name: For questions about the library
    about: Support questions are better answered in our Discord group.
    url: https://discord.gg/DEVteTupPb
49  .github/ISSUE_TEMPLATE/feature_request.yml  (vendored, new file)
@@ -0,0 +1,49 @@
name: Feature Request
description: Suggest a feature for this library
labels: ["enhancement"]

body:
  - type: input
    attributes:
      label: Summary
      description: >
        What problem is your feature trying to solve? What would become easier or possible if this feature were implemented?
    validations:
      required: true

  - type: dropdown
    attributes:
      multiple: false
      label: What is the feature request for?
      options:
        - The core library
        - RPC handling
        - The documentation
    validations:
      required: true

  - type: textarea
    attributes:
      label: The Problem
      description: >
        What problem is your feature trying to solve?
        What would become easier or possible if this feature were implemented?
    validations:
      required: true

  - type: textarea
    attributes:
      label: The Ideal Solution
      description: >
        What is your ideal solution to the problem?
        What would you like this feature to do?
    validations:
      required: true

  - type: textarea
    attributes:
      label: The Current Solution
      description: >
        What is the current solution to the problem, if any?
    validations:
      required: false
16  .github/PULL_REQUEST_TEMPLATE.md  (vendored, new file)
@@ -0,0 +1,16 @@
## Summary

<!-- What is this pull request for? Does it fix any issues? -->

## Checklist

<!-- Put an x inside [ ] to check it, like so: [x] -->

- [ ] If code changes were made then they have been tested.
- [ ] I have updated the documentation to reflect the changes.
- [ ] This PR fixes an issue.
- [ ] This PR adds something new (e.g. new method or parameters).
- [ ] This change has an associated test.
- [ ] This PR is a breaking change (e.g. methods or parameters removed/renamed)
- [ ] This PR is **not** a code change (e.g. documentation, README, ...)
78  .github/workflows/ci.yml  (vendored)
@@ -1,23 +1,65 @@
 name: CI

-on: [push, pull_request]
+on:
+  push:
+    branches:
+      - master
+  pull_request:
+    branches:
+      - '**'

 jobs:
-  build:
-    runs-on: ubuntu-latest
+  tests:
+    name: ${{ matrix.os }} / ${{ matrix.python-version }}
+    runs-on: ${{ matrix.os }}-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [Ubuntu, MacOS, Windows]
+        python-version: ['3.9', '3.10', '3.11', '3.12', '3.13']
     steps:
-      - uses: actions/checkout@v1
-      - uses: actions/setup-python@v1
-        with:
-          python-version: 3.7
-      - uses: dschep/install-pipenv-action@v1
-      - name: Install dependencies
-        run: |
-          sudo apt install protobuf-compiler
-          pipenv install --dev
-      - name: Run tests
-        run: |
-          pipenv run generate
-          pipenv run test
+      - uses: actions/checkout@v4
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Get full Python version
+        id: full-python-version
+        shell: bash
+        run: echo "version=$(python -c "import sys; print('-'.join(str(v) for v in sys.version_info))")" >> "$GITHUB_OUTPUT"
+
+      - name: Install poetry
+        shell: bash
+        run: |
+          python -m pip install poetry
+          echo "$HOME/.poetry/bin" >> $GITHUB_PATH
+
+      - name: Configure poetry
+        shell: bash
+        run: poetry config virtualenvs.in-project true
+
+      - name: Set up cache
+        uses: actions/cache@v4
+        id: cache
+        with:
+          path: .venv
+          key: venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('**/poetry.lock') }}
+
+      - name: Ensure cache is healthy
+        if: steps.cache.outputs.cache-hit == 'true'
+        shell: bash
+        run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv
+
+      - name: Install dependencies
+        shell: bash
+        run: poetry install -E compiler
+
+      - name: Generate code from proto files
+        shell: bash
+        run: poetry run python -m tests.generate -v
+
+      - name: Execute test suite
+        shell: bash
+        run: poetry run python -m pytest tests/
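The `Get full Python version` step exists only to build a cache key that distinguishes interpreters down to the patch level. What its embedded one-liner computes, for example on CPython 3.9.13:

```py
# Same expression as the workflow's "Get full Python version" step.
import sys

# On CPython 3.9.13, sys.version_info is (3, 9, 13, 'final', 0), so this
# prints "3-9-13-final-0" — the value interpolated into the
# venv-${{ runner.os }}-<version>-${{ hashFiles('**/poetry.lock') }} cache key.
print("-".join(str(v) for v in sys.version_info))
```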
18  .github/workflows/code-quality.yml  (vendored, new file)
@@ -0,0 +1,18 @@
name: Code Quality

on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - '**'

jobs:
  check-formatting:
    name: Check code/doc formatting
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
      - uses: pre-commit/action@v3.0.1
46  .github/workflows/codeql-analysis.yml  (vendored, new file)
@@ -0,0 +1,46 @@
name: "CodeQL"

on:
  push:
    branches: [ "master" ]
  pull_request:
    branches:
      - '**'
  schedule:
    - cron: '19 1 * * 6'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.

          # For details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
          # queries: security-extended,security-and-quality

      - name: Autobuild
        uses: github/codeql-action/autobuild@v3

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3
31  .github/workflows/release.yml  (vendored, new file)
@@ -0,0 +1,31 @@
name: Release

on:
  push:
    branches:
      - master
    tags:
      - '**'
  pull_request:
    branches:
      - '**'

jobs:
  packaging:
    name: Distribution
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.9
        uses: actions/setup-python@v5
        with:
          python-version: 3.9
      - name: Install poetry
        run: python -m pip install poetry
      - name: Build package
        run: poetry build
      - name: Publish package to PyPI
        if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags')
        env:
          POETRY_PYPI_TOKEN_PYPI: ${{ secrets.pypi }}
        run: poetry publish -n
18  .gitignore  (vendored)
@@ -1,13 +1,21 @@
+.coverage
+.DS_Store
 .env
 .vscode/settings.json
 .mypy_cache
 .pytest_cache
-betterproto/tests/*.bin
-betterproto/tests/*_pb2.py
-betterproto/tests/*.py
-!betterproto/tests/generate.py
-!betterproto/tests/test_*.py
+.python-version
+build/
+tests/output_*
 **/__pycache__
 dist
 **/*.egg-info
 output
+.idea
+.DS_Store
+.tox
+.venv
+.asv
+venv
+.devcontainer
+.ruff_cache
25  .pre-commit-config.yaml  (new file)
@@ -0,0 +1,25 @@
ci:
  autofix_prs: false

repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.9.1
    hooks:
      - id: ruff-format
        args: ["--diff", "src", "tests"]
      - id: ruff
        args: ["--select", "I", "src", "tests"]

  - repo: https://github.com/PyCQA/doc8
    rev: 0.10.1
    hooks:
      - id: doc8
        additional_dependencies:
          - toml

  - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
    rev: v2.14.0
    hooks:
      - id: pretty-format-java
        args: [--autofix, --aosp]
        files: ^.*\.java$
17  .readthedocs.yml  (new file)
@@ -0,0 +1,17 @@
version: 2
formats: []

build:
  image: latest

sphinx:
  configuration: docs/conf.py
  fail_on_warning: false

python:
  version: 3.7
  install:
    - method: pip
      path: .
      extra_requirements:
        - dev
241  CHANGELOG.md  (new file)
@@ -0,0 +1,241 @@
# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

- Versions suffixed with `b*` are in `beta` and can be installed with `pip install --pre betterproto`.

## [2.0.0b7] - 2024-08-11

- **Breaking**: Support `Pydantic` v2 and drop support for v1 [#588](https://github.com/danielgtaylor/python-betterproto/pull/588)
- **Breaking**: Attempting to access an unset `oneof` field now raises an `AttributeError`.
  To see how to access `oneof` fields now, refer to [#558](https://github.com/danielgtaylor/python-betterproto/pull/558)
  and [README.md](https://github.com/danielgtaylor/python-betterproto#one-of-support).
- **Breaking**: A custom `Enum` has been implemented to match the behaviour of being an open set. Any checks for `isinstance(enum_member, enum.Enum)` and `issubclass(EnumSubclass, enum.Enum)` will now return `False`. This change also has the side effect of
  preventing any passthrough of `Enum` members (i.e. `Foo.RED.GREEN` doesn't work any more). See [#293](https://github.com/danielgtaylor/python-betterproto/pull/293) for more info; this fixed many bugs related to `Enum` handling.

- Add support for `pickle` methods [#535](https://github.com/danielgtaylor/python-betterproto/pull/535)
- Add support for `Struct` and `Value` types [#551](https://github.com/danielgtaylor/python-betterproto/pull/551)
- Add support for the [`Rich` package](https://rich.readthedocs.io/en/latest/index.html) for pretty printing [#508](https://github.com/danielgtaylor/python-betterproto/pull/508)
- Improve support for streaming messages [#518](https://github.com/danielgtaylor/python-betterproto/pull/518) [#529](https://github.com/danielgtaylor/python-betterproto/pull/529)
- Improve performance of serializing / de-serializing messages [#545](https://github.com/danielgtaylor/python-betterproto/pull/545)
- Improve the handling of message name collisions with typing by allowing the method / type of imports to be configured.
  Refer to [#582](https://github.com/danielgtaylor/python-betterproto/pull/582)
  and [README.md](https://github.com/danielgtaylor/python-betterproto#configuration-typing-imports).
- Fix roundtrip parsing of `datetime`s [#534](https://github.com/danielgtaylor/python-betterproto/pull/534)
- Fix accessing unset optional fields [#523](https://github.com/danielgtaylor/python-betterproto/pull/523)
- Fix `Message` equality comparison [#513](https://github.com/danielgtaylor/python-betterproto/pull/513)
- Fix behaviour with long comment messages [#532](https://github.com/danielgtaylor/python-betterproto/pull/532)
- Add a warning when calling a deprecated message [#596](https://github.com/danielgtaylor/python-betterproto/pull/596)

## [2.0.0b6] - 2023-06-25

- **Breaking**: the minimum Python version has been bumped to `3.7` [#444](https://github.com/danielgtaylor/python-betterproto/pull/444)

- Support generating [Pydantic dataclasses](https://docs.pydantic.dev/latest/usage/dataclasses).
  Pydantic dataclasses are a drop-in replacement for dataclasses in the standard library that additionally support validation.
  Pass `--python_betterproto_opt=pydantic_dataclasses` to enable this feature.
  Refer to [#406](https://github.com/danielgtaylor/python-betterproto/pull/406)
  and [README.md](https://github.com/danielgtaylor/python-betterproto#generating-pydantic-models) for more information.

- Added support for the `@generated` marker [#382](https://github.com/danielgtaylor/python-betterproto/pull/382)
- Pull down the `include_default_values` argument to `to_json()` [#405](https://github.com/danielgtaylor/python-betterproto/pull/405)
- Pythonize input_type name in py_input_message [#436](https://github.com/danielgtaylor/python-betterproto/pull/436)
- Widen `from_dict()` to accept any `Mapping` [#451](https://github.com/danielgtaylor/python-betterproto/pull/451)
- Replace `pkg_resources` with `importlib` [#462](https://github.com/danielgtaylor/python-betterproto/pull/462)

- Fix typechecker compatibility checks in server streaming methods [#413](https://github.com/danielgtaylor/python-betterproto/pull/413)
- Fix "empty-valued" repeated fields not being serialised [#417](https://github.com/danielgtaylor/python-betterproto/pull/417)
- Fix `dict` encoding for timezone-aware `datetimes` [#468](https://github.com/danielgtaylor/python-betterproto/pull/468)
- Fix `to_pydict()` serialization for optional fields [#495](https://github.com/danielgtaylor/python-betterproto/pull/495)
- Handle empty value objects properly [#481](https://github.com/danielgtaylor/python-betterproto/pull/481)

## [2.0.0b5] - 2022-08-01

- **Breaking**: Client and Service Stubs no longer pack and unpack the input message fields as parameters [#331](https://github.com/danielgtaylor/python-betterproto/pull/311)

  Update your client calls and server handlers as follows:

  Clients before:

  ```py
  response = await service.echo(value="hello", extra_times=1)
  ```

  Clients after:

  ```py
  response = await service.echo(EchoRequest(value="hello", extra_times=1))
  ```

  Servers before:

  ```py
  async def echo(self, value: str, extra_times: int) -> EchoResponse: ...
  ```

  Servers after:

  ```py
  async def echo(self, echo_request: EchoRequest) -> EchoResponse:
      # Use echo_request.value
      # Use echo_request.extra_times
      ...
  ```

- Add `to/from_pydict()` for `Message` [#203](https://github.com/danielgtaylor/python-betterproto/pull/203)
- Format field comments also as docstrings [#304](https://github.com/danielgtaylor/python-betterproto/pull/304)
- Implement `__deepcopy__` for `Message` [#339](https://github.com/danielgtaylor/python-betterproto/pull/339)
- Run isort on compiled code [#355](https://github.com/danielgtaylor/python-betterproto/pull/355)
- Expose timeout, deadline and metadata parameters from grpclib [#352](https://github.com/danielgtaylor/python-betterproto/pull/352)
- Make `Message.__getattribute__` invisible to type checkers [#359](https://github.com/danielgtaylor/python-betterproto/pull/359)

- Fix map field edge-case [#254](https://github.com/danielgtaylor/python-betterproto/pull/254)
- Fix message text in `NotImplementedError` [#325](https://github.com/danielgtaylor/python-betterproto/pull/325)
- Fix `Message.from_dict()` in the presence of optional datetime fields [#329](https://github.com/danielgtaylor/python-betterproto/pull/329)
- Support Jinja2 3.0 to prevent version conflicts [#330](https://github.com/danielgtaylor/python-betterproto/pull/330)
- Fix overwriting top level `__init__.py` [#337](https://github.com/danielgtaylor/python-betterproto/pull/337)
- Remove deprecation warnings when fields are initialised with non-default values [#348](https://github.com/danielgtaylor/python-betterproto/pull/348)
- Ensure nested class names are converted to PascalCase [#353](https://github.com/danielgtaylor/python-betterproto/pull/353)
- Fix `Message.to_dict()` mutating the underlying Message [#378](https://github.com/danielgtaylor/python-betterproto/pull/378)
- Fix some parameters being missing from services [#381](https://github.com/danielgtaylor/python-betterproto/pull/381)

## [2.0.0b4] - 2022-01-03

- **Breaking**: the minimum Python version has been bumped to `3.6.2`

- Always add `AsyncIterator` to imports if there are services [#264](https://github.com/danielgtaylor/python-betterproto/pull/264)
- Allow parsing of messages from `ByteStrings` [#266](https://github.com/danielgtaylor/python-betterproto/pull/266)
- Add support for proto3 optional [#281](https://github.com/danielgtaylor/python-betterproto/pull/281)

- Fix compilation of fields with names identical to builtin types [#294](https://github.com/danielgtaylor/python-betterproto/pull/294)
- Fix default values for enum service args [#299](https://github.com/danielgtaylor/python-betterproto/pull/299)

## [2.0.0b3] - 2021-04-07

- Generate grpclib service stubs [#170](https://github.com/danielgtaylor/python-betterproto/pull/170)
- Add \_\_version\_\_ attribute to package [#134](https://github.com/danielgtaylor/python-betterproto/pull/134)
- Use betterproto generated messages in the plugin [#161](https://github.com/danielgtaylor/python-betterproto/pull/161)
- Sort the list of sources in generated file headers [#164](https://github.com/danielgtaylor/python-betterproto/pull/164)
- Micro-optimization: use tuples instead of lists for conditions [#228](https://github.com/danielgtaylor/python-betterproto/pull/228)
- Improve datestring parsing [#213](https://github.com/danielgtaylor/python-betterproto/pull/213)

- Fix serialization of repeated fields with empty messages [#180](https://github.com/danielgtaylor/python-betterproto/pull/180)
- Fix compilation of fields named 'bytes' or 'str' [#226](https://github.com/danielgtaylor/python-betterproto/pull/226)
- Fix json serialization of infinite and nan floats/doubles [#215](https://github.com/danielgtaylor/python-betterproto/pull/215)
- Fix template bug resulting in empty \_\_post_init\_\_ methods [#162](https://github.com/danielgtaylor/python-betterproto/pull/162)
- Fix serialization of zero-value messages in a oneof group [#176](https://github.com/danielgtaylor/python-betterproto/pull/176)
- Fix missing typing and datetime imports [#183](https://github.com/danielgtaylor/python-betterproto/pull/183)
- Fix code generation for empty services [#222](https://github.com/danielgtaylor/python-betterproto/pull/222)
- Fix Message.to_dict and from_dict handling of repeated timestamps and durations [#211](https://github.com/danielgtaylor/python-betterproto/pull/211)
- Fix incorrect routes in generated client when service is not in a package [#177](https://github.com/danielgtaylor/python-betterproto/pull/177)

## [2.0.0b2] - 2020-11-24

- Add support for deprecated message and fields [#126](https://github.com/danielgtaylor/python-betterproto/pull/126)
- Add support for recursive messages [#130](https://github.com/danielgtaylor/python-betterproto/pull/130)
- Add support for `bool(Message)` [#142](https://github.com/danielgtaylor/python-betterproto/pull/142)
- Improve support for Python 3.9 [#140](https://github.com/danielgtaylor/python-betterproto/pull/140) [#173](https://github.com/danielgtaylor/python-betterproto/pull/173)
- Improve keyword sanitisation for generated code [#137](https://github.com/danielgtaylor/python-betterproto/pull/137)

- Fix missing serialized_on_wire when message contains only lists [#81](https://github.com/danielgtaylor/python-betterproto/pull/81)
- Fix circular dependencies [#100](https://github.com/danielgtaylor/python-betterproto/pull/100)
- Fix to_dict enum fields when numbering is not consecutive [#102](https://github.com/danielgtaylor/python-betterproto/pull/102)
- Fix argument generation for stub methods when using `import` with proto definition [#103](https://github.com/danielgtaylor/python-betterproto/pull/103)
- Fix missing async/await keywords when casing [#104](https://github.com/danielgtaylor/python-betterproto/pull/104)
- Fix mutable default arguments in generated code [#105](https://github.com/danielgtaylor/python-betterproto/pull/105)
- Fix serialisation of default values in oneofs when calling to_dict() or to_json() [#110](https://github.com/danielgtaylor/python-betterproto/pull/110)
- Fix static type checking for grpclib client [#124](https://github.com/danielgtaylor/python-betterproto/pull/124)
- Fix python3.6 compatibility issue with dataclasses [#124](https://github.com/danielgtaylor/python-betterproto/pull/124)
- Fix handling of trailer-only responses [#127](https://github.com/danielgtaylor/python-betterproto/pull/127)

- Refactor plugin.py to use modular dataclasses in tree-like structure to represent parsed data [#121](https://github.com/danielgtaylor/python-betterproto/pull/121)
- Refactor template compilation logic [#136](https://github.com/danielgtaylor/python-betterproto/pull/136)

- Replace use of platform provided protoc with development dependency on grpcio-tools [#107](https://github.com/danielgtaylor/python-betterproto/pull/107)
- Switch to using `poe` from `make` to manage project development tasks [#118](https://github.com/danielgtaylor/python-betterproto/pull/118)
- Improve CI platform coverage [#128](https://github.com/danielgtaylor/python-betterproto/pull/128)

## [2.0.0b1] - 2020-07-04

[Upgrade Guide](./docs/upgrading.md)

> Several bugfixes and improvements required or will require small breaking changes, necessitating a new version.
> `2.0.0` will be released once the interface is stable.

- Add support for gRPC and **stream-stream** [#83](https://github.com/danielgtaylor/python-betterproto/pull/83)
- Switch from `pipenv` to `poetry` for development [#75](https://github.com/danielgtaylor/python-betterproto/pull/75)
- Fix two packages with the same name suffix causing a naming conflict [#25](https://github.com/danielgtaylor/python-betterproto/issues/25)

- Fix import of child package from root [#57](https://github.com/danielgtaylor/python-betterproto/issues/57)
- Fix import of child package from package [#58](https://github.com/danielgtaylor/python-betterproto/issues/58)
- Fix import of parent package from child package [#59](https://github.com/danielgtaylor/python-betterproto/issues/59)
- Fix import of root package from child package [#60](https://github.com/danielgtaylor/python-betterproto/issues/60)
- Fix import of root package from root [#61](https://github.com/danielgtaylor/python-betterproto/issues/61)

- Fix ALL_CAPS message fields being parsed incorrectly [#11](https://github.com/danielgtaylor/python-betterproto/issues/11)

## [1.2.5] - 2020-04-27

- Add .j2 suffix to python template names to avoid confusing certain build tools [#72](https://github.com/danielgtaylor/python-betterproto/pull/72)

## [1.2.4] - 2020-04-26

- Enforce utf-8 for reading the readme in setup.py [#67](https://github.com/danielgtaylor/python-betterproto/pull/67)
- Only import types from grpclib when type checking [#52](https://github.com/danielgtaylor/python-betterproto/pull/52)
- Improve performance of serialize/deserialize by caching type information of fields in class [#46](https://github.com/danielgtaylor/python-betterproto/pull/46)
- Support using Google's wrapper types as RPC output values [#40](https://github.com/danielgtaylor/python-betterproto/pull/40)
- Fix issue where protoc did not recognize plugin.py as a win32 application [#38](https://github.com/danielgtaylor/python-betterproto/pull/38)
- Fix services using non-pythonified field names [#34](https://github.com/danielgtaylor/python-betterproto/pull/34)
- Add ability to provide metadata, timeout & deadline args to requests [#32](https://github.com/danielgtaylor/python-betterproto/pull/32)

## [1.2.3] - 2020-04-15

- Exclude empty lists from `to_dict` by default [#16](https://github.com/danielgtaylor/python-betterproto/pull/16)
- Add `include_default_values` parameter for `to_dict` [#12](https://github.com/danielgtaylor/python-betterproto/pull/12)
- Fix class names being prepended with duplicates when using protocol buffers that are nested more than once [#21](https://github.com/danielgtaylor/python-betterproto/pull/21)
- Add support for python 3.6 [#30](https://github.com/danielgtaylor/python-betterproto/pull/30)

## [1.2.2] - 2020-01-09

- Mention lack of Proto 2 support in README.
- Fix serialization of constructor parameters [#10](https://github.com/danielgtaylor/python-betterproto/pull/10)
- Fix `casing` parameter propagation [#7](https://github.com/danielgtaylor/python-betterproto/pull/7)

## [1.2.1] - 2019-10-29

- Fix comment indentation bug in rendered gRPC methods.

## [1.2.0] - 2019-10-28

- Generated code output auto-formatting via [Black](https://github.com/psf/black)
- Simplified gRPC helper functions

## [1.1.0] - 2019-10-27

- Better JSON casing support
- Handle field names which clash with Python reserved words
- Better handling of default values from type introspection
- Support for Google Duration & Timestamp types
- Support for Google wrapper types
- Documentation updates

## [1.0.1] - 2019-10-22

- README to the PyPI details page

## [1.0.0] - 2019-10-22

- Initial release

[1.2.5]: https://github.com/danielgtaylor/python-betterproto/compare/v1.2.4...v1.2.5
[1.2.4]: https://github.com/danielgtaylor/python-betterproto/compare/v1.2.3...v1.2.4
[1.2.3]: https://github.com/danielgtaylor/python-betterproto/compare/v1.2.2...v1.2.3
[1.2.2]: https://github.com/danielgtaylor/python-betterproto/compare/v1.2.1...v1.2.2
[1.2.1]: https://github.com/danielgtaylor/python-betterproto/compare/v1.2.0...v1.2.1
[1.2.0]: https://github.com/danielgtaylor/python-betterproto/compare/v1.1.0...v1.2.0
[1.1.0]: https://github.com/danielgtaylor/python-betterproto/compare/v1.0.1...v1.1.0
[1.0.1]: https://github.com/danielgtaylor/python-betterproto/compare/v1.0.0...v1.0.1
[1.0.0]: https://github.com/danielgtaylor/python-betterproto/releases/tag/v1.0.0
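The 2.0.0b7 `oneof` change above is easier to see in code. A minimal sketch, assuming a hypothetical hand-written `Test` message with a oneof group named `group`; the field-helper signatures and `betterproto.which_one_of()` accessor are taken from the README section the changelog links to:

```py
from dataclasses import dataclass

import betterproto


@dataclass
class Test(betterproto.Message):
    # Hypothetical message: two fields sharing the oneof group "group".
    x: int = betterproto.int32_field(1, group="group")
    y: str = betterproto.string_field(2, group="group")


test = Test(x=12)
print(betterproto.which_one_of(test, "group"))  # ('x', 12)

test.y = "hello"  # setting y clears x within the group
print(betterproto.which_one_of(test, "group"))  # ('y', 'hello')
# Under 2.0.0b7, `test.x` would now raise AttributeError instead of
# silently returning a default, since "x" is no longer the set member.
```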
21  LICENSE.md  (new file)
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2023 Daniel G. Taylor

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
24  Pipfile  (deleted)
@@ -1,24 +0,0 @@
[[source]]
name = "pypi"
url = "https://pypi.org/simple"
verify_ssl = true

[dev-packages]
flake8 = "*"
mypy = "*"
isort = "*"
pytest = "*"
rope = "*"

[packages]
protobuf = "*"
jinja2 = "*"
grpclib = "*"

[requires]
python_version = "3.7"

[scripts]
plugin = "protoc --plugin=protoc-gen-custom=betterproto/plugin.py --custom_out=output"
generate = "python betterproto/tests/generate.py"
test = "pytest ./betterproto/tests"
344  Pipfile.lock  (generated, deleted)
@@ -1,344 +0,0 @@
{
    "_meta": {
        "hash": {
            "sha256": "f698150037f2a8ac554e4d37ecd4619ba35d1aa570f5b641d048ec9c6b23eb40"
        },
        "pipfile-spec": 6,
        "requires": {
            "python_version": "3.7"
        },
        "sources": [
            {
                "name": "pypi",
                "url": "https://pypi.org/simple",
                "verify_ssl": true
            }
        ]
    },
    "default": {
        "grpclib": {
            "hashes": [
                "sha256:d19e2ea87cb073e5b0825dfee15336fd2b1c09278d271816e04c90faddc107ea"
            ],
            "index": "pypi",
            "version": "==0.3.0"
        },
        "h2": {
            "hashes": [
                "sha256:ac377fcf586314ef3177bfd90c12c7826ab0840edeb03f0f24f511858326049e",
                "sha256:b8a32bd282594424c0ac55845377eea13fa54fe4a8db012f3a198ed923dc3ab4"
            ],
            "version": "==3.1.1"
        },
        "hpack": {
            "hashes": [
                "sha256:0edd79eda27a53ba5be2dfabf3b15780928a0dff6eb0c60a3d6767720e970c89",
                "sha256:8eec9c1f4bfae3408a3f30500261f7e6a65912dc138526ea054f9ad98892e9d2"
            ],
            "version": "==3.0.0"
        },
        "hyperframe": {
            "hashes": [
                "sha256:5187962cb16dcc078f23cb5a4b110098d546c3f41ff2d4038a9896893bbd0b40",
                "sha256:a9f5c17f2cc3c719b917c4f33ed1c61bd1f8dfac4b1bd23b7c80b3400971b41f"
            ],
            "version": "==5.2.0"
        },
        "jinja2": {
            "hashes": [
                "sha256:74320bb91f31270f9551d46522e33af46a80c3d619f4a4bf42b3164d30b5911f",
                "sha256:9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de"
            ],
            "index": "pypi",
            "version": "==2.10.3"
        },
        "markupsafe": {
            "hashes": [
                "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473",
                "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161",
                "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235",
                "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5",
                "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff",
                "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b",
                "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1",
                "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e",
                "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183",
                "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66",
                "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1",
                "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1",
                "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e",
                "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b",
                "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905",
                "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735",
                "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d",
                "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e",
                "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d",
                "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c",
                "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21",
                "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2",
                "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5",
                "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b",
                "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6",
                "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f",
                "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f",
                "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"
            ],
            "version": "==1.1.1"
        },
        "multidict": {
            "hashes": [
                "sha256:024b8129695a952ebd93373e45b5d341dbb87c17ce49637b34000093f243dd4f",
                "sha256:041e9442b11409be5e4fc8b6a97e4bcead758ab1e11768d1e69160bdde18acc3",
                "sha256:045b4dd0e5f6121e6f314d81759abd2c257db4634260abcfe0d3f7083c4908ef",
                "sha256:047c0a04e382ef8bd74b0de01407e8d8632d7d1b4db6f2561106af812a68741b",
                "sha256:068167c2d7bbeebd359665ac4fff756be5ffac9cda02375b5c5a7c4777038e73",
                "sha256:148ff60e0fffa2f5fad2eb25aae7bef23d8f3b8bdaf947a65cdbe84a978092bc",
                "sha256:1d1c77013a259971a72ddaa83b9f42c80a93ff12df6a4723be99d858fa30bee3",
                "sha256:1d48bc124a6b7a55006d97917f695effa9725d05abe8ee78fd60d6588b8344cd",
                "sha256:31dfa2fc323097f8ad7acd41aa38d7c614dd1960ac6681745b6da124093dc351",
                "sha256:34f82db7f80c49f38b032c5abb605c458bac997a6c3142e0d6c130be6fb2b941",
                "sha256:3d5dd8e5998fb4ace04789d1d008e2bb532de501218519d70bb672c4c5a2fc5d",
                "sha256:4a6ae52bd3ee41ee0f3acf4c60ceb3f44e0e3bc52ab7da1c2b2aa6703363a3d1",
                "sha256:4b02a3b2a2f01d0490dd39321c74273fed0568568ea0e7ea23e02bd1fb10a10b",
                "sha256:4b843f8e1dd6a3195679d9838eb4670222e8b8d01bc36c9894d6c3538316fa0a",
                "sha256:5de53a28f40ef3c4fd57aeab6b590c2c663de87a5af76136ced519923d3efbb3",
                "sha256:61b2b33ede821b94fa99ce0b09c9ece049c7067a33b279f343adfe35108a4ea7",
                "sha256:6a3a9b0f45fd75dc05d8e93dc21b18fc1670135ec9544d1ad4acbcf6b86781d0",
                "sha256:76ad8e4c69dadbb31bad17c16baee61c0d1a4a73bed2590b741b2e1a46d3edd0",
                "sha256:7ba19b777dc00194d1b473180d4ca89a054dd18de27d0ee2e42a103ec9b7d014",
                "sha256:7c1b7eab7a49aa96f3db1f716f0113a8a2e93c7375dd3d5d21c4941f1405c9c5",
                "sha256:7fc0eee3046041387cbace9314926aa48b681202f8897f8bff3809967a049036",
                "sha256:8ccd1c5fff1aa1427100ce188557fc31f1e0a383ad8ec42c559aabd4ff08802d",
                "sha256:8e08dd76de80539d613654915a2f5196dbccc67448df291e69a88712ea21e24a",
                "sha256:c18498c50c59263841862ea0501da9f2b3659c00db54abfbf823a80787fde8ce",
                "sha256:c49db89d602c24928e68c0d510f4fcf8989d77defd01c973d6cbe27e684833b1",
                "sha256:ce20044d0317649ddbb4e54dab3c1bcc7483c78c27d3f58ab3d0c7e6bc60d26a",
                "sha256:d1071414dd06ca2eafa90c85a079169bfeb0e5f57fd0b45d44c092546fcd6fd9",
                "sha256:d3be11ac43ab1a3e979dac80843b42226d5d3cccd3986f2e03152720a4297cd7",
                "sha256:db603a1c235d110c860d5f39988ebc8218ee028f07a7cbc056ba6424372ca31b"
            ],
            "version": "==4.5.2"
        },
        "protobuf": {
            "hashes": [
                "sha256:125713564d8cfed7610e52444c9769b8dcb0b55e25cc7841f2290ee7bc86636f",
                "sha256:1accdb7a47e51503be64d9a57543964ba674edac103215576399d2d0e34eac77",
                "sha256:27003d12d4f68e3cbea9eb67427cab3bfddd47ff90670cb367fcd7a3a89b9657",
                "sha256:3264f3c431a631b0b31e9db2ae8c927b79fc1a7b1b06b31e8e5bcf2af91fe896",
                "sha256:3c5ab0f5c71ca5af27143e60613729e3488bb45f6d3f143dc918a20af8bab0bf",
                "sha256:45dcf8758873e3f69feab075e5f3177270739f146255225474ee0b90429adef6",
                "sha256:56a77d61a91186cc5676d8e11b36a5feb513873e4ae88d2ee5cf530d52bbcd3b",
                "sha256:5984e4947bbcef5bd849d6244aec507d31786f2dd3344139adc1489fb403b300",
                "sha256:6b0441da73796dd00821763bb4119674eaf252776beb50ae3883bed179a60b2a",
                "sha256:6f6677c5ade94d4fe75a912926d6796d5c71a2a90c2aeefe0d6f211d75c74789",
                "sha256:84a825a9418d7196e2acc48f8746cf1ee75877ed2f30433ab92a133f3eaf8fbe",
                "sha256:b842c34fe043ccf78b4a6cf1019d7b80113707d68c88842d061fa2b8fb6ddedc",
                "sha256:ca33d2f09dae149a1dcf942d2d825ebb06343b77b437198c9e2ef115cf5d5bc1",
                "sha256:db83b5c12c0cd30150bb568e6feb2435c49ce4e68fe2d7b903113f0e221e58fe",
                "sha256:f50f3b1c5c1c1334ca7ce9cad5992f098f460ffd6388a3cabad10b66c2006b09",
                "sha256:f99f127909731cafb841c52f9216e447d3e4afb99b17bebfad327a75aee206de"
            ],
            "index": "pypi",
            "version": "==3.10.0"
        },
        "six": {
            "hashes": [
                "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
                "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
            ],
            "version": "==1.12.0"
        }
    },
    "develop": {
        "atomicwrites": {
            "hashes": [
                "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
                "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
            ],
            "version": "==1.3.0"
        },
        "attrs": {
            "hashes": [
                "sha256:ec20e7a4825331c1b5ebf261d111e16fa9612c1f7a5e1f884f12bd53a664dfd2",
                "sha256:f913492e1663d3c36f502e5e9ba6cd13cf19d7fab50aa13239e420fef95e1396"
            ],
            "version": "==19.2.0"
        },
        "entrypoints": {
            "hashes": [
                "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
                "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
            ],
            "version": "==0.3"
        },
        "flake8": {
            "hashes": [
                "sha256:19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548",
                "sha256:8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696"
            ],
            "index": "pypi",
            "version": "==3.7.8"
        },
        "importlib-metadata": {
            "hashes": [
                "sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26",
                "sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af"
            ],
            "markers": "python_version < '3.8'",
            "version": "==0.23"
        },
        "isort": {
            "hashes": [
                "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
                "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
            ],
            "index": "pypi",
            "version": "==4.3.21"
        },
        "mccabe": {
            "hashes": [
                "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
                "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
            ],
            "version": "==0.6.1"
        },
        "more-itertools": {
            "hashes": [
                "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832",
                "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4"
            ],
            "version": "==7.2.0"
        },
        "mypy": {
            "hashes": [
                "sha256:1d98fd818ad3128a5408148c9e4a5edce6ed6b58cc314283e631dd5d9216527b",
                "sha256:22ee018e8fc212fe601aba65d3699689dd29a26410ef0d2cc1943de7bec7e3ac",
                "sha256:3a24f80776edc706ec8d05329e854d5b9e464cd332e25cde10c8da2da0a0db6c",
                "sha256:42a78944e80770f21609f504ca6c8173f7768043205b5ac51c9144e057dcf879",
                "sha256:4b2b20106973548975f0c0b1112eceb4d77ed0cafe0a231a1318f3b3a22fc795",
                "sha256:591a9625b4d285f3ba69f541c84c0ad9e7bffa7794da3fa0585ef13cf95cb021",
                "sha256:5b4b70da3d8bae73b908a90bb2c387b977e59d484d22c604a2131f6f4397c1a3",
                "sha256:84edda1ffeda0941b2ab38ecf49302326df79947fa33d98cdcfbf8ca9cf0bb23",
                "sha256:b2b83d29babd61b876ae375786960a5374bba0e4aba3c293328ca6ca5dc448dd",
                "sha256:cc4502f84c37223a1a5ab700649b5ab1b5e4d2bf2d426907161f20672a21930b",
                "sha256:e29e24dd6e7f39f200a5bb55dcaa645d38a397dd5a6674f6042ef02df5795046"
            ],
            "index": "pypi",
            "version": "==0.730"
        },
        "mypy-extensions": {
            "hashes": [
                "sha256:a161e3b917053de87dbe469987e173e49fb454eca10ef28b48b384538cc11458"
            ],
            "version": "==0.4.2"
        },
        "packaging": {
            "hashes": [
                "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47",
                "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108"
            ],
            "version": "==19.2"
        },
        "pluggy": {
            "hashes": [
                "sha256:0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6",
                "sha256:fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34"
            ],
            "version": "==0.13.0"
        },
        "py": {
            "hashes": [
                "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
                "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
            ],
            "version": "==1.8.0"
        },
        "pycodestyle": {
            "hashes": [
                "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
                "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
            ],
            "version": "==2.5.0"
        },
        "pyflakes": {
            "hashes": [
                "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
                "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
            ],
            "version": "==2.1.1"
        },
        "pyparsing": {
            "hashes": [
                "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80",
                "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"
            ],
            "version": "==2.4.2"
        },
        "pytest": {
            "hashes": [
                "sha256:7e4800063ccfc306a53c461442526c5571e1462f61583506ce97e4da6a1d88c8",
                "sha256:ca563435f4941d0cb34767301c27bc65c510cb82e90b9ecf9cb52dc2c63caaa0"
            ],
            "index": "pypi",
            "version": "==5.2.1"
|
|
||||||
},
|
|
||||||
"rope": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:6b728fdc3e98a83446c27a91fc5d56808a004f8beab7a31ab1d7224cecc7d969",
|
|
||||||
"sha256:c5c5a6a87f7b1a2095fb311135e2a3d1f194f5ecb96900fdd0a9100881f48aaf",
|
|
||||||
"sha256:f0dcf719b63200d492b85535ebe5ea9b29e0d0b8aebeb87fe03fc1a65924fdaf"
|
|
||||||
],
|
|
||||||
"index": "pypi",
|
|
||||||
"version": "==0.14.0"
|
|
||||||
},
|
|
||||||
"six": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
|
|
||||||
"sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
|
|
||||||
],
|
|
||||||
"version": "==1.12.0"
|
|
||||||
},
|
|
||||||
"typed-ast": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:18511a0b3e7922276346bcb47e2ef9f38fb90fd31cb9223eed42c85d1312344e",
|
|
||||||
"sha256:262c247a82d005e43b5b7f69aff746370538e176131c32dda9cb0f324d27141e",
|
|
||||||
"sha256:2b907eb046d049bcd9892e3076c7a6456c93a25bebfe554e931620c90e6a25b0",
|
|
||||||
"sha256:354c16e5babd09f5cb0ee000d54cfa38401d8b8891eefa878ac772f827181a3c",
|
|
||||||
"sha256:4e0b70c6fc4d010f8107726af5fd37921b666f5b31d9331f0bd24ad9a088e631",
|
|
||||||
"sha256:630968c5cdee51a11c05a30453f8cd65e0cc1d2ad0d9192819df9978984529f4",
|
|
||||||
"sha256:66480f95b8167c9c5c5c87f32cf437d585937970f3fc24386f313a4c97b44e34",
|
|
||||||
"sha256:71211d26ffd12d63a83e079ff258ac9d56a1376a25bc80b1cdcdf601b855b90b",
|
|
||||||
"sha256:95bd11af7eafc16e829af2d3df510cecfd4387f6453355188342c3e79a2ec87a",
|
|
||||||
"sha256:bc6c7d3fa1325a0c6613512a093bc2a2a15aeec350451cbdf9e1d4bffe3e3233",
|
|
||||||
"sha256:cc34a6f5b426748a507dd5d1de4c1978f2eb5626d51326e43280941206c209e1",
|
|
||||||
"sha256:d755f03c1e4a51e9b24d899561fec4ccaf51f210d52abdf8c07ee2849b212a36",
|
|
||||||
"sha256:d7c45933b1bdfaf9f36c579671fec15d25b06c8398f113dab64c18ed1adda01d",
|
|
||||||
"sha256:d896919306dd0aa22d0132f62a1b78d11aaf4c9fc5b3410d3c666b818191630a",
|
|
||||||
"sha256:ffde2fbfad571af120fcbfbbc61c72469e72f550d676c3342492a9dfdefb8f12"
|
|
||||||
],
|
|
||||||
"version": "==1.4.0"
|
|
||||||
},
|
|
||||||
"typing-extensions": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:2ed632b30bb54fc3941c382decfd0ee4148f5c591651c9272473fea2c6397d95",
|
|
||||||
"sha256:b1edbbf0652660e32ae780ac9433f4231e7339c7f9a8057d0f042fcbcea49b87",
|
|
||||||
"sha256:d8179012ec2c620d3791ca6fe2bf7979d979acdbef1fca0bc56b37411db682ed"
|
|
||||||
],
|
|
||||||
"version": "==3.7.4"
|
|
||||||
},
|
|
||||||
"wcwidth": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
|
|
||||||
"sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
|
|
||||||
],
|
|
||||||
"version": "==0.1.7"
|
|
||||||
},
|
|
||||||
"zipp": {
|
|
||||||
"hashes": [
|
|
||||||
"sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e",
|
|
||||||
"sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"
|
|
||||||
],
|
|
||||||
"version": "==0.6.0"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
README.md (382 changed lines)

@@ -1,8 +1,10 @@
 # Better Protobuf / gRPC Support for Python
 
 [CI badge]
 
-This project aims to provide an improved experience when using Protobuf / gRPC in a modern Python environment by making use of modern language features and generating readable, understandable, idiomatic Python code. It will not support legacy features or environments. The following are supported:
+> :octocat: If you're reading this on github, please be aware that it might mention unreleased features! See the latest released README on [pypi](https://pypi.org/project/betterproto/).
+
+This project aims to provide an improved experience when using Protobuf / gRPC in a modern Python environment by making use of modern language features and generating readable, understandable, idiomatic Python code. It will not support legacy features or environments (e.g. Protobuf 2). The following are supported:
 
 - Protobuf 3 & gRPC code generation
 - Both binary & JSON serialization is built-in
@@ -10,8 +12,10 @@ This project aims to provide an improved experience when using Protobuf / gRPC i
 - Enums
 - Dataclasses
 - `async`/`await`
+- Timezone-aware `datetime` and `timedelta` objects
 - Relative imports
 - Mypy type checking
+- [Pydantic Models](https://docs.pydantic.dev/) generation (see #generating-pydantic-models)
 
 This project is heavily inspired by, and borrows functionality from:
 
@@ -34,22 +38,31 @@ This project exists because I am unhappy with the state of the official Google p
 - Much code looks like C++ or Java ported 1:1 to Python
 - Capitalized function names like `HasField()` and `SerializeToString()`
 - Uses `SerializeToString()` rather than the built-in `__bytes__()`
+- Special wrapped types don't use Python's `None`
+- Timestamp/duration types don't use Python's built-in `datetime` module
 
 This project is a reimplementation from the ground up focused on idiomatic modern Python to help fix some of the above. While it may not be a 1:1 drop-in replacement due to changed method names and call patterns, the wire format is identical.
 
-## Installation & Getting Started
+## Installation
 
 First, install the package. Note that the `[compiler]` feature flag tells it to install extra dependencies only needed by the `protoc` plugin:
 
 ```sh
 # Install both the library and compiler
-$ pip install betterproto[compiler]
+pip install "betterproto[compiler]"
 
 # Install just the library (to use the generated code output)
-$ pip install betterproto
+pip install betterproto
 ```
 
-Now, given you installed the compiler and have a proto file, e.g `example.proto`:
+*Betterproto* is under active development. To install the latest beta version, use `pip install --pre betterproto`.
+
+## Getting Started
+
+### Compiling proto files
+
+Given you installed the compiler and have a proto file, e.g `example.proto`:
 
 ```protobuf
 syntax = "proto3";
@@ -62,17 +75,25 @@ message Greeting {
 }
 ```
 
-You can run the following:
+You can run the following to invoke protoc directly:
 
 ```sh
-$ protoc -I . --python_betterproto_out=. example.proto
+mkdir lib
+protoc -I . --python_betterproto_out=lib example.proto
 ```
 
-This will generate `hello.py` which looks like:
+or run the following to invoke protoc via grpcio-tools:
 
-```py
+```sh
+pip install grpcio-tools
+python -m grpc_tools.protoc -I . --python_betterproto_out=lib example.proto
+```
+
+This will generate `lib/hello/__init__.py` which looks like:
+
+```python
 # Generated by the protocol buffer compiler. DO NOT EDIT!
-# sources: hello.proto
+# sources: example.proto
 # plugin: python-betterproto
 from dataclasses import dataclass
 
@@ -80,7 +101,7 @@ import betterproto
 
 
 @dataclass
-class Hello(betterproto.Message):
+class Greeting(betterproto.Message):
     """Greeting represents a message you can tell a user."""
 
     message: str = betterproto.string_field(1)
@@ -88,23 +109,23 @@ class Hello(betterproto.Message):
 
 Now you can use it!
 
-```py
->>> from hello import Hello
->>> test = Hello()
+```python
+>>> from lib.hello import Greeting
+>>> test = Greeting()
 >>> test
-Hello(message='')
+Greeting(message='')
 
 >>> test.message = "Hey!"
 >>> test
-Hello(message="Hey!")
+Greeting(message="Hey!")
 
 >>> serialized = bytes(test)
 >>> serialized
 b'\n\x04Hey!'
 
->>> another = Hello().parse(serialized)
+>>> another = Greeting().parse(serialized)
 >>> another
-Hello(message="Hey!")
+Greeting(message="Hey!")
 
 >>> another.to_dict()
 {"message": "Hey!"}
@@ -116,7 +137,7 @@ Hello(message="Hey!")
 
 The generated Protobuf `Message` classes are compatible with [grpclib](https://github.com/vmagamedov/grpclib) so you are free to use it if you like. That said, this project also includes support for async gRPC stub generation with better static type checking and code completion support. It is enabled by default.
 
-Given an example like:
+Given an example service definition:
 
 ```protobuf
 syntax = "proto3";
@@ -143,22 +164,75 @@ service Echo {
 }
 ```
 
-You can use it like so (enable async in the interactive shell first):
-
-```py
->>> import echo
->>> from grpclib.client import Channel
-
->>> channel = Channel(host="127.0.0.1", port=1234)
->>> service = echo.EchoStub(channel)
->>> await service.echo(value="hello", extra_times=1)
-EchoResponse(values=["hello", "hello"])
-
->>> async for response in service.echo_stream(value="hello", extra_times=1)
-    print(response)
-
-EchoStreamResponse(value="hello")
-EchoStreamResponse(value="hello")
+Generate echo proto file:
+
+```
+python -m grpc_tools.protoc -I . --python_betterproto_out=. echo.proto
+```
+
+A client can be implemented as follows:
+
+```python
+import asyncio
+import echo
+
+from grpclib.client import Channel
+
+
+async def main():
+    channel = Channel(host="127.0.0.1", port=50051)
+    service = echo.EchoStub(channel)
+    response = await service.echo(echo.EchoRequest(value="hello", extra_times=1))
+    print(response)
+
+    async for response in service.echo_stream(echo.EchoRequest(value="hello", extra_times=1)):
+        print(response)
+
+    # don't forget to close the channel when done!
+    channel.close()
+
+
+if __name__ == "__main__":
+    loop = asyncio.get_event_loop()
+    loop.run_until_complete(main())
+```
+
+which would output
+
+```python
+EchoResponse(values=['hello', 'hello'])
+EchoStreamResponse(value='hello')
+EchoStreamResponse(value='hello')
+```
+
+This project also produces server-facing stubs that can be used to implement a Python
+gRPC server.
+To use them, simply subclass the base class in the generated files and override the
+service methods:
+
+```python
+import asyncio
+
+from echo import EchoBase, EchoRequest, EchoResponse, EchoStreamResponse
+from grpclib.server import Server
+from typing import AsyncIterator
+
+
+class EchoService(EchoBase):
+    async def echo(self, echo_request: "EchoRequest") -> "EchoResponse":
+        return EchoResponse([echo_request.value for _ in range(echo_request.extra_times)])
+
+    async def echo_stream(self, echo_request: "EchoRequest") -> AsyncIterator["EchoStreamResponse"]:
+        for _ in range(echo_request.extra_times):
+            yield EchoStreamResponse(echo_request.value)
+
+
+async def main():
+    server = Server([EchoService()])
+    await server.start("127.0.0.1", 50051)
+    await server.wait_closed()
+
+
+if __name__ == '__main__':
+    loop = asyncio.get_event_loop()
+    loop.run_until_complete(main())
 ```
 
 ### JSON
@@ -168,6 +242,12 @@ Both serializing and parsing are supported to/from JSON and Python dictionaries
 - Dicts: `Message().to_dict()`, `Message().from_dict(...)`
 - JSON: `Message().to_json()`, `Message().from_json(...)`
 
+For compatibility the default is to convert field names to `camelCase`. You can control this behavior by passing a casing value, e.g:
+
+```python
+MyMessage().to_dict(casing=betterproto.Casing.SNAKE)
+```
+
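As a quick illustration, a full JSON round trip looks like the sketch below (it reuses the generated `Greeting` message from the quickstart above and assumes the same module path):

```python
from lib.hello import Greeting

msg = Greeting(message="Hey!")

payload = msg.to_json()               # '{"message": "Hey!"}'
copy = Greeting().from_json(payload)  # parse back into a fresh instance
assert copy == msg
```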
 ### Determining if a message was sent
 
 Sometimes it is useful to be able to determine whether a message has been sent on the wire. This is how the Google wrapper types work to let you know whether a value is unset, set as the default (zero value), or set as something else, for example.
@@ -198,7 +278,22 @@ message Test {
 }
 ```
 
-You can use `betterproto.which_one_of(message, group_name)` to determine which of the fields was set. It returns a tuple of the field name and value, or a blank string and `None` if unset.
+On Python 3.10 and later, you can use a `match` statement to access the provided one-of field, which supports type-checking:
+
+```py
+test = Test()
+match test:
+    case Test(on=value):
+        print(value)  # value: bool
+    case Test(count=value):
+        print(value)  # value: int
+    case Test(name=value):
+        print(value)  # value: str
+    case _:
+        print("No value provided")
+```
+
+You can also use `betterproto.which_one_of(message, group_name)` to determine which of the fields was set. It returns a tuple of the field name and value, or a blank string and `None` if unset.
 
 ```py
 >>> test = Test()
@@ -213,17 +308,11 @@ You can use `betterproto.which_one_of(message, group_name)` to determine which o
 >>> test.count = 57
 >>> betterproto.which_one_of(test, "foo")
 ["count", 57]
->>> test.on
-False
 
 # Default (zero) values also work.
 >>> test.name = ""
 >>> betterproto.which_one_of(test, "foo")
 ["name", ""]
->>> test.count
-0
->>> test.on
-False
 ```
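For instance, a small dispatch helper built on that `(name, value)` tuple might look like this (the helper itself is hypothetical, not part of the library):

```python
import betterproto

def describe(msg: Test) -> str:
    name, value = betterproto.which_one_of(msg, "foo")
    if name == "":
        return "nothing set"      # no field in the group was assigned
    return f"{name} = {value!r}"  # e.g. "count = 57"
```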
 
 Again this is a little different than the official Google code generator:
@@ -238,36 +327,210 @@ Again this is a little different than the official Google code generator:
 ["foo", "foo's value"]
 ```
 
+### Well-Known Google Types
+
+Google provides several well-known message types like a timestamp, duration, and several wrappers used to provide optional zero value support. Each of these has a special JSON representation and is handled a little differently from normal messages. The Python mapping for these is as follows:
+
+| Google Message              | Python Type                              | Default                |
+| --------------------------- | ---------------------------------------- | ---------------------- |
+| `google.protobuf.duration`  | [`datetime.timedelta`][td]               | `0`                    |
+| `google.protobuf.timestamp` | Timezone-aware [`datetime.datetime`][dt] | `1970-01-01T00:00:00Z` |
+| `google.protobuf.*Value`    | `Optional[...]`                          | `None`                 |
+| `google.protobuf.*`         | `betterproto.lib.google.protobuf.*`      | `None`                 |
+
+[td]: https://docs.python.org/3/library/datetime.html#timedelta-objects
+[dt]: https://docs.python.org/3/library/datetime.html#datetime.datetime
+
+For the wrapper types, the Python type corresponds to the wrapped type, e.g. `google.protobuf.BoolValue` becomes `Optional[bool]` while `google.protobuf.Int32Value` becomes `Optional[int]`. All of the optional values default to `None`, so don't forget to check for that possible state. Given:
+
+```protobuf
+syntax = "proto3";
+
+import "google/protobuf/duration.proto";
+import "google/protobuf/timestamp.proto";
+import "google/protobuf/wrappers.proto";
+
+message Test {
+    google.protobuf.BoolValue maybe = 1;
+    google.protobuf.Timestamp ts = 2;
+    google.protobuf.Duration duration = 3;
+}
+```
+
+You can do stuff like:
+
+```py
+>>> t = Test().from_dict({"maybe": True, "ts": "2019-01-01T12:00:00Z", "duration": "1.200s"})
+>>> t
+Test(maybe=True, ts=datetime.datetime(2019, 1, 1, 12, 0, tzinfo=datetime.timezone.utc), duration=datetime.timedelta(seconds=1, microseconds=200000))
+
+>>> t.ts - t.duration
+datetime.datetime(2019, 1, 1, 11, 59, 58, 800000, tzinfo=datetime.timezone.utc)
+
+>>> t.ts.isoformat()
+'2019-01-01T12:00:00+00:00'
+
+>>> t.maybe = None
+>>> t.to_dict()
+{'ts': '2019-01-01T12:00:00Z', 'duration': '1.200s'}
+```
+
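Since these fields are plain `datetime`/`timedelta` values, they can also be assigned directly; a short sketch based on the mapping above and the same `Test` message:

```python
import datetime

t = Test()
t.ts = datetime.datetime(2019, 1, 1, 12, 0, tzinfo=datetime.timezone.utc)
t.duration = datetime.timedelta(seconds=90)
t.maybe = False  # a wrapped BoolValue: an explicit False is distinct from None
```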
+## Generating Pydantic Models
+
+You can use python-betterproto to generate pydantic based models, using
+pydantic dataclasses. This means the results of the protobuf unmarshalling will
+be type checked. The usage is the same, but you need to add a custom option
+when calling the protobuf compiler:
+
+```
+protoc -I . --python_betterproto_opt=pydantic_dataclasses --python_betterproto_out=lib example.proto
+```
+
+With the important change being `--python_betterproto_opt=pydantic_dataclasses`. This will
+swap the dataclass implementation from the builtin python dataclass to the
+pydantic dataclass. You must have pydantic as a dependency in your project for
+this to work.
+
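As a hedged sketch of what this buys you (assuming pydantic is installed and the `Greeting` message above was generated with `pydantic_dataclasses`; the module path is carried over from the earlier example), invalid field types are rejected at construction time:

```python
import pydantic

from lib.hello import Greeting  # module path assumed from the earlier example

Greeting(message="hi")  # fine
try:
    Greeting(message=object())  # not coercible to str
except pydantic.ValidationError as err:
    print(err)
```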
+## Configuration typing imports
+
+By default typing types will be imported directly from typing. This sometimes can lead to issues in generation if types that are being generated conflict with the name. In this case you can configure the way types are imported from 3 different options:
+
+### Direct
+
+```
+protoc -I . --python_betterproto_opt=typing.direct --python_betterproto_out=lib example.proto
+```
+
+This configuration is the default, and will import types as follows:
+
+```
+from typing import (
+    List,
+    Optional,
+    Union
+)
+...
+value: List[str] = []
+value2: Optional[str] = None
+value3: Union[str, int] = 1
+```
+
+### Root
+
+```
+protoc -I . --python_betterproto_opt=typing.root --python_betterproto_out=lib example.proto
+```
+
+This configuration loads the root typing module, and then accesses the types off of it directly:
+
+```
+import typing
+...
+value: typing.List[str] = []
+value2: typing.Optional[str] = None
+value3: typing.Union[str, int] = 1
+```
+
+### 310
+
+```
+protoc -I . --python_betterproto_opt=typing.310 --python_betterproto_out=lib example.proto
+```
+
+This configuration avoids loading typing altogether if possible and uses the Python 3.10 pattern:
+
+```
+...
+value: list[str] = []
+value2: str | None = None
+value3: str | int = 1
+```
+
 ## Development
 
-First, make sure you have Python 3.7+ and `pipenv` installed, along with the official [Protobuf Compiler](https://github.com/protocolbuffers/protobuf/releases) for your platform. Then:
+- _Join us on [Discord](https://discord.gg/DEVteTupPb)!_
+- _See how you can help → [Contributing](.github/CONTRIBUTING.md)_
+
+### Requirements
+
+- Python (3.7 or higher)
+
+- [poetry](https://python-poetry.org/docs/#installation)
+  *Needed to install dependencies in a virtual environment*
+
+- [poethepoet](https://github.com/nat-n/poethepoet) for running development tasks as defined in pyproject.toml
+  - Can be installed to your host environment via `pip install poethepoet` then executed as simple `poe`
+  - or run from the poetry venv as `poetry run poe`
+
+### Setup
 
 ```sh
 # Get set up with the virtual env & dependencies
-$ pipenv install --dev
+poetry install -E compiler
 
-# Link the local package
-$ pipenv shell
-$ pip install -e .
+# Activate the poetry environment
+poetry shell
 ```
+
+### Code style
+
+This project enforces [black](https://github.com/psf/black) python code formatting.
+
+Before committing changes run:
+
+```sh
+poe format
+```
+
+To avoid merge conflicts later, non-black formatted python code will fail in CI.
 
 ### Tests
 
 There are two types of tests:
 
-1. Manually-written tests for some behavior of the library
-2. Proto files and JSON inputs for automated tests
+1. Standard tests
+2. Custom tests
 
-For #2, you can add a new `*.proto` file into the `betterproto/tests` directory along with a sample `*.json` input and it will get automatically picked up.
+#### Standard tests
+
+Adding a standard test case is easy.
+
+- Create a new directory `betterproto/tests/inputs/<name>`
+- add `<name>.proto` with a message called `Test`
+- add `<name>.json` with some test data (optional)
+
+It will be picked up automatically when you run the tests.
+
+- See also: [Standard Tests Development Guide](tests/README.md)
+
+#### Custom tests
+
+Custom tests are found in `tests/test_*.py` and are run with pytest.
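For illustration, a minimal custom test might look like the sketch below (the file name is hypothetical, and the message is declared inline rather than generated, just to keep it self-contained):

```python
# tests/test_roundtrip.py
from dataclasses import dataclass

import betterproto


@dataclass
class Greeting(betterproto.Message):
    message: str = betterproto.string_field(1)


def test_binary_roundtrip():
    msg = Greeting(message="hi")
    assert Greeting().parse(bytes(msg)) == msg
```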
+
+#### Running
+
 Here's how to run the tests.
 
 ```sh
-# Generate assets from sample .proto files
-$ pipenv run generate
+# Generate assets from sample .proto files required by the tests
+poe generate
 
 # Run the tests
-$ pipenv run tests
+poe test
+```
+
+To run tests as they are run in CI (with tox) run:
+
+```sh
+poe full-test
+```
+
+### (Re)compiling Google Well-known Types
+
+Betterproto includes compiled versions for Google's well-known types at [src/betterproto/lib/google](src/betterproto/lib/google).
+Be sure to regenerate these files when modifying the plugin output format, and validate by running the tests.
+
+Normally, the plugin does not compile any references to `google.protobuf`, since they are pre-compiled. To force compilation of `google.protobuf`, use the option `--custom_opt=INCLUDE_GOOGLE`.
+
+Assuming your `google.protobuf` source files (included with all releases of `protoc`) are located in `/usr/local/include`, you can regenerate them as follows:
+
+```sh
+protoc \
+    --plugin=protoc-gen-custom=src/betterproto/plugin/main.py \
+    --custom_opt=INCLUDE_GOOGLE \
+    --custom_out=src/betterproto/lib \
+    -I /usr/local/include/ \
+    /usr/local/include/google/protobuf/*.proto
 ```
 
 ### TODO
@@ -284,6 +547,9 @@ $ pipenv run tests
 - [x] Refs to nested types
 - [x] Imports in proto files
 - [x] Well-known Google types
+  - [ ] Support as request input
+  - [ ] Support as response output
+  - [ ] Automatically wrap/unwrap responses
 - [x] OneOf support
 - [x] Basic support on the wire
 - [x] Check which was set from the group
@@ -295,18 +561,22 @@ $ pipenv run tests
 - [x] Bytes as base64
 - [ ] Any support
 - [x] Enum strings
-- [ ] Well known types support (timestamp, duration, wrappers)
+- [x] Well known types support (timestamp, duration, wrappers)
-- [ ] Support different casing (orig vs. camel vs. others?)
+- [x] Support different casing (orig vs. camel vs. others?)
-- [ ] Async service stubs
+- [x] Async service stubs
 - [x] Unary-unary
 - [x] Server streaming response
-- [ ] Client streaming request
+- [x] Client streaming request
-- [ ] Renaming messages and fields to conform to Python name standards
+- [x] Renaming messages and fields to conform to Python name standards
-- [ ] Renaming clashes with language keywords and standard library top-level packages
+- [x] Renaming clashes with language keywords
 - [x] Python package
 - [x] Automate running tests
 - [ ] Cleanup!
+
+## Community
+
+Join us on [Discord](https://discord.gg/DEVteTupPb)!
 
 ## License
 
 Copyright © 2019 Daniel G. Taylor
asv.conf.json (new file, 157 lines)

{
    // The version of the config file format. Do not change, unless
    // you know what you are doing.
    "version": 1,

    // The name of the project being benchmarked
    "project": "python-betterproto",

    // The project's homepage
    "project_url": "https://github.com/danielgtaylor/python-betterproto",

    // The URL or local path of the source code repository for the
    // project being benchmarked
    "repo": ".",

    // The Python project's subdirectory in your repo. If missing or
    // the empty string, the project is assumed to be located at the root
    // of the repository.
    // "repo_subdir": "",

    // Customizable commands for building, installing, and
    // uninstalling the project. See asv.conf.json documentation.
    //
    "install_command": ["python -m pip install ."],
    "uninstall_command": ["return-code=any python -m pip uninstall -y {project}"],
    "build_command": ["python -m pip wheel -w {build_cache_dir} {build_dir}"],

    // List of branches to benchmark. If not provided, defaults to "master"
    // (for git) or "default" (for mercurial).
    // "branches": ["master"], // for git
    // "branches": ["default"], // for mercurial

    // The DVCS being used. If not set, it will be automatically
    // determined from "repo" by looking at the protocol in the URL
    // (if remote), or by looking for special directories, such as
    // ".git" (if local).
    // "dvcs": "git",

    // The tool to use to create environments. May be "conda",
    // "virtualenv" or other value depending on the plugins in use.
    // If missing or the empty string, the tool will be automatically
    // determined by looking for tools on the PATH environment
    // variable.
    "environment_type": "virtualenv",

    // timeout in seconds for installing any dependencies in environment
    // defaults to 10 min
    //"install_timeout": 600,

    // the base URL to show a commit for the project.
    // "show_commit_url": "http://github.com/owner/project/commit/",

    // The Pythons you'd like to test against. If not provided, defaults
    // to the current version of Python used to run `asv`.
    // "pythons": ["2.7", "3.6"],

    // The list of conda channel names to be searched for benchmark
    // dependency packages in the specified order
    // "conda_channels": ["conda-forge", "defaults"],

    // The matrix of dependencies to test. Each key is the name of a
    // package (in PyPI) and the values are version numbers. An empty
    // list or empty string indicates to just test against the default
    // (latest) version. null indicates that the package is to not be
    // installed. If the package to be tested is only available from
    // PyPi, and the 'environment_type' is conda, then you can preface
    // the package name by 'pip+', and the package will be installed via
    // pip (with all the conda available packages installed first,
    // followed by the pip installed packages).
    //
    // "matrix": {
    //     "numpy": ["1.6", "1.7"],
    //     "six": ["", null], // test with and without six installed
    //     "pip+emcee": [""], // emcee is only available for install with pip.
    // },

    // Combinations of libraries/python versions can be excluded/included
    // from the set to test. Each entry is a dictionary containing additional
    // key-value pairs to include/exclude.
    //
    // An exclude entry excludes entries where all values match. The
    // values are regexps that should match the whole string.
    //
    // An include entry adds an environment. Only the packages listed
    // are installed. The 'python' key is required. The exclude rules
    // do not apply to includes.
    //
    // In addition to package names, the following keys are available:
    //
    // - python
    //     Python version, as in the *pythons* variable above.
    // - environment_type
    //     Environment type, as above.
    // - sys_platform
    //     Platform, as in sys.platform. Possible values for the common
    //     cases: 'linux2', 'win32', 'cygwin', 'darwin'.
    //
    // "exclude": [
    //     {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows
    //     {"environment_type": "conda", "six": null}, // don't run without six on conda
    // ],
    //
    // "include": [
    //     // additional env for python2.7
    //     {"python": "2.7", "numpy": "1.8"},
    //     // additional env if run on windows+conda
    //     {"platform": "win32", "environment_type": "conda", "python": "2.7", "libpython": ""},
    // ],

    // The directory (relative to the current directory) that benchmarks are
    // stored in. If not provided, defaults to "benchmarks"
    // "benchmark_dir": "benchmarks",

    // The directory (relative to the current directory) to cache the Python
    // environments in. If not provided, defaults to "env"
    "env_dir": ".asv/env",

    // The directory (relative to the current directory) that raw benchmark
    // results are stored in. If not provided, defaults to "results".
    "results_dir": ".asv/results",

    // The directory (relative to the current directory) that the html tree
    // should be written to. If not provided, defaults to "html".
    "html_dir": ".asv/html",

    // The number of characters to retain in the commit hashes.
    // "hash_length": 8,

    // `asv` will cache results of the recent builds in each
    // environment, making them faster to install next time. This is
    // the number of builds to keep, per environment.
    // "build_cache_size": 2,

    // The commits after which the regression search in `asv publish`
    // should start looking for regressions. Dictionary whose keys are
    // regexps matching to benchmark names, and values corresponding to
    // the commit (exclusive) after which to start looking for
    // regressions. The default is to start from the first commit
    // with results. If the commit is `null`, regression detection is
    // skipped for the matching benchmark.
    //
    // "regressions_first_commits": {
    //     "some_benchmark": "352cdf",  // Consider regressions only after this commit
    //     "another_benchmark": null,   // Skip regression detection altogether
    // },

    // The thresholds for relative change in results, after which `asv
    // publish` starts reporting regressions. Dictionary of the same
    // form as in ``regressions_first_commits``, with values
    // indicating the thresholds. If multiple entries match, the
    // maximum is taken. If no entry matches, the default is 5%.
    //
    // "regressions_thresholds": {
    //     "some_benchmark": 0.01,   // Threshold of 1%
    //     "another_benchmark": 0.5, // Threshold of 50%
    // },
}
benchmarks/benchmarks.py (new file, 117 lines)

from dataclasses import dataclass
from typing import List

import betterproto


@dataclass
class TestMessage(betterproto.Message):
    foo: int = betterproto.uint32_field(1)
    bar: str = betterproto.string_field(2)
    baz: float = betterproto.float_field(3)


@dataclass
class TestNestedChildMessage(betterproto.Message):
    str_key: str = betterproto.string_field(1)
    bytes_key: bytes = betterproto.bytes_field(2)
    bool_key: bool = betterproto.bool_field(3)
    float_key: float = betterproto.float_field(4)
    int_key: int = betterproto.uint64_field(5)


@dataclass
class TestNestedMessage(betterproto.Message):
    foo: TestNestedChildMessage = betterproto.message_field(1)
    bar: TestNestedChildMessage = betterproto.message_field(2)
    baz: TestNestedChildMessage = betterproto.message_field(3)


@dataclass
class TestRepeatedMessage(betterproto.Message):
    foo_repeat: List[str] = betterproto.string_field(1)
    bar_repeat: List[int] = betterproto.int64_field(2)
    baz_repeat: List[bool] = betterproto.bool_field(3)


class BenchMessage:
    """Test creation and usage of a proto message."""

    def setup(self):
        self.cls = TestMessage
        self.instance = TestMessage()
        self.instance_filled = TestMessage(0, "test", 0.0)
        self.instance_filled_bytes = bytes(self.instance_filled)
        self.instance_filled_nested = TestNestedMessage(
            TestNestedChildMessage("foo", bytearray(b"test1"), True, 0.1234, 500),
            TestNestedChildMessage("bar", bytearray(b"test2"), True, 3.1415, 302),
            TestNestedChildMessage("baz", bytearray(b"test3"), False, 1e5, 300),
        )
        self.instance_filled_nested_bytes = bytes(self.instance_filled_nested)
        self.instance_filled_repeated = TestRepeatedMessage(
            [f"test{i}" for i in range(1_000)],
            [(i - 500) ** 3 for i in range(1_000)],
            [i % 2 == 0 for i in range(1_000)],
        )
        self.instance_filled_repeated_bytes = bytes(self.instance_filled_repeated)

    def time_overhead(self):
        """Overhead in class definition."""

        @dataclass
        class Message(betterproto.Message):
            foo: int = betterproto.uint32_field(1)
            bar: str = betterproto.string_field(2)
            baz: float = betterproto.float_field(3)

    def time_instantiation(self):
        """Time instantiation"""
        self.cls()

    def time_attribute_access(self):
        """Time to access an attribute"""
        self.instance.foo
        self.instance.bar
        self.instance.baz

    def time_init_with_values(self):
        """Time to set an attribute"""
        self.cls(0, "test", 0.0)

    def time_attribute_setting(self):
        """Time to set attributes"""
        self.instance.foo = 0
        self.instance.bar = "test"
        self.instance.baz = 0.0

    def time_serialize(self):
        """Time serializing a message to wire."""
        bytes(self.instance_filled)

    def time_deserialize(self):
        """Time deserialize a message."""
        TestMessage().parse(self.instance_filled_bytes)

    def time_serialize_nested(self):
        """Time serializing a nested message to wire."""
        bytes(self.instance_filled_nested)

    def time_deserialize_nested(self):
        """Time deserialize a nested message."""
        TestNestedMessage().parse(self.instance_filled_nested_bytes)

    def time_serialize_repeated(self):
        """Time serializing a repeated message to wire."""
        bytes(self.instance_filled_repeated)

    def time_deserialize_repeated(self):
        """Time deserialize a repeated message."""
        TestRepeatedMessage().parse(self.instance_filled_repeated_bytes)


class MemSuite:
    def setup(self):
        self.cls = TestMessage

    def mem_instance(self):
        return self.cls()
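The `time_*` and `mem_*` methods follow airspeed velocity's naming convention, but they are plain callables, so they can be smoke-tested directly before committing; a minimal sketch:

bench = BenchMessage()
bench.setup()
bench.time_serialize()
bench.time_deserialize_repeated()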
@@ -1,775 +0,0 @@
import dataclasses
import enum
import inspect
import json
import struct
from abc import ABC
from base64 import b64encode, b64decode
from typing import (
    Any,
    AsyncGenerator,
    Callable,
    Dict,
    Generator,
    Iterable,
    List,
    Optional,
    SupportsBytes,
    Tuple,
    Type,
    TypeVar,
    Union,
    get_type_hints,
)

import grpclib.client
import grpclib.const

# Proto 3 data types
TYPE_ENUM = "enum"
TYPE_BOOL = "bool"
TYPE_INT32 = "int32"
TYPE_INT64 = "int64"
TYPE_UINT32 = "uint32"
TYPE_UINT64 = "uint64"
TYPE_SINT32 = "sint32"
TYPE_SINT64 = "sint64"
TYPE_FLOAT = "float"
TYPE_DOUBLE = "double"
TYPE_FIXED32 = "fixed32"
TYPE_SFIXED32 = "sfixed32"
TYPE_FIXED64 = "fixed64"
TYPE_SFIXED64 = "sfixed64"
TYPE_STRING = "string"
TYPE_BYTES = "bytes"
TYPE_MESSAGE = "message"
TYPE_MAP = "map"


# Fields that use a fixed amount of space (4 or 8 bytes)
FIXED_TYPES = [
    TYPE_FLOAT,
    TYPE_DOUBLE,
    TYPE_FIXED32,
    TYPE_SFIXED32,
    TYPE_FIXED64,
    TYPE_SFIXED64,
]

# Fields that are numerical 64-bit types
INT_64_TYPES = [TYPE_INT64, TYPE_UINT64, TYPE_SINT64, TYPE_FIXED64, TYPE_SFIXED64]

# Fields that are efficiently packed when repeated
PACKED_TYPES = [
    TYPE_ENUM,
    TYPE_BOOL,
    TYPE_INT32,
    TYPE_INT64,
    TYPE_UINT32,
    TYPE_UINT64,
    TYPE_SINT32,
    TYPE_SINT64,
    TYPE_FLOAT,
    TYPE_DOUBLE,
    TYPE_FIXED32,
    TYPE_SFIXED32,
    TYPE_FIXED64,
    TYPE_SFIXED64,
]

# Wire types
# https://developers.google.com/protocol-buffers/docs/encoding#structure
WIRE_VARINT = 0
WIRE_FIXED_64 = 1
WIRE_LEN_DELIM = 2
WIRE_FIXED_32 = 5
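# A field's key is (field_number << 3) | wire_type; for example, field number 1
# encoded as a varint gets the single key byte 0x08.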

# Mappings of which Proto 3 types correspond to which wire types.
WIRE_VARINT_TYPES = [
    TYPE_ENUM,
    TYPE_BOOL,
    TYPE_INT32,
    TYPE_INT64,
    TYPE_UINT32,
    TYPE_UINT64,
    TYPE_SINT32,
    TYPE_SINT64,
]

WIRE_FIXED_32_TYPES = [TYPE_FLOAT, TYPE_FIXED32, TYPE_SFIXED32]
WIRE_FIXED_64_TYPES = [TYPE_DOUBLE, TYPE_FIXED64, TYPE_SFIXED64]
WIRE_LEN_DELIM_TYPES = [TYPE_STRING, TYPE_BYTES, TYPE_MESSAGE, TYPE_MAP]


class _PLACEHOLDER:
    pass


PLACEHOLDER: Any = _PLACEHOLDER()


def get_default(proto_type: str) -> Any:
    """Get the default (zero value) for a given type."""
    return {
        TYPE_BOOL: False,
        TYPE_FLOAT: 0.0,
        TYPE_DOUBLE: 0.0,
        TYPE_STRING: "",
        TYPE_BYTES: b"",
        TYPE_MAP: {},
    }.get(proto_type, 0)
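# For reference: strings default to "", bytes to b"", maps to a fresh dict per
# call, and every numeric/enum type falls through to 0.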

@dataclasses.dataclass(frozen=True)
class FieldMetadata:
    """Stores internal metadata used for parsing & serialization."""

    # Protobuf field number
    number: int
    # Protobuf type name
    proto_type: str
    # Map information if the proto_type is a map
    map_types: Optional[Tuple[str, str]]
    # Groups several "one-of" fields together
    group: Optional[str]

    @staticmethod
    def get(field: dataclasses.Field) -> "FieldMetadata":
        """Returns the field metadata for a dataclass field."""
        return field.metadata["betterproto"]


def dataclass_field(
    number: int,
    proto_type: str,
    *,
    map_types: Optional[Tuple[str, str]] = None,
    group: Optional[str] = None,
) -> dataclasses.Field:
    """Creates a dataclass field with attached protobuf metadata."""
    return dataclasses.field(
        default=PLACEHOLDER,
        metadata={"betterproto": FieldMetadata(number, proto_type, map_types, group)},
    )


# Note: the fields below return `Any` to prevent type errors in the generated
# data classes since the types won't match with `Field` and they get swapped
# out at runtime. The generated dataclass variables are still typed correctly.


def enum_field(number: int, group: Optional[str] = None) -> Any:
    return dataclass_field(number, TYPE_ENUM, group=group)


def bool_field(number: int, group: Optional[str] = None) -> Any:
    return dataclass_field(number, TYPE_BOOL, group=group)


def int32_field(number: int, group: Optional[str] = None) -> Any:
    return dataclass_field(number, TYPE_INT32, group=group)


def int64_field(number: int, group: Optional[str] = None) -> Any:
    return dataclass_field(number, TYPE_INT64, group=group)


def uint32_field(number: int, group: Optional[str] = None) -> Any:
    return dataclass_field(number, TYPE_UINT32, group=group)


def uint64_field(number: int, group: Optional[str] = None) -> Any:
    return dataclass_field(number, TYPE_UINT64, group=group)


def sint32_field(number: int, group: Optional[str] = None) -> Any:
    return dataclass_field(number, TYPE_SINT32, group=group)


def sint64_field(number: int, group: Optional[str] = None) -> Any:
    return dataclass_field(number, TYPE_SINT64, group=group)


def float_field(number: int, group: Optional[str] = None) -> Any:
    return dataclass_field(number, TYPE_FLOAT, group=group)


def double_field(number: int, group: Optional[str] = None) -> Any:
    return dataclass_field(number, TYPE_DOUBLE, group=group)


def fixed32_field(number: int, group: Optional[str] = None) -> Any:
    return dataclass_field(number, TYPE_FIXED32, group=group)


def fixed64_field(number: int, group: Optional[str] = None) -> Any:
    return dataclass_field(number, TYPE_FIXED64, group=group)


def sfixed32_field(number: int, group: Optional[str] = None) -> Any:
    return dataclass_field(number, TYPE_SFIXED32, group=group)


def sfixed64_field(number: int, group: Optional[str] = None) -> Any:
    return dataclass_field(number, TYPE_SFIXED64, group=group)


def string_field(number: int, group: Optional[str] = None) -> Any:
    return dataclass_field(number, TYPE_STRING, group=group)


def bytes_field(number: int, group: Optional[str] = None) -> Any:
    return dataclass_field(number, TYPE_BYTES, group=group)


def message_field(number: int, group: Optional[str] = None) -> Any:
    return dataclass_field(number, TYPE_MESSAGE, group=group)


def map_field(
    number: int, key_type: str, value_type: str, group: Optional[str] = None
) -> Any:
    return dataclass_field(
        number, TYPE_MAP, map_types=(key_type, value_type), group=group
    )


class Enum(int, enum.Enum):
    """Protocol buffers enumeration base class. Acts like `enum.IntEnum`."""

    @classmethod
    def from_string(cls, name: str) -> int:
        """Return the value which corresponds to the string name."""
        try:
            return cls.__members__[name]
        except KeyError as e:
            raise ValueError(f"Unknown value {name} for enum {cls.__name__}") from e


def _pack_fmt(proto_type: str) -> str:
    """Returns a little-endian format string for reading/writing binary."""
    return {
        TYPE_DOUBLE: "<d",
        TYPE_FLOAT: "<f",
        TYPE_FIXED32: "<I",
        TYPE_FIXED64: "<Q",
        TYPE_SFIXED32: "<i",
        TYPE_SFIXED64: "<q",
    }[proto_type]


def encode_varint(value: int) -> bytes:
    """Encodes a single varint value for serialization."""
    b: List[int] = []

    if value < 0:
        value += 1 << 64

    bits = value & 0x7F
    value >>= 7
    while value:
        b.append(0x80 | bits)
        bits = value & 0x7F
        value >>= 7
    return bytes(b + [bits])
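# For example, encode_varint(300) == b"\xac\x02": seven bits per byte,
# least-significant group first, high bit set on all but the final byte.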
def _preprocess_single(proto_type: str, value: Any) -> bytes:
    """Adjusts values before serialization."""
    if proto_type in [
        TYPE_ENUM,
        TYPE_BOOL,
        TYPE_INT32,
        TYPE_INT64,
        TYPE_UINT32,
        TYPE_UINT64,
    ]:
        return encode_varint(value)
    elif proto_type in [TYPE_SINT32, TYPE_SINT64]:
        # Handle zig-zag encoding.
        if value >= 0:
            value = value << 1
        else:
            value = (value << 1) ^ (~0)
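        # After zig-zag, 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, so values of small
        # magnitude in either sign encode to short varints.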
|
|
||||||
return encode_varint(value)
|
|
||||||
elif proto_type in FIXED_TYPES:
|
|
||||||
return struct.pack(_pack_fmt(proto_type), value)
|
|
||||||
elif proto_type == TYPE_STRING:
|
|
||||||
return value.encode("utf-8")
|
|
||||||
elif proto_type == TYPE_MESSAGE:
|
|
||||||
return bytes(value)
|
|
||||||
|
|
||||||
return value
|
|
||||||
|
|
||||||
|
|
||||||
def _serialize_single(
|
|
||||||
field_number: int, proto_type: str, value: Any, *, serialize_empty: bool = False
|
|
||||||
) -> bytes:
|
|
||||||
"""Serializes a single field and value."""
|
|
||||||
value = _preprocess_single(proto_type, value)
|
|
||||||
|
|
||||||
output = b""
|
|
||||||
if proto_type in WIRE_VARINT_TYPES:
|
|
||||||
key = encode_varint(field_number << 3)
|
|
||||||
output += key + value
|
|
||||||
elif proto_type in WIRE_FIXED_32_TYPES:
|
|
||||||
key = encode_varint((field_number << 3) | 5)
|
|
||||||
output += key + value
|
|
||||||
elif proto_type in WIRE_FIXED_64_TYPES:
|
|
||||||
key = encode_varint((field_number << 3) | 1)
|
|
||||||
output += key + value
|
|
||||||
elif proto_type in WIRE_LEN_DELIM_TYPES:
|
|
||||||
if len(value) or serialize_empty:
|
|
||||||
key = encode_varint((field_number << 3) | 2)
|
|
||||||
output += key + encode_varint(len(value)) + value
|
|
||||||
else:
|
|
||||||
raise NotImplementedError(proto_type)
|
|
||||||
|
|
||||||
return output
|
|
||||||
|
|
||||||
|
|
||||||
def decode_varint(buffer: bytes, pos: int, signed: bool = False) -> Tuple[int, int]:
    """
    Decode a single varint value from a byte buffer. Returns the value and the
    new position in the buffer.
    """
    result = 0
    shift = 0
    while True:
        b = buffer[pos]
        result |= (b & 0x7F) << shift
        pos += 1
        if not (b & 0x80):
            return (result, pos)
        shift += 7
        if shift >= 64:
            raise ValueError("Too many bytes when decoding varint.")
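
# Illustrative round trip (not part of the original module):
#
#     value, pos = decode_varint(b"\xac\x02", 0)
#     assert (value, pos) == (300, 2)
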
@dataclasses.dataclass(frozen=True)
class ParsedField:
    number: int
    wire_type: int
    value: Any
    raw: bytes
def parse_fields(value: bytes) -> Generator[ParsedField, None, None]:
    i = 0
    while i < len(value):
        start = i
        num_wire, i = decode_varint(value, i)
        number = num_wire >> 3
        wire_type = num_wire & 0x7

        decoded: Any = None
        if wire_type == 0:
            decoded, i = decode_varint(value, i)
        elif wire_type == 1:
            decoded, i = value[i : i + 8], i + 8
        elif wire_type == 2:
            length, i = decode_varint(value, i)
            decoded = value[i : i + length]
            i += length
        elif wire_type == 5:
            decoded, i = value[i : i + 4], i + 4

        yield ParsedField(
            number=number, wire_type=wire_type, value=decoded, raw=value[start:i]
        )
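
# Illustrative usage (not part of the original module): b"\x08\x96\x01" is a
# single varint field with field number 1 holding the value 150.
#
#     field = next(parse_fields(b"\x08\x96\x01"))
#     assert (field.number, field.wire_type, field.value) == (1, 0, 150)
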
# Bound type variable to allow methods to return `self` of subclasses
T = TypeVar("T", bound="Message")


class Message(ABC):
    """
    A protobuf message base class. Generated code will inherit from this and
    register the message fields which get used by the serializers and parsers
    to go between Python, binary and JSON protobuf message representations.
    """
    def __post_init__(self) -> None:
        # Set a default value for each field in the class after `__init__` has
        # already been run.
        group_map = {"fields": {}, "groups": {}}
        for field in dataclasses.fields(self):
            meta = FieldMetadata.get(field)

            if meta.group:
                group_map["fields"][field.name] = meta.group

                if meta.group not in group_map["groups"]:
                    group_map["groups"][meta.group] = {"current": None, "fields": set()}
                group_map["groups"][meta.group]["fields"].add(field)

            if getattr(self, field.name) != PLACEHOLDER:
                # Skip anything not set to the sentinel value

                if meta.group:
                    # This was set, so make it the selected value of the one-of.
                    group_map["groups"][meta.group]["current"] = field

                continue

            setattr(self, field.name, self._get_field_default(field, meta))

        # Now that all the defaults are set, reset it!
        self.__dict__["_serialized_on_wire"] = False
        self.__dict__["_unknown_fields"] = b""
        self.__dict__["_group_map"] = group_map
    def __setattr__(self, attr: str, value: Any) -> None:
        if attr != "_serialized_on_wire":
            # Track when a field has been set.
            self.__dict__["_serialized_on_wire"] = True

        if attr in getattr(self, "_group_map", {}).get("fields", {}):
            group = self._group_map["fields"][attr]
            for field in self._group_map["groups"][group]["fields"]:
                if field.name == attr:
                    self._group_map["groups"][group]["current"] = field
                else:
                    super().__setattr__(
                        field.name,
                        self._get_field_default(field, FieldMetadata.get(field)),
                    )

        super().__setattr__(attr, value)
    def __bytes__(self) -> bytes:
        """
        Get the binary encoded Protobuf representation of this instance.
        """
        output = b""
        for field in dataclasses.fields(self):
            meta = FieldMetadata.get(field)
            value = getattr(self, field.name)

            # Being selected in a group means this field is the one that is
            # currently set in a `oneof` group, so it must be serialized even
            # if the value is the default zero value.
            selected_in_group = False
            if meta.group and self._group_map["groups"][meta.group]["current"] == field:
                selected_in_group = True

            if isinstance(value, list):
                if not len(value) and not selected_in_group:
                    # Empty values are not serialized
                    continue

                if meta.proto_type in PACKED_TYPES:
                    # Packed lists look like a length-delimited field. First,
                    # preprocess/encode each value into a buffer and then
                    # treat it like a field of raw bytes.
                    buf = b""
                    for item in value:
                        buf += _preprocess_single(meta.proto_type, item)
                    output += _serialize_single(meta.number, TYPE_BYTES, buf)
                else:
                    for item in value:
                        output += _serialize_single(meta.number, meta.proto_type, item)
            elif isinstance(value, dict):
                if not len(value) and not selected_in_group:
                    # Empty values are not serialized
                    continue

                for k, v in value.items():
                    assert meta.map_types
                    sk = _serialize_single(1, meta.map_types[0], k)
                    sv = _serialize_single(2, meta.map_types[1], v)
                    output += _serialize_single(meta.number, meta.proto_type, sk + sv)
            else:
                if value == get_default(meta.proto_type) and not selected_in_group:
                    # Default (zero) values are not serialized
                    continue

                serialize_empty = False
                if isinstance(value, Message) and value._serialized_on_wire:
                    serialize_empty = True
                output += _serialize_single(
                    meta.number, meta.proto_type, value, serialize_empty=serialize_empty
                )

        return output + self._unknown_fields

    # For compatibility with other libraries
    SerializeToString = __bytes__
    def _cls_for(self, field: dataclasses.Field, index: int = 0) -> Type:
        """Get the message class for a field from the type hints."""
        module = inspect.getmodule(self.__class__)
        type_hints = get_type_hints(self.__class__, vars(module))
        cls = type_hints[field.name]
        if hasattr(cls, "__args__") and index >= 0:
            cls = type_hints[field.name].__args__[index]
        return cls
    def _get_field_default(self, field: dataclasses.Field, meta: FieldMetadata) -> Any:
        t = self._cls_for(field, index=-1)

        value: Any = 0
        if meta.proto_type == TYPE_MAP:
            # Maps cannot be repeated, so we check these first.
            value = {}
        elif hasattr(t, "__args__") and len(t.__args__) == 1:
            # Anything else with type args is a list.
            value = []
        elif meta.proto_type == TYPE_MESSAGE:
            # Message means creating an instance of the right type.
            value = t()
        else:
            value = get_default(meta.proto_type)

        return value
    def _postprocess_single(
        self, wire_type: int, meta: FieldMetadata, field: dataclasses.Field, value: Any
    ) -> Any:
        """Adjusts values after parsing."""
        if wire_type == WIRE_VARINT:
            if meta.proto_type in [TYPE_INT32, TYPE_INT64]:
                bits = int(meta.proto_type[3:])
                value = value & ((1 << bits) - 1)
                signbit = 1 << (bits - 1)
                value = int((value ^ signbit) - signbit)
            elif meta.proto_type in [TYPE_SINT32, TYPE_SINT64]:
                # Undo zig-zag encoding
                value = (value >> 1) ^ (-(value & 1))
        elif wire_type in [WIRE_FIXED_32, WIRE_FIXED_64]:
            fmt = _pack_fmt(meta.proto_type)
            value = struct.unpack(fmt, value)[0]
        elif wire_type == WIRE_LEN_DELIM:
            if meta.proto_type == TYPE_STRING:
                value = value.decode("utf-8")
            elif meta.proto_type == TYPE_MESSAGE:
                cls = self._cls_for(field)
                value = cls().parse(value)
                value._serialized_on_wire = True
            elif meta.proto_type == TYPE_MAP:
                # TODO: This is slow, use a cache to make it faster since each
                # key/value pair will recreate the class.
                assert meta.map_types
                kt = self._cls_for(field, index=0)
                vt = self._cls_for(field, index=1)
                Entry = dataclasses.make_dataclass(
                    "Entry",
                    [
                        ("key", kt, dataclass_field(1, meta.map_types[0])),
                        ("value", vt, dataclass_field(2, meta.map_types[1])),
                    ],
                    bases=(Message,),
                )
                value = Entry().parse(value)

        return value
    def parse(self: T, data: bytes) -> T:
        """
        Parse the binary encoded Protobuf into this message instance. This
        returns the instance itself and is therefore assignable and chainable.
        """
        fields = {f.metadata["betterproto"].number: f for f in dataclasses.fields(self)}
        for parsed in parse_fields(data):
            if parsed.number in fields:
                field = fields[parsed.number]
                meta = FieldMetadata.get(field)

                value: Any
                if (
                    parsed.wire_type == WIRE_LEN_DELIM
                    and meta.proto_type in PACKED_TYPES
                ):
                    # This is a packed repeated field.
                    pos = 0
                    value = []
                    while pos < len(parsed.value):
                        if meta.proto_type in ["float", "fixed32", "sfixed32"]:
                            decoded, pos = parsed.value[pos : pos + 4], pos + 4
                            wire_type = WIRE_FIXED_32
                        elif meta.proto_type in ["double", "fixed64", "sfixed64"]:
                            decoded, pos = parsed.value[pos : pos + 8], pos + 8
                            wire_type = WIRE_FIXED_64
                        else:
                            decoded, pos = decode_varint(parsed.value, pos)
                            wire_type = WIRE_VARINT
                        decoded = self._postprocess_single(
                            wire_type, meta, field, decoded
                        )
                        value.append(decoded)
                else:
                    value = self._postprocess_single(
                        parsed.wire_type, meta, field, parsed.value
                    )

                current = getattr(self, field.name)
                if meta.proto_type == TYPE_MAP:
                    # Value represents a single key/value pair entry in the map.
                    current[value.key] = value.value
                elif isinstance(current, list) and not isinstance(value, list):
                    current.append(value)
                else:
                    setattr(self, field.name, value)
            else:
                self._unknown_fields += parsed.raw

        return self

    # For compatibility with other libraries.
    @classmethod
    def FromString(cls: Type[T], data: bytes) -> T:
        return cls().parse(data)
    def to_dict(self) -> dict:
        """
        Returns a dict representation of this message instance which can be
        used to serialize to e.g. JSON.
        """
        output: Dict[str, Any] = {}
        for field in dataclasses.fields(self):
            meta = FieldMetadata.get(field)
            v = getattr(self, field.name)
            if meta.proto_type == "message":
                if isinstance(v, list):
                    # Convert each item.
                    v = [i.to_dict() for i in v]
                    output[field.name] = v
                elif v._serialized_on_wire:
                    output[field.name] = v.to_dict()
            elif meta.proto_type == "map":
                for k in v:
                    if hasattr(v[k], "to_dict"):
                        v[k] = v[k].to_dict()

                if v:
                    output[field.name] = v
            elif v != get_default(meta.proto_type):
                if meta.proto_type in INT_64_TYPES:
                    if isinstance(v, list):
                        output[field.name] = [str(n) for n in v]
                    else:
                        output[field.name] = str(v)
                elif meta.proto_type == TYPE_BYTES:
                    if isinstance(v, list):
                        output[field.name] = [b64encode(b).decode("utf8") for b in v]
                    else:
                        output[field.name] = b64encode(v).decode("utf8")
                elif meta.proto_type == TYPE_ENUM:
                    enum_values = list(self._cls_for(field))
                    if isinstance(v, list):
                        output[field.name] = [enum_values[e].name for e in v]
                    else:
                        output[field.name] = enum_values[v].name
                else:
                    output[field.name] = v
        return output
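
    # Illustrative note (not part of the original file): to_dict follows the
    # canonical protobuf JSON mapping, so 64-bit integers become strings,
    # bytes become base64 strings, and enums become their member names, e.g.
    # for a hypothetical message `Foo(big=1, data=b"\x01", choice=ONE)`:
    #
    #     Foo(big=1, data=b"\x01", choice=1).to_dict()
    #     # -> {"big": "1", "data": "AQ==", "choice": "ONE"}
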
    def from_dict(self: T, value: dict) -> T:
        """
        Parse the key/value pairs in `value` into this message instance. This
        returns the instance itself and is therefore assignable and chainable.
        """
        self._serialized_on_wire = True
        for field in dataclasses.fields(self):
            meta = FieldMetadata.get(field)
            if field.name in value and value[field.name] is not None:
                if meta.proto_type == "message":
                    v = getattr(self, field.name)
                    if isinstance(v, list):
                        cls = self._cls_for(field)
                        for i in range(len(value[field.name])):
                            v.append(cls().from_dict(value[field.name][i]))
                    else:
                        v.from_dict(value[field.name])
                elif meta.map_types and meta.map_types[1] == TYPE_MESSAGE:
                    v = getattr(self, field.name)
                    cls = self._cls_for(field, index=1)
                    for k in value[field.name]:
                        v[k] = cls().from_dict(value[field.name][k])
                else:
                    v = value[field.name]
                    if meta.proto_type in INT_64_TYPES:
                        if isinstance(value[field.name], list):
                            v = [int(n) for n in value[field.name]]
                        else:
                            v = int(value[field.name])
                    elif meta.proto_type == TYPE_BYTES:
                        if isinstance(value[field.name], list):
                            v = [b64decode(n) for n in value[field.name]]
                        else:
                            v = b64decode(value[field.name])
                    elif meta.proto_type == TYPE_ENUM:
                        enum_cls = self._cls_for(field)
                        if isinstance(v, list):
                            v = [enum_cls.from_string(e) for e in v]
                        elif isinstance(v, str):
                            v = enum_cls.from_string(v)

                    if v is not None:
                        setattr(self, field.name, v)
        return self
    def to_json(self, indent: Union[None, int, str] = None) -> str:
        """Returns the encoded JSON representation of this message instance."""
        return json.dumps(self.to_dict(), indent=indent)

    def from_json(self: T, value: Union[str, bytes]) -> T:
        """
        Parse the key/value pairs in `value` into this message instance. This
        returns the instance itself and is therefore assignable and chainable.
        """
        return self.from_dict(json.loads(value))
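
# Illustrative round trip (not part of the original module), assuming a
# generated message class `Greeting` with a single string field `message`:
#
#     g = Greeting().from_json('{"message": "hi"}')
#     assert g.to_json() == '{"message": "hi"}'
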
def serialized_on_wire(message: Message) -> bool:
    """
    True if this message was or should be serialized on the wire. This can
    be used to detect presence (e.g. optional wrapper message) and is used
    internally during parsing/serialization.
    """
    return message._serialized_on_wire
def which_one_of(message: Message, group_name: str) -> Tuple[str, Any]:
    """Return the name and value of a message's one-of field group."""
    field = message._group_map["groups"].get(group_name, {}).get("current")
    if not field:
        return ("", None)
    return (field.name, getattr(message, field.name))
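
# Illustrative usage (not part of the original module), assuming a message
# class `Foo` with fields `bar` and `baz` in oneof group "group1":
#
#     foo = Foo(bar=1)
#     assert which_one_of(foo, "group1") == ("bar", 1)
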
class ServiceStub(ABC):
    """
    Base class for async gRPC service stubs.
    """

    def __init__(self, channel: grpclib.client.Channel) -> None:
        self.channel = channel

    async def _unary_unary(
        self, route: str, request_type: Type, response_type: Type[T], request: Any
    ) -> T:
        """Make a unary request and return the response."""
        async with self.channel.request(
            route, grpclib.const.Cardinality.UNARY_UNARY, request_type, response_type
        ) as stream:
            await stream.send_message(request, end=True)
            response = await stream.recv_message()
            assert response is not None
            return response

    async def _unary_stream(
        self, route: str, request_type: Type, response_type: Type[T], request: Any
    ) -> AsyncGenerator[T, None]:
        """Make a unary request and return the stream response iterator."""
        async with self.channel.request(
            route, grpclib.const.Cardinality.UNARY_STREAM, request_type, response_type
        ) as stream:
            await stream.send_message(request, end=True)
            async for message in stream:
                yield message
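
# Illustrative usage (not part of the original module): generated stubs call
# these helpers with their own message types. For a hypothetical EchoStub:
#
#     channel = grpclib.client.Channel(host="127.0.0.1", port=50051)
#     response = await EchoStub(channel)._unary_unary(
#         "/echo.Echo/Echo", EchoRequest, EchoResponse, EchoRequest(value="hi")
#     )
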
Binary file not shown.

@@ -1,392 +0,0 @@
#!/usr/bin/env python

import itertools
import json
import os.path
import re
import sys
import textwrap
from typing import Any, List, Tuple

try:
    import jinja2
except ImportError:
    print(
        "Unable to import `jinja2`. Did you install the compiler feature with `pip install betterproto[compiler]`?"
    )
    raise SystemExit(1)

from google.protobuf.compiler import plugin_pb2 as plugin
from google.protobuf.descriptor_pb2 import (
    DescriptorProto,
    EnumDescriptorProto,
    FieldDescriptorProto,
    FileDescriptorProto,
    ServiceDescriptorProto,
)


def snake_case(value: str) -> str:
    return (
        re.sub(r"(?<=[a-z])[A-Z]|[A-Z](?=[^A-Z])", r"_\g<0>", value).lower().strip("_")
    )
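
# Illustrative behavior (not part of the original file):
#
#     assert snake_case("GetUserName") == "get_user_name"
#     assert snake_case("HTTPRequest") == "http_request"
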
def get_ref_type(package: str, imports: set, type_name: str) -> str:
    """
    Return a Python type name for a proto type reference. Adds the import if
    necessary.
    """
    type_name = type_name.lstrip(".")
    if type_name.startswith(package):
        # This is the current package, which has nested types flattened.
        type_name = f'"{type_name.lstrip(package).lstrip(".").replace(".", "")}"'

    if "." in type_name:
        # This is imported from another package. No need to use a forward
        # ref and we need to add the import.
        parts = type_name.split(".")
        imports.add(f"from .{'.'.join(parts[:-2])} import {parts[-2]}")
        type_name = f"{parts[-2]}.{parts[-1]}"

    return type_name
def py_type(
    package: str,
    imports: set,
    message: DescriptorProto,
    descriptor: FieldDescriptorProto,
) -> str:
    if descriptor.type in [1, 2, 6, 7, 15, 16]:
        return "float"
    elif descriptor.type in [3, 4, 5, 13, 17, 18]:
        return "int"
    elif descriptor.type == 8:
        return "bool"
    elif descriptor.type == 9:
        return "str"
    elif descriptor.type in [11, 14]:
        # Type referencing another defined Message or a named enum
        return get_ref_type(package, imports, descriptor.type_name)
    elif descriptor.type == 12:
        return "bytes"
    else:
        raise NotImplementedError(f"Unknown type {descriptor.type}")
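
# Reference note (added for clarity): the numeric type codes above come from
# FieldDescriptorProto.Type, i.e. 1=double, 2=float, 3=int64, 4=uint64,
# 5=int32, 6=fixed64, 7=fixed32, 8=bool, 9=string, 11=message, 12=bytes,
# 13=uint32, 14=enum, 15=sfixed32, 16=sfixed64, 17=sint32, 18=sint64.
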
def get_py_zero(type_num: int) -> str:
    zero = 0
    if type_num in []:
        zero = 0.0
    elif type_num == 8:
        zero = "False"
    elif type_num == 9:
        zero = '""'
    elif type_num == 11:
        zero = "None"
    elif type_num == 12:
        zero = 'b""'

    return zero


def traverse(proto_file):
    def _traverse(path, items):
        for i, item in enumerate(items):
            yield item, path + [i]

            if isinstance(item, DescriptorProto):
                for enum in item.enum_type:
                    enum.name = item.name + enum.name
                    yield enum, path + [i, 4]

                if item.nested_type:
                    for n, p in _traverse(path + [i, 3], item.nested_type):
                        # Adjust the name since we flatten the hierarchy.
                        n.name = item.name + n.name
                        yield n, p

    return itertools.chain(
        _traverse([5], proto_file.enum_type), _traverse([4], proto_file.message_type)
    )


def get_comment(proto_file, path: List[int]) -> str:
    for sci in proto_file.source_code_info.location:
        if list(sci.path) == path and sci.leading_comments:
            lines = textwrap.wrap(
                sci.leading_comments.strip().replace("\n", ""), width=75
            )

            if path[-2] == 2 and path[-4] != 6:
                # This is a field
                return "    # " + "\n    # ".join(lines)
            else:
                # This is a message, enum, service, or method
                if len(lines) == 1 and len(lines[0]) < 70:
                    lines[0] = lines[0].strip('"')
                    return f'    """{lines[0]}"""'
                else:
                    joined = "\n    ".join(lines)
                    return f'    """\n    {joined}\n    """'

    return ""


def generate_code(request, response):
    env = jinja2.Environment(
        trim_blocks=True,
        lstrip_blocks=True,
        loader=jinja2.FileSystemLoader("%s/templates/" % os.path.dirname(__file__)),
    )
    template = env.get_template("template.py")

    output_map = {}
    for proto_file in request.proto_file:
        out = proto_file.package
        if not out:
            out = os.path.splitext(proto_file.name)[0].replace(os.path.sep, ".")

        if out not in output_map:
            output_map[out] = {"package": proto_file.package, "files": []}
        output_map[out]["files"].append(proto_file)

    # TODO: Figure out how to handle gRPC request/response messages and add
    # processing below for Service.

    for filename, options in output_map.items():
        package = options["package"]
        output = {
            "package": package,
            "files": [f.name for f in options["files"]],
            "imports": set(),
            "typing_imports": set(),
            "messages": [],
            "enums": [],
            "services": [],
        }

        type_mapping = {}

        for proto_file in options["files"]:
            for item, path in traverse(proto_file):
                data = {"name": item.name}

                if isinstance(item, DescriptorProto):
                    if item.options.map_entry:
                        # Skip generated map entry messages since we just use dicts
                        continue

                    data.update(
                        {
                            "type": "Message",
                            "comment": get_comment(proto_file, path),
                            "properties": [],
                        }
                    )

                    for i, f in enumerate(item.field):
                        t = py_type(package, output["imports"], item, f)
                        zero = get_py_zero(f.type)

                        repeated = False
                        packed = False

                        field_type = f.Type.Name(f.type).lower()[5:]
                        map_types = None
                        if f.type == 11:
                            # This might be a map...
                            message_type = f.type_name.split(".").pop().lower()
                            map_entry = f"{f.name.replace('_', '').lower()}entry"

                            if message_type == map_entry:
                                for nested in item.nested_type:
                                    if (
                                        nested.name.replace("_", "").lower()
                                        == map_entry
                                    ):
                                        if nested.options.map_entry:
                                            k = py_type(
                                                package,
                                                output["imports"],
                                                item,
                                                nested.field[0],
                                            )
                                            v = py_type(
                                                package,
                                                output["imports"],
                                                item,
                                                nested.field[1],
                                            )
                                            t = f"Dict[{k}, {v}]"
                                            field_type = "map"
                                            map_types = (
                                                f.Type.Name(nested.field[0].type),
                                                f.Type.Name(nested.field[1].type),
                                            )
                                            output["typing_imports"].add("Dict")

                        if f.label == 3 and field_type != "map":
                            # Repeated field
                            repeated = True
                            t = f"List[{t}]"
                            zero = "[]"
                            output["typing_imports"].add("List")

                            if f.type in [1, 2, 3, 4, 5, 6, 7, 8, 13, 15, 16, 17, 18]:
                                packed = True

                        one_of = ""
                        if f.HasField("oneof_index"):
                            one_of = item.oneof_decl[f.oneof_index].name

                        data["properties"].append(
                            {
                                "name": f.name,
                                "number": f.number,
                                "comment": get_comment(proto_file, path + [2, i]),
                                "proto_type": int(f.type),
                                "field_type": field_type,
                                "map_types": map_types,
                                "type": t,
                                "zero": zero,
                                "repeated": repeated,
                                "packed": packed,
                                "one_of": one_of,
                            }
                        )

                    output["messages"].append(data)
                elif isinstance(item, EnumDescriptorProto):
                    data.update(
                        {
                            "type": "Enum",
                            "comment": get_comment(proto_file, path),
                            "entries": [
                                {
                                    "name": v.name,
                                    "value": v.number,
                                    "comment": get_comment(proto_file, path + [2, i]),
                                }
                                for i, v in enumerate(item.value)
                            ],
                        }
                    )

                    output["enums"].append(data)

            for i, service in enumerate(proto_file.service):
                data = {
                    "name": service.name,
                    "comment": get_comment(proto_file, [6, i]),
                    "methods": [],
                }

                for j, method in enumerate(service.method):
                    if method.client_streaming:
                        raise NotImplementedError("Client streaming not yet supported")

                    input_message = None
                    input_type = get_ref_type(
                        package, output["imports"], method.input_type
                    ).strip('"')
                    for msg in output["messages"]:
                        if msg["name"] == input_type:
                            input_message = msg
                            for field in msg["properties"]:
                                if field["zero"] == "None":
                                    output["typing_imports"].add("Optional")
                            break

                    data["methods"].append(
                        {
                            "name": method.name,
                            "py_name": snake_case(method.name),
                            "comment": get_comment(proto_file, [6, i, 2, j]),
                            "route": f"/{package}.{service.name}/{method.name}",
                            "input": get_ref_type(
                                package, output["imports"], method.input_type
                            ).strip('"'),
                            "input_message": input_message,
                            "output": get_ref_type(
                                package, output["imports"], method.output_type
                            ).strip('"'),
                            "client_streaming": method.client_streaming,
                            "server_streaming": method.server_streaming,
                        }
                    )

                    if method.server_streaming:
                        output["typing_imports"].add("AsyncGenerator")

                output["services"].append(data)

        output["imports"] = sorted(output["imports"])
        output["typing_imports"] = sorted(output["typing_imports"])

        # Fill response
        f = response.file.add()
        f.name = filename.replace(".", os.path.sep) + ".py"
        f.content = template.render(description=output).rstrip("\n") + "\n"

    inits = set([""])
    for f in response.file:
        # Ensure output paths exist
        dirnames = os.path.dirname(f.name)
        if dirnames:
            os.makedirs(dirnames, exist_ok=True)
            base = ""
            for part in dirnames.split(os.path.sep):
                base = os.path.join(base, part)
                inits.add(base)

    for base in inits:
        init = response.file.add()
        init.name = os.path.join(base, "__init__.py")
        init.content = b""


def main():
    """The plugin's main entry point."""
    # Read request message from stdin
    data = sys.stdin.buffer.read()

    # Parse request
    request = plugin.CodeGeneratorRequest()
    request.ParseFromString(data)

    # Create response
    response = plugin.CodeGeneratorResponse()

    # Generate code
    generate_code(request, response)

    # Serialize response message
    output = response.SerializeToString()

    # Write to stdout
    sys.stdout.buffer.write(output)


if __name__ == "__main__":
    main()
@@ -1,95 +0,0 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# sources: {{ ', '.join(description.files) }}
# plugin: python-betterproto
from dataclasses import dataclass
{% if description.typing_imports %}
from typing import {% for i in description.typing_imports %}{{ i }}{% if not loop.last %}, {% endif %}{% endfor %}

{% endif %}

import betterproto
{% if description.services %}
import grpclib
{% endif %}
{% for i in description.imports %}

{{ i }}
{% endfor %}


{% if description.enums %}{% for enum in description.enums %}
class {{ enum.name }}(betterproto.Enum):
{% if enum.comment %}
{{ enum.comment }}

{% endif %}
{% for entry in enum.entries %}
{% if entry.comment %}
{{ entry.comment }}
{% endif %}
    {{ entry.name }} = {{ entry.value }}
{% endfor %}


{% endfor %}
{% endif %}
{% for message in description.messages %}
@dataclass
class {{ message.name }}(betterproto.Message):
{% if message.comment %}
{{ message.comment }}

{% endif %}
{% for field in message.properties %}
{% if field.comment %}
{{ field.comment }}
{% endif %}
    {{ field.name }}: {{ field.type }} = betterproto.{{ field.field_type }}_field({{ field.number }}{% if field.field_type == 'map'%}, betterproto.{{ field.map_types[0] }}, betterproto.{{ field.map_types[1] }}{% endif %}{% if field.one_of %}, group="{{ field.one_of }}"{% endif %})
{% endfor %}
{% if not message.properties %}
    pass
{% endif %}


{% endfor %}
{% for service in description.services %}
class {{ service.name }}Stub(betterproto.ServiceStub):
{% if service.comment %}
{{ service.comment }}

{% endif %}
{% for method in service.methods %}
    async def {{ method.py_name }}(self{% if method.input_message and method.input_message.properties %}, *, {% for field in method.input_message.properties %}{{ field.name }}: {% if field.zero == "None" %}Optional[{{ field.type }}]{% else %}{{ field.type }}{% endif %} = {{ field.zero }}{% if not loop.last %}, {% endif %}{% endfor %}{% endif %}) -> {% if method.server_streaming %}AsyncGenerator[{{ method.output }}, None]{% else %}{{ method.output }}{% endif %}:
{% if method.comment %}
{{ method.comment }}

{% endif %}
        request = {{ method.input }}()
{% for field in method.input_message.properties %}
{% if field.field_type == 'message' %}
        if {{ field.name }} is not None:
            request.{{ field.name }} = {{ field.name }}
{% else %}
        request.{{ field.name }} = {{ field.name }}
{% endif %}
{% endfor %}

{% if method.server_streaming %}
        async for response in self._unary_stream(
            "{{ method.route }}",
            {{ method.input }},
            {{ method.output }},
            request,
        ):
            yield response
{% else %}
        return await self._unary_unary(
            "{{ method.route }}",
            {{ method.input }},
            {{ method.output }},
            request,
        )
{% endif %}

{% endfor %}
{% endfor %}
@@ -1,3 +0,0 @@
{
  "greeting": "HEY"
}
@@ -1,14 +0,0 @@
syntax = "proto3";

// Enum for the different greeting types
enum Greeting {
  HI = 0;
  HEY = 1;
  // Formal greeting
  HELLO = 2;
}

message Test {
  // Greeting enum example
  Greeting greeting = 1;
}
@@ -1,83 +0,0 @@
#!/usr/bin/env python
import os

# Force pure-python implementation instead of C++, otherwise imports
# break things because we can't properly reset the symbol database.
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python"

import importlib
import json
import subprocess
import sys
from typing import Generator, Tuple

from google.protobuf import symbol_database
from google.protobuf.descriptor_pool import DescriptorPool
from google.protobuf.json_format import MessageToJson, Parse


root = os.path.dirname(os.path.realpath(__file__))


def get_files(end: str) -> Generator[str, None, None]:
    for r, dirs, files in os.walk(root):
        for filename in [f for f in files if f.endswith(end)]:
            yield os.path.join(r, filename)


def get_base(filename: str) -> str:
    return os.path.splitext(os.path.basename(filename))[0]


def ensure_ext(filename: str, ext: str) -> str:
    if not filename.endswith(ext):
        return filename + ext
    return filename


if __name__ == "__main__":
    os.chdir(root)

    if len(sys.argv) > 1:
        proto_files = [ensure_ext(f, ".proto") for f in sys.argv[1:]]
        bases = {get_base(f) for f in proto_files}
        json_files = [
            f for f in get_files(".json") if get_base(f).split("-")[0] in bases
        ]
    else:
        proto_files = get_files(".proto")
        json_files = get_files(".json")

    for filename in proto_files:
        print(f"Generating code for {os.path.basename(filename)}")
        subprocess.run(
            f"protoc --python_out=. {os.path.basename(filename)}", shell=True
        )
        subprocess.run(
            f"protoc --plugin=protoc-gen-custom=../plugin.py --custom_out=. {os.path.basename(filename)}",
            shell=True,
        )

    for filename in json_files:
        # Reset the internal symbol database so we can import the `Test` message
        # multiple times. Ugh.
        sym = symbol_database.Default()
        sym.pool = DescriptorPool()

        parts = get_base(filename).split("-")
        out = filename.replace(".json", ".bin")
        print(f"Using {parts[0]}_pb2 to generate {os.path.basename(out)}")

        imported = importlib.import_module(f"{parts[0]}_pb2")
        input_json = open(filename).read()
        parsed = Parse(input_json, imported.Test())
        serialized = parsed.SerializeToString()
        serialized_json = MessageToJson(parsed, preserving_proto_field_name=True)

        s_loaded = json.loads(serialized_json)
        in_loaded = json.loads(input_json)

        if s_loaded != in_loaded:
            raise AssertionError("Expected JSON to be equal:", s_loaded, in_loaded)

        open(out, "wb").write(serialized)
@@ -1,3 +0,0 @@
{
  "count": -150
}
@@ -1,3 +0,0 @@
{
  "count": 150
}
@@ -1,3 +0,0 @@
{
  "name": "foo"
}
@@ -1,3 +0,0 @@
{
  "count": 1
}
@@ -1,8 +0,0 @@
syntax = "proto3";

message Test {
  oneof foo {
    int32 count = 1;
    string name = 2;
  }
}
@@ -1,4 +0,0 @@
{
  "signed_32": -150,
  "signed_64": "-150"
}
@@ -1,4 +0,0 @@
{
  "signed_32": 150,
  "signed_64": "150"
}
@@ -1,6 +0,0 @@
syntax = "proto3";

message Test {
  sint32 signed_32 = 1;
  sint64 signed_64 = 2;
}
@@ -1,117 +0,0 @@
import betterproto
from dataclasses import dataclass


def test_has_field():
    @dataclass
    class Bar(betterproto.Message):
        baz: int = betterproto.int32_field(1)

    @dataclass
    class Foo(betterproto.Message):
        bar: Bar = betterproto.message_field(1)

    # Unset by default
    foo = Foo()
    assert betterproto.serialized_on_wire(foo.bar) == False

    # Serialized after setting something
    foo.bar.baz = 1
    assert betterproto.serialized_on_wire(foo.bar) == True

    # Still has it after setting the default value
    foo.bar.baz = 0
    assert betterproto.serialized_on_wire(foo.bar) == True

    # Manual override (don't do this)
    foo.bar._serialized_on_wire = False
    assert betterproto.serialized_on_wire(foo.bar) == False

    # Can manually set it but defaults to false
    foo.bar = Bar()
    assert betterproto.serialized_on_wire(foo.bar) == False


def test_enum_as_int_json():
    class TestEnum(betterproto.Enum):
        ZERO = 0
        ONE = 1

    @dataclass
    class Foo(betterproto.Message):
        bar: TestEnum = betterproto.enum_field(1)

    # JSON strings are supported, but ints should still be supported too.
    foo = Foo().from_dict({"bar": 1})
    assert foo.bar == TestEnum.ONE

    # Plain-ol'-ints should serialize properly too.
    foo.bar = 1
    assert foo.to_dict() == {"bar": "ONE"}


def test_unknown_fields():
    @dataclass
    class Newer(betterproto.Message):
        foo: bool = betterproto.bool_field(1)
        bar: int = betterproto.int32_field(2)
        baz: str = betterproto.string_field(3)

    @dataclass
    class Older(betterproto.Message):
        foo: bool = betterproto.bool_field(1)

    newer = Newer(foo=True, bar=1, baz="Hello")
    serialized_newer = bytes(newer)

    # Unknown fields in `Newer` should round trip with `Older`
    round_trip = bytes(Older().parse(serialized_newer))
    assert serialized_newer == round_trip

    new_again = Newer().parse(round_trip)
    assert newer == new_again


def test_oneof_support():
    @dataclass
    class Sub(betterproto.Message):
        val: int = betterproto.int32_field(1)

    @dataclass
    class Foo(betterproto.Message):
        bar: int = betterproto.int32_field(1, group="group1")
        baz: str = betterproto.string_field(2, group="group1")
        sub: Sub = betterproto.message_field(3, group="group2")
        abc: str = betterproto.string_field(4, group="group2")

    foo = Foo()

    assert betterproto.which_one_of(foo, "group1")[0] == ""

    foo.bar = 1
    foo.baz = "test"

    # Other oneof fields should now be unset
    assert foo.bar == 0
    assert betterproto.which_one_of(foo, "group1")[0] == "baz"

    foo.sub.val = 1
    assert betterproto.serialized_on_wire(foo.sub)

    foo.abc = "test"

    # Group 1 shouldn't be touched, group 2 should have reset
    assert foo.sub.val == 0
    assert betterproto.serialized_on_wire(foo.sub) == False
    assert betterproto.which_one_of(foo, "group2")[0] == "abc"

    # Zero value should always serialize for one-of
    foo = Foo(bar=0)
    assert betterproto.which_one_of(foo, "group1")[0] == "bar"
    assert bytes(foo) == b"\x08\x00"

    # Round trip should also work
    foo2 = Foo().parse(bytes(foo))
    assert betterproto.which_one_of(foo2, "group1")[0] == "bar"
    assert foo.bar == 0
    assert betterproto.which_one_of(foo2, "group2")[0] == ""
@@ -1,32 +0,0 @@
import importlib
import json

import pytest

from .generate import get_base, get_files

inputs = get_files(".bin")


@pytest.mark.parametrize("filename", inputs)
def test_sample(filename: str) -> None:
    module = get_base(filename).split("-")[0]
    imported = importlib.import_module(f"betterproto.tests.{module}")
    data_binary = open(filename, "rb").read()
    data_dict = json.loads(open(filename.replace(".bin", ".json")).read())
    t1 = imported.Test().parse(data_binary)
    t2 = imported.Test().from_dict(data_dict)
    print(t1)
    print(t2)

    # Equality should automagically work for dataclasses!
    assert t1 == t2

    # Generally this can't be relied on, but here we are aiming to match the
    # existing Python implementation and aren't doing anything tricky.
    # https://developers.google.com/protocol-buffers/docs/encoding#implications
    assert bytes(t1) == data_binary
    assert bytes(t2) == data_binary

    assert t1.to_dict() == data_dict
    assert t2.to_dict() == data_dict
31  docs/api.rst  Normal file
@@ -0,0 +1,31 @@
.. currentmodule:: betterproto

API reference
=============

The following document outlines betterproto's API. **None** of these classes should be
extended by the user manually.


Message
--------

.. autoclass:: betterproto.Message
    :members:
    :special-members: __bytes__, __bool__


.. autofunction:: betterproto.serialized_on_wire

.. autofunction:: betterproto.which_one_of


Enumerations
-------------

.. autoclass:: betterproto.Enum()
    :members:

.. autoclass:: betterproto.Casing()
    :members:
60  docs/conf.py  Normal file
@@ -0,0 +1,60 @@
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.

import pathlib

import toml


# -- Project information -----------------------------------------------------

project = "betterproto"
copyright = "2019 Daniel G. Taylor"
author = "danielgtaylor"
pyproject = toml.load(open(pathlib.Path(__file__).parent.parent / "pyproject.toml"))


# The full version, including alpha/beta/rc tags.
release = pyproject["tool"]["poetry"]["version"]


# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.intersphinx",
    "sphinx.ext.napoleon",
]

autodoc_member_order = "bysource"
autodoc_typehints = "none"

extlinks = {
    "issue": ("https://github.com/danielgtaylor/python-betterproto/issues/%s", "GH-"),
}

# Links used for cross-referencing stuff in other documentation
intersphinx_mapping = {
    "py": ("https://docs.python.org/3", None),
}


# -- Options for HTML output -------------------------------------------------

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "friendly"

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.

html_theme = "sphinx_rtd_theme"
33  docs/index.rst  Normal file
@@ -0,0 +1,33 @@
Welcome to betterproto's documentation!
=======================================

betterproto is a protobuf compiler and interpreter. It improves the experience of using
Protobuf and gRPC in Python, by generating readable, understandable, and idiomatic
Python code, using modern language features.


Features:
~~~~~~~~~

- Generated messages are both binary & JSON serializable
- Messages use relevant Python types, e.g. ``Enum``, ``datetime`` and ``timedelta``
  objects
- ``async``/``await`` support for gRPC Clients and Servers
- Generates modern, readable, idiomatic Python code

Contents:
~~~~~~~~~

.. toctree::
    :maxdepth: 2

    quick-start
    api
    migrating


If you still can't find what you're looking for, try in one of the following pages:

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
159  docs/migrating.rst  Normal file
@@ -0,0 +1,159 @@
Migrating Guide
===============

Google's protocolbuffers
------------------------

betterproto has a mostly 1 to 1 drop in replacement for Google's protocolbuffers (after
regenerating your protobufs of course) although there are some minor differences.

.. note::

    betterproto implements the same basic methods including:

    - :meth:`betterproto.Message.FromString`
    - :meth:`betterproto.Message.SerializeToString`

    for compatibility purposes, however it is important to note that these are
    effectively aliases for :meth:`betterproto.Message.parse` and
    :meth:`betterproto.Message.__bytes__` respectively.


Determining if a message was sent
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Sometimes it is useful to be able to determine whether a message has been sent on
the wire. This is how the Google wrapper types work to let you know whether a value is
unset (set as the default/zero value), or set as something else, for example.

Use ``betterproto.serialized_on_wire(message)`` to determine if it was sent. This is
a little bit different from the official Google generated Python code, and it lives
outside the generated ``Message`` class to prevent name clashes. Note that it only
supports Proto 3 and thus can only be used to check if ``Message`` fields are set.
You cannot check if a scalar was sent on the wire.

.. code-block:: python

    # Old way (official Google Protobuf package)
    >>> mymessage.HasField('myfield')
    True

    # New way (this project)
    >>> betterproto.serialized_on_wire(mymessage.myfield)
    True


One-of Support
~~~~~~~~~~~~~~

Protobuf supports grouping fields in a oneof clause. Only one of the fields in the group
may be set at a given time. For example, given the proto:

.. code-block:: proto

    syntax = "proto3";

    message Test {
      oneof foo {
        bool on = 1;
        int32 count = 2;
        string name = 3;
      }
    }

You can use ``betterproto.which_one_of(message, group_name)`` to determine which of the
fields was set. It returns a tuple of the field name and value, or a blank string and
``None`` if unset. Again this is a little different than the official Google code
generator:

.. code-block:: python

    # Old way (official Google protobuf package)
    >>> message.WhichOneof("group")
    "foo"

    # New way (this project)
    >>> betterproto.which_one_of(message, "group")
    ("foo", "foo's value")


Well-Known Google Types
~~~~~~~~~~~~~~~~~~~~~~~

Google provides several well-known message types like a timestamp, duration, and several
wrappers used to provide optional zero value support. Each of these has a special JSON
representation and is handled a little differently from normal messages. The Python
mapping for these is as follows:

+-------------------------------+------------------------------------------------+--------------------------+
| ``Google Message``            | ``Python Type``                                | ``Default``              |
+===============================+================================================+==========================+
| ``google.protobuf.duration``  | :class:`datetime.timedelta`                    | ``0``                    |
+-------------------------------+------------------------------------------------+--------------------------+
| ``google.protobuf.timestamp`` | ``Timezone-aware`` :class:`datetime.datetime`  | ``1970-01-01T00:00:00Z`` |
+-------------------------------+------------------------------------------------+--------------------------+
| ``google.protobuf.*Value``    | ``Optional[...]``/``None``                     | ``None``                 |
+-------------------------------+------------------------------------------------+--------------------------+
| ``google.protobuf.*``         | ``betterproto.lib.std.google.protobuf.*``      | ``None``                 |
+-------------------------------+------------------------------------------------+--------------------------+
| ``google.protobuf.*``         | ``betterproto.lib.pydantic.google.protobuf.*`` | ``None``                 |
+-------------------------------+------------------------------------------------+--------------------------+


For the wrapper types, the Python type corresponds to the wrapped type, e.g.
``google.protobuf.BoolValue`` becomes ``Optional[bool]`` while
``google.protobuf.Int32Value`` becomes ``Optional[int]``. All of the optional values
default to ``None``, so don't forget to check for that possible state.

Given:

.. code-block:: proto

    syntax = "proto3";

    import "google/protobuf/duration.proto";
    import "google/protobuf/timestamp.proto";
    import "google/protobuf/wrappers.proto";

    message Test {
      google.protobuf.BoolValue maybe = 1;
      google.protobuf.Timestamp ts = 2;
      google.protobuf.Duration duration = 3;
    }

You can use it as such:

.. code-block:: python

    >>> t = Test().from_dict({"maybe": True, "ts": "2019-01-01T12:00:00Z", "duration": "1.200s"})
    >>> t
    Test(maybe=True, ts=datetime.datetime(2019, 1, 1, 12, 0, tzinfo=datetime.timezone.utc), duration=datetime.timedelta(seconds=1, microseconds=200000))

    >>> t.ts - t.duration
    datetime.datetime(2019, 1, 1, 11, 59, 58, 800000, tzinfo=datetime.timezone.utc)

    >>> t.ts.isoformat()
    '2019-01-01T12:00:00+00:00'

    >>> t.maybe = None
    >>> t.to_dict()
    {'ts': '2019-01-01T12:00:00Z', 'duration': '1.200s'}


[1.2.5] to [2.0.0b1]
--------------------

Updated package structures
~~~~~~~~~~~~~~~~~~~~~~~~~~

Generated code now strictly follows the *package structure* of the ``.proto`` files.
Consequently ``.proto`` files without a package will be combined in a single
``__init__.py`` file. To avoid overwriting existing ``__init__.py`` files, it's best
to compile into a dedicated subdirectory.

Upgrading:

- Remove your previously compiled ``.py`` files.
- Create a new *empty* directory, e.g. ``generated`` or ``lib/generated/proto`` etc.
- Regenerate your Python files into this directory
- Update import statements, e.g. ``import ExampleMessage from generated``
222  docs/quick-start.rst  Normal file
@@ -0,0 +1,222 @@
Getting Started
===============

Installation
++++++++++++

Installation from PyPI is as simple as running:

.. code-block:: sh

    python3 -m pip install -U betterproto

If you are using Windows, then the following should be used instead:

.. code-block:: sh

    py -3 -m pip install -U betterproto

To include the protoc plugin, install ``betterproto[compiler]`` instead of
``betterproto``, e.g.

.. code-block:: sh

    python3 -m pip install -U "betterproto[compiler]"

Compiling proto files
+++++++++++++++++++++

Given you installed the compiler and have a proto file, e.g. ``example.proto``:

.. code-block:: proto

    syntax = "proto3";

    package hello;

    // Greeting represents a message you can tell a user.
    message Greeting {
      string message = 1;
    }

To compile it, you can run the following to invoke protoc directly:

.. code-block:: sh

    mkdir lib
    protoc -I . --python_betterproto_out=lib example.proto

or run the following to invoke protoc via grpcio-tools:

.. code-block:: sh

    pip install grpcio-tools
    python -m grpc_tools.protoc -I . --python_betterproto_out=lib example.proto


This will generate ``lib/__init__.py`` which looks like:

.. code-block:: python

    # Generated by the protocol buffer compiler. DO NOT EDIT!
    # sources: example.proto
    # plugin: python-betterproto
    from dataclasses import dataclass

    import betterproto


    @dataclass
    class Greeting(betterproto.Message):
        """Greeting represents a message you can tell a user."""

        message: str = betterproto.string_field(1)


Then to use it:

.. code-block:: python

    >>> from lib import Greeting

    >>> test = Greeting()
    >>> test
    Greeting(message='')

    >>> test.message = "Hey!"
    >>> test
    Greeting(message="Hey!")

    >>> serialized = bytes(test)
    >>> serialized
    b'\n\x04Hey!'
    >>> Greeting().parse(serialized)
    Greeting(message="Hey!")


Async gRPC Support
++++++++++++++++++

The generated code includes `grpclib <https://grpclib.readthedocs.io/en/latest>`_ based
stub (client and server) classes for rpc services declared in the input proto files.
It is enabled by default.


Given a service definition similar to the one below:

.. code-block:: proto

    syntax = "proto3";

    package echo;

    message EchoRequest {
      string value = 1;
      // Number of extra times to echo
      uint32 extra_times = 2;
    }

    message EchoResponse {
      repeated string values = 1;
    }

    message EchoStreamResponse {
|
||||||
|
string value = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
service Echo {
|
||||||
|
rpc Echo(EchoRequest) returns (EchoResponse);
|
||||||
|
rpc EchoStream(EchoRequest) returns (stream EchoStreamResponse);
|
||||||
|
}
|
||||||
|
|
||||||
|
The generated client can be used like so:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
from grpclib.client import Channel
|
||||||
|
import echo
|
||||||
|
|
||||||
|
|
||||||
|
async def main():
|
||||||
|
channel = Channel(host="127.0.0.1", port=50051)
|
||||||
|
service = echo.EchoStub(channel)
|
||||||
|
response = await service.echo(value="hello", extra_times=1)
|
||||||
|
print(response)
|
||||||
|
|
||||||
|
async for response in service.echo_stream(value="hello", extra_times=1):
|
||||||
|
print(response)
|
||||||
|
|
||||||
|
# don't forget to close the channel when you're done!
|
||||||
|
channel.close()
|
||||||
|
|
||||||
|
asyncio.run(main()) # python 3.7 only
|
||||||
|
|
||||||
|
# outputs
|
||||||
|
EchoResponse(values=['hello', 'hello'])
|
||||||
|
EchoStreamResponse(value='hello')
|
||||||
|
EchoStreamResponse(value='hello')
|
||||||
|
|
||||||
|
|
||||||
|
The server-facing stubs can be used to implement a Python
|
||||||
|
gRPC server.
|
||||||
|
To use them, simply subclass the base class in the generated files and override the
|
||||||
|
service methods:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
from echo import EchoBase
|
||||||
|
from grpclib.server import Server
|
||||||
|
from typing import AsyncIterator
|
||||||
|
|
||||||
|
|
||||||
|
class EchoService(EchoBase):
|
||||||
|
async def echo(self, value: str, extra_times: int) -> "EchoResponse":
|
||||||
|
return value
|
||||||
|
|
||||||
|
async def echo_stream(
|
||||||
|
self, value: str, extra_times: int
|
||||||
|
) -> AsyncIterator["EchoStreamResponse"]:
|
||||||
|
for _ in range(extra_times):
|
||||||
|
yield value
|
||||||
|
|
||||||
|
|
||||||
|
async def start_server():
|
||||||
|
HOST = "127.0.0.1"
|
||||||
|
PORT = 1337
|
||||||
|
server = Server([EchoService()])
|
||||||
|
await server.start(HOST, PORT)
|
||||||
|
await server.serve_forever()
|
||||||
|
|
||||||
|
JSON
|
||||||
|
++++
|
||||||
|
Message objects include :meth:`betterproto.Message.to_json` and
|
||||||
|
:meth:`betterproto.Message.from_json` methods for JSON (de)serialisation, and
|
||||||
|
:meth:`betterproto.Message.to_dict`, :meth:`betterproto.Message.from_dict` for
|
||||||
|
converting back and forth from JSON serializable dicts.
|
||||||
|
|
||||||
|
For compatibility the default is to convert field names to
|
||||||
|
:attr:`betterproto.Casing.CAMEL`. You can control this behavior by passing a
|
||||||
|
different casing value, e.g:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class MyMessage(betterproto.Message):
|
||||||
|
a_long_field_name: str = betterproto.string_field(1)
|
||||||
|
|
||||||
|
|
||||||
|
>>> test = MyMessage(a_long_field_name="Hello World!")
|
||||||
|
>>> test.to_dict(betterproto.Casing.SNAKE)
|
||||||
|
{"a_long_field_name": "Hello World!"}
|
||||||
|
>>> test.to_dict(betterproto.Casing.CAMEL)
|
||||||
|
{"aLongFieldName": "Hello World!"}
|
||||||
|
|
||||||
|
>>> test.to_json(indent=2)
|
||||||
|
'{\n "aLongFieldName": "Hello World!"\n}'
|
||||||
|
|
||||||
|
>>> test.from_dict({"aLongFieldName": "Goodbye World!"})
|
||||||
|
>>> test.a_long_field_name
|
||||||
|
"Goodbye World!"
|
poetry.lock (generated file, 2259 lines; diff suppressed)
pyproject.toml
@@ -1,9 +1,169 @@
Removed ([tool.black] and [tool.isort] configuration):

[tool.black]
target-version = ['py37']

[tool.isort]
multi_line_output = 3
include_trailing_comma = true
force_grid_wrap = 0
use_parentheses = true
line_length = 88

Replaced with:

[project]
name = "betterproto"
version = "2.0.0b7"
description = "A better Protobuf / gRPC generator & library"
authors = [
    {name = "Daniel G. Taylor", email = "danielgtaylor@gmail.com"}
]
readme = "README.md"
repository = "https://github.com/danielgtaylor/python-betterproto"
keywords = ["protobuf", "gRPC"]
license = "MIT"
packages = [
    { include = "betterproto", from = "src" }
]
requires-python = ">=3.9,<4.0"
dynamic = ["dependencies"]

[tool.poetry.dependencies]
# The Ruff version is pinned. To update it, also update it in .pre-commit-config.yaml
ruff = { version = "~0.9.1", optional = true }
grpclib = "^0.4.1"
jinja2 = { version = ">=3.0.3", optional = true }
python-dateutil = "^2.8"
typing-extensions = "^4.7.1"
betterproto-rust-codec = { version = "0.1.1", optional = true }

[tool.poetry.group.dev.dependencies]
asv = "^0.6.4"
bpython = "^0.24"
jinja2 = ">=3.0.3"
mypy = "^1.11.2"
sphinx = "7.4.7"
sphinx-rtd-theme = "3.0.2"
pre-commit = "^4.0.1"
grpcio-tools = "^1.54.2"
tox = "^4.0.0"

[tool.poetry.group.test.dependencies]
poethepoet = ">=0.9.0"
pytest = "^7.4.4"
pytest-asyncio = "^0.23.8"
pytest-cov = "^6.0.0"
pytest-mock = "^3.1.1"
pydantic = ">=2.0,<3"
protobuf = "^5"
cachelib = "^0.13.0"
tomlkit = ">=0.7.0"

[project.scripts]
protoc-gen-python_betterproto = "betterproto.plugin:main"

[project.optional-dependencies]
compiler = ["ruff", "jinja2"]
rust-codec = ["betterproto-rust-codec"]

[tool.ruff]
extend-exclude = ["tests/output_*"]
target-version = "py38"

[tool.ruff.lint.isort]
combine-as-imports = true
lines-after-imports = 2

# Dev workflow tasks

[tool.poe.tasks.generate]
script = "tests.generate:main"
help = "Generate test cases (do this once before running test)"

[tool.poe.tasks.test]
cmd = "pytest"
help = "Run tests"

[tool.poe.tasks.types]
cmd = "mypy src --ignore-missing-imports"
help = "Check types with mypy"

[tool.poe.tasks.format]
sequence = ["_format", "_sort-imports"]
help = "Format the source code, and sort the imports"

[tool.poe.tasks.check]
sequence = ["_check-format", "_check-imports"]
help = "Check that the source code is formatted and the imports sorted"

[tool.poe.tasks._format]
cmd = "ruff format src tests"
help = "Format the source code without sorting the imports"

[tool.poe.tasks._sort-imports]
cmd = "ruff check --select I --fix src tests"
help = "Sort the imports"

[tool.poe.tasks._check-format]
cmd = "ruff format --diff src tests"
help = "Check that the source code is formatted"

[tool.poe.tasks._check-imports]
cmd = "ruff check --select I src tests"
help = "Check that the imports are sorted"

[tool.poe.tasks.docs]
cmd = "sphinx-build docs docs/build"
help = "Build the sphinx docs"

[tool.poe.tasks.bench]
shell = "asv run master^! && asv run HEAD^! && asv compare master HEAD"
help = "Benchmark current commit vs. master branch"

[tool.poe.tasks.clean]
cmd = """
rm -rf .asv .coverage .mypy_cache .pytest_cache
       dist betterproto.egg-info **/__pycache__
       testsoutput_*
"""
help = "Clean out generated files from the workspace"

[tool.poe.tasks.generate_lib]
cmd = """
protoc
    --plugin=protoc-gen-custom=src/betterproto/plugin/main.py
    --custom_opt=INCLUDE_GOOGLE
    --custom_out=src/betterproto/lib/std
    -I C:\\work\\include
    C:\\work\\include\\google\\protobuf\\**\\*.proto
"""
help = "Regenerate the types in betterproto.lib.std.google"

# CI tasks

[tool.poe.tasks.full-test]
shell = "poe generate && tox"
help = "Run tests with multiple pythons"

[tool.doc8]
paths = ["docs"]
max_line_length = 88

[tool.doc8.ignore_path_errors]
"docs/migrating.rst" = [
    "D001",  # contains table which is longer than 88 characters long
]

[tool.coverage.run]
omit = ["betterproto/tests/*"]

[tool.tox]
legacy_tox_ini = """
[tox]
requires =
    tox>=4.2
    tox-poetry-installer[poetry]==1.0.0b1
env_list =
    py311
    py38
    py37

[testenv]
commands =
    pytest {posargs: --cov betterproto}
poetry_dep_groups =
    test
require_locked_deps = true
require_poetry = true
"""

[build-system]
requires = ["poetry-core>=2.0.0,<3"]
build-backend = "poetry.core.masonry.api"
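The ``[tool.poe.tasks.*]`` tables above define the project's dev workflow. As a
minimal sketch (assuming the dev dependencies, which include ``poethepoet``, are
installed in the active environment), each task is invoked by name through ``poe``:

.. code-block:: sh

    poe generate   # generate test cases once before testing
    poe test       # run pytest
    poe check      # verify formatting and import order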
pytest.ini (new file)
@@ -0,0 +1,5 @@
[pytest]
python_files = test_*.py
python_classes =
norecursedirs = **/output_*
addopts = -p no:warnings
setup.py (deleted)
@@ -1,22 +0,0 @@
from setuptools import setup, find_packages

setup(
    name="betterproto",
    version="1.0",
    description="A better Protobuf / gRPC generator & library",
    url="http://github.com/danielgtaylor/python-betterproto",
    author="Daniel G. Taylor",
    author_email="danielgtaylor@gmail.com",
    license="MIT",
    entry_points={
        "console_scripts": ["protoc-gen-python_betterproto=betterproto.plugin:main"]
    },
    packages=find_packages(
        exclude=["tests", "*.tests", "*.tests.*", "output", "output.*"]
    ),
    package_data={"betterproto": ["py.typed", "templates/template.py"]},
    python_requires=">=3.7",
    install_requires=["grpclib"],
    extras_require={"compiler": ["jinja2", "protobuf"]},
    zip_safe=False,
)
src/betterproto/__init__.py (new file, 2039 lines; diff suppressed)
src/betterproto/_types.py (new file)
@@ -0,0 +1,14 @@
from typing import (
    TYPE_CHECKING,
    TypeVar,
)


if TYPE_CHECKING:
    from grpclib._typing import IProtoMessage

    from . import Message

# Bound type variable to allow methods to return `self` of subclasses
T = TypeVar("T", bound="Message")
ST = TypeVar("ST", bound="IProtoMessage")
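The bound ``TypeVar`` above is what lets methods on the message base class be typed
as returning the *subclass* they are called on. A minimal sketch of the pattern (the
``Base``/``Child`` classes and the ``parse`` signature here are illustrative, not the
library's exact definitions):

.. code-block:: python

    from typing import TypeVar

    T = TypeVar("T", bound="Base")


    class Base:
        def parse(self: T, data: bytes) -> T:
            # annotating `self` with the bound TypeVar makes the return
            # type track the concrete subclass
            return self


    class Child(Base):
        pass


    child: Child = Child().parse(b"")  # type-checks: T is inferred as Child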
src/betterproto/_version.py (new file)
@@ -0,0 +1,7 @@
try:
    from importlib import metadata
except ImportError:  # for Python<3.8
    import importlib_metadata as metadata  # type: ignore


__version__ = metadata.version("betterproto")
src/betterproto/casing.py (new file)
@@ -0,0 +1,143 @@
import keyword
import re


# Word delimiters and symbols that will not be preserved when re-casing.
# language=PythonRegExp
SYMBOLS = "[^a-zA-Z0-9]*"

# Optionally capitalized word.
# language=PythonRegExp
WORD = "[A-Z]*[a-z]*[0-9]*"

# Uppercase word, not followed by lowercase letters.
# language=PythonRegExp
WORD_UPPER = "[A-Z]+(?![a-z])[0-9]*"


def safe_snake_case(value: str) -> str:
    """Snake case a value taking into account Python keywords."""
    value = snake_case(value)
    value = sanitize_name(value)
    return value


def snake_case(value: str, strict: bool = True) -> str:
    """
    Join words with an underscore into lowercase and remove symbols.

    Parameters
    -----------
    value: :class:`str`
        The value to convert.
    strict: :class:`bool`
        Whether or not to force single underscores.

    Returns
    --------
    :class:`str`
        The value in snake_case.
    """

    def substitute_word(symbols: str, word: str, is_start: bool) -> str:
        if not word:
            return ""
        if strict:
            delimiter_count = 0 if is_start else 1  # Single underscore if strict.
        elif is_start:
            delimiter_count = len(symbols)
        elif word.isupper() or word.islower():
            delimiter_count = max(
                1, len(symbols)
            )  # Preserve all delimiters if not strict.
        else:
            delimiter_count = len(symbols) + 1  # Extra underscore for leading capital.

        return ("_" * delimiter_count) + word.lower()

    snake = re.sub(
        f"(^)?({SYMBOLS})({WORD_UPPER}|{WORD})",
        lambda groups: substitute_word(groups[2], groups[3], groups[1] is not None),
        value,
    )
    return snake


def pascal_case(value: str, strict: bool = True) -> str:
    """
    Capitalize each word and remove symbols.

    Parameters
    -----------
    value: :class:`str`
        The value to convert.
    strict: :class:`bool`
        Whether or not to output only alphanumeric characters.

    Returns
    --------
    :class:`str`
        The value in PascalCase.
    """

    def substitute_word(symbols, word):
        if strict:
            return word.capitalize()  # Remove all delimiters

        if word.islower():
            delimiter_length = len(symbols[:-1])  # Lose one delimiter
        else:
            delimiter_length = len(symbols)  # Preserve all delimiters

        return ("_" * delimiter_length) + word.capitalize()

    return re.sub(
        f"({SYMBOLS})({WORD_UPPER}|{WORD})",
        lambda groups: substitute_word(groups[1], groups[2]),
        value,
    )


def camel_case(value: str, strict: bool = True) -> str:
    """
    Capitalize all words except first and remove symbols.

    Parameters
    -----------
    value: :class:`str`
        The value to convert.
    strict: :class:`bool`
        Whether or not to output only alphanumeric characters.

    Returns
    --------
    :class:`str`
        The value in camelCase.
    """
    return lowercase_first(pascal_case(value, strict=strict))


def lowercase_first(value: str) -> str:
    """
    Lower cases the first character of the value.

    Parameters
    ----------
    value: :class:`str`
        The value to lower case.

    Returns
    -------
    :class:`str`
        The lower cased string.
    """
    return value[0:1].lower() + value[1:]


def sanitize_name(value: str) -> str:
    # https://www.python.org/dev/peps/pep-0008/#descriptive-naming-styles
    if keyword.iskeyword(value):
        return f"{value}_"
    if not value.isidentifier():
        return f"_{value}"
    return value
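As a quick illustration of the helpers above (a sketch, not part of the module;
expected outputs shown as comments):

.. code-block:: python

    from betterproto.casing import (
        camel_case,
        pascal_case,
        safe_snake_case,
        snake_case,
    )

    snake_case("FooBar")          # 'foo_bar'
    snake_case("HTTPResponse")    # 'http_response'  (WORD_UPPER keeps the acronym together)
    pascal_case("my_field_name")  # 'MyFieldName'
    camel_case("my_field_name")   # 'myFieldName'
    safe_snake_case("class")      # 'class_'  (Python keywords get a trailing underscore)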
src/betterproto/compile/__init__.py (new, empty file)

src/betterproto/compile/importing.py (new file)
@@ -0,0 +1,183 @@
from __future__ import annotations

import os
import re
from typing import (
    TYPE_CHECKING,
    Dict,
    List,
    Set,
    Tuple,
    Type,
)

from ..casing import safe_snake_case
from ..lib.google import protobuf as google_protobuf
from .naming import pythonize_class_name


if TYPE_CHECKING:
    from ..plugin.typing_compiler import TypingCompiler

WRAPPER_TYPES: Dict[str, Type] = {
    ".google.protobuf.DoubleValue": google_protobuf.DoubleValue,
    ".google.protobuf.FloatValue": google_protobuf.FloatValue,
    ".google.protobuf.Int32Value": google_protobuf.Int32Value,
    ".google.protobuf.Int64Value": google_protobuf.Int64Value,
    ".google.protobuf.UInt32Value": google_protobuf.UInt32Value,
    ".google.protobuf.UInt64Value": google_protobuf.UInt64Value,
    ".google.protobuf.BoolValue": google_protobuf.BoolValue,
    ".google.protobuf.StringValue": google_protobuf.StringValue,
    ".google.protobuf.BytesValue": google_protobuf.BytesValue,
}


def parse_source_type_name(field_type_name: str) -> Tuple[str, str]:
    """
    Split full source type name into package and type name.
    E.g. 'root.package.Message' -> ('root.package', 'Message')
         'root.Message.SomeEnum' -> ('root', 'Message.SomeEnum')
    """
    package_match = re.match(r"^\.?([^A-Z]+)\.(.+)", field_type_name)
    if package_match:
        package = package_match.group(1)
        name = package_match.group(2)
    else:
        package = ""
        name = field_type_name.lstrip(".")
    return package, name


def get_type_reference(
    *,
    package: str,
    imports: set,
    source_type: str,
    typing_compiler: TypingCompiler,
    unwrap: bool = True,
    pydantic: bool = False,
) -> str:
    """
    Return a Python type name for a proto type reference. Adds the import if
    necessary. Unwraps well known type if required.
    """
    if unwrap:
        if source_type in WRAPPER_TYPES:
            wrapped_type = type(WRAPPER_TYPES[source_type]().value)
            return typing_compiler.optional(wrapped_type.__name__)

        if source_type == ".google.protobuf.Duration":
            return "timedelta"

        elif source_type == ".google.protobuf.Timestamp":
            return "datetime"

    source_package, source_type = parse_source_type_name(source_type)

    current_package: List[str] = package.split(".") if package else []
    py_package: List[str] = source_package.split(".") if source_package else []
    py_type: str = pythonize_class_name(source_type)

    compiling_google_protobuf = current_package == ["google", "protobuf"]
    importing_google_protobuf = py_package == ["google", "protobuf"]
    if importing_google_protobuf and not compiling_google_protobuf:
        py_package = (
            ["betterproto", "lib"] + (["pydantic"] if pydantic else []) + py_package
        )

    if py_package[:1] == ["betterproto"]:
        return reference_absolute(imports, py_package, py_type)

    if py_package == current_package:
        return reference_sibling(py_type)

    if py_package[: len(current_package)] == current_package:
        return reference_descendent(current_package, imports, py_package, py_type)

    if current_package[: len(py_package)] == py_package:
        return reference_ancestor(current_package, imports, py_package, py_type)

    return reference_cousin(current_package, imports, py_package, py_type)


def reference_absolute(imports: Set[str], py_package: List[str], py_type: str) -> str:
    """
    Returns a reference to a python type located in the root, i.e. sys.path.
    """
    string_import = ".".join(py_package)
    string_alias = safe_snake_case(string_import)
    imports.add(f"import {string_import} as {string_alias}")
    return f'"{string_alias}.{py_type}"'


def reference_sibling(py_type: str) -> str:
    """
    Returns a reference to a python type within the same package as the current package.
    """
    return f'"{py_type}"'


def reference_descendent(
    current_package: List[str], imports: Set[str], py_package: List[str], py_type: str
) -> str:
    """
    Returns a reference to a python type in a package that is a descendent of the
    current package, and adds the required import that is aliased to avoid name
    conflicts.
    """
    importing_descendent = py_package[len(current_package) :]
    string_from = ".".join(importing_descendent[:-1])
    string_import = importing_descendent[-1]
    if string_from:
        string_alias = "_".join(importing_descendent)
        imports.add(f"from .{string_from} import {string_import} as {string_alias}")
        return f'"{string_alias}.{py_type}"'
    else:
        imports.add(f"from . import {string_import}")
        return f'"{string_import}.{py_type}"'


def reference_ancestor(
    current_package: List[str], imports: Set[str], py_package: List[str], py_type: str
) -> str:
    """
    Returns a reference to a python type in a package which is an ancestor to the
    current package, and adds the required import that is aliased (if possible) to avoid
    name conflicts.

    Adds trailing __ to avoid name mangling (python.org/dev/peps/pep-0008/#id34).
    """
    distance_up = len(current_package) - len(py_package)
    if py_package:
        string_import = py_package[-1]
        string_alias = f"_{'_' * distance_up}{string_import}__"
        string_from = f"..{'.' * distance_up}"
        imports.add(f"from {string_from} import {string_import} as {string_alias}")
        return f'"{string_alias}.{py_type}"'
    else:
        string_alias = f"{'_' * distance_up}{py_type}__"
        imports.add(f"from .{'.' * distance_up} import {py_type} as {string_alias}")
        return f'"{string_alias}"'


def reference_cousin(
    current_package: List[str], imports: Set[str], py_package: List[str], py_type: str
) -> str:
    """
    Returns a reference to a python type in a package that is not descendent, ancestor
    or sibling, and adds the required import that is aliased to avoid name conflicts.
    """
    shared_ancestry = os.path.commonprefix([current_package, py_package])  # type: ignore
    distance_up = len(current_package) - len(shared_ancestry)
    string_from = f".{'.' * distance_up}" + ".".join(
        py_package[len(shared_ancestry) : -1]
    )
    string_import = py_package[-1]
    # Add trailing __ to avoid name mangling (python.org/dev/peps/pep-0008/#id34)
    string_alias = (
        f"{'_' * distance_up}"
        + safe_snake_case(".".join(py_package[len(shared_ancestry) :]))
        + "__"
    )
    imports.add(f"from {string_from} import {string_import} as {string_alias}")
    return f'"{string_alias}.{py_type}"'
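For instance, ``parse_source_type_name`` splits on the first capitalised component,
which is how nested types keep their enclosing message in the type part (a sketch;
outputs shown as comments):

.. code-block:: python

    from betterproto.compile.importing import parse_source_type_name

    parse_source_type_name(".google.protobuf.Duration")  # ('google.protobuf', 'Duration')
    parse_source_type_name("root.package.Message")       # ('root.package', 'Message')
    parse_source_type_name("root.Message.SomeEnum")      # ('root', 'Message.SomeEnum')
    parse_source_type_name("Message")                    # ('', 'Message')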
src/betterproto/compile/naming.py (new file)
@@ -0,0 +1,21 @@
from betterproto import casing


def pythonize_class_name(name: str) -> str:
    return casing.pascal_case(name)


def pythonize_field_name(name: str) -> str:
    return casing.safe_snake_case(name)


def pythonize_method_name(name: str) -> str:
    return casing.safe_snake_case(name)


def pythonize_enum_member_name(name: str, enum_name: str) -> str:
    enum_name = casing.snake_case(enum_name).upper()
    find = name.find(enum_name)
    if find != -1:
        name = name[find + len(enum_name) :].strip("_")
    return casing.sanitize_name(name)
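``pythonize_enum_member_name`` strips a redundant enum-name prefix from member names,
which is common in proto style (a sketch; outputs shown as comments):

.. code-block:: python

    from betterproto.compile.naming import (
        pythonize_class_name,
        pythonize_enum_member_name,
        pythonize_field_name,
    )

    pythonize_class_name("my_message")                # 'MyMessage'
    pythonize_field_name("DeviceID")                  # 'device_id'
    pythonize_enum_member_name("COLOR_RED", "Color")  # 'RED'
    pythonize_enum_member_name("UNKNOWN", "Color")    # 'UNKNOWN' (no prefix to strip)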
src/betterproto/enum.py (new file)
@@ -0,0 +1,197 @@
from __future__ import annotations

from enum import (
    EnumMeta,
    IntEnum,
)
from types import MappingProxyType
from typing import (
    TYPE_CHECKING,
    Any,
    Dict,
    Optional,
    Tuple,
)


if TYPE_CHECKING:
    from collections.abc import (
        Generator,
        Mapping,
    )

    from typing_extensions import (
        Never,
        Self,
    )


def _is_descriptor(obj: object) -> bool:
    return (
        hasattr(obj, "__get__") or hasattr(obj, "__set__") or hasattr(obj, "__delete__")
    )


class EnumType(EnumMeta if TYPE_CHECKING else type):
    _value_map_: Mapping[int, Enum]
    _member_map_: Mapping[str, Enum]

    def __new__(
        mcs, name: str, bases: Tuple[type, ...], namespace: Dict[str, Any]
    ) -> Self:
        value_map = {}
        member_map = {}

        new_mcs = type(
            f"{name}Type",
            tuple(
                dict.fromkeys(
                    [base.__class__ for base in bases if base.__class__ is not type]
                    + [EnumType, type]
                )
            ),  # reorder the bases so EnumType and type are last to avoid conflicts
            {"_value_map_": value_map, "_member_map_": member_map},
        )

        members = {
            name: value
            for name, value in namespace.items()
            if not _is_descriptor(value) and not name.startswith("__")
        }

        cls = type.__new__(
            new_mcs,
            name,
            bases,
            {key: value for key, value in namespace.items() if key not in members},
        )
        # this allows us to disallow member access from other members as
        # members become proper class variables

        for name, value in members.items():
            member = value_map.get(value)
            if member is None:
                member = cls.__new__(cls, name=name, value=value)  # type: ignore
                value_map[value] = member
            member_map[name] = member
            type.__setattr__(new_mcs, name, member)

        return cls

    if not TYPE_CHECKING:

        def __call__(cls, value: int) -> Enum:
            try:
                return cls._value_map_[value]
            except (KeyError, TypeError):
                raise ValueError(f"{value!r} is not a valid {cls.__name__}") from None

    def __iter__(cls) -> Generator[Enum, None, None]:
        yield from cls._member_map_.values()

    def __reversed__(cls) -> Generator[Enum, None, None]:
        yield from reversed(cls._member_map_.values())

    def __getitem__(cls, key: str) -> Enum:
        return cls._member_map_[key]

    @property
    def __members__(cls) -> MappingProxyType[str, Enum]:
        return MappingProxyType(cls._member_map_)

    def __repr__(cls) -> str:
        return f"<enum {cls.__name__!r}>"

    def __len__(cls) -> int:
        return len(cls._member_map_)

    def __setattr__(cls, name: str, value: Any) -> Never:
        raise AttributeError(f"{cls.__name__}: cannot reassign Enum members.")

    def __delattr__(cls, name: str) -> Never:
        raise AttributeError(f"{cls.__name__}: cannot delete Enum members.")

    def __contains__(cls, member: object) -> bool:
        return isinstance(member, cls) and member.name in cls._member_map_


class Enum(IntEnum if TYPE_CHECKING else int, metaclass=EnumType):
    """
    The base class for protobuf enumerations, all generated enumerations will
    inherit from this. Emulates `enum.IntEnum`.
    """

    name: Optional[str]
    value: int

    if not TYPE_CHECKING:

        def __new__(cls, *, name: Optional[str], value: int) -> Self:
            self = super().__new__(cls, value)
            super().__setattr__(self, "name", name)
            super().__setattr__(self, "value", value)
            return self

    def __getnewargs_ex__(self) -> Tuple[Tuple[()], Dict[str, Any]]:
        return (), {"name": self.name, "value": self.value}

    def __str__(self) -> str:
        return self.name or "None"

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}.{self.name}"

    def __setattr__(self, key: str, value: Any) -> Never:
        raise AttributeError(
            f"{self.__class__.__name__} Cannot reassign a member's attributes."
        )

    def __delattr__(self, item: Any) -> Never:
        raise AttributeError(
            f"{self.__class__.__name__} Cannot delete a member's attributes."
        )

    def __copy__(self) -> Self:
        return self

    def __deepcopy__(self, memo: Any) -> Self:
        return self

    @classmethod
    def try_value(cls, value: int = 0) -> Self:
        """Return the member which corresponds to the value.

        Parameters
        -----------
        value: :class:`int`
            The value of the enum member to get.

        Returns
        -------
        :class:`Enum`
            The corresponding member or a new instance of the enum if
            ``value`` isn't actually a member.
        """
        try:
            return cls._value_map_[value]
        except (KeyError, TypeError):
            return cls.__new__(cls, name=None, value=value)

    @classmethod
    def from_string(cls, name: str) -> Self:
        """Return the member which corresponds to the string name.

        Parameters
        -----------
        name: :class:`str`
            The name of the enum member to get.

        Raises
        -------
        :exc:`ValueError`
            The member was not found in the Enum.
        """
        try:
            return cls._member_map_[name]
        except KeyError as e:
            raise ValueError(f"Unknown value {name} for enum {cls.__name__}") from e
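A short sketch of how these behave with a hypothetical generated enum (``Color`` is
illustrative, not a real betterproto type; results shown as comments):

.. code-block:: python

    import betterproto


    class Color(betterproto.Enum):
        UNSPECIFIED = 0
        RED = 1

    Color(1)                   # Color.RED
    Color.from_string("RED")   # Color.RED
    Color.try_value(42)        # unknown value: a new instance whose name is None
    str(Color.try_value(42))   # 'None'
    Color(42)                  # raises ValueError: 42 is not a valid Color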
src/betterproto/grpc/__init__.py (new, empty file)

src/betterproto/grpc/grpclib_client.py (new file)
@@ -0,0 +1,177 @@
import asyncio
from abc import ABC
from typing import (
    TYPE_CHECKING,
    AsyncIterable,
    AsyncIterator,
    Collection,
    Iterable,
    Mapping,
    Optional,
    Tuple,
    Type,
    Union,
)

import grpclib.const


if TYPE_CHECKING:
    from grpclib.client import Channel
    from grpclib.metadata import Deadline

    from .._types import (
        ST,
        IProtoMessage,
        Message,
        T,
    )


Value = Union[str, bytes]
MetadataLike = Union[Mapping[str, Value], Collection[Tuple[str, Value]]]
MessageSource = Union[Iterable["IProtoMessage"], AsyncIterable["IProtoMessage"]]


class ServiceStub(ABC):
    """
    Base class for async gRPC clients.
    """

    def __init__(
        self,
        channel: "Channel",
        *,
        timeout: Optional[float] = None,
        deadline: Optional["Deadline"] = None,
        metadata: Optional[MetadataLike] = None,
    ) -> None:
        self.channel = channel
        self.timeout = timeout
        self.deadline = deadline
        self.metadata = metadata

    def __resolve_request_kwargs(
        self,
        timeout: Optional[float],
        deadline: Optional["Deadline"],
        metadata: Optional[MetadataLike],
    ):
        return {
            "timeout": self.timeout if timeout is None else timeout,
            "deadline": self.deadline if deadline is None else deadline,
            "metadata": self.metadata if metadata is None else metadata,
        }

    async def _unary_unary(
        self,
        route: str,
        request: "IProtoMessage",
        response_type: Type["T"],
        *,
        timeout: Optional[float] = None,
        deadline: Optional["Deadline"] = None,
        metadata: Optional[MetadataLike] = None,
    ) -> "T":
        """Make a unary request and return the response."""
        async with self.channel.request(
            route,
            grpclib.const.Cardinality.UNARY_UNARY,
            type(request),
            response_type,
            **self.__resolve_request_kwargs(timeout, deadline, metadata),
        ) as stream:
            await stream.send_message(request, end=True)
            response = await stream.recv_message()
            assert response is not None
            return response

    async def _unary_stream(
        self,
        route: str,
        request: "IProtoMessage",
        response_type: Type["T"],
        *,
        timeout: Optional[float] = None,
        deadline: Optional["Deadline"] = None,
        metadata: Optional[MetadataLike] = None,
    ) -> AsyncIterator["T"]:
        """Make a unary request and return the stream response iterator."""
        async with self.channel.request(
            route,
            grpclib.const.Cardinality.UNARY_STREAM,
            type(request),
            response_type,
            **self.__resolve_request_kwargs(timeout, deadline, metadata),
        ) as stream:
            await stream.send_message(request, end=True)
            async for message in stream:
                yield message

    async def _stream_unary(
        self,
        route: str,
        request_iterator: MessageSource,
        request_type: Type["IProtoMessage"],
        response_type: Type["T"],
        *,
        timeout: Optional[float] = None,
        deadline: Optional["Deadline"] = None,
        metadata: Optional[MetadataLike] = None,
    ) -> "T":
        """Make a stream request and return the response."""
        async with self.channel.request(
            route,
            grpclib.const.Cardinality.STREAM_UNARY,
            request_type,
            response_type,
            **self.__resolve_request_kwargs(timeout, deadline, metadata),
        ) as stream:
            await stream.send_request()
            await self._send_messages(stream, request_iterator)
            response = await stream.recv_message()
            assert response is not None
            return response

    async def _stream_stream(
        self,
        route: str,
        request_iterator: MessageSource,
        request_type: Type["IProtoMessage"],
        response_type: Type["T"],
        *,
        timeout: Optional[float] = None,
        deadline: Optional["Deadline"] = None,
        metadata: Optional[MetadataLike] = None,
    ) -> AsyncIterator["T"]:
        """
        Make a stream request and return an AsyncIterator to iterate over response
        messages.
        """
        async with self.channel.request(
            route,
            grpclib.const.Cardinality.STREAM_STREAM,
            request_type,
            response_type,
            **self.__resolve_request_kwargs(timeout, deadline, metadata),
        ) as stream:
            await stream.send_request()
            sending_task = asyncio.ensure_future(
                self._send_messages(stream, request_iterator)
            )
            try:
                async for response in stream:
                    yield response
            except:
                sending_task.cancel()
                raise

    @staticmethod
    async def _send_messages(stream, messages: MessageSource):
        if isinstance(messages, AsyncIterable):
            async for message in messages:
                await stream.send_message(message)
        else:
            for message in messages:
                await stream.send_message(message)
        await stream.end()
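The generated client stubs are thin wrappers over these helpers: each rpc method
builds its request message and delegates to ``_unary_unary``, ``_unary_stream``,
``_stream_unary`` or ``_stream_stream`` with the service route. A rough sketch of
what the plugin emits for the quick-start ``Echo`` service (simplified, not the
verbatim generated code; the real output also forwards timeout/deadline/metadata):

.. code-block:: python

    from typing import AsyncIterator

    import betterproto

    # assumes EchoRequest, EchoResponse, EchoStreamResponse as in the
    # quick-start example


    class EchoStub(betterproto.ServiceStub):
        async def echo(self, *, value: str = "", extra_times: int = 0) -> "EchoResponse":
            request = EchoRequest(value=value, extra_times=extra_times)
            # route format is "/<package>.<Service>/<Method>"
            return await self._unary_unary("/echo.Echo/Echo", request, EchoResponse)

        async def echo_stream(
            self, *, value: str = "", extra_times: int = 0
        ) -> AsyncIterator["EchoStreamResponse"]:
            request = EchoRequest(value=value, extra_times=extra_times)
            async for response in self._unary_stream(
                "/echo.Echo/EchoStream", request, EchoStreamResponse
            ):
                yield response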
src/betterproto/grpc/grpclib_server.py (new file)
@@ -0,0 +1,33 @@
from abc import ABC
from collections.abc import AsyncIterable
from typing import (
    Any,
    Callable,
    Dict,
)

import grpclib
import grpclib.server


class ServiceBase(ABC):
    """
    Base class for async gRPC servers.
    """

    async def _call_rpc_handler_server_stream(
        self,
        handler: Callable,
        stream: grpclib.server.Stream,
        request: Any,
    ) -> None:
        response_iter = handler(request)
        # check if response is actually an AsyncIterator
        # this might be false if the method just returns without
        # yielding at least once
        # in that case, we just interpret it as an empty iterator
        if isinstance(response_iter, AsyncIterable):
            async for response_message in response_iter:
                await stream.send_message(response_message)
        else:
            response_iter.close()
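The "empty iterator" fallback above matters because a server-stream handler that
returns before its first ``yield`` compiles to a plain coroutine, not an async
generator. A hedged sketch of both shapes (``EchoStreamResponse`` as in the
quick-start example):

.. code-block:: python

    # an async generator: isinstance(handler(request), AsyncIterable) is True,
    # so each yielded message is sent on the stream
    async def echo_stream(request):
        for _ in range(request.extra_times):
            yield EchoStreamResponse(value=request.value)


    # no yield anywhere: handler(request) is a bare coroutine object, so the
    # base class closes it and sends no messages
    async def empty_stream(request):
        return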
src/betterproto/grpc/util/__init__.py (new, empty file)

src/betterproto/grpc/util/async_channel.py (new file)
@@ -0,0 +1,193 @@
import asyncio
from typing import (
    AsyncIterable,
    AsyncIterator,
    Iterable,
    Optional,
    TypeVar,
    Union,
)


T = TypeVar("T")


class ChannelClosed(Exception):
    """
    An exception raised on an attempt to send through a closed channel
    """


class ChannelDone(Exception):
    """
    An exception raised on an attempt to receive from a channel that is both closed
    and empty.
    """


class AsyncChannel(AsyncIterable[T]):
    """
    A buffered async channel for sending items between coroutines with FIFO ordering.

    This makes decoupled bidirectional streaming gRPC requests easy if used like:

    .. code-block:: python

        client = GeneratedStub(grpclib_chan)
        request_channel = AsyncChannel()
        # We can start by sending all the requests we already have
        await request_channel.send_from([RequestObject(...), RequestObject(...)])
        async for response in client.rpc_call(request_channel):
            # The response iterator will remain active until the connection is closed
            ...
            # More items can be sent at any time
            await request_channel.send(RequestObject(...))
            ...
            # The channel must be closed to complete the gRPC connection
            request_channel.close()

    Items can be sent through the channel by either:
    - providing an iterable to the send_from method
    - passing them to the send method one at a time

    Items can be received from the channel by either:
    - iterating over the channel with a for loop to get all items
    - calling the receive method to get one item at a time

    If the channel is empty then receivers will wait until either an item appears or the
    channel is closed.

    Once the channel is closed then subsequent attempts to send through the channel will
    fail with a ChannelClosed exception.

    When the channel is closed and empty then it is done, and further attempts to
    receive from it will fail with a ChannelDone exception.

    If multiple coroutines receive from the channel concurrently, each item sent will be
    received by only one of the receivers.

    :param source:
        An optional iterable with items that should be sent through the channel
        immediately.
    :param buffer_limit:
        Limit the number of items that can be buffered in the channel. A value less than
        1 implies no limit. If the channel is full then attempts to send more items will
        result in the sender waiting until an item is received from the channel.
    :param close:
        If set to True then the channel will automatically close after exhausting source
        or immediately if no source is provided.
    """

    def __init__(self, *, buffer_limit: int = 0, close: bool = False):
        self._queue: asyncio.Queue[T] = asyncio.Queue(buffer_limit)
        self._closed = False
        self._waiting_receivers: int = 0
        # Track whether flush has been invoked so it can only happen once
        self._flushed = False

    def __aiter__(self) -> AsyncIterator[T]:
        return self

    async def __anext__(self) -> T:
        if self.done():
            raise StopAsyncIteration
        self._waiting_receivers += 1
        try:
            result = await self._queue.get()
            if result is self.__flush:
                raise StopAsyncIteration
            return result
        finally:
            self._waiting_receivers -= 1
            self._queue.task_done()

    def closed(self) -> bool:
        """
        Returns True if this channel is closed and no-longer accepting new items
        """
        return self._closed

    def done(self) -> bool:
        """
        Check if this channel is done.

        :return: True if this channel is closed and has been drained of items, in
        which case any further attempts to receive an item from this channel will raise
        a ChannelDone exception.
        """
        # After close the channel is not yet done until there is at least one waiting
        # receiver per enqueued item.
        return self._closed and self._queue.qsize() <= self._waiting_receivers

    async def send_from(
        self, source: Union[Iterable[T], AsyncIterable[T]], close: bool = False
    ) -> "AsyncChannel[T]":
        """
        Iterates the given [Async]Iterable and sends all the resulting items.
        If close is set to True then subsequent send calls will be rejected with a
        ChannelClosed exception.
        :param source: an iterable of items to send
        :param close:
            if True then the channel will be closed after the source has been exhausted
        """
        if self._closed:
            raise ChannelClosed("Cannot send through a closed channel")
        if isinstance(source, AsyncIterable):
            async for item in source:
                await self._queue.put(item)
        else:
            for item in source:
                await self._queue.put(item)
        if close:
            # Complete the closing process
            self.close()
        return self

    async def send(self, item: T) -> "AsyncChannel[T]":
        """
        Send a single item over this channel.
        :param item: The item to send
        """
        if self._closed:
            raise ChannelClosed("Cannot send through a closed channel")
        await self._queue.put(item)
        return self

    async def receive(self) -> Optional[T]:
        """
        Returns the next item from this channel when it becomes available,
        or None if the channel is closed before another item is sent.
        :return: An item from the channel
        """
        if self.done():
            raise ChannelDone("Cannot receive from a closed channel")
        self._waiting_receivers += 1
        try:
            result = await self._queue.get()
            if result is self.__flush:
                return None
            return result
        finally:
            self._waiting_receivers -= 1
            self._queue.task_done()

    def close(self):
        """
        Close this channel to new items
        """
        self._closed = True
        asyncio.ensure_future(self._flush_queue())

    async def _flush_queue(self):
        """
        To be called after the channel is closed. Pushes a number of self.__flush
        objects to the queue to ensure no waiting consumers get deadlocked.
        """
        if not self._flushed:
            self._flushed = True
            deadlocked_receivers = max(0, self._waiting_receivers - self._queue.qsize())
            for _ in range(deadlocked_receivers):
                await self._queue.put(self.__flush)

    # A special signal object for flushing the queue when the channel is closed
    __flush = object()
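A self-contained sketch of the channel outside gRPC (closing the channel is what
lets the ``async for`` loop terminate):

.. code-block:: python

    import asyncio

    from betterproto.grpc.util.async_channel import AsyncChannel


    async def main():
        channel = AsyncChannel()
        # queue three items and close, so the loop below ends after draining
        await channel.send_from([1, 2, 3], close=True)
        async for item in channel:
            print(item)  # prints 1, 2, 3

    asyncio.run(main())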
src/betterproto/lib/__init__.py (new, empty file)

src/betterproto/lib/google/__init__.py (new, empty file)

src/betterproto/lib/google/protobuf/__init__.py (new file)
@@ -0,0 +1 @@

from betterproto.lib.std.google.protobuf import *

src/betterproto/lib/google/protobuf/compiler/__init__.py (new file)
@@ -0,0 +1 @@

from betterproto.lib.std.google.protobuf.compiler import *

src/betterproto/lib/pydantic/__init__.py (new, empty file)

src/betterproto/lib/pydantic/google/__init__.py (new, empty file)

src/betterproto/lib/pydantic/google/protobuf/__init__.py (new file, 2673 lines; diff suppressed)

src/betterproto/lib/pydantic/google/protobuf/compiler/__init__.py (new file)
@@ -0,0 +1,210 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# sources: google/protobuf/compiler/plugin.proto
# plugin: python-betterproto
# This file has been @generated

from typing import TYPE_CHECKING


if TYPE_CHECKING:
    from dataclasses import dataclass
else:
    from pydantic.dataclasses import dataclass

from typing import List

import betterproto
import betterproto.lib.pydantic.google.protobuf as betterproto_lib_pydantic_google_protobuf


class CodeGeneratorResponseFeature(betterproto.Enum):
    """Sync with code_generator.h."""

    FEATURE_NONE = 0
    FEATURE_PROTO3_OPTIONAL = 1
    FEATURE_SUPPORTS_EDITIONS = 2


@dataclass(eq=False, repr=False)
class Version(betterproto.Message):
    """The version number of protocol compiler."""

    major: int = betterproto.int32_field(1)
    minor: int = betterproto.int32_field(2)
    patch: int = betterproto.int32_field(3)
    suffix: str = betterproto.string_field(4)
    """
    A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
    be empty for mainline stable releases.
    """


@dataclass(eq=False, repr=False)
class CodeGeneratorRequest(betterproto.Message):
    """An encoded CodeGeneratorRequest is written to the plugin's stdin."""

    file_to_generate: List[str] = betterproto.string_field(1)
    """
    The .proto files that were explicitly listed on the command-line. The
    code generator should generate code only for these files. Each file's
    descriptor will be included in proto_file, below.
    """

    parameter: str = betterproto.string_field(2)
    """The generator parameter passed on the command-line."""

    proto_file: List["betterproto_lib_pydantic_google_protobuf.FileDescriptorProto"] = (
        betterproto.message_field(15)
    )
    """
    FileDescriptorProtos for all files in files_to_generate and everything
    they import. The files will appear in topological order, so each file
    appears before any file that imports it.

    Note: the files listed in files_to_generate will include runtime-retention
    options only, but all other files will include source-retention options.
    The source_file_descriptors field below is available in case you need
    source-retention options for files_to_generate.

    protoc guarantees that all proto_files will be written after
    the fields above, even though this is not technically guaranteed by the
    protobuf wire format. This theoretically could allow a plugin to stream
    in the FileDescriptorProtos and handle them one by one rather than read
    the entire set into memory at once. However, as of this writing, this
    is not similarly optimized on protoc's end -- it will store all fields in
    memory at once before sending them to the plugin.

    Type names of fields and extensions in the FileDescriptorProto are always
    fully qualified.
    """

    source_file_descriptors: List[
        "betterproto_lib_pydantic_google_protobuf.FileDescriptorProto"
    ] = betterproto.message_field(17)
    """
    File descriptors with all options, including source-retention options.
    These descriptors are only provided for the files listed in
    files_to_generate.
    """

    compiler_version: "Version" = betterproto.message_field(3)
    """The version number of protocol compiler."""


@dataclass(eq=False, repr=False)
class CodeGeneratorResponse(betterproto.Message):
    """The plugin writes an encoded CodeGeneratorResponse to stdout."""

    error: str = betterproto.string_field(1)
    """
    Error message. If non-empty, code generation failed. The plugin process
    should exit with status code zero even if it reports an error in this way.

    This should be used to indicate errors in .proto files which prevent the
    code generator from generating correct code. Errors which indicate a
    problem in protoc itself -- such as the input CodeGeneratorRequest being
    unparseable -- should be reported by writing a message to stderr and
    exiting with a non-zero status code.
    """

    supported_features: int = betterproto.uint64_field(2)
    """
    A bitmask of supported features that the code generator supports.
    This is a bitwise "or" of values from the Feature enum.
    """

    minimum_edition: int = betterproto.int32_field(3)
    """
    The minimum edition this plugin supports. This will be treated as an
    Edition enum, but we want to allow unknown values. It should be specified
    according the edition enum value, *not* the edition number. Only takes
    effect for plugins that have FEATURE_SUPPORTS_EDITIONS set.
    """

    maximum_edition: int = betterproto.int32_field(4)
    """
    The maximum edition this plugin supports. This will be treated as an
    Edition enum, but we want to allow unknown values. It should be specified
    according the edition enum value, *not* the edition number. Only takes
    effect for plugins that have FEATURE_SUPPORTS_EDITIONS set.
    """

    file: List["CodeGeneratorResponseFile"] = betterproto.message_field(15)


@dataclass(eq=False, repr=False)
class CodeGeneratorResponseFile(betterproto.Message):
    """Represents a single generated file."""

    name: str = betterproto.string_field(1)
    """
    The file name, relative to the output directory. The name must not
    contain "." or ".." components and must be relative, not be absolute (so,
    the file cannot lie outside the output directory). "/" must be used as
    the path separator, not "\".

    If the name is omitted, the content will be appended to the previous
    file. This allows the generator to break large files into small chunks,
    and allows the generated text to be streamed back to protoc so that large
    files need not reside completely in memory at one time. Note that as of
    this writing protoc does not optimize for this -- it will read the entire
    CodeGeneratorResponse before writing files to disk.
    """

    insertion_point: str = betterproto.string_field(2)
    """
    If non-empty, indicates that the named file should already exist, and the
    content here is to be inserted into that file at a defined insertion
    point. This feature allows a code generator to extend the output
    produced by another code generator. The original generator may provide
    insertion points by placing special annotations in the file that look
    like:
        @@protoc_insertion_point(NAME)
    The annotation can have arbitrary text before and after it on the line,
    which allows it to be placed in a comment. NAME should be replaced with
    an identifier naming the point -- this is what other generators will use
    as the insertion_point. Code inserted at this point will be placed
    immediately above the line containing the insertion point (thus multiple
    insertions to the same point will come out in the order they were added).
    The double-@ is intended to make it unlikely that the generated code
|
||||||
|
could contain things that look like insertion points by accident.
|
||||||
|
|
||||||
|
For example, the C++ code generator places the following line in the
|
||||||
|
.pb.h files that it generates:
|
||||||
|
// @@protoc_insertion_point(namespace_scope)
|
||||||
|
This line appears within the scope of the file's package namespace, but
|
||||||
|
outside of any particular class. Another plugin can then specify the
|
||||||
|
insertion_point "namespace_scope" to generate additional classes or
|
||||||
|
other declarations that should be placed in this scope.
|
||||||
|
|
||||||
|
Note that if the line containing the insertion point begins with
|
||||||
|
whitespace, the same whitespace will be added to every line of the
|
||||||
|
inserted text. This is useful for languages like Python, where
|
||||||
|
indentation matters. In these languages, the insertion point comment
|
||||||
|
should be indented the same amount as any inserted code will need to be
|
||||||
|
in order to work correctly in that context.
|
||||||
|
|
||||||
|
The code generator that generates the initial file and the one which
|
||||||
|
inserts into it must both run as part of a single invocation of protoc.
|
||||||
|
Code generators are executed in the order in which they appear on the
|
||||||
|
command line.
|
||||||
|
|
||||||
|
If |insertion_point| is present, |name| must also be present.
|
||||||
|
"""
|
||||||
|
|
||||||
|
content: str = betterproto.string_field(15)
|
||||||
|
"""The file contents."""
|
||||||
|
|
||||||
|
generated_code_info: "betterproto_lib_pydantic_google_protobuf.GeneratedCodeInfo" = betterproto.message_field(
|
||||||
|
16
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
Information describing the file content being inserted. If an insertion
|
||||||
|
point is used, this information will be appropriately offset and inserted
|
||||||
|
into the code generation metadata for the generated files.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
CodeGeneratorRequest.__pydantic_model__.update_forward_refs() # type: ignore
|
||||||
|
CodeGeneratorResponse.__pydantic_model__.update_forward_refs() # type: ignore
|
||||||
|
CodeGeneratorResponseFile.__pydantic_model__.update_forward_refs() # type: ignore
|
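Editorial aside: the trailing update_forward_refs() calls resolve the string annotations (such as "Version" above) once every class in the module exists. A minimal sketch of the same pattern, assuming pydantic v1-style dataclasses; the Node class is a hypothetical example, not part of this module:

# Minimal sketch of forward-reference resolution (assumes pydantic v1 dataclasses;
# `Node` is a hypothetical example).
from typing import Optional

from pydantic.dataclasses import dataclass


@dataclass
class Node:
    value: int
    next: Optional["Node"] = None  # forward reference given as a string


# The string annotation can only be resolved once Node itself is defined:
Node.__pydantic_model__.update_forward_refs()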
0
src/betterproto/lib/std/__init__.py
Normal file
0
src/betterproto/lib/std/google/__init__.py
Normal file
2526
src/betterproto/lib/std/google/protobuf/__init__.py
Normal file
File diff suppressed because it is too large
198
src/betterproto/lib/std/google/protobuf/compiler/__init__.py
Normal file
@@ -0,0 +1,198 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# sources: google/protobuf/compiler/plugin.proto
# plugin: python-betterproto
# This file has been @generated

from dataclasses import dataclass
from typing import List

import betterproto
import betterproto.lib.google.protobuf as betterproto_lib_google_protobuf


class CodeGeneratorResponseFeature(betterproto.Enum):
    """Sync with code_generator.h."""

    FEATURE_NONE = 0
    FEATURE_PROTO3_OPTIONAL = 1
    FEATURE_SUPPORTS_EDITIONS = 2


@dataclass(eq=False, repr=False)
class Version(betterproto.Message):
    """The version number of protocol compiler."""

    major: int = betterproto.int32_field(1)
    minor: int = betterproto.int32_field(2)
    patch: int = betterproto.int32_field(3)
    suffix: str = betterproto.string_field(4)
    """
    A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
    be empty for mainline stable releases.
    """


@dataclass(eq=False, repr=False)
class CodeGeneratorRequest(betterproto.Message):
    """An encoded CodeGeneratorRequest is written to the plugin's stdin."""

    file_to_generate: List[str] = betterproto.string_field(1)
    """
    The .proto files that were explicitly listed on the command-line. The
    code generator should generate code only for these files. Each file's
    descriptor will be included in proto_file, below.
    """

    parameter: str = betterproto.string_field(2)
    """The generator parameter passed on the command-line."""

    proto_file: List["betterproto_lib_google_protobuf.FileDescriptorProto"] = (
        betterproto.message_field(15)
    )
    """
    FileDescriptorProtos for all files in files_to_generate and everything
    they import. The files will appear in topological order, so each file
    appears before any file that imports it.

    Note: the files listed in files_to_generate will include runtime-retention
    options only, but all other files will include source-retention options.
    The source_file_descriptors field below is available in case you need
    source-retention options for files_to_generate.

    protoc guarantees that all proto_files will be written after
    the fields above, even though this is not technically guaranteed by the
    protobuf wire format. This theoretically could allow a plugin to stream
    in the FileDescriptorProtos and handle them one by one rather than read
    the entire set into memory at once. However, as of this writing, this
    is not similarly optimized on protoc's end -- it will store all fields in
    memory at once before sending them to the plugin.

    Type names of fields and extensions in the FileDescriptorProto are always
    fully qualified.
    """

    source_file_descriptors: List[
        "betterproto_lib_google_protobuf.FileDescriptorProto"
    ] = betterproto.message_field(17)
    """
    File descriptors with all options, including source-retention options.
    These descriptors are only provided for the files listed in
    files_to_generate.
    """

    compiler_version: "Version" = betterproto.message_field(3)
    """The version number of protocol compiler."""


@dataclass(eq=False, repr=False)
class CodeGeneratorResponse(betterproto.Message):
    """The plugin writes an encoded CodeGeneratorResponse to stdout."""

    error: str = betterproto.string_field(1)
    """
    Error message. If non-empty, code generation failed. The plugin process
    should exit with status code zero even if it reports an error in this way.

    This should be used to indicate errors in .proto files which prevent the
    code generator from generating correct code. Errors which indicate a
    problem in protoc itself -- such as the input CodeGeneratorRequest being
    unparseable -- should be reported by writing a message to stderr and
    exiting with a non-zero status code.
    """

    supported_features: int = betterproto.uint64_field(2)
    """
    A bitmask of supported features that the code generator supports.
    This is a bitwise "or" of values from the Feature enum.
    """

    minimum_edition: int = betterproto.int32_field(3)
    """
    The minimum edition this plugin supports. This will be treated as an
    Edition enum, but we want to allow unknown values. It should be specified
    according to the edition enum value, *not* the edition number. Only takes
    effect for plugins that have FEATURE_SUPPORTS_EDITIONS set.
    """

    maximum_edition: int = betterproto.int32_field(4)
    """
    The maximum edition this plugin supports. This will be treated as an
    Edition enum, but we want to allow unknown values. It should be specified
    according to the edition enum value, *not* the edition number. Only takes
    effect for plugins that have FEATURE_SUPPORTS_EDITIONS set.
    """

    file: List["CodeGeneratorResponseFile"] = betterproto.message_field(15)


@dataclass(eq=False, repr=False)
class CodeGeneratorResponseFile(betterproto.Message):
    """Represents a single generated file."""

    name: str = betterproto.string_field(1)
    """
    The file name, relative to the output directory. The name must not
    contain "." or ".." components and must be relative, not be absolute (so,
    the file cannot lie outside the output directory). "/" must be used as
    the path separator, not "\".

    If the name is omitted, the content will be appended to the previous
    file. This allows the generator to break large files into small chunks,
    and allows the generated text to be streamed back to protoc so that large
    files need not reside completely in memory at one time. Note that as of
    this writing protoc does not optimize for this -- it will read the entire
    CodeGeneratorResponse before writing files to disk.
    """

    insertion_point: str = betterproto.string_field(2)
    """
    If non-empty, indicates that the named file should already exist, and the
    content here is to be inserted into that file at a defined insertion
    point. This feature allows a code generator to extend the output
    produced by another code generator. The original generator may provide
    insertion points by placing special annotations in the file that look
    like:
    @@protoc_insertion_point(NAME)
    The annotation can have arbitrary text before and after it on the line,
    which allows it to be placed in a comment. NAME should be replaced with
    an identifier naming the point -- this is what other generators will use
    as the insertion_point. Code inserted at this point will be placed
    immediately above the line containing the insertion point (thus multiple
    insertions to the same point will come out in the order they were added).
    The double-@ is intended to make it unlikely that the generated code
    could contain things that look like insertion points by accident.

    For example, the C++ code generator places the following line in the
    .pb.h files that it generates:
    // @@protoc_insertion_point(namespace_scope)
    This line appears within the scope of the file's package namespace, but
    outside of any particular class. Another plugin can then specify the
    insertion_point "namespace_scope" to generate additional classes or
    other declarations that should be placed in this scope.

    Note that if the line containing the insertion point begins with
    whitespace, the same whitespace will be added to every line of the
    inserted text. This is useful for languages like Python, where
    indentation matters. In these languages, the insertion point comment
    should be indented the same amount as any inserted code will need to be
    in order to work correctly in that context.

    The code generator that generates the initial file and the one which
    inserts into it must both run as part of a single invocation of protoc.
    Code generators are executed in the order in which they appear on the
    command line.

    If |insertion_point| is present, |name| must also be present.
    """

    content: str = betterproto.string_field(15)
    """The file contents."""

    generated_code_info: "betterproto_lib_google_protobuf.GeneratedCodeInfo" = (
        betterproto.message_field(16)
    )
    """
    Information describing the file content being inserted. If an insertion
    point is used, this information will be appropriately offset and inserted
    into the code generation metadata for the generated files.
    """
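Editorial aside: the request/response pair above is the entire plugin contract -- protoc hands the plugin a serialized CodeGeneratorRequest on stdin and expects a serialized CodeGeneratorResponse on stdout. A minimal sketch of a plugin built on these classes; the emitted file is a hypothetical example:

# Minimal sketch of a protoc plugin wire round-trip; illustration only.
import sys

from betterproto.lib.google.protobuf.compiler import (
    CodeGeneratorRequest,
    CodeGeneratorResponse,
    CodeGeneratorResponseFeature,
    CodeGeneratorResponseFile,
)


def run() -> None:
    request = CodeGeneratorRequest().parse(sys.stdin.buffer.read())
    response = CodeGeneratorResponse(
        supported_features=CodeGeneratorResponseFeature.FEATURE_PROTO3_OPTIONAL,
        file=[
            CodeGeneratorResponseFile(
                name="file_listing.txt",  # hypothetical output file
                content="\n".join(request.file_to_generate),
            )
        ],
    )
    sys.stdout.buffer.write(response.SerializeToString())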
1
src/betterproto/plugin/__init__.py
Normal file
@@ -0,0 +1 @@
from .main import main
4
src/betterproto/plugin/__main__.py
Normal file
@@ -0,0 +1,4 @@
from .main import main


main()
64
src/betterproto/plugin/compiler.py
Normal file
@@ -0,0 +1,64 @@
import os.path
import subprocess
import sys

from .module_validation import ModuleValidator


try:
    # betterproto[compiler] specific dependencies
    import jinja2
except ImportError as err:
    print(
        "\033[31m"
        f"Unable to import `{err.name}` from betterproto plugin! "
        "Please ensure that you've installed betterproto as "
        '`pip install "betterproto[compiler]"` so that compiler dependencies '
        "are included."
        "\033[0m"
    )
    raise SystemExit(1)

from .models import OutputTemplate


def outputfile_compiler(output_file: OutputTemplate) -> str:
    templates_folder = os.path.abspath(
        os.path.join(os.path.dirname(__file__), "..", "templates")
    )

    env = jinja2.Environment(
        trim_blocks=True,
        lstrip_blocks=True,
        loader=jinja2.FileSystemLoader(templates_folder),
        undefined=jinja2.StrictUndefined,
    )
    # Load the body first so we have a complete list of imports needed.
    body_template = env.get_template("template.py.j2")
    header_template = env.get_template("header.py.j2")

    code = body_template.render(output_file=output_file)
    code = header_template.render(output_file=output_file) + code

    # Sort imports, delete unused ones
    code = subprocess.check_output(
        ["ruff", "check", "--select", "I,F401", "--fix", "--silent", "-"],
        input=code,
        encoding="utf-8",
    )

    # Format the code
    code = subprocess.check_output(
        ["ruff", "format", "-"], input=code, encoding="utf-8"
    )

    # Validate the generated code.
    validator = ModuleValidator(iter(code.splitlines()))
    if not validator.validate():
        message_builder = ["[WARNING]: Generated code has collisions in the module:"]
        for collision, lines in validator.collisions.items():
            message_builder.append(f'    "{collision}" on lines:')
            for num, line in lines:
                message_builder.append(f"        {num}:{line}")
        print("\n".join(message_builder), file=sys.stderr)
    return code
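Editorial aside: the lint-then-format pipeline above can be exercised on any code string in isolation. A minimal sketch, assuming ruff is on PATH:

# Run ruff's import-sorting/unused-import fix and formatter over stdin.
import subprocess

code = "import os\nimport sys\nprint(sys.argv)\n"
code = subprocess.check_output(
    ["ruff", "check", "--select", "I,F401", "--fix", "--silent", "-"],
    input=code,
    encoding="utf-8",
)
code = subprocess.check_output(["ruff", "format", "-"], input=code, encoding="utf-8")
print(code)  # `import os` dropped as unused; remaining imports sorted and formatted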
52
src/betterproto/plugin/main.py
Executable file
@@ -0,0 +1,52 @@
#!/usr/bin/env python

import os
import sys

from betterproto.lib.google.protobuf.compiler import (
    CodeGeneratorRequest,
    CodeGeneratorResponse,
)
from betterproto.plugin.models import monkey_patch_oneof_index
from betterproto.plugin.parser import generate_code


def main() -> None:
    """The plugin's main entry point."""
    # Read request message from stdin
    data = sys.stdin.buffer.read()

    # Apply workaround for the proto2/proto3 difference in protoc messages
    monkey_patch_oneof_index()

    # Parse request
    request = CodeGeneratorRequest()
    request.parse(data)

    dump_file = os.getenv("BETTERPROTO_DUMP")
    if dump_file:
        dump_request(dump_file, request)

    # Generate code
    response = generate_code(request)

    # Serialise response message
    output = response.SerializeToString()

    # Write to stdout
    sys.stdout.buffer.write(output)


def dump_request(dump_file: str, request: CodeGeneratorRequest) -> None:
    """
    For developers: supports running plugin.py standalone so it's possible to debug it.
    Run protoc (or generate.py) with BETTERPROTO_DUMP="yourfile.bin" to write the request to a file.
    Then run plugin.py from your IDE in debugging mode, and redirect stdin to the file.
    """
    with open(str(dump_file), "wb") as fh:
        sys.stderr.write(f"\033[31mWriting input from protoc to: {dump_file}\033[0m\n")
        fh.write(request.SerializeToString())


if __name__ == "__main__":
    main()
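Editorial aside: with the BETTERPROTO_DUMP hook above, a captured request can be replayed outside protoc, which is handy under a debugger. A minimal sketch; the dump file name is the example from the docstring:

# Replay a CodeGeneratorRequest previously captured via BETTERPROTO_DUMP.
from betterproto.lib.google.protobuf.compiler import CodeGeneratorRequest
from betterproto.plugin.models import monkey_patch_oneof_index
from betterproto.plugin.parser import generate_code

monkey_patch_oneof_index()  # same proto2/proto3 workaround as in main()

with open("yourfile.bin", "rb") as fh:
    request = CodeGeneratorRequest().parse(fh.read())

response = generate_code(request)
print([f.name for f in response.file])  # names of the files that would be emitted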
787
src/betterproto/plugin/models.py
Normal file
@@ -0,0 +1,787 @@
"""Plugin model dataclasses.

These classes are meant to be an intermediate representation
of protobuf objects. They are used to organize the data collected during parsing.

The general intention is to create a doubly-linked tree-like structure
with the following types of references:
- Downwards references: from message -> fields, from output package -> messages
  or from service -> service methods
- Upwards references: from field -> message, message -> package.
- Input/output message references: from a service method to its corresponding
  input/output messages, which may even be in another package.

There are convenience methods to allow climbing up and down this tree, for
example to retrieve the list of all messages that are in the same package as
the current message.

Most of these classes take as inputs:
- proto_obj: A reference to its corresponding protobuf object as
  presented by the protoc plugin.
- parent: a reference to the parent object in the tree.

With this information, the class is able to expose attributes,
such as a pythonized name, that will be calculated from proto_obj.

The instantiation should also attach a reference to the new object
into the corresponding place within its parent object. For example,
instantiating field `A` with parent message `B` should add a
reference to `A` to `B`'s `fields` attribute.
"""

import builtins
import re
from dataclasses import (
    dataclass,
    field,
)
from typing import (
    Dict,
    Iterable,
    Iterator,
    List,
    Optional,
    Set,
    Type,
    Union,
)

import betterproto
from betterproto.compile.naming import (
    pythonize_class_name,
    pythonize_field_name,
    pythonize_method_name,
)
from betterproto.lib.google.protobuf import (
    DescriptorProto,
    EnumDescriptorProto,
    Field,
    FieldDescriptorProto,
    FieldDescriptorProtoLabel,
    FieldDescriptorProtoType,
    FileDescriptorProto,
    MethodDescriptorProto,
)
from betterproto.lib.google.protobuf.compiler import CodeGeneratorRequest

from .. import which_one_of
from ..compile.importing import (
    get_type_reference,
    parse_source_type_name,
)
from ..compile.naming import (
    pythonize_class_name,
    pythonize_enum_member_name,
    pythonize_field_name,
    pythonize_method_name,
)
from .typing_compiler import (
    DirectImportTypingCompiler,
    TypingCompiler,
)


# Create a unique placeholder to deal with
# https://stackoverflow.com/questions/51575931/class-inheritance-in-python-3-7-dataclasses
PLACEHOLDER = object()

# Organize proto types into categories
PROTO_FLOAT_TYPES = (
    FieldDescriptorProtoType.TYPE_DOUBLE,  # 1
    FieldDescriptorProtoType.TYPE_FLOAT,  # 2
)
PROTO_INT_TYPES = (
    FieldDescriptorProtoType.TYPE_INT64,  # 3
    FieldDescriptorProtoType.TYPE_UINT64,  # 4
    FieldDescriptorProtoType.TYPE_INT32,  # 5
    FieldDescriptorProtoType.TYPE_FIXED64,  # 6
    FieldDescriptorProtoType.TYPE_FIXED32,  # 7
    FieldDescriptorProtoType.TYPE_UINT32,  # 13
    FieldDescriptorProtoType.TYPE_SFIXED32,  # 15
    FieldDescriptorProtoType.TYPE_SFIXED64,  # 16
    FieldDescriptorProtoType.TYPE_SINT32,  # 17
    FieldDescriptorProtoType.TYPE_SINT64,  # 18
)
PROTO_BOOL_TYPES = (FieldDescriptorProtoType.TYPE_BOOL,)  # 8
PROTO_STR_TYPES = (FieldDescriptorProtoType.TYPE_STRING,)  # 9
PROTO_BYTES_TYPES = (FieldDescriptorProtoType.TYPE_BYTES,)  # 12
PROTO_MESSAGE_TYPES = (
    FieldDescriptorProtoType.TYPE_MESSAGE,  # 11
    FieldDescriptorProtoType.TYPE_ENUM,  # 14
)
PROTO_MAP_TYPES = (FieldDescriptorProtoType.TYPE_MESSAGE,)  # 11
PROTO_PACKED_TYPES = (
    FieldDescriptorProtoType.TYPE_DOUBLE,  # 1
    FieldDescriptorProtoType.TYPE_FLOAT,  # 2
    FieldDescriptorProtoType.TYPE_INT64,  # 3
    FieldDescriptorProtoType.TYPE_UINT64,  # 4
    FieldDescriptorProtoType.TYPE_INT32,  # 5
    FieldDescriptorProtoType.TYPE_FIXED64,  # 6
    FieldDescriptorProtoType.TYPE_FIXED32,  # 7
    FieldDescriptorProtoType.TYPE_BOOL,  # 8
    FieldDescriptorProtoType.TYPE_UINT32,  # 13
    FieldDescriptorProtoType.TYPE_SFIXED32,  # 15
    FieldDescriptorProtoType.TYPE_SFIXED64,  # 16
    FieldDescriptorProtoType.TYPE_SINT32,  # 17
    FieldDescriptorProtoType.TYPE_SINT64,  # 18
)


def monkey_patch_oneof_index():
    """
    The compiler message types are written for proto2, but we read them as proto3.
    For this to work in the case of the oneof_index fields, which depend on being able
    to tell whether they were set, we have to treat them as oneof fields. This method
    monkey patches the generated classes after the fact to force this behaviour.
    """
    object.__setattr__(
        FieldDescriptorProto.__dataclass_fields__["oneof_index"].metadata[
            "betterproto"
        ],
        "group",
        "oneof_index",
    )
    object.__setattr__(
        Field.__dataclass_fields__["oneof_index"].metadata["betterproto"],
        "group",
        "oneof_index",
    )


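# Editorial note (not in the original source): the `path` argument below follows
# the protobuf SourceCodeInfo convention -- alternating descriptor field numbers
# and indices. Assuming standard descriptor numbering, [4, 0] addresses the first
# top-level message, [4, 0, 2, 1] that message's second field, and [5, 0] the
# first top-level enum.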
def get_comment(
    proto_file: "FileDescriptorProto", path: List[int], indent: int = 4
) -> str:
    pad = " " * indent
    for sci_loc in proto_file.source_code_info.location:
        if list(sci_loc.path) == path:
            all_comments = list(sci_loc.leading_detached_comments)
            if sci_loc.leading_comments:
                all_comments.append(sci_loc.leading_comments)
            if sci_loc.trailing_comments:
                all_comments.append(sci_loc.trailing_comments)

            lines = []

            for comment in all_comments:
                lines += comment.split("\n")
                lines.append("")

            # Remove consecutive empty lines
            lines = [
                line for i, line in enumerate(lines) if line or (i == 0 or lines[i - 1])
            ]

            if lines and not lines[-1]:
                lines.pop()  # Remove the last empty line

            # It is common for one-line comments to start with a space, e.g. "// comment".
            # We don't add this space to the generated file.
            lines = [line[1:] if line and line[0] == " " else line for line in lines]

            # This is a field, message, enum, service, or method
            if len(lines) == 1 and len(lines[0]) < 79 - indent - 6:
                return f'{pad}"""{lines[0]}"""'
            else:
                joined = f"\n{pad}".join(lines)
                return f'{pad}"""\n{pad}{joined}\n{pad}"""'

    return ""


class ProtoContentBase:
    """Methods common to MessageCompiler, ServiceCompiler and ServiceMethodCompiler."""

    source_file: FileDescriptorProto
    typing_compiler: TypingCompiler
    path: List[int]
    comment_indent: int = 4
    parent: Union["betterproto.Message", "OutputTemplate"]

    __dataclass_fields__: Dict[str, object]

    def __post_init__(self) -> None:
        """Checks that no fake default fields were left as placeholders."""
        for field_name, field_val in self.__dataclass_fields__.items():
            if field_val is PLACEHOLDER:
                raise ValueError(f"`{field_name}` is a required field.")

    @property
    def output_file(self) -> "OutputTemplate":
        current = self
        while not isinstance(current, OutputTemplate):
            current = current.parent
        return current

    @property
    def request(self) -> "PluginRequestCompiler":
        current = self
        while not isinstance(current, OutputTemplate):
            current = current.parent
        return current.parent_request

    @property
    def comment(self) -> str:
        """Crawl the proto source code and retrieve comments
        for this object.
        """
        return get_comment(
            proto_file=self.source_file, path=self.path, indent=self.comment_indent
        )


@dataclass
class PluginRequestCompiler:
    plugin_request_obj: CodeGeneratorRequest
    output_packages: Dict[str, "OutputTemplate"] = field(default_factory=dict)

    @property
    def all_messages(self) -> List["MessageCompiler"]:
        """All of the messages in this request.

        Returns
        -------
        List[MessageCompiler]
            List of all of the messages in this request.
        """
        return [
            msg for output in self.output_packages.values() for msg in output.messages
        ]


@dataclass
class OutputTemplate:
    """Representation of an output .py file.

    Each output file corresponds to a .proto input file,
    but may need references to other .proto files to be
    built.
    """

    parent_request: PluginRequestCompiler
    package_proto_obj: FileDescriptorProto
    input_files: List[str] = field(default_factory=list)
    imports_end: Set[str] = field(default_factory=set)
    datetime_imports: Set[str] = field(default_factory=set)
    pydantic_imports: Set[str] = field(default_factory=set)
    builtins_import: bool = False
    messages: List["MessageCompiler"] = field(default_factory=list)
    enums: List["EnumDefinitionCompiler"] = field(default_factory=list)
    services: List["ServiceCompiler"] = field(default_factory=list)
    imports_type_checking_only: Set[str] = field(default_factory=set)
    pydantic_dataclasses: bool = False
    output: bool = True
    typing_compiler: TypingCompiler = field(default_factory=DirectImportTypingCompiler)

    @property
    def package(self) -> str:
        """Name of input package.

        Returns
        -------
        str
            Name of input package.
        """
        return self.package_proto_obj.package

    @property
    def input_filenames(self) -> Iterable[str]:
        """Names of the input files used to build this output.

        Returns
        -------
        Iterable[str]
            Names of the input files used to build this output.
        """
        return sorted(f.name for f in self.input_files)

    @property
    def python_module_imports(self) -> Set[str]:
        imports = set()

        has_deprecated = False
        if any(m.deprecated for m in self.messages):
            has_deprecated = True
        if any(x for x in self.messages if any(x.deprecated_fields)):
            has_deprecated = True
        if any(
            any(m.proto_obj.options.deprecated for m in s.methods)
            for s in self.services
        ):
            has_deprecated = True

        if has_deprecated:
            imports.add("warnings")

        if self.builtins_import:
            imports.add("builtins")
        return imports


@dataclass
class MessageCompiler(ProtoContentBase):
    """Representation of a protobuf message."""

    source_file: FileDescriptorProto
    typing_compiler: TypingCompiler
    parent: Union["MessageCompiler", OutputTemplate] = PLACEHOLDER
    proto_obj: DescriptorProto = PLACEHOLDER
    path: List[int] = PLACEHOLDER
    fields: List[Union["FieldCompiler", "MessageCompiler"]] = field(
        default_factory=list
    )
    deprecated: bool = field(default=False, init=False)
    builtins_types: Set[str] = field(default_factory=set)

    def __post_init__(self) -> None:
        # Add message to output file
        if isinstance(self.parent, OutputTemplate):
            if isinstance(self, EnumDefinitionCompiler):
                self.output_file.enums.append(self)
            else:
                self.output_file.messages.append(self)
        self.deprecated = self.proto_obj.options.deprecated
        super().__post_init__()

    @property
    def proto_name(self) -> str:
        return self.proto_obj.name

    @property
    def py_name(self) -> str:
        return pythonize_class_name(self.proto_name)

    @property
    def deprecated_fields(self) -> Iterator[str]:
        for f in self.fields:
            if f.deprecated:
                yield f.py_name

    @property
    def has_deprecated_fields(self) -> bool:
        return any(self.deprecated_fields)

    @property
    def has_oneof_fields(self) -> bool:
        return any(isinstance(field, OneOfFieldCompiler) for field in self.fields)

    @property
    def has_message_field(self) -> bool:
        return any(
            (
                field.proto_obj.type in PROTO_MESSAGE_TYPES
                for field in self.fields
                if isinstance(field.proto_obj, FieldDescriptorProto)
            )
        )


def is_map(
    proto_field_obj: FieldDescriptorProto, parent_message: DescriptorProto
) -> bool:
    """True if proto_field_obj is a map, otherwise False."""
    if proto_field_obj.type == FieldDescriptorProtoType.TYPE_MESSAGE:
        if not hasattr(parent_message, "nested_type"):
            return False

        # This might be a map...
        message_type = proto_field_obj.type_name.split(".").pop().lower()
        map_entry = f"{proto_field_obj.name.replace('_', '').lower()}entry"
        if message_type == map_entry:
            for nested in parent_message.nested_type:  # parent message
                if (
                    nested.name.replace("_", "").lower() == map_entry
                    and nested.options.map_entry
                ):
                    return True
    return False


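# Editorial note (not in the original source): for a field declared as
#     map<string, int32> my_map = 1;
# protoc synthesizes a nested message "MyMapEntry" with options.map_entry set,
# which is why is_map() above normalizes both names to "mymapentry" before
# comparing.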
def is_oneof(proto_field_obj: FieldDescriptorProto) -> bool:
    """
    True if proto_field_obj is a OneOf, otherwise False.

    .. warning::
        Because the message from protoc is defined in proto2, and betterproto works with
        proto3, and interpreting the FieldDescriptorProto.oneof_index field requires
        distinguishing between default and unset values (which proto3 doesn't support),
        we have to hack the generated FieldDescriptorProto class for this to work.
        The hack consists of setting group="oneof_index" in the field metadata,
        essentially making oneof_index the sole member of a one_of group, which allows
        us to tell whether it was set, via the which_one_of interface.
    """

    return (
        not proto_field_obj.proto3_optional
        and which_one_of(proto_field_obj, "oneof_index")[0] == "oneof_index"
    )


@dataclass
class FieldCompiler(MessageCompiler):
    parent: MessageCompiler = PLACEHOLDER
    proto_obj: FieldDescriptorProto = PLACEHOLDER

    def __post_init__(self) -> None:
        # Add field to message
        self.parent.fields.append(self)
        # Check for new imports
        self.add_imports_to(self.output_file)
        super().__post_init__()  # call FieldCompiler -> MessageCompiler __post_init__

    def get_field_string(self, indent: int = 4) -> str:
        """Construct string representation of this field as a field."""
        name = f"{self.py_name}"
        annotations = f": {self.annotation}"
        field_args = ", ".join(
            ([""] + self.betterproto_field_args) if self.betterproto_field_args else []
        )
        betterproto_field_type = (
            f"betterproto.{self.field_type}_field({self.proto_obj.number}{field_args})"
        )
        if self.py_name in dir(builtins):
            self.parent.builtins_types.add(self.py_name)
        return f"{name}{annotations} = {betterproto_field_type}"

    @property
    def betterproto_field_args(self) -> List[str]:
        args = []
        if self.field_wraps:
            args.append(f"wraps={self.field_wraps}")
        if self.optional:
            args.append(f"optional=True")
        return args

    @property
    def datetime_imports(self) -> Set[str]:
        imports = set()
        annotation = self.annotation
        # FIXME: false positives - e.g. `MyDatetimedelta`
        if "timedelta" in annotation:
            imports.add("timedelta")
        if "datetime" in annotation:
            imports.add("datetime")
        return imports

    @property
    def pydantic_imports(self) -> Set[str]:
        return set()

    @property
    def use_builtins(self) -> bool:
        return self.py_type in self.parent.builtins_types or (
            self.py_type == self.py_name and self.py_name in dir(builtins)
        )

    def add_imports_to(self, output_file: OutputTemplate) -> None:
        output_file.datetime_imports.update(self.datetime_imports)
        output_file.pydantic_imports.update(self.pydantic_imports)
        output_file.builtins_import = output_file.builtins_import or self.use_builtins

    @property
    def field_wraps(self) -> Optional[str]:
        """Returns betterproto wrapped field type or None."""
        match_wrapper = re.match(
            r"\.google\.protobuf\.(.+)Value$", self.proto_obj.type_name
        )
        if match_wrapper:
            wrapped_type = "TYPE_" + match_wrapper.group(1).upper()
            if hasattr(betterproto, wrapped_type):
                return f"betterproto.{wrapped_type}"
        return None

    @property
    def repeated(self) -> bool:
        return (
            self.proto_obj.label == FieldDescriptorProtoLabel.LABEL_REPEATED
            and not is_map(self.proto_obj, self.parent)
        )

    @property
    def optional(self) -> bool:
        return self.proto_obj.proto3_optional

    @property
    def field_type(self) -> str:
        """String representation of proto field type."""
        return (
            FieldDescriptorProtoType(self.proto_obj.type)
            .name.lower()
            .replace("type_", "")
        )

    @property
    def packed(self) -> bool:
        """True if the wire representation is a packed format."""
        return self.repeated and self.proto_obj.type in PROTO_PACKED_TYPES

    @property
    def py_name(self) -> str:
        """Pythonized name."""
        return pythonize_field_name(self.proto_name)

    @property
    def proto_name(self) -> str:
        """Original protobuf name."""
        return self.proto_obj.name

    @property
    def py_type(self) -> str:
        """String representation of Python type."""
        if self.proto_obj.type in PROTO_FLOAT_TYPES:
            return "float"
        elif self.proto_obj.type in PROTO_INT_TYPES:
            return "int"
        elif self.proto_obj.type in PROTO_BOOL_TYPES:
            return "bool"
        elif self.proto_obj.type in PROTO_STR_TYPES:
            return "str"
        elif self.proto_obj.type in PROTO_BYTES_TYPES:
            return "bytes"
        elif self.proto_obj.type in PROTO_MESSAGE_TYPES:
            # Type referencing another defined Message or a named enum
            return get_type_reference(
                package=self.output_file.package,
                imports=self.output_file.imports_end,
                source_type=self.proto_obj.type_name,
                typing_compiler=self.typing_compiler,
                pydantic=self.output_file.pydantic_dataclasses,
            )
        else:
            raise NotImplementedError(f"Unknown type {self.proto_obj.type}")

    @property
    def annotation(self) -> str:
        py_type = self.py_type
        if self.use_builtins:
            py_type = f"builtins.{py_type}"
        if self.repeated:
            return self.typing_compiler.list(py_type)
        if self.optional:
            return self.typing_compiler.optional(py_type)
        return py_type


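# Editorial note (not in the original source): examples of resulting
# annotations, assuming the default DirectImportTypingCompiler --
#     repeated int32 values  ->  "List[int]"
#     optional string label  ->  "Optional[str]"
#     bytes payload          ->  "bytes"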
@dataclass
class OneOfFieldCompiler(FieldCompiler):
    @property
    def betterproto_field_args(self) -> List[str]:
        args = super().betterproto_field_args
        group = self.parent.proto_obj.oneof_decl[self.proto_obj.oneof_index].name
        args.append(f'group="{group}"')
        return args


@dataclass
class PydanticOneOfFieldCompiler(OneOfFieldCompiler):
    @property
    def optional(self) -> bool:
        # Force the optional to be True. This will allow the pydantic dataclass
        # to validate the object correctly by allowing the field to be left empty.
        # We add a pydantic validator later to ensure exactly one field is defined.
        return True

    @property
    def pydantic_imports(self) -> Set[str]:
        return {"model_validator"}


@dataclass
class MapEntryCompiler(FieldCompiler):
    py_k_type: Type = PLACEHOLDER
    py_v_type: Type = PLACEHOLDER
    proto_k_type: str = PLACEHOLDER
    proto_v_type: str = PLACEHOLDER

    def __post_init__(self) -> None:
        """Explore nested types and set k_type and v_type if unset."""
        map_entry = f"{self.proto_obj.name.replace('_', '').lower()}entry"
        for nested in self.parent.proto_obj.nested_type:
            if (
                nested.name.replace("_", "").lower() == map_entry
                and nested.options.map_entry
            ):
                # Get Python types
                self.py_k_type = FieldCompiler(
                    source_file=self.source_file,
                    parent=self,
                    proto_obj=nested.field[0],  # key
                    typing_compiler=self.typing_compiler,
                ).py_type
                self.py_v_type = FieldCompiler(
                    source_file=self.source_file,
                    parent=self,
                    proto_obj=nested.field[1],  # value
                    typing_compiler=self.typing_compiler,
                ).py_type

                # Get proto types
                self.proto_k_type = FieldDescriptorProtoType(nested.field[0].type).name
                self.proto_v_type = FieldDescriptorProtoType(nested.field[1].type).name
        super().__post_init__()  # call FieldCompiler -> MessageCompiler __post_init__

    @property
    def betterproto_field_args(self) -> List[str]:
        return [f"betterproto.{self.proto_k_type}", f"betterproto.{self.proto_v_type}"]

    @property
    def field_type(self) -> str:
        return "map"

    @property
    def annotation(self) -> str:
        return self.typing_compiler.dict(self.py_k_type, self.py_v_type)

    @property
    def repeated(self) -> bool:
        return False  # maps cannot be repeated


@dataclass
class EnumDefinitionCompiler(MessageCompiler):
    """Representation of a proto Enum definition."""

    proto_obj: EnumDescriptorProto = PLACEHOLDER
    entries: List["EnumDefinitionCompiler.EnumEntry"] = PLACEHOLDER

    @dataclass(unsafe_hash=True)
    class EnumEntry:
        """Representation of an Enum entry."""

        name: str
        value: int
        comment: str

    def __post_init__(self) -> None:
        # Get entries/allowed values for this Enum
        self.entries = [
            self.EnumEntry(
                name=pythonize_enum_member_name(
                    entry_proto_value.name, self.proto_obj.name
                ),
                value=entry_proto_value.number,
                comment=get_comment(
                    proto_file=self.source_file, path=self.path + [2, entry_number]
                ),
            )
            for entry_number, entry_proto_value in enumerate(self.proto_obj.value)
        ]
        super().__post_init__()  # call MessageCompiler __post_init__


@dataclass
class ServiceCompiler(ProtoContentBase):
    source_file: FileDescriptorProto
    parent: OutputTemplate = PLACEHOLDER
    proto_obj: DescriptorProto = PLACEHOLDER
    path: List[int] = PLACEHOLDER
    methods: List["ServiceMethodCompiler"] = field(default_factory=list)

    def __post_init__(self) -> None:
        # Add service to output file
        self.output_file.services.append(self)
        super().__post_init__()  # check for unset fields

    @property
    def proto_name(self) -> str:
        return self.proto_obj.name

    @property
    def py_name(self) -> str:
        return pythonize_class_name(self.proto_name)


@dataclass
class ServiceMethodCompiler(ProtoContentBase):
    source_file: FileDescriptorProto
    parent: ServiceCompiler
    proto_obj: MethodDescriptorProto
    path: List[int] = PLACEHOLDER
    comment_indent: int = 8

    def __post_init__(self) -> None:
        # Add method to service
        self.parent.methods.append(self)

        self.output_file.imports_type_checking_only.add("import grpclib.server")
        self.output_file.imports_type_checking_only.add(
            "from betterproto.grpc.grpclib_client import MetadataLike"
        )
        self.output_file.imports_type_checking_only.add(
            "from grpclib.metadata import Deadline"
        )

        super().__post_init__()  # check for unset fields

    @property
    def py_name(self) -> str:
        """Pythonized method name."""
        return pythonize_method_name(self.proto_obj.name)

    @property
    def proto_name(self) -> str:
        """Original protobuf name."""
        return self.proto_obj.name

    @property
    def route(self) -> str:
        package_part = (
            f"{self.output_file.package}." if self.output_file.package else ""
        )
        return f"/{package_part}{self.parent.proto_name}/{self.proto_name}"

    @property
    def py_input_message_type(self) -> str:
        """String representation of the Python type corresponding to the
        input message.

        Returns
        -------
        str
            String representation of the Python type corresponding to the input message.
        """
        return get_type_reference(
            package=self.output_file.package,
            imports=self.output_file.imports_end,
            source_type=self.proto_obj.input_type,
            typing_compiler=self.output_file.typing_compiler,
            unwrap=False,
            pydantic=self.output_file.pydantic_dataclasses,
        ).strip('"')

    @property
    def py_input_message_param(self) -> str:
        """Param name corresponding to py_input_message_type.

        Returns
        -------
        str
            Param name corresponding to py_input_message_type.
        """
        return pythonize_field_name(self.py_input_message_type)

    @property
    def py_output_message_type(self) -> str:
        """String representation of the Python type corresponding to the
        output message.

        Returns
        -------
        str
            String representation of the Python type corresponding to the output message.
        """
        return get_type_reference(
            package=self.output_file.package,
            imports=self.output_file.imports_end,
            source_type=self.proto_obj.output_type,
            typing_compiler=self.output_file.typing_compiler,
            unwrap=False,
            pydantic=self.output_file.pydantic_dataclasses,
        ).strip('"')

    @property
    def client_streaming(self) -> bool:
        return self.proto_obj.client_streaming

    @property
    def server_streaming(self) -> bool:
        return self.proto_obj.server_streaming
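Editorial aside: the oneof_index hack described in is_oneof above can be seen end to end in a small sketch; the expected outputs are the editor's assumption about betterproto's which_one_of behaviour, not taken from this diff:

# Sketch: telling a set oneof_index apart from an unset one. Assumes
# monkey_patch_oneof_index() has already been applied.
from betterproto import which_one_of
from betterproto.lib.google.protobuf import FieldDescriptorProto

plain = FieldDescriptorProto(name="a")
member = FieldDescriptorProto(name="b", oneof_index=0)

# Expected: ("", None) -- oneof_index was never assigned.
print(which_one_of(plain, "oneof_index"))
# Expected: ("oneof_index", 0) -- explicitly set, even though the value is 0.
print(which_one_of(member, "oneof_index"))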
163
src/betterproto/plugin/module_validation.py
Normal file
163
src/betterproto/plugin/module_validation.py
Normal file
@ -0,0 +1,163 @@
|
|||||||
|
import re
|
||||||
|
from collections import defaultdict
|
||||||
|
from dataclasses import (
|
||||||
|
dataclass,
|
||||||
|
field,
|
||||||
|
)
|
||||||
|
from typing import (
|
||||||
|
Dict,
|
||||||
|
Iterator,
|
||||||
|
List,
|
||||||
|
Tuple,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ModuleValidator:
|
||||||
|
line_iterator: Iterator[str]
|
||||||
|
line_number: int = field(init=False, default=0)
|
||||||
|
|
||||||
|
collisions: Dict[str, List[Tuple[int, str]]] = field(
|
||||||
|
init=False, default_factory=lambda: defaultdict(list)
|
||||||
|
)
|
||||||
|
|
||||||
|
def add_import(self, imp: str, number: int, full_line: str):
|
||||||
|
"""
|
||||||
|
Adds an import to be tracked.
|
||||||
|
"""
|
||||||
|
self.collisions[imp].append((number, full_line))
|
||||||
|
|
||||||
|
def process_import(self, imp: str):
|
||||||
|
"""
|
||||||
|
Filters out the import to its actual value.
|
||||||
|
"""
|
||||||
|
if " as " in imp:
|
||||||
|
imp = imp[imp.index(" as ") + 4 :]
|
||||||
|
|
||||||
|
imp = imp.strip()
|
||||||
|
assert " " not in imp, imp
|
||||||
|
return imp
|
||||||
|
|
||||||
|
def evaluate_multiline_import(self, line: str):
|
||||||
|
"""
|
||||||
|
Evaluates a multiline import from a starting line
|
||||||
|
"""
|
||||||
|
# Filter the first line and remove anything before the import statement.
|
||||||
|
full_line = line
|
||||||
|
line = line.split("import", 1)[1]
|
        if "(" in line:
            conditional = lambda line: ")" not in line
        else:
            conditional = lambda line: "\\" in line

        # Remove open parenthesis if it exists.
        if "(" in line:
            line = line[line.index("(") + 1 :]

        # Consume the multiline import using the conditional chosen above for
        # the way this import is formatted.
        while conditional(line):
            # Split the line by commas.
            imports = line.split(",")

            for imp in imports:
                # Add the import to the namespace.
                imp = self.process_import(imp)
                if imp:
                    self.add_import(imp, self.line_number, full_line)
            # Get the next line.
            full_line = line = next(self.line_iterator)
            # Increment the line number.
            self.line_number += 1

        # Validate the last line.
        if ")" in line:
            line = line[: line.index(")")]
            imports = line.split(",")
            for imp in imports:
                imp = self.process_import(imp)
                if imp:
                    self.add_import(imp, self.line_number, full_line)

    def evaluate_import(self, line: str):
        """
        Extracts an import from a line.
        """
        whole_line = line
        line = line[line.index("import") + 6 :]
        values = line.split(",")
        for v in values:
            self.add_import(self.process_import(v), self.line_number, whole_line)

    def next(self):
        """
        Evaluate each line for names in the module.
        """
        line = next(self.line_iterator)

        # Skip lines with indentation or comments.
        if (
            # Skip indents and whitespace.
            line.startswith(" ")
            or line == "\n"
            or line.startswith("\t")
            # Skip comments.
            or line.startswith("#")
            # Skip decorators.
            or line.startswith("@")
        ):
            self.line_number += 1
            return

        # Skip docstrings.
        if line.startswith('"""') or line.startswith("'''"):
            quote = line[0] * 3
            line = line[3:]
            while quote not in line:
                line = next(self.line_iterator)
            self.line_number += 1
            return

        # Evaluate imports.
        if line.startswith("from ") or line.startswith("import "):
            if "(" in line or "\\" in line:
                self.evaluate_multiline_import(line)
            else:
                self.evaluate_import(line)

        # Evaluate classes.
        elif line.startswith("class "):
            class_name = re.search(r"class (\w+)", line).group(1)
            if class_name:
                self.add_import(class_name, self.line_number, line)

        # Evaluate functions.
        elif line.startswith("def "):
            function_name = re.search(r"def (\w+)", line).group(1)
            if function_name:
                self.add_import(function_name, self.line_number, line)

        # Evaluate direct assignments.
        elif "=" in line:
            assignment = re.search(r"(\w+)\s*=", line).group(1)
            if assignment:
                self.add_import(assignment, self.line_number, line)

        self.line_number += 1

    def validate(self) -> bool:
        """
        Run validation.
        """
        try:
            while True:
                self.next()
        except StopIteration:
            pass

        # Filter collisions for those with more than one value.
        self.collisions = {k: v for k, v in self.collisions.items() if len(v) > 1}

        # Return True if no collisions are found.
        return not bool(self.collisions)
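Taken together, `next()` and `validate()` implement a straightforward collision scan: every top-level class, function, assignment, and imported name is recorded against the line where it appears, and any name recorded more than once is reported as a collision. A self-contained sketch of the same idea (the `find_collisions` helper below is illustrative, not part of this diff):

```python
import re
from collections import defaultdict
from typing import Dict, List


def find_collisions(source: str) -> Dict[str, List[int]]:
    """Collect top-level names the way the validator above does,
    keeping only names defined more than once."""
    names: Dict[str, List[int]] = defaultdict(list)
    for number, line in enumerate(source.splitlines(), start=1):
        # Match `class X` / `def X` headers or a direct `X = ...` assignment.
        match = re.match(r"(?:class|def)\s+(\w+)|(\w+)\s*=", line)
        if match:
            names[match.group(1) or match.group(2)].append(number)
    return {k: v for k, v in names.items() if len(v) > 1}


print(find_collisions("Foo = 1\ndef Foo():\n    pass\n"))  # {'Foo': [1, 2]}
```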
src/betterproto/plugin/parser.py (new file, 269 lines)
@@ -0,0 +1,269 @@
import pathlib
import sys
from typing import (
    Generator,
    List,
    Set,
    Tuple,
    Union,
)

from betterproto.lib.google.protobuf import (
    DescriptorProto,
    EnumDescriptorProto,
    FieldDescriptorProto,
    FileDescriptorProto,
    ServiceDescriptorProto,
)
from betterproto.lib.google.protobuf.compiler import (
    CodeGeneratorRequest,
    CodeGeneratorResponse,
    CodeGeneratorResponseFeature,
    CodeGeneratorResponseFile,
)

from .compiler import outputfile_compiler
from .models import (
    EnumDefinitionCompiler,
    FieldCompiler,
    MapEntryCompiler,
    MessageCompiler,
    OneOfFieldCompiler,
    OutputTemplate,
    PluginRequestCompiler,
    PydanticOneOfFieldCompiler,
    ServiceCompiler,
    ServiceMethodCompiler,
    is_map,
    is_oneof,
)
from .typing_compiler import (
    DirectImportTypingCompiler,
    NoTyping310TypingCompiler,
    TypingCompiler,
    TypingImportTypingCompiler,
)


def traverse(
    proto_file: FileDescriptorProto,
) -> Generator[
    Tuple[Union[EnumDescriptorProto, DescriptorProto], List[int]], None, None
]:
    # Todo: Keep information about nested hierarchy
    def _traverse(
        path: List[int],
        items: Union[List[EnumDescriptorProto], List[DescriptorProto]],
        prefix: str = "",
    ) -> Generator[
        Tuple[Union[EnumDescriptorProto, DescriptorProto], List[int]], None, None
    ]:
        for i, item in enumerate(items):
            # Adjust the name since we flatten the hierarchy.
            # Todo: don't change the name, but include full name in returned tuple
            item.name = next_prefix = f"{prefix}_{item.name}"
            yield item, [*path, i]

            if isinstance(item, DescriptorProto):
                # Get nested types.
                yield from _traverse([*path, i, 4], item.enum_type, next_prefix)
                yield from _traverse([*path, i, 3], item.nested_type, next_prefix)

    yield from _traverse([5], proto_file.enum_type)
    yield from _traverse([4], proto_file.message_type)


def generate_code(request: CodeGeneratorRequest) -> CodeGeneratorResponse:
    response = CodeGeneratorResponse()

    plugin_options = request.parameter.split(",") if request.parameter else []
    response.supported_features = CodeGeneratorResponseFeature.FEATURE_PROTO3_OPTIONAL

    request_data = PluginRequestCompiler(plugin_request_obj=request)
    # Gather output packages
    for proto_file in request.proto_file:
        output_package_name = proto_file.package
        if output_package_name not in request_data.output_packages:
            # Create a new output if there is no output for this package
            request_data.output_packages[output_package_name] = OutputTemplate(
                parent_request=request_data, package_proto_obj=proto_file
            )
        # Add this input file to the output corresponding to this package
        request_data.output_packages[output_package_name].input_files.append(proto_file)

        if (
            proto_file.package == "google.protobuf"
            and "INCLUDE_GOOGLE" not in plugin_options
        ):
            # If not INCLUDE_GOOGLE,
            # skip outputting Google's well-known types
            request_data.output_packages[output_package_name].output = False

        if "pydantic_dataclasses" in plugin_options:
            request_data.output_packages[
                output_package_name
            ].pydantic_dataclasses = True

        # Gather any typing generation options.
        typing_opts = [
            opt[len("typing.") :] for opt in plugin_options if opt.startswith("typing.")
        ]

        if len(typing_opts) > 1:
            raise ValueError("Multiple typing options provided")
        # Set the compiler type.
        typing_opt = typing_opts[0] if typing_opts else "direct"
        if typing_opt == "direct":
            request_data.output_packages[
                output_package_name
            ].typing_compiler = DirectImportTypingCompiler()
        elif typing_opt == "root":
            request_data.output_packages[
                output_package_name
            ].typing_compiler = TypingImportTypingCompiler()
        elif typing_opt == "310":
            request_data.output_packages[
                output_package_name
            ].typing_compiler = NoTyping310TypingCompiler()

    # Read Messages and Enums.
    # We need to read Messages before Services so that we can
    # get the references to input/output messages for each service.
    for output_package_name, output_package in request_data.output_packages.items():
        for proto_input_file in output_package.input_files:
            for item, path in traverse(proto_input_file):
                read_protobuf_type(
                    source_file=proto_input_file,
                    item=item,
                    path=path,
                    output_package=output_package,
                )

    # Read Services
    for output_package_name, output_package in request_data.output_packages.items():
        for proto_input_file in output_package.input_files:
            for index, service in enumerate(proto_input_file.service):
                read_protobuf_service(proto_input_file, service, index, output_package)

    # Generate output files
    output_paths: Set[pathlib.Path] = set()
    for output_package_name, output_package in request_data.output_packages.items():
        if not output_package.output:
            continue

        # Add files to the response object
        output_path = pathlib.Path(*output_package_name.split("."), "__init__.py")
        output_paths.add(output_path)

        response.file.append(
            CodeGeneratorResponseFile(
                name=str(output_path),
                # Render and then format the output file
                content=outputfile_compiler(output_file=output_package),
            )
        )

    # Make each output directory a package with __init__ file
    init_files = {
        directory.joinpath("__init__.py")
        for path in output_paths
        for directory in path.parents
        if not directory.joinpath("__init__.py").exists()
    } - output_paths

    for init_file in init_files:
        response.file.append(CodeGeneratorResponseFile(name=str(init_file)))

    for output_package_name in sorted(output_paths.union(init_files)):
        print(f"Writing {output_package_name}", file=sys.stderr)

    return response


def _make_one_of_field_compiler(
    output_package: OutputTemplate,
    source_file: "FileDescriptorProto",
    parent: MessageCompiler,
    proto_obj: "FieldDescriptorProto",
    path: List[int],
) -> FieldCompiler:
    pydantic = output_package.pydantic_dataclasses
    Cls = PydanticOneOfFieldCompiler if pydantic else OneOfFieldCompiler
    return Cls(
        source_file=source_file,
        parent=parent,
        proto_obj=proto_obj,
        path=path,
        typing_compiler=output_package.typing_compiler,
    )


def read_protobuf_type(
    item: DescriptorProto,
    path: List[int],
    source_file: "FileDescriptorProto",
    output_package: OutputTemplate,
) -> None:
    if isinstance(item, DescriptorProto):
        if item.options.map_entry:
            # Skip generated map entry messages since we just use dicts
            return
        # Process Message
        message_data = MessageCompiler(
            source_file=source_file,
            parent=output_package,
            proto_obj=item,
            path=path,
            typing_compiler=output_package.typing_compiler,
        )
        for index, field in enumerate(item.field):
            if is_map(field, item):
                MapEntryCompiler(
                    source_file=source_file,
                    parent=message_data,
                    proto_obj=field,
                    path=path + [2, index],
                    typing_compiler=output_package.typing_compiler,
                )
            elif is_oneof(field):
                _make_one_of_field_compiler(
                    output_package, source_file, message_data, field, path + [2, index]
                )
            else:
                FieldCompiler(
                    source_file=source_file,
                    parent=message_data,
                    proto_obj=field,
                    path=path + [2, index],
                    typing_compiler=output_package.typing_compiler,
                )
    elif isinstance(item, EnumDescriptorProto):
        # Enum
        EnumDefinitionCompiler(
            source_file=source_file,
            parent=output_package,
            proto_obj=item,
            path=path,
            typing_compiler=output_package.typing_compiler,
        )


def read_protobuf_service(
    source_file: FileDescriptorProto,
    service: ServiceDescriptorProto,
    index: int,
    output_package: OutputTemplate,
) -> None:
    service_data = ServiceCompiler(
        source_file=source_file,
        parent=output_package,
        proto_obj=service,
        path=[6, index],
    )
    for j, method in enumerate(service.method):
        ServiceMethodCompiler(
            source_file=source_file,
            parent=service_data,
            proto_obj=method,
            path=[6, index, 2, j],
        )
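`generate_code` is the plugin's core entry point. protoc talks to plugins by writing a serialized `CodeGeneratorRequest` to the plugin's stdin and reading a serialized `CodeGeneratorResponse` back from its stdout. A minimal sketch of that wiring, assuming `generate_code` is importable from the module above (the repository's real entry module is not part of this excerpt):

```python
import sys

from betterproto.lib.google.protobuf.compiler import CodeGeneratorRequest
from betterproto.plugin.parser import generate_code


def main() -> None:
    # protoc hands the serialized request over stdin...
    request = CodeGeneratorRequest().parse(sys.stdin.buffer.read())
    # ...and expects the serialized response on stdout.
    sys.stdout.buffer.write(bytes(generate_code(request)))


if __name__ == "__main__":
    main()
```

The `plugin.bat` wrapper added below exists for exactly this handoff on Windows: protoc launches it, and it forwards stdin/stdout to the Python plugin package.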
src/betterproto/plugin/plugin.bat (new file, 2 lines)
@@ -0,0 +1,2 @@
@SET plugin_dir=%~dp0
@python -m %plugin_dir% %*
src/betterproto/plugin/typing_compiler.py (new file, 173 lines)
@@ -0,0 +1,173 @@
import abc
from collections import defaultdict
from dataclasses import (
    dataclass,
    field,
)
from typing import (
    Dict,
    Iterator,
    Optional,
    Set,
)


class TypingCompiler(metaclass=abc.ABCMeta):
    @abc.abstractmethod
    def optional(self, type: str) -> str:
        raise NotImplementedError()

    @abc.abstractmethod
    def list(self, type: str) -> str:
        raise NotImplementedError()

    @abc.abstractmethod
    def dict(self, key: str, value: str) -> str:
        raise NotImplementedError()

    @abc.abstractmethod
    def union(self, *types: str) -> str:
        raise NotImplementedError()

    @abc.abstractmethod
    def iterable(self, type: str) -> str:
        raise NotImplementedError()

    @abc.abstractmethod
    def async_iterable(self, type: str) -> str:
        raise NotImplementedError()

    @abc.abstractmethod
    def async_iterator(self, type: str) -> str:
        raise NotImplementedError()

    @abc.abstractmethod
    def imports(self) -> Dict[str, Optional[Set[str]]]:
        """
        Returns either the direct import as a key with None as value, or a set
        of values to import from the key.
        """
        raise NotImplementedError()

    def import_lines(self) -> Iterator:
        imports = self.imports()
        for key, value in imports.items():
            if value is None:
                yield f"import {key}"
            else:
                yield f"from {key} import ("
                for v in sorted(value):
                    yield f"    {v},"
                yield ")"


@dataclass
class DirectImportTypingCompiler(TypingCompiler):
    _imports: Dict[str, Set[str]] = field(default_factory=lambda: defaultdict(set))

    def optional(self, type: str) -> str:
        self._imports["typing"].add("Optional")
        return f"Optional[{type}]"

    def list(self, type: str) -> str:
        self._imports["typing"].add("List")
        return f"List[{type}]"

    def dict(self, key: str, value: str) -> str:
        self._imports["typing"].add("Dict")
        return f"Dict[{key}, {value}]"

    def union(self, *types: str) -> str:
        self._imports["typing"].add("Union")
        return f"Union[{', '.join(types)}]"

    def iterable(self, type: str) -> str:
        self._imports["typing"].add("Iterable")
        return f"Iterable[{type}]"

    def async_iterable(self, type: str) -> str:
        self._imports["typing"].add("AsyncIterable")
        return f"AsyncIterable[{type}]"

    def async_iterator(self, type: str) -> str:
        self._imports["typing"].add("AsyncIterator")
        return f"AsyncIterator[{type}]"

    def imports(self) -> Dict[str, Optional[Set[str]]]:
        return {k: v if v else None for k, v in self._imports.items()}


@dataclass
class TypingImportTypingCompiler(TypingCompiler):
    _imported: bool = False

    def optional(self, type: str) -> str:
        self._imported = True
        return f"typing.Optional[{type}]"

    def list(self, type: str) -> str:
        self._imported = True
        return f"typing.List[{type}]"

    def dict(self, key: str, value: str) -> str:
        self._imported = True
        return f"typing.Dict[{key}, {value}]"

    def union(self, *types: str) -> str:
        self._imported = True
        return f"typing.Union[{', '.join(types)}]"

    def iterable(self, type: str) -> str:
        self._imported = True
        return f"typing.Iterable[{type}]"

    def async_iterable(self, type: str) -> str:
        self._imported = True
        return f"typing.AsyncIterable[{type}]"

    def async_iterator(self, type: str) -> str:
        self._imported = True
        return f"typing.AsyncIterator[{type}]"

    def imports(self) -> Dict[str, Optional[Set[str]]]:
        if self._imported:
            return {"typing": None}
        return {}


@dataclass
class NoTyping310TypingCompiler(TypingCompiler):
    _imports: Dict[str, Set[str]] = field(default_factory=lambda: defaultdict(set))

    @staticmethod
    def _fmt(type: str) -> str:  # for now this is necessary till 3.14
        if type.startswith('"'):
            return type[1:-1]
        return type

    def optional(self, type: str) -> str:
        return f'"{self._fmt(type)} | None"'

    def list(self, type: str) -> str:
        return f'"list[{self._fmt(type)}]"'

    def dict(self, key: str, value: str) -> str:
        return f'"dict[{key}, {self._fmt(value)}]"'

    def union(self, *types: str) -> str:
        return f'"{" | ".join(map(self._fmt, types))}"'

    def iterable(self, type: str) -> str:
        self._imports["collections.abc"].add("Iterable")
        return f'"Iterable[{type}]"'

    def async_iterable(self, type: str) -> str:
        self._imports["collections.abc"].add("AsyncIterable")
        return f'"AsyncIterable[{type}]"'

    def async_iterator(self, type: str) -> str:
        self._imports["collections.abc"].add("AsyncIterator")
        return f'"AsyncIterator[{type}]"'

    def imports(self) -> Dict[str, Optional[Set[str]]]:
        return {k: v if v else None for k, v in self._imports.items()}
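The three strategies only differ in how they spell the same annotation and which imports they accumulate as a side effect. A quick illustrative comparison (assuming the module above is importable as `betterproto.plugin.typing_compiler`):

```python
from betterproto.plugin.typing_compiler import (
    DirectImportTypingCompiler,
    NoTyping310TypingCompiler,
    TypingImportTypingCompiler,
)

direct = DirectImportTypingCompiler()
root = TypingImportTypingCompiler()
py310 = NoTyping310TypingCompiler()

print(direct.optional("int"))  # Optional[int]
print(root.optional("int"))    # typing.Optional[int]
print(py310.optional("int"))   # "int | None"

# Each compiler remembers what the generated module must import.
print(direct.imports())  # {'typing': {'Optional'}}
print(root.imports())    # {'typing': None}
print(py310.imports())   # {}  (PEP 604 unions need no import)
```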
src/betterproto/py.typed (new file, empty)
src/betterproto/templates/header.py.j2 (new file, 57 lines)
@@ -0,0 +1,57 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# sources: {{ ', '.join(output_file.input_filenames) }}
# plugin: python-betterproto
# This file has been @generated

__all__ = (
{%- for enum in output_file.enums -%}
    "{{ enum.py_name }}",
{%- endfor -%}
{%- for message in output_file.messages -%}
    "{{ message.py_name }}",
{%- endfor -%}
{%- for service in output_file.services -%}
    "{{ service.py_name }}Stub",
    "{{ service.py_name }}Base",
{%- endfor -%}
)

{% for i in output_file.python_module_imports|sort %}
import {{ i }}
{% endfor %}

{% if output_file.pydantic_dataclasses %}
from pydantic.dataclasses import dataclass
{%- else -%}
from dataclasses import dataclass
{% endif %}

{% if output_file.datetime_imports %}
from datetime import {% for i in output_file.datetime_imports|sort %}{{ i }}{% if not loop.last %}, {% endif %}{% endfor %}

{% endif %}
{% set typing_imports = output_file.typing_compiler.imports() %}
{% if typing_imports %}
{% for line in output_file.typing_compiler.import_lines() %}
{{ line }}
{% endfor %}
{% endif %}

{% if output_file.pydantic_imports %}
from pydantic import {% for i in output_file.pydantic_imports|sort %}{{ i }}{% if not loop.last %}, {% endif %}{% endfor %}

{% endif %}

import betterproto
{% if output_file.services %}
from betterproto.grpc.grpclib_server import ServiceBase
import grpclib
{% endif %}

{% if output_file.imports_type_checking_only %}
from typing import TYPE_CHECKING

if TYPE_CHECKING:
{% for i in output_file.imports_type_checking_only|sort %}    {{ i }}
{% endfor %}
{% endif %}
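For a simple proto with a single message and no services, the header template above renders to roughly the following (illustrative output, not taken from this diff):

```python
# Generated by the protocol buffer compiler. DO NOT EDIT!
# sources: bool.proto
# plugin: python-betterproto
# This file has been @generated

__all__ = ("Test",)

from dataclasses import dataclass

import betterproto
```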
src/betterproto/templates/template.py.j2 (new file, 217 lines)
@@ -0,0 +1,217 @@
{% if output_file.enums %}{% for enum in output_file.enums %}
class {{ enum.py_name }}(betterproto.Enum):
    {% if enum.comment %}
{{ enum.comment }}

    {% endif %}
    {% for entry in enum.entries %}
    {{ entry.name }} = {{ entry.value }}
    {% if entry.comment %}
{{ entry.comment }}

    {% endif %}
    {% endfor %}

    {% if output_file.pydantic_dataclasses %}
    @classmethod
    def __get_pydantic_core_schema__(cls, _source_type, _handler):
        from pydantic_core import core_schema

        return core_schema.int_schema(ge=0)
    {% endif %}

{% endfor %}
{% endif %}
{% for message in output_file.messages %}
{% if output_file.pydantic_dataclasses %}
@dataclass(eq=False, repr=False, config={"extra": "forbid"})
{% else %}
@dataclass(eq=False, repr=False)
{% endif %}
class {{ message.py_name }}(betterproto.Message):
    {% if message.comment %}
{{ message.comment }}

    {% endif %}
    {% for field in message.fields %}
    {{ field.get_field_string() }}
    {% if field.comment %}
{{ field.comment }}

    {% endif %}
    {% endfor %}
    {% if not message.fields %}
    pass
    {% endif %}

    {% if message.deprecated or message.has_deprecated_fields %}
    def __post_init__(self) -> None:
        {% if message.deprecated %}
        warnings.warn("{{ message.py_name }} is deprecated", DeprecationWarning)
        {% endif %}
        super().__post_init__()
        {% for field in message.deprecated_fields %}
        if self.is_set("{{ field }}"):
            warnings.warn("{{ message.py_name }}.{{ field }} is deprecated", DeprecationWarning)
        {% endfor %}
    {% endif %}

    {% if output_file.pydantic_dataclasses and message.has_oneof_fields %}
    @model_validator(mode='after')
    def check_oneof(cls, values):
        return cls._validate_field_groups(values)
    {% endif %}

{% endfor %}
{% for service in output_file.services %}
class {{ service.py_name }}Stub(betterproto.ServiceStub):
    {% if service.comment %}
{{ service.comment }}

    {% elif not service.methods %}
    pass
    {% endif %}
    {% for method in service.methods %}
    async def {{ method.py_name }}(self
        {%- if not method.client_streaming -%}
        , {{ method.py_input_message_param }}: "{{ method.py_input_message_type }}"
        {%- else -%}
        {# Client streaming: need a request iterator instead #}
        , {{ method.py_input_message_param }}_iterator: "{{ output_file.typing_compiler.union(output_file.typing_compiler.async_iterable(method.py_input_message_type), output_file.typing_compiler.iterable(method.py_input_message_type)) }}"
        {%- endif -%}
        ,
        *
        , timeout: {{ output_file.typing_compiler.optional("float") }} = None
        , deadline: {{ output_file.typing_compiler.optional('"Deadline"') }} = None
        , metadata: {{ output_file.typing_compiler.optional('"MetadataLike"') }} = None
    ) -> "{% if method.server_streaming %}{{ output_file.typing_compiler.async_iterator(method.py_output_message_type ) }}{% else %}{{ method.py_output_message_type }}{% endif %}":
        {% if method.comment %}
{{ method.comment }}

        {% endif %}
        {% if method.proto_obj.options.deprecated %}
        warnings.warn("{{ service.py_name }}.{{ method.py_name }} is deprecated", DeprecationWarning)

        {% endif %}
        {% if method.server_streaming %}
        {% if method.client_streaming %}
        async for response in self._stream_stream(
            "{{ method.route }}",
            {{ method.py_input_message_param }}_iterator,
            {{ method.py_input_message_type }},
            {{ method.py_output_message_type.strip('"') }},
            timeout=timeout,
            deadline=deadline,
            metadata=metadata,
        ):
            yield response
        {% else %}{# i.e. not client streaming #}
        async for response in self._unary_stream(
            "{{ method.route }}",
            {{ method.py_input_message_param }},
            {{ method.py_output_message_type.strip('"') }},
            timeout=timeout,
            deadline=deadline,
            metadata=metadata,
        ):
            yield response

        {% endif %}{# if client streaming #}
        {% else %}{# i.e. not server streaming #}
        {% if method.client_streaming %}
        return await self._stream_unary(
            "{{ method.route }}",
            {{ method.py_input_message_param }}_iterator,
            {{ method.py_input_message_type }},
            {{ method.py_output_message_type.strip('"') }},
            timeout=timeout,
            deadline=deadline,
            metadata=metadata,
        )
        {% else %}{# i.e. not client streaming #}
        return await self._unary_unary(
            "{{ method.route }}",
            {{ method.py_input_message_param }},
            {{ method.py_output_message_type.strip('"') }},
            timeout=timeout,
            deadline=deadline,
            metadata=metadata,
        )
        {% endif %}{# client streaming #}
        {% endif %}

    {% endfor %}
{% endfor %}

{% for i in output_file.imports_end %}
{{ i }}
{% endfor %}

{% for service in output_file.services %}
class {{ service.py_name }}Base(ServiceBase):
    {% if service.comment %}
{{ service.comment }}

    {% endif %}

    {% for method in service.methods %}
    async def {{ method.py_name }}(self
        {%- if not method.client_streaming -%}
        , {{ method.py_input_message_param }}: "{{ method.py_input_message_type }}"
        {%- else -%}
        {# Client streaming: need a request iterator instead #}
        , {{ method.py_input_message_param }}_iterator: {{ output_file.typing_compiler.async_iterator(method.py_input_message_type) }}
        {%- endif -%}
    ) -> {% if method.server_streaming %}{{ output_file.typing_compiler.async_iterator(method.py_output_message_type) }}{% else %}"{{ method.py_output_message_type }}"{% endif %}:
        {% if method.comment %}
{{ method.comment }}

        {% endif %}
        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
        {% if method.server_streaming %}
        yield {{ method.py_output_message_type }}()
        {% endif %}

    {% endfor %}

    {% for method in service.methods %}
    async def __rpc_{{ method.py_name }}(self, stream: "grpclib.server.Stream[{{ method.py_input_message_type }}, {{ method.py_output_message_type }}]") -> None:
        {% if not method.client_streaming %}
        request = await stream.recv_message()
        {% else %}
        request = stream.__aiter__()
        {% endif %}
        {% if not method.server_streaming %}
        response = await self.{{ method.py_name }}(request)
        await stream.send_message(response)
        {% else %}
        await self._call_rpc_handler_server_stream(
            self.{{ method.py_name }},
            stream,
            request,
        )
        {% endif %}

    {% endfor %}

    def __mapping__(self) -> {{ output_file.typing_compiler.dict("str", "grpclib.const.Handler") }}:
        return {
        {% for method in service.methods %}
            "{{ method.route }}": grpclib.const.Handler(
                self.__rpc_{{ method.py_name }},
                {% if not method.client_streaming and not method.server_streaming %}
                grpclib.const.Cardinality.UNARY_UNARY,
                {% elif not method.client_streaming and method.server_streaming %}
                grpclib.const.Cardinality.UNARY_STREAM,
                {% elif method.client_streaming and not method.server_streaming %}
                grpclib.const.Cardinality.STREAM_UNARY,
                {% else %}
                grpclib.const.Cardinality.STREAM_STREAM,
                {% endif %}
                {{ method.py_input_message_type }},
                {{ method.py_output_message_type }},
            ),
        {% endfor %}
        }

{% endfor %}
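Rendered against the same single-message proto as before, the message template above produces roughly this dataclass (illustrative; the exact field string comes from `field.get_field_string()`):

```python
@dataclass(eq=False, repr=False)
class Test(betterproto.Message):
    value: bool = betterproto.bool_field(1)
```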
src/betterproto/utils.py (new file, 56 lines)
@@ -0,0 +1,56 @@
from __future__ import annotations

from typing import (
    Any,
    Callable,
    Generic,
    Optional,
    Type,
    TypeVar,
)

from typing_extensions import (
    Concatenate,
    ParamSpec,
    Self,
)


SelfT = TypeVar("SelfT")
P = ParamSpec("P")
HybridT = TypeVar("HybridT", covariant=True)


class hybridmethod(Generic[SelfT, P, HybridT]):
    def __init__(
        self,
        func: Callable[
            Concatenate[type[SelfT], P], HybridT
        ],  # Must be the classmethod version
    ):
        self.cls_func = func
        self.__doc__ = func.__doc__

    def instancemethod(self, func: Callable[Concatenate[SelfT, P], HybridT]) -> Self:
        self.instance_func = func
        return self

    def __get__(
        self, instance: Optional[SelfT], owner: Type[SelfT]
    ) -> Callable[P, HybridT]:
        if instance is None or self.instance_func is None:
            # either bound to the class, or no instance method available
            return self.cls_func.__get__(owner, None)
        return self.instance_func.__get__(instance, owner)


T_co = TypeVar("T_co")
TT_co = TypeVar("TT_co", bound="type[Any]")


class classproperty(Generic[TT_co, T_co]):
    def __init__(self, func: Callable[[TT_co], T_co]):
        self.__func__ = func

    def __get__(self, instance: Any, type: TT_co) -> T_co:
        return self.__func__(type)
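`hybridmethod` dispatches to one function when accessed on the class and another when accessed on an instance, while `classproperty` exposes a computed value on the class itself. A short illustrative use of both descriptors (the `Config` class here is hypothetical):

```python
class Config:
    _default_name = "config"
    name = "instance-config"

    @hybridmethod
    def describe(cls) -> str:
        # Runs when accessed on the class itself.
        return f"class default: {cls._default_name}"

    @describe.instancemethod
    def describe(self) -> str:
        # Runs when accessed on an instance.
        return f"instance: {self.name}"

    @classproperty
    def default_name(cls) -> str:
        return cls._default_name


print(Config.describe())    # class default: config
print(Config().describe())  # instance: instance-config
print(Config.default_name)  # config
```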
tests/README.md (new file, 91 lines)
@@ -0,0 +1,91 @@
# Standard Tests Development Guide

Standard test cases are found in [betterproto/tests/inputs](inputs), where each subdirectory represents a test case that is verified in isolation.

```
inputs/
    bool/
    double/
    int32/
    ...
```

## Test case directory structure

Each test case has a `<name>.proto` file with a message called `Test`, and optionally a matching `.json` file and a custom test called `test_*.py`.

```bash
bool/
    bool.proto
    bool.json       # optional
    test_bool.py    # optional
```

### proto

`<name>.proto` — *The protobuf message to test*

```protobuf
syntax = "proto3";

message Test {
    bool value = 1;
}
```

You can add multiple `.proto` files to the test case, as long as one file matches the directory name.

### json

`<name>.json` — *Test data to validate the message with*

```json
{
    "value": true
}
```

### pytest

`test_<name>.py` — *Custom test to validate specific aspects of the generated class*

```python
from tests.output_betterproto.bool.bool import Test


def test_value():
    message = Test()
    assert not message.value, "Boolean is False by default"
```

## Standard tests

The following tests are automatically executed for all cases:

- [x] Can the generated python code be imported?
- [x] Can the generated message class be instantiated?
- [x] Is the generated code compatible with Google's `grpc_tools.protoc` implementation?
  - _when `.json` is present_

## Running the tests

- `pipenv run generate`
  This generates:
  - `betterproto/tests/output_betterproto` — *the plugin-generated python classes*
  - `betterproto/tests/output_reference` — *reference implementation classes*
- `pipenv run test`

## Intentionally failing tests

The standard test suite includes tests that fail by intention. These tests document known bugs and missing features that are intended to be corrected in the future.

When running `pytest`, they show up as `x` or `X` in the test results.

```
betterproto/tests/test_inputs.py ..x...x..x...x.X........xx........x.....x.......x.xx....x...................... [ 84%]
```

- `.` — PASSED
- `x` — XFAIL: expected failure
- `X` — XPASS: expected failure, but still passed

Test cases marked for expected failure are declared in [inputs/config.py](inputs/config.py).
tests/__init__.py (new file, empty)
tests/conftest.py (new file, 11 lines)
@@ -0,0 +1,11 @@
import copy
import sys

import pytest


@pytest.fixture
def reset_sys_path():
    original = copy.deepcopy(sys.path)
    yield
    sys.path = original
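The fixture snapshots `sys.path` before the test runs and restores it afterwards, so a test can mutate the import path freely. A hypothetical test using it:

```python
import sys


def test_import_path_is_restored(reset_sys_path, tmp_path):
    # Any change made here is rolled back by the fixture's teardown.
    sys.path.insert(0, str(tmp_path))
    assert str(tmp_path) in sys.path
```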
tests/generate.py (new executable file, 209 lines)
@@ -0,0 +1,209 @@
#!/usr/bin/env python
import asyncio
import os
import platform
import shutil
import sys
from pathlib import Path
from typing import Set

from tests.util import (
    get_directories,
    inputs_path,
    output_path_betterproto,
    output_path_betterproto_pydantic,
    output_path_reference,
    protoc,
)


# Force pure-python implementation instead of C++, otherwise imports
# break things because we can't properly reset the symbol database.
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python"


def clear_directory(dir_path: Path):
    for file_or_directory in dir_path.glob("*"):
        if file_or_directory.is_dir():
            shutil.rmtree(file_or_directory)
        else:
            file_or_directory.unlink()


async def generate(whitelist: Set[str], verbose: bool):
    test_case_names = set(get_directories(inputs_path)) - {"__pycache__"}

    path_whitelist = set()
    name_whitelist = set()
    for item in whitelist:
        if item in test_case_names:
            name_whitelist.add(item)
            continue
        path_whitelist.add(item)

    generation_tasks = []
    for test_case_name in sorted(test_case_names):
        test_case_input_path = inputs_path.joinpath(test_case_name).resolve()
        if (
            whitelist
            and str(test_case_input_path) not in path_whitelist
            and test_case_name not in name_whitelist
        ):
            continue
        generation_tasks.append(
            generate_test_case_output(test_case_input_path, test_case_name, verbose)
        )

    failed_test_cases = []
    # Wait for all subprocs and match any failures to names to report
    for test_case_name, result in zip(
        sorted(test_case_names), await asyncio.gather(*generation_tasks)
    ):
        if result != 0:
            failed_test_cases.append(test_case_name)

    if len(failed_test_cases) > 0:
        sys.stderr.write(
            "\n\033[31;1;4mFailed to generate the following test cases:\033[0m\n"
        )
        for failed_test_case in failed_test_cases:
            sys.stderr.write(f"- {failed_test_case}\n")

        sys.exit(1)


async def generate_test_case_output(
    test_case_input_path: Path, test_case_name: str, verbose: bool
) -> int:
    """
    Returns the max of the subprocess return values.
    """

    test_case_output_path_reference = output_path_reference.joinpath(test_case_name)
    test_case_output_path_betterproto = output_path_betterproto
    test_case_output_path_betterproto_pyd = output_path_betterproto_pydantic

    os.makedirs(test_case_output_path_reference, exist_ok=True)
    os.makedirs(test_case_output_path_betterproto, exist_ok=True)
    os.makedirs(test_case_output_path_betterproto_pyd, exist_ok=True)

    clear_directory(test_case_output_path_reference)
    clear_directory(test_case_output_path_betterproto)

    (
        (ref_out, ref_err, ref_code),
        (plg_out, plg_err, plg_code),
        (plg_out_pyd, plg_err_pyd, plg_code_pyd),
    ) = await asyncio.gather(
        protoc(test_case_input_path, test_case_output_path_reference, True),
        protoc(test_case_input_path, test_case_output_path_betterproto, False),
        protoc(
            test_case_input_path, test_case_output_path_betterproto_pyd, False, True
        ),
    )

    if ref_code == 0:
        print(f"\033[31;1;4mGenerated reference output for {test_case_name!r}\033[0m")
    else:
        print(
            f"\033[31;1;4mFailed to generate reference output for {test_case_name!r}\033[0m"
        )
        print(ref_err.decode())

    if verbose:
        if ref_out:
            print("Reference stdout:")
            sys.stdout.buffer.write(ref_out)
            sys.stdout.buffer.flush()

        if ref_err:
            print("Reference stderr:")
            sys.stderr.buffer.write(ref_err)
            sys.stderr.buffer.flush()

    if plg_code == 0:
        print(f"\033[31;1;4mGenerated plugin output for {test_case_name!r}\033[0m")
    else:
        print(
            f"\033[31;1;4mFailed to generate plugin output for {test_case_name!r}\033[0m"
        )
        print(plg_err.decode())

    if verbose:
        if plg_out:
            print("Plugin stdout:")
            sys.stdout.buffer.write(plg_out)
            sys.stdout.buffer.flush()

        if plg_err:
            print("Plugin stderr:")
            sys.stderr.buffer.write(plg_err)
            sys.stderr.buffer.flush()

    if plg_code_pyd == 0:
        print(
            f"\033[31;1;4mGenerated plugin (pydantic compatible) output for {test_case_name!r}\033[0m"
        )
    else:
        print(
            f"\033[31;1;4mFailed to generate plugin (pydantic compatible) output for {test_case_name!r}\033[0m"
        )
        print(plg_err_pyd.decode())

    if verbose:
        if plg_out_pyd:
            print("Plugin stdout:")
            sys.stdout.buffer.write(plg_out_pyd)
            sys.stdout.buffer.flush()

        if plg_err_pyd:
            print("Plugin stderr:")
            sys.stderr.buffer.write(plg_err_pyd)
            sys.stderr.buffer.flush()

    return max(ref_code, plg_code, plg_code_pyd)


HELP = "\n".join(
    (
        "Usage: python generate.py [-h] [-v] [DIRECTORIES or NAMES]",
        "Generate python classes for standard tests.",
        "",
        "DIRECTORIES    One or more relative or absolute directories of test-cases to generate classes for.",
        "               python generate.py inputs/bool inputs/double inputs/enum",
        "",
        "NAMES          One or more test-case names to generate classes for.",
        "               python generate.py bool double enums",
    )
)


def main():
    if set(sys.argv).intersection({"-h", "--help"}):
        print(HELP)
        return
    if sys.argv[1:2] == ["-v"]:
        verbose = True
        whitelist = set(sys.argv[2:])
    else:
        verbose = False
        whitelist = set(sys.argv[1:])

    if platform.system() == "Windows":
        # For Python versions prior to 3.8, the loop policy needs to be set explicitly:
        # https://docs.python.org/3/library/asyncio-policy.html#asyncio.DefaultEventLoopPolicy
        try:
            asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())
        except AttributeError:
            # Python < 3.7 does not have asyncio.WindowsProactorEventLoopPolicy.
            asyncio.get_event_loop_policy().set_event_loop(asyncio.ProactorEventLoop())

    try:
        asyncio.run(generate(whitelist, verbose))
    except AttributeError:
        # Compatibility code for Python < 3.7.
        asyncio.get_event_loop().run_until_complete(generate(whitelist, verbose))


if __name__ == "__main__":
    main()
tests/grpc/__init__.py (new file, empty)
tests/grpc/test_grpclib_client.py (new file, 294 lines)
@@ -0,0 +1,294 @@
import asyncio
import uuid

import grpclib
import grpclib.client
import grpclib.metadata
import grpclib.server
import pytest
from grpclib.testing import ChannelFor

from betterproto.grpc.util.async_channel import AsyncChannel
from tests.output_betterproto.service import (
    DoThingRequest,
    DoThingResponse,
    GetThingRequest,
    TestStub as ThingServiceClient,
)

from .thing_service import ThingService


async def _test_client(client: ThingServiceClient, name="clean room", **kwargs):
    response = await client.do_thing(DoThingRequest(name=name), **kwargs)
    assert response.names == [name]


def _assert_request_meta_received(deadline, metadata):
    def server_side_test(stream):
        assert stream.deadline._timestamp == pytest.approx(deadline._timestamp, 1), (
            "The provided deadline should be received serverside"
        )
        assert stream.metadata["authorization"] == metadata["authorization"], (
            "The provided authorization metadata should be received serverside"
        )

    return server_side_test


@pytest.fixture
def handler_trailer_only_unauthenticated():
    async def handler(stream: grpclib.server.Stream):
        await stream.recv_message()
        await stream.send_initial_metadata()
        await stream.send_trailing_metadata(status=grpclib.Status.UNAUTHENTICATED)

    return handler


@pytest.mark.asyncio
async def test_simple_service_call():
    async with ChannelFor([ThingService()]) as channel:
        await _test_client(ThingServiceClient(channel))


@pytest.mark.asyncio
async def test_trailer_only_error_unary_unary(
    mocker, handler_trailer_only_unauthenticated
):
    service = ThingService()
    mocker.patch.object(
        service,
        "do_thing",
        side_effect=handler_trailer_only_unauthenticated,
        autospec=True,
    )
    async with ChannelFor([service]) as channel:
        with pytest.raises(grpclib.exceptions.GRPCError) as e:
            await ThingServiceClient(channel).do_thing(DoThingRequest(name="something"))
        assert e.value.status == grpclib.Status.UNAUTHENTICATED


@pytest.mark.asyncio
async def test_trailer_only_error_stream_unary(
    mocker, handler_trailer_only_unauthenticated
):
    service = ThingService()
    mocker.patch.object(
        service,
        "do_many_things",
        side_effect=handler_trailer_only_unauthenticated,
        autospec=True,
    )
    async with ChannelFor([service]) as channel:
        with pytest.raises(grpclib.exceptions.GRPCError) as e:
            await ThingServiceClient(channel).do_many_things(
                do_thing_request_iterator=[DoThingRequest(name="something")]
            )
            await _test_client(ThingServiceClient(channel))
        assert e.value.status == grpclib.Status.UNAUTHENTICATED


@pytest.mark.asyncio
async def test_service_call_mutable_defaults(mocker):
    async with ChannelFor([ThingService()]) as channel:
        client = ThingServiceClient(channel)
        spy = mocker.spy(client, "_unary_unary")
        await _test_client(client)
        comments = spy.call_args_list[-1].args[1].comments
        await _test_client(client)
        assert spy.call_args_list[-1].args[1].comments is not comments


@pytest.mark.asyncio
async def test_service_call_with_upfront_request_params():
    # Setting deadline
    deadline = grpclib.metadata.Deadline.from_timeout(22)
    metadata = {"authorization": "12345"}
    async with ChannelFor(
        [ThingService(test_hook=_assert_request_meta_received(deadline, metadata))]
    ) as channel:
        await _test_client(
            ThingServiceClient(channel, deadline=deadline, metadata=metadata)
        )

    # Setting timeout
    timeout = 99
    deadline = grpclib.metadata.Deadline.from_timeout(timeout)
    metadata = {"authorization": "12345"}
    async with ChannelFor(
        [ThingService(test_hook=_assert_request_meta_received(deadline, metadata))]
    ) as channel:
        await _test_client(
            ThingServiceClient(channel, timeout=timeout, metadata=metadata)
        )


@pytest.mark.asyncio
async def test_service_call_lower_level_with_overrides():
    THING_TO_DO = "get milk"

    # Setting deadline
    deadline = grpclib.metadata.Deadline.from_timeout(22)
    metadata = {"authorization": "12345"}
    kwarg_deadline = grpclib.metadata.Deadline.from_timeout(28)
    kwarg_metadata = {"authorization": "12345"}
    async with ChannelFor(
        [ThingService(test_hook=_assert_request_meta_received(deadline, metadata))]
    ) as channel:
        client = ThingServiceClient(channel, deadline=deadline, metadata=metadata)
        response = await client._unary_unary(
            "/service.Test/DoThing",
            DoThingRequest(THING_TO_DO),
            DoThingResponse,
            deadline=kwarg_deadline,
            metadata=kwarg_metadata,
        )
        assert response.names == [THING_TO_DO]

    # Setting timeout
    timeout = 99
    deadline = grpclib.metadata.Deadline.from_timeout(timeout)
    metadata = {"authorization": "12345"}
    kwarg_timeout = 9000
    kwarg_deadline = grpclib.metadata.Deadline.from_timeout(kwarg_timeout)
    kwarg_metadata = {"authorization": "09876"}
    async with ChannelFor(
        [
            ThingService(
                test_hook=_assert_request_meta_received(kwarg_deadline, kwarg_metadata),
            )
        ]
    ) as channel:
        client = ThingServiceClient(channel, deadline=deadline, metadata=metadata)
        response = await client._unary_unary(
            "/service.Test/DoThing",
            DoThingRequest(THING_TO_DO),
            DoThingResponse,
            timeout=kwarg_timeout,
            metadata=kwarg_metadata,
        )
        assert response.names == [THING_TO_DO]


@pytest.mark.asyncio
@pytest.mark.parametrize(
    ("overrides_gen",),
    [
        (lambda: dict(timeout=10),),
        (lambda: dict(deadline=grpclib.metadata.Deadline.from_timeout(10)),),
        (lambda: dict(metadata={"authorization": str(uuid.uuid4())}),),
        (lambda: dict(timeout=20, metadata={"authorization": str(uuid.uuid4())}),),
    ],
)
async def test_service_call_high_level_with_overrides(mocker, overrides_gen):
    overrides = overrides_gen()
    request_spy = mocker.spy(grpclib.client.Channel, "request")
    name = str(uuid.uuid4())
    defaults = dict(
        timeout=99,
        deadline=grpclib.metadata.Deadline.from_timeout(99),
        metadata={"authorization": name},
    )

    async with ChannelFor(
        [
            ThingService(
                test_hook=_assert_request_meta_received(
                    deadline=grpclib.metadata.Deadline.from_timeout(
                        overrides.get("timeout", 99)
                    ),
                    metadata=overrides.get("metadata", defaults.get("metadata")),
                )
            )
        ]
    ) as channel:
        client = ThingServiceClient(channel, **defaults)
        await _test_client(client, name=name, **overrides)
        assert request_spy.call_count == 1

    # For python < 3.8, request_spy.call_args.kwargs does not work.
    _, request_spy_call_kwargs = request_spy.call_args_list[0]

    # Ensure all overrides were successful.
    for key, value in overrides.items():
        assert key in request_spy_call_kwargs
        assert request_spy_call_kwargs[key] == value

    # Ensure default values were retained.
    for key in set(defaults.keys()) - set(overrides.keys()):
        assert key in request_spy_call_kwargs
        assert request_spy_call_kwargs[key] == defaults[key]


@pytest.mark.asyncio
async def test_async_gen_for_unary_stream_request():
    thing_name = "my milkshakes"

    async with ChannelFor([ThingService()]) as channel:
        client = ThingServiceClient(channel)
        expected_versions = [5, 4, 3, 2, 1]
        async for response in client.get_thing_versions(
            GetThingRequest(name=thing_name)
        ):
            assert response.name == thing_name
            assert response.version == expected_versions.pop()


@pytest.mark.asyncio
async def test_async_gen_for_stream_stream_request():
    some_things = ["cake", "cricket", "coral reef"]
    more_things = ["ball", "that", "56kmodem", "liberal humanism", "cheesesticks"]
    expected_things = (*some_things, *more_things)

    async with ChannelFor([ThingService()]) as channel:
        client = ThingServiceClient(channel)
        # Use an AsyncChannel to decouple sending and receiving; it'll send
        # some_things immediately, and we'll use it to send more_things later,
        # after receiving some results.
        request_chan = AsyncChannel()
        send_initial_requests = asyncio.ensure_future(
            request_chan.send_from(GetThingRequest(name) for name in some_things)
        )
        response_index = 0
        async for response in client.get_different_things(request_chan):
            assert response.name == expected_things[response_index]
            assert response.version == response_index + 1
            response_index += 1
            if more_things:
                # Send some more requests as we receive responses to be sure
                # coordination of send/receive events doesn't matter.
                await request_chan.send(GetThingRequest(more_things.pop(0)))
            elif not send_initial_requests.done():
                # Make sure the sending task is completed.
                await send_initial_requests
            else:
                # No more things to send; make sure the channel is closed.
                request_chan.close()
        assert response_index == len(expected_things), (
            "Didn't receive all expected responses"
        )


@pytest.mark.asyncio
async def test_stream_unary_with_empty_iterable():
    things = []  # empty

    async with ChannelFor([ThingService()]) as channel:
        client = ThingServiceClient(channel)
        requests = [DoThingRequest(name) for name in things]
        response = await client.do_many_things(requests)
        assert len(response.names) == 0


@pytest.mark.asyncio
async def test_stream_stream_with_empty_iterable():
    things = []  # empty

    async with ChannelFor([ThingService()]) as channel:
        client = ThingServiceClient(channel)
        requests = [GetThingRequest(name) for name in things]
        responses = [
            response async for response in client.get_different_things(requests)
        ]
        assert len(responses) == 0
tests/grpc/test_stream_stream.py (new file, 99 lines)
@@ -0,0 +1,99 @@
import asyncio
from dataclasses import dataclass
from typing import AsyncIterator

import pytest

import betterproto
from betterproto.grpc.util.async_channel import AsyncChannel


@dataclass
class Message(betterproto.Message):
    body: str = betterproto.string_field(1)


@pytest.fixture
def expected_responses():
    return [Message("Hello world 1"), Message("Hello world 2"), Message("Done")]


class ClientStub:
    async def connect(self, requests: AsyncIterator):
        await asyncio.sleep(0.1)
        async for request in requests:
            await asyncio.sleep(0.1)
            yield request
        await asyncio.sleep(0.1)
        yield Message("Done")


async def to_list(generator: AsyncIterator):
    return [value async for value in generator]


@pytest.fixture
def client():
    # channel = Channel(host='127.0.0.1', port=50051)
    # return ClientStub(channel)
    return ClientStub()


@pytest.mark.asyncio
async def test_send_from_before_connect_and_close_automatically(
    client, expected_responses
):
    requests = AsyncChannel()
    await requests.send_from(
        [Message(body="Hello world 1"), Message(body="Hello world 2")], close=True
    )
    responses = client.connect(requests)

    assert await to_list(responses) == expected_responses


@pytest.mark.asyncio
async def test_send_from_after_connect_and_close_automatically(
    client, expected_responses
):
    requests = AsyncChannel()
    responses = client.connect(requests)
    await requests.send_from(
        [Message(body="Hello world 1"), Message(body="Hello world 2")], close=True
    )

    assert await to_list(responses) == expected_responses


@pytest.mark.asyncio
async def test_send_from_close_manually_immediately(client, expected_responses):
    requests = AsyncChannel()
    responses = client.connect(requests)
    await requests.send_from(
        [Message(body="Hello world 1"), Message(body="Hello world 2")], close=False
    )
    requests.close()

    assert await to_list(responses) == expected_responses


@pytest.mark.asyncio
async def test_send_individually_and_close_before_connect(client, expected_responses):
    requests = AsyncChannel()
    await requests.send(Message(body="Hello world 1"))
    await requests.send(Message(body="Hello world 2"))
    requests.close()
    responses = client.connect(requests)

    assert await to_list(responses) == expected_responses


@pytest.mark.asyncio
async def test_send_individually_and_close_after_connect(client, expected_responses):
    requests = AsyncChannel()
    await requests.send(Message(body="Hello world 1"))
    await requests.send(Message(body="Hello world 2"))
    responses = client.connect(requests)
    requests.close()

    assert await to_list(responses) == expected_responses
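The ClientStub above stands in for a generated client: it consumes the request channel and echoes messages back. Besides send_from(), an AsyncChannel can be fed from a concurrent task with send(); close() is synchronous and ends iteration for consumers. A minimal sketch, not part of this diff — the helper names are hypothetical:

# Sketch only: one task produces into the channel while another drains it.
async def _produce(channel: AsyncChannel):
    for body in ("Hello world 1", "Hello world 2"):
        await channel.send(Message(body=body))
    channel.close()  # ends iteration for any consumer


async def _consume():
    requests = AsyncChannel()
    producer = asyncio.ensure_future(_produce(requests))
    received = [message async for message in requests]  # stops after close()
    await producer
    return received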
85 tests/grpc/thing_service.py Normal file
@@ -0,0 +1,85 @@
from typing import Dict

import grpclib
import grpclib.server

from tests.output_betterproto.service import (
    DoThingRequest,
    DoThingResponse,
    GetThingRequest,
    GetThingResponse,
)


class ThingService:
    def __init__(self, test_hook=None):
        # This lets us pass assertions to the servicer ;)
        self.test_hook = test_hook

    async def do_thing(
        self, stream: "grpclib.server.Stream[DoThingRequest, DoThingResponse]"
    ):
        request = await stream.recv_message()
        if self.test_hook is not None:
            self.test_hook(stream)
        await stream.send_message(DoThingResponse([request.name]))

    async def do_many_things(
        self, stream: "grpclib.server.Stream[DoThingRequest, DoThingResponse]"
    ):
        thing_names = [request.name async for request in stream]
        if self.test_hook is not None:
            self.test_hook(stream)
        await stream.send_message(DoThingResponse(thing_names))

    async def get_thing_versions(
        self, stream: "grpclib.server.Stream[GetThingRequest, GetThingResponse]"
    ):
        request = await stream.recv_message()
        if self.test_hook is not None:
            self.test_hook(stream)
        for version_num in range(1, 6):
            await stream.send_message(
                GetThingResponse(name=request.name, version=version_num)
            )

    async def get_different_things(
        self, stream: "grpclib.server.Stream[GetThingRequest, GetThingResponse]"
    ):
        if self.test_hook is not None:
            self.test_hook(stream)
        # Respond to each input item immediately
        response_num = 0
        async for request in stream:
            response_num += 1
            await stream.send_message(
                GetThingResponse(name=request.name, version=response_num)
            )

    def __mapping__(self) -> Dict[str, "grpclib.const.Handler"]:
        return {
            "/service.Test/DoThing": grpclib.const.Handler(
                self.do_thing,
                grpclib.const.Cardinality.UNARY_UNARY,
                DoThingRequest,
                DoThingResponse,
            ),
            "/service.Test/DoManyThings": grpclib.const.Handler(
                self.do_many_things,
                grpclib.const.Cardinality.STREAM_UNARY,
                DoThingRequest,
                DoThingResponse,
            ),
            "/service.Test/GetThingVersions": grpclib.const.Handler(
                self.get_thing_versions,
                grpclib.const.Cardinality.UNARY_STREAM,
                GetThingRequest,
                GetThingResponse,
            ),
            "/service.Test/GetDifferentThings": grpclib.const.Handler(
                self.get_different_things,
                grpclib.const.Cardinality.STREAM_STREAM,
                GetThingRequest,
                GetThingResponse,
            ),
        }
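ThingService needs only __mapping__() to be servable by grpclib; the tests mount it on an in-process ChannelFor, but the same object can be hosted on a real socket. A minimal sketch, not part of this diff — host and port are arbitrary:

# Sketch only: serving ThingService over TCP with grpclib's Server.
import asyncio

from grpclib.server import Server


async def serve(host: str = "127.0.0.1", port: int = 50051):
    server = Server([ThingService()])
    await server.start(host, port)
    await server.wait_closed()


if __name__ == "__main__":
    asyncio.run(serve())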
3 tests/inputs/bool/bool.json Normal file
@@ -0,0 +1,3 @@
{
  "value": true
}
7 tests/inputs/bool/bool.proto Normal file
@@ -0,0 +1,7 @@
syntax = "proto3";

package bool;

message Test {
  bool value = 1;
}
24 tests/inputs/bool/test_bool.py Normal file
@@ -0,0 +1,24 @@
import pytest

from tests.output_betterproto.bool import Test
from tests.output_betterproto_pydantic.bool import Test as TestPyd


def test_value():
    message = Test()
    assert not message.value, "Boolean is False by default"


def test_pydantic_no_value():
    message = TestPyd()
    assert not message.value, "Boolean is False by default"


def test_pydantic_value():
    message = TestPyd(value=False)
    assert not message.value


def test_pydantic_bad_value():
    with pytest.raises(ValueError):
        TestPyd(value=123)
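For reference, a sketch — not part of this diff — of how the bool.json fixture round-trips through the generated message, assuming betterproto's from_json/to_dict helpers:

# Sketch only: parse the fixture JSON and read the field back.
from tests.output_betterproto.bool import Test

message = Test().from_json('{"value": true}')
assert message.value is True
assert message.to_dict() == {"value": True}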
tests/inputs/bytes/bytes.proto
@@ -1,5 +1,7 @@
 syntax = "proto3";
 
+package bytes;
+
 message Test {
   bytes data = 1;
 }
4 tests/inputs/casing/casing.json Normal file
@@ -0,0 +1,4 @@
{
  "camelCase": 1,
  "snakeCase": "ONE"
}
20 tests/inputs/casing/casing.proto Normal file
@@ -0,0 +1,20 @@
syntax = "proto3";

package casing;

enum my_enum {
  ZERO = 0;
  ONE = 1;
  TWO = 2;
}

message Test {
  int32 camelCase = 1;
  my_enum snake_case = 2;
  snake_case_message snake_case_message = 3;
  int32 UPPERCASE = 4;
}

message snake_case_message {

}
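The casing fixtures check betterproto's name conversion: camelCase and UPPERCASE proto identifiers become snake_case Python attributes, while the JSON form keeps camelCase. A sketch, not part of this diff — the generated module path and PascalCase enum name are assumptions:

# Sketch only: snake_case attribute access over the camelCase JSON fixture.
from tests.output_betterproto.casing import MyEnum, Test

message = Test().from_json('{"camelCase": 1, "snakeCase": "ONE"}')
assert message.camel_case == 1
assert message.snake_case == MyEnum.ONE
assert message.uppercase == 0  # "UPPERCASE" field, default int32 value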
Some files were not shown because too many files have changed in this diff.