Compare commits

96 Commits
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | bde6d06835 | ||
|  | eaa4f7f5d9 | ||
|  | cdddb2f42a | ||
|  | d21cd6e391 | ||
|  | af7115429a | ||
|  | 0d9387abec | ||
|  | f4ebcb0f65 | ||
|  | 81711d2427 | ||
|  | e3135ce766 | ||
|  | 9532844929 | ||
|  | 0c5d1ff868 | ||
|  | 5fb4b4b7ff | ||
|  | 4f820b4a6a | ||
|  | 75a4c230da | ||
|  | 5c9a12e2f6 | ||
|  | e1ccd540a9 | ||
|  | 4e78fe9579 | ||
|  | 50bb67bf5d | ||
|  | 1ecbf1a125 | ||
|  | 0814729c5a | ||
|  | f7aa6150e2 | ||
|  | 159c30ddd8 | ||
|  | c8229e53a7 | ||
|  | 3185c67098 | ||
|  | 52eea5ce4c | ||
|  | 4b6f55dce5 | ||
|  | fdbe0205f1 | ||
|  | 09f821921f | ||
|  | a757da1b29 | ||
|  | e2d672a422 | ||
|  | 63f5191f02 | ||
|  | 87f4b34930 | ||
|  | 2c360a55f2 | ||
|  | 04dce524aa | ||
|  | 8edec81b11 | ||
|  | 32c8e77274 | ||
|  | d9fa6d2dd3 | ||
|  | c88edfd093 | ||
|  | a46979c8a6 | ||
|  | 83e13aa606 | ||
|  | 3ca75dadd7 | ||
|  | 5d2f3a2cd9 | ||
|  | 65c1f366ef | ||
|  | 34c34bd15a | ||
|  | fb54917f2c | ||
|  | 1a95a7988e | ||
|  | 76db2f153e | ||
|  | 8567892352 | ||
|  | 3105e952ea | ||
|  | 7c8d47de6d | ||
|  | c00e2aef19 | ||
|  | fdf3b2e764 | ||
|  | f7c2fd1194 | ||
|  | d8abb850f8 | ||
|  | d7ba27de2b | ||
|  | 57523a9e7f | ||
|  | e5e61c873c | ||
|  | 9fd1c058e6 | ||
|  | d336153845 | ||
|  | 9a45ea9f16 | ||
|  | bb7f5229fb | ||
|  | f7769a19d1 | ||
|  | d31f90be6b | ||
|  | 919b0a6a7d | ||
|  | 7ecf3fe0e6 | ||
|  | ff14948a4e | ||
|  | cb00273257 | ||
|  | 973d68a154 | ||
|  | ab9857b5fd | ||
|  | 2f658df666 | ||
|  | b813d1cedb | ||
|  | f5ce1b7108 | ||
|  | 62fc421d60 | ||
|  | eeed1c0db7 | ||
|  | 2a3e1e1827 | ||
|  | 53ce1255d3 | ||
|  | e8991339e9 | ||
|  | 4556d67503 | ||
|  | f087c6c9bd | ||
|  | eec24e4ee8 | ||
|  | 91111ab7d8 | ||
|  | fcff3dff74 | ||
|  | 5c4969ff1c | ||
|  | ed33a48d64 | ||
|  | b354aeb692 | ||
|  | 6d9e3fc580 | ||
|  | 72de590651 | ||
|  | 3c70f21074 | ||
|  | 4b7d5d3de4 | ||
|  | 142e976c40 | ||
|  | 382fabb96c | ||
|  | 18598e77d4 | ||
|  | 6871053ab2 | ||
|  | 5bb6931df7 | ||
|  | e8a9960b73 | ||
|  | f25c66777a | ||

.github/workflows/ci.yml (66 changes)
							| @@ -10,63 +10,65 @@ jobs: | |||||||
|     name: Consult black on python formatting |     name: Consult black on python formatting | ||||||
|  |  | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v1 |       - uses: actions/checkout@v2 | ||||||
|       - uses: actions/setup-python@v1 |       - uses: actions/setup-python@v2 | ||||||
|         with: |         with: | ||||||
|           python-version: 3.7 |           python-version: 3.7 | ||||||
|       - uses: dschep/install-pipenv-action@v1 |       - uses: Gr1N/setup-poetry@v2 | ||||||
|  |       - uses: actions/cache@v2 | ||||||
|  |         with: | ||||||
|  |           path: ~/.cache/pypoetry/virtualenvs | ||||||
|  |           key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }} | ||||||
|  |           restore-keys: | | ||||||
|  |             ${{ runner.os }}-poetry- | ||||||
|       - name: Install dependencies |       - name: Install dependencies | ||||||
|         run: | |         run: poetry install | ||||||
|           pipenv install --dev --python ${pythonLocation}/python |  | ||||||
|       - name: Run black |       - name: Run black | ||||||
|         run: | |         run: make check-style | ||||||
|           pipenv run black . --check --diff --exclude tests/output_ |  | ||||||
|  |  | ||||||
|   run-tests: |   run-tests: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|  |  | ||||||
|  |     name: Run tests with tox | ||||||
|  |  | ||||||
|     strategy: |     strategy: | ||||||
|       matrix: |       matrix: | ||||||
|         python-version: [ '3.6', '3.7' ] |         python-version: [ '3.6', '3.7', '3.8'] | ||||||
|  |  | ||||||
|     name: Python ${{ matrix.python-version }} test |  | ||||||
|  |  | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v1 |       - uses: actions/checkout@v2 | ||||||
|       - uses: actions/setup-python@v1 |       - uses: actions/setup-python@v2 | ||||||
|         with: |         with: | ||||||
|           python-version: ${{ matrix.python-version }} |           python-version: ${{ matrix.python-version }} | ||||||
|       - uses: dschep/install-pipenv-action@v1 |       - uses: Gr1N/setup-poetry@v2 | ||||||
|  |       - uses: actions/cache@v2 | ||||||
|  |         with: | ||||||
|  |           path: ~/.cache/pypoetry/virtualenvs | ||||||
|  |           key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }} | ||||||
|  |           restore-keys: | | ||||||
|  |             ${{ runner.os }}-poetry- | ||||||
|       - name: Install dependencies |       - name: Install dependencies | ||||||
|         run: | |         run: | | ||||||
|           sudo apt install protobuf-compiler libprotobuf-dev |           sudo apt install protobuf-compiler libprotobuf-dev | ||||||
|           pipenv install --dev --python ${pythonLocation}/python |           poetry install | ||||||
|       - name: Run tests |       - name: Run tests | ||||||
|         run: | |         run: | | ||||||
|           cp .env.default .env |           make generate | ||||||
|           pipenv run pip install -e . |           make test | ||||||
|           pipenv run generate |  | ||||||
|           pipenv run test |  | ||||||
|  |  | ||||||
|   build-release: |   build-release: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|  |  | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v1 |       - uses: actions/checkout@v2 | ||||||
|       - uses: actions/setup-python@v1 |       - uses: actions/setup-python@v2 | ||||||
|         with: |         with: | ||||||
|           python-version: 3.7 |           python-version: 3.7 | ||||||
|       - uses: dschep/install-pipenv-action@v1 |       - uses: Gr1N/setup-poetry@v2 | ||||||
|       - name: Install dependencies |  | ||||||
|         run: | |  | ||||||
|           sudo apt install protobuf-compiler libprotobuf-dev |  | ||||||
|           pipenv install --dev --python ${pythonLocation}/python |  | ||||||
|       - name: Build package |       - name: Build package | ||||||
|  |         run: poetry build | ||||||
|  |       - name: Publish package to PyPI | ||||||
|         if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags') |         if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags') | ||||||
|         run: pipenv run python setup.py sdist |         run: poetry publish -n | ||||||
|       - name: Publish package |         env: | ||||||
|         if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags') |           POETRY_PYPI_TOKEN_PYPI: ${{ secrets.pypi }} | ||||||
|         uses: pypa/gh-action-pypi-publish@v1.0.0a0 |  | ||||||
|         with: |  | ||||||
|           user: __token__ |  | ||||||
|           password: ${{ secrets.pypi }} |  | ||||||
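
The updated workflow also caches Poetry's virtualenvs: the cache key combines the runner OS with a hash of `poetry.lock`, and `restore-keys` falls back to any cache sharing the `-poetry-` prefix when the lock file changes. A rough Python sketch of that lookup behaviour (purely illustrative; this is not how GitHub Actions implements `hashFiles` or cache restore):

```python
import hashlib
import platform
from pathlib import Path
from typing import List, Optional


def cache_key(lockfile: str = "poetry.lock") -> str:
    # The key changes whenever poetry.lock changes, so stale virtualenvs are not reused.
    digest = hashlib.sha256(Path(lockfile).read_bytes()).hexdigest()
    return f"{platform.system()}-poetry-{digest}"


def restore(available: List[str], key: str, prefix: str) -> Optional[str]:
    if key in available:  # exact hit: dependencies unchanged since the cache was saved
        return key
    for candidate in available:  # restore-keys fallback: any older cache with the prefix
        if candidate.startswith(prefix):
            return candidate
    return None  # full miss: `poetry install` starts from scratch
```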

.gitignore (6 changes)
							| @@ -1,3 +1,5 @@ | |||||||
|  | .coverage | ||||||
|  | .DS_Store | ||||||
| .env | .env | ||||||
| .vscode/settings.json | .vscode/settings.json | ||||||
| .mypy_cache | .mypy_cache | ||||||
| @@ -9,4 +11,6 @@ betterproto/tests/output_* | |||||||
| dist | dist | ||||||
| **/*.egg-info | **/*.egg-info | ||||||
| output | output | ||||||
| .idea | .idea | ||||||
|  | .DS_Store | ||||||
|  | .tox | ||||||

CHANGELOG.md (22 changes)
							| @@ -5,6 +5,28 @@ All notable changes to this project will be documented in this file. | |||||||
| The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), | ||||||
| and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). | ||||||
|  |  | ||||||
|  | - Versions suffixed with `b*` are in `beta` and can be installed with `pip install --pre betterproto`. | ||||||
|  |  | ||||||
|  | ## [2.0.0b1] - 2020-07-04 | ||||||
|  |  | ||||||
|  | [Upgrade Guide](./docs/upgrading.md)  | ||||||
|  |  | ||||||
|  | > Several bugfixes and improvements required or will require small breaking changes, necessitating a new version. | ||||||
|  | > `2.0.0` will be released once the interface is stable. | ||||||
|  |  | ||||||
|  | - Add support for gRPC  and **stream-stream** [#83](https://github.com/danielgtaylor/python-betterproto/pull/83) | ||||||
|  | - Switch from `pipenv` to `poetry` for development [#75](https://github.com/danielgtaylor/python-betterproto/pull/75) | ||||||
|  | - Fix No arguments are generated for stub methods when using import with proto definition  | ||||||
|  | - Fix two packages with the same name suffix should not cause naming conflict [#25](https://github.com/danielgtaylor/python-betterproto/issues/25) | ||||||
|  |  | ||||||
|  | - Fix Import child package from root [#57](https://github.com/danielgtaylor/python-betterproto/issues/57) | ||||||
|  | - Fix Import child package from package [#58](https://github.com/danielgtaylor/python-betterproto/issues/58) | ||||||
|  | - Fix Import parent package from child package [#59](https://github.com/danielgtaylor/python-betterproto/issues/59) | ||||||
|  | - Fix Import root package from child package [#60](https://github.com/danielgtaylor/python-betterproto/issues/60) | ||||||
|  | - Fix Import root package from root [#61](https://github.com/danielgtaylor/python-betterproto/issues/61) | ||||||
|  |  | ||||||
|  | - Fix ALL_CAPS message fields are parsed incorrectly. [#11](https://github.com/danielgtaylor/python-betterproto/issues/11) | ||||||
|  |  | ||||||
| ## [1.2.5] - 2020-04-27 | ## [1.2.5] - 2020-04-27 | ||||||
|  |  | ||||||
| - Add .j2 suffix to python template names to avoid confusing certain build tools [#72](https://github.com/danielgtaylor/python-betterproto/pull/72) | - Add .j2 suffix to python template names to avoid confusing certain build tools [#72](https://github.com/danielgtaylor/python-betterproto/pull/72) | ||||||

Makefile (42 changes, new file)
							| @@ -0,0 +1,42 @@ | |||||||
|  | .PHONY: help setup generate test types format clean plugin full-test check-style | ||||||
|  |  | ||||||
|  | help:               ## - Show this help. | ||||||
|  | 	@fgrep -h "##" $(MAKEFILE_LIST) | fgrep -v fgrep | sed -e 's/\\$$//' | sed -e 's/##//' | ||||||
|  |  | ||||||
|  | # Dev workflow tasks | ||||||
|  |  | ||||||
|  | generate:           ## - Generate test cases (do this once before running test) | ||||||
|  | 	poetry run ./betterproto/tests/generate.py | ||||||
|  |  | ||||||
|  | test:               ## - Run tests | ||||||
|  | 	poetry run pytest --cov betterproto | ||||||
|  |  | ||||||
|  | types:              ## - Check types with mypy | ||||||
|  | 	poetry run mypy betterproto --ignore-missing-imports | ||||||
|  |  | ||||||
|  | format:             ## - Apply black formatting to source code | ||||||
|  | 	poetry run black . --exclude tests/output_ | ||||||
|  |  | ||||||
|  | clean:              ## - Clean out generated files from the workspace | ||||||
|  | 	rm -rf .coverage \ | ||||||
|  | 	       .mypy_cache \ | ||||||
|  | 	       .pytest_cache \ | ||||||
|  | 	       dist \ | ||||||
|  | 	       **/__pycache__ \ | ||||||
|  | 	       betterproto/tests/output_* | ||||||
|  |  | ||||||
|  | # Manual testing | ||||||
|  |  | ||||||
|  | # By default write plugin output to a directory called output | ||||||
|  | o=output | ||||||
|  | plugin:             ## - Execute the protoc plugin, with output write to `output` or the value passed to `-o` | ||||||
|  | 	mkdir -p $(o) | ||||||
|  | 	protoc --plugin=protoc-gen-custom=betterproto/plugin.py $(i) --custom_out=$(o) | ||||||
|  |  | ||||||
|  | # CI tasks | ||||||
|  |  | ||||||
|  | full-test: generate ## - Run full testing sequence with multiple pythons | ||||||
|  | 	poetry run tox | ||||||
|  |  | ||||||
|  | check-style:        ## - Check if code style is correct | ||||||
|  | 	poetry run black . --check --diff --exclude tests/output_ | ||||||
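
The `help` target above makes the Makefile self-documenting by grepping for the `##` comments next to each target. A small Python sketch of the same idea (illustrative only; the Makefile itself uses the fgrep/sed pipeline shown above):

```python
# List every Makefile target annotated with a "##" comment alongside its description,
# skipping the help recipe itself (mirroring `fgrep -v fgrep`).
from pathlib import Path


def makefile_help(path: str = "Makefile") -> None:
    for line in Path(path).read_text().splitlines():
        if "##" in line and "MAKEFILE_LIST" not in line:
            target, _, description = line.partition("##")
            print(f"{target.strip():<20}{description.strip(' -')}")


if __name__ == "__main__":
    makefile_help()
```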
							
								
								
									
Pipfile (32 changes)
							| @@ -1,32 +0,0 @@ | |||||||
| [[source]] |  | ||||||
| name = "pypi" |  | ||||||
| url = "https://pypi.org/simple" |  | ||||||
| verify_ssl = true |  | ||||||
|  |  | ||||||
| [dev-packages] |  | ||||||
| flake8 = "*" |  | ||||||
| mypy = "*" |  | ||||||
| isort = "*" |  | ||||||
| pytest = "*" |  | ||||||
| pytest-asyncio = "*" |  | ||||||
| rope = "*" |  | ||||||
|  |  | ||||||
| [packages] |  | ||||||
| protobuf = "*" |  | ||||||
| jinja2 = "*" |  | ||||||
| grpclib = "*" |  | ||||||
| stringcase = "*" |  | ||||||
| black = "*" |  | ||||||
| backports-datetime-fromisoformat = "*" |  | ||||||
| dataclasses = "*" |  | ||||||
|  |  | ||||||
| [requires] |  | ||||||
| python_version = "3.6" |  | ||||||
|  |  | ||||||
| [scripts] |  | ||||||
| plugin = "protoc --plugin=protoc-gen-custom=betterproto/plugin.py --custom_out=output" |  | ||||||
| generate = "python betterproto/tests/generate.py" |  | ||||||
| test = "pytest ./betterproto/tests" |  | ||||||
|  |  | ||||||
| [pipenv] |  | ||||||
| allow_prereleases = true |  | ||||||

Pipfile.lock (444 changes, generated)
							| @@ -1,444 +0,0 @@ | |||||||
| { |  | ||||||
|     "_meta": { |  | ||||||
|         "hash": { |  | ||||||
|             "sha256": "44ae793965dc2b6ec17f0435a388846248b8a703cf857470b66af84227535950" |  | ||||||
|         }, |  | ||||||
|         "pipfile-spec": 6, |  | ||||||
|         "requires": { |  | ||||||
|             "python_version": "3.6" |  | ||||||
|         }, |  | ||||||
|         "sources": [ |  | ||||||
|             { |  | ||||||
|                 "name": "pypi", |  | ||||||
|                 "url": "https://pypi.org/simple", |  | ||||||
|                 "verify_ssl": true |  | ||||||
|             } |  | ||||||
|         ] |  | ||||||
|     }, |  | ||||||
|     "default": { |  | ||||||
|         "appdirs": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", |  | ||||||
|                 "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128" |  | ||||||
|             ], |  | ||||||
|             "version": "==1.4.4" |  | ||||||
|         }, |  | ||||||
|         "attrs": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", |  | ||||||
|                 "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" |  | ||||||
|             ], |  | ||||||
|             "version": "==19.3.0" |  | ||||||
|         }, |  | ||||||
|         "backports-datetime-fromisoformat": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:9577a2a9486cd7383a5f58b23bb8e81cf0821dbbc0eb7c87d3fa198c1df40f5c" |  | ||||||
|             ], |  | ||||||
|             "index": "pypi", |  | ||||||
|             "version": "==1.0.0" |  | ||||||
|         }, |  | ||||||
|         "black": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b", |  | ||||||
|                 "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539" |  | ||||||
|             ], |  | ||||||
|             "index": "pypi", |  | ||||||
|             "version": "==19.10b0" |  | ||||||
|         }, |  | ||||||
|         "click": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", |  | ||||||
|                 "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" |  | ||||||
|             ], |  | ||||||
|             "version": "==7.1.2" |  | ||||||
|         }, |  | ||||||
|         "dataclasses": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:454a69d788c7fda44efd71e259be79577822f5e3f53f029a22d08004e951dc9f", |  | ||||||
|                 "sha256:6988bd2b895eef432d562370bb707d540f32f7360ab13da45340101bc2307d84" |  | ||||||
|             ], |  | ||||||
|             "index": "pypi", |  | ||||||
|             "version": "==0.6" |  | ||||||
|         }, |  | ||||||
|         "grpclib": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:b27d56c987b89023d5640fe9668943e49b46703fc85d8182a58c9f3b19120cdc" |  | ||||||
|             ], |  | ||||||
|             "index": "pypi", |  | ||||||
|             "version": "==0.3.2rc1" |  | ||||||
|         }, |  | ||||||
|         "h2": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:61e0f6601fa709f35cdb730863b4e5ec7ad449792add80d1410d4174ed139af5", |  | ||||||
|                 "sha256:875f41ebd6f2c44781259005b157faed1a5031df3ae5aa7bcb4628a6c0782f14" |  | ||||||
|             ], |  | ||||||
|             "version": "==3.2.0" |  | ||||||
|         }, |  | ||||||
|         "hpack": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:0edd79eda27a53ba5be2dfabf3b15780928a0dff6eb0c60a3d6767720e970c89", |  | ||||||
|                 "sha256:8eec9c1f4bfae3408a3f30500261f7e6a65912dc138526ea054f9ad98892e9d2" |  | ||||||
|             ], |  | ||||||
|             "version": "==3.0.0" |  | ||||||
|         }, |  | ||||||
|         "hyperframe": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:5187962cb16dcc078f23cb5a4b110098d546c3f41ff2d4038a9896893bbd0b40", |  | ||||||
|                 "sha256:a9f5c17f2cc3c719b917c4f33ed1c61bd1f8dfac4b1bd23b7c80b3400971b41f" |  | ||||||
|             ], |  | ||||||
|             "version": "==5.2.0" |  | ||||||
|         }, |  | ||||||
|         "jinja2": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:c10142f819c2d22bdcd17548c46fa9b77cf4fda45097854c689666bf425e7484", |  | ||||||
|                 "sha256:c922560ac46888d47384de1dbdc3daaa2ea993af4b26a436dec31fa2c19ec668" |  | ||||||
|             ], |  | ||||||
|             "index": "pypi", |  | ||||||
|             "version": "==3.0.0a1" |  | ||||||
|         }, |  | ||||||
|         "markupsafe": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:06358015a4dee8ee23ae426bf885616ab3963622defd829eb45b44e3dee3515f", |  | ||||||
|                 "sha256:0b0c4fc852c5f02c6277ef3b33d23fcbe89b1b227460423e3335374da046b6db", |  | ||||||
|                 "sha256:267677fc42afed5094fc5ea1c4236bbe4b6a00fe4b08e93451e65ae9048139c7", |  | ||||||
|                 "sha256:303cb70893e2c345588fb5d5b86e0ca369f9bb56942f03064c5e3e75fa7a238a", |  | ||||||
|                 "sha256:3c9b624a0d9ed5a5093ac4edc4e823e6b125441e60ef35d36e6f4a6fdacd5054", |  | ||||||
|                 "sha256:42033e14cae1f6c86fc0c3e90d04d08ce73ac8e46ba420a0d22d545c2abd4977", |  | ||||||
|                 "sha256:4e4a99b6af7bdc0856b50020c095848ec050356a001e1f751510aef6ab14d0e0", |  | ||||||
|                 "sha256:4eb07faad54bb07427d848f31030a65a49ebb0cec0b30674f91cf1ddd456bfe4", |  | ||||||
|                 "sha256:63a7161cd8c2bc563feeda45df62f42c860dd0675e2b8da2667f25bb3c95eaba", |  | ||||||
|                 "sha256:68e0fd039b68d2945b4beb947d4023ca7f8e95b708031c345762efba214ea761", |  | ||||||
|                 "sha256:8092a63397025c2f655acd42784b2a1528339b90b987beb9253f22e8cdbb36c3", |  | ||||||
|                 "sha256:841218860683c0f2223e24756843d84cc49cccdae6765e04962607754a52d3e0", |  | ||||||
|                 "sha256:94076b2314bd2f6cfae508ad65b4d493e3a58a50112b7a2cbb6287bdbc404ae8", |  | ||||||
|                 "sha256:9d22aff1c5322e402adfb3ce40839a5056c353e711c033798cf4f02eb9f5124d", |  | ||||||
|                 "sha256:b0e4584f62b3e5f5c1a7bcefd2b52f236505e6ef032cc508caa4f4c8dc8d3af1", |  | ||||||
|                 "sha256:b1163ffc1384d242964426a8164da12dbcdbc0de18ea36e2c34b898ed38c3b45", |  | ||||||
|                 "sha256:beac28ed60c8e838301226a7a85841d0af2068eba2dcb1a58c2d32d6c05e440e", |  | ||||||
|                 "sha256:c29f096ce79c03054a1101d6e5fe6bf04b0bb489165d5e0e9653fb4fe8048ee1", |  | ||||||
|                 "sha256:c58779966d53e5f14ba393d64e2402a7926601d1ac8adeb4e83893def79d0428", |  | ||||||
|                 "sha256:cfe14b37908eaf7d5506302987228bff69e1b8e7071ccd4e70fd0283b1b47f0b", |  | ||||||
|                 "sha256:e834249c45aa9837d0753351cdca61a4b8b383cc9ad0ff2325c97ff7b69e72a6", |  | ||||||
|                 "sha256:eed1b234c4499811ee85bcefa22ef5e466e75d132502226ed29740d593316c1f" |  | ||||||
|             ], |  | ||||||
|             "version": "==2.0.0a1" |  | ||||||
|         }, |  | ||||||
|         "multidict": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:1ece5a3369835c20ed57adadc663400b5525904e53bae59ec854a5d36b39b21a", |  | ||||||
|                 "sha256:275ca32383bc5d1894b6975bb4ca6a7ff16ab76fa622967625baeebcf8079000", |  | ||||||
|                 "sha256:3750f2205b800aac4bb03b5ae48025a64e474d2c6cc79547988ba1d4122a09e2", |  | ||||||
|                 "sha256:4538273208e7294b2659b1602490f4ed3ab1c8cf9dbdd817e0e9db8e64be2507", |  | ||||||
|                 "sha256:5141c13374e6b25fe6bf092052ab55c0c03d21bd66c94a0e3ae371d3e4d865a5", |  | ||||||
|                 "sha256:51a4d210404ac61d32dada00a50ea7ba412e6ea945bbe992e4d7a595276d2ec7", |  | ||||||
|                 "sha256:5cf311a0f5ef80fe73e4f4c0f0998ec08f954a6ec72b746f3c179e37de1d210d", |  | ||||||
|                 "sha256:6513728873f4326999429a8b00fc7ceddb2509b01d5fd3f3be7881a257b8d463", |  | ||||||
|                 "sha256:7388d2ef3c55a8ba80da62ecfafa06a1c097c18032a501ffd4cabbc52d7f2b19", |  | ||||||
|                 "sha256:9456e90649005ad40558f4cf51dbb842e32807df75146c6d940b6f5abb4a78f3", |  | ||||||
|                 "sha256:c026fe9a05130e44157b98fea3ab12969e5b60691a276150db9eda71710cd10b", |  | ||||||
|                 "sha256:d14842362ed4cf63751648e7672f7174c9818459d169231d03c56e84daf90b7c", |  | ||||||
|                 "sha256:e0d072ae0f2a179c375f67e3da300b47e1a83293c554450b29c900e50afaae87", |  | ||||||
|                 "sha256:f07acae137b71af3bb548bd8da720956a3bc9f9a0b87733e0899226a2317aeb7", |  | ||||||
|                 "sha256:fbb77a75e529021e7c4a8d4e823d88ef4d23674a202be4f5addffc72cbb91430", |  | ||||||
|                 "sha256:fcfbb44c59af3f8ea984de67ec7c306f618a3ec771c2843804069917a8f2e255", |  | ||||||
|                 "sha256:feed85993dbdb1dbc29102f50bca65bdc68f2c0c8d352468c25b54874f23c39d" |  | ||||||
|             ], |  | ||||||
|             "version": "==4.7.6" |  | ||||||
|         }, |  | ||||||
|         "pathspec": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0", |  | ||||||
|                 "sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061" |  | ||||||
|             ], |  | ||||||
|             "version": "==0.8.0" |  | ||||||
|         }, |  | ||||||
|         "protobuf": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:04d0b2bd99050d09393875a5a25fd12337b17f3ac2e29c0c1b8e65b277cbfe72", |  | ||||||
|                 "sha256:05288e44638e91498f13127a3699a6528dec6f9d3084d60959d721bfb9ea5b98", |  | ||||||
|                 "sha256:175d85370947f89e33b3da93f4ccdda3f326bebe3e599df5915ceb7f804cd9df", |  | ||||||
|                 "sha256:440a8c77531b3652f24999b249256ed01fd44c498ab0973843066681bd276685", |  | ||||||
|                 "sha256:49fb6fab19cd3f30fa0e976eeedcbf2558e9061e5fa65b4fe51ded1f4002e04d", |  | ||||||
|                 "sha256:4c7cae1f56056a4a2a2e3b00b26ab8550eae738bd9548f4ea0c2fcb88ed76ae5", |  | ||||||
|                 "sha256:519abfacbb421c3591d26e8daf7a4957763428db7267f7207e3693e29f6978db", |  | ||||||
|                 "sha256:60f32af25620abc4d7928d8197f9f25d49d558c5959aa1e08c686f974ac0b71a", |  | ||||||
|                 "sha256:613ac49f6db266fba243daf60fb32af107cfe3678e5c003bb40a381b6786389d", |  | ||||||
|                 "sha256:954bb14816edd24e746ba1a6b2d48c43576393bbde2fb8e1e3bd6d4504c7feac", |  | ||||||
|                 "sha256:9b1462c033a2cee7f4e8eb396905c69de2c532c3b835ff8f71f8e5fb77c38023", |  | ||||||
|                 "sha256:c0767f4d93ce4288475afe0571663c78870924f1f8881efd5406c10f070c75e4", |  | ||||||
|                 "sha256:c45f5980ce32879391144b5766120fd7b8803129f127ce36bd060dd38824801f", |  | ||||||
|                 "sha256:eeb7502f59e889a88bcb59f299493e215d1864f3d75335ea04a413004eb4fe24", |  | ||||||
|                 "sha256:fdb1742f883ee4662e39fcc5916f2725fec36a5191a52123fec60f8c53b70495", |  | ||||||
|                 "sha256:fe554066c4962c2db0a1d4752655223eb948d2bfa0fb1c4a7f2c00ec07324f1c" |  | ||||||
|             ], |  | ||||||
|             "index": "pypi", |  | ||||||
|             "version": "==3.12.1" |  | ||||||
|         }, |  | ||||||
|         "regex": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:1386e75c9d1574f6aa2e4eb5355374c8e55f9aac97e224a8a5a6abded0f9c927", |  | ||||||
|                 "sha256:27ff7325b297fb6e5ebb70d10437592433601c423f5acf86e5bc1ee2919b9561", |  | ||||||
|                 "sha256:329ba35d711e3428db6b45a53b1b13a0a8ba07cbbcf10bbed291a7da45f106c3", |  | ||||||
|                 "sha256:3a9394197664e35566242686d84dfd264c07b20f93514e2e09d3c2b3ffdf78fe", |  | ||||||
|                 "sha256:51f17abbe973c7673a61863516bdc9c0ef467407a940f39501e786a07406699c", |  | ||||||
|                 "sha256:579ea215c81d18da550b62ff97ee187b99f1b135fd894a13451e00986a080cad", |  | ||||||
|                 "sha256:70c14743320a68c5dac7fc5a0f685be63bc2024b062fe2aaccc4acc3d01b14a1", |  | ||||||
|                 "sha256:7e61be8a2900897803c293247ef87366d5df86bf701083b6c43119c7c6c99108", |  | ||||||
|                 "sha256:8044d1c085d49673aadb3d7dc20ef5cb5b030c7a4fa253a593dda2eab3059929", |  | ||||||
|                 "sha256:89d76ce33d3266173f5be80bd4efcbd5196cafc34100fdab814f9b228dee0fa4", |  | ||||||
|                 "sha256:99568f00f7bf820c620f01721485cad230f3fb28f57d8fbf4a7967ec2e446994", |  | ||||||
|                 "sha256:a7c37f048ec3920783abab99f8f4036561a174f1314302ccfa4e9ad31cb00eb4", |  | ||||||
|                 "sha256:c2062c7d470751b648f1cacc3f54460aebfc261285f14bc6da49c6943bd48bdd", |  | ||||||
|                 "sha256:c9bce6e006fbe771a02bda468ec40ffccbf954803b470a0345ad39c603402577", |  | ||||||
|                 "sha256:ce367d21f33e23a84fb83a641b3834dd7dd8e9318ad8ff677fbfae5915a239f7", |  | ||||||
|                 "sha256:ce450ffbfec93821ab1fea94779a8440e10cf63819be6e176eb1973a6017aff5", |  | ||||||
|                 "sha256:ce5cc53aa9fbbf6712e92c7cf268274eaff30f6bd12a0754e8133d85a8fb0f5f", |  | ||||||
|                 "sha256:d466967ac8e45244b9dfe302bbe5e3337f8dc4dec8d7d10f5e950d83b140d33a", |  | ||||||
|                 "sha256:d881c2e657c51d89f02ae4c21d9adbef76b8325fe4d5cf0e9ad62f850f3a98fd", |  | ||||||
|                 "sha256:e565569fc28e3ba3e475ec344d87ed3cd8ba2d575335359749298a0899fe122e", |  | ||||||
|                 "sha256:ea55b80eb0d1c3f1d8d784264a6764f931e172480a2f1868f2536444c5f01e01" |  | ||||||
|             ], |  | ||||||
|             "version": "==2020.5.14" |  | ||||||
|         }, |  | ||||||
|         "six": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", |  | ||||||
|                 "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" |  | ||||||
|             ], |  | ||||||
|             "version": "==1.15.0" |  | ||||||
|         }, |  | ||||||
|         "stringcase": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:48a06980661908efe8d9d34eab2b6c13aefa2163b3ced26972902e3bdfd87008" |  | ||||||
|             ], |  | ||||||
|             "index": "pypi", |  | ||||||
|             "version": "==1.2.0" |  | ||||||
|         }, |  | ||||||
|         "toml": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f", |  | ||||||
|                 "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88" |  | ||||||
|             ], |  | ||||||
|             "version": "==0.10.1" |  | ||||||
|         }, |  | ||||||
|         "typed-ast": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355", |  | ||||||
|                 "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919", |  | ||||||
|                 "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa", |  | ||||||
|                 "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652", |  | ||||||
|                 "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75", |  | ||||||
|                 "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01", |  | ||||||
|                 "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d", |  | ||||||
|                 "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1", |  | ||||||
|                 "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907", |  | ||||||
|                 "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c", |  | ||||||
|                 "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3", |  | ||||||
|                 "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b", |  | ||||||
|                 "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614", |  | ||||||
|                 "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb", |  | ||||||
|                 "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b", |  | ||||||
|                 "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41", |  | ||||||
|                 "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6", |  | ||||||
|                 "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34", |  | ||||||
|                 "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe", |  | ||||||
|                 "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4", |  | ||||||
|                 "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7" |  | ||||||
|             ], |  | ||||||
|             "version": "==1.4.1" |  | ||||||
|         } |  | ||||||
|     }, |  | ||||||
|     "develop": { |  | ||||||
|         "attrs": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", |  | ||||||
|                 "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" |  | ||||||
|             ], |  | ||||||
|             "version": "==19.3.0" |  | ||||||
|         }, |  | ||||||
|         "flake8": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:c69ac1668e434d37a2d2880b3ca9aafd54b3a10a3ac1ab101d22f29e29cf8634", |  | ||||||
|                 "sha256:ccaa799ef9893cebe69fdfefed76865aeaefbb94cb8545617b2298786a4de9a5" |  | ||||||
|             ], |  | ||||||
|             "index": "pypi", |  | ||||||
|             "version": "==3.8.2" |  | ||||||
|         }, |  | ||||||
|         "importlib-metadata": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:2a688cbaa90e0cc587f1df48bdc97a6eadccdcd9c35fb3f976a09e3b5016d90f", |  | ||||||
|                 "sha256:34513a8a0c4962bc66d35b359558fd8a5e10cd472d37aec5f66858addef32c1e" |  | ||||||
|             ], |  | ||||||
|             "markers": "python_version < '3.8'", |  | ||||||
|             "version": "==1.6.0" |  | ||||||
|         }, |  | ||||||
|         "isort": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1", |  | ||||||
|                 "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd" |  | ||||||
|             ], |  | ||||||
|             "index": "pypi", |  | ||||||
|             "version": "==4.3.21" |  | ||||||
|         }, |  | ||||||
|         "mccabe": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", |  | ||||||
|                 "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" |  | ||||||
|             ], |  | ||||||
|             "version": "==0.6.1" |  | ||||||
|         }, |  | ||||||
|         "more-itertools": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:558bb897a2232f5e4f8e2399089e35aecb746e1f9191b6584a151647e89267be", |  | ||||||
|                 "sha256:7818f596b1e87be009031c7653d01acc46ed422e6656b394b0f765ce66ed4982" |  | ||||||
|             ], |  | ||||||
|             "version": "==8.3.0" |  | ||||||
|         }, |  | ||||||
|         "mypy": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:15b948e1302682e3682f11f50208b726a246ab4e6c1b39f9264a8796bb416aa2", |  | ||||||
|                 "sha256:219a3116ecd015f8dca7b5d2c366c973509dfb9a8fc97ef044a36e3da66144a1", |  | ||||||
|                 "sha256:3b1fc683fb204c6b4403a1ef23f0b1fac8e4477091585e0c8c54cbdf7d7bb164", |  | ||||||
|                 "sha256:3beff56b453b6ef94ecb2996bea101a08f1f8a9771d3cbf4988a61e4d9973761", |  | ||||||
|                 "sha256:7687f6455ec3ed7649d1ae574136835a4272b65b3ddcf01ab8704ac65616c5ce", |  | ||||||
|                 "sha256:7ec45a70d40ede1ec7ad7f95b3c94c9cf4c186a32f6bacb1795b60abd2f9ef27", |  | ||||||
|                 "sha256:86c857510a9b7c3104cf4cde1568f4921762c8f9842e987bc03ed4f160925754", |  | ||||||
|                 "sha256:8a627507ef9b307b46a1fea9513d5c98680ba09591253082b4c48697ba05a4ae", |  | ||||||
|                 "sha256:8dfb69fbf9f3aeed18afffb15e319ca7f8da9642336348ddd6cab2713ddcf8f9", |  | ||||||
|                 "sha256:a34b577cdf6313bf24755f7a0e3f3c326d5c1f4fe7422d1d06498eb25ad0c600", |  | ||||||
|                 "sha256:a8ffcd53cb5dfc131850851cc09f1c44689c2812d0beb954d8138d4f5fc17f65", |  | ||||||
|                 "sha256:b90928f2d9eb2f33162405f32dde9f6dcead63a0971ca8a1b50eb4ca3e35ceb8", |  | ||||||
|                 "sha256:c56ffe22faa2e51054c5f7a3bc70a370939c2ed4de308c690e7949230c995913", |  | ||||||
|                 "sha256:f91c7ae919bbc3f96cd5e5b2e786b2b108343d1d7972ea130f7de27fdd547cf3" |  | ||||||
|             ], |  | ||||||
|             "index": "pypi", |  | ||||||
|             "version": "==0.770" |  | ||||||
|         }, |  | ||||||
|         "mypy-extensions": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d", |  | ||||||
|                 "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8" |  | ||||||
|             ], |  | ||||||
|             "version": "==0.4.3" |  | ||||||
|         }, |  | ||||||
|         "packaging": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8", |  | ||||||
|                 "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181" |  | ||||||
|             ], |  | ||||||
|             "version": "==20.4" |  | ||||||
|         }, |  | ||||||
|         "pluggy": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", |  | ||||||
|                 "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" |  | ||||||
|             ], |  | ||||||
|             "version": "==0.13.1" |  | ||||||
|         }, |  | ||||||
|         "py": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa", |  | ||||||
|                 "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0" |  | ||||||
|             ], |  | ||||||
|             "version": "==1.8.1" |  | ||||||
|         }, |  | ||||||
|         "pycodestyle": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367", |  | ||||||
|                 "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e" |  | ||||||
|             ], |  | ||||||
|             "version": "==2.6.0" |  | ||||||
|         }, |  | ||||||
|         "pyflakes": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92", |  | ||||||
|                 "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8" |  | ||||||
|             ], |  | ||||||
|             "version": "==2.2.0" |  | ||||||
|         }, |  | ||||||
|         "pyparsing": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:67199f0c41a9c702154efb0e7a8cc08accf830eb003b4d9fa42c4059002e2492", |  | ||||||
|                 "sha256:700d17888d441604b0bd51535908dcb297561b040819cccde647a92439db5a2a" |  | ||||||
|             ], |  | ||||||
|             "version": "==3.0.0a1" |  | ||||||
|         }, |  | ||||||
|         "pytest": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:95c710d0a72d91c13fae35dce195633c929c3792f54125919847fdcdf7caa0d3", |  | ||||||
|                 "sha256:eb2b5e935f6a019317e455b6da83dd8650ac9ffd2ee73a7b657a30873d67a698" |  | ||||||
|             ], |  | ||||||
|             "index": "pypi", |  | ||||||
|             "version": "==5.4.2" |  | ||||||
|         }, |  | ||||||
|         "pytest-asyncio": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:475bd2f3dc0bc11d2463656b3cbaafdbec5a47b47508ea0b329ee693040eebd2" |  | ||||||
|             ], |  | ||||||
|             "index": "pypi", |  | ||||||
|             "version": "==0.12.0" |  | ||||||
|         }, |  | ||||||
|         "rope": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:658ad6705f43dcf3d6df379da9486529cf30e02d9ea14c5682aa80eb33b649e1" |  | ||||||
|             ], |  | ||||||
|             "index": "pypi", |  | ||||||
|             "version": "==0.17.0" |  | ||||||
|         }, |  | ||||||
|         "six": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", |  | ||||||
|                 "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" |  | ||||||
|             ], |  | ||||||
|             "version": "==1.15.0" |  | ||||||
|         }, |  | ||||||
|         "typed-ast": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355", |  | ||||||
|                 "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919", |  | ||||||
|                 "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa", |  | ||||||
|                 "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652", |  | ||||||
|                 "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75", |  | ||||||
|                 "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01", |  | ||||||
|                 "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d", |  | ||||||
|                 "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1", |  | ||||||
|                 "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907", |  | ||||||
|                 "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c", |  | ||||||
|                 "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3", |  | ||||||
|                 "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b", |  | ||||||
|                 "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614", |  | ||||||
|                 "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb", |  | ||||||
|                 "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b", |  | ||||||
|                 "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41", |  | ||||||
|                 "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6", |  | ||||||
|                 "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34", |  | ||||||
|                 "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe", |  | ||||||
|                 "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4", |  | ||||||
|                 "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7" |  | ||||||
|             ], |  | ||||||
|             "version": "==1.4.1" |  | ||||||
|         }, |  | ||||||
|         "typing-extensions": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5", |  | ||||||
|                 "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae", |  | ||||||
|                 "sha256:f8d2bd89d25bc39dabe7d23df520442fa1d8969b82544370e03d88b5a591c392" |  | ||||||
|             ], |  | ||||||
|             "version": "==3.7.4.2" |  | ||||||
|         }, |  | ||||||
|         "wcwidth": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:cafe2186b3c009a04067022ce1dcd79cb38d8d65ee4f4791b8888d6599d1bbe1", |  | ||||||
|                 "sha256:ee73862862a156bf77ff92b09034fc4825dd3af9cf81bc5b360668d425f3c5f1" |  | ||||||
|             ], |  | ||||||
|             "version": "==0.1.9" |  | ||||||
|         }, |  | ||||||
|         "zipp": { |  | ||||||
|             "hashes": [ |  | ||||||
|                 "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b", |  | ||||||
|                 "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96" |  | ||||||
|             ], |  | ||||||
|             "version": "==3.1.0" |  | ||||||
|         } |  | ||||||
|     } |  | ||||||
| } |  | ||||||

README.md (118 changes)
							| @@ -40,18 +40,24 @@ This project exists because I am unhappy with the state of the official Google p | |||||||
|  |  | ||||||
| This project is a reimplementation from the ground up focused on idiomatic modern Python to help fix some of the above. While it may not be a 1:1 drop-in replacement due to changed method names and call patterns, the wire format is identical. | This project is a reimplementation from the ground up focused on idiomatic modern Python to help fix some of the above. While it may not be a 1:1 drop-in replacement due to changed method names and call patterns, the wire format is identical. | ||||||
|  |  | ||||||
| ## Installation & Getting Started | ## Installation | ||||||
|  |  | ||||||
| First, install the package. Note that the `[compiler]` feature flag tells it to install extra dependencies only needed by the `protoc` plugin: | First, install the package. Note that the `[compiler]` feature flag tells it to install extra dependencies only needed by the `protoc` plugin: | ||||||
|  |  | ||||||
| ```sh | ```sh | ||||||
| # Install both the library and compiler | # Install both the library and compiler | ||||||
| $ pip install "betterproto[compiler]" | pip install "betterproto[compiler]" | ||||||
|  |  | ||||||
| # Install just the library (to use the generated code output) | # Install just the library (to use the generated code output) | ||||||
| $ pip install betterproto | pip install betterproto | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
|  | *Betterproto* is under active development. To install the latest beta version, use `pip install --pre betterproto`. | ||||||
|  |  | ||||||
|  | ## Getting Started | ||||||
|  |  | ||||||
|  | ### Compiling proto files | ||||||
|  |  | ||||||
| Now, given you installed the compiler and have a proto file, e.g `example.proto`: | Now, given you installed the compiler and have a proto file, e.g `example.proto`: | ||||||
|  |  | ||||||
| ```protobuf | ```protobuf | ||||||
| @@ -68,14 +74,15 @@ message Greeting { | |||||||
| You can run the following: | You can run the following: | ||||||
|  |  | ||||||
| ```sh | ```sh | ||||||
| $ protoc -I . --python_betterproto_out=. example.proto | mkdir lib | ||||||
|  | protoc -I . --python_betterproto_out=lib example.proto | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| This will generate `hello.py` which looks like: | This will generate `lib/hello/__init__.py` which looks like: | ||||||
|  |  | ||||||
| ```py | ```python | ||||||
| # Generated by the protocol buffer compiler.  DO NOT EDIT! | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
| # sources: hello.proto | # sources: example.proto | ||||||
| # plugin: python-betterproto | # plugin: python-betterproto | ||||||
| from dataclasses import dataclass | from dataclasses import dataclass | ||||||
|  |  | ||||||
| @@ -83,7 +90,7 @@ import betterproto | |||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass | @dataclass | ||||||
| class Hello(betterproto.Message): | class Greeting(betterproto.Message): | ||||||
|     """Greeting represents a message you can tell a user.""" |     """Greeting represents a message you can tell a user.""" | ||||||
|  |  | ||||||
|     message: str = betterproto.string_field(1) |     message: str = betterproto.string_field(1) | ||||||
| @@ -91,23 +98,23 @@ class Hello(betterproto.Message): | |||||||
|  |  | ||||||
| Now you can use it! | Now you can use it! | ||||||
|  |  | ||||||
| ```py | ```python | ||||||
| >>> from hello import Hello | >>> from lib.hello import Greeting | ||||||
| >>> test = Hello() | >>> test = Greeting() | ||||||
| >>> test | >>> test | ||||||
| Hello(message='') | Greeting(message='') | ||||||
|  |  | ||||||
| >>> test.message = "Hey!" | >>> test.message = "Hey!" | ||||||
| >>> test | >>> test | ||||||
| Hello(message="Hey!") | Greeting(message="Hey!") | ||||||
|  |  | ||||||
| >>> serialized = bytes(test) | >>> serialized = bytes(test) | ||||||
| >>> serialized | >>> serialized | ||||||
| b'\n\x04Hey!' | b'\n\x04Hey!' | ||||||
|  |  | ||||||
| >>> another = Hello().parse(serialized) | >>> another = Greeting().parse(serialized) | ||||||
| >>> another | >>> another | ||||||
| Hello(message="Hey!") | Greeting(message="Hey!") | ||||||
|  |  | ||||||
| >>> another.to_dict() | >>> another.to_dict() | ||||||
| {"message": "Hey!"} | {"message": "Hey!"} | ||||||
| @@ -148,7 +155,7 @@ service Echo { | |||||||
|  |  | ||||||
| You can use it like so (enable async in the interactive shell first): | You can use it like so (enable async in the interactive shell first): | ||||||
|  |  | ||||||
| ```py | ```python | ||||||
| >>> import echo | >>> import echo | ||||||
| >>> from grpclib.client import Channel | >>> from grpclib.client import Channel | ||||||
|  |  | ||||||
| @@ -173,8 +180,8 @@ Both serializing and parsing are supported to/from JSON and Python dictionaries | |||||||
|  |  | ||||||
| For compatibility the default is to convert field names to `camelCase`. You can control this behavior by passing a casing value, e.g: | For compatibility the default is to convert field names to `camelCase`. You can control this behavior by passing a casing value, e.g: | ||||||
|  |  | ||||||
| ```py | ```python | ||||||
| >>> MyMessage().to_dict(casing=betterproto.Casing.SNAKE) | MyMessage().to_dict(casing=betterproto.Casing.SNAKE) | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| ### Determining if a message was sent | ### Determining if a message was sent | ||||||
| @@ -256,6 +263,7 @@ Google provides several well-known message types like a timestamp, duration, and | |||||||
| | `google.protobuf.duration`  | [`datetime.timedelta`][td]               | `0`                    | | | `google.protobuf.duration`  | [`datetime.timedelta`][td]               | `0`                    | | ||||||
| | `google.protobuf.timestamp` | Timezone-aware [`datetime.datetime`][dt] | `1970-01-01T00:00:00Z` | | | `google.protobuf.timestamp` | Timezone-aware [`datetime.datetime`][dt] | `1970-01-01T00:00:00Z` | | ||||||
| | `google.protobuf.*Value`    | `Optional[...]`                          | `None`                 | | | `google.protobuf.*Value`    | `Optional[...]`                          | `None`                 | | ||||||
|  | | `google.protobuf.*`         | `betterproto.lib.google.protobuf.*`      | `None`                 | | ||||||
|  |  | ||||||
| [td]: https://docs.python.org/3/library/datetime.html#timedelta-objects | [td]: https://docs.python.org/3/library/datetime.html#timedelta-objects | ||||||
| [dt]: https://docs.python.org/3/library/datetime.html#datetime.datetime | [dt]: https://docs.python.org/3/library/datetime.html#datetime.datetime | ||||||
| @@ -296,25 +304,43 @@ datetime.datetime(2019, 1, 1, 11, 59, 58, 800000, tzinfo=datetime.timezone.utc) | |||||||
|  |  | ||||||
| ## Development | ## Development | ||||||
|  |  | ||||||
| First, make sure you have Python 3.6+ and `pipenv` installed, along with the official [Protobuf Compiler](https://github.com/protocolbuffers/protobuf/releases) for your platform. Then: | Join us on [Slack](https://join.slack.com/t/betterproto/shared_invite/zt-f0n0uolx-iN8gBNrkPxtKHTLpG3o1OQ)! | ||||||
|  |  | ||||||
|  | ### Requirements | ||||||
|  |  | ||||||
|  | - Python (3.6 or higher) | ||||||
|  |  | ||||||
|  | - [protoc](https://grpc.io/docs/protoc-installation/) (3.12 or higher) | ||||||
|  |   *Needed to compile `.proto` files and run the tests* | ||||||
|  |  | ||||||
|  | - [poetry](https://python-poetry.org/docs/#installation) | ||||||
|  |   *Needed to install dependencies in a virtual environment* | ||||||
|  |  | ||||||
|  | - make ([ubuntu](https://www.howtoinstall.me/ubuntu/18-04/make/), [windows](https://stackoverflow.com/questions/32127524/how-to-install-and-use-make-in-windows), [mac](https://osxdaily.com/2014/02/12/install-command-line-tools-mac-os-x/)) | ||||||
|  |  | ||||||
|  |   *Needed to conveniently run development tasks.* | ||||||
|  |   *Alternatively, manually run the commands defined in the [Makefile](./Makefile)* | ||||||
|  |  | ||||||
|  | ### Setup | ||||||
|  |  | ||||||
| ```sh | ```sh | ||||||
| # Get set up with the virtual env & dependencies | # Get set up with the virtual env & dependencies | ||||||
| $ pipenv install --dev | poetry install | ||||||
|  |  | ||||||
| # Link the local package | # Activate the poetry environment | ||||||
| $ pipenv shell | poetry shell | ||||||
| $ pip install -e . |  | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
|  | Run `make help` to see all available development tasks. | ||||||
|  |  | ||||||
| ### Code style | ### Code style | ||||||
|  |  | ||||||
| This project enforces [black](https://github.com/psf/black) python code formatting. | This project enforces [black](https://github.com/psf/black) python code formatting. | ||||||
|  |  | ||||||
| Before commiting changes run: | Before committing changes run: | ||||||
|  |  | ||||||
| ```bash | ```sh | ||||||
| pipenv run black . | make format | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| To avoid merge conflicts later, non-black formatted python code will fail in CI. | To avoid merge conflicts later, non-black formatted python code will fail in CI. | ||||||
| @@ -332,7 +358,7 @@ Adding a standard test case is easy. | |||||||
|  |  | ||||||
| - Create a new directory `betterproto/tests/inputs/<name>` | - Create a new directory `betterproto/tests/inputs/<name>` | ||||||
|   - add `<name>.proto`  with a message called `Test` |   - add `<name>.proto`  with a message called `Test` | ||||||
|   - add `<name>.json` with some test data |   - add `<name>.json` with some test data (optional) | ||||||
|  |  | ||||||
| It will be picked up automatically when you run the tests. | It will be picked up automatically when you run the tests. | ||||||
|  |  | ||||||
| @@ -347,13 +373,37 @@ Custom tests are found in `tests/test_*.py` and are run with pytest. | |||||||
| Here's how to run the tests. | Here's how to run the tests. | ||||||
|  |  | ||||||
| ```sh | ```sh | ||||||
| # Generate assets from sample .proto files | # Generate assets from sample .proto files required by the tests | ||||||
| $ pipenv run generate | make generate | ||||||
|  | # Run the tests | ||||||
| # Run all tests | make test | ||||||
| $ pipenv run test |  | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
|  | To run tests as they are run in CI (with tox) run: | ||||||
|  |  | ||||||
|  | ```sh | ||||||
|  | make full-test | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ### (Re)compiling Google Well-known Types | ||||||
|  |  | ||||||
|  | Betterproto includes compiled versions for Google's well-known types at [betterproto/lib/google](betterproto/lib/google). | ||||||
|  | Be sure to regenerate these files when modifying the plugin output format, and validate by running the tests. | ||||||
|  |  | ||||||
|  | Normally, the plugin does not compile any references to `google.protobuf`, since they are pre-compiled. To force compilation of `google.protobuf`, use the option `--custom_opt=INCLUDE_GOOGLE`.  | ||||||
|  |  | ||||||
|  | Assuming your `google.protobuf` source files (included with all releases of `protoc`) are located in `/usr/local/include`, you can regenerate them as follows: | ||||||
|  |  | ||||||
|  | ```sh | ||||||
|  | protoc \ | ||||||
|  |     --plugin=protoc-gen-custom=betterproto/plugin.py \ | ||||||
|  |     --custom_opt=INCLUDE_GOOGLE \ | ||||||
|  |     --custom_out=betterproto/lib \ | ||||||
|  |     -I /usr/local/include/ \ | ||||||
|  |     /usr/local/include/google/protobuf/*.proto | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  |  | ||||||
| ### TODO | ### TODO | ||||||
|  |  | ||||||
| - [x] Fixed length fields | - [x] Fixed length fields | ||||||
| @@ -394,6 +444,10 @@ $ pipenv run test | |||||||
| - [x] Automate running tests | - [x] Automate running tests | ||||||
| - [ ] Cleanup! | - [ ] Cleanup! | ||||||
|  |  | ||||||
|  | ## Community | ||||||
|  |  | ||||||
|  | Join us on [Slack](https://join.slack.com/t/betterproto/shared_invite/zt-f0n0uolx-iN8gBNrkPxtKHTLpG3o1OQ)! | ||||||
|  |  | ||||||
| ## License | ## License | ||||||
|  |  | ||||||
| Copyright © 2019 Daniel G. Taylor | Copyright © 2019 Daniel G. Taylor | ||||||
|   | |||||||
| @@ -5,38 +5,25 @@ import json | |||||||
| import struct | import struct | ||||||
| import sys | import sys | ||||||
| from abc import ABC | from abc import ABC | ||||||
| from base64 import b64encode, b64decode | from base64 import b64decode, b64encode | ||||||
| from datetime import datetime, timedelta, timezone | from datetime import datetime, timedelta, timezone | ||||||
| from typing import ( | from typing import ( | ||||||
|     Any, |     Any, | ||||||
|     AsyncGenerator, |  | ||||||
|     Callable, |     Callable, | ||||||
|     Collection, |  | ||||||
|     Dict, |     Dict, | ||||||
|     Generator, |     Generator, | ||||||
|     Iterable, |  | ||||||
|     List, |     List, | ||||||
|     Mapping, |  | ||||||
|     Optional, |     Optional, | ||||||
|     SupportsBytes, |     Set, | ||||||
|     Tuple, |     Tuple, | ||||||
|     Type, |     Type, | ||||||
|     TypeVar, |  | ||||||
|     Union, |     Union, | ||||||
|     get_type_hints, |     get_type_hints, | ||||||
|     TYPE_CHECKING, |  | ||||||
| ) | ) | ||||||
|  |  | ||||||
|  | from ._types import T | ||||||
| import grpclib.const | from .casing import camel_case, safe_snake_case, snake_case | ||||||
| import stringcase | from .grpc.grpclib_client import ServiceStub | ||||||
|  |  | ||||||
| from .casing import safe_snake_case |  | ||||||
|  |  | ||||||
| if TYPE_CHECKING: |  | ||||||
|     from grpclib._protocols import IProtoMessage |  | ||||||
|     from grpclib.client import Channel |  | ||||||
|     from grpclib.metadata import Deadline |  | ||||||
|  |  | ||||||
| if not (sys.version_info.major == 3 and sys.version_info.minor >= 7): | if not (sys.version_info.major == 3 and sys.version_info.minor >= 7): | ||||||
|     # Apply backport of datetime.fromisoformat from 3.7 |     # Apply backport of datetime.fromisoformat from 3.7 | ||||||
| @@ -132,8 +119,8 @@ DATETIME_ZERO = datetime_default_gen() | |||||||
| class Casing(enum.Enum): | class Casing(enum.Enum): | ||||||
|     """Casing constants for serialization.""" |     """Casing constants for serialization.""" | ||||||
|  |  | ||||||
|     CAMEL = stringcase.camelcase |     CAMEL = camel_case | ||||||
|     SNAKE = stringcase.snakecase |     SNAKE = snake_case | ||||||
|  |  | ||||||
|  |  | ||||||
| class _PLACEHOLDER: | class _PLACEHOLDER: | ||||||
| @@ -430,19 +417,30 @@ def parse_fields(value: bytes) -> Generator[ParsedField, None, None]: | |||||||
|         ) |         ) | ||||||
|  |  | ||||||
|  |  | ||||||
| # Bound type variable to allow methods to return `self` of subclasses |  | ||||||
| T = TypeVar("T", bound="Message") |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ProtoClassMetadata: | class ProtoClassMetadata: | ||||||
|     cls: Type["Message"] |     oneof_group_by_field: Dict[str, str] | ||||||
|  |     oneof_field_by_group: Dict[str, Set[dataclasses.Field]] | ||||||
|  |     default_gen: Dict[str, Callable] | ||||||
|  |     cls_by_field: Dict[str, Type] | ||||||
|  |     field_name_by_number: Dict[int, str] | ||||||
|  |     meta_by_field_name: Dict[str, FieldMetadata] | ||||||
|  |     __slots__ = ( | ||||||
|  |         "oneof_group_by_field", | ||||||
|  |         "oneof_field_by_group", | ||||||
|  |         "default_gen", | ||||||
|  |         "cls_by_field", | ||||||
|  |         "field_name_by_number", | ||||||
|  |         "meta_by_field_name", | ||||||
|  |     ) | ||||||
|  |  | ||||||
|     def __init__(self, cls: Type["Message"]): |     def __init__(self, cls: Type["Message"]): | ||||||
|         self.cls = cls |  | ||||||
|         by_field = {} |         by_field = {} | ||||||
|         by_group = {} |         by_group: Dict[str, Set] = {} | ||||||
|  |         by_field_name = {} | ||||||
|  |         by_field_number = {} | ||||||
|  |  | ||||||
|         for field in dataclasses.fields(cls): |         fields = dataclasses.fields(cls) | ||||||
|  |         for field in fields: | ||||||
|             meta = FieldMetadata.get(field) |             meta = FieldMetadata.get(field) | ||||||
|  |  | ||||||
|             if meta.group: |             if meta.group: | ||||||
| @@ -451,30 +449,36 @@ class ProtoClassMetadata: | |||||||
|  |  | ||||||
|                 by_group.setdefault(meta.group, set()).add(field) |                 by_group.setdefault(meta.group, set()).add(field) | ||||||
|  |  | ||||||
|  |             by_field_name[field.name] = meta | ||||||
|  |             by_field_number[meta.number] = field.name | ||||||
|  |  | ||||||
|         self.oneof_group_by_field = by_field |         self.oneof_group_by_field = by_field | ||||||
|         self.oneof_field_by_group = by_group |         self.oneof_field_by_group = by_group | ||||||
|  |         self.field_name_by_number = by_field_number | ||||||
|  |         self.meta_by_field_name = by_field_name | ||||||
|  |  | ||||||
|         self.init_default_gen() |         self.default_gen = self._get_default_gen(cls, fields) | ||||||
|         self.init_cls_by_field() |         self.cls_by_field = self._get_cls_by_field(cls, fields) | ||||||
|  |  | ||||||
|     def init_default_gen(self): |     @staticmethod | ||||||
|  |     def _get_default_gen(cls, fields): | ||||||
|         default_gen = {} |         default_gen = {} | ||||||
|  |  | ||||||
|         for field in dataclasses.fields(self.cls): |         for field in fields: | ||||||
|             meta = FieldMetadata.get(field) |             default_gen[field.name] = cls._get_field_default_gen(field) | ||||||
|             default_gen[field.name] = self.cls._get_field_default_gen(field, meta) |  | ||||||
|  |  | ||||||
|         self.default_gen = default_gen |         return default_gen | ||||||
|  |  | ||||||
|     def init_cls_by_field(self): |     @staticmethod | ||||||
|  |     def _get_cls_by_field(cls, fields): | ||||||
|         field_cls = {} |         field_cls = {} | ||||||
|  |  | ||||||
|         for field in dataclasses.fields(self.cls): |         for field in fields: | ||||||
|             meta = FieldMetadata.get(field) |             meta = FieldMetadata.get(field) | ||||||
|             if meta.proto_type == TYPE_MAP: |             if meta.proto_type == TYPE_MAP: | ||||||
|                 assert meta.map_types |                 assert meta.map_types | ||||||
|                 kt = self.cls._cls_for(field, index=0) |                 kt = cls._cls_for(field, index=0) | ||||||
|                 vt = self.cls._cls_for(field, index=1) |                 vt = cls._cls_for(field, index=1) | ||||||
|                 Entry = dataclasses.make_dataclass( |                 Entry = dataclasses.make_dataclass( | ||||||
|                     "Entry", |                     "Entry", | ||||||
|                     [ |                     [ | ||||||
| @@ -486,9 +490,9 @@ class ProtoClassMetadata: | |||||||
|                 field_cls[field.name] = Entry |                 field_cls[field.name] = Entry | ||||||
|                 field_cls[field.name + ".value"] = vt |                 field_cls[field.name + ".value"] = vt | ||||||
|             else: |             else: | ||||||
|                 field_cls[field.name] = self.cls._cls_for(field) |                 field_cls[field.name] = cls._cls_for(field) | ||||||
|  |  | ||||||
|         self.cls_by_field = field_cls |         return field_cls | ||||||
|  |  | ||||||
|  |  | ||||||
| class Message(ABC): | class Message(ABC): | ||||||
| @@ -500,53 +504,50 @@ class Message(ABC): | |||||||
|  |  | ||||||
|     _serialized_on_wire: bool |     _serialized_on_wire: bool | ||||||
|     _unknown_fields: bytes |     _unknown_fields: bytes | ||||||
|     _group_map: Dict[str, dict] |     _group_current: Dict[str, str] | ||||||
|  |  | ||||||
|     def __post_init__(self) -> None: |     def __post_init__(self) -> None: | ||||||
|         # Keep track of whether every field was default |         # Keep track of whether every field was default | ||||||
|         all_sentinel = True |         all_sentinel = True | ||||||
|  |  | ||||||
|         # Set a default value for each field in the class after `__init__` has |         # Set current field of each group after `__init__` has already been run. | ||||||
|         # already been run. |         group_current: Dict[str, str] = {} | ||||||
|         group_map: Dict[str, dataclasses.Field] = {} |         for field_name, meta in self._betterproto.meta_by_field_name.items(): | ||||||
|         for field in dataclasses.fields(self): |  | ||||||
|             meta = FieldMetadata.get(field) |  | ||||||
|  |  | ||||||
|             if meta.group: |             if meta.group: | ||||||
|                 group_map.setdefault(meta.group) |                 group_current.setdefault(meta.group) | ||||||
|  |  | ||||||
|             if getattr(self, field.name) != PLACEHOLDER: |             if getattr(self, field_name) != PLACEHOLDER: | ||||||
|                 # Skip anything not set to the sentinel value |                 # Skip anything not set to the sentinel value | ||||||
|                 all_sentinel = False |                 all_sentinel = False | ||||||
|  |  | ||||||
|                 if meta.group: |                 if meta.group: | ||||||
|                     # This was set, so make it the selected value of the one-of. |                     # This was set, so make it the selected value of the one-of. | ||||||
|                     group_map[meta.group] = field |                     group_current[meta.group] = field_name | ||||||
|  |  | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|             setattr(self, field.name, self._get_field_default(field, meta)) |             setattr(self, field_name, self._get_field_default(field_name)) | ||||||
|  |  | ||||||
|         # Now that all the defaults are set, reset it! |         # Now that all the defaults are set, reset it! | ||||||
|         self.__dict__["_serialized_on_wire"] = not all_sentinel |         self.__dict__["_serialized_on_wire"] = not all_sentinel | ||||||
|         self.__dict__["_unknown_fields"] = b"" |         self.__dict__["_unknown_fields"] = b"" | ||||||
|         self.__dict__["_group_map"] = group_map |         self.__dict__["_group_current"] = group_current | ||||||
|  |  | ||||||
|     def __setattr__(self, attr: str, value: Any) -> None: |     def __setattr__(self, attr: str, value: Any) -> None: | ||||||
|         if attr != "_serialized_on_wire": |         if attr != "_serialized_on_wire": | ||||||
|             # Track when a field has been set. |             # Track when a field has been set. | ||||||
|             self.__dict__["_serialized_on_wire"] = True |             self.__dict__["_serialized_on_wire"] = True | ||||||
|  |  | ||||||
|         if hasattr(self, "_group_map"):  # __post_init__ had already run |         if hasattr(self, "_group_current"):  # __post_init__ had already run | ||||||
|             if attr in self._betterproto.oneof_group_by_field: |             if attr in self._betterproto.oneof_group_by_field: | ||||||
|                 group = self._betterproto.oneof_group_by_field[attr] |                 group = self._betterproto.oneof_group_by_field[attr] | ||||||
|                 for field in self._betterproto.oneof_field_by_group[group]: |                 for field in self._betterproto.oneof_field_by_group[group]: | ||||||
|                     if field.name == attr: |                     if field.name == attr: | ||||||
|                         self._group_map[group] = field |                         self._group_current[group] = field.name | ||||||
|                     else: |                     else: | ||||||
|                         super().__setattr__( |                         super().__setattr__( | ||||||
|                             field.name, |                             field.name, self._get_field_default(field.name), | ||||||
|                             self._get_field_default(field, FieldMetadata.get(field)), |  | ||||||
|                         ) |                         ) | ||||||
|  |  | ||||||
|         super().__setattr__(attr, value) |         super().__setattr__(attr, value) | ||||||
| @@ -569,9 +570,8 @@ class Message(ABC): | |||||||
|         Get the binary encoded Protobuf representation of this instance. |         Get the binary encoded Protobuf representation of this instance. | ||||||
|         """ |         """ | ||||||
|         output = b"" |         output = b"" | ||||||
|         for field in dataclasses.fields(self): |         for field_name, meta in self._betterproto.meta_by_field_name.items(): | ||||||
|             meta = FieldMetadata.get(field) |             value = getattr(self, field_name) | ||||||
|             value = getattr(self, field.name) |  | ||||||
|  |  | ||||||
|             if value is None: |             if value is None: | ||||||
|                 # Optional items should be skipped. This is used for the Google |                 # Optional items should be skipped. This is used for the Google | ||||||
| @@ -582,16 +582,16 @@ class Message(ABC): | |||||||
|             # currently set in a `oneof` group, so it must be serialized even |             # currently set in a `oneof` group, so it must be serialized even | ||||||
|             # if the value is the default zero value. |             # if the value is the default zero value. | ||||||
|             selected_in_group = False |             selected_in_group = False | ||||||
|             if meta.group and self._group_map[meta.group] == field: |             if meta.group and self._group_current[meta.group] == field_name: | ||||||
|                 selected_in_group = True |                 selected_in_group = True | ||||||
|  |  | ||||||
|             serialize_empty = False |             serialize_empty = False | ||||||
|             if isinstance(value, Message) and value._serialized_on_wire: |             if isinstance(value, Message) and value._serialized_on_wire: | ||||||
|                 # Empty messages can still be sent on the wire if they were |                 # Empty messages can still be sent on the wire if they were | ||||||
|                 # set (or received empty). |                 # set (or received empty). | ||||||
|                 serialize_empty = True |                 serialize_empty = True | ||||||
|  |  | ||||||
|             if value == self._get_field_default(field, meta) and not ( |             if value == self._get_field_default(field_name) and not ( | ||||||
|                 selected_in_group or serialize_empty |                 selected_in_group or serialize_empty | ||||||
|             ): |             ): | ||||||
|                 # Default (zero) values are not serialized. Two exceptions are |                 # Default (zero) values are not serialized. Two exceptions are | ||||||
| @@ -648,13 +648,11 @@ class Message(ABC): | |||||||
|             field_cls = field_cls.__args__[index] |             field_cls = field_cls.__args__[index] | ||||||
|         return field_cls |         return field_cls | ||||||
|  |  | ||||||
|     def _get_field_default(self, field: dataclasses.Field, meta: FieldMetadata) -> Any: |     def _get_field_default(self, field_name): | ||||||
|         return self._betterproto.default_gen[field.name]() |         return self._betterproto.default_gen[field_name]() | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _get_field_default_gen( |     def _get_field_default_gen(cls, field: dataclasses.Field) -> Any: | ||||||
|         cls, field: dataclasses.Field, meta: FieldMetadata |  | ||||||
|     ) -> Any: |  | ||||||
|         t = cls._type_hint(field.name) |         t = cls._type_hint(field.name) | ||||||
|  |  | ||||||
|         if hasattr(t, "__origin__"): |         if hasattr(t, "__origin__"): | ||||||
| @@ -682,7 +680,7 @@ class Message(ABC): | |||||||
|             return t |             return t | ||||||
|  |  | ||||||
|     def _postprocess_single( |     def _postprocess_single( | ||||||
|         self, wire_type: int, meta: FieldMetadata, field: dataclasses.Field, value: Any |         self, wire_type: int, meta: FieldMetadata, field_name: str, value: Any | ||||||
|     ) -> Any: |     ) -> Any: | ||||||
|         """Adjusts values after parsing.""" |         """Adjusts values after parsing.""" | ||||||
|         if wire_type == WIRE_VARINT: |         if wire_type == WIRE_VARINT: | ||||||
| @@ -704,7 +702,7 @@ class Message(ABC): | |||||||
|             if meta.proto_type == TYPE_STRING: |             if meta.proto_type == TYPE_STRING: | ||||||
|                 value = value.decode("utf-8") |                 value = value.decode("utf-8") | ||||||
|             elif meta.proto_type == TYPE_MESSAGE: |             elif meta.proto_type == TYPE_MESSAGE: | ||||||
|                 cls = self._betterproto.cls_by_field[field.name] |                 cls = self._betterproto.cls_by_field[field_name] | ||||||
|  |  | ||||||
|                 if cls == datetime: |                 if cls == datetime: | ||||||
|                     value = _Timestamp().parse(value).to_datetime() |                     value = _Timestamp().parse(value).to_datetime() | ||||||
| @@ -718,7 +716,7 @@ class Message(ABC): | |||||||
|                     value = cls().parse(value) |                     value = cls().parse(value) | ||||||
|                     value._serialized_on_wire = True |                     value._serialized_on_wire = True | ||||||
|             elif meta.proto_type == TYPE_MAP: |             elif meta.proto_type == TYPE_MAP: | ||||||
|                 value = self._betterproto.cls_by_field[field.name]().parse(value) |                 value = self._betterproto.cls_by_field[field_name]().parse(value) | ||||||
|  |  | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
| @@ -727,49 +725,46 @@ class Message(ABC): | |||||||
|         Parse the binary encoded Protobuf into this message instance. This |         Parse the binary encoded Protobuf into this message instance. This | ||||||
|         returns the instance itself and is therefore assignable and chainable. |         returns the instance itself and is therefore assignable and chainable. | ||||||
|         """ |         """ | ||||||
|         fields = {f.metadata["betterproto"].number: f for f in dataclasses.fields(self)} |  | ||||||
|         for parsed in parse_fields(data): |         for parsed in parse_fields(data): | ||||||
|             if parsed.number in fields: |             field_name = self._betterproto.field_name_by_number.get(parsed.number) | ||||||
|                 field = fields[parsed.number] |             if not field_name: | ||||||
|                 meta = FieldMetadata.get(field) |  | ||||||
|  |  | ||||||
|                 value: Any |  | ||||||
|                 if ( |  | ||||||
|                     parsed.wire_type == WIRE_LEN_DELIM |  | ||||||
|                     and meta.proto_type in PACKED_TYPES |  | ||||||
|                 ): |  | ||||||
|                     # This is a packed repeated field. |  | ||||||
|                     pos = 0 |  | ||||||
|                     value = [] |  | ||||||
|                     while pos < len(parsed.value): |  | ||||||
|                         if meta.proto_type in ["float", "fixed32", "sfixed32"]: |  | ||||||
|                             decoded, pos = parsed.value[pos : pos + 4], pos + 4 |  | ||||||
|                             wire_type = WIRE_FIXED_32 |  | ||||||
|                         elif meta.proto_type in ["double", "fixed64", "sfixed64"]: |  | ||||||
|                             decoded, pos = parsed.value[pos : pos + 8], pos + 8 |  | ||||||
|                             wire_type = WIRE_FIXED_64 |  | ||||||
|                         else: |  | ||||||
|                             decoded, pos = decode_varint(parsed.value, pos) |  | ||||||
|                             wire_type = WIRE_VARINT |  | ||||||
|                         decoded = self._postprocess_single( |  | ||||||
|                             wire_type, meta, field, decoded |  | ||||||
|                         ) |  | ||||||
|                         value.append(decoded) |  | ||||||
|                 else: |  | ||||||
|                     value = self._postprocess_single( |  | ||||||
|                         parsed.wire_type, meta, field, parsed.value |  | ||||||
|                     ) |  | ||||||
|  |  | ||||||
|                 current = getattr(self, field.name) |  | ||||||
|                 if meta.proto_type == TYPE_MAP: |  | ||||||
|                     # Value represents a single key/value pair entry in the map. |  | ||||||
|                     current[value.key] = value.value |  | ||||||
|                 elif isinstance(current, list) and not isinstance(value, list): |  | ||||||
|                     current.append(value) |  | ||||||
|                 else: |  | ||||||
|                     setattr(self, field.name, value) |  | ||||||
|             else: |  | ||||||
|                 self._unknown_fields += parsed.raw |                 self._unknown_fields += parsed.raw | ||||||
|  |                 continue | ||||||
|  |  | ||||||
|  |             meta = self._betterproto.meta_by_field_name[field_name] | ||||||
|  |  | ||||||
|  |             value: Any | ||||||
|  |             if parsed.wire_type == WIRE_LEN_DELIM and meta.proto_type in PACKED_TYPES: | ||||||
|  |                 # This is a packed repeated field. | ||||||
|  |                 pos = 0 | ||||||
|  |                 value = [] | ||||||
|  |                 while pos < len(parsed.value): | ||||||
|  |                     if meta.proto_type in ["float", "fixed32", "sfixed32"]: | ||||||
|  |                         decoded, pos = parsed.value[pos : pos + 4], pos + 4 | ||||||
|  |                         wire_type = WIRE_FIXED_32 | ||||||
|  |                     elif meta.proto_type in ["double", "fixed64", "sfixed64"]: | ||||||
|  |                         decoded, pos = parsed.value[pos : pos + 8], pos + 8 | ||||||
|  |                         wire_type = WIRE_FIXED_64 | ||||||
|  |                     else: | ||||||
|  |                         decoded, pos = decode_varint(parsed.value, pos) | ||||||
|  |                         wire_type = WIRE_VARINT | ||||||
|  |                     decoded = self._postprocess_single( | ||||||
|  |                         wire_type, meta, field_name, decoded | ||||||
|  |                     ) | ||||||
|  |                     value.append(decoded) | ||||||
|  |             else: | ||||||
|  |                 value = self._postprocess_single( | ||||||
|  |                     parsed.wire_type, meta, field_name, parsed.value | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|  |             current = getattr(self, field_name) | ||||||
|  |             if meta.proto_type == TYPE_MAP: | ||||||
|  |                 # Value represents a single key/value pair entry in the map. | ||||||
|  |                 current[value.key] = value.value | ||||||
|  |             elif isinstance(current, list) and not isinstance(value, list): | ||||||
|  |                 current.append(value) | ||||||
|  |             else: | ||||||
|  |                 setattr(self, field_name, value) | ||||||
|  |  | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
| @@ -780,7 +775,7 @@ class Message(ABC): | |||||||
|  |  | ||||||
|     def to_dict( |     def to_dict( | ||||||
|         self, casing: Casing = Casing.CAMEL, include_default_values: bool = False |         self, casing: Casing = Casing.CAMEL, include_default_values: bool = False | ||||||
|     ) -> dict: |     ) -> Dict[str, Any]: | ||||||
|         """ |         """ | ||||||
|         Returns a dict representation of this message instance which can be |         Returns a dict representation of this message instance which can be | ||||||
|         used to serialize to e.g. JSON. Defaults to camel casing for |         used to serialize to e.g. JSON. Defaults to camel casing for | ||||||
| @@ -792,10 +787,9 @@ class Message(ABC): | |||||||
|         `False`. |         `False`. | ||||||
|         """ |         """ | ||||||
|         output: Dict[str, Any] = {} |         output: Dict[str, Any] = {} | ||||||
|         for field in dataclasses.fields(self): |         for field_name, meta in self._betterproto.meta_by_field_name.items(): | ||||||
|             meta = FieldMetadata.get(field) |             v = getattr(self, field_name) | ||||||
|             v = getattr(self, field.name) |             cased_name = casing(field_name).rstrip("_")  # type: ignore | ||||||
|             cased_name = casing(field.name).rstrip("_")  # type: ignore |  | ||||||
|             if meta.proto_type == "message": |             if meta.proto_type == "message": | ||||||
|                 if isinstance(v, datetime): |                 if isinstance(v, datetime): | ||||||
|                     if v != DATETIME_ZERO or include_default_values: |                     if v != DATETIME_ZERO or include_default_values: | ||||||
| @@ -821,7 +815,7 @@ class Message(ABC): | |||||||
|  |  | ||||||
|                 if v or include_default_values: |                 if v or include_default_values: | ||||||
|                     output[cased_name] = v |                     output[cased_name] = v | ||||||
|             elif v != self._get_field_default(field, meta) or include_default_values: |             elif v != self._get_field_default(field_name) or include_default_values: | ||||||
|                 if meta.proto_type in INT_64_TYPES: |                 if meta.proto_type in INT_64_TYPES: | ||||||
|                     if isinstance(v, list): |                     if isinstance(v, list): | ||||||
|                         output[cased_name] = [str(n) for n in v] |                         output[cased_name] = [str(n) for n in v] | ||||||
| @@ -834,7 +828,7 @@ class Message(ABC): | |||||||
|                         output[cased_name] = b64encode(v).decode("utf8") |                         output[cased_name] = b64encode(v).decode("utf8") | ||||||
|                 elif meta.proto_type == TYPE_ENUM: |                 elif meta.proto_type == TYPE_ENUM: | ||||||
|                     enum_values = list( |                     enum_values = list( | ||||||
|                         self._betterproto.cls_by_field[field.name] |                         self._betterproto.cls_by_field[field_name] | ||||||
|                     )  # type: ignore |                     )  # type: ignore | ||||||
|                     if isinstance(v, list): |                     if isinstance(v, list): | ||||||
|                         output[cased_name] = [enum_values[e].name for e in v] |                         output[cased_name] = [enum_values[e].name for e in v] | ||||||
| @@ -852,56 +846,54 @@ class Message(ABC): | |||||||
|         self._serialized_on_wire = True |         self._serialized_on_wire = True | ||||||
|         fields_by_name = {f.name: f for f in dataclasses.fields(self)} |         fields_by_name = {f.name: f for f in dataclasses.fields(self)} | ||||||
|         for key in value: |         for key in value: | ||||||
|             snake_cased = safe_snake_case(key) |             field_name = safe_snake_case(key) | ||||||
|             if snake_cased in fields_by_name: |             meta = self._betterproto.meta_by_field_name.get(field_name) | ||||||
|                 field = fields_by_name[snake_cased] |             if not meta: | ||||||
|                 meta = FieldMetadata.get(field) |                 continue | ||||||
|  |  | ||||||
|                 if value[key] is not None: |             if value[key] is not None: | ||||||
|                     if meta.proto_type == "message": |                 if meta.proto_type == "message": | ||||||
|                         v = getattr(self, field.name) |                     v = getattr(self, field_name) | ||||||
|                         if isinstance(v, list): |                     if isinstance(v, list): | ||||||
|                             cls = self._betterproto.cls_by_field[field.name] |                         cls = self._betterproto.cls_by_field[field_name] | ||||||
|                             for i in range(len(value[key])): |                         for i in range(len(value[key])): | ||||||
|                                 v.append(cls().from_dict(value[key][i])) |                             v.append(cls().from_dict(value[key][i])) | ||||||
|                         elif isinstance(v, datetime): |                     elif isinstance(v, datetime): | ||||||
|                             v = datetime.fromisoformat( |                         v = datetime.fromisoformat(value[key].replace("Z", "+00:00")) | ||||||
|                                 value[key].replace("Z", "+00:00") |                         setattr(self, field_name, v) | ||||||
|                             ) |                     elif isinstance(v, timedelta): | ||||||
|                             setattr(self, field.name, v) |                         v = timedelta(seconds=float(value[key][:-1])) | ||||||
|                         elif isinstance(v, timedelta): |                         setattr(self, field_name, v) | ||||||
|                             v = timedelta(seconds=float(value[key][:-1])) |                     elif meta.wraps: | ||||||
|                             setattr(self, field.name, v) |                         setattr(self, field_name, value[key]) | ||||||
|                         elif meta.wraps: |  | ||||||
|                             setattr(self, field.name, value[key]) |  | ||||||
|                         else: |  | ||||||
|                             v.from_dict(value[key]) |  | ||||||
|                     elif meta.map_types and meta.map_types[1] == TYPE_MESSAGE: |  | ||||||
|                         v = getattr(self, field.name) |  | ||||||
|                         cls = self._betterproto.cls_by_field[field.name + ".value"] |  | ||||||
|                         for k in value[key]: |  | ||||||
|                             v[k] = cls().from_dict(value[key][k]) |  | ||||||
|                     else: |                     else: | ||||||
|                         v = value[key] |                         v.from_dict(value[key]) | ||||||
|                         if meta.proto_type in INT_64_TYPES: |                 elif meta.map_types and meta.map_types[1] == TYPE_MESSAGE: | ||||||
|                             if isinstance(value[key], list): |                     v = getattr(self, field_name) | ||||||
|                                 v = [int(n) for n in value[key]] |                     cls = self._betterproto.cls_by_field[field_name + ".value"] | ||||||
|                             else: |                     for k in value[key]: | ||||||
|                                 v = int(value[key]) |                         v[k] = cls().from_dict(value[key][k]) | ||||||
|                         elif meta.proto_type == TYPE_BYTES: |                 else: | ||||||
|                             if isinstance(value[key], list): |                     v = value[key] | ||||||
|                                 v = [b64decode(n) for n in value[key]] |                     if meta.proto_type in INT_64_TYPES: | ||||||
|                             else: |                         if isinstance(value[key], list): | ||||||
|                                 v = b64decode(value[key]) |                             v = [int(n) for n in value[key]] | ||||||
|                         elif meta.proto_type == TYPE_ENUM: |                         else: | ||||||
|                             enum_cls = self._betterproto.cls_by_field[field.name] |                             v = int(value[key]) | ||||||
|                             if isinstance(v, list): |                     elif meta.proto_type == TYPE_BYTES: | ||||||
|                                 v = [enum_cls.from_string(e) for e in v] |                         if isinstance(value[key], list): | ||||||
|                             elif isinstance(v, str): |                             v = [b64decode(n) for n in value[key]] | ||||||
|                                 v = enum_cls.from_string(v) |                         else: | ||||||
|  |                             v = b64decode(value[key]) | ||||||
|  |                     elif meta.proto_type == TYPE_ENUM: | ||||||
|  |                         enum_cls = self._betterproto.cls_by_field[field_name] | ||||||
|  |                         if isinstance(v, list): | ||||||
|  |                             v = [enum_cls.from_string(e) for e in v] | ||||||
|  |                         elif isinstance(v, str): | ||||||
|  |                             v = enum_cls.from_string(v) | ||||||
|  |  | ||||||
|                         if v is not None: |                     if v is not None: | ||||||
|                             setattr(self, field.name, v) |                         setattr(self, field_name, v) | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     def to_json(self, indent: Union[None, int, str] = None) -> str: |     def to_json(self, indent: Union[None, int, str] = None) -> str: | ||||||
| @@ -927,25 +919,29 @@ def serialized_on_wire(message: Message) -> bool: | |||||||
|  |  | ||||||
| def which_one_of(message: Message, group_name: str) -> Tuple[str, Any]: | def which_one_of(message: Message, group_name: str) -> Tuple[str, Any]: | ||||||
|     """Return the name and value of a message's one-of field group.""" |     """Return the name and value of a message's one-of field group.""" | ||||||
|     field = message._group_map.get(group_name) |     field_name = message._group_current.get(group_name) | ||||||
|     if not field: |     if not field_name: | ||||||
|         return ("", None) |         return ("", None) | ||||||
|     return (field.name, getattr(message, field.name)) |     return (field_name, getattr(message, field_name)) | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclasses.dataclass | # Circular import workaround: google.protobuf depends on base classes defined above. | ||||||
| class _Duration(Message): | from .lib.google.protobuf import ( | ||||||
|     # Signed seconds of the span of time. Must be from -315,576,000,000 to |     Duration, | ||||||
|     # +315,576,000,000 inclusive. Note: these bounds are computed from: 60 |     Timestamp, | ||||||
|     # sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years |     BoolValue, | ||||||
|     seconds: int = int64_field(1) |     BytesValue, | ||||||
|     # Signed fractions of a second at nanosecond resolution of the span of time. |     DoubleValue, | ||||||
|     # Durations less than one second are represented with a 0 `seconds` field and |     FloatValue, | ||||||
|     # a positive or negative `nanos` field. For durations of one second or more, |     Int32Value, | ||||||
|     # a non-zero value for the `nanos` field must be of the same sign as the |     Int64Value, | ||||||
|     # `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive. |     StringValue, | ||||||
|     nanos: int = int32_field(2) |     UInt32Value, | ||||||
|  |     UInt64Value, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class _Duration(Duration): | ||||||
|     def to_timedelta(self) -> timedelta: |     def to_timedelta(self) -> timedelta: | ||||||
|         return timedelta(seconds=self.seconds, microseconds=self.nanos / 1e3) |         return timedelta(seconds=self.seconds, microseconds=self.nanos / 1e3) | ||||||
|  |  | ||||||
| @@ -958,16 +954,7 @@ class _Duration(Message): | |||||||
|         return ".".join(parts) + "s" |         return ".".join(parts) + "s" | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclasses.dataclass | class _Timestamp(Timestamp): | ||||||
| class _Timestamp(Message): |  | ||||||
|     # Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must |  | ||||||
|     # be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. |  | ||||||
|     seconds: int = int64_field(1) |  | ||||||
|     # Non-negative fractions of a second at nanosecond resolution. Negative |  | ||||||
|     # second values with fractions must still have non-negative nanos values that |  | ||||||
|     # count forward in time. Must be from 0 to 999,999,999 inclusive. |  | ||||||
|     nanos: int = int32_field(2) |  | ||||||
|  |  | ||||||
|     def to_datetime(self) -> datetime: |     def to_datetime(self) -> datetime: | ||||||
|         ts = self.seconds + (self.nanos / 1e9) |         ts = self.seconds + (self.nanos / 1e9) | ||||||
|         return datetime.fromtimestamp(ts, tz=timezone.utc) |         return datetime.fromtimestamp(ts, tz=timezone.utc) | ||||||
| @@ -1008,141 +995,16 @@ class _WrappedMessage(Message): | |||||||
|         return self |         return self | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclasses.dataclass |  | ||||||
| class _BoolValue(_WrappedMessage): |  | ||||||
|     value: bool = bool_field(1) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclasses.dataclass |  | ||||||
| class _Int32Value(_WrappedMessage): |  | ||||||
|     value: int = int32_field(1) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclasses.dataclass |  | ||||||
| class _UInt32Value(_WrappedMessage): |  | ||||||
|     value: int = uint32_field(1) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclasses.dataclass |  | ||||||
| class _Int64Value(_WrappedMessage): |  | ||||||
|     value: int = int64_field(1) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclasses.dataclass |  | ||||||
| class _UInt64Value(_WrappedMessage): |  | ||||||
|     value: int = uint64_field(1) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclasses.dataclass |  | ||||||
| class _FloatValue(_WrappedMessage): |  | ||||||
|     value: float = float_field(1) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclasses.dataclass |  | ||||||
| class _DoubleValue(_WrappedMessage): |  | ||||||
|     value: float = double_field(1) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclasses.dataclass |  | ||||||
| class _StringValue(_WrappedMessage): |  | ||||||
|     value: str = string_field(1) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclasses.dataclass |  | ||||||
| class _BytesValue(_WrappedMessage): |  | ||||||
|     value: bytes = bytes_field(1) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def _get_wrapper(proto_type: str) -> Type: | def _get_wrapper(proto_type: str) -> Type: | ||||||
|     """Get the wrapper message class for a wrapped type.""" |     """Get the wrapper message class for a wrapped type.""" | ||||||
|     return { |     return { | ||||||
|         TYPE_BOOL: _BoolValue, |         TYPE_BOOL: BoolValue, | ||||||
|         TYPE_INT32: _Int32Value, |         TYPE_INT32: Int32Value, | ||||||
|         TYPE_UINT32: _UInt32Value, |         TYPE_UINT32: UInt32Value, | ||||||
|         TYPE_INT64: _Int64Value, |         TYPE_INT64: Int64Value, | ||||||
|         TYPE_UINT64: _UInt64Value, |         TYPE_UINT64: UInt64Value, | ||||||
|         TYPE_FLOAT: _FloatValue, |         TYPE_FLOAT: FloatValue, | ||||||
|         TYPE_DOUBLE: _DoubleValue, |         TYPE_DOUBLE: DoubleValue, | ||||||
|         TYPE_STRING: _StringValue, |         TYPE_STRING: StringValue, | ||||||
|         TYPE_BYTES: _BytesValue, |         TYPE_BYTES: BytesValue, | ||||||
|     }[proto_type] |     }[proto_type] | ||||||
|  |  | ||||||
|  |  | ||||||
| _Value = Union[str, bytes] |  | ||||||
| _MetadataLike = Union[Mapping[str, _Value], Collection[Tuple[str, _Value]]] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ServiceStub(ABC): |  | ||||||
|     """ |  | ||||||
|     Base class for async gRPC service stubs. |  | ||||||
|     """ |  | ||||||
|  |  | ||||||
|     def __init__( |  | ||||||
|         self, |  | ||||||
|         channel: "Channel", |  | ||||||
|         *, |  | ||||||
|         timeout: Optional[float] = None, |  | ||||||
|         deadline: Optional["Deadline"] = None, |  | ||||||
|         metadata: Optional[_MetadataLike] = None, |  | ||||||
|     ) -> None: |  | ||||||
|         self.channel = channel |  | ||||||
|         self.timeout = timeout |  | ||||||
|         self.deadline = deadline |  | ||||||
|         self.metadata = metadata |  | ||||||
|  |  | ||||||
|     def __resolve_request_kwargs( |  | ||||||
|         self, |  | ||||||
|         timeout: Optional[float], |  | ||||||
|         deadline: Optional["Deadline"], |  | ||||||
|         metadata: Optional[_MetadataLike], |  | ||||||
|     ): |  | ||||||
|         return { |  | ||||||
|             "timeout": self.timeout if timeout is None else timeout, |  | ||||||
|             "deadline": self.deadline if deadline is None else deadline, |  | ||||||
|             "metadata": self.metadata if metadata is None else metadata, |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|     async def _unary_unary( |  | ||||||
|         self, |  | ||||||
|         route: str, |  | ||||||
|         request: "IProtoMessage", |  | ||||||
|         response_type: Type[T], |  | ||||||
|         *, |  | ||||||
|         timeout: Optional[float] = None, |  | ||||||
|         deadline: Optional["Deadline"] = None, |  | ||||||
|         metadata: Optional[_MetadataLike] = None, |  | ||||||
|     ) -> T: |  | ||||||
|         """Make a unary request and return the response.""" |  | ||||||
|         async with self.channel.request( |  | ||||||
|             route, |  | ||||||
|             grpclib.const.Cardinality.UNARY_UNARY, |  | ||||||
|             type(request), |  | ||||||
|             response_type, |  | ||||||
|             **self.__resolve_request_kwargs(timeout, deadline, metadata), |  | ||||||
|         ) as stream: |  | ||||||
|             await stream.send_message(request, end=True) |  | ||||||
|             response = await stream.recv_message() |  | ||||||
|             assert response is not None |  | ||||||
|             return response |  | ||||||
|  |  | ||||||
|     async def _unary_stream( |  | ||||||
|         self, |  | ||||||
|         route: str, |  | ||||||
|         request: "IProtoMessage", |  | ||||||
|         response_type: Type[T], |  | ||||||
|         *, |  | ||||||
|         timeout: Optional[float] = None, |  | ||||||
|         deadline: Optional["Deadline"] = None, |  | ||||||
|         metadata: Optional[_MetadataLike] = None, |  | ||||||
|     ) -> AsyncGenerator[T, None]: |  | ||||||
|         """Make a unary request and return the stream response iterator.""" |  | ||||||
|         async with self.channel.request( |  | ||||||
|             route, |  | ||||||
|             grpclib.const.Cardinality.UNARY_STREAM, |  | ||||||
|             type(request), |  | ||||||
|             response_type, |  | ||||||
|             **self.__resolve_request_kwargs(timeout, deadline, metadata), |  | ||||||
|         ) as stream: |  | ||||||
|             await stream.send_message(request, end=True) |  | ||||||
|             async for message in stream: |  | ||||||
|                 yield message |  | ||||||
|   | |||||||
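As a quick illustration of the `_group_current` bookkeeping introduced above: assigning one member of a `oneof` group makes it the selected value, resets the other members to their defaults, and `which_one_of` now reports the selected field by name. The message below is hypothetical, and it assumes the `*_field` helpers accept a `group` argument.

```python
from dataclasses import dataclass

import betterproto


@dataclass
class Example(betterproto.Message):
    name: str = betterproto.string_field(1, group="id")
    number: int = betterproto.int32_field(2, group="id")


example = Example(number=7)
assert betterproto.which_one_of(example, "id") == ("number", 7)

# Setting the other member of the group selects it and resets `number`.
example.name = "x"
assert betterproto.which_one_of(example, "id") == ("name", "x")
assert example.number == 0
```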
							
								
								
									
betterproto/_types.py (new file, 9 lines)
							| @@ -0,0 +1,9 @@ | |||||||
|  | from typing import TYPE_CHECKING, TypeVar | ||||||
|  |  | ||||||
|  | if TYPE_CHECKING: | ||||||
|  |     from . import Message | ||||||
|  |     from grpclib._protocols import IProtoMessage | ||||||
|  |  | ||||||
|  | # Bound type variable to allow methods to return `self` of subclasses | ||||||
|  | T = TypeVar("T", bound="Message") | ||||||
|  | ST = TypeVar("ST", bound="IProtoMessage") | ||||||
| @@ -1,9 +1,21 @@ | |||||||
| import stringcase | import re | ||||||
|  |  | ||||||
|  | # Word delimiters and symbols that will not be preserved when re-casing. | ||||||
|  | # language=PythonRegExp | ||||||
|  | SYMBOLS = "[^a-zA-Z0-9]*" | ||||||
|  |  | ||||||
|  | # Optionally capitalized word. | ||||||
|  | # language=PythonRegExp | ||||||
|  | WORD = "[A-Z]*[a-z]*[0-9]*" | ||||||
|  |  | ||||||
|  | # Uppercase word, not followed by lowercase letters. | ||||||
|  | # language=PythonRegExp | ||||||
|  | WORD_UPPER = "[A-Z]+(?![a-z])[0-9]*" | ||||||
|  |  | ||||||
|  |  | ||||||
| def safe_snake_case(value: str) -> str: | def safe_snake_case(value: str) -> str: | ||||||
|     """Snake case a value taking into account Python keywords.""" |     """Snake case a value taking into account Python keywords.""" | ||||||
|     value = stringcase.snakecase(value) |     value = snake_case(value) | ||||||
|     if value in [ |     if value in [ | ||||||
|         "and", |         "and", | ||||||
|         "as", |         "as", | ||||||
| @@ -39,3 +51,70 @@ def safe_snake_case(value: str) -> str: | |||||||
|         # https://www.python.org/dev/peps/pep-0008/#descriptive-naming-styles |         # https://www.python.org/dev/peps/pep-0008/#descriptive-naming-styles | ||||||
|         value += "_" |         value += "_" | ||||||
|     return value |     return value | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def snake_case(value: str, strict: bool = True): | ||||||
|  |     """ | ||||||
|  |     Join words with an underscore into lowercase and remove symbols. | ||||||
|  |     @param value: value to convert | ||||||
|  |     @param strict: force single underscores | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     def substitute_word(symbols, word, is_start): | ||||||
|  |         if not word: | ||||||
|  |             return "" | ||||||
|  |         if strict: | ||||||
|  |             delimiter_count = 0 if is_start else 1  # Single underscore if strict. | ||||||
|  |         elif is_start: | ||||||
|  |             delimiter_count = len(symbols) | ||||||
|  |         elif word.isupper() or word.islower(): | ||||||
|  |             delimiter_count = max( | ||||||
|  |                 1, len(symbols) | ||||||
|  |             )  # Preserve all delimiters if not strict. | ||||||
|  |         else: | ||||||
|  |             delimiter_count = len(symbols) + 1  # Extra underscore for leading capital. | ||||||
|  |  | ||||||
|  |         return ("_" * delimiter_count) + word.lower() | ||||||
|  |  | ||||||
|  |     snake = re.sub( | ||||||
|  |         f"(^)?({SYMBOLS})({WORD_UPPER}|{WORD})", | ||||||
|  |         lambda groups: substitute_word(groups[2], groups[3], groups[1] is not None), | ||||||
|  |         value, | ||||||
|  |     ) | ||||||
|  |     return snake | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def pascal_case(value: str, strict: bool = True): | ||||||
|  |     """ | ||||||
|  |     Capitalize each word and remove symbols. | ||||||
|  |     @param value: value to convert | ||||||
|  |     @param strict: output only alphanumeric characters | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     def substitute_word(symbols, word): | ||||||
|  |         if strict: | ||||||
|  |             return word.capitalize()  # Remove all delimiters | ||||||
|  |  | ||||||
|  |         if word.islower(): | ||||||
|  |             delimiter_length = len(symbols[:-1])  # Lose one delimiter | ||||||
|  |         else: | ||||||
|  |             delimiter_length = len(symbols)  # Preserve all delimiters | ||||||
|  |  | ||||||
|  |         return ("_" * delimiter_length) + word.capitalize() | ||||||
|  |  | ||||||
|  |     return re.sub( | ||||||
|  |         f"({SYMBOLS})({WORD_UPPER}|{WORD})", | ||||||
|  |         lambda groups: substitute_word(groups[1], groups[2]), | ||||||
|  |         value, | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def camel_case(value: str, strict: bool = True): | ||||||
|  |     """ | ||||||
|  |     Capitalize all words except first and remove symbols. | ||||||
|  |     """ | ||||||
|  |     return lowercase_first(pascal_case(value, strict=strict)) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def lowercase_first(value: str): | ||||||
|  |     return value[0:1].lower() + value[1:] | ||||||
|   | |||||||
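The new `casing` helpers replace `stringcase`. A rough sketch of the behaviour implied by the regular expressions above (not taken from separate documentation):

```python
from betterproto.casing import camel_case, pascal_case, snake_case

assert snake_case("ThisIsATest") == "this_is_a_test"
assert snake_case("HTTPResponse") == "http_response"
assert pascal_case("this_is_a_test") == "ThisIsATest"
assert camel_case("this_is_a_test") == "thisIsATest"
```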
							
								
								
									
betterproto/compile/__init__.py (new empty file)

betterproto/compile/importing.py (new file, 160 lines)
							| @@ -0,0 +1,160 @@ | |||||||
|  | import os | ||||||
|  | import re | ||||||
|  | from typing import Dict, List, Set, Type | ||||||
|  |  | ||||||
|  | from betterproto import safe_snake_case | ||||||
|  | from betterproto.compile.naming import pythonize_class_name | ||||||
|  | from betterproto.lib.google import protobuf as google_protobuf | ||||||
|  |  | ||||||
|  | WRAPPER_TYPES: Dict[str, Type] = { | ||||||
|  |     ".google.protobuf.DoubleValue": google_protobuf.DoubleValue, | ||||||
|  |     ".google.protobuf.FloatValue": google_protobuf.FloatValue, | ||||||
|  |     ".google.protobuf.Int32Value": google_protobuf.Int32Value, | ||||||
|  |     ".google.protobuf.Int64Value": google_protobuf.Int64Value, | ||||||
|  |     ".google.protobuf.UInt32Value": google_protobuf.UInt32Value, | ||||||
|  |     ".google.protobuf.UInt64Value": google_protobuf.UInt64Value, | ||||||
|  |     ".google.protobuf.BoolValue": google_protobuf.BoolValue, | ||||||
|  |     ".google.protobuf.StringValue": google_protobuf.StringValue, | ||||||
|  |     ".google.protobuf.BytesValue": google_protobuf.BytesValue, | ||||||
|  | } | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def parse_source_type_name(field_type_name): | ||||||
|  |     """ | ||||||
|  |     Split full source type name into package and type name. | ||||||
|  |     E.g. 'root.package.Message' -> ('root.package', 'Message') | ||||||
|  |          'root.Message.SomeEnum' -> ('root', 'Message.SomeEnum') | ||||||
|  |     """ | ||||||
|  |     package_match = re.match(r"^\.?([^A-Z]+)\.(.+)", field_type_name) | ||||||
|  |     if package_match: | ||||||
|  |         package = package_match.group(1) | ||||||
|  |         name = package_match.group(2) | ||||||
|  |     else: | ||||||
|  |         package = "" | ||||||
|  |         name = field_type_name.lstrip(".") | ||||||
|  |     return package, name | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def get_type_reference( | ||||||
|  |     package: str, imports: set, source_type: str, unwrap: bool = True, | ||||||
|  | ) -> str: | ||||||
|  |     """ | ||||||
|  |     Return a Python type name for a proto type reference. Adds the import if | ||||||
|  |     necessary. Unwraps well known type if required. | ||||||
|  |     """ | ||||||
|  |     if unwrap: | ||||||
|  |         if source_type in WRAPPER_TYPES: | ||||||
|  |             wrapped_type = type(WRAPPER_TYPES[source_type]().value) | ||||||
|  |             return f"Optional[{wrapped_type.__name__}]" | ||||||
|  |  | ||||||
|  |         if source_type == ".google.protobuf.Duration": | ||||||
|  |             return "timedelta" | ||||||
|  |  | ||||||
|  |         if source_type == ".google.protobuf.Timestamp": | ||||||
|  |             return "datetime" | ||||||
|  |  | ||||||
|  |     source_package, source_type = parse_source_type_name(source_type) | ||||||
|  |  | ||||||
|  |     current_package: List[str] = package.split(".") if package else [] | ||||||
|  |     py_package: List[str] = source_package.split(".") if source_package else [] | ||||||
|  |     py_type: str = pythonize_class_name(source_type) | ||||||
|  |  | ||||||
|  |     compiling_google_protobuf = current_package == ["google", "protobuf"] | ||||||
|  |     importing_google_protobuf = py_package == ["google", "protobuf"] | ||||||
|  |     if importing_google_protobuf and not compiling_google_protobuf: | ||||||
|  |         py_package = ["betterproto", "lib"] + py_package | ||||||
|  |  | ||||||
|  |     if py_package[:1] == ["betterproto"]: | ||||||
|  |         return reference_absolute(imports, py_package, py_type) | ||||||
|  |  | ||||||
|  |     if py_package == current_package: | ||||||
|  |         return reference_sibling(py_type) | ||||||
|  |  | ||||||
|  |     if py_package[: len(current_package)] == current_package: | ||||||
|  |         return reference_descendent(current_package, imports, py_package, py_type) | ||||||
|  |  | ||||||
|  |     if current_package[: len(py_package)] == py_package: | ||||||
|  |         return reference_ancestor(current_package, imports, py_package, py_type) | ||||||
|  |  | ||||||
|  |     return reference_cousin(current_package, imports, py_package, py_type) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def reference_absolute(imports, py_package, py_type): | ||||||
|  |     """ | ||||||
|  |     Returns a reference to a python type located in the root, i.e. sys.path. | ||||||
|  |     """ | ||||||
|  |     string_import = ".".join(py_package) | ||||||
|  |     string_alias = safe_snake_case(string_import) | ||||||
|  |     imports.add(f"import {string_import} as {string_alias}") | ||||||
|  |     return f"{string_alias}.{py_type}" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def reference_sibling(py_type: str) -> str: | ||||||
|  |     """ | ||||||
|  |     Returns a reference to a python type within the same package as the current package. | ||||||
|  |     """ | ||||||
|  |     return f'"{py_type}"' | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def reference_descendent( | ||||||
|  |     current_package: List[str], imports: Set[str], py_package: List[str], py_type: str | ||||||
|  | ) -> str: | ||||||
|  |     """ | ||||||
|  |     Returns a reference to a python type in a package that is a descendent of the current package, | ||||||
|  |     and adds the required import that is aliased to avoid name conflicts. | ||||||
|  |     """ | ||||||
|  |     importing_descendent = py_package[len(current_package) :] | ||||||
|  |     string_from = ".".join(importing_descendent[:-1]) | ||||||
|  |     string_import = importing_descendent[-1] | ||||||
|  |     if string_from: | ||||||
|  |         string_alias = "_".join(importing_descendent) | ||||||
|  |         imports.add(f"from .{string_from} import {string_import} as {string_alias}") | ||||||
|  |         return f"{string_alias}.{py_type}" | ||||||
|  |     else: | ||||||
|  |         imports.add(f"from . import {string_import}") | ||||||
|  |         return f"{string_import}.{py_type}" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def reference_ancestor( | ||||||
|  |     current_package: List[str], imports: Set[str], py_package: List[str], py_type: str | ||||||
|  | ) -> str: | ||||||
|  |     """ | ||||||
|  |     Returns a reference to a python type in a package which is an ancestor to the current package, | ||||||
|  |     and adds the required import that is aliased (if possible) to avoid name conflicts. | ||||||
|  |  | ||||||
|  |     Adds trailing __ to avoid name mangling (python.org/dev/peps/pep-0008/#id34). | ||||||
|  |     """ | ||||||
|  |     distance_up = len(current_package) - len(py_package) | ||||||
|  |     if py_package: | ||||||
|  |         string_import = py_package[-1] | ||||||
|  |         string_alias = f"_{'_' * distance_up}{string_import}__" | ||||||
|  |         string_from = f"..{'.' * distance_up}" | ||||||
|  |         imports.add(f"from {string_from} import {string_import} as {string_alias}") | ||||||
|  |         return f"{string_alias}.{py_type}" | ||||||
|  |     else: | ||||||
|  |         string_alias = f"{'_' * distance_up}{py_type}__" | ||||||
|  |         imports.add(f"from .{'.' * distance_up} import {py_type} as {string_alias}") | ||||||
|  |         return string_alias | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def reference_cousin( | ||||||
|  |     current_package: List[str], imports: Set[str], py_package: List[str], py_type: str | ||||||
|  | ) -> str: | ||||||
|  |     """ | ||||||
|  |     Returns a reference to a python type in a package that is not descendent, ancestor or sibling, | ||||||
|  |     and adds the required import that is aliased to avoid name conflicts. | ||||||
|  |     """ | ||||||
|  |     shared_ancestry = os.path.commonprefix([current_package, py_package]) | ||||||
|  |     distance_up = len(current_package) - len(shared_ancestry) | ||||||
|  |     string_from = f".{'.' * distance_up}" + ".".join( | ||||||
|  |         py_package[len(shared_ancestry) : -1] | ||||||
|  |     ) | ||||||
|  |     string_import = py_package[-1] | ||||||
|  |     # Add trailing __ to avoid name mangling (python.org/dev/peps/pep-0008/#id34) | ||||||
|  |     string_alias = ( | ||||||
|  |         f"{'_' * distance_up}" | ||||||
|  |         + safe_snake_case(".".join(py_package[len(shared_ancestry) :])) | ||||||
|  |         + "__" | ||||||
|  |     ) | ||||||
|  |     imports.add(f"from {string_from} import {string_import} as {string_alias}") | ||||||
|  |     return f"{string_alias}.{py_type}" | ||||||
							
								
								
									
13 betterproto/compile/naming.py Normal file
							| @@ -0,0 +1,13 @@ | |||||||
|  | from betterproto import casing | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def pythonize_class_name(name: str): | ||||||
|  |     return casing.pascal_case(name) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def pythonize_field_name(name: str): | ||||||
|  |     return casing.safe_snake_case(name) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def pythonize_method_name(name: str): | ||||||
|  |     return casing.safe_snake_case(name) | ||||||
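A quick illustration of the expected behaviour, assuming `betterproto.casing.pascal_case` and `safe_snake_case` convert as their names suggest; the sample inputs and expected outputs below are illustrative, not taken from the test suite:

```python
from betterproto.compile.naming import (
    pythonize_class_name,
    pythonize_field_name,
    pythonize_method_name,
)

# Expected conversions (illustrative):
print(pythonize_class_name("my_message"))   # MyMessage
print(pythonize_field_name("someField"))    # some_field
print(pythonize_method_name("GetThing"))    # get_thing
```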
							
								
								
									
0 betterproto/grpc/__init__.py Normal file
170 betterproto/grpc/grpclib_client.py Normal file
							| @@ -0,0 +1,170 @@ | |||||||
|  | from abc import ABC | ||||||
|  | import asyncio | ||||||
|  | import grpclib.const | ||||||
|  | from typing import ( | ||||||
|  |     Any, | ||||||
|  |     AsyncIterable, | ||||||
|  |     AsyncIterator, | ||||||
|  |     Collection, | ||||||
|  |     Iterable, | ||||||
|  |     Mapping, | ||||||
|  |     Optional, | ||||||
|  |     Tuple, | ||||||
|  |     TYPE_CHECKING, | ||||||
|  |     Type, | ||||||
|  |     Union, | ||||||
|  | ) | ||||||
|  | from .._types import ST, T | ||||||
|  |  | ||||||
|  | if TYPE_CHECKING: | ||||||
|  |     from grpclib._protocols import IProtoMessage | ||||||
|  |     from grpclib.client import Channel, Stream | ||||||
|  |     from grpclib.metadata import Deadline | ||||||
|  |  | ||||||
|  |  | ||||||
|  | _Value = Union[str, bytes] | ||||||
|  | _MetadataLike = Union[Mapping[str, _Value], Collection[Tuple[str, _Value]]] | ||||||
|  | _MessageSource = Union[Iterable["IProtoMessage"], AsyncIterable["IProtoMessage"]] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class ServiceStub(ABC): | ||||||
|  |     """ | ||||||
|  |     Base class for async gRPC clients. | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     def __init__( | ||||||
|  |         self, | ||||||
|  |         channel: "Channel", | ||||||
|  |         *, | ||||||
|  |         timeout: Optional[float] = None, | ||||||
|  |         deadline: Optional["Deadline"] = None, | ||||||
|  |         metadata: Optional[_MetadataLike] = None, | ||||||
|  |     ) -> None: | ||||||
|  |         self.channel = channel | ||||||
|  |         self.timeout = timeout | ||||||
|  |         self.deadline = deadline | ||||||
|  |         self.metadata = metadata | ||||||
|  |  | ||||||
|  |     def __resolve_request_kwargs( | ||||||
|  |         self, | ||||||
|  |         timeout: Optional[float], | ||||||
|  |         deadline: Optional["Deadline"], | ||||||
|  |         metadata: Optional[_MetadataLike], | ||||||
|  |     ): | ||||||
|  |         return { | ||||||
|  |             "timeout": self.timeout if timeout is None else timeout, | ||||||
|  |             "deadline": self.deadline if deadline is None else deadline, | ||||||
|  |             "metadata": self.metadata if metadata is None else metadata, | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |     async def _unary_unary( | ||||||
|  |         self, | ||||||
|  |         route: str, | ||||||
|  |         request: "IProtoMessage", | ||||||
|  |         response_type: Type[T], | ||||||
|  |         *, | ||||||
|  |         timeout: Optional[float] = None, | ||||||
|  |         deadline: Optional["Deadline"] = None, | ||||||
|  |         metadata: Optional[_MetadataLike] = None, | ||||||
|  |     ) -> T: | ||||||
|  |         """Make a unary request and return the response.""" | ||||||
|  |         async with self.channel.request( | ||||||
|  |             route, | ||||||
|  |             grpclib.const.Cardinality.UNARY_UNARY, | ||||||
|  |             type(request), | ||||||
|  |             response_type, | ||||||
|  |             **self.__resolve_request_kwargs(timeout, deadline, metadata), | ||||||
|  |         ) as stream: | ||||||
|  |             await stream.send_message(request, end=True) | ||||||
|  |             response = await stream.recv_message() | ||||||
|  |             assert response is not None | ||||||
|  |             return response | ||||||
|  |  | ||||||
|  |     async def _unary_stream( | ||||||
|  |         self, | ||||||
|  |         route: str, | ||||||
|  |         request: "IProtoMessage", | ||||||
|  |         response_type: Type[T], | ||||||
|  |         *, | ||||||
|  |         timeout: Optional[float] = None, | ||||||
|  |         deadline: Optional["Deadline"] = None, | ||||||
|  |         metadata: Optional[_MetadataLike] = None, | ||||||
|  |     ) -> AsyncIterator[T]: | ||||||
|  |         """Make a unary request and return the stream response iterator.""" | ||||||
|  |         async with self.channel.request( | ||||||
|  |             route, | ||||||
|  |             grpclib.const.Cardinality.UNARY_STREAM, | ||||||
|  |             type(request), | ||||||
|  |             response_type, | ||||||
|  |             **self.__resolve_request_kwargs(timeout, deadline, metadata), | ||||||
|  |         ) as stream: | ||||||
|  |             await stream.send_message(request, end=True) | ||||||
|  |             async for message in stream: | ||||||
|  |                 yield message | ||||||
|  |  | ||||||
|  |     async def _stream_unary( | ||||||
|  |         self, | ||||||
|  |         route: str, | ||||||
|  |         request_iterator: _MessageSource, | ||||||
|  |         request_type: Type[ST], | ||||||
|  |         response_type: Type[T], | ||||||
|  |         *, | ||||||
|  |         timeout: Optional[float] = None, | ||||||
|  |         deadline: Optional["Deadline"] = None, | ||||||
|  |         metadata: Optional[_MetadataLike] = None, | ||||||
|  |     ) -> T: | ||||||
|  |         """Make a stream request and return the response.""" | ||||||
|  |         async with self.channel.request( | ||||||
|  |             route, | ||||||
|  |             grpclib.const.Cardinality.STREAM_UNARY, | ||||||
|  |             request_type, | ||||||
|  |             response_type, | ||||||
|  |             **self.__resolve_request_kwargs(timeout, deadline, metadata), | ||||||
|  |         ) as stream: | ||||||
|  |             await self._send_messages(stream, request_iterator) | ||||||
|  |             response = await stream.recv_message() | ||||||
|  |             assert response is not None | ||||||
|  |             return response | ||||||
|  |  | ||||||
|  |     async def _stream_stream( | ||||||
|  |         self, | ||||||
|  |         route: str, | ||||||
|  |         request_iterator: _MessageSource, | ||||||
|  |         request_type: Type[ST], | ||||||
|  |         response_type: Type[T], | ||||||
|  |         *, | ||||||
|  |         timeout: Optional[float] = None, | ||||||
|  |         deadline: Optional["Deadline"] = None, | ||||||
|  |         metadata: Optional[_MetadataLike] = None, | ||||||
|  |     ) -> AsyncIterator[T]: | ||||||
|  |         """ | ||||||
|  |         Make a stream request and return an AsyncIterator to iterate over response | ||||||
|  |         messages. | ||||||
|  |         """ | ||||||
|  |         async with self.channel.request( | ||||||
|  |             route, | ||||||
|  |             grpclib.const.Cardinality.STREAM_STREAM, | ||||||
|  |             request_type, | ||||||
|  |             response_type, | ||||||
|  |             **self.__resolve_request_kwargs(timeout, deadline, metadata), | ||||||
|  |         ) as stream: | ||||||
|  |             await stream.send_request() | ||||||
|  |             sending_task = asyncio.ensure_future( | ||||||
|  |                 self._send_messages(stream, request_iterator) | ||||||
|  |             ) | ||||||
|  |             try: | ||||||
|  |                 async for response in stream: | ||||||
|  |                     yield response | ||||||
|  |             except: | ||||||
|  |                 sending_task.cancel() | ||||||
|  |                 raise | ||||||
|  |  | ||||||
|  |     @staticmethod | ||||||
|  |     async def _send_messages(stream, messages: _MessageSource): | ||||||
|  |         if isinstance(messages, AsyncIterable): | ||||||
|  |             async for message in messages: | ||||||
|  |                 await stream.send_message(message) | ||||||
|  |         else: | ||||||
|  |             for message in messages: | ||||||
|  |                 await stream.send_message(message) | ||||||
|  |         await stream.end() | ||||||
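To make the role of `ServiceStub` concrete, here is a hedged sketch of the kind of client the plugin is expected to generate on top of it. `EchoRequest`, `EchoResponse`, `EchoStub` and the route string are invented for illustration, and the example assumes a gRPC server is listening on the given port:

```python
import asyncio
from dataclasses import dataclass

import betterproto
from betterproto.grpc.grpclib_client import ServiceStub
from grpclib.client import Channel


@dataclass
class EchoRequest(betterproto.Message):
    value: str = betterproto.string_field(1)


@dataclass
class EchoResponse(betterproto.Message):
    value: str = betterproto.string_field(1)


class EchoStub(ServiceStub):
    # Hand-written stand-in for what the plugin template generates.
    async def echo(self, *, value: str = "") -> EchoResponse:
        request = EchoRequest()
        request.value = value
        return await self._unary_unary("/echo.Echo/Echo", request, EchoResponse)


async def main() -> None:
    channel = Channel(host="127.0.0.1", port=50051)
    try:
        response = await EchoStub(channel).echo(value="hello")
        print(response.value)
    finally:
        channel.close()


asyncio.run(main())
```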
							
								
								
									
0 betterproto/grpc/util/__init__.py Normal file
198 betterproto/grpc/util/async_channel.py Normal file
							| @@ -0,0 +1,198 @@ | |||||||
|  | import asyncio | ||||||
|  | from typing import ( | ||||||
|  |     AsyncIterable, | ||||||
|  |     AsyncIterator, | ||||||
|  |     Iterable, | ||||||
|  |     Optional, | ||||||
|  |     TypeVar, | ||||||
|  |     Union, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | T = TypeVar("T") | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class ChannelClosed(Exception): | ||||||
|  |     """ | ||||||
|  |     An exception raised on an attempt to send through a closed channel | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class ChannelDone(Exception): | ||||||
|  |     """ | ||||||
|  |     An exception raised on an attempt to receive from a channel that is both closed | ||||||
|  |     and empty. | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class AsyncChannel(AsyncIterable[T]): | ||||||
|  |     """ | ||||||
|  |     A buffered async channel for sending items between coroutines with FIFO ordering. | ||||||
|  |  | ||||||
|  |     This makes decoupled bidirectional streaming gRPC requests easy if used like: | ||||||
|  |  | ||||||
|  |     .. code-block:: python | ||||||
|  |         client = GeneratedStub(grpclib_chan) | ||||||
|  |         request_chan = AsyncChannel() | ||||||
|  |         # We can start by sending all the requests we already have | ||||||
|  |         await request_chan.send_from([RequestObject(...), RequestObject(...)]) | ||||||
|  |         async for response in client.rpc_call(request_chan): | ||||||
|  |             # The response iterator will remain active until the connection is closed | ||||||
|  |             ... | ||||||
|  |             # More items can be sent at any time | ||||||
|  |             await request_chan.send(RequestObject(...)) | ||||||
|  |             ... | ||||||
|  |             # The channel must be closed to complete the gRPC connection | ||||||
|  |             request_chan.close() | ||||||
|  |  | ||||||
|  |     Items can be sent through the channel by either: | ||||||
|  |     - providing an iterable to the send_from method | ||||||
|  |     - passing them to the send method one at a time | ||||||
|  |  | ||||||
|  |     Items can be received from the channel by either: | ||||||
|  |     - iterating over the channel with a for loop to get all items | ||||||
|  |     - calling the recieve method to get one item at a time | ||||||
|  |  | ||||||
|  |     If the channel is empty then receivers will wait until either an item appears or the | ||||||
|  |     channel is closed. | ||||||
|  |  | ||||||
|  |     Once the channel is closed then subsequent attempts to send through the channel will | ||||||
|  |     fail with a ChannelClosed exception. | ||||||
|  |  | ||||||
|  |     When the channel is closed and empty then it is done, and further attempts to receive | ||||||
|  |     from it will fail with a ChannelDone exception. | ||||||
|  |  | ||||||
|  |     If multiple coroutines receive from the channel concurrently, each item sent will be | ||||||
|  |     received by only one of the receivers. | ||||||
|  |  | ||||||
|  |     :param source: | ||||||
|  |         An optional iterable with items that should be sent through the channel | ||||||
|  |         immediately. | ||||||
|  |     :param buffer_limit: | ||||||
|  |         Limit the number of items that can be buffered in the channel. A value less than | ||||||
|  |         1 implies no limit. If the channel is full then attempts to send more items will | ||||||
|  |         result in the sender waiting until an item is received from the channel. | ||||||
|  |     :param close: | ||||||
|  |         If set to True then the channel will automatically close after exhausting source | ||||||
|  |         or immediately if no source is provided. | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     def __init__( | ||||||
|  |         self, *, buffer_limit: int = 0, close: bool = False, | ||||||
|  |     ): | ||||||
|  |         self._queue: asyncio.Queue[Union[T, object]] = asyncio.Queue(buffer_limit) | ||||||
|  |         self._closed = False | ||||||
|  |         self._waiting_recievers: int = 0 | ||||||
|  |         # Track whether flush has been invoked so it can only happen once | ||||||
|  |         self._flushed = False | ||||||
|  |  | ||||||
|  |     def __aiter__(self) -> AsyncIterator[T]: | ||||||
|  |         return self | ||||||
|  |  | ||||||
|  |     async def __anext__(self) -> T: | ||||||
|  |         if self.done(): | ||||||
|  |             raise StopAsyncIteration | ||||||
|  |         self._waiting_recievers += 1 | ||||||
|  |         try: | ||||||
|  |             result = await self._queue.get() | ||||||
|  |             if result is self.__flush: | ||||||
|  |                 raise StopAsyncIteration | ||||||
|  |             return result | ||||||
|  |         finally: | ||||||
|  |             self._waiting_recievers -= 1 | ||||||
|  |             self._queue.task_done() | ||||||
|  |  | ||||||
|  |     def closed(self) -> bool: | ||||||
|  |         """ | ||||||
|  |         Returns True if this channel is closed and no longer accepting new items. | ||||||
|  |         """ | ||||||
|  |         return self._closed | ||||||
|  |  | ||||||
|  |     def done(self) -> bool: | ||||||
|  |         """ | ||||||
|  |         Check if this channel is done. | ||||||
|  |  | ||||||
|  |         :return: True if this channel is closed and has been drained of items, in | ||||||
|  |         which case any further attempts to receive an item from this channel will raise | ||||||
|  |         a ChannelDone exception. | ||||||
|  |         """ | ||||||
|  |         # After close the channel is not yet done until there is at least one waiting | ||||||
|  |         # receiver per enqueued item. | ||||||
|  |         return self._closed and self._queue.qsize() <= self._waiting_recievers | ||||||
|  |  | ||||||
|  |     async def send_from( | ||||||
|  |         self, source: Union[Iterable[T], AsyncIterable[T]], close: bool = False | ||||||
|  |     ) -> "AsyncChannel[T]": | ||||||
|  |         """ | ||||||
|  |         Iterates the given [Async]Iterable and sends all the resulting items. | ||||||
|  |         If close is set to True then subsequent send calls will be rejected with a | ||||||
|  |         ChannelClosed exception. | ||||||
|  |         :param source: an iterable of items to send | ||||||
|  |         :param close: | ||||||
|  |             if True then the channel will be closed after the source has been exhausted | ||||||
|  |  | ||||||
|  |         """ | ||||||
|  |         if self._closed: | ||||||
|  |             raise ChannelClosed("Cannot send through a closed channel") | ||||||
|  |         if isinstance(source, AsyncIterable): | ||||||
|  |             async for item in source: | ||||||
|  |                 await self._queue.put(item) | ||||||
|  |         else: | ||||||
|  |             for item in source: | ||||||
|  |                 await self._queue.put(item) | ||||||
|  |         if close: | ||||||
|  |             # Complete the closing process | ||||||
|  |             self.close() | ||||||
|  |         return self | ||||||
|  |  | ||||||
|  |     async def send(self, item: T) -> "AsyncChannel[T]": | ||||||
|  |         """ | ||||||
|  |         Send a single item over this channel. | ||||||
|  |         :param item: The item to send | ||||||
|  |         """ | ||||||
|  |         if self._closed: | ||||||
|  |             raise ChannelClosed("Cannot send through a closed channel") | ||||||
|  |         await self._queue.put(item) | ||||||
|  |         return self | ||||||
|  |  | ||||||
|  |     async def recieve(self) -> Optional[T]: | ||||||
|  |         """ | ||||||
|  |         Returns the next item from this channel when it becomes available, | ||||||
|  |         or None if the channel is closed before another item is sent. | ||||||
|  |         :return: An item from the channel | ||||||
|  |         """ | ||||||
|  |         if self.done(): | ||||||
|  |             raise ChannelDone("Cannot recieve from a closed channel") | ||||||
|  |         self._waiting_recievers += 1 | ||||||
|  |         try: | ||||||
|  |             result = await self._queue.get() | ||||||
|  |             if result is self.__flush: | ||||||
|  |                 return None | ||||||
|  |             return result | ||||||
|  |         finally: | ||||||
|  |             self._waiting_recievers -= 1 | ||||||
|  |             self._queue.task_done() | ||||||
|  |  | ||||||
|  |     def close(self): | ||||||
|  |         """ | ||||||
|  |         Close this channel to new items | ||||||
|  |         """ | ||||||
|  |         self._closed = True | ||||||
|  |         asyncio.ensure_future(self._flush_queue()) | ||||||
|  |  | ||||||
|  |     async def _flush_queue(self): | ||||||
|  |         """ | ||||||
|  |         To be called after the channel is closed. Pushes a number of self.__flush | ||||||
|  |         objects to the queue to ensure no waiting consumers get deadlocked. | ||||||
|  |         """ | ||||||
|  |         if not self._flushed: | ||||||
|  |             self._flushed = True | ||||||
|  |             deadlocked_recievers = max(0, self._waiting_recievers - self._queue.qsize()) | ||||||
|  |             for _ in range(deadlocked_recievers): | ||||||
|  |                 await self._queue.put(self.__flush) | ||||||
|  |  | ||||||
|  |     # A special signal object for flushing the queue when the channel is closed | ||||||
|  |     __flush = object() | ||||||
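A small self-contained usage sketch of `AsyncChannel` outside of gRPC, using the module path shown above; the integer payloads are placeholders:

```python
import asyncio

from betterproto.grpc.util.async_channel import AsyncChannel


async def main() -> None:
    channel: AsyncChannel[int] = AsyncChannel()

    async def producer() -> None:
        # Queue a batch, then a single item, then close so iteration can finish.
        await channel.send_from(range(3))
        await channel.send(3)
        channel.close()

    async def consumer() -> None:
        async for item in channel:
            print(item)  # 0, 1, 2, 3

    await asyncio.gather(producer(), consumer())


asyncio.run(main())
```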
							
								
								
									
0 betterproto/lib/__init__.py Normal file
0 betterproto/lib/google/__init__.py Normal file
1312 betterproto/lib/google/protobuf/__init__.py Normal file (file diff suppressed because it is too large)
							| @@ -2,122 +2,64 @@ | |||||||
|  |  | ||||||
| import itertools | import itertools | ||||||
| import os.path | import os.path | ||||||
|  | import pathlib | ||||||
|  | import re | ||||||
| import sys | import sys | ||||||
| import textwrap | import textwrap | ||||||
| from collections import defaultdict | from typing import List, Union | ||||||
| from typing import Dict, List, Optional, Type |  | ||||||
|  | import betterproto | ||||||
|  | from betterproto.compile.importing import get_type_reference | ||||||
|  | from betterproto.compile.naming import ( | ||||||
|  |     pythonize_class_name, | ||||||
|  |     pythonize_field_name, | ||||||
|  |     pythonize_method_name, | ||||||
|  | ) | ||||||
|  |  | ||||||
| try: | try: | ||||||
|  |     # betterproto[compiler] specific dependencies | ||||||
|     import black |     import black | ||||||
| except ImportError: |     from google.protobuf.compiler import plugin_pb2 as plugin | ||||||
|  |     from google.protobuf.descriptor_pb2 import ( | ||||||
|  |         DescriptorProto, | ||||||
|  |         EnumDescriptorProto, | ||||||
|  |         FieldDescriptorProto, | ||||||
|  |     ) | ||||||
|  |     import google.protobuf.wrappers_pb2 as google_wrappers | ||||||
|  |     import jinja2 | ||||||
|  | except ImportError as err: | ||||||
|  |     missing_import = err.args[0][17:-1] | ||||||
|     print( |     print( | ||||||
|         "Unable to import `black` formatter. Did you install the compiler feature with `pip install betterproto[compiler]`?" |         "\033[31m" | ||||||
|  |         f"Unable to import `{missing_import}` from betterproto plugin! " | ||||||
|  |         "Please ensure that you've installed betterproto as " | ||||||
|  |         '`pip install "betterproto[compiler]"` so that compiler dependencies ' | ||||||
|  |         "are included." | ||||||
|  |         "\033[0m" | ||||||
|     ) |     ) | ||||||
|     raise SystemExit(1) |     raise SystemExit(1) | ||||||
|  |  | ||||||
| import jinja2 |  | ||||||
| import stringcase |  | ||||||
|  |  | ||||||
| from google.protobuf.compiler import plugin_pb2 as plugin | def py_type(package: str, imports: set, field: FieldDescriptorProto) -> str: | ||||||
| from google.protobuf.descriptor_pb2 import ( |     if field.type in [1, 2]: | ||||||
|     DescriptorProto, |  | ||||||
|     EnumDescriptorProto, |  | ||||||
|     FieldDescriptorProto, |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| from betterproto.casing import safe_snake_case |  | ||||||
|  |  | ||||||
| import google.protobuf.wrappers_pb2 as google_wrappers |  | ||||||
|  |  | ||||||
| WRAPPER_TYPES: Dict[str, Optional[Type]] = defaultdict( |  | ||||||
|     lambda: None, |  | ||||||
|     { |  | ||||||
|         "google.protobuf.DoubleValue": google_wrappers.DoubleValue, |  | ||||||
|         "google.protobuf.FloatValue": google_wrappers.FloatValue, |  | ||||||
|         "google.protobuf.Int64Value": google_wrappers.Int64Value, |  | ||||||
|         "google.protobuf.UInt64Value": google_wrappers.UInt64Value, |  | ||||||
|         "google.protobuf.Int32Value": google_wrappers.Int32Value, |  | ||||||
|         "google.protobuf.UInt32Value": google_wrappers.UInt32Value, |  | ||||||
|         "google.protobuf.BoolValue": google_wrappers.BoolValue, |  | ||||||
|         "google.protobuf.StringValue": google_wrappers.StringValue, |  | ||||||
|         "google.protobuf.BytesValue": google_wrappers.BytesValue, |  | ||||||
|     }, |  | ||||||
| ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_ref_type( |  | ||||||
|     package: str, imports: set, type_name: str, unwrap: bool = True |  | ||||||
| ) -> str: |  | ||||||
|     """ |  | ||||||
|     Return a Python type name for a proto type reference. Adds the import if |  | ||||||
|     necessary. Unwraps well known type if required. |  | ||||||
|     """ |  | ||||||
|     # If the package name is a blank string, then this should still work |  | ||||||
|     # because by convention packages are lowercase and message/enum types are |  | ||||||
|     # pascal-cased. May require refactoring in the future. |  | ||||||
|     type_name = type_name.lstrip(".") |  | ||||||
|  |  | ||||||
|     # Check if type is wrapper. |  | ||||||
|     wrapper_class = WRAPPER_TYPES[type_name] |  | ||||||
|  |  | ||||||
|     if unwrap: |  | ||||||
|         if wrapper_class: |  | ||||||
|             wrapped_type = type(wrapper_class().value) |  | ||||||
|             return f"Optional[{wrapped_type.__name__}]" |  | ||||||
|  |  | ||||||
|         if type_name == "google.protobuf.Duration": |  | ||||||
|             return "timedelta" |  | ||||||
|  |  | ||||||
|         if type_name == "google.protobuf.Timestamp": |  | ||||||
|             return "datetime" |  | ||||||
|     elif wrapper_class: |  | ||||||
|         imports.add(f"from {wrapper_class.__module__} import {wrapper_class.__name__}") |  | ||||||
|         return f"{wrapper_class.__name__}" |  | ||||||
|  |  | ||||||
|     if type_name.startswith(package): |  | ||||||
|         parts = type_name.lstrip(package).lstrip(".").split(".") |  | ||||||
|         if len(parts) == 1 or (len(parts) > 1 and parts[0][0] == parts[0][0].upper()): |  | ||||||
|             # This is the current package, which has nested types flattened. |  | ||||||
|             # foo.bar_thing => FooBarThing |  | ||||||
|             cased = [stringcase.pascalcase(part) for part in parts] |  | ||||||
|             type_name = f'"{"".join(cased)}"' |  | ||||||
|  |  | ||||||
|     if "." in type_name: |  | ||||||
|         # This is imported from another package. No need |  | ||||||
|         # to use a forward ref and we need to add the import. |  | ||||||
|         parts = type_name.split(".") |  | ||||||
|         parts[-1] = stringcase.pascalcase(parts[-1]) |  | ||||||
|         imports.add(f"from .{'.'.join(parts[:-2])} import {parts[-2]}") |  | ||||||
|         type_name = f"{parts[-2]}.{parts[-1]}" |  | ||||||
|  |  | ||||||
|     return type_name |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def py_type( |  | ||||||
|     package: str, |  | ||||||
|     imports: set, |  | ||||||
|     message: DescriptorProto, |  | ||||||
|     descriptor: FieldDescriptorProto, |  | ||||||
| ) -> str: |  | ||||||
|     if descriptor.type in [1, 2, 6, 7, 15, 16]: |  | ||||||
|         return "float" |         return "float" | ||||||
|     elif descriptor.type in [3, 4, 5, 13, 17, 18]: |     elif field.type in [3, 4, 5, 6, 7, 13, 15, 16, 17, 18]: | ||||||
|         return "int" |         return "int" | ||||||
|     elif descriptor.type == 8: |     elif field.type == 8: | ||||||
|         return "bool" |         return "bool" | ||||||
|     elif descriptor.type == 9: |     elif field.type == 9: | ||||||
|         return "str" |         return "str" | ||||||
|     elif descriptor.type in [11, 14]: |     elif field.type in [11, 14]: | ||||||
|         # Type referencing another defined Message or a named enum |         # Type referencing another defined Message or a named enum | ||||||
|         return get_ref_type(package, imports, descriptor.type_name) |         return get_type_reference(package, imports, field.type_name) | ||||||
|     elif descriptor.type == 12: |     elif field.type == 12: | ||||||
|         return "bytes" |         return "bytes" | ||||||
|     else: |     else: | ||||||
|         raise NotImplementedError(f"Unknown type {descriptor.type}") |         raise NotImplementedError(f"Unknown type {field.type}") | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_py_zero(type_num: int) -> str: | def get_py_zero(type_num: int) -> Union[str, float]: | ||||||
|     zero = 0 |     zero: Union[str, float] = 0 | ||||||
|     if type_num in []: |     if type_num in []: | ||||||
|         zero = 0.0 |         zero = 0.0 | ||||||
|     elif type_num == 8: |     elif type_num == 8: | ||||||
| @@ -178,6 +120,8 @@ def get_comment(proto_file, path: List[int], indent: int = 4) -> str: | |||||||
|  |  | ||||||
|  |  | ||||||
| def generate_code(request, response): | def generate_code(request, response): | ||||||
|  |     plugin_options = request.parameter.split(",") if request.parameter else [] | ||||||
|  |  | ||||||
|     env = jinja2.Environment( |     env = jinja2.Environment( | ||||||
|         trim_blocks=True, |         trim_blocks=True, | ||||||
|         lstrip_blocks=True, |         lstrip_blocks=True, | ||||||
| @@ -187,16 +131,17 @@ def generate_code(request, response): | |||||||
|  |  | ||||||
|     output_map = {} |     output_map = {} | ||||||
|     for proto_file in request.proto_file: |     for proto_file in request.proto_file: | ||||||
|         out = proto_file.package |         if ( | ||||||
|         if out == "google.protobuf": |             proto_file.package == "google.protobuf" | ||||||
|  |             and "INCLUDE_GOOGLE" not in plugin_options | ||||||
|  |         ): | ||||||
|             continue |             continue | ||||||
|  |  | ||||||
|         if not out: |         output_file = str(pathlib.Path(*proto_file.package.split("."), "__init__.py")) | ||||||
|             out = os.path.splitext(proto_file.name)[0].replace(os.path.sep, ".") |  | ||||||
|  |  | ||||||
|         if out not in output_map: |         if output_file not in output_map: | ||||||
|             output_map[out] = {"package": proto_file.package, "files": []} |             output_map[output_file] = {"package": proto_file.package, "files": []} | ||||||
|         output_map[out]["files"].append(proto_file) |         output_map[output_file]["files"].append(proto_file) | ||||||
|  |  | ||||||
|     # TODO: Figure out how to handle gRPC request/response messages and add |     # TODO: Figure out how to handle gRPC request/response messages and add | ||||||
|     # processing below for Service. |     # processing below for Service. | ||||||
| @@ -215,17 +160,10 @@ def generate_code(request, response): | |||||||
|             "services": [], |             "services": [], | ||||||
|         } |         } | ||||||
|  |  | ||||||
|         type_mapping = {} |  | ||||||
|  |  | ||||||
|         for proto_file in options["files"]: |         for proto_file in options["files"]: | ||||||
|             # print(proto_file.message_type, file=sys.stderr) |             item: DescriptorProto | ||||||
|             # print(proto_file.service, file=sys.stderr) |  | ||||||
|             # print(proto_file.source_code_info, file=sys.stderr) |  | ||||||
|  |  | ||||||
|             for item, path in traverse(proto_file): |             for item, path in traverse(proto_file): | ||||||
|                 # print(item, file=sys.stderr) |                 data = {"name": item.name, "py_name": pythonize_class_name(item.name)} | ||||||
|                 # print(path, file=sys.stderr) |  | ||||||
|                 data = {"name": item.name, "py_name": stringcase.pascalcase(item.name)} |  | ||||||
|  |  | ||||||
|                 if isinstance(item, DescriptorProto): |                 if isinstance(item, DescriptorProto): | ||||||
|                     # print(item, file=sys.stderr) |                     # print(item, file=sys.stderr) | ||||||
| @@ -242,7 +180,7 @@ def generate_code(request, response): | |||||||
|                     ) |                     ) | ||||||
|  |  | ||||||
|                     for i, f in enumerate(item.field): |                     for i, f in enumerate(item.field): | ||||||
|                         t = py_type(package, output["imports"], item, f) |                         t = py_type(package, output["imports"], f) | ||||||
|                         zero = get_py_zero(f.type) |                         zero = get_py_zero(f.type) | ||||||
|  |  | ||||||
|                         repeated = False |                         repeated = False | ||||||
| @@ -251,11 +189,13 @@ def generate_code(request, response): | |||||||
|                         field_type = f.Type.Name(f.type).lower()[5:] |                         field_type = f.Type.Name(f.type).lower()[5:] | ||||||
|  |  | ||||||
|                         field_wraps = "" |                         field_wraps = "" | ||||||
|                         if f.type_name.startswith( |                         match_wrapper = re.match( | ||||||
|                             ".google.protobuf" |                             r"\.google\.protobuf\.(.+)Value", f.type_name | ||||||
|                         ) and f.type_name.endswith("Value"): |                         ) | ||||||
|                             w = f.type_name.split(".").pop()[:-5].upper() |                         if match_wrapper: | ||||||
|                             field_wraps = f"betterproto.TYPE_{w}" |                             wrapped_type = "TYPE_" + match_wrapper.group(1).upper() | ||||||
|  |                             if hasattr(betterproto, wrapped_type): | ||||||
|  |                                 field_wraps = f"betterproto.{wrapped_type}" | ||||||
|  |  | ||||||
|                         map_types = None |                         map_types = None | ||||||
|                         if f.type == 11: |                         if f.type == 11: | ||||||
| @@ -275,13 +215,11 @@ def generate_code(request, response): | |||||||
|                                             k = py_type( |                                             k = py_type( | ||||||
|                                                 package, |                                                 package, | ||||||
|                                                 output["imports"], |                                                 output["imports"], | ||||||
|                                                 item, |  | ||||||
|                                                 nested.field[0], |                                                 nested.field[0], | ||||||
|                                             ) |                                             ) | ||||||
|                                             v = py_type( |                                             v = py_type( | ||||||
|                                                 package, |                                                 package, | ||||||
|                                                 output["imports"], |                                                 output["imports"], | ||||||
|                                                 item, |  | ||||||
|                                                 nested.field[1], |                                                 nested.field[1], | ||||||
|                                             ) |                                             ) | ||||||
|                                             t = f"Dict[{k}, {v}]" |                                             t = f"Dict[{k}, {v}]" | ||||||
| @@ -317,7 +255,7 @@ def generate_code(request, response): | |||||||
|                         data["properties"].append( |                         data["properties"].append( | ||||||
|                             { |                             { | ||||||
|                                 "name": f.name, |                                 "name": f.name, | ||||||
|                                 "py_name": safe_snake_case(f.name), |                                 "py_name": pythonize_field_name(f.name), | ||||||
|                                 "number": f.number, |                                 "number": f.number, | ||||||
|                                 "comment": get_comment(proto_file, path + [2, i]), |                                 "comment": get_comment(proto_file, path + [2, i]), | ||||||
|                                 "proto_type": int(f.type), |                                 "proto_type": int(f.type), | ||||||
| @@ -358,17 +296,14 @@ def generate_code(request, response): | |||||||
|  |  | ||||||
|                 data = { |                 data = { | ||||||
|                     "name": service.name, |                     "name": service.name, | ||||||
|                     "py_name": stringcase.pascalcase(service.name), |                     "py_name": pythonize_class_name(service.name), | ||||||
|                     "comment": get_comment(proto_file, [6, i]), |                     "comment": get_comment(proto_file, [6, i]), | ||||||
|                     "methods": [], |                     "methods": [], | ||||||
|                 } |                 } | ||||||
|  |  | ||||||
|                 for j, method in enumerate(service.method): |                 for j, method in enumerate(service.method): | ||||||
|                     if method.client_streaming: |  | ||||||
|                         raise NotImplementedError("Client streaming not yet supported") |  | ||||||
|  |  | ||||||
|                     input_message = None |                     input_message = None | ||||||
|                     input_type = get_ref_type( |                     input_type = get_type_reference( | ||||||
|                         package, output["imports"], method.input_type |                         package, output["imports"], method.input_type | ||||||
|                     ).strip('"') |                     ).strip('"') | ||||||
|                     for msg in output["messages"]: |                     for msg in output["messages"]: | ||||||
| @@ -382,14 +317,14 @@ def generate_code(request, response): | |||||||
|                     data["methods"].append( |                     data["methods"].append( | ||||||
|                         { |                         { | ||||||
|                             "name": method.name, |                             "name": method.name, | ||||||
|                             "py_name": stringcase.snakecase(method.name), |                             "py_name": pythonize_method_name(method.name), | ||||||
|                             "comment": get_comment(proto_file, [6, i, 2, j], indent=8), |                             "comment": get_comment(proto_file, [6, i, 2, j], indent=8), | ||||||
|                             "route": f"/{package}.{service.name}/{method.name}", |                             "route": f"/{package}.{service.name}/{method.name}", | ||||||
|                             "input": get_ref_type( |                             "input": get_type_reference( | ||||||
|                                 package, output["imports"], method.input_type |                                 package, output["imports"], method.input_type | ||||||
|                             ).strip('"'), |                             ).strip('"'), | ||||||
|                             "input_message": input_message, |                             "input_message": input_message, | ||||||
|                             "output": get_ref_type( |                             "output": get_type_reference( | ||||||
|                                 package, |                                 package, | ||||||
|                                 output["imports"], |                                 output["imports"], | ||||||
|                                 method.output_type, |                                 method.output_type, | ||||||
| @@ -400,8 +335,12 @@ def generate_code(request, response): | |||||||
|                         } |                         } | ||||||
|                     ) |                     ) | ||||||
|  |  | ||||||
|  |                     if method.client_streaming: | ||||||
|  |                         output["typing_imports"].add("AsyncIterable") | ||||||
|  |                         output["typing_imports"].add("Iterable") | ||||||
|  |                         output["typing_imports"].add("Union") | ||||||
|                     if method.server_streaming: |                     if method.server_streaming: | ||||||
|                         output["typing_imports"].add("AsyncGenerator") |                         output["typing_imports"].add("AsyncIterator") | ||||||
|  |  | ||||||
|                 output["services"].append(data) |                 output["services"].append(data) | ||||||
|  |  | ||||||
| @@ -411,8 +350,7 @@ def generate_code(request, response): | |||||||
|  |  | ||||||
|         # Fill response |         # Fill response | ||||||
|         f = response.file.add() |         f = response.file.add() | ||||||
|         # print(filename, file=sys.stderr) |         f.name = filename | ||||||
|         f.name = filename.replace(".", os.path.sep) + ".py" |  | ||||||
|  |  | ||||||
|         # Render and then format the output file. |         # Render and then format the output file. | ||||||
|         f.content = black.format_str( |         f.content = black.format_str( | ||||||
| @@ -420,32 +358,23 @@ def generate_code(request, response): | |||||||
|             mode=black.FileMode(target_versions=set([black.TargetVersion.PY37])), |             mode=black.FileMode(target_versions=set([black.TargetVersion.PY37])), | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     inits = set([""]) |     # Make each output directory a package with __init__ file | ||||||
|     for f in response.file: |     output_paths = set(pathlib.Path(path) for path in output_map.keys()) | ||||||
|         # Ensure output paths exist |     init_files = ( | ||||||
|         # print(f.name, file=sys.stderr) |         set( | ||||||
|         dirnames = os.path.dirname(f.name) |             directory.joinpath("__init__.py") | ||||||
|         if dirnames: |             for path in output_paths | ||||||
|             os.makedirs(dirnames, exist_ok=True) |             for directory in path.parents | ||||||
|             base = "" |         ) | ||||||
|             for part in dirnames.split(os.path.sep): |         - output_paths | ||||||
|                 base = os.path.join(base, part) |     ) | ||||||
|                 inits.add(base) |  | ||||||
|  |  | ||||||
|     for base in inits: |  | ||||||
|         name = os.path.join(base, "__init__.py") |  | ||||||
|  |  | ||||||
|         if os.path.exists(name): |  | ||||||
|             # Never overwrite inits as they may have custom stuff in them. |  | ||||||
|             continue |  | ||||||
|  |  | ||||||
|  |     for init_file in init_files: | ||||||
|         init = response.file.add() |         init = response.file.add() | ||||||
|         init.name = name |         init.name = str(init_file) | ||||||
|         init.content = b"" |  | ||||||
|  |  | ||||||
|     filenames = sorted([f.name for f in response.file]) |     for filename in sorted(output_paths.union(init_files)): | ||||||
|     for fname in filenames: |         print(f"Writing {filename}", file=sys.stderr) | ||||||
|         print(f"Writing {fname}", file=sys.stderr) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def main(): | def main(): | ||||||
|   | |||||||
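The `__init__.py` bookkeeping at the end of `generate_code` is easier to follow outside the diff; here is a standalone sketch of the same set arithmetic with made-up output paths:

```python
# Every ancestor directory of an output package gets an __init__.py,
# except directories that are already outputs themselves.
import pathlib

output_map = {"foo/bar/__init__.py": {}, "foo/__init__.py": {}}

output_paths = set(pathlib.Path(path) for path in output_map.keys())
init_files = (
    set(
        directory.joinpath("__init__.py")
        for path in output_paths
        for directory in path.parents
    )
    - output_paths
)
print(sorted(str(p) for p in init_files))  # ['__init__.py']
```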
| @@ -63,11 +63,28 @@ class {{ service.py_name }}Stub(betterproto.ServiceStub): | |||||||
|  |  | ||||||
|     {% endif %} |     {% endif %} | ||||||
|     {% for method in service.methods %} |     {% for method in service.methods %} | ||||||
|     async def {{ method.py_name }}(self{% if method.input_message and method.input_message.properties %}, *, {% for field in method.input_message.properties %}{{ field.py_name }}: {% if field.zero == "None" and not field.type.startswith("Optional[") %}Optional[{{ field.type }}]{% else %}{{ field.type }}{% endif %} = {{ field.zero }}{% if not loop.last %}, {% endif %}{% endfor %}{% endif %}) -> {% if method.server_streaming %}AsyncGenerator[{{ method.output }}, None]{% else %}{{ method.output }}{% endif %}: |     async def {{ method.py_name }}(self | ||||||
|  |         {%- if not method.client_streaming -%} | ||||||
|  |             {%- if method.input_message and method.input_message.properties -%}, *, | ||||||
|  |                 {%- for field in method.input_message.properties -%} | ||||||
|  |                     {{ field.py_name }}: {% if field.zero == "None" and not field.type.startswith("Optional[") -%} | ||||||
|  |                                             Optional[{{ field.type }}] | ||||||
|  |                                          {%- else -%} | ||||||
|  |                                             {{ field.type }} | ||||||
|  |                                          {%- endif -%} = {{ field.zero }} | ||||||
|  |                     {%- if not loop.last %}, {% endif -%} | ||||||
|  |                 {%- endfor -%} | ||||||
|  |             {%- endif -%} | ||||||
|  |         {%- else -%} | ||||||
|  |             {# Client streaming: need a request iterator instead #} | ||||||
|  |             , request_iterator: Union[AsyncIterable["{{ method.input }}"], Iterable["{{ method.input }}"]] | ||||||
|  |         {%- endif -%} | ||||||
|  |             ) -> {% if method.server_streaming %}AsyncIterator[{{ method.output }}]{% else %}{{ method.output }}{% endif %}: | ||||||
|         {% if method.comment %} |         {% if method.comment %} | ||||||
| {{ method.comment }} | {{ method.comment }} | ||||||
|  |  | ||||||
|         {% endif %} |         {% endif %} | ||||||
|  |         {% if not method.client_streaming %} | ||||||
|         request = {{ method.input }}() |         request = {{ method.input }}() | ||||||
|         {% for field in method.input_message.properties %} |         {% for field in method.input_message.properties %} | ||||||
|             {% if field.field_type == 'message' %} |             {% if field.field_type == 'message' %} | ||||||
| @@ -77,20 +94,41 @@ class {{ service.py_name }}Stub(betterproto.ServiceStub): | |||||||
|         request.{{ field.py_name }} = {{ field.py_name }} |         request.{{ field.py_name }} = {{ field.py_name }} | ||||||
|             {% endif %} |             {% endif %} | ||||||
|         {% endfor %} |         {% endfor %} | ||||||
|  |         {% endif %} | ||||||
|  |  | ||||||
|         {% if method.server_streaming %} |         {% if method.server_streaming %} | ||||||
|  |             {% if method.client_streaming %} | ||||||
|  |         async for response in self._stream_stream( | ||||||
|  |             "{{ method.route }}", | ||||||
|  |             request_iterator, | ||||||
|  |             {{ method.input }}, | ||||||
|  |             {{ method.output }}, | ||||||
|  |         ): | ||||||
|  |             yield response | ||||||
|  |             {% else %}{# i.e. not client streaming #} | ||||||
|         async for response in self._unary_stream( |         async for response in self._unary_stream( | ||||||
|             "{{ method.route }}", |             "{{ method.route }}", | ||||||
|             request, |             request, | ||||||
|             {{ method.output }}, |             {{ method.output }}, | ||||||
|         ): |         ): | ||||||
|             yield response |             yield response | ||||||
|         {% else %} |  | ||||||
|  |             {% endif %}{# if client streaming #} | ||||||
|  |         {% else %}{# i.e. not server streaming #} | ||||||
|  |             {% if method.client_streaming %} | ||||||
|  |         return await self._stream_unary( | ||||||
|  |             "{{ method.route }}", | ||||||
|  |             request_iterator, | ||||||
|  |             {{ method.input }}, | ||||||
|  |             {{ method.output }} | ||||||
|  |         ) | ||||||
|  |             {% else %}{# i.e. not client streaming #} | ||||||
|         return await self._unary_unary( |         return await self._unary_unary( | ||||||
|             "{{ method.route }}", |             "{{ method.route }}", | ||||||
|             request, |             request, | ||||||
|             {{ method.output }}, |             {{ method.output }} | ||||||
|         ) |         ) | ||||||
|  |             {% endif %}{# client streaming #} | ||||||
|         {% endif %} |         {% endif %} | ||||||
|  |  | ||||||
|     {% endfor %} |     {% endfor %} | ||||||
|   | |||||||
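For orientation, roughly what the template above renders for a bidirectionally streaming method; this is an illustrative sketch with invented names (`ChatService`, `ChatMessage`), not actual plugin output:

```python
from dataclasses import dataclass
from typing import AsyncIterable, AsyncIterator, Iterable, Union

import betterproto


@dataclass
class ChatMessage(betterproto.Message):
    text: str = betterproto.string_field(1)


class ChatServiceStub(betterproto.ServiceStub):
    async def chat(
        self,
        request_iterator: Union[AsyncIterable["ChatMessage"], Iterable["ChatMessage"]],
    ) -> AsyncIterator["ChatMessage"]:
        # Mirrors the client-streaming + server-streaming branch of the template.
        async for response in self._stream_stream(
            "/chat.ChatService/Chat",
            request_iterator,
            ChatMessage,
            ChatMessage,
        ):
            yield response
```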
| @@ -12,12 +12,12 @@ inputs/ | |||||||
|  |  | ||||||
| ## Test case directory structure | ## Test case directory structure | ||||||
|  |  | ||||||
| Each testcase has a `<name>.proto` file with a message called `Test`, a matching `.json` file and optionally a custom test file called `test_*.py`. | Each testcase has a `<name>.proto` file with a message called `Test`, and optionally a matching `.json` file and a custom test called `test_*.py`. | ||||||
|  |  | ||||||
| ```bash | ```bash | ||||||
| bool/ | bool/ | ||||||
|   bool.proto |   bool.proto | ||||||
|   bool.json |   bool.json     # optional | ||||||
|   test_bool.py  # optional |   test_bool.py  # optional | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| @@ -61,21 +61,22 @@ def test_value(): | |||||||
|  |  | ||||||
| The following tests are automatically executed for all cases: | The following tests are automatically executed for all cases: | ||||||
|  |  | ||||||
| - [x] Can the generated python code imported? | - [x] Can the generated python code be imported? | ||||||
| - [x] Can the generated message class be instantiated? | - [x] Can the generated message class be instantiated? | ||||||
| - [x] Is the generated code compatible with Google's `grpc_tools.protoc` implementation? | - [x] Is the generated code compatible with Google's `grpc_tools.protoc` implementation? | ||||||
|  |   - _when `.json` is present_  | ||||||
|  |  | ||||||
| ## Running the tests | ## Running the tests | ||||||
|  |  | ||||||
| - `pipenv run generate` | - `pipenv run generate`   | ||||||
|   This generates |   This generates: | ||||||
|   - `betterproto/tests/output_betterproto` — *the plugin generated python classes* |   - `betterproto/tests/output_betterproto` — *the plugin generated python classes* | ||||||
|   - `betterproto/tests/output_reference` — *reference implementation classes* |   - `betterproto/tests/output_reference` — *reference implementation classes* | ||||||
| - `pipenv run test` | - `pipenv run test` | ||||||
|  |  | ||||||
| ## Intentionally Failing tests | ## Intentionally Failing tests | ||||||
|  |  | ||||||
| The standard test suite includes tests that fail by intention. These tests document known bugs and missing features that are intended to be corrented in the future. | The standard test suite includes tests that fail by intention. These tests document known bugs and missing features that are intended to be corrected in the future. | ||||||
|  |  | ||||||
| When running `pytest`, they show up as `x` or  `X` in the test results. | When running `pytest`, they show up as `x` or  `X` in the test results. | ||||||
|  |  | ||||||
| @@ -87,4 +88,4 @@ betterproto/tests/test_inputs.py ..x...x..x...x.X........xx........x.....x...... | |||||||
| - `x` — XFAIL: expected failure | - `x` — XFAIL: expected failure | ||||||
| - `X` — XPASS: expected failure, but still passed | - `X` — XPASS: expected failure, but still passed | ||||||
|  |  | ||||||
| Test cases marked for expected failure are declared in [inputs/xfail.py](inputs.xfail.py) | Test cases marked for expected failure are declared in [inputs/config.py](inputs/config.py) | ||||||
							
								
								
									
0 betterproto/tests/__init__.py Normal file
120 betterproto/tests/generate.py Normal file → Executable file
							| @@ -1,7 +1,9 @@ | |||||||
| #!/usr/bin/env python | #!/usr/bin/env python | ||||||
| import glob | import asyncio | ||||||
| import os | import os | ||||||
|  | from pathlib import Path | ||||||
| import shutil | import shutil | ||||||
|  | import subprocess | ||||||
| import sys | import sys | ||||||
| from typing import Set | from typing import Set | ||||||
|  |  | ||||||
| @@ -19,54 +21,100 @@ from betterproto.tests.util import ( | |||||||
| os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python" | os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python" | ||||||
|  |  | ||||||
|  |  | ||||||
| def clear_directory(path: str): | def clear_directory(dir_path: Path): | ||||||
|     for file_or_directory in glob.glob(os.path.join(path, "*")): |     for file_or_directory in dir_path.glob("*"): | ||||||
|         if os.path.isdir(file_or_directory): |         if file_or_directory.is_dir(): | ||||||
|             shutil.rmtree(file_or_directory) |             shutil.rmtree(file_or_directory) | ||||||
|         else: |         else: | ||||||
|             os.remove(file_or_directory) |             file_or_directory.unlink() | ||||||
|  |  | ||||||
|  |  | ||||||
| def generate(whitelist: Set[str]): | async def generate(whitelist: Set[str], verbose: bool): | ||||||
|     path_whitelist = {os.path.realpath(e) for e in whitelist if os.path.exists(e)} |     test_case_names = set(get_directories(inputs_path)) - {"__pycache__"} | ||||||
|     name_whitelist = {e for e in whitelist if not os.path.exists(e)} |  | ||||||
|  |  | ||||||
|     test_case_names = set(get_directories(inputs_path)) |     path_whitelist = set() | ||||||
|  |     name_whitelist = set() | ||||||
|  |     for item in whitelist: | ||||||
|  |         if item in test_case_names: | ||||||
|  |             name_whitelist.add(item) | ||||||
|  |             continue | ||||||
|  |         path_whitelist.add(item) | ||||||
|  |  | ||||||
|  |     generation_tasks = [] | ||||||
|     for test_case_name in sorted(test_case_names): |     for test_case_name in sorted(test_case_names): | ||||||
|         test_case_input_path = os.path.realpath( |         test_case_input_path = inputs_path.joinpath(test_case_name).resolve() | ||||||
|             os.path.join(inputs_path, test_case_name) |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         if ( |         if ( | ||||||
|             whitelist |             whitelist | ||||||
|             and test_case_input_path not in path_whitelist |             and str(test_case_input_path) not in path_whitelist | ||||||
|             and test_case_name not in name_whitelist |             and test_case_name not in name_whitelist | ||||||
|         ): |         ): | ||||||
|             continue |             continue | ||||||
|  |         generation_tasks.append( | ||||||
|         test_case_output_path_reference = os.path.join( |             generate_test_case_output(test_case_input_path, test_case_name, verbose) | ||||||
|             output_path_reference, test_case_name |  | ||||||
|         ) |  | ||||||
|         test_case_output_path_betterproto = os.path.join( |  | ||||||
|             output_path_betterproto, test_case_name |  | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         print(f"Generating output for {test_case_name}") |     failed_test_cases = [] | ||||||
|         os.makedirs(test_case_output_path_reference, exist_ok=True) |     # Wait for all subprocesses and match any failures back to test-case names for reporting | ||||||
|         os.makedirs(test_case_output_path_betterproto, exist_ok=True) |     for test_case_name, result in zip( | ||||||
|  |         sorted(test_case_names), await asyncio.gather(*generation_tasks) | ||||||
|  |     ): | ||||||
|  |         if result != 0: | ||||||
|  |             failed_test_cases.append(test_case_name) | ||||||
|  |  | ||||||
|         clear_directory(test_case_output_path_reference) |     if failed_test_cases: | ||||||
|         clear_directory(test_case_output_path_betterproto) |         sys.stderr.write( | ||||||
|  |             "\n\033[31;1;4mFailed to generate the following test cases:\033[0m\n" | ||||||
|  |         ) | ||||||
|  |         for failed_test_case in failed_test_cases: | ||||||
|  |             sys.stderr.write(f"- {failed_test_case}\n") | ||||||
|  |  | ||||||
|         protoc_reference(test_case_input_path, test_case_output_path_reference) |  | ||||||
|         protoc_plugin(test_case_input_path, test_case_output_path_betterproto) | async def generate_test_case_output( | ||||||
|  |     test_case_input_path: Path, test_case_name: str, verbose: bool | ||||||
|  | ) -> int: | ||||||
|  |     """ | ||||||
|  |     Returns the max of the subprocess return values | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     test_case_output_path_reference = output_path_reference.joinpath(test_case_name) | ||||||
|  |     test_case_output_path_betterproto = output_path_betterproto.joinpath(test_case_name) | ||||||
|  |  | ||||||
|  |     os.makedirs(test_case_output_path_reference, exist_ok=True) | ||||||
|  |     os.makedirs(test_case_output_path_betterproto, exist_ok=True) | ||||||
|  |  | ||||||
|  |     clear_directory(test_case_output_path_reference) | ||||||
|  |     clear_directory(test_case_output_path_betterproto) | ||||||
|  |  | ||||||
|  |     ( | ||||||
|  |         (ref_out, ref_err, ref_code), | ||||||
|  |         (plg_out, plg_err, plg_code), | ||||||
|  |     ) = await asyncio.gather( | ||||||
|  |         protoc_reference(test_case_input_path, test_case_output_path_reference), | ||||||
|  |         protoc_plugin(test_case_input_path, test_case_output_path_betterproto), | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     message = f"Generated output for {test_case_name!r}" | ||||||
|  |     if verbose: | ||||||
|  |         print(f"\033[31;1;4m{message}\033[0m") | ||||||
|  |         if ref_out: | ||||||
|  |             sys.stdout.buffer.write(ref_out) | ||||||
|  |         if ref_err: | ||||||
|  |             sys.stderr.buffer.write(ref_err) | ||||||
|  |         if plg_out: | ||||||
|  |             sys.stdout.buffer.write(plg_out) | ||||||
|  |         if plg_err: | ||||||
|  |             sys.stderr.buffer.write(plg_err) | ||||||
|  |         sys.stdout.buffer.flush() | ||||||
|  |         sys.stderr.buffer.flush() | ||||||
|  |     else: | ||||||
|  |         print(message) | ||||||
|  |  | ||||||
|  |     return max(ref_code, plg_code) | ||||||
|  |  | ||||||
|  |  | ||||||
| HELP = "\n".join( | HELP = "\n".join( | ||||||
|     [ |     ( | ||||||
|         "Usage: python generate.py", |         "Usage: python generate.py [-h] [-v] [DIRECTORIES or NAMES]", | ||||||
|         "       python generate.py [DIRECTORIES or NAMES]", |  | ||||||
|         "Generate python classes for standard tests.", |         "Generate python classes for standard tests.", | ||||||
|         "", |         "", | ||||||
|         "DIRECTORIES    One or more relative or absolute directories of test-cases to generate classes for.", |         "DIRECTORIES    One or more relative or absolute directories of test-cases to generate classes for.", | ||||||
| @@ -74,7 +122,7 @@ HELP = "\n".join( | |||||||
|         "", |         "", | ||||||
|         "NAMES          One or more test-case names to generate classes for.", |         "NAMES          One or more test-case names to generate classes for.", | ||||||
|         "               python generate.py bool double enums", |         "               python generate.py bool double enums", | ||||||
|     ] |     ) | ||||||
| ) | ) | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -82,9 +130,13 @@ def main(): | |||||||
|     if set(sys.argv).intersection({"-h", "--help"}): |     if set(sys.argv).intersection({"-h", "--help"}): | ||||||
|         print(HELP) |         print(HELP) | ||||||
|         return |         return | ||||||
|     whitelist = set(sys.argv[1:]) |     if sys.argv[1:2] == ["-v"]: | ||||||
|  |         verbose = True | ||||||
|     generate(whitelist) |         whitelist = set(sys.argv[2:]) | ||||||
|  |     else: | ||||||
|  |         verbose = False | ||||||
|  |         whitelist = set(sys.argv[1:]) | ||||||
|  |     asyncio.get_event_loop().run_until_complete(generate(whitelist, verbose)) | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == "__main__": | if __name__ == "__main__": | ||||||
|   | |||||||
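
The rewritten generate.py launches one pair of protoc invocations per test case and waits for them all with asyncio.gather, so total generation time is bounded by the slowest case rather than the sum of all cases. Below is a minimal sketch of an async protoc runner in the same spirit; the helper name, flags, and paths are illustrative and are not the protoc_reference/protoc_plugin helpers defined in betterproto.tests.util.

    import asyncio
    from pathlib import Path
    from typing import List, Tuple

    async def run_protoc(input_path: Path, output_path: Path) -> Tuple[bytes, bytes, int]:
        # Launch protoc without blocking the event loop and capture its output.
        proc = await asyncio.create_subprocess_exec(
            "protoc",
            f"--proto_path={input_path}",
            f"--python_out={output_path}",
            *(str(proto) for proto in input_path.glob("*.proto")),
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )
        stdout, stderr = await proc.communicate()
        return stdout, stderr, proc.returncode

    async def generate_all(cases: List[Tuple[Path, Path]]) -> None:
        # One task per test case; gather returns results in submission order,
        # which is what lets the caller zip results back onto the case list.
        results = await asyncio.gather(
            *(run_protoc(in_path, out_path) for in_path, out_path in cases)
        )
        for (stdout, stderr, code), (in_path, _) in zip(results, cases):
            status = "ok" if code == 0 else f"failed ({code})"
            print(f"{in_path.name}: {status}")
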
							
								
								
									
0  betterproto/tests/grpc/__init__.py  (Normal file)

154  betterproto/tests/grpc/test_grpclib_client.py  (Normal file)
							| @@ -0,0 +1,154 @@ | |||||||
|  | import asyncio | ||||||
|  | from betterproto.tests.output_betterproto.service.service import ( | ||||||
|  |     DoThingResponse, | ||||||
|  |     DoThingRequest, | ||||||
|  |     GetThingRequest, | ||||||
|  |     GetThingResponse, | ||||||
|  |     TestStub as ThingServiceClient, | ||||||
|  | ) | ||||||
|  | import grpclib | ||||||
|  | from grpclib.testing import ChannelFor | ||||||
|  | import pytest | ||||||
|  | from betterproto.grpc.util.async_channel import AsyncChannel | ||||||
|  | from .thing_service import ThingService | ||||||
|  |  | ||||||
|  |  | ||||||
|  | async def _test_client(client, name="clean room", **kwargs): | ||||||
|  |     response = await client.do_thing(name=name) | ||||||
|  |     assert response.names == [name] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def _assert_request_meta_recieved(deadline, metadata): | ||||||
|  |     def server_side_test(stream): | ||||||
|  |         assert stream.deadline._timestamp == pytest.approx( | ||||||
|  |             deadline._timestamp, 1 | ||||||
|  |         ), "The provided deadline should be recieved serverside" | ||||||
|  |         assert ( | ||||||
|  |             stream.metadata["authorization"] == metadata["authorization"] | ||||||
|  |         ), "The provided authorization metadata should be recieved serverside" | ||||||
|  |  | ||||||
|  |     return server_side_test | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.asyncio | ||||||
|  | async def test_simple_service_call(): | ||||||
|  |     async with ChannelFor([ThingService()]) as channel: | ||||||
|  |         await _test_client(ThingServiceClient(channel)) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.asyncio | ||||||
|  | async def test_service_call_with_upfront_request_params(): | ||||||
|  |     # Setting deadline | ||||||
|  |     deadline = grpclib.metadata.Deadline.from_timeout(22) | ||||||
|  |     metadata = {"authorization": "12345"} | ||||||
|  |     async with ChannelFor( | ||||||
|  |         [ThingService(test_hook=_assert_request_meta_recieved(deadline, metadata),)] | ||||||
|  |     ) as channel: | ||||||
|  |         await _test_client( | ||||||
|  |             ThingServiceClient(channel, deadline=deadline, metadata=metadata) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     # Setting timeout | ||||||
|  |     timeout = 99 | ||||||
|  |     deadline = grpclib.metadata.Deadline.from_timeout(timeout) | ||||||
|  |     metadata = {"authorization": "12345"} | ||||||
|  |     async with ChannelFor( | ||||||
|  |         [ThingService(test_hook=_assert_request_meta_recieved(deadline, metadata),)] | ||||||
|  |     ) as channel: | ||||||
|  |         await _test_client( | ||||||
|  |             ThingServiceClient(channel, timeout=timeout, metadata=metadata) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.asyncio | ||||||
|  | async def test_service_call_lower_level_with_overrides(): | ||||||
|  |     THING_TO_DO = "get milk" | ||||||
|  |  | ||||||
|  |     # Setting deadline | ||||||
|  |     deadline = grpclib.metadata.Deadline.from_timeout(22) | ||||||
|  |     metadata = {"authorization": "12345"} | ||||||
|  |     kwarg_deadline = grpclib.metadata.Deadline.from_timeout(28) | ||||||
|  |     kwarg_metadata = {"authorization": "12345"} | ||||||
|  |     async with ChannelFor( | ||||||
|  |         [ThingService(test_hook=_assert_request_meta_recieved(deadline, metadata),)] | ||||||
|  |     ) as channel: | ||||||
|  |         client = ThingServiceClient(channel, deadline=deadline, metadata=metadata) | ||||||
|  |         response = await client._unary_unary( | ||||||
|  |             "/service.Test/DoThing", | ||||||
|  |             DoThingRequest(THING_TO_DO), | ||||||
|  |             DoThingResponse, | ||||||
|  |             deadline=kwarg_deadline, | ||||||
|  |             metadata=kwarg_metadata, | ||||||
|  |         ) | ||||||
|  |         assert response.names == [THING_TO_DO] | ||||||
|  |  | ||||||
|  |     # Setting timeout | ||||||
|  |     timeout = 99 | ||||||
|  |     deadline = grpclib.metadata.Deadline.from_timeout(timeout) | ||||||
|  |     metadata = {"authorization": "12345"} | ||||||
|  |     kwarg_timeout = 9000 | ||||||
|  |     kwarg_deadline = grpclib.metadata.Deadline.from_timeout(kwarg_timeout) | ||||||
|  |     kwarg_metadata = {"authorization": "09876"} | ||||||
|  |     async with ChannelFor( | ||||||
|  |         [ | ||||||
|  |             ThingService( | ||||||
|  |                 test_hook=_assert_request_meta_recieved(kwarg_deadline, kwarg_metadata), | ||||||
|  |             ) | ||||||
|  |         ] | ||||||
|  |     ) as channel: | ||||||
|  |         client = ThingServiceClient(channel, deadline=deadline, metadata=metadata) | ||||||
|  |         response = await client._unary_unary( | ||||||
|  |             "/service.Test/DoThing", | ||||||
|  |             DoThingRequest(THING_TO_DO), | ||||||
|  |             DoThingResponse, | ||||||
|  |             timeout=kwarg_timeout, | ||||||
|  |             metadata=kwarg_metadata, | ||||||
|  |         ) | ||||||
|  |         assert response.names == [THING_TO_DO] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.asyncio | ||||||
|  | async def test_async_gen_for_unary_stream_request(): | ||||||
|  |     thing_name = "my milkshakes" | ||||||
|  |  | ||||||
|  |     async with ChannelFor([ThingService()]) as channel: | ||||||
|  |         client = ThingServiceClient(channel) | ||||||
|  |         expected_versions = [5, 4, 3, 2, 1] | ||||||
|  |         async for response in client.get_thing_versions(name=thing_name): | ||||||
|  |             assert response.name == thing_name | ||||||
|  |             assert response.version == expected_versions.pop() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.asyncio | ||||||
|  | async def test_async_gen_for_stream_stream_request(): | ||||||
|  |     some_things = ["cake", "cricket", "coral reef"] | ||||||
|  |     more_things = ["ball", "that", "56kmodem", "liberal humanism", "cheesesticks"] | ||||||
|  |     expected_things = (*some_things, *more_things) | ||||||
|  |  | ||||||
|  |     async with ChannelFor([ThingService()]) as channel: | ||||||
|  |         client = ThingServiceClient(channel) | ||||||
|  |         # Use an AsyncChannel to decouple sending and receiving; it'll send some_things | ||||||
|  |         # immediately, and we'll use it to send more_things later, after receiving some | ||||||
|  |         # results | ||||||
|  |         request_chan = AsyncChannel() | ||||||
|  |         send_initial_requests = asyncio.ensure_future( | ||||||
|  |             request_chan.send_from(GetThingRequest(name) for name in some_things) | ||||||
|  |         ) | ||||||
|  |         response_index = 0 | ||||||
|  |         async for response in client.get_different_things(request_chan): | ||||||
|  |             assert response.name == expected_things[response_index] | ||||||
|  |             assert response.version == response_index + 1 | ||||||
|  |             response_index += 1 | ||||||
|  |             if more_things: | ||||||
|  |                 # Send some more requests as we receive responses, to be sure the coordination of | ||||||
|  |                 # send/receive events doesn't matter | ||||||
|  |                 await request_chan.send(GetThingRequest(more_things.pop(0))) | ||||||
|  |             elif not send_initial_requests.done(): | ||||||
|  |                 # Make sure the sending task has completed | ||||||
|  |                 await send_initial_requests | ||||||
|  |             else: | ||||||
|  |                 # No more things to send make sure channel is closed | ||||||
|  |                 request_chan.close() | ||||||
|  |         assert response_index == len( | ||||||
|  |             expected_things | ||||||
|  |         ), "Didn't recieve all exptected responses" | ||||||
							
								
								
									
100  betterproto/tests/grpc/test_stream_stream.py  (Normal file)
							| @@ -0,0 +1,100 @@ | |||||||
|  | import asyncio | ||||||
|  | import betterproto | ||||||
|  | from betterproto.grpc.util.async_channel import AsyncChannel | ||||||
|  | from dataclasses import dataclass | ||||||
|  | import pytest | ||||||
|  | from typing import AsyncIterator | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @dataclass | ||||||
|  | class Message(betterproto.Message): | ||||||
|  |     body: str = betterproto.string_field(1) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.fixture | ||||||
|  | def expected_responses(): | ||||||
|  |     return [Message("Hello world 1"), Message("Hello world 2"), Message("Done")] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class ClientStub: | ||||||
|  |     async def connect(self, requests: AsyncIterator): | ||||||
|  |         await asyncio.sleep(0.1) | ||||||
|  |         async for request in requests: | ||||||
|  |             await asyncio.sleep(0.1) | ||||||
|  |             yield request | ||||||
|  |         await asyncio.sleep(0.1) | ||||||
|  |         yield Message("Done") | ||||||
|  |  | ||||||
|  |  | ||||||
|  | async def to_list(generator: AsyncIterator): | ||||||
|  |     result = [] | ||||||
|  |     async for value in generator: | ||||||
|  |         result.append(value) | ||||||
|  |     return result | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.fixture | ||||||
|  | def client(): | ||||||
|  |     # channel = Channel(host='127.0.0.1', port=50051) | ||||||
|  |     # return ClientStub(channel) | ||||||
|  |     return ClientStub() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.asyncio | ||||||
|  | async def test_send_from_before_connect_and_close_automatically( | ||||||
|  |     client, expected_responses | ||||||
|  | ): | ||||||
|  |     requests = AsyncChannel() | ||||||
|  |     await requests.send_from( | ||||||
|  |         [Message(body="Hello world 1"), Message(body="Hello world 2")], close=True | ||||||
|  |     ) | ||||||
|  |     responses = client.connect(requests) | ||||||
|  |  | ||||||
|  |     assert await to_list(responses) == expected_responses | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.asyncio | ||||||
|  | async def test_send_from_after_connect_and_close_automatically( | ||||||
|  |     client, expected_responses | ||||||
|  | ): | ||||||
|  |     requests = AsyncChannel() | ||||||
|  |     responses = client.connect(requests) | ||||||
|  |     await requests.send_from( | ||||||
|  |         [Message(body="Hello world 1"), Message(body="Hello world 2")], close=True | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     assert await to_list(responses) == expected_responses | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.asyncio | ||||||
|  | async def test_send_from_close_manually_immediately(client, expected_responses): | ||||||
|  |     requests = AsyncChannel() | ||||||
|  |     responses = client.connect(requests) | ||||||
|  |     await requests.send_from( | ||||||
|  |         [Message(body="Hello world 1"), Message(body="Hello world 2")], close=False | ||||||
|  |     ) | ||||||
|  |     requests.close() | ||||||
|  |  | ||||||
|  |     assert await to_list(responses) == expected_responses | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.asyncio | ||||||
|  | async def test_send_individually_and_close_before_connect(client, expected_responses): | ||||||
|  |     requests = AsyncChannel() | ||||||
|  |     await requests.send(Message(body="Hello world 1")) | ||||||
|  |     await requests.send(Message(body="Hello world 2")) | ||||||
|  |     requests.close() | ||||||
|  |     responses = client.connect(requests) | ||||||
|  |  | ||||||
|  |     assert await to_list(responses) == expected_responses | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.asyncio | ||||||
|  | async def test_send_individually_and_close_after_connect(client, expected_responses): | ||||||
|  |     requests = AsyncChannel() | ||||||
|  |     await requests.send(Message(body="Hello world 1")) | ||||||
|  |     await requests.send(Message(body="Hello world 2")) | ||||||
|  |     responses = client.connect(requests) | ||||||
|  |     requests.close() | ||||||
|  |  | ||||||
|  |     assert await to_list(responses) == expected_responses | ||||||
							
								
								
									
83  betterproto/tests/grpc/thing_service.py  (Normal file)
							| @@ -0,0 +1,83 @@ | |||||||
|  | from betterproto.tests.output_betterproto.service.service import ( | ||||||
|  |     DoThingResponse, | ||||||
|  |     DoThingRequest, | ||||||
|  |     GetThingRequest, | ||||||
|  |     GetThingResponse, | ||||||
|  |     TestStub as ThingServiceClient, | ||||||
|  | ) | ||||||
|  | import grpclib | ||||||
|  | from typing import Any, Dict | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class ThingService: | ||||||
|  |     def __init__(self, test_hook=None): | ||||||
|  |         # This lets us pass assertions to the servicer ;) | ||||||
|  |         self.test_hook = test_hook | ||||||
|  |  | ||||||
|  |     async def do_thing( | ||||||
|  |         self, stream: "grpclib.server.Stream[DoThingRequest, DoThingResponse]" | ||||||
|  |     ): | ||||||
|  |         request = await stream.recv_message() | ||||||
|  |         if self.test_hook is not None: | ||||||
|  |             self.test_hook(stream) | ||||||
|  |         await stream.send_message(DoThingResponse([request.name])) | ||||||
|  |  | ||||||
|  |     async def do_many_things( | ||||||
|  |         self, stream: "grpclib.server.Stream[DoThingRequest, DoThingResponse]" | ||||||
|  |     ): | ||||||
|  |         thing_names = [request.name async for request in stream] | ||||||
|  |         if self.test_hook is not None: | ||||||
|  |             self.test_hook(stream) | ||||||
|  |         await stream.send_message(DoThingResponse(thing_names)) | ||||||
|  |  | ||||||
|  |     async def get_thing_versions( | ||||||
|  |         self, stream: "grpclib.server.Stream[GetThingRequest, GetThingResponse]" | ||||||
|  |     ): | ||||||
|  |         request = await stream.recv_message() | ||||||
|  |         if self.test_hook is not None: | ||||||
|  |             self.test_hook(stream) | ||||||
|  |         for version_num in range(1, 6): | ||||||
|  |             await stream.send_message( | ||||||
|  |                 GetThingResponse(name=request.name, version=version_num) | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |     async def get_different_things( | ||||||
|  |         self, stream: "grpclib.server.Stream[GetThingRequest, GetThingResponse]" | ||||||
|  |     ): | ||||||
|  |         if self.test_hook is not None: | ||||||
|  |             self.test_hook(stream) | ||||||
|  |         #  Respond to each input item immediately | ||||||
|  |         response_num = 0 | ||||||
|  |         async for request in stream: | ||||||
|  |             response_num += 1 | ||||||
|  |             await stream.send_message( | ||||||
|  |                 GetThingResponse(name=request.name, version=response_num) | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |     def __mapping__(self) -> Dict[str, "grpclib.const.Handler"]: | ||||||
|  |         return { | ||||||
|  |             "/service.Test/DoThing": grpclib.const.Handler( | ||||||
|  |                 self.do_thing, | ||||||
|  |                 grpclib.const.Cardinality.UNARY_UNARY, | ||||||
|  |                 DoThingRequest, | ||||||
|  |                 DoThingResponse, | ||||||
|  |             ), | ||||||
|  |             "/service.Test/DoManyThings": grpclib.const.Handler( | ||||||
|  |                 self.do_many_things, | ||||||
|  |                 grpclib.const.Cardinality.STREAM_UNARY, | ||||||
|  |                 DoThingRequest, | ||||||
|  |                 DoThingResponse, | ||||||
|  |             ), | ||||||
|  |             "/service.Test/GetThingVersions": grpclib.const.Handler( | ||||||
|  |                 self.get_thing_versions, | ||||||
|  |                 grpclib.const.Cardinality.UNARY_STREAM, | ||||||
|  |                 GetThingRequest, | ||||||
|  |                 GetThingResponse, | ||||||
|  |             ), | ||||||
|  |             "/service.Test/GetDifferentThings": grpclib.const.Handler( | ||||||
|  |                 self.get_different_things, | ||||||
|  |                 grpclib.const.Cardinality.STREAM_STREAM, | ||||||
|  |                 GetThingRequest, | ||||||
|  |                 GetThingResponse, | ||||||
|  |             ), | ||||||
|  |         } | ||||||
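
In the tests above, ThingService is hosted in-process through grpclib.testing.ChannelFor, but since it exposes a standard __mapping__() it can also be served on a real socket with grpclib's Server. A small sketch, with an arbitrarily chosen host and port:

    import asyncio
    from grpclib.server import Server

    from betterproto.tests.grpc.thing_service import ThingService

    async def serve(host: str = "127.0.0.1", port: int = 50051) -> None:
        # grpclib discovers the four handlers through ThingService.__mapping__()
        server = Server([ThingService()])
        await server.start(host, port)
        print(f"ThingService listening on {host}:{port}")
        await server.wait_closed()

    if __name__ == "__main__":
        asyncio.get_event_loop().run_until_complete(serve())
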
| @@ -1,4 +1,4 @@ | |||||||
| from betterproto.tests.output_betterproto.bool.bool import Test | from betterproto.tests.output_betterproto.bool import Test | ||||||
|  |  | ||||||
|  |  | ||||||
| def test_value(): | def test_value(): | ||||||
|   | |||||||
| @@ -10,6 +10,7 @@ message Test { | |||||||
|   int32 camelCase = 1; |   int32 camelCase = 1; | ||||||
|   my_enum snake_case = 2; |   my_enum snake_case = 2; | ||||||
|   snake_case_message snake_case_message = 3; |   snake_case_message snake_case_message = 3; | ||||||
|  |   int32 UPPERCASE = 4; | ||||||
| } | } | ||||||
|  |  | ||||||
| message snake_case_message { | message snake_case_message { | ||||||
|   | |||||||
| @@ -1,5 +1,5 @@ | |||||||
| import betterproto.tests.output_betterproto.casing.casing as casing | import betterproto.tests.output_betterproto.casing as casing | ||||||
| from betterproto.tests.output_betterproto.casing.casing import Test | from betterproto.tests.output_betterproto.casing import Test | ||||||
|  |  | ||||||
|  |  | ||||||
| def test_message_attributes(): | def test_message_attributes(): | ||||||
| @@ -8,6 +8,7 @@ def test_message_attributes(): | |||||||
|         message, "snake_case_message" |         message, "snake_case_message" | ||||||
|     ), "snake_case field name is same in python" |     ), "snake_case field name is same in python" | ||||||
|     assert hasattr(message, "camel_case"), "CamelCase field is snake_case in python" |     assert hasattr(message, "camel_case"), "CamelCase field is snake_case in python" | ||||||
|  |     assert hasattr(message, "uppercase"), "UPPERCASE field is lowercase in python" | ||||||
|  |  | ||||||
|  |  | ||||||
| def test_message_casing(): | def test_message_casing(): | ||||||
|   | |||||||
| @@ -0,0 +1,7 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | message Test { | ||||||
|  |   int32 UPPERCASE = 1; | ||||||
|  |   int32 UPPERCASE_V2 = 2; | ||||||
|  |   int32 UPPER_CAMEL_CASE = 3; | ||||||
|  | } | ||||||
| @@ -0,0 +1,14 @@ | |||||||
|  | from betterproto.tests.output_betterproto.casing_message_field_uppercase import Test | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_message_casing(): | ||||||
|  |     message = Test() | ||||||
|  |     assert hasattr( | ||||||
|  |         message, "uppercase" | ||||||
|  |     ), "UPPERCASE attribute is converted to 'uppercase' in python" | ||||||
|  |     assert hasattr( | ||||||
|  |         message, "uppercase_v2" | ||||||
|  |     ), "UPPERCASE_V2 attribute is converted to 'uppercase_v2' in python" | ||||||
|  |     assert hasattr( | ||||||
|  |         message, "upper_camel_case" | ||||||
|  |     ), "UPPER_CAMEL_CASE attribute is converted to upper_camel_case in python" | ||||||
							
								
								
									
21  betterproto/tests/inputs/config.py  (Normal file)
							| @@ -0,0 +1,21 @@ | |||||||
|  | # Test cases that are expected to fail, e.g. because of unimplemented features or pending bug-fixes. | ||||||
|  | # Remove from list when fixed. | ||||||
|  | xfail = { | ||||||
|  |     "import_circular_dependency", | ||||||
|  |     "oneof_enum",  # 63 | ||||||
|  |     "namespace_keywords",  # 70 | ||||||
|  |     "namespace_builtin_types",  # 53 | ||||||
|  |     "googletypes_struct",  # 9 | ||||||
|  |     "googletypes_value",  # 9, | ||||||
|  |     "import_capitalized_package", | ||||||
|  |     "example",  # This is the example in the readme. Not a test. | ||||||
|  | } | ||||||
|  |  | ||||||
|  | services = { | ||||||
|  |     "googletypes_response", | ||||||
|  |     "googletypes_response_embedded", | ||||||
|  |     "service", | ||||||
|  |     "import_service_input_message", | ||||||
|  |     "googletypes_service_returns_empty", | ||||||
|  |     "googletypes_service_returns_googletype", | ||||||
|  | } | ||||||
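
A plausible way these sets are consumed is at collection time, turning each known-broken case into an xfail parameter rather than a hard failure. This is a hypothetical sketch; the hard-coded case list stands in for whatever the suite actually derives from the inputs directory.

    import pytest

    from betterproto.tests.inputs import config

    # Stand-in for the real list derived from betterproto/tests/inputs/*
    all_cases = ["bool", "casing", "oneof_enum", "service"]

    test_cases = [
        pytest.param(name, marks=pytest.mark.xfail) if name in config.xfail else name
        for name in all_cases
    ]

    @pytest.mark.parametrize("test_case_name", test_cases)
    def test_case_is_known(test_case_name):
        # Placeholder body; the real tests compare generated output instead.
        assert isinstance(test_case_name, str)
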
							
								
								
									
8  betterproto/tests/inputs/example/example.proto  (Normal file)
							| @@ -0,0 +1,8 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | package hello; | ||||||
|  |  | ||||||
|  | // Greeting represents a message you can tell a user. | ||||||
|  | message Greeting { | ||||||
|  |   string message = 1; | ||||||
|  | } | ||||||
							
								
								
									
6  betterproto/tests/inputs/fixed/fixed.json  (Normal file)
							| @@ -0,0 +1,6 @@ | |||||||
|  | { | ||||||
|  |   "foo": 4294967295, | ||||||
|  |   "bar": -2147483648, | ||||||
|  |   "baz": "18446744073709551615", | ||||||
|  |   "qux": "-9223372036854775808" | ||||||
|  | } | ||||||
							
								
								
									
8  betterproto/tests/inputs/fixed/fixed.proto  (Normal file)
							| @@ -0,0 +1,8 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | message Test { | ||||||
|  |   fixed32 foo = 1; | ||||||
|  |   sfixed32 bar = 2; | ||||||
|  |   fixed64 baz = 3; | ||||||
|  |   sfixed64 qux = 4; | ||||||
|  | } | ||||||
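
The quoted values for baz and qux in fixed.json follow the proto3 JSON mapping, which serializes 64-bit integers as strings. A quick round-trip sketch, assuming the generated class is importable under the path used by the other tests in this change:

    from betterproto.tests.output_betterproto.fixed import Test

    message = Test().from_json(
        '{"foo": 4294967295, "bar": -2147483648,'
        ' "baz": "18446744073709551615", "qux": "-9223372036854775808"}'
    )
    # The string-encoded 64-bit fields come back as plain Python ints.
    assert message.baz == 18446744073709551615
    assert message.qux == -9223372036854775808
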
| @@ -1,5 +1,7 @@ | |||||||
| { | { | ||||||
|   "maybe": false, |   "maybe": false, | ||||||
|   "ts": "1972-01-01T10:00:20.021Z", |   "ts": "1972-01-01T10:00:20.021Z", | ||||||
|   "duration": "1.200s" |   "duration": "1.200s", | ||||||
|  |   "important": 10, | ||||||
|  |   "empty": {} | ||||||
| } | } | ||||||
|   | |||||||
| @@ -3,10 +3,12 @@ syntax = "proto3"; | |||||||
| import "google/protobuf/duration.proto"; | import "google/protobuf/duration.proto"; | ||||||
| import "google/protobuf/timestamp.proto"; | import "google/protobuf/timestamp.proto"; | ||||||
| import "google/protobuf/wrappers.proto"; | import "google/protobuf/wrappers.proto"; | ||||||
|  | import "google/protobuf/empty.proto"; | ||||||
|  |  | ||||||
| message Test { | message Test { | ||||||
|   google.protobuf.BoolValue maybe = 1; |   google.protobuf.BoolValue maybe = 1; | ||||||
|   google.protobuf.Timestamp ts = 2; |   google.protobuf.Timestamp ts = 2; | ||||||
|   google.protobuf.Duration duration = 3; |   google.protobuf.Duration duration = 3; | ||||||
|   google.protobuf.Int32Value important = 4; |   google.protobuf.Int32Value important = 4; | ||||||
|  |   google.protobuf.Empty empty = 5; | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,29 +1,27 @@ | |||||||
| from typing import Any, Callable, Optional | from typing import Any, Callable, Optional | ||||||
|  |  | ||||||
| import google.protobuf.wrappers_pb2 as wrappers | import betterproto.lib.google.protobuf as protobuf | ||||||
| import pytest | import pytest | ||||||
|  |  | ||||||
| from betterproto.tests.mocks import MockChannel | from betterproto.tests.mocks import MockChannel | ||||||
| from betterproto.tests.output_betterproto.googletypes_response.googletypes_response import ( | from betterproto.tests.output_betterproto.googletypes_response import TestStub | ||||||
|     TestStub, |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| test_cases = [ | test_cases = [ | ||||||
|     (TestStub.get_double, wrappers.DoubleValue, 2.5), |     (TestStub.get_double, protobuf.DoubleValue, 2.5), | ||||||
|     (TestStub.get_float, wrappers.FloatValue, 2.5), |     (TestStub.get_float, protobuf.FloatValue, 2.5), | ||||||
|     (TestStub.get_int64, wrappers.Int64Value, -64), |     (TestStub.get_int64, protobuf.Int64Value, -64), | ||||||
|     (TestStub.get_u_int64, wrappers.UInt64Value, 64), |     (TestStub.get_u_int64, protobuf.UInt64Value, 64), | ||||||
|     (TestStub.get_int32, wrappers.Int32Value, -32), |     (TestStub.get_int32, protobuf.Int32Value, -32), | ||||||
|     (TestStub.get_u_int32, wrappers.UInt32Value, 32), |     (TestStub.get_u_int32, protobuf.UInt32Value, 32), | ||||||
|     (TestStub.get_bool, wrappers.BoolValue, True), |     (TestStub.get_bool, protobuf.BoolValue, True), | ||||||
|     (TestStub.get_string, wrappers.StringValue, "string"), |     (TestStub.get_string, protobuf.StringValue, "string"), | ||||||
|     (TestStub.get_bytes, wrappers.BytesValue, bytes(0xFF)[0:4]), |     (TestStub.get_bytes, protobuf.BytesValue, bytes(0xFF)[0:4]), | ||||||
| ] | ] | ||||||
|  |  | ||||||
|  |  | ||||||
| @pytest.mark.asyncio | @pytest.mark.asyncio | ||||||
| @pytest.mark.parametrize(["service_method", "wrapper_class", "value"], test_cases) | @pytest.mark.parametrize(["service_method", "wrapper_class", "value"], test_cases) | ||||||
| async def test_channel_receives_wrapped_type( | async def test_channel_recieves_wrapped_type( | ||||||
|     service_method: Callable[[TestStub], Any], wrapper_class: Callable, value |     service_method: Callable[[TestStub], Any], wrapper_class: Callable, value | ||||||
| ): | ): | ||||||
|     wrapped_value = wrapper_class() |     wrapped_value = wrapper_class() | ||||||
|   | |||||||
| @@ -1,7 +1,7 @@ | |||||||
| import pytest | import pytest | ||||||
|  |  | ||||||
| from betterproto.tests.mocks import MockChannel | from betterproto.tests.mocks import MockChannel | ||||||
| from betterproto.tests.output_betterproto.googletypes_response_embedded.googletypes_response_embedded import ( | from betterproto.tests.output_betterproto.googletypes_response_embedded import ( | ||||||
|     Output, |     Output, | ||||||
|     TestStub, |     TestStub, | ||||||
| ) | ) | ||||||
|   | |||||||
| @@ -0,0 +1,11 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | import "google/protobuf/empty.proto"; | ||||||
|  |  | ||||||
|  | service Test { | ||||||
|  |     rpc Send (RequestMessage) returns (google.protobuf.Empty) { | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
|  | message RequestMessage { | ||||||
|  | } | ||||||
| @@ -0,0 +1,16 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | import "google/protobuf/empty.proto"; | ||||||
|  | import "google/protobuf/struct.proto"; | ||||||
|  |  | ||||||
|  | // Tests that imports are generated correctly when returning Google well-known types | ||||||
|  |  | ||||||
|  | service Test { | ||||||
|  |     rpc GetEmpty (RequestMessage) returns (google.protobuf.Empty); | ||||||
|  |     rpc GetStruct (RequestMessage) returns (google.protobuf.Struct); | ||||||
|  |     rpc GetListValue (RequestMessage) returns (google.protobuf.ListValue); | ||||||
|  |     rpc GetValue (RequestMessage) returns (google.protobuf.Value); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | message RequestMessage { | ||||||
|  | } | ||||||
| @@ -0,0 +1,5 @@ | |||||||
|  | { | ||||||
|  |   "struct": { | ||||||
|  |     "key": true | ||||||
|  |   } | ||||||
|  | } | ||||||
| @@ -0,0 +1,7 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | import "google/protobuf/struct.proto"; | ||||||
|  |  | ||||||
|  | message Test { | ||||||
|  |   google.protobuf.Struct struct = 1; | ||||||
|  | } | ||||||
| @@ -0,0 +1,11 @@ | |||||||
|  | { | ||||||
|  |   "value1": "hello world", | ||||||
|  |   "value2": true, | ||||||
|  |   "value3": 1, | ||||||
|  |   "value4": null, | ||||||
|  |   "value5": [ | ||||||
|  |     1, | ||||||
|  |     2, | ||||||
|  |     3 | ||||||
|  |   ] | ||||||
|  | } | ||||||
| @@ -0,0 +1,13 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | import "google/protobuf/struct.proto"; | ||||||
|  |  | ||||||
|  | // Tests that fields of type google.protobuf.Value can contain arbitrary JSON-values. | ||||||
|  |  | ||||||
|  | message Test { | ||||||
|  |   google.protobuf.Value value1 = 1; | ||||||
|  |   google.protobuf.Value value2 = 2; | ||||||
|  |   google.protobuf.Value value3 = 3; | ||||||
|  |   google.protobuf.Value value4 = 4; | ||||||
|  |   google.protobuf.Value value5 = 5; | ||||||
|  | } | ||||||
| @@ -0,0 +1,8 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  |  | ||||||
|  | package Capitalized; | ||||||
|  |  | ||||||
|  | message Message { | ||||||
|  |  | ||||||
|  | } | ||||||
| @@ -0,0 +1,9 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | import "capitalized.proto"; | ||||||
|  |  | ||||||
|  | // Tests that we can import from a package with a capitalized name, which looks like a nested type but isn't. | ||||||
|  |  | ||||||
|  | message Test { | ||||||
|  |   Capitalized.Message message = 1; | ||||||
|  | } | ||||||
| @@ -3,9 +3,9 @@ syntax = "proto3"; | |||||||
| import "root.proto"; | import "root.proto"; | ||||||
| import "other.proto"; | import "other.proto"; | ||||||
|  |  | ||||||
| // This test-case verifies that future implementations will support circular dependencies in the generated python files. | // This test-case verifies support for circular dependencies in the generated python files. | ||||||
| // | // | ||||||
| // This becomes important when generating 1 python file/module per package, rather than 1 file per proto file. | // This is important because we generate 1 python file/module per package, rather than 1 file per proto file. | ||||||
| // | // | ||||||
| // Scenario: | // Scenario: | ||||||
| // | // | ||||||
| @@ -24,5 +24,5 @@ import "other.proto"; | |||||||
| //           (root: Test & RootPackageMessage) <-------> (other: OtherPackageMessage) | //           (root: Test & RootPackageMessage) <-------> (other: OtherPackageMessage) | ||||||
| message Test { | message Test { | ||||||
|   RootPackageMessage message = 1; |   RootPackageMessage message = 1; | ||||||
|   other.OtherPackageMessage other =2; |   other.OtherPackageMessage other = 2; | ||||||
| } | } | ||||||
|   | |||||||
| @@ -0,0 +1,6 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | package cousin.cousin_subpackage; | ||||||
|  |  | ||||||
|  | message CousinMessage { | ||||||
|  | } | ||||||
							
								
								
									
11  betterproto/tests/inputs/import_cousin_package/test.proto  (Normal file)
							| @@ -0,0 +1,11 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | package test.subpackage; | ||||||
|  |  | ||||||
|  | import "cousin.proto"; | ||||||
|  |  | ||||||
|  | // Verify that we can import a message unrelated to us | ||||||
|  |  | ||||||
|  | message Test { | ||||||
|  |     cousin.cousin_subpackage.CousinMessage message = 1; | ||||||
|  | } | ||||||
| @@ -0,0 +1,6 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | package cousin.subpackage; | ||||||
|  |  | ||||||
|  | message CousinMessage { | ||||||
|  | } | ||||||
| @@ -0,0 +1,11 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | package test.subpackage; | ||||||
|  |  | ||||||
|  | import "cousin.proto"; | ||||||
|  |  | ||||||
|  | // Verify that we can import a message unrelated to us, in a subpackage with the same name as us. | ||||||
|  |  | ||||||
|  | message Test { | ||||||
|  |     cousin.subpackage.CousinMessage message = 1; | ||||||
|  | } | ||||||
| @@ -0,0 +1,11 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | import "users_v1.proto"; | ||||||
|  | import "posts_v1.proto"; | ||||||
|  |  | ||||||
|  | // Tests that a generated message can correctly reference two packages with the same leaf-name | ||||||
|  |  | ||||||
|  | message Test { | ||||||
|  |   users.v1.User user = 1; | ||||||
|  |   posts.v1.Post post = 2; | ||||||
|  | } | ||||||
| @@ -0,0 +1,7 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | package posts.v1; | ||||||
|  |  | ||||||
|  | message Post { | ||||||
|  |  | ||||||
|  | } | ||||||
| @@ -0,0 +1,7 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | package users.v1; | ||||||
|  |  | ||||||
|  | message User { | ||||||
|  |  | ||||||
|  | } | ||||||
| @@ -0,0 +1,11 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | package child; | ||||||
|  |  | ||||||
|  | import "root.proto"; | ||||||
|  |  | ||||||
|  | // Verify that we can import the root message from a child package | ||||||
|  |  | ||||||
|  | message Test { | ||||||
|  |     RootMessage message = 1; | ||||||
|  | } | ||||||
| @@ -1,11 +0,0 @@ | |||||||
| syntax = "proto3"; |  | ||||||
|  |  | ||||||
| import "root.proto"; |  | ||||||
|  |  | ||||||
| package child; |  | ||||||
|  |  | ||||||
| // Tests generated imports when a message inside a child-package refers to a message defined in the root. |  | ||||||
|  |  | ||||||
| message Test { |  | ||||||
|   RootMessage message = 1; |  | ||||||
| } |  | ||||||
| @@ -0,0 +1,15 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | import "request_message.proto"; | ||||||
|  |  | ||||||
|  | // Tests that the generated service correctly imports the RequestMessage | ||||||
|  |  | ||||||
|  | service Test { | ||||||
|  |     rpc DoThing (RequestMessage) returns (RequestResponse); | ||||||
|  | } | ||||||
|  |  | ||||||
|  |  | ||||||
|  | message RequestResponse { | ||||||
|  |     int32 value = 1; | ||||||
|  | } | ||||||
|  |  | ||||||
| @@ -0,0 +1,5 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | message RequestMessage { | ||||||
|  |     int32 argument = 1; | ||||||
|  | } | ||||||
| @@ -0,0 +1,16 @@ | |||||||
|  | import pytest | ||||||
|  |  | ||||||
|  | from betterproto.tests.mocks import MockChannel | ||||||
|  | from betterproto.tests.output_betterproto.import_service_input_message import ( | ||||||
|  |     RequestResponse, | ||||||
|  |     TestStub, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.xfail(reason="#68 Request Input Messages are not imported for service") | ||||||
|  | @pytest.mark.asyncio | ||||||
|  | async def test_service_correctly_imports_reference_message(): | ||||||
|  |     mock_response = RequestResponse(value=10) | ||||||
|  |     service = TestStub(MockChannel([mock_response])) | ||||||
|  |     response = await service.do_thing() | ||||||
|  |     assert mock_response == response | ||||||
| @@ -1,5 +0,0 @@ | |||||||
| { |  | ||||||
|   "for": 1, |  | ||||||
|   "with": 2, |  | ||||||
|   "as": 3 |  | ||||||
| } |  | ||||||
| @@ -1,11 +0,0 @@ | |||||||
| syntax = "proto3"; |  | ||||||
|  |  | ||||||
| message Test { |  | ||||||
|   int32 for = 1; |  | ||||||
|   int32 with = 2; |  | ||||||
|   int32 as = 3; |  | ||||||
| } |  | ||||||
|  |  | ||||||
| service TestService { |  | ||||||
|   rpc GetTest(Test) returns (Test) {} |  | ||||||
| } |  | ||||||
| @@ -0,0 +1,16 @@ | |||||||
|  | { | ||||||
|  |   "int": "value-for-int", | ||||||
|  |   "float": "value-for-float", | ||||||
|  |   "complex": "value-for-complex", | ||||||
|  |   "list": "value-for-list", | ||||||
|  |   "tuple": "value-for-tuple", | ||||||
|  |   "range": "value-for-range", | ||||||
|  |   "str": "value-for-str", | ||||||
|  |   "bytearray": "value-for-bytearray", | ||||||
|  |   "bytes": "value-for-bytes", | ||||||
|  |   "memoryview": "value-for-memoryview", | ||||||
|  |   "set": "value-for-set", | ||||||
|  |   "frozenset": "value-for-frozenset", | ||||||
|  |   "map": "value-for-map", | ||||||
|  |   "bool": "value-for-bool" | ||||||
|  | } | ||||||
| @@ -0,0 +1,38 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | // Tests that messages may contain fields with names that are python types | ||||||
|  |  | ||||||
|  | message Test { | ||||||
|  |     // https://docs.python.org/2/library/stdtypes.html#numeric-types-int-float-long-complex | ||||||
|  |     string int = 1; | ||||||
|  |     string float = 2; | ||||||
|  |     string complex = 3; | ||||||
|  |  | ||||||
|  |     // https://docs.python.org/3/library/stdtypes.html#sequence-types-list-tuple-range | ||||||
|  |     string list = 4; | ||||||
|  |     string tuple = 5; | ||||||
|  |     string range = 6; | ||||||
|  |  | ||||||
|  |     // https://docs.python.org/3/library/stdtypes.html#str | ||||||
|  |     string str = 7; | ||||||
|  |  | ||||||
|  |     // https://docs.python.org/3/library/stdtypes.html#bytearray-objects | ||||||
|  |     string bytearray = 8; | ||||||
|  |  | ||||||
|  |     // https://docs.python.org/3/library/stdtypes.html#bytes-and-bytearray-operations | ||||||
|  |     string bytes = 9; | ||||||
|  |  | ||||||
|  |     // https://docs.python.org/3/library/stdtypes.html#memory-views | ||||||
|  |     string memoryview = 10; | ||||||
|  |  | ||||||
|  |     // https://docs.python.org/3/library/stdtypes.html#set-types-set-frozenset | ||||||
|  |     string set = 11; | ||||||
|  |     string frozenset = 12; | ||||||
|  |  | ||||||
|  |     // https://docs.python.org/3/library/stdtypes.html#dict | ||||||
|  |     string map = 13; | ||||||
|  |     string dict = 14; | ||||||
|  |  | ||||||
|  |     // https://docs.python.org/3/library/stdtypes.html#boolean-values | ||||||
|  |     string bool = 15; | ||||||
|  | } | ||||||
| @@ -0,0 +1,37 @@ | |||||||
|  | { | ||||||
|  |   "False": 1, | ||||||
|  |   "None": 2, | ||||||
|  |   "True": 3, | ||||||
|  |   "and": 4, | ||||||
|  |   "as": 5, | ||||||
|  |   "assert": 6, | ||||||
|  |   "async": 7, | ||||||
|  |   "await": 8, | ||||||
|  |   "break": 9, | ||||||
|  |   "class": 10, | ||||||
|  |   "continue": 11, | ||||||
|  |   "def": 12, | ||||||
|  |   "del": 13, | ||||||
|  |   "elif": 14, | ||||||
|  |   "else": 15, | ||||||
|  |   "except": 16, | ||||||
|  |   "finally": 17, | ||||||
|  |   "for": 18, | ||||||
|  |   "from": 19, | ||||||
|  |   "global": 20, | ||||||
|  |   "if": 21, | ||||||
|  |   "import": 22, | ||||||
|  |   "in": 23, | ||||||
|  |   "is": 24, | ||||||
|  |   "lambda": 25, | ||||||
|  |   "nonlocal": 26, | ||||||
|  |   "not": 27, | ||||||
|  |   "or": 28, | ||||||
|  |   "pass": 29, | ||||||
|  |   "raise": 30, | ||||||
|  |   "return": 31, | ||||||
|  |   "try": 32, | ||||||
|  |   "while": 33, | ||||||
|  |   "with": 34, | ||||||
|  |   "yield": 35 | ||||||
|  | } | ||||||
| @@ -0,0 +1,44 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | // Tests that messages may contain fields that are Python keywords | ||||||
|  | // | ||||||
|  | // Generated with Python 3.7.6 | ||||||
|  | // print('\n'.join(f'string {k} = {i+1};' for i,k in enumerate(keyword.kwlist))) | ||||||
|  |  | ||||||
|  | message Test { | ||||||
|  |     string False = 1; | ||||||
|  |     string None = 2; | ||||||
|  |     string True = 3; | ||||||
|  |     string and = 4; | ||||||
|  |     string as = 5; | ||||||
|  |     string assert = 6; | ||||||
|  |     string async = 7; | ||||||
|  |     string await = 8; | ||||||
|  |     string break = 9; | ||||||
|  |     string class = 10; | ||||||
|  |     string continue = 11; | ||||||
|  |     string def = 12; | ||||||
|  |     string del = 13; | ||||||
|  |     string elif = 14; | ||||||
|  |     string else = 15; | ||||||
|  |     string except = 16; | ||||||
|  |     string finally = 17; | ||||||
|  |     string for = 18; | ||||||
|  |     string from = 19; | ||||||
|  |     string global = 20; | ||||||
|  |     string if = 21; | ||||||
|  |     string import = 22; | ||||||
|  |     string in = 23; | ||||||
|  |     string is = 24; | ||||||
|  |     string lambda = 25; | ||||||
|  |     string nonlocal = 26; | ||||||
|  |     string not = 27; | ||||||
|  |     string or = 28; | ||||||
|  |     string pass = 29; | ||||||
|  |     string raise = 30; | ||||||
|  |     string return = 31; | ||||||
|  |     string try = 32; | ||||||
|  |     string while = 33; | ||||||
|  |     string with = 34; | ||||||
|  |     string yield = 35; | ||||||
|  | } | ||||||
| @@ -15,4 +15,4 @@ message Test { | |||||||
|  |  | ||||||
| message Sibling { | message Sibling { | ||||||
|   int32 foo = 1; |   int32 foo = 1; | ||||||
| } | } | ||||||
							
								
								
									
19  betterproto/tests/inputs/nested2/nested2.proto  (Normal file)
							| @@ -0,0 +1,19 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | import "package.proto"; | ||||||
|  |  | ||||||
|  | message Game { | ||||||
|  |     message Player { | ||||||
|  |         enum Race { | ||||||
|  |             human = 0; | ||||||
|  |             orc = 1; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
|  | message Test { | ||||||
|  |     Game game = 1; | ||||||
|  |     Game.Player GamePlayer = 2; | ||||||
|  |     Game.Player.Race GamePlayerRace = 3; | ||||||
|  |     equipment.Weapon Weapon = 4; | ||||||
|  | } | ||||||
							
								
								
									
7  betterproto/tests/inputs/nested2/package.proto  (Normal file)
							| @@ -0,0 +1,7 @@ | |||||||
|  | syntax = "proto3"; | ||||||
|  |  | ||||||
|  | package equipment; | ||||||
|  |  | ||||||
|  | message Weapon { | ||||||
|  |  | ||||||
|  | } | ||||||
| @@ -1,10 +1,10 @@ | |||||||
| { | { | ||||||
|   "root": { |   "top": { | ||||||
|     "name": "double-nested", |     "name": "double-nested", | ||||||
|     "parent": { |     "middle": { | ||||||
|       "child": [{"foo": "hello"}], |       "bottom": [{"foo": "hello"}], | ||||||
|       "enumChild": ["A"], |       "enumBottom": ["A"], | ||||||
|       "rootParentChild": [{"a": "hello"}], |       "topMiddleBottom": [{"a": "hello"}], | ||||||
|       "bar": true |       "bar": true | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|   | |||||||
| @@ -1,26 +1,26 @@ | |||||||
| syntax = "proto3"; | syntax = "proto3"; | ||||||
|  |  | ||||||
| message Test { | message Test { | ||||||
|   message Root { |   message Top { | ||||||
|     message Parent { |     message Middle { | ||||||
|       message RootParentChild { |       message TopMiddleBottom { | ||||||
|         string a = 1; |         string a = 1; | ||||||
|       } |       } | ||||||
|       enum EnumChild{ |       enum EnumBottom{ | ||||||
|         A = 0; |         A = 0; | ||||||
|         B = 1; |         B = 1; | ||||||
|       } |       } | ||||||
|       message Child { |       message Bottom { | ||||||
|         string foo = 1; |         string foo = 1; | ||||||
|       } |       } | ||||||
|       reserved 1; |       reserved 1; | ||||||
|       repeated Child child = 2; |       repeated Bottom bottom = 2; | ||||||
|       repeated EnumChild enumChild=3; |       repeated EnumBottom enumBottom=3; | ||||||
|       repeated RootParentChild rootParentChild=4; |       repeated TopMiddleBottom topMiddleBottom=4; | ||||||
|       bool bar = 5; |       bool bar = 5; | ||||||
|     } |     } | ||||||
|     string name = 1; |     string name = 1; | ||||||
|     Parent parent = 2; |     Middle middle = 2; | ||||||
|   } |   } | ||||||
|   Root root = 1; |   Top top = 1; | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,5 +1,5 @@ | |||||||
| import betterproto | import betterproto | ||||||
| from betterproto.tests.output_betterproto.oneof.oneof import Test | from betterproto.tests.output_betterproto.oneof import Test | ||||||
| from betterproto.tests.util import get_test_case_json_data | from betterproto.tests.util import get_test_case_json_data | ||||||
|  |  | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,7 +1,7 @@ | |||||||
| import pytest | import pytest | ||||||
|  |  | ||||||
| import betterproto | import betterproto | ||||||
| from betterproto.tests.output_betterproto.oneof_enum.oneof_enum import ( | from betterproto.tests.output_betterproto.oneof_enum import ( | ||||||
|     Move, |     Move, | ||||||
|     Signal, |     Signal, | ||||||
|     Test, |     Test, | ||||||
|   | |||||||
| @@ -1,7 +1,5 @@ | |||||||
| syntax = "proto3"; | syntax = "proto3"; | ||||||
|  |  | ||||||
| package ref; |  | ||||||
|  |  | ||||||
| import "repeatedmessage.proto"; | import "repeatedmessage.proto"; | ||||||
|  |  | ||||||
| message Test { | message Test { | ||||||
|   | |||||||
| @@ -3,13 +3,25 @@ syntax = "proto3"; | |||||||
| package service; | package service; | ||||||
|  |  | ||||||
| message DoThingRequest { | message DoThingRequest { | ||||||
|   int32 iterations = 1; |   string name = 1; | ||||||
| } | } | ||||||
|  |  | ||||||
| message DoThingResponse { | message DoThingResponse { | ||||||
|   int32 successfulIterations = 1; |   repeated string names = 1; | ||||||
|  | } | ||||||
|  |  | ||||||
|  | message GetThingRequest { | ||||||
|  |   string name = 1; | ||||||
|  | } | ||||||
|  |  | ||||||
|  | message GetThingResponse { | ||||||
|  |   string name = 1; | ||||||
|  |   int32 version = 2; | ||||||
| } | } | ||||||
|  |  | ||||||
| service Test { | service Test { | ||||||
|   rpc DoThing (DoThingRequest) returns (DoThingResponse); |   rpc DoThing (DoThingRequest) returns (DoThingResponse); | ||||||
|  |   rpc DoManyThings (stream DoThingRequest) returns (DoThingResponse); | ||||||
|  |   rpc GetThingVersions (GetThingRequest) returns (stream GetThingResponse); | ||||||
|  |   rpc GetDifferentThings (stream GetThingRequest) returns (stream GetThingResponse); | ||||||
| } | } | ||||||
|   | |||||||
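
With these four RPCs the generated TestStub covers every gRPC cardinality. The sketch below mirrors the calls exercised in test_grpclib_client.py, using an in-process channel and an AsyncChannel for the client-streaming side:

    import asyncio
    from grpclib.testing import ChannelFor

    from betterproto.grpc.util.async_channel import AsyncChannel
    from betterproto.tests.grpc.thing_service import ThingService
    from betterproto.tests.output_betterproto.service.service import (
        GetThingRequest,
        TestStub,
    )

    async def main() -> None:
        async with ChannelFor([ThingService()]) as channel:
            client = TestStub(channel)

            # Unary-unary
            response = await client.do_thing(name="clean room")
            print(response.names)

            # Unary-stream: the stub returns an async generator
            async for thing in client.get_thing_versions(name="clean room"):
                print(thing.name, thing.version)

            # Stream-stream: feed requests through an AsyncChannel and close it
            requests = AsyncChannel()
            await requests.send_from(
                (GetThingRequest(name) for name in ["cake", "cricket"]), close=True
            )
            async for thing in client.get_different_things(requests):
                print(thing.name, thing.version)

    asyncio.get_event_loop().run_until_complete(main())
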
| @@ -1,132 +0,0 @@ | |||||||
| import betterproto |  | ||||||
| import grpclib |  | ||||||
| from grpclib.testing import ChannelFor |  | ||||||
| import pytest |  | ||||||
| from typing import Dict |  | ||||||
|  |  | ||||||
| from betterproto.tests.output_betterproto.service.service import ( |  | ||||||
|     DoThingResponse, |  | ||||||
|     DoThingRequest, |  | ||||||
|     TestStub as ExampleServiceStub, |  | ||||||
| ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ExampleService: |  | ||||||
|     def __init__(self, test_hook=None): |  | ||||||
|         # This lets us pass assertions to the servicer ;) |  | ||||||
|         self.test_hook = test_hook |  | ||||||
|  |  | ||||||
|     async def DoThing( |  | ||||||
|         self, stream: "grpclib.server.Stream[DoThingRequest, DoThingResponse]" |  | ||||||
|     ): |  | ||||||
|         request = await stream.recv_message() |  | ||||||
|         print("self.test_hook", self.test_hook) |  | ||||||
|         if self.test_hook is not None: |  | ||||||
|             self.test_hook(stream) |  | ||||||
|         for iteration in range(request.iterations): |  | ||||||
|             pass |  | ||||||
|         await stream.send_message(DoThingResponse(request.iterations)) |  | ||||||
|  |  | ||||||
|     def __mapping__(self) -> Dict[str, grpclib.const.Handler]: |  | ||||||
|         return { |  | ||||||
|             "/service.Test/DoThing": grpclib.const.Handler( |  | ||||||
|                 self.DoThing, |  | ||||||
|                 grpclib.const.Cardinality.UNARY_UNARY, |  | ||||||
|                 DoThingRequest, |  | ||||||
|                 DoThingResponse, |  | ||||||
|             ) |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|  |  | ||||||
| async def _test_stub(stub, iterations=42, **kwargs): |  | ||||||
|     response = await stub.do_thing(iterations=iterations) |  | ||||||
|     assert response.successful_iterations == iterations |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def _get_server_side_test(deadline, metadata): |  | ||||||
|     def server_side_test(stream): |  | ||||||
|         assert stream.deadline._timestamp == pytest.approx( |  | ||||||
|             deadline._timestamp, 1 |  | ||||||
|         ), "The provided deadline should be recieved serverside" |  | ||||||
|         assert ( |  | ||||||
|             stream.metadata["authorization"] == metadata["authorization"] |  | ||||||
|         ), "The provided authorization metadata should be recieved serverside" |  | ||||||
|  |  | ||||||
|     return server_side_test |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @pytest.mark.asyncio |  | ||||||
| async def test_simple_service_call(): |  | ||||||
|     async with ChannelFor([ExampleService()]) as channel: |  | ||||||
|         await _test_stub(ExampleServiceStub(channel)) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @pytest.mark.asyncio |  | ||||||
| async def test_service_call_with_upfront_request_params(): |  | ||||||
|     # Setting deadline |  | ||||||
|     deadline = grpclib.metadata.Deadline.from_timeout(22) |  | ||||||
|     metadata = {"authorization": "12345"} |  | ||||||
|     async with ChannelFor( |  | ||||||
|         [ExampleService(test_hook=_get_server_side_test(deadline, metadata))] |  | ||||||
|     ) as channel: |  | ||||||
|         await _test_stub( |  | ||||||
|             ExampleServiceStub(channel, deadline=deadline, metadata=metadata) |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     # Setting timeout |  | ||||||
|     timeout = 99 |  | ||||||
|     deadline = grpclib.metadata.Deadline.from_timeout(timeout) |  | ||||||
|     metadata = {"authorization": "12345"} |  | ||||||
|     async with ChannelFor( |  | ||||||
|         [ExampleService(test_hook=_get_server_side_test(deadline, metadata))] |  | ||||||
|     ) as channel: |  | ||||||
|         await _test_stub( |  | ||||||
|             ExampleServiceStub(channel, timeout=timeout, metadata=metadata) |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @pytest.mark.asyncio |  | ||||||
| async def test_service_call_lower_level_with_overrides(): |  | ||||||
|     ITERATIONS = 99 |  | ||||||
|  |  | ||||||
|     # Setting deadline |  | ||||||
|     deadline = grpclib.metadata.Deadline.from_timeout(22) |  | ||||||
|     metadata = {"authorization": "12345"} |  | ||||||
|     kwarg_deadline = grpclib.metadata.Deadline.from_timeout(28) |  | ||||||
|     kwarg_metadata = {"authorization": "12345"} |  | ||||||
|     async with ChannelFor( |  | ||||||
|         [ExampleService(test_hook=_get_server_side_test(deadline, metadata))] |  | ||||||
|     ) as channel: |  | ||||||
|         stub = ExampleServiceStub(channel, deadline=deadline, metadata=metadata) |  | ||||||
|         response = await stub._unary_unary( |  | ||||||
|             "/service.Test/DoThing", |  | ||||||
|             DoThingRequest(ITERATIONS), |  | ||||||
|             DoThingResponse, |  | ||||||
|             deadline=kwarg_deadline, |  | ||||||
|             metadata=kwarg_metadata, |  | ||||||
|         ) |  | ||||||
|         assert response.successful_iterations == ITERATIONS |  | ||||||
|  |  | ||||||
|     # Setting timeout |  | ||||||
|     timeout = 99 |  | ||||||
|     deadline = grpclib.metadata.Deadline.from_timeout(timeout) |  | ||||||
|     metadata = {"authorization": "12345"} |  | ||||||
|     kwarg_timeout = 9000 |  | ||||||
|     kwarg_deadline = grpclib.metadata.Deadline.from_timeout(kwarg_timeout) |  | ||||||
|     kwarg_metadata = {"authorization": "09876"} |  | ||||||
|     async with ChannelFor( |  | ||||||
|         [ |  | ||||||
|             ExampleService( |  | ||||||
|                 test_hook=_get_server_side_test(kwarg_deadline, kwarg_metadata) |  | ||||||
|             ) |  | ||||||
|         ] |  | ||||||
|     ) as channel: |  | ||||||
|         stub = ExampleServiceStub(channel, deadline=deadline, metadata=metadata) |  | ||||||
|         response = await stub._unary_unary( |  | ||||||
|             "/service.Test/DoThing", |  | ||||||
|             DoThingRequest(ITERATIONS), |  | ||||||
|             DoThingResponse, |  | ||||||
|             timeout=kwarg_timeout, |  | ||||||
|             metadata=kwarg_metadata, |  | ||||||
|         ) |  | ||||||
|         assert response.successful_iterations == ITERATIONS |  | ||||||
| @@ -1,10 +0,0 @@ | |||||||
| # Test cases that are expected to fail, e.g. unimplemented features or bug-fixes. |  | ||||||
| # Remove from list when fixed. |  | ||||||
| tests = { |  | ||||||
|     "import_root_sibling", |  | ||||||
|     "import_child_package_from_package", |  | ||||||
|     "import_root_package_from_child", |  | ||||||
|     "import_parent_package_from_child", |  | ||||||
|     "import_circular_dependency", |  | ||||||
|     "oneof_enum", |  | ||||||
| } |  | ||||||
| @@ -8,6 +8,7 @@ class MockChannel(Channel): | |||||||
|     def __init__(self, responses=None) -> None: |     def __init__(self, responses=None) -> None: | ||||||
|         self.responses = responses if responses else [] |         self.responses = responses if responses else [] | ||||||
|         self.requests = [] |         self.requests = [] | ||||||
|  |         self._loop = None | ||||||
|  |  | ||||||
|     def request(self, route, cardinality, request, response_type, **kwargs): |     def request(self, route, cardinality, request, response_type, **kwargs): | ||||||
|         self.requests.append( |         self.requests.append( | ||||||
|   | |||||||
							
								
								
									
125 betterproto/tests/test_casing.py Normal file
							| @@ -0,0 +1,125 @@ | |||||||
|  | import pytest | ||||||
|  |  | ||||||
|  | from betterproto.casing import camel_case, pascal_case, snake_case | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     ["value", "expected"], | ||||||
|  |     [ | ||||||
|  |         ("", ""), | ||||||
|  |         ("a", "A"), | ||||||
|  |         ("foobar", "Foobar"), | ||||||
|  |         ("fooBar", "FooBar"), | ||||||
|  |         ("FooBar", "FooBar"), | ||||||
|  |         ("foo.bar", "FooBar"), | ||||||
|  |         ("foo_bar", "FooBar"), | ||||||
|  |         ("FOOBAR", "Foobar"), | ||||||
|  |         ("FOOBar", "FooBar"), | ||||||
|  |         ("UInt32", "UInt32"), | ||||||
|  |         ("FOO_BAR", "FooBar"), | ||||||
|  |         ("FOOBAR1", "Foobar1"), | ||||||
|  |         ("FOOBAR_1", "Foobar1"), | ||||||
|  |         ("FOO1BAR2", "Foo1Bar2"), | ||||||
|  |         ("foo__bar", "FooBar"), | ||||||
|  |         ("_foobar", "Foobar"), | ||||||
|  |         ("foobaR", "FoobaR"), | ||||||
|  |         ("foo~bar", "FooBar"), | ||||||
|  |         ("foo:bar", "FooBar"), | ||||||
|  |         ("1foobar", "1Foobar"), | ||||||
|  |     ], | ||||||
|  | ) | ||||||
|  | def test_pascal_case(value, expected): | ||||||
|  |     actual = pascal_case(value, strict=True) | ||||||
|  |     assert actual == expected, f"{value} => {expected} (actual: {actual})" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     ["value", "expected"], | ||||||
|  |     [ | ||||||
|  |         ("", ""), | ||||||
|  |         ("a", "a"), | ||||||
|  |         ("foobar", "foobar"), | ||||||
|  |         ("fooBar", "fooBar"), | ||||||
|  |         ("FooBar", "fooBar"), | ||||||
|  |         ("foo.bar", "fooBar"), | ||||||
|  |         ("foo_bar", "fooBar"), | ||||||
|  |         ("FOOBAR", "foobar"), | ||||||
|  |         ("FOO_BAR", "fooBar"), | ||||||
|  |         ("FOOBAR1", "foobar1"), | ||||||
|  |         ("FOOBAR_1", "foobar1"), | ||||||
|  |         ("FOO1BAR2", "foo1Bar2"), | ||||||
|  |         ("foo__bar", "fooBar"), | ||||||
|  |         ("_foobar", "foobar"), | ||||||
|  |         ("foobaR", "foobaR"), | ||||||
|  |         ("foo~bar", "fooBar"), | ||||||
|  |         ("foo:bar", "fooBar"), | ||||||
|  |         ("1foobar", "1Foobar"), | ||||||
|  |     ], | ||||||
|  | ) | ||||||
|  | def test_camel_case_strict(value, expected): | ||||||
|  |     actual = camel_case(value, strict=True) | ||||||
|  |     assert actual == expected, f"{value} => {expected} (actual: {actual})" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     ["value", "expected"], | ||||||
|  |     [ | ||||||
|  |         ("foo_bar", "fooBar"), | ||||||
|  |         ("FooBar", "fooBar"), | ||||||
|  |         ("foo__bar", "foo_Bar"), | ||||||
|  |         ("foo__Bar", "foo__Bar"), | ||||||
|  |     ], | ||||||
|  | ) | ||||||
|  | def test_camel_case_not_strict(value, expected): | ||||||
|  |     actual = camel_case(value, strict=False) | ||||||
|  |     assert actual == expected, f"{value} => {expected} (actual: {actual})" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     ["value", "expected"], | ||||||
|  |     [ | ||||||
|  |         ("", ""), | ||||||
|  |         ("a", "a"), | ||||||
|  |         ("foobar", "foobar"), | ||||||
|  |         ("fooBar", "foo_bar"), | ||||||
|  |         ("FooBar", "foo_bar"), | ||||||
|  |         ("foo.bar", "foo_bar"), | ||||||
|  |         ("foo_bar", "foo_bar"), | ||||||
|  |         ("foo_Bar", "foo_bar"), | ||||||
|  |         ("FOOBAR", "foobar"), | ||||||
|  |         ("FOOBar", "foo_bar"), | ||||||
|  |         ("UInt32", "u_int32"), | ||||||
|  |         ("FOO_BAR", "foo_bar"), | ||||||
|  |         ("FOOBAR1", "foobar1"), | ||||||
|  |         ("FOOBAR_1", "foobar_1"), | ||||||
|  |         ("FOOBAR_123", "foobar_123"), | ||||||
|  |         ("FOO1BAR2", "foo1_bar2"), | ||||||
|  |         ("foo__bar", "foo_bar"), | ||||||
|  |         ("_foobar", "foobar"), | ||||||
|  |         ("foobaR", "fooba_r"), | ||||||
|  |         ("foo~bar", "foo_bar"), | ||||||
|  |         ("foo:bar", "foo_bar"), | ||||||
|  |         ("1foobar", "1_foobar"), | ||||||
|  |         ("GetUInt64", "get_u_int64"), | ||||||
|  |     ], | ||||||
|  | ) | ||||||
|  | def test_snake_case_strict(value, expected): | ||||||
|  |     actual = snake_case(value) | ||||||
|  |     assert actual == expected, f"{value} => {expected} (actual: {actual})" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     ["value", "expected"], | ||||||
|  |     [ | ||||||
|  |         ("fooBar", "foo_bar"), | ||||||
|  |         ("FooBar", "foo_bar"), | ||||||
|  |         ("foo_Bar", "foo__bar"), | ||||||
|  |         ("foo__bar", "foo__bar"), | ||||||
|  |         ("FOOBar", "foo_bar"), | ||||||
|  |         ("__foo", "__foo"), | ||||||
|  |         ("GetUInt64", "get_u_int64"), | ||||||
|  |     ], | ||||||
|  | ) | ||||||
|  | def test_snake_case_not_strict(value, expected): | ||||||
|  |     actual = snake_case(value, strict=False) | ||||||
|  |     assert actual == expected, f"{value} => {expected} (actual: {actual})" | ||||||
							
								
								
									
315 betterproto/tests/test_get_ref_type.py Normal file
							| @@ -0,0 +1,315 @@ | |||||||
|  | import pytest | ||||||
|  |  | ||||||
|  | from ..compile.importing import get_type_reference, parse_source_type_name | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     ["google_type", "expected_name", "expected_import"], | ||||||
|  |     [ | ||||||
|  |         ( | ||||||
|  |             ".google.protobuf.Empty", | ||||||
|  |             "betterproto_lib_google_protobuf.Empty", | ||||||
|  |             "import betterproto.lib.google.protobuf as betterproto_lib_google_protobuf", | ||||||
|  |         ), | ||||||
|  |         ( | ||||||
|  |             ".google.protobuf.Struct", | ||||||
|  |             "betterproto_lib_google_protobuf.Struct", | ||||||
|  |             "import betterproto.lib.google.protobuf as betterproto_lib_google_protobuf", | ||||||
|  |         ), | ||||||
|  |         ( | ||||||
|  |             ".google.protobuf.ListValue", | ||||||
|  |             "betterproto_lib_google_protobuf.ListValue", | ||||||
|  |             "import betterproto.lib.google.protobuf as betterproto_lib_google_protobuf", | ||||||
|  |         ), | ||||||
|  |         ( | ||||||
|  |             ".google.protobuf.Value", | ||||||
|  |             "betterproto_lib_google_protobuf.Value", | ||||||
|  |             "import betterproto.lib.google.protobuf as betterproto_lib_google_protobuf", | ||||||
|  |         ), | ||||||
|  |     ], | ||||||
|  | ) | ||||||
|  | def test_reference_google_wellknown_types_non_wrappers( | ||||||
|  |     google_type: str, expected_name: str, expected_import: str | ||||||
|  | ): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference(package="", imports=imports, source_type=google_type) | ||||||
|  |  | ||||||
|  |     assert name == expected_name | ||||||
|  |     assert expected_import in imports, f"{expected_import} not found in {imports}" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     ["google_type", "expected_name"], | ||||||
|  |     [ | ||||||
|  |         (".google.protobuf.DoubleValue", "Optional[float]"), | ||||||
|  |         (".google.protobuf.FloatValue", "Optional[float]"), | ||||||
|  |         (".google.protobuf.Int32Value", "Optional[int]"), | ||||||
|  |         (".google.protobuf.Int64Value", "Optional[int]"), | ||||||
|  |         (".google.protobuf.UInt32Value", "Optional[int]"), | ||||||
|  |         (".google.protobuf.UInt64Value", "Optional[int]"), | ||||||
|  |         (".google.protobuf.BoolValue", "Optional[bool]"), | ||||||
|  |         (".google.protobuf.StringValue", "Optional[str]"), | ||||||
|  |         (".google.protobuf.BytesValue", "Optional[bytes]"), | ||||||
|  |     ], | ||||||
|  | ) | ||||||
|  | def test_referencing_google_wrappers_unwraps_them( | ||||||
|  |     google_type: str, expected_name: str | ||||||
|  | ): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference(package="", imports=imports, source_type=google_type) | ||||||
|  |  | ||||||
|  |     assert name == expected_name | ||||||
|  |     assert imports == set() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     ["google_type", "expected_name"], | ||||||
|  |     [ | ||||||
|  |         (".google.protobuf.DoubleValue", "betterproto_lib_google_protobuf.DoubleValue"), | ||||||
|  |         (".google.protobuf.FloatValue", "betterproto_lib_google_protobuf.FloatValue"), | ||||||
|  |         (".google.protobuf.Int32Value", "betterproto_lib_google_protobuf.Int32Value"), | ||||||
|  |         (".google.protobuf.Int64Value", "betterproto_lib_google_protobuf.Int64Value"), | ||||||
|  |         (".google.protobuf.UInt32Value", "betterproto_lib_google_protobuf.UInt32Value"), | ||||||
|  |         (".google.protobuf.UInt64Value", "betterproto_lib_google_protobuf.UInt64Value"), | ||||||
|  |         (".google.protobuf.BoolValue", "betterproto_lib_google_protobuf.BoolValue"), | ||||||
|  |         (".google.protobuf.StringValue", "betterproto_lib_google_protobuf.StringValue"), | ||||||
|  |         (".google.protobuf.BytesValue", "betterproto_lib_google_protobuf.BytesValue"), | ||||||
|  |     ], | ||||||
|  | ) | ||||||
|  | def test_referencing_google_wrappers_without_unwrapping( | ||||||
|  |     google_type: str, expected_name: str | ||||||
|  | ): | ||||||
|  |     name = get_type_reference( | ||||||
|  |         package="", imports=set(), source_type=google_type, unwrap=False | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     assert name == expected_name | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_child_package_from_package(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference( | ||||||
|  |         package="package", imports=imports, source_type="package.child.Message" | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     assert imports == {"from . import child"} | ||||||
|  |     assert name == "child.Message" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_child_package_from_root(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference(package="", imports=imports, source_type="child.Message") | ||||||
|  |  | ||||||
|  |     assert imports == {"from . import child"} | ||||||
|  |     assert name == "child.Message" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_camel_cased(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference( | ||||||
|  |         package="", imports=imports, source_type="child_package.example_message" | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     assert imports == {"from . import child_package"} | ||||||
|  |     assert name == "child_package.ExampleMessage" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_nested_child_from_root(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference( | ||||||
|  |         package="", imports=imports, source_type="nested.child.Message" | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     assert imports == {"from .nested import child as nested_child"} | ||||||
|  |     assert name == "nested_child.Message" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_deeply_nested_child_from_root(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference( | ||||||
|  |         package="", imports=imports, source_type="deeply.nested.child.Message" | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     assert imports == {"from .deeply.nested import child as deeply_nested_child"} | ||||||
|  |     assert name == "deeply_nested_child.Message" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_deeply_nested_child_from_package(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference( | ||||||
|  |         package="package", | ||||||
|  |         imports=imports, | ||||||
|  |         source_type="package.deeply.nested.child.Message", | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     assert imports == {"from .deeply.nested import child as deeply_nested_child"} | ||||||
|  |     assert name == "deeply_nested_child.Message" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_root_sibling(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference(package="", imports=imports, source_type="Message") | ||||||
|  |  | ||||||
|  |     assert imports == set() | ||||||
|  |     assert name == '"Message"' | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_nested_siblings(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference(package="foo", imports=imports, source_type="foo.Message") | ||||||
|  |  | ||||||
|  |     assert imports == set() | ||||||
|  |     assert name == '"Message"' | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_deeply_nested_siblings(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference( | ||||||
|  |         package="foo.bar", imports=imports, source_type="foo.bar.Message" | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     assert imports == set() | ||||||
|  |     assert name == '"Message"' | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_parent_package_from_child(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference( | ||||||
|  |         package="package.child", imports=imports, source_type="package.Message" | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     assert imports == {"from ... import package as __package__"} | ||||||
|  |     assert name == "__package__.Message" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_parent_package_from_deeply_nested_child(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference( | ||||||
|  |         package="package.deeply.nested.child", | ||||||
|  |         imports=imports, | ||||||
|  |         source_type="package.deeply.nested.Message", | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     assert imports == {"from ... import nested as __nested__"} | ||||||
|  |     assert name == "__nested__.Message" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_ancestor_package_from_nested_child(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference( | ||||||
|  |         package="package.ancestor.nested.child", | ||||||
|  |         imports=imports, | ||||||
|  |         source_type="package.ancestor.Message", | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     assert imports == {"from .... import ancestor as ___ancestor__"} | ||||||
|  |     assert name == "___ancestor__.Message" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_root_package_from_child(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference( | ||||||
|  |         package="package.child", imports=imports, source_type="Message" | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     assert imports == {"from ... import Message as __Message__"} | ||||||
|  |     assert name == "__Message__" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_root_package_from_deeply_nested_child(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference( | ||||||
|  |         package="package.deeply.nested.child", imports=imports, source_type="Message" | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     assert imports == {"from ..... import Message as ____Message__"} | ||||||
|  |     assert name == "____Message__" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_unrelated_package(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference(package="a", imports=imports, source_type="p.Message") | ||||||
|  |  | ||||||
|  |     assert imports == {"from .. import p as _p__"} | ||||||
|  |     assert name == "_p__.Message" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_unrelated_nested_package(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference(package="a.b", imports=imports, source_type="p.q.Message") | ||||||
|  |  | ||||||
|  |     assert imports == {"from ...p import q as __p_q__"} | ||||||
|  |     assert name == "__p_q__.Message" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_unrelated_deeply_nested_package(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference( | ||||||
|  |         package="a.b.c.d", imports=imports, source_type="p.q.r.s.Message" | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     assert imports == {"from .....p.q.r import s as ____p_q_r_s__"} | ||||||
|  |     assert name == "____p_q_r_s__.Message" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_cousin_package(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference(package="a.x", imports=imports, source_type="a.y.Message") | ||||||
|  |  | ||||||
|  |     assert imports == {"from .. import y as _y__"} | ||||||
|  |     assert name == "_y__.Message" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_cousin_package_different_name(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference( | ||||||
|  |         package="test.package1", imports=imports, source_type="cousin.package2.Message" | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     assert imports == {"from ...cousin import package2 as __cousin_package2__"} | ||||||
|  |     assert name == "__cousin_package2__.Message" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_cousin_package_same_name(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference( | ||||||
|  |         package="test.package", imports=imports, source_type="cousin.package.Message" | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     assert imports == {"from ...cousin import package as __cousin_package__"} | ||||||
|  |     assert name == "__cousin_package__.Message" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_far_cousin_package(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference( | ||||||
|  |         package="a.x.y", imports=imports, source_type="a.b.c.Message" | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     assert imports == {"from ...b import c as __b_c__"} | ||||||
|  |     assert name == "__b_c__.Message" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_reference_far_far_cousin_package(): | ||||||
|  |     imports = set() | ||||||
|  |     name = get_type_reference( | ||||||
|  |         package="a.x.y.z", imports=imports, source_type="a.b.c.d.Message" | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     assert imports == {"from ....b.c import d as ___b_c_d__"} | ||||||
|  |     assert name == "___b_c_d__.Message" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     ["full_name", "expected_output"], | ||||||
|  |     [ | ||||||
|  |         ("package.SomeMessage.NestedType", ("package", "SomeMessage.NestedType")), | ||||||
|  |         (".package.SomeMessage.NestedType", ("package", "SomeMessage.NestedType")), | ||||||
|  |         (".service.ExampleRequest", ("service", "ExampleRequest")), | ||||||
|  |         (".package.lower_case_message", ("package", "lower_case_message")), | ||||||
|  |     ], | ||||||
|  | ) | ||||||
|  | def test_parse_field_type_name(full_name, expected_output): | ||||||
|  |     assert parse_source_type_name(full_name) == expected_output | ||||||
| @@ -3,14 +3,20 @@ import json | |||||||
| import os | import os | ||||||
| import sys | import sys | ||||||
| from collections import namedtuple | from collections import namedtuple | ||||||
|  | from types import ModuleType | ||||||
| from typing import Set | from typing import Set | ||||||
|  |  | ||||||
| import pytest | import pytest | ||||||
|  |  | ||||||
| import betterproto | import betterproto | ||||||
| from betterproto.tests.inputs import xfail | from betterproto.tests.inputs import config as test_input_config | ||||||
| from betterproto.tests.mocks import MockChannel | from betterproto.tests.mocks import MockChannel | ||||||
| from betterproto.tests.util import get_directories, get_test_case_json_data, inputs_path | from betterproto.tests.util import ( | ||||||
|  |     find_module, | ||||||
|  |     get_directories, | ||||||
|  |     get_test_case_json_data, | ||||||
|  |     inputs_path, | ||||||
|  | ) | ||||||
|  |  | ||||||
| # Force pure-python implementation instead of C++, otherwise imports | # Force pure-python implementation instead of C++, otherwise imports | ||||||
| # break things because we can't properly reset the symbol database. | # break things because we can't properly reset the symbol database. | ||||||
| @@ -23,13 +29,17 @@ from google.protobuf.json_format import Parse | |||||||
|  |  | ||||||
| class TestCases: | class TestCases: | ||||||
|     def __init__(self, path, services: Set[str], xfail: Set[str]): |     def __init__(self, path, services: Set[str], xfail: Set[str]): | ||||||
|         _all = set(get_directories(path)) |         _all = set(get_directories(path)) - {"__pycache__"} | ||||||
|         _services = services |         _services = services | ||||||
|         _messages = _all - services |         _messages = (_all - services) - {"__pycache__"} | ||||||
|         _messages_with_json = { |         _messages_with_json = { | ||||||
|             test for test in _messages if get_test_case_json_data(test) |             test for test in _messages if get_test_case_json_data(test) | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |         unknown_xfail_tests = xfail - _all | ||||||
|  |         if unknown_xfail_tests: | ||||||
|  |             raise Exception(f"Unknown test(s) in config.py: {unknown_xfail_tests}") | ||||||
|  |  | ||||||
|         self.all = self.apply_xfail_marks(_all, xfail) |         self.all = self.apply_xfail_marks(_all, xfail) | ||||||
|         self.services = self.apply_xfail_marks(_services, xfail) |         self.services = self.apply_xfail_marks(_services, xfail) | ||||||
|         self.messages = self.apply_xfail_marks(_messages, xfail) |         self.messages = self.apply_xfail_marks(_messages, xfail) | ||||||
| @@ -45,16 +55,18 @@ class TestCases: | |||||||
|  |  | ||||||
| test_cases = TestCases( | test_cases = TestCases( | ||||||
|     path=inputs_path, |     path=inputs_path, | ||||||
|     # test cases for services |     services=test_input_config.services, | ||||||
|     services={"googletypes_response", "googletypes_response_embedded", "service"}, |     xfail=test_input_config.xfail, | ||||||
|     xfail=xfail.tests, |  | ||||||
| ) | ) | ||||||
|  |  | ||||||
| plugin_output_package = "betterproto.tests.output_betterproto" | plugin_output_package = "betterproto.tests.output_betterproto" | ||||||
| reference_output_package = "betterproto.tests.output_reference" | reference_output_package = "betterproto.tests.output_reference" | ||||||
|  |  | ||||||
|  | TestData = namedtuple("TestData", ["plugin_module", "reference_module", "json_data"]) | ||||||
|  |  | ||||||
| TestData = namedtuple("TestData", "plugin_module, reference_module, json_data") |  | ||||||
|  | def module_has_entry_point(module: ModuleType): | ||||||
|  |     return any(hasattr(module, attr) for attr in ["Test", "TestStub"]) | ||||||
|  |  | ||||||
|  |  | ||||||
| @pytest.fixture | @pytest.fixture | ||||||
| @@ -72,11 +84,19 @@ def test_data(request): | |||||||
|  |  | ||||||
|     sys.path.append(reference_module_root) |     sys.path.append(reference_module_root) | ||||||
|  |  | ||||||
|  |     plugin_module = importlib.import_module(f"{plugin_output_package}.{test_case_name}") | ||||||
|  |  | ||||||
|  |     plugin_module_entry_point = find_module(plugin_module, module_has_entry_point) | ||||||
|  |  | ||||||
|  |     if not plugin_module_entry_point: | ||||||
|  |         raise Exception( | ||||||
|  |             f"Test case {repr(test_case_name)} has no entry point. " | ||||||
|  |             "Please add a proto message or service called Test and recompile." | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     yield ( |     yield ( | ||||||
|         TestData( |         TestData( | ||||||
|             plugin_module=importlib.import_module( |             plugin_module=plugin_module_entry_point, | ||||||
|                 f"{plugin_output_package}.{test_case_name}.{test_case_name}" |  | ||||||
|             ), |  | ||||||
|             reference_module=lambda: importlib.import_module( |             reference_module=lambda: importlib.import_module( | ||||||
|                 f"{reference_output_package}.{test_case_name}.{test_case_name}_pb2" |                 f"{reference_output_package}.{test_case_name}.{test_case_name}_pb2" | ||||||
|             ), |             ), | ||||||
| @@ -111,7 +131,7 @@ def test_message_json(repeat, test_data: TestData) -> None: | |||||||
|         message.from_json(json_data) |         message.from_json(json_data) | ||||||
|         message_json = message.to_json(0) |         message_json = message.to_json(0) | ||||||
|  |  | ||||||
|         assert json.loads(json_data) == json.loads(message_json) |         assert json.loads(message_json) == json.loads(json_data) | ||||||
|  |  | ||||||
|  |  | ||||||
| @pytest.mark.parametrize("test_data", test_cases.services, indirect=True) | @pytest.mark.parametrize("test_data", test_cases.services, indirect=True) | ||||||
|   | |||||||
| @@ -1,23 +1,27 @@ | |||||||
|  | import asyncio | ||||||
|  | import importlib | ||||||
| import os | import os | ||||||
| import subprocess | import pathlib | ||||||
| from typing import Generator | from pathlib import Path | ||||||
|  | from types import ModuleType | ||||||
|  | from typing import Callable, Generator, Optional | ||||||
|  |  | ||||||
| os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python" | os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python" | ||||||
|  |  | ||||||
| root_path = os.path.dirname(os.path.realpath(__file__)) | root_path = Path(__file__).resolve().parent | ||||||
| inputs_path = os.path.join(root_path, "inputs") | inputs_path = root_path.joinpath("inputs") | ||||||
| output_path_reference = os.path.join(root_path, "output_reference") | output_path_reference = root_path.joinpath("output_reference") | ||||||
| output_path_betterproto = os.path.join(root_path, "output_betterproto") | output_path_betterproto = root_path.joinpath("output_betterproto") | ||||||
|  |  | ||||||
| if os.name == "nt": | if os.name == "nt": | ||||||
|     plugin_path = os.path.join(root_path, "..", "plugin.bat") |     plugin_path = root_path.joinpath("..", "plugin.bat").resolve() | ||||||
| else: | else: | ||||||
|     plugin_path = os.path.join(root_path, "..", "plugin.py") |     plugin_path = root_path.joinpath("..", "plugin.py").resolve() | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_files(path, end: str) -> Generator[str, None, None]: | def get_files(path, suffix: str) -> Generator[str, None, None]: | ||||||
|     for r, dirs, files in os.walk(path): |     for r, dirs, files in os.walk(path): | ||||||
|         for filename in [f for f in files if f.endswith(end)]: |         for filename in [f for f in files if f.endswith(suffix)]: | ||||||
|             yield os.path.join(r, filename) |             yield os.path.join(r, filename) | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -27,35 +31,62 @@ def get_directories(path): | |||||||
|             yield directory |             yield directory | ||||||
|  |  | ||||||
|  |  | ||||||
| def relative(file: str, path: str): | async def protoc_plugin(path: str, output_dir: str): | ||||||
|     return os.path.join(os.path.dirname(file), path) |     proc = await asyncio.create_subprocess_shell( | ||||||
|  |  | ||||||
|  |  | ||||||
| def read_relative(file: str, path: str): |  | ||||||
|     with open(relative(file, path)) as fh: |  | ||||||
|         return fh.read() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def protoc_plugin(path: str, output_dir: str): |  | ||||||
|     subprocess.run( |  | ||||||
|         f"protoc --plugin=protoc-gen-custom={plugin_path} --custom_out={output_dir} --proto_path={path} {path}/*.proto", |         f"protoc --plugin=protoc-gen-custom={plugin_path} --custom_out={output_dir} --proto_path={path} {path}/*.proto", | ||||||
|         shell=True, |         stdout=asyncio.subprocess.PIPE, | ||||||
|  |         stderr=asyncio.subprocess.PIPE, | ||||||
|     ) |     ) | ||||||
|  |     return (*(await proc.communicate()), proc.returncode) | ||||||
|  |  | ||||||
|  |  | ||||||
| def protoc_reference(path: str, output_dir: str): | async def protoc_reference(path: str, output_dir: str): | ||||||
|     subprocess.run( |     proc = await asyncio.create_subprocess_shell( | ||||||
|         f"protoc --python_out={output_dir} --proto_path={path} {path}/*.proto", |         f"protoc --python_out={output_dir} --proto_path={path} {path}/*.proto", | ||||||
|         shell=True, |         stdout=asyncio.subprocess.PIPE, | ||||||
|  |         stderr=asyncio.subprocess.PIPE, | ||||||
|     ) |     ) | ||||||
|  |     return (*(await proc.communicate()), proc.returncode) | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_test_case_json_data(test_case_name, json_file_name=None): | def get_test_case_json_data(test_case_name: str, json_file_name: Optional[str] = None): | ||||||
|     test_data_file_name = json_file_name if json_file_name else f"{test_case_name}.json" |     test_data_file_name = json_file_name if json_file_name else f"{test_case_name}.json" | ||||||
|     test_data_file_path = os.path.join(inputs_path, test_case_name, test_data_file_name) |     test_data_file_path = inputs_path.joinpath(test_case_name, test_data_file_name) | ||||||
|  |  | ||||||
|     if not os.path.exists(test_data_file_path): |     if not test_data_file_path.exists(): | ||||||
|         return None |         return None | ||||||
|  |  | ||||||
|     with open(test_data_file_path) as fh: |     with test_data_file_path.open("r") as fh: | ||||||
|         return fh.read() |         return fh.read() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def find_module( | ||||||
|  |     module: ModuleType, predicate: Callable[[ModuleType], bool] | ||||||
|  | ) -> Optional[ModuleType]: | ||||||
|  |     """ | ||||||
|  |     Recursively search module tree for a module that matches the search predicate. | ||||||
|  |     Assumes that the submodules are directories containing __init__.py. | ||||||
|  |  | ||||||
|  |     Example: | ||||||
|  |  | ||||||
|  |         # find module inside foo that contains Test | ||||||
|  |         import foo | ||||||
|  |         test_module = find_module(foo, lambda m: hasattr(m, 'Test')) | ||||||
|  |     """ | ||||||
|  |     if predicate(module): | ||||||
|  |         return module | ||||||
|  |  | ||||||
|  |     module_path = pathlib.Path(*module.__path__) | ||||||
|  |  | ||||||
|  |     for sub in list(sub.parent for sub in module_path.glob("**/__init__.py")): | ||||||
|  |         if sub == module_path: | ||||||
|  |             continue | ||||||
|  |         sub_module_path = sub.relative_to(module_path) | ||||||
|  |         sub_module_name = ".".join(sub_module_path.parts) | ||||||
|  |  | ||||||
|  |         sub_module = importlib.import_module(f".{sub_module_name}", module.__name__) | ||||||
|  |  | ||||||
|  |         if predicate(sub_module): | ||||||
|  |             return sub_module | ||||||
|  |  | ||||||
|  |     return None | ||||||
|   | |||||||
							
								
								
									
16 docs/upgrading.md Normal file
							| @@ -0,0 +1,16 @@ | |||||||
|  | # Upgrade Guide | ||||||
|  |  | ||||||
|  | ## [1.2.5] to [2.0.0b1] | ||||||
|  |  | ||||||
|  | ### Updated package structures | ||||||
|  |  | ||||||
|  | Generated code now strictly follows the *package structure* of the `.proto` files. | ||||||
|  | Consequently, `.proto` files without a package will be combined into a single `__init__.py` file. | ||||||
|  | To avoid overwriting existing `__init__.py` files, it's best to compile into a dedicated subdirectory. | ||||||
|  |  | ||||||
|  | Upgrading: | ||||||
|  |  | ||||||
|  | - Remove your previously compiled `.py` files. | ||||||
|  | - Create a new *empty* directory, e.g. `generated` or `lib/generated/proto`. | ||||||
|  | - Regenerate your Python files into this directory. | ||||||
|  | - Update import statements, e.g. `from generated import ExampleMessage` (see the sketch below). | ||||||
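As a concrete illustration of the last two steps, here is a minimal sketch, reusing the `generated` directory and `ExampleMessage` names from the example above (i.e. assuming a message defined in a packageless `.proto` file that was compiled into `generated/`):

```python
# Minimal sketch, assuming ExampleMessage comes from a packageless .proto file
# compiled into the ./generated directory. Packageless files are combined into
# generated/__init__.py, so the message is imported from that package directly.
from generated import ExampleMessage

msg = ExampleMessage()
print(msg.to_json(0))  # betterproto messages expose to_json()/from_json()
```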
							
								
								
									
968 poetry.lock generated Normal file
							| @@ -0,0 +1,968 @@ | |||||||
|  | [[package]] | ||||||
|  | category = "main" | ||||||
|  | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." | ||||||
|  | name = "appdirs" | ||||||
|  | optional = false | ||||||
|  | python-versions = "*" | ||||||
|  | version = "1.4.4" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Atomic file writes." | ||||||
|  | marker = "sys_platform == \"win32\"" | ||||||
|  | name = "atomicwrites" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" | ||||||
|  | version = "1.4.0" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "main" | ||||||
|  | description = "Classes Without Boilerplate" | ||||||
|  | name = "attrs" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" | ||||||
|  | version = "19.3.0" | ||||||
|  |  | ||||||
|  | [package.extras] | ||||||
|  | azure-pipelines = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "pytest-azurepipelines"] | ||||||
|  | dev = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "pre-commit"] | ||||||
|  | docs = ["sphinx", "zope.interface"] | ||||||
|  | tests = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "main" | ||||||
|  | description = "Backport of Python 3.7's datetime.fromisoformat" | ||||||
|  | marker = "python_version < \"3.7\"" | ||||||
|  | name = "backports-datetime-fromisoformat" | ||||||
|  | optional = false | ||||||
|  | python-versions = "*" | ||||||
|  | version = "1.0.0" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "main" | ||||||
|  | description = "The uncompromising code formatter." | ||||||
|  | name = "black" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=3.6" | ||||||
|  | version = "19.10b0" | ||||||
|  |  | ||||||
|  | [package.dependencies] | ||||||
|  | appdirs = "*" | ||||||
|  | attrs = ">=18.1.0" | ||||||
|  | click = ">=6.5" | ||||||
|  | pathspec = ">=0.6,<1" | ||||||
|  | regex = "*" | ||||||
|  | toml = ">=0.9.4" | ||||||
|  | typed-ast = ">=1.4.0" | ||||||
|  |  | ||||||
|  | [package.extras] | ||||||
|  | d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "A thin, practical wrapper around terminal coloring, styling, and positioning" | ||||||
|  | name = "blessings" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" | ||||||
|  | version = "1.7" | ||||||
|  |  | ||||||
|  | [package.dependencies] | ||||||
|  | six = "*" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Fancy Interface to the Python Interpreter" | ||||||
|  | name = "bpython" | ||||||
|  | optional = false | ||||||
|  | python-versions = "*" | ||||||
|  | version = "0.19" | ||||||
|  |  | ||||||
|  | [package.dependencies] | ||||||
|  | curtsies = ">=0.1.18" | ||||||
|  | greenlet = "*" | ||||||
|  | pygments = "*" | ||||||
|  | requests = "*" | ||||||
|  | six = ">=1.5" | ||||||
|  |  | ||||||
|  | [package.extras] | ||||||
|  | jedi = ["jedi"] | ||||||
|  | urwid = ["urwid"] | ||||||
|  | watch = ["watchdog"] | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Python package for providing Mozilla's CA Bundle." | ||||||
|  | name = "certifi" | ||||||
|  | optional = false | ||||||
|  | python-versions = "*" | ||||||
|  | version = "2020.6.20" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Universal encoding detector for Python 2 and 3" | ||||||
|  | name = "chardet" | ||||||
|  | optional = false | ||||||
|  | python-versions = "*" | ||||||
|  | version = "3.0.4" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "main" | ||||||
|  | description = "Composable command line interface toolkit" | ||||||
|  | name = "click" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" | ||||||
|  | version = "7.1.2" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Cross-platform colored terminal text." | ||||||
|  | marker = "sys_platform == \"win32\" or platform_system == \"Windows\"" | ||||||
|  | name = "colorama" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" | ||||||
|  | version = "0.4.3" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Code coverage measurement for Python" | ||||||
|  | name = "coverage" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" | ||||||
|  | version = "5.1" | ||||||
|  |  | ||||||
|  | [package.extras] | ||||||
|  | toml = ["toml"] | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Curses-like terminal wrapper, with colored strings!" | ||||||
|  | name = "curtsies" | ||||||
|  | optional = false | ||||||
|  | python-versions = "*" | ||||||
|  | version = "0.3.1" | ||||||
|  |  | ||||||
|  | [package.dependencies] | ||||||
|  | blessings = ">=1.5" | ||||||
|  | wcwidth = ">=0.1.4" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "main" | ||||||
|  | description = "A backport of the dataclasses module for Python 3.6" | ||||||
|  | marker = "python_version >= \"3.6\" and python_version < \"3.7\" or python_version < \"3.7\"" | ||||||
|  | name = "dataclasses" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=3.6, <3.7" | ||||||
|  | version = "0.7" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Distribution utilities" | ||||||
|  | name = "distlib" | ||||||
|  | optional = false | ||||||
|  | python-versions = "*" | ||||||
|  | version = "0.3.0" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "A platform independent file lock." | ||||||
|  | name = "filelock" | ||||||
|  | optional = false | ||||||
|  | python-versions = "*" | ||||||
|  | version = "3.0.12" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Lightweight in-process concurrent programming" | ||||||
|  | name = "greenlet" | ||||||
|  | optional = false | ||||||
|  | python-versions = "*" | ||||||
|  | version = "0.4.16" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "main" | ||||||
|  | description = "Pure-Python gRPC implementation for asyncio" | ||||||
|  | name = "grpclib" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=3.6" | ||||||
|  | version = "0.3.2" | ||||||
|  |  | ||||||
|  | [package.dependencies] | ||||||
|  | h2 = "*" | ||||||
|  | multidict = "*" | ||||||
|  |  | ||||||
|  | [package.dependencies.dataclasses] | ||||||
|  | python = "<3.7" | ||||||
|  | version = "*" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "main" | ||||||
|  | description = "HTTP/2 State-Machine based protocol implementation" | ||||||
|  | name = "h2" | ||||||
|  | optional = false | ||||||
|  | python-versions = "*" | ||||||
|  | version = "3.2.0" | ||||||
|  |  | ||||||
|  | [package.dependencies] | ||||||
|  | hpack = ">=3.0,<4" | ||||||
|  | hyperframe = ">=5.2.0,<6" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "main" | ||||||
|  | description = "Pure-Python HPACK header compression" | ||||||
|  | name = "hpack" | ||||||
|  | optional = false | ||||||
|  | python-versions = "*" | ||||||
|  | version = "3.0.0" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "main" | ||||||
|  | description = "HTTP/2 framing layer for Python" | ||||||
|  | name = "hyperframe" | ||||||
|  | optional = false | ||||||
|  | python-versions = "*" | ||||||
|  | version = "5.2.0" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Internationalized Domain Names in Applications (IDNA)" | ||||||
|  | name = "idna" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" | ||||||
|  | version = "2.9" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Read metadata from Python packages" | ||||||
|  | marker = "python_version < \"3.8\"" | ||||||
|  | name = "importlib-metadata" | ||||||
|  | optional = false | ||||||
|  | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" | ||||||
|  | version = "1.6.1" | ||||||
|  |  | ||||||
|  | [package.dependencies] | ||||||
|  | zipp = ">=0.5" | ||||||
|  |  | ||||||
|  | [package.extras] | ||||||
|  | docs = ["sphinx", "rst.linker"] | ||||||
|  | testing = ["packaging", "pep517", "importlib-resources (>=1.3)"] | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Read resources from Python packages" | ||||||
|  | marker = "python_version < \"3.7\"" | ||||||
|  | name = "importlib-resources" | ||||||
|  | optional = false | ||||||
|  | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" | ||||||
|  | version = "2.0.1" | ||||||
|  |  | ||||||
|  | [package.dependencies] | ||||||
|  | [package.dependencies.importlib-metadata] | ||||||
|  | python = "<3.8" | ||||||
|  | version = "*" | ||||||
|  |  | ||||||
|  | [package.dependencies.zipp] | ||||||
|  | python = "<3.8" | ||||||
|  | version = ">=0.4" | ||||||
|  |  | ||||||
|  | [package.extras] | ||||||
|  | docs = ["sphinx", "rst.linker", "jaraco.packaging"] | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "main" | ||||||
|  | description = "A very fast and expressive template engine." | ||||||
|  | name = "jinja2" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" | ||||||
|  | version = "2.11.2" | ||||||
|  |  | ||||||
|  | [package.dependencies] | ||||||
|  | MarkupSafe = ">=0.23" | ||||||
|  |  | ||||||
|  | [package.extras] | ||||||
|  | i18n = ["Babel (>=0.8)"] | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "main" | ||||||
|  | description = "Safely add untrusted strings to HTML/XML markup." | ||||||
|  | name = "markupsafe" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" | ||||||
|  | version = "1.1.1" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "More routines for operating on iterables, beyond itertools" | ||||||
|  | name = "more-itertools" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=3.5" | ||||||
|  | version = "8.4.0" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "main" | ||||||
|  | description = "multidict implementation" | ||||||
|  | name = "multidict" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=3.5" | ||||||
|  | version = "4.7.6" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Optional static typing for Python" | ||||||
|  | name = "mypy" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=3.5" | ||||||
|  | version = "0.770" | ||||||
|  |  | ||||||
|  | [package.dependencies] | ||||||
|  | mypy-extensions = ">=0.4.3,<0.5.0" | ||||||
|  | typed-ast = ">=1.4.0,<1.5.0" | ||||||
|  | typing-extensions = ">=3.7.4" | ||||||
|  |  | ||||||
|  | [package.extras] | ||||||
|  | dmypy = ["psutil (>=4.0)"] | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Experimental type system extensions for programs checked with the mypy typechecker." | ||||||
|  | name = "mypy-extensions" | ||||||
|  | optional = false | ||||||
|  | python-versions = "*" | ||||||
|  | version = "0.4.3" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Core utilities for Python packages" | ||||||
|  | name = "packaging" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" | ||||||
|  | version = "20.4" | ||||||
|  |  | ||||||
|  | [package.dependencies] | ||||||
|  | pyparsing = ">=2.0.2" | ||||||
|  | six = "*" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "main" | ||||||
|  | description = "Utility library for gitignore style pattern matching of file paths." | ||||||
|  | name = "pathspec" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" | ||||||
|  | version = "0.8.0" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "plugin and hook calling mechanisms for python" | ||||||
|  | name = "pluggy" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" | ||||||
|  | version = "0.13.1" | ||||||
|  |  | ||||||
|  | [package.dependencies] | ||||||
|  | [package.dependencies.importlib-metadata] | ||||||
|  | python = "<3.8" | ||||||
|  | version = ">=0.12" | ||||||
|  |  | ||||||
|  | [package.extras] | ||||||
|  | dev = ["pre-commit", "tox"] | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "main" | ||||||
|  | description = "Protocol Buffers" | ||||||
|  | name = "protobuf" | ||||||
|  | optional = false | ||||||
|  | python-versions = "*" | ||||||
|  | version = "3.12.2" | ||||||
|  |  | ||||||
|  | [package.dependencies] | ||||||
|  | setuptools = "*" | ||||||
|  | six = ">=1.9" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "library with cross-python path, ini-parsing, io, code, log facilities" | ||||||
|  | name = "py" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" | ||||||
|  | version = "1.8.2" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Pygments is a syntax highlighting package written in Python." | ||||||
|  | name = "pygments" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=3.5" | ||||||
|  | version = "2.6.1" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Python parsing module" | ||||||
|  | name = "pyparsing" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" | ||||||
|  | version = "2.4.7" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "pytest: simple powerful testing with Python" | ||||||
|  | name = "pytest" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=3.5" | ||||||
|  | version = "5.4.3" | ||||||
|  |  | ||||||
|  | [package.dependencies] | ||||||
|  | atomicwrites = ">=1.0" | ||||||
|  | attrs = ">=17.4.0" | ||||||
|  | colorama = "*" | ||||||
|  | more-itertools = ">=4.0.0" | ||||||
|  | packaging = "*" | ||||||
|  | pluggy = ">=0.12,<1.0" | ||||||
|  | py = ">=1.5.0" | ||||||
|  | wcwidth = "*" | ||||||
|  |  | ||||||
|  | [package.dependencies.importlib-metadata] | ||||||
|  | python = "<3.8" | ||||||
|  | version = ">=0.12" | ||||||
|  |  | ||||||
|  | [package.extras] | ||||||
|  | checkqa-mypy = ["mypy (v0.761)"] | ||||||
|  | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Pytest support for asyncio." | ||||||
|  | name = "pytest-asyncio" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">= 3.5" | ||||||
|  | version = "0.12.0" | ||||||
|  |  | ||||||
|  | [package.dependencies] | ||||||
|  | pytest = ">=5.4.0" | ||||||
|  |  | ||||||
|  | [package.extras] | ||||||
|  | testing = ["async_generator (>=1.3)", "coverage", "hypothesis (>=5.7.1)"] | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Pytest plugin for measuring coverage." | ||||||
|  | name = "pytest-cov" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" | ||||||
|  | version = "2.10.0" | ||||||
|  |  | ||||||
|  | [package.dependencies] | ||||||
|  | coverage = ">=4.4" | ||||||
|  | pytest = ">=4.6" | ||||||
|  |  | ||||||
|  | [package.extras] | ||||||
|  | testing = ["fields", "hunter", "process-tests (2.0.2)", "six", "pytest-xdist", "virtualenv"] | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "main" | ||||||
|  | description = "Alternative regular expression module, to replace re." | ||||||
|  | name = "regex" | ||||||
|  | optional = false | ||||||
|  | python-versions = "*" | ||||||
|  | version = "2020.6.8" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Python HTTP for Humans." | ||||||
|  | name = "requests" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" | ||||||
|  | version = "2.24.0" | ||||||
|  |  | ||||||
|  | [package.dependencies] | ||||||
|  | certifi = ">=2017.4.17" | ||||||
|  | chardet = ">=3.0.2,<4" | ||||||
|  | idna = ">=2.5,<3" | ||||||
|  | urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26" | ||||||
|  |  | ||||||
|  | [package.extras] | ||||||
|  | security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] | ||||||
|  | socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"] | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "main" | ||||||
|  | description = "Python 2 and 3 compatibility utilities" | ||||||
|  | name = "six" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" | ||||||
|  | version = "1.15.0" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "main" | ||||||
|  | description = "Python Library for Tom's Obvious, Minimal Language" | ||||||
|  | name = "toml" | ||||||
|  | optional = false | ||||||
|  | python-versions = "*" | ||||||
|  | version = "0.10.1" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "tox is a generic virtualenv management and test command line tool" | ||||||
|  | name = "tox" | ||||||
|  | optional = false | ||||||
|  | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" | ||||||
|  | version = "3.15.2" | ||||||
|  |  | ||||||
|  | [package.dependencies] | ||||||
|  | colorama = ">=0.4.1" | ||||||
|  | filelock = ">=3.0.0" | ||||||
|  | packaging = ">=14" | ||||||
|  | pluggy = ">=0.12.0" | ||||||
|  | py = ">=1.4.17" | ||||||
|  | six = ">=1.14.0" | ||||||
|  | toml = ">=0.9.4" | ||||||
|  | virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,<20.0.3 || >20.0.3,<20.0.4 || >20.0.4,<20.0.5 || >20.0.5,<20.0.6 || >20.0.6,<20.0.7 || >20.0.7" | ||||||
|  |  | ||||||
|  | [package.dependencies.importlib-metadata] | ||||||
|  | python = "<3.8" | ||||||
|  | version = ">=0.12,<2" | ||||||
|  |  | ||||||
|  | [package.extras] | ||||||
|  | docs = ["sphinx (>=2.0.0)", "towncrier (>=18.5.0)", "pygments-github-lexers (>=0.0.5)", "sphinxcontrib-autoprogram (>=0.1.5)"] | ||||||
|  | testing = ["freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-xdist (>=1.22.2)", "pytest-randomly (>=1.0.0)", "flaky (>=3.4.0)", "psutil (>=5.6.1)"] | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "main" | ||||||
|  | description = "a fork of Python 2 and 3 ast modules with type comment support" | ||||||
|  | name = "typed-ast" | ||||||
|  | optional = false | ||||||
|  | python-versions = "*" | ||||||
|  | version = "1.4.1" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Backported and Experimental Type Hints for Python 3.5+" | ||||||
|  | name = "typing-extensions" | ||||||
|  | optional = false | ||||||
|  | python-versions = "*" | ||||||
|  | version = "3.7.4.2" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "HTTP library with thread-safe connection pooling, file post, and more." | ||||||
|  | name = "urllib3" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" | ||||||
|  | version = "1.25.9" | ||||||
|  |  | ||||||
|  | [package.extras] | ||||||
|  | brotli = ["brotlipy (>=0.6.0)"] | ||||||
|  | secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0.14)", "ipaddress"] | ||||||
|  | socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Virtual Python Environment builder" | ||||||
|  | name = "virtualenv" | ||||||
|  | optional = false | ||||||
|  | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" | ||||||
|  | version = "20.0.23" | ||||||
|  |  | ||||||
|  | [package.dependencies] | ||||||
|  | appdirs = ">=1.4.3,<2" | ||||||
|  | distlib = ">=0.3.0,<1" | ||||||
|  | filelock = ">=3.0.0,<4" | ||||||
|  | six = ">=1.9.0,<2" | ||||||
|  |  | ||||||
|  | [package.dependencies.importlib-metadata] | ||||||
|  | python = "<3.8" | ||||||
|  | version = ">=0.12,<2" | ||||||
|  |  | ||||||
|  | [package.dependencies.importlib-resources] | ||||||
|  | python = "<3.7" | ||||||
|  | version = ">=1.0" | ||||||
|  |  | ||||||
|  | [package.extras] | ||||||
|  | docs = ["sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)", "proselint (>=0.10.2)"] | ||||||
|  | testing = ["pytest (>=4)", "coverage (>=5)", "coverage-enable-subprocess (>=1)", "pytest-xdist (>=1.31.0)", "pytest-mock (>=2)", "pytest-env (>=0.6.2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "flaky (>=3)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Measures the displayed width of unicode strings in a terminal" | ||||||
|  | name = "wcwidth" | ||||||
|  | optional = false | ||||||
|  | python-versions = "*" | ||||||
|  | version = "0.2.4" | ||||||
|  |  | ||||||
|  | [[package]] | ||||||
|  | category = "dev" | ||||||
|  | description = "Backport of pathlib-compatible object wrapper for zip files" | ||||||
|  | marker = "python_version < \"3.8\"" | ||||||
|  | name = "zipp" | ||||||
|  | optional = false | ||||||
|  | python-versions = ">=3.6" | ||||||
|  | version = "3.1.0" | ||||||
|  |  | ||||||
|  | [package.extras] | ||||||
|  | docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] | ||||||
|  | testing = ["jaraco.itertools", "func-timeout"] | ||||||
|  |  | ||||||
|  | [extras] | ||||||
|  | compiler = ["black", "jinja2", "protobuf"] | ||||||
|  |  | ||||||
|  | [metadata] | ||||||
|  | content-hash = "8a4fa01ede86e1b5ba35b9dab8b6eacee766a9b5666f48ab41445c01882ab003" | ||||||
|  | python-versions = "^3.6" | ||||||
|  |  | ||||||
|  | [metadata.files] | ||||||
|  | appdirs = [ | ||||||
|  |     {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, | ||||||
|  |     {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, | ||||||
|  | ] | ||||||
|  | atomicwrites = [ | ||||||
|  |     {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, | ||||||
|  |     {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, | ||||||
|  | ] | ||||||
|  | attrs = [ | ||||||
|  |     {file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"}, | ||||||
|  |     {file = "attrs-19.3.0.tar.gz", hash = "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"}, | ||||||
|  | ] | ||||||
|  | backports-datetime-fromisoformat = [ | ||||||
|  |     {file = "backports-datetime-fromisoformat-1.0.0.tar.gz", hash = "sha256:9577a2a9486cd7383a5f58b23bb8e81cf0821dbbc0eb7c87d3fa198c1df40f5c"}, | ||||||
|  | ] | ||||||
|  | black = [ | ||||||
|  |     {file = "black-19.10b0-py36-none-any.whl", hash = "sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b"}, | ||||||
|  |     {file = "black-19.10b0.tar.gz", hash = "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539"}, | ||||||
|  | ] | ||||||
|  | blessings = [ | ||||||
|  |     {file = "blessings-1.7-py2-none-any.whl", hash = "sha256:caad5211e7ba5afe04367cdd4cfc68fa886e2e08f6f35e76b7387d2109ccea6e"}, | ||||||
|  |     {file = "blessings-1.7-py3-none-any.whl", hash = "sha256:b1fdd7e7a675295630f9ae71527a8ebc10bfefa236b3d6aa4932ee4462c17ba3"}, | ||||||
|  |     {file = "blessings-1.7.tar.gz", hash = "sha256:98e5854d805f50a5b58ac2333411b0482516a8210f23f43308baeb58d77c157d"}, | ||||||
|  | ] | ||||||
|  | bpython = [ | ||||||
|  |     {file = "bpython-0.19-py2.py3-none-any.whl", hash = "sha256:95d95783bfadfa0a25300a648de5aba4423b0ee76b034022a81dde2b5e853c00"}, | ||||||
|  |     {file = "bpython-0.19.tar.gz", hash = "sha256:476ce09a896c4d34bf5e56aca64650c56fdcfce45781a20dc1521221df8cc49c"}, | ||||||
|  | ] | ||||||
|  | certifi = [ | ||||||
|  |     {file = "certifi-2020.6.20-py2.py3-none-any.whl", hash = "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41"}, | ||||||
|  |     {file = "certifi-2020.6.20.tar.gz", hash = "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3"}, | ||||||
|  | ] | ||||||
|  | chardet = [ | ||||||
|  |     {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"}, | ||||||
|  |     {file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"}, | ||||||
|  | ] | ||||||
|  | click = [ | ||||||
|  |     {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, | ||||||
|  |     {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, | ||||||
|  | ] | ||||||
|  | colorama = [ | ||||||
|  |     {file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"}, | ||||||
|  |     {file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"}, | ||||||
|  | ] | ||||||
|  | coverage = [ | ||||||
|  |     {file = "coverage-5.1-cp27-cp27m-macosx_10_12_x86_64.whl", hash = "sha256:0cb4be7e784dcdc050fc58ef05b71aa8e89b7e6636b99967fadbdba694cf2b65"}, | ||||||
|  |     {file = "coverage-5.1-cp27-cp27m-macosx_10_13_intel.whl", hash = "sha256:c317eaf5ff46a34305b202e73404f55f7389ef834b8dbf4da09b9b9b37f76dd2"}, | ||||||
|  |     {file = "coverage-5.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b83835506dfc185a319031cf853fa4bb1b3974b1f913f5bb1a0f3d98bdcded04"}, | ||||||
|  |     {file = "coverage-5.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5f2294dbf7875b991c381e3d5af2bcc3494d836affa52b809c91697449d0eda6"}, | ||||||
|  |     {file = "coverage-5.1-cp27-cp27m-win32.whl", hash = "sha256:de807ae933cfb7f0c7d9d981a053772452217df2bf38e7e6267c9cbf9545a796"}, | ||||||
|  |     {file = "coverage-5.1-cp27-cp27m-win_amd64.whl", hash = "sha256:bf9cb9a9fd8891e7efd2d44deb24b86d647394b9705b744ff6f8261e6f29a730"}, | ||||||
|  |     {file = "coverage-5.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:acf3763ed01af8410fc36afea23707d4ea58ba7e86a8ee915dfb9ceff9ef69d0"}, | ||||||
|  |     {file = "coverage-5.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:dec5202bfe6f672d4511086e125db035a52b00f1648d6407cc8e526912c0353a"}, | ||||||
|  |     {file = "coverage-5.1-cp35-cp35m-macosx_10_12_x86_64.whl", hash = "sha256:7a5bdad4edec57b5fb8dae7d3ee58622d626fd3a0be0dfceda162a7035885ecf"}, | ||||||
|  |     {file = "coverage-5.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:1601e480b9b99697a570cea7ef749e88123c04b92d84cedaa01e117436b4a0a9"}, | ||||||
|  |     {file = "coverage-5.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:dbe8c6ae7534b5b024296464f387d57c13caa942f6d8e6e0346f27e509f0f768"}, | ||||||
|  |     {file = "coverage-5.1-cp35-cp35m-win32.whl", hash = "sha256:a027ef0492ede1e03a8054e3c37b8def89a1e3c471482e9f046906ba4f2aafd2"}, | ||||||
|  |     {file = "coverage-5.1-cp35-cp35m-win_amd64.whl", hash = "sha256:0e61d9803d5851849c24f78227939c701ced6704f337cad0a91e0972c51c1ee7"}, | ||||||
|  |     {file = "coverage-5.1-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:2d27a3f742c98e5c6b461ee6ef7287400a1956c11421eb574d843d9ec1f772f0"}, | ||||||
|  |     {file = "coverage-5.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:66460ab1599d3cf894bb6baee8c684788819b71a5dc1e8fa2ecc152e5d752019"}, | ||||||
|  |     {file = "coverage-5.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5c542d1e62eece33c306d66fe0a5c4f7f7b3c08fecc46ead86d7916684b36d6c"}, | ||||||
|  |     {file = "coverage-5.1-cp36-cp36m-win32.whl", hash = "sha256:2742c7515b9eb368718cd091bad1a1b44135cc72468c731302b3d641895b83d1"}, | ||||||
|  |     {file = "coverage-5.1-cp36-cp36m-win_amd64.whl", hash = "sha256:dead2ddede4c7ba6cb3a721870f5141c97dc7d85a079edb4bd8d88c3ad5b20c7"}, | ||||||
|  |     {file = "coverage-5.1-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:01333e1bd22c59713ba8a79f088b3955946e293114479bbfc2e37d522be03355"}, | ||||||
|  |     {file = "coverage-5.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:e1ea316102ea1e1770724db01998d1603ed921c54a86a2efcb03428d5417e489"}, | ||||||
|  |     {file = "coverage-5.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:adeb4c5b608574a3d647011af36f7586811a2c1197c861aedb548dd2453b41cd"}, | ||||||
|  |     {file = "coverage-5.1-cp37-cp37m-win32.whl", hash = "sha256:782caea581a6e9ff75eccda79287daefd1d2631cc09d642b6ee2d6da21fc0a4e"}, | ||||||
|  |     {file = "coverage-5.1-cp37-cp37m-win_amd64.whl", hash = "sha256:00f1d23f4336efc3b311ed0d807feb45098fc86dee1ca13b3d6768cdab187c8a"}, | ||||||
|  |     {file = "coverage-5.1-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:402e1744733df483b93abbf209283898e9f0d67470707e3c7516d84f48524f55"}, | ||||||
|  |     {file = "coverage-5.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:a3f3654d5734a3ece152636aad89f58afc9213c6520062db3978239db122f03c"}, | ||||||
|  |     {file = "coverage-5.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6402bd2fdedabbdb63a316308142597534ea8e1895f4e7d8bf7476c5e8751fef"}, | ||||||
|  |     {file = "coverage-5.1-cp38-cp38-win32.whl", hash = "sha256:8fa0cbc7ecad630e5b0f4f35b0f6ad419246b02bc750de7ac66db92667996d24"}, | ||||||
|  |     {file = "coverage-5.1-cp38-cp38-win_amd64.whl", hash = "sha256:79a3cfd6346ce6c13145731d39db47b7a7b859c0272f02cdb89a3bdcbae233a0"}, | ||||||
|  |     {file = "coverage-5.1-cp39-cp39-win32.whl", hash = "sha256:a82b92b04a23d3c8a581fc049228bafde988abacba397d57ce95fe95e0338ab4"}, | ||||||
|  |     {file = "coverage-5.1-cp39-cp39-win_amd64.whl", hash = "sha256:bb28a7245de68bf29f6fb199545d072d1036a1917dca17a1e75bbb919e14ee8e"}, | ||||||
|  |     {file = "coverage-5.1.tar.gz", hash = "sha256:f90bfc4ad18450c80b024036eaf91e4a246ae287701aaa88eaebebf150868052"}, | ||||||
|  | ] | ||||||
|  | curtsies = [ | ||||||
|  |     {file = "curtsies-0.3.1-py2.py3-none-any.whl", hash = "sha256:9169d734323a1356e7563b1ca0bff3c5358c1b1dcce52506a9d4d8ab8a8f5604"}, | ||||||
|  |     {file = "curtsies-0.3.1.tar.gz", hash = "sha256:b2c913a8113c4382e1a221679f2338139b112839deb16c00ee873e57a4b33bd4"}, | ||||||
|  | ] | ||||||
|  | dataclasses = [ | ||||||
|  |     {file = "dataclasses-0.7-py3-none-any.whl", hash = "sha256:3459118f7ede7c8bea0fe795bff7c6c2ce287d01dd226202f7c9ebc0610a7836"}, | ||||||
|  |     {file = "dataclasses-0.7.tar.gz", hash = "sha256:494a6dcae3b8bcf80848eea2ef64c0cc5cd307ffc263e17cdf42f3e5420808e6"}, | ||||||
|  | ] | ||||||
|  | distlib = [ | ||||||
|  |     {file = "distlib-0.3.0.zip", hash = "sha256:2e166e231a26b36d6dfe35a48c4464346620f8645ed0ace01ee31822b288de21"}, | ||||||
|  | ] | ||||||
|  | filelock = [ | ||||||
|  |     {file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"}, | ||||||
|  |     {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"}, | ||||||
|  | ] | ||||||
|  | greenlet = [ | ||||||
|  |     {file = "greenlet-0.4.16-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:80cb0380838bf4e48da6adedb0c7cd060c187bb4a75f67a5aa9ec33689b84872"}, | ||||||
|  |     {file = "greenlet-0.4.16-cp27-cp27m-win32.whl", hash = "sha256:df7de669cbf21de4b04a3ffc9920bc8426cab4c61365fa84d79bf97401a8bef7"}, | ||||||
|  |     {file = "greenlet-0.4.16-cp27-cp27m-win_amd64.whl", hash = "sha256:1429dc183b36ec972055e13250d96e174491559433eb3061691b446899b87384"}, | ||||||
|  |     {file = "greenlet-0.4.16-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:5ea034d040e6ab1d2ae04ab05a3f37dbd719c4dee3804b13903d4cc794b1336e"}, | ||||||
|  |     {file = "greenlet-0.4.16-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c196a5394c56352e21cb7224739c6dd0075b69dd56f758505951d1d8d68cf8a9"}, | ||||||
|  |     {file = "greenlet-0.4.16-cp35-cp35m-win32.whl", hash = "sha256:1000038ba0ea9032948e2156a9c15f5686f36945e8f9906e6b8db49f358e7b52"}, | ||||||
|  |     {file = "greenlet-0.4.16-cp35-cp35m-win_amd64.whl", hash = "sha256:1b805231bfb7b2900a16638c3c8b45c694334c811f84463e52451e00c9412691"}, | ||||||
|  |     {file = "greenlet-0.4.16-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e5db19d4a7d41bbeb3dd89b49fc1bc7e6e515b51bbf32589c618655a0ebe0bf0"}, | ||||||
|  |     {file = "greenlet-0.4.16-cp36-cp36m-win32.whl", hash = "sha256:eac2a3f659d5f41d6bbfb6a97733bc7800ea5e906dc873732e00cebb98cec9e4"}, | ||||||
|  |     {file = "greenlet-0.4.16-cp36-cp36m-win_amd64.whl", hash = "sha256:7eed31f4efc8356e200568ba05ad645525f1fbd8674f1e5be61a493e715e3873"}, | ||||||
|  |     {file = "greenlet-0.4.16-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:682328aa576ec393c1872615bcb877cf32d800d4a2f150e1a5dc7e56644010b1"}, | ||||||
|  |     {file = "greenlet-0.4.16-cp37-cp37m-win32.whl", hash = "sha256:3a35e33902b2e6079949feed7a2dafa5ac6f019da97bd255842bb22de3c11bf5"}, | ||||||
|  |     {file = "greenlet-0.4.16-cp37-cp37m-win_amd64.whl", hash = "sha256:b0b2a984bbfc543d144d88caad6cc7ff4a71be77102014bd617bd88cfb038727"}, | ||||||
|  |     {file = "greenlet-0.4.16-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:d83c1d38658b0f81c282b41238092ed89d8f93c6e342224ab73fb39e16848721"}, | ||||||
|  |     {file = "greenlet-0.4.16-cp38-cp38-win32.whl", hash = "sha256:e695ac8c3efe124d998230b219eb51afb6ef10524a50b3c45109c4b77a8a3a92"}, | ||||||
|  |     {file = "greenlet-0.4.16-cp38-cp38-win_amd64.whl", hash = "sha256:133ba06bad4e5f2f8bf6a0ac434e0fd686df749a86b3478903b92ec3a9c0c90b"}, | ||||||
|  |     {file = "greenlet-0.4.16.tar.gz", hash = "sha256:6e06eac722676797e8fce4adb8ad3dc57a1bb3adfb0dd3fdf8306c055a38456c"}, | ||||||
|  | ] | ||||||
|  | grpclib = [ | ||||||
|  |     {file = "grpclib-0.3.2.tar.gz", hash = "sha256:d1e76c56f5b9cf268942b93d1ef2046e3983c35ed3e6b592f02f1d9f0db36a81"}, | ||||||
|  | ] | ||||||
|  | h2 = [ | ||||||
|  |     {file = "h2-3.2.0-py2.py3-none-any.whl", hash = "sha256:61e0f6601fa709f35cdb730863b4e5ec7ad449792add80d1410d4174ed139af5"}, | ||||||
|  |     {file = "h2-3.2.0.tar.gz", hash = "sha256:875f41ebd6f2c44781259005b157faed1a5031df3ae5aa7bcb4628a6c0782f14"}, | ||||||
|  | ] | ||||||
|  | hpack = [ | ||||||
|  |     {file = "hpack-3.0.0-py2.py3-none-any.whl", hash = "sha256:0edd79eda27a53ba5be2dfabf3b15780928a0dff6eb0c60a3d6767720e970c89"}, | ||||||
|  |     {file = "hpack-3.0.0.tar.gz", hash = "sha256:8eec9c1f4bfae3408a3f30500261f7e6a65912dc138526ea054f9ad98892e9d2"}, | ||||||
|  | ] | ||||||
|  | hyperframe = [ | ||||||
|  |     {file = "hyperframe-5.2.0-py2.py3-none-any.whl", hash = "sha256:5187962cb16dcc078f23cb5a4b110098d546c3f41ff2d4038a9896893bbd0b40"}, | ||||||
|  |     {file = "hyperframe-5.2.0.tar.gz", hash = "sha256:a9f5c17f2cc3c719b917c4f33ed1c61bd1f8dfac4b1bd23b7c80b3400971b41f"}, | ||||||
|  | ] | ||||||
|  | idna = [ | ||||||
|  |     {file = "idna-2.9-py2.py3-none-any.whl", hash = "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa"}, | ||||||
|  |     {file = "idna-2.9.tar.gz", hash = "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb"}, | ||||||
|  | ] | ||||||
|  | importlib-metadata = [ | ||||||
|  |     {file = "importlib_metadata-1.6.1-py2.py3-none-any.whl", hash = "sha256:15ec6c0fd909e893e3a08b3a7c76ecb149122fb14b7efe1199ddd4c7c57ea958"}, | ||||||
|  |     {file = "importlib_metadata-1.6.1.tar.gz", hash = "sha256:0505dd08068cfec00f53a74a0ad927676d7757da81b7436a6eefe4c7cf75c545"}, | ||||||
|  | ] | ||||||
|  | importlib-resources = [ | ||||||
|  |     {file = "importlib_resources-2.0.1-py2.py3-none-any.whl", hash = "sha256:83985739b3a6679702f9ab33f0ad016ad564664d0568a31ac14d7c64789453e6"}, | ||||||
|  |     {file = "importlib_resources-2.0.1.tar.gz", hash = "sha256:f5edfcece1cc9435d0979c19e08739521f4cf1aa1adaf6e571f732df6f568962"}, | ||||||
|  | ] | ||||||
|  | jinja2 = [ | ||||||
|  |     {file = "Jinja2-2.11.2-py2.py3-none-any.whl", hash = "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"}, | ||||||
|  |     {file = "Jinja2-2.11.2.tar.gz", hash = "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0"}, | ||||||
|  | ] | ||||||
|  | markupsafe = [ | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp27-cp27m-win32.whl", hash = "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp27-cp27m-win_amd64.whl", hash = "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp34-cp34m-macosx_10_6_intel.whl", hash = "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp34-cp34m-win32.whl", hash = "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp34-cp34m-win_amd64.whl", hash = "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp35-cp35m-win32.whl", hash = "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp35-cp35m-win_amd64.whl", hash = "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp36-cp36m-win32.whl", hash = "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp37-cp37m-win32.whl", hash = "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp38-cp38-win32.whl", hash = "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"}, | ||||||
|  |     {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"}, | ||||||
|  | ] | ||||||
|  | more-itertools = [ | ||||||
|  |     {file = "more-itertools-8.4.0.tar.gz", hash = "sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5"}, | ||||||
|  |     {file = "more_itertools-8.4.0-py3-none-any.whl", hash = "sha256:b78134b2063dd214000685165d81c154522c3ee0a1c0d4d113c80361c234c5a2"}, | ||||||
|  | ] | ||||||
|  | multidict = [ | ||||||
|  |     {file = "multidict-4.7.6-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:275ca32383bc5d1894b6975bb4ca6a7ff16ab76fa622967625baeebcf8079000"}, | ||||||
|  |     {file = "multidict-4.7.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:1ece5a3369835c20ed57adadc663400b5525904e53bae59ec854a5d36b39b21a"}, | ||||||
|  |     {file = "multidict-4.7.6-cp35-cp35m-win32.whl", hash = "sha256:5141c13374e6b25fe6bf092052ab55c0c03d21bd66c94a0e3ae371d3e4d865a5"}, | ||||||
|  |     {file = "multidict-4.7.6-cp35-cp35m-win_amd64.whl", hash = "sha256:9456e90649005ad40558f4cf51dbb842e32807df75146c6d940b6f5abb4a78f3"}, | ||||||
|  |     {file = "multidict-4.7.6-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:e0d072ae0f2a179c375f67e3da300b47e1a83293c554450b29c900e50afaae87"}, | ||||||
|  |     {file = "multidict-4.7.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3750f2205b800aac4bb03b5ae48025a64e474d2c6cc79547988ba1d4122a09e2"}, | ||||||
|  |     {file = "multidict-4.7.6-cp36-cp36m-win32.whl", hash = "sha256:f07acae137b71af3bb548bd8da720956a3bc9f9a0b87733e0899226a2317aeb7"}, | ||||||
|  |     {file = "multidict-4.7.6-cp36-cp36m-win_amd64.whl", hash = "sha256:6513728873f4326999429a8b00fc7ceddb2509b01d5fd3f3be7881a257b8d463"}, | ||||||
|  |     {file = "multidict-4.7.6-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:feed85993dbdb1dbc29102f50bca65bdc68f2c0c8d352468c25b54874f23c39d"}, | ||||||
|  |     {file = "multidict-4.7.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fcfbb44c59af3f8ea984de67ec7c306f618a3ec771c2843804069917a8f2e255"}, | ||||||
|  |     {file = "multidict-4.7.6-cp37-cp37m-win32.whl", hash = "sha256:4538273208e7294b2659b1602490f4ed3ab1c8cf9dbdd817e0e9db8e64be2507"}, | ||||||
|  |     {file = "multidict-4.7.6-cp37-cp37m-win_amd64.whl", hash = "sha256:d14842362ed4cf63751648e7672f7174c9818459d169231d03c56e84daf90b7c"}, | ||||||
|  |     {file = "multidict-4.7.6-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:c026fe9a05130e44157b98fea3ab12969e5b60691a276150db9eda71710cd10b"}, | ||||||
|  |     {file = "multidict-4.7.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:51a4d210404ac61d32dada00a50ea7ba412e6ea945bbe992e4d7a595276d2ec7"}, | ||||||
|  |     {file = "multidict-4.7.6-cp38-cp38-win32.whl", hash = "sha256:5cf311a0f5ef80fe73e4f4c0f0998ec08f954a6ec72b746f3c179e37de1d210d"}, | ||||||
|  |     {file = "multidict-4.7.6-cp38-cp38-win_amd64.whl", hash = "sha256:7388d2ef3c55a8ba80da62ecfafa06a1c097c18032a501ffd4cabbc52d7f2b19"}, | ||||||
|  |     {file = "multidict-4.7.6.tar.gz", hash = "sha256:fbb77a75e529021e7c4a8d4e823d88ef4d23674a202be4f5addffc72cbb91430"}, | ||||||
|  | ] | ||||||
|  | mypy = [ | ||||||
|  |     {file = "mypy-0.770-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:a34b577cdf6313bf24755f7a0e3f3c326d5c1f4fe7422d1d06498eb25ad0c600"}, | ||||||
|  |     {file = "mypy-0.770-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:86c857510a9b7c3104cf4cde1568f4921762c8f9842e987bc03ed4f160925754"}, | ||||||
|  |     {file = "mypy-0.770-cp35-cp35m-win_amd64.whl", hash = "sha256:a8ffcd53cb5dfc131850851cc09f1c44689c2812d0beb954d8138d4f5fc17f65"}, | ||||||
|  |     {file = "mypy-0.770-cp36-cp36m-macosx_10_6_x86_64.whl", hash = "sha256:7687f6455ec3ed7649d1ae574136835a4272b65b3ddcf01ab8704ac65616c5ce"}, | ||||||
|  |     {file = "mypy-0.770-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3beff56b453b6ef94ecb2996bea101a08f1f8a9771d3cbf4988a61e4d9973761"}, | ||||||
|  |     {file = "mypy-0.770-cp36-cp36m-win_amd64.whl", hash = "sha256:15b948e1302682e3682f11f50208b726a246ab4e6c1b39f9264a8796bb416aa2"}, | ||||||
|  |     {file = "mypy-0.770-cp37-cp37m-macosx_10_6_x86_64.whl", hash = "sha256:b90928f2d9eb2f33162405f32dde9f6dcead63a0971ca8a1b50eb4ca3e35ceb8"}, | ||||||
|  |     {file = "mypy-0.770-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:c56ffe22faa2e51054c5f7a3bc70a370939c2ed4de308c690e7949230c995913"}, | ||||||
|  |     {file = "mypy-0.770-cp37-cp37m-win_amd64.whl", hash = "sha256:8dfb69fbf9f3aeed18afffb15e319ca7f8da9642336348ddd6cab2713ddcf8f9"}, | ||||||
|  |     {file = "mypy-0.770-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:219a3116ecd015f8dca7b5d2c366c973509dfb9a8fc97ef044a36e3da66144a1"}, | ||||||
|  |     {file = "mypy-0.770-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7ec45a70d40ede1ec7ad7f95b3c94c9cf4c186a32f6bacb1795b60abd2f9ef27"}, | ||||||
|  |     {file = "mypy-0.770-cp38-cp38-win_amd64.whl", hash = "sha256:f91c7ae919bbc3f96cd5e5b2e786b2b108343d1d7972ea130f7de27fdd547cf3"}, | ||||||
|  |     {file = "mypy-0.770-py3-none-any.whl", hash = "sha256:3b1fc683fb204c6b4403a1ef23f0b1fac8e4477091585e0c8c54cbdf7d7bb164"}, | ||||||
|  |     {file = "mypy-0.770.tar.gz", hash = "sha256:8a627507ef9b307b46a1fea9513d5c98680ba09591253082b4c48697ba05a4ae"}, | ||||||
|  | ] | ||||||
|  | mypy-extensions = [ | ||||||
|  |     {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, | ||||||
|  |     {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, | ||||||
|  | ] | ||||||
|  | packaging = [ | ||||||
|  |     {file = "packaging-20.4-py2.py3-none-any.whl", hash = "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"}, | ||||||
|  |     {file = "packaging-20.4.tar.gz", hash = "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8"}, | ||||||
|  | ] | ||||||
|  | pathspec = [ | ||||||
|  |     {file = "pathspec-0.8.0-py2.py3-none-any.whl", hash = "sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0"}, | ||||||
|  |     {file = "pathspec-0.8.0.tar.gz", hash = "sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061"}, | ||||||
|  | ] | ||||||
|  | pluggy = [ | ||||||
|  |     {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, | ||||||
|  |     {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, | ||||||
|  | ] | ||||||
|  | protobuf = [ | ||||||
|  |     {file = "protobuf-3.12.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:e1464a4a2cf12f58f662c8e6421772c07947266293fb701cb39cd9c1e183f63c"}, | ||||||
|  |     {file = "protobuf-3.12.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:6f349adabf1c004aba53f7b4633459f8ca8a09654bf7e69b509c95a454755776"}, | ||||||
|  |     {file = "protobuf-3.12.2-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:be04fe14ceed7f8641e30f36077c1a654ff6f17d0c7a5283b699d057d150d82a"}, | ||||||
|  |     {file = "protobuf-3.12.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f4b73736108a416c76c17a8a09bc73af3d91edaa26c682aaa460ef91a47168d3"}, | ||||||
|  |     {file = "protobuf-3.12.2-cp35-cp35m-win32.whl", hash = "sha256:5524c7020eb1fb7319472cb75c4c3206ef18b34d6034d2ee420a60f99cddeb07"}, | ||||||
|  |     {file = "protobuf-3.12.2-cp35-cp35m-win_amd64.whl", hash = "sha256:bff02030bab8b969f4de597543e55bd05e968567acb25c0a87495a31eb09e925"}, | ||||||
|  |     {file = "protobuf-3.12.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c9ca9f76805e5a637605f171f6c4772fc4a81eced4e2f708f79c75166a2c99ea"}, | ||||||
|  |     {file = "protobuf-3.12.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:304e08440c4a41a0f3592d2a38934aad6919d692bb0edfb355548786728f9a5e"}, | ||||||
|  |     {file = "protobuf-3.12.2-cp36-cp36m-win32.whl", hash = "sha256:b5a114ea9b7fc90c2cc4867a866512672a47f66b154c6d7ee7e48ddb68b68122"}, | ||||||
|  |     {file = "protobuf-3.12.2-cp36-cp36m-win_amd64.whl", hash = "sha256:85b94d2653b0fdf6d879e39d51018bf5ccd86c81c04e18a98e9888694b98226f"}, | ||||||
|  |     {file = "protobuf-3.12.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a7ab28a8f1f043c58d157bceb64f80e4d2f7f1b934bc7ff5e7f7a55a337ea8b0"}, | ||||||
|  |     {file = "protobuf-3.12.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:eafe9fa19fcefef424ee089fb01ac7177ff3691af7cc2ae8791ae523eb6ca907"}, | ||||||
|  |     {file = "protobuf-3.12.2-cp37-cp37m-win32.whl", hash = "sha256:612bc97e42b22af10ba25e4140963fbaa4c5181487d163f4eb55b0b15b3dfcd2"}, | ||||||
|  |     {file = "protobuf-3.12.2-cp37-cp37m-win_amd64.whl", hash = "sha256:e72736dd822748b0721f41f9aaaf6a5b6d5cfc78f6c8690263aef8bba4457f0e"}, | ||||||
|  |     {file = "protobuf-3.12.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:87535dc2d2ef007b9d44e309d2b8ea27a03d2fa09556a72364d706fcb7090828"}, | ||||||
|  |     {file = "protobuf-3.12.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:50b5fee674878b14baea73b4568dc478c46a31dd50157a5b5d2f71138243b1a9"}, | ||||||
|  |     {file = "protobuf-3.12.2-py2.py3-none-any.whl", hash = "sha256:a96f8fc625e9ff568838e556f6f6ae8eca8b4837cdfb3f90efcb7c00e342a2eb"}, | ||||||
|  |     {file = "protobuf-3.12.2.tar.gz", hash = "sha256:49ef8ab4c27812a89a76fa894fe7a08f42f2147078392c0dee51d4a444ef6df5"}, | ||||||
|  | ] | ||||||
|  | py = [ | ||||||
|  |     {file = "py-1.8.2-py2.py3-none-any.whl", hash = "sha256:a673fa23d7000440cc885c17dbd34fafcb7d7a6e230b29f6766400de36a33c44"}, | ||||||
|  |     {file = "py-1.8.2.tar.gz", hash = "sha256:f3b3a4c36512a4c4f024041ab51866f11761cc169670204b235f6b20523d4e6b"}, | ||||||
|  | ] | ||||||
|  | pygments = [ | ||||||
|  |     {file = "Pygments-2.6.1-py3-none-any.whl", hash = "sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324"}, | ||||||
|  |     {file = "Pygments-2.6.1.tar.gz", hash = "sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44"}, | ||||||
|  | ] | ||||||
|  | pyparsing = [ | ||||||
|  |     {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, | ||||||
|  |     {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, | ||||||
|  | ] | ||||||
|  | pytest = [ | ||||||
|  |     {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"}, | ||||||
|  |     {file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"}, | ||||||
|  | ] | ||||||
|  | pytest-asyncio = [ | ||||||
|  |     {file = "pytest-asyncio-0.12.0.tar.gz", hash = "sha256:475bd2f3dc0bc11d2463656b3cbaafdbec5a47b47508ea0b329ee693040eebd2"}, | ||||||
|  | ] | ||||||
|  | pytest-cov = [ | ||||||
|  |     {file = "pytest-cov-2.10.0.tar.gz", hash = "sha256:1a629dc9f48e53512fcbfda6b07de490c374b0c83c55ff7a1720b3fccff0ac87"}, | ||||||
|  |     {file = "pytest_cov-2.10.0-py2.py3-none-any.whl", hash = "sha256:6e6d18092dce6fad667cd7020deed816f858ad3b49d5b5e2b1cc1c97a4dba65c"}, | ||||||
|  | ] | ||||||
|  | regex = [ | ||||||
|  |     {file = "regex-2020.6.8-cp27-cp27m-win32.whl", hash = "sha256:fbff901c54c22425a5b809b914a3bfaf4b9570eee0e5ce8186ac71eb2025191c"}, | ||||||
|  |     {file = "regex-2020.6.8-cp27-cp27m-win_amd64.whl", hash = "sha256:112e34adf95e45158c597feea65d06a8124898bdeac975c9087fe71b572bd938"}, | ||||||
|  |     {file = "regex-2020.6.8-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:92d8a043a4241a710c1cf7593f5577fbb832cf6c3a00ff3fc1ff2052aff5dd89"}, | ||||||
|  |     {file = "regex-2020.6.8-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:bae83f2a56ab30d5353b47f9b2a33e4aac4de9401fb582b55c42b132a8ac3868"}, | ||||||
|  |     {file = "regex-2020.6.8-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:b2ba0f78b3ef375114856cbdaa30559914d081c416b431f2437f83ce4f8b7f2f"}, | ||||||
|  |     {file = "regex-2020.6.8-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:95fa7726d073c87141f7bbfb04c284901f8328e2d430eeb71b8ffdd5742a5ded"}, | ||||||
|  |     {file = "regex-2020.6.8-cp36-cp36m-win32.whl", hash = "sha256:e3cdc9423808f7e1bb9c2e0bdb1c9dc37b0607b30d646ff6faf0d4e41ee8fee3"}, | ||||||
|  |     {file = "regex-2020.6.8-cp36-cp36m-win_amd64.whl", hash = "sha256:c78e66a922de1c95a208e4ec02e2e5cf0bb83a36ceececc10a72841e53fbf2bd"}, | ||||||
|  |     {file = "regex-2020.6.8-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:08997a37b221a3e27d68ffb601e45abfb0093d39ee770e4257bd2f5115e8cb0a"}, | ||||||
|  |     {file = "regex-2020.6.8-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:2f6f211633ee8d3f7706953e9d3edc7ce63a1d6aad0be5dcee1ece127eea13ae"}, | ||||||
|  |     {file = "regex-2020.6.8-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:55b4c25cbb3b29f8d5e63aeed27b49fa0f8476b0d4e1b3171d85db891938cc3a"}, | ||||||
|  |     {file = "regex-2020.6.8-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:89cda1a5d3e33ec9e231ece7307afc101b5217523d55ef4dc7fb2abd6de71ba3"}, | ||||||
|  |     {file = "regex-2020.6.8-cp37-cp37m-win32.whl", hash = "sha256:690f858d9a94d903cf5cada62ce069b5d93b313d7d05456dbcd99420856562d9"}, | ||||||
|  |     {file = "regex-2020.6.8-cp37-cp37m-win_amd64.whl", hash = "sha256:1700419d8a18c26ff396b3b06ace315b5f2a6e780dad387e4c48717a12a22c29"}, | ||||||
|  |     {file = "regex-2020.6.8-cp38-cp38-manylinux1_i686.whl", hash = "sha256:654cb773b2792e50151f0e22be0f2b6e1c3a04c5328ff1d9d59c0398d37ef610"}, | ||||||
|  |     {file = "regex-2020.6.8-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:52e1b4bef02f4040b2fd547357a170fc1146e60ab310cdbdd098db86e929b387"}, | ||||||
|  |     {file = "regex-2020.6.8-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:cf59bbf282b627130f5ba68b7fa3abdb96372b24b66bdf72a4920e8153fc7910"}, | ||||||
|  |     {file = "regex-2020.6.8-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:5aaa5928b039ae440d775acea11d01e42ff26e1561c0ffcd3d805750973c6baf"}, | ||||||
|  |     {file = "regex-2020.6.8-cp38-cp38-win32.whl", hash = "sha256:97712e0d0af05febd8ab63d2ef0ab2d0cd9deddf4476f7aa153f76feef4b2754"}, | ||||||
|  |     {file = "regex-2020.6.8-cp38-cp38-win_amd64.whl", hash = "sha256:6ad8663c17db4c5ef438141f99e291c4d4edfeaacc0ce28b5bba2b0bf273d9b5"}, | ||||||
|  |     {file = "regex-2020.6.8.tar.gz", hash = "sha256:e9b64e609d37438f7d6e68c2546d2cb8062f3adb27e6336bc129b51be20773ac"}, | ||||||
|  | ] | ||||||
|  | requests = [ | ||||||
|  |     {file = "requests-2.24.0-py2.py3-none-any.whl", hash = "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898"}, | ||||||
|  |     {file = "requests-2.24.0.tar.gz", hash = "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b"}, | ||||||
|  | ] | ||||||
|  | six = [ | ||||||
|  |     {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, | ||||||
|  |     {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, | ||||||
|  | ] | ||||||
|  | toml = [ | ||||||
|  |     {file = "toml-0.10.1-py2.py3-none-any.whl", hash = "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"}, | ||||||
|  |     {file = "toml-0.10.1.tar.gz", hash = "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f"}, | ||||||
|  | ] | ||||||
|  | tox = [ | ||||||
|  |     {file = "tox-3.15.2-py2.py3-none-any.whl", hash = "sha256:50a188b8e17580c1fb931f494a754e6507d4185f54fb18aca5ba3e12d2ffd55e"}, | ||||||
|  |     {file = "tox-3.15.2.tar.gz", hash = "sha256:c696d36cd7c6a28ada2da780400e44851b20ee19ef08cfe73344a1dcebbbe9f3"}, | ||||||
|  | ] | ||||||
|  | typed-ast = [ | ||||||
|  |     {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"}, | ||||||
|  |     {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"}, | ||||||
|  |     {file = "typed_ast-1.4.1-cp35-cp35m-win32.whl", hash = "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919"}, | ||||||
|  |     {file = "typed_ast-1.4.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01"}, | ||||||
|  |     {file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"}, | ||||||
|  |     {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"}, | ||||||
|  |     {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"}, | ||||||
|  |     {file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"}, | ||||||
|  |     {file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"}, | ||||||
|  |     {file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"}, | ||||||
|  |     {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"}, | ||||||
|  |     {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"}, | ||||||
|  |     {file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"}, | ||||||
|  |     {file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"}, | ||||||
|  |     {file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"}, | ||||||
|  |     {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"}, | ||||||
|  |     {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"}, | ||||||
|  |     {file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"}, | ||||||
|  |     {file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"}, | ||||||
|  |     {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"}, | ||||||
|  |     {file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"}, | ||||||
|  | ] | ||||||
|  | typing-extensions = [ | ||||||
|  |     {file = "typing_extensions-3.7.4.2-py2-none-any.whl", hash = "sha256:f8d2bd89d25bc39dabe7d23df520442fa1d8969b82544370e03d88b5a591c392"}, | ||||||
|  |     {file = "typing_extensions-3.7.4.2-py3-none-any.whl", hash = "sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5"}, | ||||||
|  |     {file = "typing_extensions-3.7.4.2.tar.gz", hash = "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae"}, | ||||||
|  | ] | ||||||
|  | urllib3 = [ | ||||||
|  |     {file = "urllib3-1.25.9-py2.py3-none-any.whl", hash = "sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115"}, | ||||||
|  |     {file = "urllib3-1.25.9.tar.gz", hash = "sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527"}, | ||||||
|  | ] | ||||||
|  | virtualenv = [ | ||||||
|  |     {file = "virtualenv-20.0.23-py2.py3-none-any.whl", hash = "sha256:ccfb8e1e05a1174f7bd4c163700277ba730496094fe1a58bea9d4ac140a207c8"}, | ||||||
|  |     {file = "virtualenv-20.0.23.tar.gz", hash = "sha256:5102fbf1ec57e80671ef40ed98a84e980a71194cedf30c87c2b25c3a9e0b0107"}, | ||||||
|  | ] | ||||||
|  | wcwidth = [ | ||||||
|  |     {file = "wcwidth-0.2.4-py2.py3-none-any.whl", hash = "sha256:79375666b9954d4a1a10739315816324c3e73110af9d0e102d906fdb0aec009f"}, | ||||||
|  |     {file = "wcwidth-0.2.4.tar.gz", hash = "sha256:8c6b5b6ee1360b842645f336d9e5d68c55817c26d3050f46b235ef2bc650e48f"}, | ||||||
|  | ] | ||||||
|  | zipp = [ | ||||||
|  |     {file = "zipp-3.1.0-py3-none-any.whl", hash = "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b"}, | ||||||
|  |     {file = "zipp-3.1.0.tar.gz", hash = "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"}, | ||||||
|  | ] | ||||||
| @@ -1,9 +1,60 @@ | |||||||
|  | [tool.poetry] | ||||||
|  | name = "betterproto" | ||||||
|  | version = "2.0.0b1" | ||||||
|  | description = "A better Protobuf / gRPC generator & library" | ||||||
|  | authors = ["Daniel G. Taylor <danielgtaylor@gmail.com>"] | ||||||
|  | readme = "README.md" | ||||||
|  | repository = "https://github.com/danielgtaylor/python-betterproto" | ||||||
|  | keywords = ["protobuf", "gRPC"] | ||||||
|  | license = "MIT" | ||||||
|  |  | ||||||
|  | exclude = ["betterproto/tests"] | ||||||
|  |  | ||||||
|  | [tool.poetry.dependencies] | ||||||
|  | python = "^3.6" | ||||||
|  | backports-datetime-fromisoformat = { version = "^1.0.0", python = "<3.7" } | ||||||
|  | black = { version = "^19.10b0", optional = true } | ||||||
|  | dataclasses = { version = "^0.7", python = ">=3.6, <3.7" } | ||||||
|  | grpclib = "^0.3.1" | ||||||
|  | jinja2 = { version = "^2.11.2", optional = true } | ||||||
|  | protobuf = { version = "^3.12.2", optional = true } | ||||||
|  |  | ||||||
|  | [tool.poetry.dev-dependencies] | ||||||
|  | black = "^19.10b0" | ||||||
|  | bpython = "^0.19" | ||||||
|  | jinja2 = "^2.11.2" | ||||||
|  | mypy = "^0.770" | ||||||
|  | protobuf = "^3.12.2" | ||||||
|  | pytest = "^5.4.2" | ||||||
|  | pytest-asyncio = "^0.12.0" | ||||||
|  | pytest-cov = "^2.9.0" | ||||||
|  | tox = "^3.15.1" | ||||||
|  |  | ||||||
|  | [tool.poetry.scripts] | ||||||
|  | protoc-gen-python_betterproto = "betterproto.plugin:main" | ||||||
|  |  | ||||||
|  | [tool.poetry.extras] | ||||||
|  | compiler = ["black", "jinja2", "protobuf"] | ||||||
|  |  | ||||||
| [tool.black] | [tool.black] | ||||||
| target-version = ['py36'] | target-version = ['py36'] | ||||||
|  |  | ||||||
| [tool.isort] | [tool.coverage.run] | ||||||
| multi_line_output = 3 | omit = ["betterproto/tests/*"] | ||||||
| include_trailing_comma = true |  | ||||||
| force_grid_wrap = 0 | [tool.tox] | ||||||
| use_parentheses = true | legacy_tox_ini = """ | ||||||
| line_length = 88 | [tox] | ||||||
|  | isolated_build = true | ||||||
|  | envlist = py36, py37, py38 | ||||||
|  |  | ||||||
|  | [testenv] | ||||||
|  | whitelist_externals = poetry | ||||||
|  | commands = | ||||||
|  |     poetry install -v --extras compiler | ||||||
|  |     poetry run pytest --cov betterproto | ||||||
|  | """ | ||||||
|  |  | ||||||
|  | [build-system] | ||||||
|  | requires = ["poetry>=0.12"] | ||||||
|  | build-backend = "poetry.masonry.api" | ||||||
|   | |||||||
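Note on the new Poetry layout above: the `compiler` extra declared in `[tool.poetry.extras]` (black, jinja2, protobuf) is what downstream users opt into when they need code generation, and it is the same extra the tox environment installs via `poetry install -v --extras compiler`. A minimal, illustrative sketch of a consumer-side pyproject.toml dependency entry follows; the version constraint is an assumption taken from the `version = "2.0.0b1"` field above, not something this diff prescribes:

    [tool.poetry.dependencies]
    python = "^3.6"
    # Opt into the optional code-generation toolchain (black, jinja2, protobuf)
    # exposed through betterproto's "compiler" extra.
    betterproto = { version = "^2.0.0b1", extras = ["compiler"] }

During development of this repository itself, the equivalent is the `poetry install -v --extras compiler` step already shown in the `[tool.tox]` test environment.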
							29	setup.py
							| @@ -1,29 +0,0 @@ | |||||||
| from setuptools import setup, find_packages |  | ||||||
|  |  | ||||||
| setup( |  | ||||||
|     name="betterproto", |  | ||||||
|     version="1.2.5", |  | ||||||
|     description="A better Protobuf / gRPC generator & library", |  | ||||||
|     long_description=open("README.md", "r", encoding="utf-8").read(), |  | ||||||
|     long_description_content_type="text/markdown", |  | ||||||
|     url="http://github.com/danielgtaylor/python-betterproto", |  | ||||||
|     author="Daniel G. Taylor", |  | ||||||
|     author_email="danielgtaylor@gmail.com", |  | ||||||
|     license="MIT", |  | ||||||
|     entry_points={ |  | ||||||
|         "console_scripts": ["protoc-gen-python_betterproto=betterproto.plugin:main"] |  | ||||||
|     }, |  | ||||||
|     packages=find_packages( |  | ||||||
|         exclude=["tests", "*.tests", "*.tests.*", "output", "output.*"] |  | ||||||
|     ), |  | ||||||
|     package_data={"betterproto": ["py.typed", "templates/template.py.j2"]}, |  | ||||||
|     python_requires=">=3.6", |  | ||||||
|     install_requires=[ |  | ||||||
|         'dataclasses; python_version<"3.7"', |  | ||||||
|         'backports-datetime-fromisoformat; python_version<"3.7"', |  | ||||||
|         "grpclib", |  | ||||||
|         "stringcase", |  | ||||||
|     ], |  | ||||||
|     extras_require={"compiler": ["black", "jinja2", "protobuf"]}, |  | ||||||
|     zip_safe=False, |  | ||||||
| ) |  | ||||||