Compare commits (49 commits)

SHA1: 86e1d3defb, 01fa7fbbdb, 90196eb1bf, 3c111792a9, 77e9d7bc91, fe2ddff88b, 0d23297f46, 6be6d55e5b, 25674bc73a, 1715eda1a3, f5775049dd, 6fd0f8a42f, f52dc009af, 9248d456f9, c24f2f6b09, 73b75349ee, 7bc553221a, 7413a05e19, bf194ca8ce, b06da0223a, 83554cdc5d, 6c76bfccad, a1746e457c, 2a0435dea9, e87f67f1e1, 7b4b7ac749, 5b9b51db3f, ffeee3c901, b4366d2427, ec1c80f3a9, d2083632eb, 125389461f, c09c878eaf, ef3e0c11d5, 881f70f748, 6ffca1a0c7, 95e41720cb, 40c0008e6e, ce75e55d60, 4d4f951e09, 354e861dad, 3a76486993, 4d0a6b4de6, c01d2993e0, bab5ebf2f0, 7e5cefd7d6, 0cea28d521, b92e6551fd, bbabde32a1
.github/workflows/pypi.yml (vendored, 6 changed lines)

@@ -8,12 +8,12 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v1
      - uses: actions/setup-python@v2
        with:
          python-version: '3.x'
      - uses: dschep/install-poetry-action@v1.3
      - name: Build dists
        run: |
          python3 setup.py sdist
        run: make build
      - name: Pypi Publish
        uses: pypa/gh-action-pypi-publish@master
        with:
.github/workflows/test.yml (vendored, 7 changed lines)

@@ -19,10 +19,7 @@ jobs:
      - uses: actions/setup-python@v2
        with:
          python-version: '3.x'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements-dev.txt
      - uses: dschep/install-poetry-action@v1.3
      - name: CI
        env:
          MYSQL_PASS: root
@@ -31,4 +28,4 @@ jobs:
          POSTGRES_PASS: 123456
          POSTGRES_HOST: 127.0.0.1
          POSTGRES_PORT: 5432
        run: make testall
        run: make ci
@@ -1,9 +1,44 @@

=========
ChangeLog
=========

0.2
===

0.2.1
-----
- Fix bug on Windows.
- Enhance PostgreSQL support.

0.2.0
-----
- Update model file find method.
- Set ``--safe`` bool.

0.1
===

0.1.9
-----
- Fix default_connection when upgrading.
- Find default app instead of default.
- Diff MySQL DDL.
- Check tortoise config.

0.1.8
-----
- Fix upgrade error when migrating.
- Fix init db SQL error.
- Support change column.

0.1.7
-----
- Exclude models.Aerich.
- Add init record when init-db.
- Fix version num str.

0.1.6
-----
- Update dependency_links.

0.1.5
-----
- Add sqlite and postgres support.
LICENSE (214 changed lines)

@@ -1,21 +1,201 @@
|
||||
The MIT License (MIT)
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
Copyright (c) 2020 long2ice
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
1. Definitions.
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright 2020 long2ice
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
@@ -1,3 +0,0 @@

include LICENSE
include README.rst
include requirements.txt
Makefile (31 changed lines)

@@ -1,4 +1,4 @@
checkfiles = aerich/ tests/
checkfiles = aerich/ tests/ conftest.py
black_opts = -l 100 -t py38
py_warn = PYTHONDEVMODE=1
MYSQL_HOST ?= "127.0.0.1"
@@ -12,29 +12,26 @@ help:
    @echo "usage: make <target>"
    @echo "Targets:"
    @echo "    up      Updates dev/test dependencies"
    @echo "    deps    Ensure dev/test dependencies are installed"
    @echo "    deps    Ensure dev/test dependencies are installed"
    @echo "    check   Checks that build is sane"
    @echo "    lint    Reports all linter violations"
    @echo "    test    Runs all tests"
    @echo "    style   Auto-formats the code"

up:
    @poetry update

deps:
    @which pip-sync > /dev/null || pip install -q pip-tools
    @pip install -r requirements-dev.txt
    @poetry install -E dbdrivers --no-root

style: deps
    isort -rc $(checkfiles)
    isort -src $(checkfiles)
    black $(black_opts) $(checkfiles)

check: deps
ifneq ($(shell which black),)
    black --check $(black_opts) $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false)
endif
    flake8 $(checkfiles)
    mypy $(checkfiles)
    pylint -d C,W,R $(checkfiles)
    bandit -r $(checkfiles)
    python setup.py check -mrs
    bandit -x tests -r $(checkfiles)

test: deps
    $(py_warn) TEST_DB=sqlite://:memory: py.test
@@ -50,10 +47,10 @@ test_postgres:

testall: deps test_sqlite test_postgres test_mysql

publish: deps
    rm -fR dist/
    python setup.py sdist
    twine upload dist/*
build: deps
    @poetry build

ci:
    @act -P ubuntu-latest=nektos/act-environments-ubuntu:18.04 -b
publish: deps
    @poetry publish --build

ci: check testall
README.md (new file, 182 added lines)

@@ -0,0 +1,182 @@

# Aerich

[![image](https://img.shields.io/pypi/v/aerich.svg?style=flat)](https://pypi.python.org/pypi/aerich)
[![image](https://img.shields.io/github/license/long2ice/aerich)](https://github.com/long2ice/aerich)
[![image](https://github.com/long2ice/aerich/workflows/pypi/badge.svg)](https://github.com/long2ice/aerich/actions?query=workflow:pypi)
[![image](https://github.com/long2ice/aerich/workflows/test/badge.svg)](https://github.com/long2ice/aerich/actions?query=workflow:test)

## Introduction

Tortoise-ORM is currently the best asyncio ORM, but it lacks a database migrations tool like Alembic for SQLAlchemy, or the built-in migrations of the Django ORM.

This project aims to be the best migrations tool for Tortoise-ORM; it is written by one of the contributors of Tortoise-ORM.

## Install

Just install from PyPI:

```shell
$ pip install aerich
```

## Quick Start

```shell
$ aerich -h

Usage: aerich [OPTIONS] COMMAND [ARGS]...

Options:
  -c, --config TEXT  Config file.  [default: aerich.ini]
  --app TEXT         Tortoise-ORM app name.  [default: models]
  -n, --name TEXT    Name of section in .ini file to use for aerich config.
                     [default: aerich]
  -h, --help         Show this message and exit.

Commands:
  downgrade  Downgrade to previous version.
  heads      Show current available heads in migrate location.
  history    List all migrate items.
  init       Init config file and generate root migrate location.
  init-db    Generate schema and generate app migrate location.
  migrate    Generate migrate changes file.
  upgrade    Upgrade to latest version.
```

## Usage

You need to add `aerich.models` to your `Tortoise-ORM` config first, for example:

```python
TORTOISE_ORM = {
    "connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"},
    "apps": {
        "models": {
            "models": ["tests.models", "aerich.models"],
            "default_connection": "default",
        },
    },
}
```
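For reference, `tests.models` in the config above is just an ordinary Tortoise-ORM models module. A minimal hypothetical sketch (the `User` model and its fields are invented for illustration):

```python
from tortoise import Model, fields


class User(Model):
    # Any ordinary Tortoise-ORM model works; aerich diffs these definitions
    # against the previously stored copy to generate migrations.
    id = fields.IntField(pk=True)
    name = fields.CharField(max_length=50)
    created_at = fields.DatetimeField(auto_now_add=True)
```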
### Initialization

```shell
$ aerich init -h

Usage: aerich init [OPTIONS]

  Init config file and generate root migrate location.

Options:
  -t, --tortoise-orm TEXT  Tortoise-ORM config module dict variable, like settings.TORTOISE_ORM.
                           [required]
  --location TEXT          Migrate store location.  [default: ./migrations]
  -h, --help               Show this message and exit.
```

Init config file and location:

```shell
$ aerich init -t tests.backends.mysql.TORTOISE_ORM

Success create migrate location ./migrations
Success generate config file aerich.ini
```
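The generated `aerich.ini` simply records the values passed to `init`; assuming the command above, it should look roughly like this sketch (the section name follows the `-n/--name` option, `aerich` by default):

```ini
[aerich]
tortoise_orm = tests.backends.mysql.TORTOISE_ORM
location = ./migrations
```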
### Init db

```shell
$ aerich init-db

Success create app migrate location ./migrations/models
Success generate schema for app "models"
```

> **Note**
> If your Tortoise-ORM app is not the default `models`, you must specify `--app`, e.g. `aerich --app other_models init-db`.

### Update models and make migrate

```shell
$ aerich migrate --name drop_column

Success migrate 1_202029051520102929_drop_column.json
```

The format of the migration filename is `{version_num}_{datetime}_{name|update}.json`.
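Each migration file is plain JSON holding the SQL to apply and to revert. A hypothetical sketch of what `1_202029051520102929_drop_column.json` could contain for a MySQL backend (the table and column names are invented; the real statements depend on your models):

```json
{
  "upgrade": [
    "ALTER TABLE `user` DROP COLUMN `nickname`"
  ],
  "downgrade": [
    "ALTER TABLE `user` ADD `nickname` VARCHAR(50) NOT NULL"
  ]
}
```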
### Upgrade to latest version

```shell
$ aerich upgrade

Success upgrade 1_202029051520102929_drop_column.json
```

Now your database is migrated to the latest version.

### Downgrade to previous version

```shell
$ aerich downgrade

Success downgrade 1_202029051520102929_drop_column.json
```

Now your database is rolled back to the previous version.

### Show history

```shell
$ aerich history

1_202029051520102929_drop_column.json
```

### Show heads to be migrated

```shell
$ aerich heads

1_202029051520102929_drop_column.json
```

## Limitations

- `rename column` is not supported yet.
- `Sqlite` and `Postgres` support may not work as expected, because I don't use them in my own work.

## Support this project

- Just give a star!
- Donate.

### AliPay

<img width="200" src="https://github.com/long2ice/aerich/raw/dev/images/alipay.jpeg"/>

### WeChat Pay

<img width="200" src="https://github.com/long2ice/aerich/raw/dev/images/wechatpay.jpeg"/>

### PayPal

Donate via [PayPal](https://www.paypal.me/long2ice) to my account, long2ice.

## License

This project is licensed under the
[Apache-2.0](https://github.com/long2ice/aerich/blob/master/LICENSE) License.
README.rst (removed, 160 lines)

@@ -1,160 +0,0 @@
|
||||
======
|
||||
Aerich
|
||||
======
|
||||
|
||||
.. image:: https://img.shields.io/pypi/v/aerich.svg?style=flat
|
||||
:target: https://pypi.python.org/pypi/aerich
|
||||
.. image:: https://img.shields.io/github/license/long2ice/aerich
|
||||
:target: https://github.com/long2ice/aerich
|
||||
.. image:: https://github.com/long2ice/aerich/workflows/pypi/badge.svg
|
||||
:target: https://github.com/long2ice/aerich/actions?query=workflow:pypi
|
||||
.. image:: https://github.com/long2ice/aerich/workflows/test/badge.svg
|
||||
:target: https://github.com/long2ice/aerich/actions?query=workflow:test
|
||||
|
||||
Introduction
|
||||
============
|
||||
|
||||
Tortoise-ORM is the best asyncio ORM now, but it lacks a database migrations tool like alembic for SQLAlchemy, or Django ORM with it's own migrations tool.
|
||||
|
||||
This project aim to be a best migrations tool for Tortoise-ORM and which written by one of contributors of Tortoise-ORM.
|
||||
|
||||
Install
|
||||
=======
|
||||
|
||||
Just install from pypi:
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ pip install aerich
|
||||
|
||||
Quick Start
|
||||
===========
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ aerich -h
|
||||
|
||||
Usage: aerich [OPTIONS] COMMAND [ARGS]...
|
||||
|
||||
Options:
|
||||
-c, --config TEXT Config file. [default: aerich.ini]
|
||||
--app TEXT Tortoise-ORM app name. [default: models]
|
||||
-n, --name TEXT Name of section in .ini file to use for aerich config.
|
||||
[default: aerich]
|
||||
-h, --help Show this message and exit.
|
||||
|
||||
Commands:
|
||||
downgrade Downgrade to previous version.
|
||||
heads Show current available heads in migrate location.
|
||||
history List all migrate items.
|
||||
init Init config file and generate root migrate location.
|
||||
init-db Generate schema and generate app migrate location.
|
||||
migrate Generate migrate changes file.
|
||||
upgrade Upgrade to latest version.
|
||||
|
||||
Usage
|
||||
=====
|
||||
You need add ``aerich.models`` to your ``Tortoise-ORM`` config first, example:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
TORTOISE_ORM = {
|
||||
"connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"},
|
||||
"apps": {
|
||||
"models": {
|
||||
"models": ["tests.models", "aerich.models"],
|
||||
"default_connection": "default",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
Initialization
|
||||
--------------
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ aerich init -h
|
||||
|
||||
Usage: aerich init [OPTIONS]
|
||||
|
||||
Init config file and generate root migrate location.
|
||||
|
||||
Options:
|
||||
-t, --tortoise-orm TEXT Tortoise-ORM config module dict variable, like settings.TORTOISE_ORM.
|
||||
[required]
|
||||
--location TEXT Migrate store location. [default: ./migrations]
|
||||
-h, --help Show this message and exit.
|
||||
|
||||
Init config file and location:
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ aerich init -t tests.backends.mysql.TORTOISE_ORM
|
||||
|
||||
Success create migrate location ./migrations
|
||||
Success generate config file aerich.ini
|
||||
|
||||
Init db
|
||||
-------
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ aerich init-db
|
||||
|
||||
Success create app migrate location ./migrations/models
|
||||
Success generate schema for app "models"
|
||||
|
||||
Update models and make migrate
|
||||
------------------------------
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ aerich migrate --name drop_column
|
||||
|
||||
Success migrate 1_202029051520102929_drop_column.json
|
||||
|
||||
Format of migrate filename is ``{version_num}_{datetime}_{name|update}.json``
|
||||
|
||||
Upgrade to latest version
|
||||
-------------------------
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ aerich upgrade
|
||||
|
||||
Success upgrade 1_202029051520102929_drop_column.json
|
||||
|
||||
Now your db is migrated to latest.
|
||||
|
||||
Downgrade to previous version
|
||||
-----------------------------
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ aerich downgrade
|
||||
|
||||
Success downgrade 1_202029051520102929_drop_column.json
|
||||
|
||||
Now your db rollback to previous version.
|
||||
|
||||
Show history
|
||||
------------
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ aerich history
|
||||
|
||||
1_202029051520102929_drop_column.json
|
||||
|
||||
Show heads to be migrated
|
||||
-------------------------
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ aerich heads
|
||||
|
||||
1_202029051520102929_drop_column.json
|
||||
|
||||
License
|
||||
=======
|
||||
This project is licensed under the `MIT <https://github.com/long2ice/aerich/blob/master/LICENSE>`_ License.
|
@@ -1 +1 @@
__version__ = "0.1.4"
__version__ = "0.2.1"
@@ -6,8 +6,10 @@ from enum import Enum

import asyncclick as click
from asyncclick import Context, UsageError
from tortoise import ConfigurationError, Tortoise, generate_schema_for_client
from tortoise import Tortoise, generate_schema_for_client
from tortoise.exceptions import OperationalError
from tortoise.transactions import in_transaction
from tortoise.utils import get_schema_sql

from aerich.migrate import Migrate
from aerich.utils import get_app_connection, get_app_connection_name, get_tortoise_config

@@ -26,11 +28,11 @@ parser = ConfigParser()

@click.group(context_settings={"help_option_names": ["-h", "--help"]})
@click.version_option(__version__)
@click.version_option(__version__, "-V", "--version")
@click.option(
    "-c", "--config", default="aerich.ini", show_default=True, help="Config file.",
)
@click.option("--app", default="models", show_default=True, help="Tortoise-ORM app name.")
@click.option("--app", required=False, help="Tortoise-ORM app name.")
@click.option(
    "-n",
    "--name",

@@ -43,7 +45,6 @@ async def cli(ctx: Context, config, app, name):
    ctx.ensure_object(dict)
    ctx.obj["config_file"] = config
    ctx.obj["name"] = name
    ctx.obj["app"] = app

    invoked_subcommand = ctx.invoked_subcommand
    if invoked_subcommand != "init":

@@ -55,15 +56,15 @@ async def cli(ctx: Context, config, app, name):
        tortoise_orm = parser[name]["tortoise_orm"]

        tortoise_config = get_tortoise_config(ctx, tortoise_orm)

        app = app or list(tortoise_config.get("apps").keys())[0]
        if "aerich.models" not in tortoise_config.get("apps").get(app).get("models"):
            raise UsageError("Check your tortoise config and add aerich.models to it.", ctx=ctx)
        ctx.obj["config"] = tortoise_config
        ctx.obj["location"] = location
        ctx.obj["app"] = app

        if invoked_subcommand != "init-db":
            try:
                await Migrate.init_with_old_models(tortoise_config, app, location)
            except ConfigurationError:
                raise UsageError(ctx=ctx, message="You must exec init-db first")
            await Migrate.init_with_old_models(tortoise_config, app, location)


@cli.command(help="Generate migrate changes file.")

@@ -88,15 +89,19 @@ async def upgrade(ctx: Context):
    app = ctx.obj["app"]
    migrated = False
    for version in Migrate.get_all_version_files():
        if not await Aerich.exists(version=version, app=app):
        try:
            exists = await Aerich.exists(version=version, app=app)
        except OperationalError:
            exists = False
        if not exists:
            async with in_transaction(get_app_connection_name(config, app)) as conn:
                file_path = os.path.join(Migrate.migrate_location, version)
                with open(file_path, "r") as f:
                with open(file_path, "r", encoding="utf-8") as f:
                    content = json.load(f)
                    upgrade_query_list = content.get("upgrade")
                    for upgrade_query in upgrade_query_list:
                        await conn.execute_query(upgrade_query)
                    await Aerich.create(version=version, app=app)
                        await conn.execute_script(upgrade_query)
                await Aerich.create(version=version, app=app)
            click.secho(f"Success upgrade {version}", fg=Color.green)
            migrated = True
    if not migrated:

@@ -114,9 +119,11 @@ async def downgrade(ctx: Context):
    file = last_version.version
    async with in_transaction(get_app_connection_name(config, app)) as conn:
        file_path = os.path.join(Migrate.migrate_location, file)
        with open(file_path, "r") as f:
        with open(file_path, "r", encoding="utf-8") as f:
            content = json.load(f)
            downgrade_query_list = content.get("downgrade")
            if not downgrade_query_list:
                return click.secho("No downgrade item found", fg=Color.yellow)
            for downgrade_query in downgrade_query_list:
                await conn.execute_query(downgrade_query)
            await last_version.delete()

@@ -170,7 +177,7 @@ async def init(
    parser.set(name, "tortoise_orm", tortoise_orm)
    parser.set(name, "location", location)

    with open(config_file, "w") as f:
    with open(config_file, "w", encoding="utf-8") as f:
        parser.write(f)

    if not os.path.isdir(location):

@@ -183,7 +190,7 @@ async def init(

@cli.command(help="Generate schema and generate app migrate location.")
@click.option(
    "--safe",
    is_flag=True,
    type=bool,
    default=True,
    help="When set to true, creates the table only when it does not already exist.",
    show_default=True,

@@ -198,6 +205,8 @@ async def init_db(ctx: Context, safe):
    if not os.path.isdir(dirname):
        os.mkdir(dirname)
        click.secho(f"Success create app migrate location {dirname}", fg=Color.green)
    else:
        return click.secho(f"Inited {app} already", fg=Color.yellow)

    Migrate.write_old_models(config, app, location)

@@ -205,6 +214,15 @@ async def init_db(ctx: Context, safe):
    connection = get_app_connection(config, app)
    await generate_schema_for_client(connection, safe)

    schema = get_schema_sql(connection, safe)

    version = await Migrate.generate_version()
    await Aerich.create(version=version, app=app)
    with open(os.path.join(dirname, version), "w", encoding="utf-8") as f:
        content = {
            "upgrade": [schema],
        }
        json.dump(content, f, ensure_ascii=False, indent=2)
    return click.secho(f'Success generate schema for app "{app}"', fg=Color.green)
@@ -8,16 +8,17 @@ from tortoise.fields import Field, JSONField, TextField, UUIDField

class BaseDDL:
    schema_generator_cls: Type[BaseSchemaGenerator] = BaseSchemaGenerator
    DIALECT = "sql"
    _DROP_TABLE_TEMPLATE = "DROP TABLE IF EXISTS {table_name}"
    _ADD_COLUMN_TEMPLATE = "ALTER TABLE {table_name} ADD {column}"
    _DROP_COLUMN_TEMPLATE = "ALTER TABLE {table_name} DROP COLUMN {column_name}"
    _DROP_TABLE_TEMPLATE = 'DROP TABLE IF EXISTS "{table_name}"'
    _ADD_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" ADD {column}'
    _DROP_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" DROP COLUMN "{column_name}"'
    _ADD_INDEX_TEMPLATE = (
        "ALTER TABLE {table_name} ADD {unique} INDEX {index_name} ({column_names})"
        'ALTER TABLE "{table_name}" ADD {unique} INDEX "{index_name}" ({column_names})'
    )
    _DROP_INDEX_TEMPLATE = "ALTER TABLE {table_name} DROP INDEX {index_name}"
    _ADD_FK_TEMPLATE = "ALTER TABLE {table_name} ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` (`{field}`) ON DELETE {on_delete}"
    _DROP_FK_TEMPLATE = "ALTER TABLE {table_name} DROP FOREIGN KEY {fk_name}"
    _M2M_TABLE_TEMPLATE = "CREATE TABLE {table_name} ({backward_key} {backward_type} NOT NULL REFERENCES {backward_table} ({backward_field}) ON DELETE CASCADE,{forward_key} {forward_type} NOT NULL REFERENCES {forward_table} ({forward_field}) ON DELETE CASCADE){extra}{comment};"
    _DROP_INDEX_TEMPLATE = 'ALTER TABLE "{table_name}" DROP INDEX "{index_name}"'
    _ADD_FK_TEMPLATE = 'ALTER TABLE "{table_name}" ADD CONSTRAINT "{fk_name}" FOREIGN KEY ("{db_column}") REFERENCES "{table}" ("{field}") ON DELETE {on_delete}'
    _DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP FOREIGN KEY "{fk_name}"'
    _M2M_TABLE_TEMPLATE = 'CREATE TABLE "{table_name}" ("{backward_key}" {backward_type} NOT NULL REFERENCES "{backward_table}" ("{backward_field}") ON DELETE CASCADE,"{forward_key}" {forward_type} NOT NULL REFERENCES "{forward_table}" ("{forward_field}") ON DELETE CASCADE){extra}{comment};'
    _MODIFY_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" MODIFY COLUMN {column}'

    def __init__(self, client: "BaseDBAsyncClient"):
        self.client = client

@@ -51,7 +52,7 @@ class BaseDDL:
    def drop_m2m(self, field: ManyToManyFieldInstance):
        return self._DROP_TABLE_TEMPLATE.format(table_name=field.through)

    def add_column(self, model: "Type[Model]", field_object: Field):
    def _get_default(self, model: "Type[Model]", field_object: Field):
        db_table = model._meta.db_table
        default = field_object.default
        db_column = field_object.model_field_name

@@ -74,6 +75,11 @@ class BaseDDL:
                default = ""
        else:
            default = ""
        return default

    def add_column(self, model: "Type[Model]", field_object: Field):
        db_table = model._meta.db_table

        return self._ADD_COLUMN_TEMPLATE.format(
            table_name=db_table,
            column=self.schema_generator._create_string(

@@ -89,7 +95,7 @@ class BaseDDL:
                if field_object.description
                else "",
                is_primary_key=field_object.pk,
                default=default,
                default=self._get_default(model, field_object),
            ),
        )

@@ -98,6 +104,27 @@ class BaseDDL:
            table_name=model._meta.db_table, column_name=column_name
        )

    def modify_column(self, model: "Type[Model]", field_object: Field):
        db_table = model._meta.db_table
        return self._MODIFY_COLUMN_TEMPLATE.format(
            table_name=db_table,
            column=self.schema_generator._create_string(
                db_column=field_object.model_field_name,
                field_type=field_object.get_for_dialect(self.DIALECT, "SQL_TYPE"),
                nullable="NOT NULL" if not field_object.null else "",
                unique="",
                comment=self.schema_generator._column_comment_generator(
                    table=db_table,
                    column=field_object.model_field_name,
                    comment=field_object.description,
                )
                if field_object.description
                else "",
                is_primary_key=field_object.pk,
                default=self._get_default(model, field_object),
            ),
        )

    def add_index(self, model: "Type[Model]", field_names: List[str], unique=False):
        return self._ADD_INDEX_TEMPLATE.format(
            unique="UNIQUE" if unique else "",

@@ -152,3 +179,12 @@ class BaseDDL:
                to_field=to_field_name,
            ),
        )

    def alter_column_default(self, model: "Type[Model]", field_object: Field):
        pass

    def alter_column_null(self, model: "Type[Model]", field_object: Field):
        pass

    def set_comment(self, model: "Type[Model]", field_object: Field):
        pass
@@ -6,3 +6,14 @@ from aerich.ddl import BaseDDL

class MysqlDDL(BaseDDL):
    schema_generator_cls = MySQLSchemaGenerator
    DIALECT = MySQLSchemaGenerator.DIALECT
    _DROP_TABLE_TEMPLATE = "DROP TABLE IF EXISTS `{table_name}`"
    _ADD_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` ADD {column}"
    _DROP_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` DROP COLUMN `{column_name}`"
    _ADD_INDEX_TEMPLATE = (
        "ALTER TABLE `{table_name}` ADD {unique} INDEX `{index_name}` ({column_names})"
    )
    _DROP_INDEX_TEMPLATE = "ALTER TABLE `{table_name}` DROP INDEX `{index_name}`"
    _ADD_FK_TEMPLATE = "ALTER TABLE `{table_name}` ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` (`{field}`) ON DELETE {on_delete}"
    _DROP_FK_TEMPLATE = "ALTER TABLE `{table_name}` DROP FOREIGN KEY `{fk_name}`"
    _M2M_TABLE_TEMPLATE = "CREATE TABLE `{table_name}` (`{backward_key}` {backward_type} NOT NULL REFERENCES `{backward_table}` (`{backward_field}`) ON DELETE CASCADE,`{forward_key}` {forward_type} NOT NULL REFERENCES `{forward_table}` (`{forward_field}`) ON DELETE CASCADE){extra}{comment};"
    _MODIFY_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` MODIFY COLUMN {column}"
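These DDL templates are plain `str.format` patterns. A small illustrative sketch of how the MySQL `_ADD_INDEX_TEMPLATE` above expands; the table, index, and column names here are invented, and in real use aerich fills them in from the model metadata:

```python
_ADD_INDEX_TEMPLATE = "ALTER TABLE `{table_name}` ADD {unique} INDEX `{index_name}` ({column_names})"

sql = _ADD_INDEX_TEMPLATE.format(
    table_name="user",           # hypothetical table
    unique="UNIQUE",             # empty string for a non-unique index
    index_name="uid_user_name",  # the real name comes from the schema generator
    column_names="`name`",
)
print(sql)
# ALTER TABLE `user` ADD UNIQUE INDEX `uid_user_name` (`name`)
```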
@@ -1,4 +1,8 @@

from typing import Type

from tortoise import Model
from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator
from tortoise.fields import Field

from aerich.ddl import BaseDDL

@@ -6,3 +10,40 @@ from aerich.ddl import BaseDDL

class PostgresDDL(BaseDDL):
    schema_generator_cls = AsyncpgSchemaGenerator
    DIALECT = AsyncpgSchemaGenerator.DIALECT
    _ALTER_DEFAULT_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {default}'
    _ALTER_NULL_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {set_drop} NOT NULL'
    _MODIFY_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" TYPE {datatype}'
    _SET_COMMENT_TEMPLATE = 'COMMENT ON COLUMN "{table_name}"."{column}" IS {comment}'

    def alter_column_default(self, model: "Type[Model]", field_object: Field):
        db_table = model._meta.db_table
        default = self._get_default(model, field_object)
        return self._ALTER_DEFAULT_TEMPLATE.format(
            table_name=db_table,
            column=field_object.model_field_name,
            default="SET" + default if default else "DROP DEFAULT",
        )

    def alter_column_null(self, model: "Type[Model]", field_object: Field):
        db_table = model._meta.db_table
        return self._ALTER_NULL_TEMPLATE.format(
            table_name=db_table,
            column=field_object.model_field_name,
            set_drop="DROP" if field_object.null else "SET",
        )

    def modify_column(self, model: "Type[Model]", field_object: Field):
        db_table = model._meta.db_table
        return self._MODIFY_COLUMN_TEMPLATE.format(
            table_name=db_table,
            column=field_object.model_field_name,
            datatype=field_object.get_for_dialect(self.DIALECT, "SQL_TYPE"),
        )

    def set_comment(self, model: "Type[Model]", field_object: Field):
        db_table = model._meta.db_table
        return self._SET_COMMENT_TEMPLATE.format(
            table_name=db_table,
            column=field_object.model_field_name,
            comment="'{}'".format(field_object.description) if field_object.description else "NULL",
        )
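For comparison, a sketch of the statements these new Postgres templates produce when formatted with hypothetical values (the table and column names are invented for illustration):

```python
_ALTER_NULL_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {set_drop} NOT NULL'
_SET_COMMENT_TEMPLATE = 'COMMENT ON COLUMN "{table_name}"."{column}" IS {comment}'

print(_ALTER_NULL_TEMPLATE.format(table_name="user", column="name", set_drop="DROP"))
# ALTER TABLE "user" ALTER COLUMN "name" DROP NOT NULL
print(_SET_COMMENT_TEMPLATE.format(table_name="user", column="name", comment="'nickname'"))
# COMMENT ON COLUMN "user"."name" IS 'nickname'
```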
@@ -3,6 +3,7 @@ import os

import re
from copy import deepcopy
from datetime import datetime
from importlib import import_module
from typing import Dict, List, Tuple, Type

from tortoise import (

@@ -27,6 +28,7 @@ class Migrate:
    _downgrade_fk_m2m_index_operators: List[str] = []
    _upgrade_m2m: List[str] = []
    _downgrade_m2m: List[str] = []
    _aerich = Aerich.__name__

    ddl: BaseDDL
    migrate_config: dict

@@ -34,6 +36,7 @@ class Migrate:
    diff_app = "diff_models"
    app: str
    migrate_location: str
    dialect: str

    @classmethod
    def get_old_model_file(cls):

@@ -58,39 +61,44 @@ class Migrate:
        await Tortoise.init(config=migrate_config)

        connection = get_app_connection(config, app)
        if connection.schema_generator.DIALECT == "mysql":
        cls.dialect = connection.schema_generator.DIALECT
        if cls.dialect == "mysql":
            from aerich.ddl.mysql import MysqlDDL

            cls.ddl = MysqlDDL(connection)
        elif connection.schema_generator.DIALECT == "sqlite":
        elif cls.dialect == "sqlite":
            from aerich.ddl.sqlite import SqliteDDL

            cls.ddl = SqliteDDL(connection)
        elif connection.schema_generator.DIALECT == "postgres":
        elif cls.dialect == "postgres":
            from aerich.ddl.postgres import PostgresDDL

            cls.ddl = PostgresDDL(connection)
        else:
            raise NotImplementedError("Current only support MySQL")

    @classmethod
    async def _get_last_version_num(cls):
        last_version = await cls.get_last_version()
        if not last_version:
            return 0
            return None
        version = last_version.version
        return version.split("_")[0]
        return int(version.split("_", 1)[0])

    @classmethod
    async def generate_version(cls, name=None):
        now = datetime.now().strftime("%Y%M%D%H%M%S").replace("/", "")
        last_version_num = await cls._get_last_version_num()
        if last_version_num is None:
            return f"0_{now}_init.json"
        return f"{last_version_num + 1}_{now}_{name}.json"

    @classmethod
    async def _generate_diff_sql(cls, name):
        now = datetime.now().strftime("%Y%M%D%H%M%S").replace("/", "")
        last_version_num = await cls._get_last_version_num()
        version = f"{last_version_num + 1}_{now}_{name}.json"
        version = await cls.generate_version(name)
        content = {
            "upgrade": cls.upgrade_operators,
            "downgrade": cls.downgrade_operators,
        }
        with open(os.path.join(cls.migrate_location, version), "w") as f:
        with open(os.path.join(cls.migrate_location, version), "w", encoding="utf-8") as f:
            json.dump(content, f, indent=2, ensure_ascii=False)
        return version

@@ -105,8 +113,8 @@ class Migrate:
        diff_models = apps.get(cls.diff_app)
        app_models = apps.get(cls.app)

        cls._diff_models(diff_models, app_models)
        cls._diff_models(app_models, diff_models, False)
        cls.diff_models(diff_models, app_models)
        cls.diff_models(app_models, diff_models, False)

        cls._merge_operators()

@@ -148,11 +156,11 @@ class Migrate:
        """
        pattern = rf"(\n)?('|\")({app})(.\w+)('|\")"
        for i, model_file in enumerate(model_files):
            with open(model_file, "r") as f:
            with open(model_file, "r", encoding="utf-8") as f:
                content = f.read()
            ret = re.sub(pattern, rf"\2{cls.diff_app}\4\5", content)
            mode = "w" if i == 0 else "a"
            with open(old_model_file, mode) as f:
            with open(old_model_file, mode, encoding="utf-8") as f:
                f.write(ret)

    @classmethod

@@ -166,8 +174,11 @@ class Migrate:
        """
        temp_config = deepcopy(config)
        path = os.path.join(location, app, cls.old_models)
        path = path.replace("/", ".").lstrip(".")
        temp_config["apps"][cls.diff_app] = {"models": [path]}
        path = path.replace(os.sep, ".").lstrip(".")
        temp_config["apps"][cls.diff_app] = {
            "models": [path],
            "default_connection": config.get("apps").get(app).get("default_connection", "default"),
        }
        return temp_config

    @classmethod

@@ -179,15 +190,17 @@ class Migrate:
        :param location:
        :return:
        """
        cls.app = app

        old_model_files = []
        models = config.get("apps").get(app).get("models")
        for model in models:
            old_model_files.append(model.replace(".", "/") + ".py")
            old_model_files.append(import_module(model).__file__)

        cls.cp_models(app, old_model_files, os.path.join(location, app, cls.get_old_model_file()))

    @classmethod
    def _diff_models(
    def diff_models(
        cls, old_models: Dict[str, Type[Model]], new_models: Dict[str, Type[Model]], upgrade=True
    ):
        """

@@ -197,6 +210,9 @@ class Migrate:
        :param upgrade:
        :return:
        """
        old_models.pop(cls._aerich, None)
        new_models.pop(cls._aerich, None)

        for new_model_str, new_model in new_models.items():
            if new_model_str not in old_models.keys():
                cls._add_operator(cls.add_model(new_model), upgrade)

@@ -207,6 +223,10 @@ class Migrate:
            if old_model not in new_models.keys():
                cls._add_operator(cls.remove_model(old_models.get(old_model)), upgrade)

    @classmethod
    def _is_fk_m2m(cls, field: Field):
        return isinstance(field, (ForeignKeyFieldInstance, ManyToManyFieldInstance))

    @classmethod
    def add_model(cls, model: Type[Model]):
        return cls.ddl.create_table(model)

@@ -247,6 +267,27 @@ class Migrate:
                )
                else:
                    old_field = old_fields_map.get(new_key)
                    new_field_dict = new_field.describe(serializable=True)
                    new_field_dict.pop("unique")
                    new_field_dict.pop("indexed")
                    old_field_dict = old_field.describe(serializable=True)
                    old_field_dict.pop("unique")
                    old_field_dict.pop("indexed")
                    if not cls._is_fk_m2m(new_field) and new_field_dict != old_field_dict:
                        if cls.dialect == "postgres":
                            if new_field.null != old_field.null:
                                cls._add_operator(
                                    cls._alter_null(new_model, new_field), upgrade=upgrade
                                )
                            if new_field.default != old_field.default:
                                cls._add_operator(
                                    cls._alter_default(new_model, new_field), upgrade=upgrade
                                )
                            if new_field.description != old_field.description:
                                cls._add_operator(
                                    cls._set_comment(new_model, new_field), upgrade=upgrade
                                )
                        cls._add_operator(cls._modify_field(new_model, new_field), upgrade=upgrade)
                    if (old_field.index and not new_field.index) or (
                        old_field.unique and not new_field.unique
                    ):

@@ -255,7 +296,7 @@ class Migrate:
                            old_model, (old_field.model_field_name,), old_field.unique
                        ),
                        upgrade,
                        isinstance(old_field, (ForeignKeyFieldInstance, ManyToManyFieldInstance)),
                        cls._is_fk_m2m(old_field),
                    )
                elif (new_field.index and not old_field.index) or (
                    new_field.unique and not old_field.unique

@@ -263,16 +304,14 @@ class Migrate:
                    cls._add_operator(
                        cls._add_index(new_model, (new_field.model_field_name,), new_field.unique),
                        upgrade,
                        isinstance(new_field, (ForeignKeyFieldInstance, ManyToManyFieldInstance)),
                        cls._is_fk_m2m(new_field),
                    )

        for old_key in old_keys:
            field = old_fields_map.get(old_key)
            if old_key not in new_keys and not cls._exclude_field(field, upgrade):
                cls._add_operator(
                    cls._remove_field(old_model, field),
                    upgrade,
                    isinstance(field, (ForeignKeyFieldInstance, ManyToManyFieldInstance)),
                    cls._remove_field(old_model, field), upgrade, cls._is_fk_m2m(field),
                )

        for new_index in new_indexes:

@@ -341,6 +380,22 @@ class Migrate:
            return cls.ddl.create_m2m_table(model, field)
        return cls.ddl.add_column(model, field)

    @classmethod
    def _alter_default(cls, model: Type[Model], field: Field):
        return cls.ddl.alter_column_default(model, field)

    @classmethod
    def _alter_null(cls, model: Type[Model], field: Field):
        return cls.ddl.alter_column_null(model, field)

    @classmethod
    def _set_comment(cls, model: Type[Model], field: Field):
        return cls.ddl.set_comment(model, field)

    @classmethod
    def _modify_field(cls, model: Type[Model], field: Field):
        return cls.ddl.modify_column(model, field)

    @classmethod
    def _remove_field(cls, model: Type[Model], field: Field):
        if isinstance(field, ForeignKeyFieldInstance):
@@ -11,7 +11,7 @@ def get_app_connection_name(config, app) -> str:
    :param app:
    :return:
    """
    return config.get("apps").get(app).get("default_connection")
    return config.get("apps").get(app).get("default_connection", "default")


def get_app_connection(config, app) -> BaseDBAsyncClient:
conftest.py (62 changed lines)

@@ -1,11 +1,61 @@

import asyncio
import os

import pytest
from tortoise.contrib.test import finalizer, initializer
from tortoise import Tortoise, expand_db_url, generate_schema_for_client
from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator
from tortoise.backends.mysql.schema_generator import MySQLSchemaGenerator
from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator

from aerich.ddl.mysql import MysqlDDL
from aerich.ddl.postgres import PostgresDDL
from aerich.ddl.sqlite import SqliteDDL
from aerich.migrate import Migrate

db_url = os.getenv("TEST_DB", "sqlite://:memory:")
tortoise_orm = {
    "connections": {"default": expand_db_url(db_url, True)},
    "apps": {
        "models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"},
    },
}


@pytest.fixture(scope="module", autouse=True)
def initialize_tests(request):
    db_url = os.getenv("TEST_DB", "sqlite://:memory:")
    initializer(["tests.models"], db_url=db_url)
    request.addfinalizer(finalizer)
@pytest.fixture(scope="function", autouse=True)
def reset_migrate():
    Migrate.upgrade_operators = []
    Migrate.downgrade_operators = []
    Migrate._upgrade_fk_m2m_index_operators = []
    Migrate._downgrade_fk_m2m_index_operators = []
    Migrate._upgrade_m2m = []
    Migrate._downgrade_m2m = []


@pytest.fixture(scope="session")
def loop():
    loop = asyncio.get_event_loop()
    return loop


@pytest.fixture(scope="session", autouse=True)
def initialize_tests(loop, request):
    tortoise_orm["connections"]["diff_models"] = "sqlite://:memory:"
    tortoise_orm["apps"]["diff_models"] = {
        "models": ["tests.diff_models"],
        "default_connection": "diff_models",
    }

    loop.run_until_complete(Tortoise.init(config=tortoise_orm, _create_db=True))
    loop.run_until_complete(
        generate_schema_for_client(Tortoise.get_connection("default"), safe=True)
    )

    client = Tortoise.get_connection("default")
    if client.schema_generator is MySQLSchemaGenerator:
        Migrate.ddl = MysqlDDL(client)
    elif client.schema_generator is SqliteSchemaGenerator:
        Migrate.ddl = SqliteDDL(client)
    elif client.schema_generator is AsyncpgSchemaGenerator:
        Migrate.ddl = PostgresDDL(client)

    request.addfinalizer(lambda: loop.run_until_complete(Tortoise._drop_databases()))
images/alipay.jpeg (new binary file, 75 KiB; not shown)
images/wechatpay.jpeg (new binary file, 76 KiB; not shown)
poetry.lock (generated, 358 changed lines)

@@ -1,8 +1,8 @@
|
||||
[[package]]
|
||||
category = "dev"
|
||||
category = "main"
|
||||
description = "MySQL driver for asyncio."
|
||||
name = "aiomysql"
|
||||
optional = false
|
||||
optional = true
|
||||
python-versions = "*"
|
||||
version = "0.0.20"
|
||||
|
||||
@@ -26,7 +26,7 @@ description = "High level compatibility layer for multiple asynchronous event lo
|
||||
name = "anyio"
|
||||
optional = false
|
||||
python-versions = ">=3.5.3"
|
||||
version = "1.3.0"
|
||||
version = "1.3.1"
|
||||
|
||||
[package.dependencies]
|
||||
async-generator = "*"
|
||||
@@ -78,10 +78,10 @@ dev = ["coverage", "pytest-runner", "pytest-trio", "pytest (>=3)", "sphinx", "to
|
||||
docs = ["sphinx"]
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
category = "main"
|
||||
description = "An asyncio PostgreSQL driver"
|
||||
name = "asyncpg"
|
||||
optional = false
|
||||
optional = true
|
||||
python-versions = ">=3.5.0"
|
||||
version = "0.20.1"
|
||||
|
||||
@@ -90,14 +90,6 @@ dev = ["Cython (0.29.14)", "pytest (>=3.6.0)", "Sphinx (>=1.7.3,<1.8.0)", "sphin
|
||||
docs = ["Sphinx (>=1.7.3,<1.8.0)", "sphinxcontrib-asyncio (>=0.2.0,<0.3.0)", "sphinx-rtd-theme (>=0.2.4,<0.3.0)"]
|
||||
test = ["pycodestyle (>=2.5.0,<2.6.0)", "flake8 (>=3.7.9,<3.8.0)", "uvloop (>=0.14.0,<0.15.0)"]
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Enhance the standard unittest package with features for testing asyncio libraries"
|
||||
name = "asynctest"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
version = "0.13.0"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Atomic file writes."
|
||||
@@ -121,6 +113,21 @@ dev = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.int
|
||||
docs = ["sphinx", "zope.interface"]
|
||||
tests = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"]
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Security oriented static analyser for python code."
|
||||
name = "bandit"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "1.6.2"
|
||||
|
||||
[package.dependencies]
|
||||
GitPython = ">=1.0.1"
|
||||
PyYAML = ">=3.13"
|
||||
colorama = ">=0.3.9"
|
||||
six = ">=1.10.0"
|
||||
stevedore = ">=1.20.0"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "The uncompromising code formatter."
|
||||
@@ -142,25 +149,16 @@ typed-ast = ">=1.4.0"
|
||||
d = ["aiohttp (>=3.3.2)", "aiohttp-cors"]
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
category = "main"
|
||||
description = "Foreign Function Interface for Python calling C code."
|
||||
name = "cffi"
|
||||
optional = false
|
||||
optional = true
|
||||
python-versions = "*"
|
||||
version = "1.14.0"
|
||||
|
||||
[package.dependencies]
|
||||
pycparser = "*"
|
||||
|
||||
[[package]]
|
||||
category = "main"
|
||||
description = "Fast ISO8601 date time parser for Python written in C"
|
||||
marker = "sys_platform != \"win32\" and implementation_name == \"cpython\""
|
||||
name = "ciso8601"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "2.1.3"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Composable command line interface toolkit"
|
||||
@@ -172,17 +170,17 @@ version = "7.1.2"
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Cross-platform colored terminal text."
|
||||
marker = "sys_platform == \"win32\""
|
||||
marker = "sys_platform == \"win32\" or platform_system == \"Windows\""
|
||||
name = "colorama"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||
version = "0.4.3"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
category = "main"
|
||||
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
|
||||
name = "cryptography"
|
||||
optional = false
|
||||
optional = true
|
||||
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
|
||||
version = "2.9.2"
|
||||
|
||||
@@ -217,17 +215,38 @@ description = "the modular source code checker: pep8 pyflakes and co"
|
||||
name = "flake8"
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
|
||||
version = "3.8.1"
|
||||
version = "3.8.3"
|
||||
|
||||
[package.dependencies]
|
||||
mccabe = ">=0.6.0,<0.7.0"
|
||||
pycodestyle = ">=2.6.0a1,<2.7.0"
|
||||
pyflakes = ">=2.2.0,<2.3.0"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Git Object Database"
|
||||
name = "gitdb"
|
||||
optional = false
|
||||
python-versions = ">=3.4"
|
||||
version = "4.0.5"
|
||||
|
||||
[package.dependencies]
|
||||
smmap = ">=3.0.1,<4"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Python Git Library"
|
||||
name = "gitpython"
|
||||
optional = false
|
||||
python-versions = ">=3.4"
|
||||
version = "3.1.3"
|
||||
|
||||
[package.dependencies]
|
||||
gitdb = ">=4.0.1,<5"
|
||||
|
||||
[[package]]
|
||||
category = "main"
|
||||
description = "Simple module to parse ISO 8601 dates"
|
||||
marker = "sys_platform == \"win32\" or implementation_name != \"cpython\""
|
||||
name = "iso8601"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
@@ -238,14 +257,12 @@ category = "dev"
|
||||
description = "A Python utility / library to sort Python imports."
|
||||
name = "isort"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
version = "4.3.21"
|
||||
python-versions = ">=3.6,<4.0"
|
||||
version = "5.0.3"
|
||||
|
||||
[package.extras]
|
||||
pipfile = ["pipreqs", "requirementslib"]
|
||||
pyproject = ["toml"]
|
||||
requirements = ["pipreqs", "pip-api"]
|
||||
xdg_home = ["appdirs (>=1.4.0)"]
|
||||
pipfile_deprecated_finder = ["pipreqs", "requirementslib", "tomlkit (>=0.5.3)"]
|
||||
requirements_deprecated_finder = ["pipreqs", "pip-api"]
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
@@ -261,31 +278,7 @@ description = "More routines for operating on iterables, beyond itertools"
|
||||
name = "more-itertools"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
version = "8.3.0"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Optional static typing for Python"
|
||||
name = "mypy"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
version = "0.770"
|
||||
|
||||
[package.dependencies]
|
||||
mypy-extensions = ">=0.4.3,<0.5.0"
|
||||
typed-ast = ">=1.4.0,<1.5.0"
|
||||
typing-extensions = ">=3.7.4"
|
||||
|
||||
[package.extras]
|
||||
dmypy = ["psutil (>=4.0)"]
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Experimental type system extensions for programs checked with the mypy typechecker."
|
||||
name = "mypy-extensions"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "0.4.3"
|
||||
version = "8.4.0"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
@@ -307,6 +300,14 @@ optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||
version = "0.8.0"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Python Build Reasonableness"
|
||||
name = "pbr"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "5.4.5"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "plugin and hook calling mechanisms for python"
|
||||
@@ -324,7 +325,7 @@ description = "library with cross-python path, ini-parsing, io, code, log facili
|
||||
name = "py"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
version = "1.8.1"
|
||||
version = "1.9.0"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
@@ -335,10 +336,10 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
version = "2.6.0"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
category = "main"
|
||||
description = "C parser in Python"
|
||||
name = "pycparser"
|
||||
optional = false
|
||||
optional = true
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
version = "2.20"
|
||||
|
||||
@@ -364,10 +365,10 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
version = "2.2.0"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
category = "main"
|
||||
description = "Pure Python MySQL Driver"
|
||||
name = "pymysql"
|
||||
optional = false
|
||||
optional = true
|
||||
python-versions = "*"
|
||||
version = "0.9.2"
|
||||
|
||||
@@ -388,7 +389,7 @@ description = "A SQL query builder API for Python"
|
||||
name = "pypika"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "0.37.6"
|
||||
version = "0.37.15"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
@@ -396,7 +397,7 @@ description = "pytest: simple powerful testing with Python"
|
||||
name = "pytest"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
version = "5.4.2"
|
||||
version = "5.4.3"
|
||||
|
||||
[package.dependencies]
|
||||
atomicwrites = ">=1.0"
|
||||
@@ -412,13 +413,27 @@ wcwidth = "*"
|
||||
checkqa-mypy = ["mypy (v0.761)"]
|
||||
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Pytest support for asyncio."
|
||||
name = "pytest-asyncio"
|
||||
optional = false
|
||||
python-versions = ">= 3.5"
|
||||
version = "0.14.0"
|
||||
|
||||
[package.dependencies]
|
||||
pytest = ">=5.4.0"
|
||||
|
||||
[package.extras]
|
||||
testing = ["async-generator (>=1.3)", "coverage", "hypothesis (>=5.7.1)"]
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "run tests in isolated forked subprocesses"
|
||||
name = "pytest-forked"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
version = "1.1.3"
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||
version = "1.2.0"
|
||||
|
||||
[package.dependencies]
|
||||
pytest = ">=3.1.0"
|
||||
@@ -440,21 +455,37 @@ six = "*"
|
||||
[package.extras]
|
||||
testing = ["filelock"]
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "YAML parser and emitter for Python"
|
||||
name = "pyyaml"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||
version = "5.3.1"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Alternative regular expression module, to replace re."
|
||||
name = "regex"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "2020.5.14"
|
||||
version = "2020.6.8"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
category = "main"
|
||||
description = "Python 2 and 3 compatibility utilities"
|
||||
name = "six"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
|
||||
version = "1.14.0"
|
||||
version = "1.15.0"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "A pure Python implementation of a sliding window memory map manager"
|
||||
name = "smmap"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
version = "3.0.4"
|
||||
|
||||
[[package]]
|
||||
category = "main"
|
||||
@@ -466,14 +497,14 @@ version = "1.1.0"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "tasks runner for python projects"
|
||||
name = "taskipy"
|
||||
description = "Manage dynamic plugins for Python applications"
|
||||
name = "stevedore"
|
||||
optional = false
|
||||
python-versions = ">=3.6,<4.0"
|
||||
version = "1.2.1"
|
||||
python-versions = ">=3.6"
|
||||
version = "2.0.1"
|
||||
|
||||
[package.dependencies]
|
||||
toml = ">=0.10.0,<0.11.0"
|
||||
pbr = ">=2.0.0,<2.1.0 || >2.1.0"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
@@ -489,19 +520,17 @@ description = "Easy async ORM for python, built with relations in mind"
|
||||
name = "tortoise-orm"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "0.16.11"
|
||||
version = "0.16.13"
|
||||
|
||||
[package.dependencies]
|
||||
aiosqlite = ">=0.11.0"
|
||||
ciso8601 = ">=2.1.2"
|
||||
iso8601 = ">=0.1.12"
|
||||
pypika = ">=0.36.5"
|
||||
typing-extensions = ">=3.7"
|
||||
|
||||
[package.source]
|
||||
reference = "1f67b7a0ca1384365d6ff89d9e245e733166d1a6"
|
||||
type = "git"
|
||||
url = "https://github.com/long2ice/tortoise-orm.git"
|
||||
[package.extras]
|
||||
accel = ["python-rapidjson", "ciso8601 (>=2.1.2)", "uvloop (>=0.12.0)"]
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "a fork of Python 2 and 3 ast modules with type comment support"
|
||||
@@ -520,14 +549,17 @@ version = "3.7.4.2"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Measures number of Terminal column cells of wide-character codes"
|
||||
description = "Measures the displayed width of unicode strings in a terminal"
|
||||
name = "wcwidth"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "0.1.9"
|
||||
version = "0.2.5"
|
||||
|
||||
[extras]
|
||||
dbdrivers = ["aiomysql", "asyncpg"]
|
||||
|
||||
[metadata]
|
||||
content-hash = "58a032bbb47859e87d2bce036af24149060cc531ff9220a14f6cd48db6252f39"
|
||||
content-hash = "485702557f7b65db14e44d459fb1c985f56caf4b14857006ef96304d6516d976"
|
||||
python-versions = "^3.8"
|
||||
|
||||
[metadata.files]
|
||||
@@ -540,8 +572,8 @@ aiosqlite = [
|
||||
{file = "aiosqlite-0.13.0.tar.gz", hash = "sha256:6e92961ae9e606b43b05e29b129e346b29e400fcbd63e3c0c564d89230257645"},
|
||||
]
|
||||
anyio = [
|
||||
{file = "anyio-1.3.0-py3-none-any.whl", hash = "sha256:db2c3d21576870b95d4fd0b8f4a0f9c64057f777c578f3a8127179a17c8c067e"},
|
||||
{file = "anyio-1.3.0.tar.gz", hash = "sha256:7deae0315dd10aa41c21528b83352e4b52f44e6153a21081a3d1cd8c03728e46"},
|
||||
{file = "anyio-1.3.1-py3-none-any.whl", hash = "sha256:f21b4fafeec1b7db81e09a907e44e374a1e39718d782a488fdfcdcf949c8950c"},
|
||||
{file = "anyio-1.3.1.tar.gz", hash = "sha256:a46bb2b7743455434afd9adea848a3c4e0b7321aee3e9d08844b11d348d3b5a0"},
|
||||
]
|
||||
apipkg = [
|
||||
{file = "apipkg-1.5-py2.py3-none-any.whl", hash = "sha256:58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c"},
|
||||
@@ -581,10 +613,6 @@ asyncpg = [
|
||||
{file = "asyncpg-0.20.1-cp38-cp38-win_amd64.whl", hash = "sha256:2af6a5a705accd36e13292ea43d08c20b15e52d684beb522cb3a7d3c9c8f3f48"},
|
||||
{file = "asyncpg-0.20.1.tar.gz", hash = "sha256:394bf19bdddbba07a38cd6fb526ebf66e120444d6b3097332b78efd5b26495b0"},
|
||||
]
|
||||
asynctest = [
|
||||
{file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"},
|
||||
{file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"},
|
||||
]
|
||||
atomicwrites = [
|
||||
{file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
|
||||
{file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
|
||||
@@ -593,6 +621,10 @@ attrs = [
|
||||
{file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"},
|
||||
{file = "attrs-19.3.0.tar.gz", hash = "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"},
|
||||
]
|
||||
bandit = [
|
||||
{file = "bandit-1.6.2-py2.py3-none-any.whl", hash = "sha256:336620e220cf2d3115877685e264477ff9d9abaeb0afe3dc7264f55fa17a3952"},
|
||||
{file = "bandit-1.6.2.tar.gz", hash = "sha256:41e75315853507aa145d62a78a2a6c5e3240fe14ee7c601459d0df9418196065"},
|
||||
]
|
||||
black = [
|
||||
{file = "black-19.10b0-py36-none-any.whl", hash = "sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b"},
|
||||
{file = "black-19.10b0.tar.gz", hash = "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539"},
|
||||
@@ -627,9 +659,6 @@ cffi = [
|
||||
{file = "cffi-1.14.0-cp38-cp38-win_amd64.whl", hash = "sha256:00789914be39dffba161cfc5be31b55775de5ba2235fe49aa28c148236c4e06b"},
|
||||
{file = "cffi-1.14.0.tar.gz", hash = "sha256:2d384f4a127a15ba701207f7639d94106693b6cd64173d6c8988e2c25f3ac2b6"},
|
||||
]
|
||||
ciso8601 = [
|
||||
{file = "ciso8601-2.1.3.tar.gz", hash = "sha256:bdbb5b366058b1c87735603b23060962c439ac9be66f1ae91e8c7dbd7d59e262"},
|
||||
]
|
||||
click = [
|
||||
{file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"},
|
||||
{file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"},
|
||||
@@ -664,8 +693,16 @@ execnet = [
|
||||
{file = "execnet-1.7.1.tar.gz", hash = "sha256:cacb9df31c9680ec5f95553976c4da484d407e85e41c83cb812aa014f0eddc50"},
|
||||
]
|
||||
flake8 = [
|
||||
{file = "flake8-3.8.1-py2.py3-none-any.whl", hash = "sha256:6c1193b0c3f853ef763969238f6c81e9e63ace9d024518edc020d5f1d6d93195"},
|
||||
{file = "flake8-3.8.1.tar.gz", hash = "sha256:ea6623797bf9a52f4c9577d780da0bb17d65f870213f7b5bcc9fca82540c31d5"},
|
||||
{file = "flake8-3.8.3-py2.py3-none-any.whl", hash = "sha256:15e351d19611c887e482fb960eae4d44845013cc142d42896e9862f775d8cf5c"},
|
||||
{file = "flake8-3.8.3.tar.gz", hash = "sha256:f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208"},
|
||||
]
|
||||
gitdb = [
|
||||
{file = "gitdb-4.0.5-py3-none-any.whl", hash = "sha256:91f36bfb1ab7949b3b40e23736db18231bf7593edada2ba5c3a174a7b23657ac"},
|
||||
{file = "gitdb-4.0.5.tar.gz", hash = "sha256:c9e1f2d0db7ddb9a704c2a0217be31214e91a4fe1dea1efad19ae42ba0c285c9"},
|
||||
]
|
||||
gitpython = [
|
||||
{file = "GitPython-3.1.3-py3-none-any.whl", hash = "sha256:ef1d60b01b5ce0040ad3ec20bc64f783362d41fa0822a2742d3586e1f49bb8ac"},
|
||||
{file = "GitPython-3.1.3.tar.gz", hash = "sha256:e107af4d873daed64648b4f4beb89f89f0cfbe3ef558fc7821ed2331c2f8da1a"},
|
||||
]
|
||||
iso8601 = [
|
||||
{file = "iso8601-0.1.12-py2.py3-none-any.whl", hash = "sha256:210e0134677cc0d02f6028087fee1df1e1d76d372ee1db0bf30bf66c5c1c89a3"},
|
||||
@@ -673,36 +710,16 @@ iso8601 = [
|
||||
{file = "iso8601-0.1.12.tar.gz", hash = "sha256:49c4b20e1f38aa5cf109ddcd39647ac419f928512c869dc01d5c7098eddede82"},
|
||||
]
|
||||
isort = [
|
||||
{file = "isort-4.3.21-py2.py3-none-any.whl", hash = "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"},
|
||||
{file = "isort-4.3.21.tar.gz", hash = "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1"},
|
||||
{file = "isort-5.0.3-py3-none-any.whl", hash = "sha256:3fbfad425b0a08a2969c5e1821d88785c210a08656c029c28931a1620f2d0f12"},
|
||||
{file = "isort-5.0.3.tar.gz", hash = "sha256:4c48d4cd773a6226baaaa176839e6f7ff82ef7c7842f6c54374fe2b14df4024b"},
|
||||
]
|
||||
mccabe = [
|
||||
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
|
||||
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
|
||||
]
|
||||
more-itertools = [
|
||||
{file = "more-itertools-8.3.0.tar.gz", hash = "sha256:558bb897a2232f5e4f8e2399089e35aecb746e1f9191b6584a151647e89267be"},
|
||||
{file = "more_itertools-8.3.0-py3-none-any.whl", hash = "sha256:7818f596b1e87be009031c7653d01acc46ed422e6656b394b0f765ce66ed4982"},
|
||||
]
|
||||
mypy = [
|
||||
{file = "mypy-0.770-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:a34b577cdf6313bf24755f7a0e3f3c326d5c1f4fe7422d1d06498eb25ad0c600"},
|
||||
{file = "mypy-0.770-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:86c857510a9b7c3104cf4cde1568f4921762c8f9842e987bc03ed4f160925754"},
|
||||
{file = "mypy-0.770-cp35-cp35m-win_amd64.whl", hash = "sha256:a8ffcd53cb5dfc131850851cc09f1c44689c2812d0beb954d8138d4f5fc17f65"},
|
||||
{file = "mypy-0.770-cp36-cp36m-macosx_10_6_x86_64.whl", hash = "sha256:7687f6455ec3ed7649d1ae574136835a4272b65b3ddcf01ab8704ac65616c5ce"},
|
||||
{file = "mypy-0.770-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3beff56b453b6ef94ecb2996bea101a08f1f8a9771d3cbf4988a61e4d9973761"},
|
||||
{file = "mypy-0.770-cp36-cp36m-win_amd64.whl", hash = "sha256:15b948e1302682e3682f11f50208b726a246ab4e6c1b39f9264a8796bb416aa2"},
|
||||
{file = "mypy-0.770-cp37-cp37m-macosx_10_6_x86_64.whl", hash = "sha256:b90928f2d9eb2f33162405f32dde9f6dcead63a0971ca8a1b50eb4ca3e35ceb8"},
|
||||
{file = "mypy-0.770-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:c56ffe22faa2e51054c5f7a3bc70a370939c2ed4de308c690e7949230c995913"},
|
||||
{file = "mypy-0.770-cp37-cp37m-win_amd64.whl", hash = "sha256:8dfb69fbf9f3aeed18afffb15e319ca7f8da9642336348ddd6cab2713ddcf8f9"},
|
||||
{file = "mypy-0.770-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:219a3116ecd015f8dca7b5d2c366c973509dfb9a8fc97ef044a36e3da66144a1"},
|
||||
{file = "mypy-0.770-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7ec45a70d40ede1ec7ad7f95b3c94c9cf4c186a32f6bacb1795b60abd2f9ef27"},
|
||||
{file = "mypy-0.770-cp38-cp38-win_amd64.whl", hash = "sha256:f91c7ae919bbc3f96cd5e5b2e786b2b108343d1d7972ea130f7de27fdd547cf3"},
|
||||
{file = "mypy-0.770-py3-none-any.whl", hash = "sha256:3b1fc683fb204c6b4403a1ef23f0b1fac8e4477091585e0c8c54cbdf7d7bb164"},
|
||||
{file = "mypy-0.770.tar.gz", hash = "sha256:8a627507ef9b307b46a1fea9513d5c98680ba09591253082b4c48697ba05a4ae"},
|
||||
]
|
||||
mypy-extensions = [
|
||||
{file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
|
||||
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
|
||||
{file = "more-itertools-8.4.0.tar.gz", hash = "sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5"},
|
||||
{file = "more_itertools-8.4.0-py3-none-any.whl", hash = "sha256:b78134b2063dd214000685165d81c154522c3ee0a1c0d4d113c80361c234c5a2"},
|
||||
]
|
||||
packaging = [
|
||||
{file = "packaging-20.4-py2.py3-none-any.whl", hash = "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"},
|
||||
@@ -712,13 +729,17 @@ pathspec = [
|
||||
{file = "pathspec-0.8.0-py2.py3-none-any.whl", hash = "sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0"},
|
||||
{file = "pathspec-0.8.0.tar.gz", hash = "sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061"},
|
||||
]
|
||||
pbr = [
|
||||
{file = "pbr-5.4.5-py2.py3-none-any.whl", hash = "sha256:579170e23f8e0c2f24b0de612f71f648eccb79fb1322c814ae6b3c07b5ba23e8"},
|
||||
{file = "pbr-5.4.5.tar.gz", hash = "sha256:07f558fece33b05caf857474a366dfcc00562bca13dd8b47b2b3e22d9f9bf55c"},
|
||||
]
|
||||
pluggy = [
|
||||
{file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"},
|
||||
{file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
|
||||
]
|
||||
py = [
|
||||
{file = "py-1.8.1-py2.py3-none-any.whl", hash = "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0"},
|
||||
{file = "py-1.8.1.tar.gz", hash = "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa"},
|
||||
{file = "py-1.9.0-py2.py3-none-any.whl", hash = "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2"},
|
||||
{file = "py-1.9.0.tar.gz", hash = "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342"},
|
||||
]
|
||||
pycodestyle = [
|
||||
{file = "pycodestyle-2.6.0-py2.py3-none-any.whl", hash = "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367"},
|
||||
@@ -760,60 +781,83 @@ pyparsing = [
|
||||
{file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
|
||||
]
|
||||
pypika = [
|
||||
{file = "PyPika-0.37.6.tar.gz", hash = "sha256:64510fa36667e8bb654bdc1be5a3a77bac1dbc2f03d4848efac08e39d9cac6f5"},
|
||||
{file = "PyPika-0.37.15.tar.gz", hash = "sha256:f00c217330d91bbb5bc26c4cb54a8016b899fb8d5a8f7cb25b30861260fa4239"},
|
||||
]
|
||||
pytest = [
|
||||
{file = "pytest-5.4.2-py3-none-any.whl", hash = "sha256:95c710d0a72d91c13fae35dce195633c929c3792f54125919847fdcdf7caa0d3"},
|
||||
{file = "pytest-5.4.2.tar.gz", hash = "sha256:eb2b5e935f6a019317e455b6da83dd8650ac9ffd2ee73a7b657a30873d67a698"},
|
||||
{file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"},
|
||||
{file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"},
|
||||
]
|
||||
pytest-asyncio = [
|
||||
{file = "pytest-asyncio-0.14.0.tar.gz", hash = "sha256:9882c0c6b24429449f5f969a5158b528f39bde47dc32e85b9f0403965017e700"},
|
||||
{file = "pytest_asyncio-0.14.0-py3-none-any.whl", hash = "sha256:2eae1e34f6c68fc0a9dc12d4bea190483843ff4708d24277c41568d6b6044f1d"},
|
||||
]
|
||||
pytest-forked = [
|
||||
{file = "pytest-forked-1.1.3.tar.gz", hash = "sha256:1805699ed9c9e60cb7a8179b8d4fa2b8898098e82d229b0825d8095f0f261100"},
|
||||
{file = "pytest_forked-1.1.3-py2.py3-none-any.whl", hash = "sha256:1ae25dba8ee2e56fb47311c9638f9e58552691da87e82d25b0ce0e4bf52b7d87"},
|
||||
{file = "pytest-forked-1.2.0.tar.gz", hash = "sha256:65f96334863d9cbe53d21f73e8febc4dd61b8d1fdcac7b487d9af07a5d02a938"},
|
||||
{file = "pytest_forked-1.2.0-py2.py3-none-any.whl", hash = "sha256:42a438336731465c5bd76ab38e1645647ac55914a08b507efbabe8783a08aa6c"},
|
||||
]
|
||||
pytest-xdist = [
|
||||
{file = "pytest-xdist-1.32.0.tar.gz", hash = "sha256:1d4166dcac69adb38eeaedb88c8fada8588348258a3492ab49ba9161f2971129"},
|
||||
{file = "pytest_xdist-1.32.0-py2.py3-none-any.whl", hash = "sha256:ba5ec9fde3410bd9a116ff7e4f26c92e02fa3d27975ef3ad03f330b3d4b54e91"},
|
||||
]
|
||||
pyyaml = [
|
||||
{file = "PyYAML-5.3.1-cp27-cp27m-win32.whl", hash = "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f"},
|
||||
{file = "PyYAML-5.3.1-cp27-cp27m-win_amd64.whl", hash = "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76"},
|
||||
{file = "PyYAML-5.3.1-cp35-cp35m-win32.whl", hash = "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2"},
|
||||
{file = "PyYAML-5.3.1-cp35-cp35m-win_amd64.whl", hash = "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c"},
|
||||
{file = "PyYAML-5.3.1-cp36-cp36m-win32.whl", hash = "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2"},
|
||||
{file = "PyYAML-5.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648"},
|
||||
{file = "PyYAML-5.3.1-cp37-cp37m-win32.whl", hash = "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"},
|
||||
{file = "PyYAML-5.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf"},
|
||||
{file = "PyYAML-5.3.1-cp38-cp38-win32.whl", hash = "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97"},
|
||||
{file = "PyYAML-5.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee"},
|
||||
{file = "PyYAML-5.3.1.tar.gz", hash = "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"},
|
||||
]
|
||||
regex = [
|
||||
{file = "regex-2020.5.14-cp27-cp27m-win32.whl", hash = "sha256:e565569fc28e3ba3e475ec344d87ed3cd8ba2d575335359749298a0899fe122e"},
|
||||
{file = "regex-2020.5.14-cp27-cp27m-win_amd64.whl", hash = "sha256:d466967ac8e45244b9dfe302bbe5e3337f8dc4dec8d7d10f5e950d83b140d33a"},
|
||||
{file = "regex-2020.5.14-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:27ff7325b297fb6e5ebb70d10437592433601c423f5acf86e5bc1ee2919b9561"},
|
||||
{file = "regex-2020.5.14-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ea55b80eb0d1c3f1d8d784264a6764f931e172480a2f1868f2536444c5f01e01"},
|
||||
{file = "regex-2020.5.14-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:c9bce6e006fbe771a02bda468ec40ffccbf954803b470a0345ad39c603402577"},
|
||||
{file = "regex-2020.5.14-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:d881c2e657c51d89f02ae4c21d9adbef76b8325fe4d5cf0e9ad62f850f3a98fd"},
|
||||
{file = "regex-2020.5.14-cp36-cp36m-win32.whl", hash = "sha256:99568f00f7bf820c620f01721485cad230f3fb28f57d8fbf4a7967ec2e446994"},
|
||||
{file = "regex-2020.5.14-cp36-cp36m-win_amd64.whl", hash = "sha256:70c14743320a68c5dac7fc5a0f685be63bc2024b062fe2aaccc4acc3d01b14a1"},
|
||||
{file = "regex-2020.5.14-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:a7c37f048ec3920783abab99f8f4036561a174f1314302ccfa4e9ad31cb00eb4"},
|
||||
{file = "regex-2020.5.14-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:89d76ce33d3266173f5be80bd4efcbd5196cafc34100fdab814f9b228dee0fa4"},
|
||||
{file = "regex-2020.5.14-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:51f17abbe973c7673a61863516bdc9c0ef467407a940f39501e786a07406699c"},
|
||||
{file = "regex-2020.5.14-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:ce5cc53aa9fbbf6712e92c7cf268274eaff30f6bd12a0754e8133d85a8fb0f5f"},
|
||||
{file = "regex-2020.5.14-cp37-cp37m-win32.whl", hash = "sha256:8044d1c085d49673aadb3d7dc20ef5cb5b030c7a4fa253a593dda2eab3059929"},
|
||||
{file = "regex-2020.5.14-cp37-cp37m-win_amd64.whl", hash = "sha256:c2062c7d470751b648f1cacc3f54460aebfc261285f14bc6da49c6943bd48bdd"},
|
||||
{file = "regex-2020.5.14-cp38-cp38-manylinux1_i686.whl", hash = "sha256:329ba35d711e3428db6b45a53b1b13a0a8ba07cbbcf10bbed291a7da45f106c3"},
|
||||
{file = "regex-2020.5.14-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:579ea215c81d18da550b62ff97ee187b99f1b135fd894a13451e00986a080cad"},
|
||||
{file = "regex-2020.5.14-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:3a9394197664e35566242686d84dfd264c07b20f93514e2e09d3c2b3ffdf78fe"},
|
||||
{file = "regex-2020.5.14-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ce367d21f33e23a84fb83a641b3834dd7dd8e9318ad8ff677fbfae5915a239f7"},
|
||||
{file = "regex-2020.5.14-cp38-cp38-win32.whl", hash = "sha256:1386e75c9d1574f6aa2e4eb5355374c8e55f9aac97e224a8a5a6abded0f9c927"},
|
||||
{file = "regex-2020.5.14-cp38-cp38-win_amd64.whl", hash = "sha256:7e61be8a2900897803c293247ef87366d5df86bf701083b6c43119c7c6c99108"},
|
||||
{file = "regex-2020.5.14.tar.gz", hash = "sha256:ce450ffbfec93821ab1fea94779a8440e10cf63819be6e176eb1973a6017aff5"},
|
||||
{file = "regex-2020.6.8-cp27-cp27m-win32.whl", hash = "sha256:fbff901c54c22425a5b809b914a3bfaf4b9570eee0e5ce8186ac71eb2025191c"},
|
||||
{file = "regex-2020.6.8-cp27-cp27m-win_amd64.whl", hash = "sha256:112e34adf95e45158c597feea65d06a8124898bdeac975c9087fe71b572bd938"},
|
||||
{file = "regex-2020.6.8-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:92d8a043a4241a710c1cf7593f5577fbb832cf6c3a00ff3fc1ff2052aff5dd89"},
|
||||
{file = "regex-2020.6.8-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:bae83f2a56ab30d5353b47f9b2a33e4aac4de9401fb582b55c42b132a8ac3868"},
|
||||
{file = "regex-2020.6.8-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:b2ba0f78b3ef375114856cbdaa30559914d081c416b431f2437f83ce4f8b7f2f"},
|
||||
{file = "regex-2020.6.8-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:95fa7726d073c87141f7bbfb04c284901f8328e2d430eeb71b8ffdd5742a5ded"},
|
||||
{file = "regex-2020.6.8-cp36-cp36m-win32.whl", hash = "sha256:e3cdc9423808f7e1bb9c2e0bdb1c9dc37b0607b30d646ff6faf0d4e41ee8fee3"},
|
||||
{file = "regex-2020.6.8-cp36-cp36m-win_amd64.whl", hash = "sha256:c78e66a922de1c95a208e4ec02e2e5cf0bb83a36ceececc10a72841e53fbf2bd"},
|
||||
{file = "regex-2020.6.8-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:08997a37b221a3e27d68ffb601e45abfb0093d39ee770e4257bd2f5115e8cb0a"},
|
||||
{file = "regex-2020.6.8-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:2f6f211633ee8d3f7706953e9d3edc7ce63a1d6aad0be5dcee1ece127eea13ae"},
|
||||
{file = "regex-2020.6.8-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:55b4c25cbb3b29f8d5e63aeed27b49fa0f8476b0d4e1b3171d85db891938cc3a"},
|
||||
{file = "regex-2020.6.8-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:89cda1a5d3e33ec9e231ece7307afc101b5217523d55ef4dc7fb2abd6de71ba3"},
|
||||
{file = "regex-2020.6.8-cp37-cp37m-win32.whl", hash = "sha256:690f858d9a94d903cf5cada62ce069b5d93b313d7d05456dbcd99420856562d9"},
|
||||
{file = "regex-2020.6.8-cp37-cp37m-win_amd64.whl", hash = "sha256:1700419d8a18c26ff396b3b06ace315b5f2a6e780dad387e4c48717a12a22c29"},
|
||||
{file = "regex-2020.6.8-cp38-cp38-manylinux1_i686.whl", hash = "sha256:654cb773b2792e50151f0e22be0f2b6e1c3a04c5328ff1d9d59c0398d37ef610"},
|
||||
{file = "regex-2020.6.8-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:52e1b4bef02f4040b2fd547357a170fc1146e60ab310cdbdd098db86e929b387"},
|
||||
{file = "regex-2020.6.8-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:cf59bbf282b627130f5ba68b7fa3abdb96372b24b66bdf72a4920e8153fc7910"},
|
||||
{file = "regex-2020.6.8-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:5aaa5928b039ae440d775acea11d01e42ff26e1561c0ffcd3d805750973c6baf"},
|
||||
{file = "regex-2020.6.8-cp38-cp38-win32.whl", hash = "sha256:97712e0d0af05febd8ab63d2ef0ab2d0cd9deddf4476f7aa153f76feef4b2754"},
|
||||
{file = "regex-2020.6.8-cp38-cp38-win_amd64.whl", hash = "sha256:6ad8663c17db4c5ef438141f99e291c4d4edfeaacc0ce28b5bba2b0bf273d9b5"},
|
||||
{file = "regex-2020.6.8.tar.gz", hash = "sha256:e9b64e609d37438f7d6e68c2546d2cb8062f3adb27e6336bc129b51be20773ac"},
|
||||
]
|
||||
six = [
|
||||
{file = "six-1.14.0-py2.py3-none-any.whl", hash = "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"},
|
||||
{file = "six-1.14.0.tar.gz", hash = "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a"},
|
||||
{file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"},
|
||||
{file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"},
|
||||
]
|
||||
smmap = [
|
||||
{file = "smmap-3.0.4-py2.py3-none-any.whl", hash = "sha256:54c44c197c819d5ef1991799a7e30b662d1e520f2ac75c9efbeb54a742214cf4"},
|
||||
{file = "smmap-3.0.4.tar.gz", hash = "sha256:9c98bbd1f9786d22f14b3d4126894d56befb835ec90cef151af566c7e19b5d24"},
|
||||
]
|
||||
sniffio = [
|
||||
{file = "sniffio-1.1.0-py3-none-any.whl", hash = "sha256:20ed6d5b46f8ae136d00b9dcb807615d83ed82ceea6b2058cecb696765246da5"},
|
||||
{file = "sniffio-1.1.0.tar.gz", hash = "sha256:8e3810100f69fe0edd463d02ad407112542a11ffdc29f67db2bf3771afb87a21"},
|
||||
]
|
||||
taskipy = [
|
||||
{file = "taskipy-1.2.1-py3-none-any.whl", hash = "sha256:99bdaf5b19791c2345806847147e0fc2d28e1ac9446058def5a8b6b3fc9f23e2"},
|
||||
{file = "taskipy-1.2.1.tar.gz", hash = "sha256:5eb2c3b1606c896c7fa799848e71e8883b880759224958d07ba760e5db263175"},
|
||||
stevedore = [
|
||||
{file = "stevedore-2.0.1-py3-none-any.whl", hash = "sha256:c4724f8d7b8f6be42130663855d01a9c2414d6046055b5a65ab58a0e38637688"},
|
||||
{file = "stevedore-2.0.1.tar.gz", hash = "sha256:609912b87df5ad338ff8e44d13eaad4f4170a65b79ae9cb0aa5632598994a1b7"},
|
||||
]
|
||||
toml = [
|
||||
{file = "toml-0.10.1-py2.py3-none-any.whl", hash = "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"},
|
||||
{file = "toml-0.10.1.tar.gz", hash = "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f"},
|
||||
]
|
||||
tortoise-orm = []
|
||||
tortoise-orm = [
|
||||
{file = "tortoise-orm-0.16.13.tar.gz", hash = "sha256:5f6fa4430a570172cb49517a97d45338dbfb1a690ed707030467efd154e67855"},
|
||||
]
|
||||
typed-ast = [
|
||||
{file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"},
|
||||
{file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"},
|
||||
@@ -843,6 +887,6 @@ typing-extensions = [
|
||||
{file = "typing_extensions-3.7.4.2.tar.gz", hash = "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae"},
|
||||
]
|
||||
wcwidth = [
|
||||
{file = "wcwidth-0.1.9-py2.py3-none-any.whl", hash = "sha256:cafe2186b3c009a04067022ce1dcd79cb38d8d65ee4f4791b8888d6599d1bbe1"},
|
||||
{file = "wcwidth-0.1.9.tar.gz", hash = "sha256:ee73862862a156bf77ff92b09034fc4825dd3af9cf81bc5b360668d425f3c5f1"},
|
||||
{file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"},
|
||||
{file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"},
|
||||
]
|
||||
|
@@ -1,32 +1,42 @@
[tool.poetry]
name = "aerich"
version = "0.1.1"
version = "0.2.1"
description = "A database migrations tool for Tortoise ORM."
authors = ["long2ice <long2ice@gmail.com>"]
license = "Apache-2.0"
readme = "README.md"
homepage = "https://github.com/long2ice/aerich"
repository = "https://github.com/long2ice/aerich.git"
documentation = "https://github.com/long2ice/aerich"
keywords = ["migrate", "Tortoise-ORM", "mysql"]
packages = [
    { include = "aerich" }
]
include = ["CHANGELOG.rst", "LICENSE", "README.md"]

[tool.poetry.dependencies]
python = "^3.8"
tortoise-orm = {git = "https://github.com/long2ice/tortoise-orm.git", branch = "develop"}
tortoise-orm = "*"
asyncclick = "*"
pydantic = "*"
aiomysql = {version = "*", optional = true}
asyncpg = {version = "*", optional = true}

[tool.poetry.dev-dependencies]
taskipy = "*"
asynctest = "*"
flake8 = "*"
isort = "*"
black = "^19.10b0"
pytest = "*"
aiomysql = "*"
asyncpg = "*"
pytest-xdist = "*"
mypy = "*"
pytest-asyncio = "*"
bandit = "*"

[tool.taskipy.tasks]
export = "poetry export -f requirements.txt --without-hashes > requirements.txt"
export-dev = "poetry export -f requirements.txt --dev --without-hashes > requirements-dev.txt"
[tool.poetry.extras]
dbdrivers = ["aiomysql", "asyncpg"]

[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"

[tool.poetry.scripts]
aerich = "aerich.cli:main"
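Two packaging changes in the hunk above are worth calling out: the MySQL and PostgreSQL drivers become optional and are grouped under a dbdrivers extra, and the CLI is exposed through a Poetry script entry point (aerich = "aerich.cli:main"). In practice that should mean the drivers are installed explicitly, for example pip install "aerich[dbdrivers]" or poetry install -E dbdrivers during development; these are standard pip/Poetry invocations rather than commands stated in this diff.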
@@ -1,2 +0,0 @@
[pytest]
addopts = -p no:warnings --ignore=src
@@ -1,48 +0,0 @@
|
||||
aiomysql==0.0.20
|
||||
aiosqlite==0.13.0
|
||||
anyio==1.3.0
|
||||
apipkg==1.5
|
||||
appdirs==1.4.4
|
||||
async-generator==1.10
|
||||
asyncclick==7.0.9
|
||||
asyncpg==0.20.1
|
||||
asynctest==0.13.0
|
||||
atomicwrites==1.4.0; sys_platform == "win32"
|
||||
attrs==19.3.0
|
||||
black==19.10b0
|
||||
cffi==1.14.0
|
||||
ciso8601==2.1.3; sys_platform != "win32" and implementation_name == "cpython"
|
||||
click==7.1.2
|
||||
colorama==0.4.3; sys_platform == "win32"
|
||||
cryptography==2.9.2
|
||||
execnet==1.7.1
|
||||
flake8==3.8.1
|
||||
iso8601==0.1.12; sys_platform == "win32" or implementation_name != "cpython"
|
||||
isort==4.3.21
|
||||
mccabe==0.6.1
|
||||
more-itertools==8.3.0
|
||||
mypy==0.770
|
||||
mypy-extensions==0.4.3
|
||||
packaging==20.4
|
||||
pathspec==0.8.0
|
||||
pluggy==0.13.1
|
||||
py==1.8.1
|
||||
pycodestyle==2.6.0
|
||||
pycparser==2.20
|
||||
pydantic==1.5.1
|
||||
pyflakes==2.2.0
|
||||
pymysql==0.9.2
|
||||
pyparsing==2.4.7
|
||||
pypika==0.37.6
|
||||
pytest==5.4.2
|
||||
pytest-forked==1.1.3
|
||||
pytest-xdist==1.32.0
|
||||
regex==2020.5.14
|
||||
six==1.14.0
|
||||
sniffio==1.1.0
|
||||
taskipy==1.2.1
|
||||
toml==0.10.1
|
||||
-e git+https://github.com/long2ice/tortoise-orm.git@1f67b7a0ca1384365d6ff89d9e245e733166d1a6#egg=tortoise-orm
|
||||
typed-ast==1.4.1
|
||||
typing-extensions==3.7.4.2
|
||||
wcwidth==0.1.9
|
@@ -1,10 +0,0 @@
|
||||
aiosqlite==0.13.0
|
||||
anyio==1.3.0
|
||||
async-generator==1.10
|
||||
asyncclick==7.0.9
|
||||
ciso8601==2.1.3; sys_platform != "win32" and implementation_name == "cpython"
|
||||
iso8601==0.1.12; sys_platform == "win32" or implementation_name != "cpython"
|
||||
pydantic==1.5.1
|
||||
pypika==0.37.6
|
||||
sniffio==1.1.0
|
||||
typing-extensions==3.7.4.2
|
47
setup.cfg
@@ -1,47 +1,2 @@
|
||||
[flake8]
|
||||
max-line-length = 100
|
||||
exclude =
|
||||
ignore = E501,W503,DAR101,DAR201,DAR402
|
||||
|
||||
[darglint]
|
||||
docstring_style=sphinx
|
||||
|
||||
[isort]
|
||||
not_skip=__init__.py
|
||||
multi_line_output=3
|
||||
include_trailing_comma=True
|
||||
force_grid_wrap=0
|
||||
use_parentheses=True
|
||||
line_length=100
|
||||
|
||||
[tool:pytest]
|
||||
addopts = -n auto --tb=native -q
|
||||
|
||||
[mypy]
|
||||
pretty = True
|
||||
ignore_missing_imports = True
|
||||
check_untyped_defs = True
|
||||
disallow_subclassing_any = True
|
||||
disallow_untyped_calls = True
|
||||
disallow_untyped_defs = False
|
||||
disallow_incomplete_defs = False
|
||||
disallow_untyped_decorators = True
|
||||
no_implicit_optional = True
|
||||
warn_redundant_casts = True
|
||||
warn_unused_ignores = True
|
||||
warn_no_return = True
|
||||
warn_return_any = False
|
||||
warn_unused_configs = True
|
||||
warn_unreachable = True
|
||||
allow_redefinition = True
|
||||
strict_equality = True
|
||||
show_error_context = True
|
||||
|
||||
[mypy-tests.*]
|
||||
check_untyped_defs = False
|
||||
disallow_untyped_defs = False
|
||||
disallow_incomplete_defs = False
|
||||
warn_unreachable = False
|
||||
|
||||
[mypy-conftest]
|
||||
disallow_untyped_defs = False
|
||||
ignore = E501,W503
|
44
setup.py
@@ -1,44 +0,0 @@
|
||||
import os
|
||||
import re
|
||||
from setuptools import find_packages, setup
|
||||
|
||||
|
||||
def version():
|
||||
ver_str_line = open('aerich/__init__.py', 'rt').read()
|
||||
mob = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", ver_str_line, re.M)
|
||||
if not mob:
|
||||
raise RuntimeError("Unable to find version string")
|
||||
return mob.group(1)
|
||||
|
||||
|
||||
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as f:
|
||||
long_description = f.read()
|
||||
|
||||
|
||||
def requirements():
|
||||
return open('requirements.txt', 'rt').read().splitlines()
|
||||
|
||||
|
||||
setup(
|
||||
name='aerich',
|
||||
version=version(),
|
||||
description='A database migrations tool for Tortoise-ORM.',
|
||||
author='long2ice',
|
||||
long_description_content_type='text/x-rst',
|
||||
long_description=long_description,
|
||||
author_email='long2ice@gmail.com',
|
||||
url='https://github.com/long2ice/aerich',
|
||||
license='MIT License',
|
||||
packages=find_packages(include=['aerich*']),
|
||||
include_package_data=True,
|
||||
zip_safe=True,
|
||||
entry_points={
|
||||
'console_scripts': ['aerich = aerich.cli:main'],
|
||||
},
|
||||
platforms='any',
|
||||
keywords=(
|
||||
'migrate Tortoise-ORM mysql'
|
||||
),
|
||||
dependency_links=['https://github.com/long2ice/tortoise-orm.git@develop#egg=tortoise-orm'],
|
||||
install_requires=requirements(),
|
||||
)
|
@@ -1,6 +0,0 @@
TORTOISE_ORM = {
    "connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"},
    "apps": {
        "models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"}
    },
}
56
tests/diff_models.py
Normal file
@@ -0,0 +1,56 @@
import datetime
from enum import IntEnum

from tortoise import Model, fields


class ProductType(IntEnum):
    article = 1
    page = 2


class PermissionAction(IntEnum):
    create = 1
    delete = 2
    update = 3
    read = 4


class Status(IntEnum):
    on = 1
    off = 0


class User(Model):
    username = fields.CharField(max_length=20,)
    password = fields.CharField(max_length=200)
    last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now)
    is_active = fields.BooleanField(default=True, description="Is Active")
    is_superuser = fields.BooleanField(default=False, description="Is SuperUser")
    avatar = fields.CharField(max_length=200, default="")
    intro = fields.TextField(default="")


class Category(Model):
    slug = fields.CharField(max_length=200)
    user = fields.ForeignKeyField("diff_models.User", description="User")
    created_at = fields.DatetimeField(auto_now_add=True)


class Product(Model):
    categories = fields.ManyToManyField("diff_models.Category")
    name = fields.CharField(max_length=50)
    view_num = fields.IntField(description="View Num")
    sort = fields.IntField()
    is_reviewed = fields.BooleanField(description="Is Reviewed")
    type = fields.IntEnumField(ProductType, description="Product Type")
    image = fields.CharField(max_length=200)
    body = fields.TextField()
    created_at = fields.DatetimeField(auto_now_add=True)


class Config(Model):
    label = fields.CharField(max_length=200)
    key = fields.CharField(max_length=20)
    value = fields.JSONField()
    status: Status = fields.IntEnumField(Status, default=Status.on)
@@ -1,114 +1,183 @@
|
||||
from tortoise import Tortoise
|
||||
from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator
|
||||
from tortoise.backends.mysql.schema_generator import MySQLSchemaGenerator
|
||||
from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator
|
||||
from tortoise.contrib import test
|
||||
|
||||
from aerich.ddl.mysql import MysqlDDL
|
||||
from aerich.ddl.postgres import PostgresDDL
|
||||
from aerich.ddl.sqlite import SqliteDDL
|
||||
from tests.models import Category
|
||||
from aerich.migrate import Migrate
|
||||
from tests.models import Category, User
|
||||
|
||||
|
||||
class TestDDL(test.TruncationTestCase):
|
||||
maxDiff = None
|
||||
|
||||
def setUp(self) -> None:
|
||||
client = Tortoise.get_connection("models")
|
||||
if client.schema_generator is MySQLSchemaGenerator:
|
||||
self.ddl = MysqlDDL(client)
|
||||
elif client.schema_generator is SqliteSchemaGenerator:
|
||||
self.ddl = SqliteDDL(client)
|
||||
elif client.schema_generator is AsyncpgSchemaGenerator:
|
||||
self.ddl = PostgresDDL(client)
|
||||
|
||||
def test_create_table(self):
|
||||
ret = self.ddl.create_table(Category)
|
||||
if isinstance(self.ddl, MysqlDDL):
|
||||
self.assertEqual(
|
||||
ret,
|
||||
"""CREATE TABLE IF NOT EXISTS `category` (
|
||||
def test_create_table():
|
||||
ret = Migrate.ddl.create_table(Category)
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert (
|
||||
ret
|
||||
== """CREATE TABLE IF NOT EXISTS `category` (
|
||||
`id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
|
||||
`slug` VARCHAR(200) NOT NULL,
|
||||
`name` VARCHAR(200) NOT NULL,
|
||||
`created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6),
|
||||
`user_id` INT NOT NULL COMMENT 'User',
|
||||
CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE
|
||||
) CHARACTER SET utf8mb4;""",
|
||||
)
|
||||
elif isinstance(self.ddl, SqliteDDL):
|
||||
self.assertEqual(
|
||||
ret,
|
||||
"""CREATE TABLE IF NOT EXISTS "category" (
|
||||
) CHARACTER SET utf8mb4;"""
|
||||
)
|
||||
|
||||
elif isinstance(Migrate.ddl, SqliteDDL):
|
||||
assert (
|
||||
ret
|
||||
== """CREATE TABLE IF NOT EXISTS "category" (
|
||||
"id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
"slug" VARCHAR(200) NOT NULL,
|
||||
"name" VARCHAR(200) NOT NULL,
|
||||
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE /* User */
|
||||
);""",
|
||||
)
|
||||
elif isinstance(self.ddl, PostgresDDL):
|
||||
self.assertEqual(
|
||||
ret,
|
||||
"""CREATE TABLE IF NOT EXISTS "category" (
|
||||
);"""
|
||||
)
|
||||
|
||||
elif isinstance(Migrate.ddl, PostgresDDL):
|
||||
assert (
|
||||
ret
|
||||
== """CREATE TABLE IF NOT EXISTS "category" (
|
||||
"id" SERIAL NOT NULL PRIMARY KEY,
|
||||
"slug" VARCHAR(200) NOT NULL,
|
||||
"name" VARCHAR(200) NOT NULL,
|
||||
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE
|
||||
);
|
||||
COMMENT ON COLUMN "category"."user_id" IS 'User';""",
|
||||
)
|
||||
|
||||
def test_drop_table(self):
|
||||
ret = self.ddl.drop_table(Category)
|
||||
self.assertEqual(ret, "DROP TABLE IF EXISTS category")
|
||||
|
||||
def test_add_column(self):
|
||||
ret = self.ddl.add_column(Category, Category._meta.fields_map.get("name"))
|
||||
if isinstance(self.ddl, MysqlDDL):
|
||||
self.assertEqual(ret, "ALTER TABLE category ADD `name` VARCHAR(200) NOT NULL")
|
||||
elif isinstance(self.ddl, PostgresDDL):
|
||||
self.assertEqual(ret, 'ALTER TABLE category ADD "name" VARCHAR(200) NOT NULL')
|
||||
elif isinstance(self.ddl, SqliteDDL):
|
||||
self.assertEqual(ret, 'ALTER TABLE category ADD "name" VARCHAR(200) NOT NULL')
|
||||
|
||||
def test_drop_column(self):
|
||||
ret = self.ddl.drop_column(Category, "name")
|
||||
self.assertEqual(ret, "ALTER TABLE category DROP COLUMN name")
|
||||
self.assertEqual(ret, "ALTER TABLE category DROP COLUMN name")
|
||||
|
||||
def test_add_index(self):
|
||||
index = self.ddl.add_index(Category, ["name"])
|
||||
index_u = self.ddl.add_index(Category, ["name"], True)
|
||||
if isinstance(self.ddl, MysqlDDL):
|
||||
self.assertEqual(
|
||||
index, "ALTER TABLE category ADD INDEX idx_category_name_8b0cb9 (`name`)"
|
||||
)
|
||||
self.assertEqual(
|
||||
index_u, "ALTER TABLE category ADD UNIQUE INDEX uid_category_name_8b0cb9 (`name`)"
|
||||
)
|
||||
elif isinstance(self.ddl, SqliteDDL):
|
||||
self.assertEqual(
|
||||
index_u, 'ALTER TABLE category ADD UNIQUE INDEX uid_category_name_8b0cb9 ("name")'
|
||||
)
|
||||
self.assertEqual(
|
||||
index_u, 'ALTER TABLE category ADD UNIQUE INDEX uid_category_name_8b0cb9 ("name")'
|
||||
)
|
||||
|
||||
def test_drop_index(self):
|
||||
ret = self.ddl.drop_index(Category, ["name"])
|
||||
self.assertEqual(ret, "ALTER TABLE category DROP INDEX idx_category_name_8b0cb9")
|
||||
ret = self.ddl.drop_index(Category, ["name"], True)
|
||||
self.assertEqual(ret, "ALTER TABLE category DROP INDEX uid_category_name_8b0cb9")
|
||||
|
||||
def test_add_fk(self):
|
||||
ret = self.ddl.add_fk(Category, Category._meta.fields_map.get("user"))
|
||||
self.assertEqual(
|
||||
ret,
|
||||
"ALTER TABLE category ADD CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE",
|
||||
COMMENT ON COLUMN "category"."user_id" IS 'User';"""
|
||||
)
|
||||
|
||||
def test_drop_fk(self):
|
||||
ret = self.ddl.drop_fk(Category, Category._meta.fields_map.get("user"))
|
||||
self.assertEqual(ret, "ALTER TABLE category DROP FOREIGN KEY fk_category_user_e2e3874c")
|
||||
|
||||
def test_drop_table():
|
||||
ret = Migrate.ddl.drop_table(Category)
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert ret == "DROP TABLE IF EXISTS `category`"
|
||||
else:
|
||||
assert ret == 'DROP TABLE IF EXISTS "category"'
|
||||
|
||||
|
||||
def test_add_column():
|
||||
ret = Migrate.ddl.add_column(Category, Category._meta.fields_map.get("name"))
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert ret == "ALTER TABLE `category` ADD `name` VARCHAR(200) NOT NULL"
|
||||
else:
|
||||
assert ret == 'ALTER TABLE "category" ADD "name" VARCHAR(200) NOT NULL'
|
||||
|
||||
|
||||
def test_modify_column():
|
||||
ret = Migrate.ddl.modify_column(Category, Category._meta.fields_map.get("name"))
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert ret == "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200) NOT NULL"
|
||||
elif isinstance(Migrate.ddl, PostgresDDL):
|
||||
assert ret == 'ALTER TABLE "category" ALTER COLUMN "name" TYPE VARCHAR(200)'
|
||||
else:
|
||||
assert ret == 'ALTER TABLE "category" MODIFY COLUMN "name" VARCHAR(200) NOT NULL'
|
||||
|
||||
ret = Migrate.ddl.modify_column(User, User._meta.fields_map.get("is_active"))
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert (
|
||||
ret
|
||||
== "ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1"
|
||||
)
|
||||
elif isinstance(Migrate.ddl, PostgresDDL):
|
||||
assert ret == 'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL'
|
||||
else:
|
||||
assert (
|
||||
ret
|
||||
== 'ALTER TABLE "user" MODIFY COLUMN "is_active" INT NOT NULL DEFAULT 1 /* Is Active */'
|
||||
)
|
||||
|
||||
|
||||
def test_alter_column_default():
|
||||
ret = Migrate.ddl.alter_column_default(Category, Category._meta.fields_map.get("name"))
|
||||
if isinstance(Migrate.ddl, PostgresDDL):
|
||||
assert ret == 'ALTER TABLE "category" ALTER COLUMN "name" DROP DEFAULT'
|
||||
else:
|
||||
assert ret is None
|
||||
|
||||
ret = Migrate.ddl.alter_column_default(Category, Category._meta.fields_map.get("created_at"))
|
||||
if isinstance(Migrate.ddl, PostgresDDL):
|
||||
assert (
|
||||
ret == 'ALTER TABLE "category" ALTER COLUMN "created_at" SET DEFAULT CURRENT_TIMESTAMP'
|
||||
)
|
||||
else:
|
||||
assert ret is None
|
||||
|
||||
ret = Migrate.ddl.alter_column_default(User, User._meta.fields_map.get("avatar"))
|
||||
if isinstance(Migrate.ddl, PostgresDDL):
|
||||
assert ret == 'ALTER TABLE "user" ALTER COLUMN "avatar" SET DEFAULT \'\''
|
||||
else:
|
||||
assert ret is None
|
||||
|
||||
|
||||
def test_alter_column_null():
|
||||
ret = Migrate.ddl.alter_column_null(Category, Category._meta.fields_map.get("name"))
|
||||
if isinstance(Migrate.ddl, PostgresDDL):
|
||||
assert ret == 'ALTER TABLE "category" ALTER COLUMN "name" SET NOT NULL'
|
||||
else:
|
||||
assert ret is None
|
||||
|
||||
|
||||
def test_set_comment():
|
||||
ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map.get("name"))
|
||||
if isinstance(Migrate.ddl, PostgresDDL):
|
||||
assert ret == 'COMMENT ON COLUMN "category"."name" IS NULL'
|
||||
else:
|
||||
assert ret is None
|
||||
|
||||
ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map.get("user"))
|
||||
if isinstance(Migrate.ddl, PostgresDDL):
|
||||
assert ret == 'COMMENT ON COLUMN "category"."user" IS \'User\''
|
||||
else:
|
||||
assert ret is None
|
||||
|
||||
|
||||
def test_drop_column():
|
||||
ret = Migrate.ddl.drop_column(Category, "name")
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert ret == "ALTER TABLE `category` DROP COLUMN `name`"
|
||||
else:
|
||||
assert ret == 'ALTER TABLE "category" DROP COLUMN "name"'
|
||||
|
||||
|
||||
def test_add_index():
|
||||
index = Migrate.ddl.add_index(Category, ["name"])
|
||||
index_u = Migrate.ddl.add_index(Category, ["name"], True)
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert index == "ALTER TABLE `category` ADD INDEX `idx_category_name_8b0cb9` (`name`)"
|
||||
assert (
|
||||
index_u == "ALTER TABLE `category` ADD UNIQUE INDEX `uid_category_name_8b0cb9` (`name`)"
|
||||
)
|
||||
else:
|
||||
assert index == 'ALTER TABLE "category" ADD INDEX "idx_category_name_8b0cb9" ("name")'
|
||||
assert (
|
||||
index_u == 'ALTER TABLE "category" ADD UNIQUE INDEX "uid_category_name_8b0cb9" ("name")'
|
||||
)
|
||||
|
||||
|
||||
def test_drop_index():
|
||||
ret = Migrate.ddl.drop_index(Category, ["name"])
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert ret == "ALTER TABLE `category` DROP INDEX `idx_category_name_8b0cb9`"
|
||||
else:
|
||||
assert ret == 'ALTER TABLE "category" DROP INDEX "idx_category_name_8b0cb9"'
|
||||
|
||||
|
||||
def test_add_fk():
|
||||
ret = Migrate.ddl.add_fk(Category, Category._meta.fields_map.get("user"))
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert (
|
||||
ret
|
||||
== "ALTER TABLE `category` ADD CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE"
|
||||
)
|
||||
else:
|
||||
assert (
|
||||
ret
|
||||
== 'ALTER TABLE "category" ADD CONSTRAINT "fk_category_user_e2e3874c" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE'
|
||||
)
|
||||
|
||||
|
||||
def test_drop_fk():
|
||||
ret = Migrate.ddl.drop_fk(Category, Category._meta.fields_map.get("user"))
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert ret == "ALTER TABLE `category` DROP FOREIGN KEY `fk_category_user_e2e3874c`"
|
||||
else:
|
||||
assert ret == 'ALTER TABLE "category" DROP FOREIGN KEY "fk_category_user_e2e3874c"'
|
||||
|
30
tests/test_migrate.py
Normal file
@@ -0,0 +1,30 @@
from tortoise import Tortoise

from aerich.ddl.mysql import MysqlDDL
from aerich.migrate import Migrate


def test_migrate():
    apps = Tortoise.apps
    models = apps.get("models")
    diff_models = apps.get("diff_models")
    Migrate.diff_models(diff_models, models)
    Migrate.diff_models(models, diff_models, False)
    if isinstance(Migrate.ddl, MysqlDDL):
        assert Migrate.upgrade_operators == [
            "ALTER TABLE `category` ADD `name` VARCHAR(200) NOT NULL",
            "ALTER TABLE `user` ADD UNIQUE INDEX `uid_user_usernam_9987ab` (`username`)",
        ]
        assert Migrate.downgrade_operators == [
            "ALTER TABLE `category` DROP COLUMN `name`",
            "ALTER TABLE `user` DROP INDEX `uid_user_usernam_9987ab`",
        ]
    else:
        assert Migrate.upgrade_operators == [
            'ALTER TABLE "category" ADD "name" VARCHAR(200) NOT NULL',
            'ALTER TABLE "user" ADD UNIQUE INDEX "uid_user_usernam_9987ab" ("username")',
        ]
        assert Migrate.downgrade_operators == [
            'ALTER TABLE "category" DROP COLUMN "name"',
            'ALTER TABLE "user" DROP INDEX "uid_user_usernam_9987ab"',
        ]
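The expected operators above encode the gap between tests/diff_models.py (added earlier in this diff) and tests/models.py, which is not part of the diff. A hedged, trimmed-down reconstruction of the two tests.models definitions that differ is sketched below; the field lengths follow the DDL strings in the assertions, everything else is assumed.

# Hypothetical reconstruction of the relevant parts of tests/models.py (not shown
# in this diff); only the fields that differ from diff_models are spelled out.
from tortoise import Model, fields


class User(Model):
    # unique=True here, plain CharField in diff_models
    # -> upgrade adds index `uid_user_usernam_9987ab`, downgrade drops it
    username = fields.CharField(max_length=20, unique=True)


class Category(Model):
    slug = fields.CharField(max_length=200)
    # present here, absent in diff_models
    # -> upgrade adds the `name` column, downgrade drops it
    name = fields.CharField(max_length=200)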